From 381d5453b13261c2dc930e429554208d0f3d63b5 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 27 Nov 2024 21:12:42 +0100 Subject: [PATCH 0001/1198] Improve recorder history queries (#131702) * Improve recorder history queries * Remove some comments * Update StatesManager._oldest_ts when adding pending state * Update after review * Improve tests * Improve post-purge logic * Avoid calling dt_util.utc_to_timestamp in new code --------- Co-authored-by: J. Nick Koston --- homeassistant/components/history/__init__.py | 7 ++-- homeassistant/components/history/helpers.py | 13 ++++---- .../components/history/websocket_api.py | 7 ++-- homeassistant/components/recorder/core.py | 1 + .../components/recorder/history/legacy.py | 18 +++++------ .../components/recorder/history/modern.py | 31 +++++++++--------- homeassistant/components/recorder/purge.py | 3 ++ homeassistant/components/recorder/queries.py | 9 ++++++ .../recorder/table_managers/states.py | 32 +++++++++++++++++++ homeassistant/components/recorder/tasks.py | 2 -- tests/components/recorder/test_purge.py | 17 ++++++++++ 11 files changed, 102 insertions(+), 38 deletions(-) diff --git a/homeassistant/components/history/__init__.py b/homeassistant/components/history/__init__.py index 365be06fd2d..7241e1fac9a 100644 --- a/homeassistant/components/history/__init__.py +++ b/homeassistant/components/history/__init__.py @@ -22,7 +22,7 @@ import homeassistant.util.dt as dt_util from . import websocket_api from .const import DOMAIN -from .helpers import entities_may_have_state_changes_after, has_recorder_run_after +from .helpers import entities_may_have_state_changes_after, has_states_before CONF_ORDER = "use_include_order" @@ -107,7 +107,10 @@ class HistoryPeriodView(HomeAssistantView): no_attributes = "no_attributes" in request.query if ( - (end_time and not has_recorder_run_after(hass, end_time)) + # has_states_before will return True if there are states older than + # end_time. If it's false, we know there are no states in the + # database up until end_time. + (end_time and not has_states_before(hass, end_time)) or not include_start_time_state and entity_ids and not entities_may_have_state_changes_after( diff --git a/homeassistant/components/history/helpers.py b/homeassistant/components/history/helpers.py index bd477e7e4ed..2010b7373ff 100644 --- a/homeassistant/components/history/helpers.py +++ b/homeassistant/components/history/helpers.py @@ -6,7 +6,6 @@ from collections.abc import Iterable from datetime import datetime as dt from homeassistant.components.recorder import get_instance -from homeassistant.components.recorder.models import process_timestamp from homeassistant.core import HomeAssistant @@ -26,8 +25,10 @@ def entities_may_have_state_changes_after( return False -def has_recorder_run_after(hass: HomeAssistant, run_time: dt) -> bool: - """Check if the recorder has any runs after a specific time.""" - return run_time >= process_timestamp( - get_instance(hass).recorder_runs_manager.first.start - ) +def has_states_before(hass: HomeAssistant, run_time: dt) -> bool: + """Check if the recorder has states as old or older than run_time. + + Returns True if there may be such states. 
+ """ + oldest_ts = get_instance(hass).states_manager.oldest_ts + return oldest_ts is not None and run_time.timestamp() >= oldest_ts diff --git a/homeassistant/components/history/websocket_api.py b/homeassistant/components/history/websocket_api.py index c85d975c3c9..35f8ed5f1ac 100644 --- a/homeassistant/components/history/websocket_api.py +++ b/homeassistant/components/history/websocket_api.py @@ -39,7 +39,7 @@ from homeassistant.util.async_ import create_eager_task import homeassistant.util.dt as dt_util from .const import EVENT_COALESCE_TIME, MAX_PENDING_HISTORY_STATES -from .helpers import entities_may_have_state_changes_after, has_recorder_run_after +from .helpers import entities_may_have_state_changes_after, has_states_before _LOGGER = logging.getLogger(__name__) @@ -142,7 +142,10 @@ async def ws_get_history_during_period( no_attributes = msg["no_attributes"] if ( - (end_time and not has_recorder_run_after(hass, end_time)) + # has_states_before will return True if there are states older than + # end_time. If it's false, we know there are no states in the + # database up until end_time. + (end_time and not has_states_before(hass, end_time)) or not include_start_time_state and entity_ids and not entities_may_have_state_changes_after( diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py index 6ba64d4a571..8c2e1c9e006 100644 --- a/homeassistant/components/recorder/core.py +++ b/homeassistant/components/recorder/core.py @@ -1424,6 +1424,7 @@ class Recorder(threading.Thread): with session_scope(session=self.get_session()) as session: end_incomplete_runs(session, self.recorder_runs_manager.recording_start) self.recorder_runs_manager.start(session) + self.states_manager.load_from_db(session) self._open_event_session() diff --git a/homeassistant/components/recorder/history/legacy.py b/homeassistant/components/recorder/history/legacy.py index b59fc43c3d0..3a0fe79455b 100644 --- a/homeassistant/components/recorder/history/legacy.py +++ b/homeassistant/components/recorder/history/legacy.py @@ -22,9 +22,9 @@ from homeassistant.core import HomeAssistant, State, split_entity_id from homeassistant.helpers.recorder import get_instance import homeassistant.util.dt as dt_util -from ..db_schema import RecorderRuns, StateAttributes, States +from ..db_schema import StateAttributes, States from ..filters import Filters -from ..models import process_timestamp, process_timestamp_to_utc_isoformat +from ..models import process_timestamp_to_utc_isoformat from ..models.legacy import LegacyLazyState, legacy_row_to_compressed_state from ..util import execute_stmt_lambda_element, session_scope from .const import ( @@ -436,7 +436,7 @@ def get_last_state_changes( def _get_states_for_entities_stmt( - run_start: datetime, + run_start_ts: float, utc_point_in_time: datetime, entity_ids: list[str], no_attributes: bool, @@ -447,7 +447,6 @@ def _get_states_for_entities_stmt( ) # We got an include-list of entities, accelerate the query by filtering already # in the inner query. 
- run_start_ts = process_timestamp(run_start).timestamp() utc_point_in_time_ts = dt_util.utc_to_timestamp(utc_point_in_time) stmt += lambda q: q.join( ( @@ -483,7 +482,7 @@ def _get_rows_with_session( session: Session, utc_point_in_time: datetime, entity_ids: list[str], - run: RecorderRuns | None = None, + *, no_attributes: bool = False, ) -> Iterable[Row]: """Return the states at a specific point in time.""" @@ -495,17 +494,16 @@ def _get_rows_with_session( ), ) - if run is None: - run = get_instance(hass).recorder_runs_manager.get(utc_point_in_time) + oldest_ts = get_instance(hass).states_manager.oldest_ts - if run is None or process_timestamp(run.start) > utc_point_in_time: - # History did not run before utc_point_in_time + if oldest_ts is None or oldest_ts > utc_point_in_time.timestamp(): + # We don't have any states for the requested time return [] # We have more than one entity to look at so we need to do a query on states # since the last recorder run started. stmt = _get_states_for_entities_stmt( - run.start, utc_point_in_time, entity_ids, no_attributes + oldest_ts, utc_point_in_time, entity_ids, no_attributes ) return execute_stmt_lambda_element(session, stmt) diff --git a/homeassistant/components/recorder/history/modern.py b/homeassistant/components/recorder/history/modern.py index b44bec0d0ee..902f1b5dc24 100644 --- a/homeassistant/components/recorder/history/modern.py +++ b/homeassistant/components/recorder/history/modern.py @@ -34,7 +34,6 @@ from ..models import ( LazyState, datetime_to_timestamp_or_none, extract_metadata_ids, - process_timestamp, row_to_compressed_state, ) from ..util import execute_stmt_lambda_element, session_scope @@ -246,9 +245,9 @@ def get_significant_states_with_session( if metadata_id is not None and split_entity_id(entity_id)[0] in SIGNIFICANT_DOMAINS ] - run_start_ts: float | None = None + oldest_ts: float | None = None if include_start_time_state and not ( - run_start_ts := _get_run_start_ts_for_utc_point_in_time(hass, start_time) + oldest_ts := _get_oldest_possible_ts(hass, start_time) ): include_start_time_state = False start_time_ts = dt_util.utc_to_timestamp(start_time) @@ -264,7 +263,7 @@ def get_significant_states_with_session( significant_changes_only, no_attributes, include_start_time_state, - run_start_ts, + oldest_ts, ), track_on=[ bool(single_metadata_id), @@ -411,9 +410,9 @@ def state_changes_during_period( entity_id_to_metadata_id: dict[str, int | None] = { entity_id: single_metadata_id } - run_start_ts: float | None = None + oldest_ts: float | None = None if include_start_time_state and not ( - run_start_ts := _get_run_start_ts_for_utc_point_in_time(hass, start_time) + oldest_ts := _get_oldest_possible_ts(hass, start_time) ): include_start_time_state = False start_time_ts = dt_util.utc_to_timestamp(start_time) @@ -426,7 +425,7 @@ def state_changes_during_period( no_attributes, limit, include_start_time_state, - run_start_ts, + oldest_ts, has_last_reported, ), track_on=[ @@ -600,17 +599,17 @@ def _get_start_time_state_for_entities_stmt( ) -def _get_run_start_ts_for_utc_point_in_time( +def _get_oldest_possible_ts( hass: HomeAssistant, utc_point_in_time: datetime ) -> float | None: - """Return the start time of a run.""" - run = get_instance(hass).recorder_runs_manager.get(utc_point_in_time) - if ( - run is not None - and (run_start := process_timestamp(run.start)) < utc_point_in_time - ): - return run_start.timestamp() - # History did not run before utc_point_in_time but we still + """Return the oldest possible timestamp. 
+ + Returns None if there are no states as old as utc_point_in_time. + """ + + oldest_ts = get_instance(hass).states_manager.oldest_ts + if oldest_ts is not None and oldest_ts < utc_point_in_time.timestamp(): + return oldest_ts return None diff --git a/homeassistant/components/recorder/purge.py b/homeassistant/components/recorder/purge.py index 329f48e5455..28a5a2ed32d 100644 --- a/homeassistant/components/recorder/purge.py +++ b/homeassistant/components/recorder/purge.py @@ -123,6 +123,9 @@ def purge_old_data( _purge_old_entity_ids(instance, session) _purge_old_recorder_runs(instance, session, purge_before) + with session_scope(session=instance.get_session(), read_only=True) as session: + instance.recorder_runs_manager.load_from_db(session) + instance.states_manager.load_from_db(session) if repack: repack_database(instance) return True diff --git a/homeassistant/components/recorder/queries.py b/homeassistant/components/recorder/queries.py index 2e4b588a0b0..8ca7bef2691 100644 --- a/homeassistant/components/recorder/queries.py +++ b/homeassistant/components/recorder/queries.py @@ -637,6 +637,15 @@ def find_states_to_purge( ) +def find_oldest_state() -> StatementLambdaElement: + """Find the last_updated_ts of the oldest state.""" + return lambda_stmt( + lambda: select(States.last_updated_ts).where( + States.state_id.in_(select(func.min(States.state_id))) + ) + ) + + def find_short_term_statistics_to_purge( purge_before: datetime, max_bind_vars: int ) -> StatementLambdaElement: diff --git a/homeassistant/components/recorder/table_managers/states.py b/homeassistant/components/recorder/table_managers/states.py index d5cef759c54..fafcfa0ea61 100644 --- a/homeassistant/components/recorder/table_managers/states.py +++ b/homeassistant/components/recorder/table_managers/states.py @@ -2,7 +2,15 @@ from __future__ import annotations +from collections.abc import Sequence +from typing import Any, cast + +from sqlalchemy.engine.row import Row +from sqlalchemy.orm.session import Session + from ..db_schema import States +from ..queries import find_oldest_state +from ..util import execute_stmt_lambda_element class StatesManager: @@ -13,6 +21,12 @@ class StatesManager: self._pending: dict[str, States] = {} self._last_committed_id: dict[str, int] = {} self._last_reported: dict[int, float] = {} + self._oldest_ts: float | None = None + + @property + def oldest_ts(self) -> float | None: + """Return the oldest timestamp.""" + return self._oldest_ts def pop_pending(self, entity_id: str) -> States | None: """Pop a pending state. @@ -44,6 +58,8 @@ class StatesManager: recorder thread. """ self._pending[entity_id] = state + if self._oldest_ts is None: + self._oldest_ts = state.last_updated_ts def update_pending_last_reported( self, state_id: int, last_reported_timestamp: float @@ -74,6 +90,22 @@ class StatesManager: """ self._last_committed_id.clear() self._pending.clear() + self._oldest_ts = None + + def load_from_db(self, session: Session) -> None: + """Update the cache. + + Must run in the recorder thread. + """ + result = cast( + Sequence[Row[Any]], + execute_stmt_lambda_element(session, find_oldest_state()), + ) + if not result: + ts = None + else: + ts = result[0].last_updated_ts + self._oldest_ts = ts def evict_purged_state_ids(self, purged_state_ids: set[int]) -> None: """Evict purged states from the committed states. 
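The StatesManager changes above are the core of this patch: instead of asking the recorder_runs table which run covered a point in time, the recorder now caches a single oldest-state timestamp that is seeded from the States table at startup (load_from_db), initialised by the first pending state, cleared on reset, and refreshed again after a purge. A minimal standalone sketch of that bookkeeping follows; it is illustrative only, not Home Assistant code, and the class and variable names are made up for the example.

```python
from __future__ import annotations

from datetime import datetime, timezone


class OldestStateCache:
    """Toy model of the oldest-timestamp tracking added to StatesManager."""

    def __init__(self) -> None:
        self._oldest_ts: float | None = None

    def add_pending(self, last_updated_ts: float) -> None:
        # Mirrors StatesManager.add_pending: the first pending state seeds the cache.
        if self._oldest_ts is None:
            self._oldest_ts = last_updated_ts

    def load_from_db(self, db_oldest_ts: float | None) -> None:
        # Mirrors StatesManager.load_from_db: refresh from the States table,
        # done at recorder startup and again after purge_old_data completes.
        self._oldest_ts = db_oldest_ts

    def has_states_before(self, point_in_time: datetime) -> bool:
        # Mirrors history.helpers.has_states_before: True means states at least
        # as old as point_in_time may exist, so the history query should run.
        return (
            self._oldest_ts is not None
            and point_in_time.timestamp() >= self._oldest_ts
        )


cache = OldestStateCache()
cache.load_from_db(datetime(2024, 11, 1, tzinfo=timezone.utc).timestamp())
print(cache.has_states_before(datetime(2024, 11, 27, tzinfo=timezone.utc)))  # True
print(cache.has_states_before(datetime(2024, 10, 1, tzinfo=timezone.utc)))  # False
```

Because the cached value only has to answer "could states this old exist?", the history views and websocket API can short-circuit with one float comparison, which is why the per-query recorder_runs_manager lookups in history/legacy.py and history/modern.py are dropped, and why the cache refresh moves from PurgeTask into purge_old_data itself in the hunks that follow.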
diff --git a/homeassistant/components/recorder/tasks.py b/homeassistant/components/recorder/tasks.py index 783f0a80b8e..fa10c12aa68 100644 --- a/homeassistant/components/recorder/tasks.py +++ b/homeassistant/components/recorder/tasks.py @@ -120,8 +120,6 @@ class PurgeTask(RecorderTask): if purge.purge_old_data( instance, self.purge_before, self.repack, self.apply_filter ): - with instance.get_session() as session: - instance.recorder_runs_manager.load_from_db(session) # We always need to do the db cleanups after a purge # is finished to ensure the WAL checkpoint and other # tasks happen after a vacuum. diff --git a/tests/components/recorder/test_purge.py b/tests/components/recorder/test_purge.py index ca160e5201b..f721a260c14 100644 --- a/tests/components/recorder/test_purge.py +++ b/tests/components/recorder/test_purge.py @@ -112,6 +112,9 @@ async def test_purge_big_database(hass: HomeAssistant, recorder_mock: Recorder) async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test deleting old states.""" + assert recorder_mock.states_manager.oldest_ts is None + oldest_ts = recorder_mock.states_manager.oldest_ts + await _add_test_states(hass) # make sure we start with 6 states @@ -127,6 +130,10 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> events = session.query(Events).filter(Events.event_type == "state_changed") assert events.count() == 0 + assert recorder_mock.states_manager.oldest_ts != oldest_ts + assert recorder_mock.states_manager.oldest_ts == states[0].last_updated_ts + oldest_ts = recorder_mock.states_manager.oldest_ts + assert "test.recorder2" in recorder_mock.states_manager._last_committed_id purge_before = dt_util.utcnow() - timedelta(days=4) @@ -140,6 +147,8 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> repack=False, ) assert not finished + # states_manager.oldest_ts is not updated until after the purge is complete + assert recorder_mock.states_manager.oldest_ts == oldest_ts with session_scope(hass=hass) as session: states = session.query(States) @@ -162,6 +171,8 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> finished = purge_old_data(recorder_mock, purge_before, repack=False) assert finished + # states_manager.oldest_ts should now be updated + assert recorder_mock.states_manager.oldest_ts != oldest_ts with session_scope(hass=hass) as session: states = session.query(States) @@ -169,6 +180,10 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> assert states.count() == 2 assert state_attributes.count() == 1 + assert recorder_mock.states_manager.oldest_ts != oldest_ts + assert recorder_mock.states_manager.oldest_ts == states[0].last_updated_ts + oldest_ts = recorder_mock.states_manager.oldest_ts + assert "test.recorder2" in recorder_mock.states_manager._last_committed_id # run purge_old_data again @@ -181,6 +196,8 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> repack=False, ) assert not finished + # states_manager.oldest_ts is not updated until after the purge is complete + assert recorder_mock.states_manager.oldest_ts == oldest_ts with session_scope(hass=hass) as session: assert states.count() == 0 From 1635074aae275b9fb3377e4ffadfd674db3311b5 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 27 Nov 2024 12:15:44 -0800 Subject: [PATCH 0002/1198] Bump aiohttp to 3.11.8 (#131744) --- homeassistant/components/http/__init__.py | 3 ++- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 4 files changed, 5 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/http/__init__.py b/homeassistant/components/http/__init__.py index 3b18b44862a..95cdee9ab9e 100644 --- a/homeassistant/components/http/__init__.py +++ b/homeassistant/components/http/__init__.py @@ -326,7 +326,8 @@ class HomeAssistantApplication(web.Application): protocol, writer, task, - loop=self._loop, + # loop will never be None when called from aiohttp + loop=self._loop, # type: ignore[arg-type] client_max_size=self._client_max_size, ) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index a4beb141911..0819990cffc 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -5,7 +5,7 @@ aiodiscover==2.1.0 aiodns==3.2.0 aiohasupervisor==0.2.1 aiohttp-fast-zlib==0.2.0 -aiohttp==3.11.7 +aiohttp==3.11.8 aiohttp_cors==0.7.0 aiozoneinfo==0.2.1 astral==2.2 diff --git a/pyproject.toml b/pyproject.toml index e281a2429d0..aa74fa8d77c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,7 +29,7 @@ dependencies = [ # change behavior based on presence of supervisor. Deprecated with #127228 # Lib can be removed with 2025.11 "aiohasupervisor==0.2.1", - "aiohttp==3.11.7", + "aiohttp==3.11.8", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.2.0", "aiozoneinfo==0.2.1", diff --git a/requirements.txt b/requirements.txt index 5ca03592107..28034d80394 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,7 +5,7 @@ # Home Assistant Core aiodns==3.2.0 aiohasupervisor==0.2.1 -aiohttp==3.11.7 +aiohttp==3.11.8 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.2.0 aiozoneinfo==0.2.1 From 7110df04e699f9bce0f1b29b52dc517c70bc578c Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Wed, 27 Nov 2024 22:32:56 +0100 Subject: [PATCH 0003/1198] Bump version to 2025.1.0dev0 (#131751) --- .github/workflows/ci.yaml | 2 +- homeassistant/const.py | 4 ++-- pyproject.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index b9e5b91aff2..a1840dc4ead 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -40,7 +40,7 @@ env: CACHE_VERSION: 11 UV_CACHE_VERSION: 1 MYPY_CACHE_VERSION: 9 - HA_SHORT_VERSION: "2024.12" + HA_SHORT_VERSION: "2025.1" DEFAULT_PYTHON: "3.12" ALL_PYTHON_VERSIONS: "['3.12', '3.13']" # 10.3 is the oldest supported version diff --git a/homeassistant/const.py b/homeassistant/const.py index 514c2154611..7d17e8f7779 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -23,8 +23,8 @@ if TYPE_CHECKING: from .helpers.typing import NoEventData APPLICATION_NAME: Final = "HomeAssistant" -MAJOR_VERSION: Final = 2024 -MINOR_VERSION: Final = 12 +MAJOR_VERSION: Final = 2025 +MINOR_VERSION: Final = 1 PATCH_VERSION: Final = "0.dev0" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" diff --git a/pyproject.toml b/pyproject.toml index aa74fa8d77c..1b762dc0987 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.12.0.dev0" +version = "2025.1.0.dev0" license = {text = "Apache-2.0"} description = 
"Open-source home automation platform running on Python 3." readme = "README.rst" From c82e408138b61677054579b4c3dde2179f6b4b86 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Wed, 27 Nov 2024 13:36:17 -0800 Subject: [PATCH 0004/1198] Add a missing rainbird data description (#131740) --- homeassistant/components/rainbird/strings.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/homeassistant/components/rainbird/strings.json b/homeassistant/components/rainbird/strings.json index 25d3a962b36..6f92b1bdb97 100644 --- a/homeassistant/components/rainbird/strings.json +++ b/homeassistant/components/rainbird/strings.json @@ -40,6 +40,9 @@ "title": "[%key:component::rainbird::config::step::user::title%]", "data": { "duration": "Default irrigation time in minutes" + }, + "data_description": { + "duration": "The default duration the sprinkler will run when turned on." } } } From 44fc5c78717f389732ad84ba23e28d5a76aa1e32 Mon Sep 17 00:00:00 2001 From: Josef Zweck <24647999+zweckj@users.noreply.github.com> Date: Wed, 27 Nov 2024 22:37:15 +0100 Subject: [PATCH 0005/1198] Add missing data_description for lamarzocco OptionsFlow (#131708) --- homeassistant/components/lamarzocco/strings.json | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/lamarzocco/strings.json b/homeassistant/components/lamarzocco/strings.json index f98d5c2a700..666eb7f4a84 100644 --- a/homeassistant/components/lamarzocco/strings.json +++ b/homeassistant/components/lamarzocco/strings.json @@ -67,8 +67,10 @@ "step": { "init": { "data": { - "title": "Update Configuration", "use_bluetooth": "Use Bluetooth" + }, + "data_description": { + "use_bluetooth": "Should the integration try to use Bluetooth to control the machine?" } } } From fb4d86196e7504d39b066efc01e78e3148cefaf1 Mon Sep 17 00:00:00 2001 From: Josef Zweck <24647999+zweckj@users.noreply.github.com> Date: Wed, 27 Nov 2024 22:55:33 +0100 Subject: [PATCH 0006/1198] Bump pylamarzocco to 1.2.12 (#131765) --- homeassistant/components/lamarzocco/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lamarzocco/manifest.json b/homeassistant/components/lamarzocco/manifest.json index a71da7c4754..43b1c7deb47 100644 --- a/homeassistant/components/lamarzocco/manifest.json +++ b/homeassistant/components/lamarzocco/manifest.json @@ -36,5 +36,5 @@ "integration_type": "device", "iot_class": "cloud_polling", "loggers": ["pylamarzocco"], - "requirements": ["pylamarzocco==1.2.11"] + "requirements": ["pylamarzocco==1.2.12"] } diff --git a/requirements_all.txt b/requirements_all.txt index 5decd2975fd..2f51df32446 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2027,7 +2027,7 @@ pykwb==0.0.8 pylacrosse==0.4 # homeassistant.components.lamarzocco -pylamarzocco==1.2.11 +pylamarzocco==1.2.12 # homeassistant.components.lastfm pylast==5.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3f824a1f212..3f8434734f2 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1632,7 +1632,7 @@ pykrakenapi==0.1.8 pykulersky==0.5.2 # homeassistant.components.lamarzocco -pylamarzocco==1.2.11 +pylamarzocco==1.2.12 # homeassistant.components.lastfm pylast==5.1.0 From 6edb2c0252296996af96a09270d026128ee3731e Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 27 Nov 2024 13:55:51 -0800 Subject: [PATCH 0007/1198] Bump uiprotect to 6.6.3 (#131764) --- homeassistant/components/unifiprotect/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/unifiprotect/manifest.json b/homeassistant/components/unifiprotect/manifest.json index a8ad956a667..9a76ba6f984 100644 --- a/homeassistant/components/unifiprotect/manifest.json +++ b/homeassistant/components/unifiprotect/manifest.json @@ -40,7 +40,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["uiprotect", "unifi_discovery"], - "requirements": ["uiprotect==6.6.2", "unifi-discovery==1.2.0"], + "requirements": ["uiprotect==6.6.3", "unifi-discovery==1.2.0"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/requirements_all.txt b/requirements_all.txt index 2f51df32446..fcbfdf91a59 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2897,7 +2897,7 @@ typedmonarchmoney==0.3.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.6.2 +uiprotect==6.6.3 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3f8434734f2..cbfb6abf5ea 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2310,7 +2310,7 @@ typedmonarchmoney==0.3.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.6.2 +uiprotect==6.6.3 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 From cf7acb5ae87076010ca533ac107e388e3d0bc44d Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 27 Nov 2024 15:29:29 -0800 Subject: [PATCH 0008/1198] Bump aioesphomeapi to 27.0.3 (#131773) --- homeassistant/components/esphome/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/esphome/manifest.json b/homeassistant/components/esphome/manifest.json index 5524e87e2a8..77a3164d94c 100644 --- a/homeassistant/components/esphome/manifest.json +++ b/homeassistant/components/esphome/manifest.json @@ -16,7 +16,7 @@ "loggers": ["aioesphomeapi", "noiseprotocol", "bleak_esphome"], "mqtt": ["esphome/discover/#"], "requirements": [ - "aioesphomeapi==27.0.2", + "aioesphomeapi==27.0.3", "esphome-dashboard-api==1.2.3", "bleak-esphome==1.1.0" ], diff --git a/requirements_all.txt b/requirements_all.txt index fcbfdf91a59..a047d286fa1 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -243,7 +243,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==27.0.2 +aioesphomeapi==27.0.3 # homeassistant.components.flo aioflo==2021.11.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index cbfb6abf5ea..4e69fdcccf3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -231,7 +231,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==27.0.2 +aioesphomeapi==27.0.3 # homeassistant.components.flo aioflo==2021.11.0 From cc9a97a5cf08c6b31c4d9235df95106bc8d4c190 Mon Sep 17 00:00:00 2001 From: Marcel van der Veldt Date: Thu, 28 Nov 2024 01:34:36 +0100 Subject: [PATCH 0009/1198] Bump music assistant client 1.0.8 (#131739) --- homeassistant/components/music_assistant/manifest.json | 2 +- homeassistant/components/music_assistant/media_player.py | 6 +++--- requirements_all.txt | 2 +- requirements_test_all.txt 
| 2 +- tests/components/music_assistant/fixtures/players.json | 6 +++--- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/music_assistant/manifest.json b/homeassistant/components/music_assistant/manifest.json index 65e6652407f..f5cdcf50673 100644 --- a/homeassistant/components/music_assistant/manifest.json +++ b/homeassistant/components/music_assistant/manifest.json @@ -7,6 +7,6 @@ "documentation": "https://www.home-assistant.io/integrations/music_assistant", "iot_class": "local_push", "loggers": ["music_assistant"], - "requirements": ["music-assistant-client==1.0.5"], + "requirements": ["music-assistant-client==1.0.8"], "zeroconf": ["_mass._tcp.local."] } diff --git a/homeassistant/components/music_assistant/media_player.py b/homeassistant/components/music_assistant/media_player.py index 07d6ddeee03..d1d707c92e1 100644 --- a/homeassistant/components/music_assistant/media_player.py +++ b/homeassistant/components/music_assistant/media_player.py @@ -193,7 +193,7 @@ class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity): super().__init__(mass, player_id) self._attr_icon = self.player.icon.replace("mdi-", "mdi:") self._attr_supported_features = SUPPORTED_FEATURES - if PlayerFeature.SYNC in self.player.supported_features: + if PlayerFeature.SET_MEMBERS in self.player.supported_features: self._attr_supported_features |= MediaPlayerEntityFeature.GROUPING if PlayerFeature.VOLUME_MUTE in self.player.supported_features: self._attr_supported_features |= MediaPlayerEntityFeature.VOLUME_MUTE @@ -407,12 +407,12 @@ class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity): if (mass_player_id := hass_state.attributes.get("mass_player_id")) is None: continue player_ids.append(mass_player_id) - await self.mass.players.player_command_sync_many(self.player_id, player_ids) + await self.mass.players.player_command_group_many(self.player_id, player_ids) @catch_musicassistant_error async def async_unjoin_player(self) -> None: """Remove this player from any group.""" - await self.mass.players.player_command_unsync(self.player_id) + await self.mass.players.player_command_ungroup(self.player_id) @catch_musicassistant_error async def _async_handle_play_media( diff --git a/requirements_all.txt b/requirements_all.txt index a047d286fa1..2284fa386b5 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1409,7 +1409,7 @@ mozart-api==4.1.1.116.3 mullvad-api==1.0.0 # homeassistant.components.music_assistant -music-assistant-client==1.0.5 +music-assistant-client==1.0.8 # homeassistant.components.tts mutagen==1.47.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4e69fdcccf3..27ffd600131 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1178,7 +1178,7 @@ mozart-api==4.1.1.116.3 mullvad-api==1.0.0 # homeassistant.components.music_assistant -music-assistant-client==1.0.5 +music-assistant-client==1.0.8 # homeassistant.components.tts mutagen==1.47.0 diff --git a/tests/components/music_assistant/fixtures/players.json b/tests/components/music_assistant/fixtures/players.json index b7ff304a7ee..2d8b88d0e8e 100644 --- a/tests/components/music_assistant/fixtures/players.json +++ b/tests/components/music_assistant/fixtures/players.json @@ -16,7 +16,7 @@ "volume_set", "volume_mute", "pause", - "sync", + "set_members", "power", "enqueue" ], @@ -57,7 +57,7 @@ "volume_set", "volume_mute", "pause", - "sync", + "set_members", "power", "enqueue" ], @@ -109,7 +109,7 @@ "volume_set", "volume_mute", "pause", - "sync", + 
"set_members", "power", "enqueue" ], From f61a5b78ccdbdbad69a0272257335c788e704101 Mon Sep 17 00:00:00 2001 From: puddly <32534428+puddly@users.noreply.github.com> Date: Wed, 27 Nov 2024 19:34:57 -0500 Subject: [PATCH 0010/1198] Bump ZHA to 0.0.41 (#131776) --- homeassistant/components/zha/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index ded37fc4713..1fbbd83bb9c 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -21,7 +21,7 @@ "zha", "universal_silabs_flasher" ], - "requirements": ["universal-silabs-flasher==0.0.25", "zha==0.0.40"], + "requirements": ["universal-silabs-flasher==0.0.25", "zha==0.0.41"], "usb": [ { "vid": "10C4", diff --git a/requirements_all.txt b/requirements_all.txt index 2284fa386b5..cc6ada55f72 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3081,7 +3081,7 @@ zeroconf==0.136.2 zeversolar==0.3.2 # homeassistant.components.zha -zha==0.0.40 +zha==0.0.41 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.13 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 27ffd600131..452e6143c34 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2464,7 +2464,7 @@ zeroconf==0.136.2 zeversolar==0.3.2 # homeassistant.components.zha -zha==0.0.40 +zha==0.0.41 # homeassistant.components.zwave_js zwave-js-server-python==0.59.1 From bf4d6d20293395b793f7d1978c0d33e6c5113c4c Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Thu, 28 Nov 2024 01:35:23 +0100 Subject: [PATCH 0011/1198] Fix rounding of attributes in Habitica integration (#131772) --- homeassistant/components/habitica/util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/habitica/util.py b/homeassistant/components/habitica/util.py index 03acb08baf9..b2b4430c490 100644 --- a/homeassistant/components/habitica/util.py +++ b/homeassistant/components/habitica/util.py @@ -174,7 +174,7 @@ def get_attribute_points( ) return { - "level": min(round(user["stats"]["lvl"] / 2), 50), + "level": min(floor(user["stats"]["lvl"] / 2), 50), "equipment": equipment, "class": class_bonus, "allocated": user["stats"][attribute], From eac6673c2bcc44dc60fb8acef278a68208a61867 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 27 Nov 2024 16:35:49 -0800 Subject: [PATCH 0012/1198] Bump orjson to 3.10.12 (#131752) changelog: https://github.com/ijl/orjson/compare/3.10.11...3.10.12 --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 0819990cffc..691d80f31bf 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -41,7 +41,7 @@ ifaddr==0.2.0 Jinja2==3.1.4 lru-dict==1.3.0 mutagen==1.47.0 -orjson==3.10.11 +orjson==3.10.12 packaging>=23.1 paho-mqtt==1.6.1 Pillow==11.0.0 diff --git a/pyproject.toml b/pyproject.toml index 1b762dc0987..4bf14a36948 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -59,7 +59,7 @@ dependencies = [ "Pillow==11.0.0", "propcache==0.2.0", "pyOpenSSL==24.2.1", - "orjson==3.10.11", + "orjson==3.10.12", "packaging>=23.1", "psutil-home-assistant==0.0.1", "python-slugify==8.0.4", diff --git a/requirements.txt b/requirements.txt index 28034d80394..2cbdeb14b98 100644 --- a/requirements.txt +++ b/requirements.txt @@ -30,7 +30,7 @@ cryptography==43.0.1 Pillow==11.0.0 propcache==0.2.0 pyOpenSSL==24.2.1 -orjson==3.10.11 +orjson==3.10.12 packaging>=23.1 psutil-home-assistant==0.0.1 python-slugify==8.0.4 From 0f5e0dd4bfb25b68828ea302a8e2ce3af80e2aef Mon Sep 17 00:00:00 2001 From: TheJulianJES Date: Thu, 28 Nov 2024 08:06:31 +0100 Subject: [PATCH 0013/1198] Fix Home Connect microwave programs (#131782) --- homeassistant/components/home_connect/select.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/home_connect/select.py b/homeassistant/components/home_connect/select.py index 172b959b145..fdd1f38bf97 100644 --- a/homeassistant/components/home_connect/select.py +++ b/homeassistant/components/home_connect/select.py @@ -140,12 +140,12 @@ TRANSLATION_KEYS_PROGRAMS_MAP = { "Cooking.Oven.Program.HeatingMode.HotAir80Steam", "Cooking.Oven.Program.HeatingMode.HotAir100Steam", "Cooking.Oven.Program.HeatingMode.SabbathProgramme", - "Cooking.Oven.Program.Microwave90Watt", - "Cooking.Oven.Program.Microwave180Watt", - "Cooking.Oven.Program.Microwave360Watt", - "Cooking.Oven.Program.Microwave600Watt", - "Cooking.Oven.Program.Microwave900Watt", - "Cooking.Oven.Program.Microwave1000Watt", + "Cooking.Oven.Program.Microwave.90Watt", + "Cooking.Oven.Program.Microwave.180Watt", + "Cooking.Oven.Program.Microwave.360Watt", + "Cooking.Oven.Program.Microwave.600Watt", + "Cooking.Oven.Program.Microwave.900Watt", + "Cooking.Oven.Program.Microwave.1000Watt", "Cooking.Oven.Program.Microwave.Max", "Cooking.Oven.Program.HeatingMode.WarmingDrawer", "LaundryCare.Washer.Program.Cotton", From 39c2a529d129475c5c2ec3def1a1954180f8bd4b Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Thu, 28 Nov 2024 08:07:19 +0100 Subject: [PATCH 0014/1198] Remove Spotify audio feature sensors (#131754) --- homeassistant/components/spotify/__init__.py | 2 +- .../components/spotify/coordinator.py | 22 - homeassistant/components/spotify/icons.json | 35 -- homeassistant/components/spotify/sensor.py | 179 ------ homeassistant/components/spotify/strings.json | 41 -- tests/components/spotify/conftest.py | 2 - .../spotify/fixtures/audio_features.json | 20 - .../spotify/snapshots/test_diagnostics.ambr | 14 - .../spotify/snapshots/test_sensor.ambr | 595 ------------------ tests/components/spotify/test_sensor.py | 66 -- 10 files changed, 1 insertion(+), 975 
deletions(-) delete mode 100644 homeassistant/components/spotify/sensor.py delete mode 100644 tests/components/spotify/fixtures/audio_features.json delete mode 100644 tests/components/spotify/snapshots/test_sensor.ambr delete mode 100644 tests/components/spotify/test_sensor.py diff --git a/homeassistant/components/spotify/__init__.py b/homeassistant/components/spotify/__init__.py index cfcc9011b37..37580ac432d 100644 --- a/homeassistant/components/spotify/__init__.py +++ b/homeassistant/components/spotify/__init__.py @@ -29,7 +29,7 @@ from .util import ( spotify_uri_from_media_browser_url, ) -PLATFORMS = [Platform.MEDIA_PLAYER, Platform.SENSOR] +PLATFORMS = [Platform.MEDIA_PLAYER] __all__ = [ "async_browse_media", diff --git a/homeassistant/components/spotify/coordinator.py b/homeassistant/components/spotify/coordinator.py index 9e62d5f137e..a7c95e31245 100644 --- a/homeassistant/components/spotify/coordinator.py +++ b/homeassistant/components/spotify/coordinator.py @@ -7,14 +7,12 @@ from typing import TYPE_CHECKING from spotifyaio import ( ContextType, - ItemType, PlaybackState, Playlist, SpotifyClient, SpotifyConnectionError, UserProfile, ) -from spotifyaio.models import AudioFeatures from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -39,7 +37,6 @@ class SpotifyCoordinatorData: current_playback: PlaybackState | None position_updated_at: datetime | None playlist: Playlist | None - audio_features: AudioFeatures | None dj_playlist: bool = False @@ -65,7 +62,6 @@ class SpotifyCoordinator(DataUpdateCoordinator[SpotifyCoordinatorData]): ) self.client = client self._playlist: Playlist | None = None - self._currently_loaded_track: str | None = None async def _async_setup(self) -> None: """Set up the coordinator.""" @@ -84,28 +80,11 @@ class SpotifyCoordinator(DataUpdateCoordinator[SpotifyCoordinatorData]): current_playback=None, position_updated_at=None, playlist=None, - audio_features=None, ) # Record the last updated time, because Spotify's timestamp property is unreliable # and doesn't actually return the fetch time as is mentioned in the API description position_updated_at = dt_util.utcnow() - audio_features: AudioFeatures | None = None - if (item := current.item) is not None and item.type == ItemType.TRACK: - if item.uri != self._currently_loaded_track: - try: - audio_features = await self.client.get_audio_features(item.uri) - except SpotifyConnectionError: - _LOGGER.debug( - "Unable to load audio features for track '%s'. 
" - "Continuing without audio features", - item.uri, - ) - audio_features = None - else: - self._currently_loaded_track = item.uri - else: - audio_features = self.data.audio_features dj_playlist = False if (context := current.context) is not None: if self._playlist is None or self._playlist.uri != context.uri: @@ -128,6 +107,5 @@ class SpotifyCoordinator(DataUpdateCoordinator[SpotifyCoordinatorData]): current_playback=current, position_updated_at=position_updated_at, playlist=self._playlist, - audio_features=audio_features, dj_playlist=dj_playlist, ) diff --git a/homeassistant/components/spotify/icons.json b/homeassistant/components/spotify/icons.json index e1b08127e43..00c63141eae 100644 --- a/homeassistant/components/spotify/icons.json +++ b/homeassistant/components/spotify/icons.json @@ -4,41 +4,6 @@ "spotify": { "default": "mdi:spotify" } - }, - "sensor": { - "song_tempo": { - "default": "mdi:metronome" - }, - "danceability": { - "default": "mdi:dance-ballroom" - }, - "energy": { - "default": "mdi:lightning-bolt" - }, - "mode": { - "default": "mdi:music" - }, - "speechiness": { - "default": "mdi:speaker-message" - }, - "acousticness": { - "default": "mdi:guitar-acoustic" - }, - "instrumentalness": { - "default": "mdi:guitar-electric" - }, - "valence": { - "default": "mdi:emoticon-happy" - }, - "liveness": { - "default": "mdi:music-note" - }, - "time_signature": { - "default": "mdi:music-clef-treble" - }, - "key": { - "default": "mdi:music-clef-treble" - } } } } diff --git a/homeassistant/components/spotify/sensor.py b/homeassistant/components/spotify/sensor.py deleted file mode 100644 index 3486a911b0d..00000000000 --- a/homeassistant/components/spotify/sensor.py +++ /dev/null @@ -1,179 +0,0 @@ -"""Sensor platform for Spotify.""" - -from collections.abc import Callable -from dataclasses import dataclass - -from spotifyaio.models import AudioFeatures, Key - -from homeassistant.components.sensor import ( - SensorDeviceClass, - SensorEntity, - SensorEntityDescription, -) -from homeassistant.const import PERCENTAGE -from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from .coordinator import SpotifyConfigEntry, SpotifyCoordinator -from .entity import SpotifyEntity - - -@dataclass(frozen=True, kw_only=True) -class SpotifyAudioFeaturesSensorEntityDescription(SensorEntityDescription): - """Describes Spotify sensor entity.""" - - value_fn: Callable[[AudioFeatures], float | str | None] - - -KEYS: dict[Key, str] = { - Key.C: "C", - Key.C_SHARP_D_FLAT: "C♯/D♭", - Key.D: "D", - Key.D_SHARP_E_FLAT: "D♯/E♭", - Key.E: "E", - Key.F: "F", - Key.F_SHARP_G_FLAT: "F♯/G♭", - Key.G: "G", - Key.G_SHARP_A_FLAT: "G♯/A♭", - Key.A: "A", - Key.A_SHARP_B_FLAT: "A♯/B♭", - Key.B: "B", -} - -KEY_OPTIONS = list(KEYS.values()) - - -def _get_key(audio_features: AudioFeatures) -> str | None: - if audio_features.key is None: - return None - return KEYS[audio_features.key] - - -AUDIO_FEATURE_SENSORS: tuple[SpotifyAudioFeaturesSensorEntityDescription, ...] 
= ( - SpotifyAudioFeaturesSensorEntityDescription( - key="bpm", - translation_key="song_tempo", - native_unit_of_measurement="bpm", - suggested_display_precision=0, - value_fn=lambda audio_features: audio_features.tempo, - ), - SpotifyAudioFeaturesSensorEntityDescription( - key="danceability", - translation_key="danceability", - native_unit_of_measurement=PERCENTAGE, - suggested_display_precision=0, - value_fn=lambda audio_features: audio_features.danceability * 100, - entity_registry_enabled_default=False, - ), - SpotifyAudioFeaturesSensorEntityDescription( - key="energy", - translation_key="energy", - native_unit_of_measurement=PERCENTAGE, - suggested_display_precision=0, - value_fn=lambda audio_features: audio_features.energy * 100, - entity_registry_enabled_default=False, - ), - SpotifyAudioFeaturesSensorEntityDescription( - key="mode", - translation_key="mode", - device_class=SensorDeviceClass.ENUM, - options=["major", "minor"], - value_fn=lambda audio_features: audio_features.mode.name.lower(), - entity_registry_enabled_default=False, - ), - SpotifyAudioFeaturesSensorEntityDescription( - key="speechiness", - translation_key="speechiness", - native_unit_of_measurement=PERCENTAGE, - suggested_display_precision=0, - value_fn=lambda audio_features: audio_features.speechiness * 100, - entity_registry_enabled_default=False, - ), - SpotifyAudioFeaturesSensorEntityDescription( - key="acousticness", - translation_key="acousticness", - native_unit_of_measurement=PERCENTAGE, - suggested_display_precision=0, - value_fn=lambda audio_features: audio_features.acousticness * 100, - entity_registry_enabled_default=False, - ), - SpotifyAudioFeaturesSensorEntityDescription( - key="instrumentalness", - translation_key="instrumentalness", - native_unit_of_measurement=PERCENTAGE, - suggested_display_precision=0, - value_fn=lambda audio_features: audio_features.instrumentalness * 100, - entity_registry_enabled_default=False, - ), - SpotifyAudioFeaturesSensorEntityDescription( - key="liveness", - translation_key="liveness", - native_unit_of_measurement=PERCENTAGE, - suggested_display_precision=0, - value_fn=lambda audio_features: audio_features.liveness * 100, - entity_registry_enabled_default=False, - ), - SpotifyAudioFeaturesSensorEntityDescription( - key="valence", - translation_key="valence", - native_unit_of_measurement=PERCENTAGE, - suggested_display_precision=0, - value_fn=lambda audio_features: audio_features.valence * 100, - entity_registry_enabled_default=False, - ), - SpotifyAudioFeaturesSensorEntityDescription( - key="time_signature", - translation_key="time_signature", - device_class=SensorDeviceClass.ENUM, - options=["3/4", "4/4", "5/4", "6/4", "7/4"], - value_fn=lambda audio_features: f"{audio_features.time_signature}/4", - entity_registry_enabled_default=False, - ), - SpotifyAudioFeaturesSensorEntityDescription( - key="key", - translation_key="key", - device_class=SensorDeviceClass.ENUM, - options=KEY_OPTIONS, - value_fn=_get_key, - entity_registry_enabled_default=False, - ), -) - - -async def async_setup_entry( - hass: HomeAssistant, - entry: SpotifyConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up Spotify sensor based on a config entry.""" - coordinator = entry.runtime_data.coordinator - - async_add_entities( - SpotifyAudioFeatureSensor(coordinator, description) - for description in AUDIO_FEATURE_SENSORS - ) - - -class SpotifyAudioFeatureSensor(SpotifyEntity, SensorEntity): - """Representation of a Spotify sensor.""" - - entity_description: 
SpotifyAudioFeaturesSensorEntityDescription - - def __init__( - self, - coordinator: SpotifyCoordinator, - entity_description: SpotifyAudioFeaturesSensorEntityDescription, - ) -> None: - """Initialize.""" - super().__init__(coordinator) - self._attr_unique_id = ( - f"{coordinator.current_user.user_id}_{entity_description.key}" - ) - self.entity_description = entity_description - - @property - def native_value(self) -> float | str | None: - """Return the state of the sensor.""" - if (audio_features := self.coordinator.data.audio_features) is None: - return None - return self.entity_description.value_fn(audio_features) diff --git a/homeassistant/components/spotify/strings.json b/homeassistant/components/spotify/strings.json index faf20d740d9..90e573a1706 100644 --- a/homeassistant/components/spotify/strings.json +++ b/homeassistant/components/spotify/strings.json @@ -30,46 +30,5 @@ "info": { "api_endpoint_reachable": "Spotify API endpoint reachable" } - }, - "entity": { - "sensor": { - "song_tempo": { - "name": "Song tempo" - }, - "danceability": { - "name": "Song danceability" - }, - "energy": { - "name": "Song energy" - }, - "mode": { - "name": "Song mode", - "state": { - "minor": "Minor", - "major": "Major" - } - }, - "speechiness": { - "name": "Song speechiness" - }, - "acousticness": { - "name": "Song acousticness" - }, - "instrumentalness": { - "name": "Song instrumentalness" - }, - "valence": { - "name": "Song valence" - }, - "liveness": { - "name": "Song liveness" - }, - "time_signature": { - "name": "Song time signature" - }, - "key": { - "name": "Song key" - } - } } } diff --git a/tests/components/spotify/conftest.py b/tests/components/spotify/conftest.py index d3fc418f1cd..cc1f423246c 100644 --- a/tests/components/spotify/conftest.py +++ b/tests/components/spotify/conftest.py @@ -9,7 +9,6 @@ from spotifyaio.models import ( Album, Artist, ArtistResponse, - AudioFeatures, CategoriesResponse, Category, CategoryPlaylistResponse, @@ -140,7 +139,6 @@ def mock_spotify() -> Generator[AsyncMock]: ("album.json", "get_album", Album), ("artist.json", "get_artist", Artist), ("show.json", "get_show", Show), - ("audio_features.json", "get_audio_features", AudioFeatures), ): getattr(client, method).return_value = obj.from_json( load_fixture(fixture, DOMAIN) diff --git a/tests/components/spotify/fixtures/audio_features.json b/tests/components/spotify/fixtures/audio_features.json deleted file mode 100644 index 52dfee060f7..00000000000 --- a/tests/components/spotify/fixtures/audio_features.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "danceability": 0.696, - "energy": 0.905, - "key": 3, - "loudness": -2.743, - "mode": 1, - "speechiness": 0.103, - "acousticness": 0.011, - "instrumentalness": 0.000905, - "liveness": 0.302, - "valence": 0.625, - "tempo": 114.944, - "type": "audio_features", - "id": "11dFghVXANMlKmJXsNCbNl", - "uri": "spotify:track:11dFghVXANMlKmJXsNCbNl", - "track_href": "https://api.spotify.com/v1/tracks/11dFghVXANMlKmJXsNCbNl", - "analysis_url": "https://api.spotify.com/v1/audio-analysis/11dFghVXANMlKmJXsNCbNl", - "duration_ms": 207960, - "time_signature": 4 -} diff --git a/tests/components/spotify/snapshots/test_diagnostics.ambr b/tests/components/spotify/snapshots/test_diagnostics.ambr index 161b6025ff3..40502562da3 100644 --- a/tests/components/spotify/snapshots/test_diagnostics.ambr +++ b/tests/components/spotify/snapshots/test_diagnostics.ambr @@ -14,20 +14,6 @@ }), ]), 'playback': dict({ - 'audio_features': dict({ - 'acousticness': 0.011, - 'danceability': 0.696, - 'energy': 0.905, 
- 'instrumentalness': 0.000905, - 'key': 3, - 'liveness': 0.302, - 'loudness': -2.743, - 'mode': 1, - 'speechiness': 0.103, - 'tempo': 114.944, - 'time_signature': 4, - 'valence': 0.625, - }), 'current_playback': dict({ 'context': dict({ 'context_type': 'playlist', diff --git a/tests/components/spotify/snapshots/test_sensor.ambr b/tests/components/spotify/snapshots/test_sensor.ambr deleted file mode 100644 index ce77dda479f..00000000000 --- a/tests/components/spotify/snapshots/test_sensor.ambr +++ /dev/null @@ -1,595 +0,0 @@ -# serializer version: 1 -# name: test_entities[sensor.spotify_spotify_1_song_acousticness-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_acousticness', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Song acousticness', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'acousticness', - 'unique_id': '1112264111_acousticness', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_acousticness-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spotify spotify_1 Song acousticness', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_acousticness', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.1', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_danceability-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_danceability', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Song danceability', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'danceability', - 'unique_id': '1112264111_danceability', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_danceability-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spotify spotify_1 Song danceability', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_danceability', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '69.6', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': 
dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Song energy', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'energy', - 'unique_id': '1112264111_energy', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spotify spotify_1 Song energy', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '90.5', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_instrumentalness-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_instrumentalness', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Song instrumentalness', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'instrumentalness', - 'unique_id': '1112264111_instrumentalness', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_instrumentalness-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spotify spotify_1 Song instrumentalness', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_instrumentalness', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0905', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_key-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'C', - 'C♯/D♭', - 'D', - 'D♯/E♭', - 'E', - 'F', - 'F♯/G♭', - 'G', - 'G♯/A♭', - 'A', - 'A♯/B♭', - 'B', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_key', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Song key', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'key', - 'unique_id': '1112264111_key', - 'unit_of_measurement': None, - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_key-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Spotify spotify_1 Song key', - 'options': list([ - 'C', - 'C♯/D♭', - 'D', - 'D♯/E♭', - 'E', - 'F', - 'F♯/G♭', - 'G', - 'G♯/A♭', - 'A', - 'A♯/B♭', - 'B', - ]), - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_key', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'D♯/E♭', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_liveness-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 
'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_liveness', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Song liveness', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'liveness', - 'unique_id': '1112264111_liveness', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_liveness-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spotify spotify_1 Song liveness', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_liveness', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '30.2', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'major', - 'minor', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Song mode', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'mode', - 'unique_id': '1112264111_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Spotify spotify_1 Song mode', - 'options': list([ - 'major', - 'minor', - ]), - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'major', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_speechiness-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_speechiness', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Song speechiness', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'speechiness', - 'unique_id': '1112264111_speechiness', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_speechiness-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spotify spotify_1 Song speechiness', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_speechiness', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10.3', - }) -# --- 
-# name: test_entities[sensor.spotify_spotify_1_song_tempo-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_tempo', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Song tempo', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'song_tempo', - 'unique_id': '1112264111_bpm', - 'unit_of_measurement': 'bpm', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_tempo-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spotify spotify_1 Song tempo', - 'unit_of_measurement': 'bpm', - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_tempo', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '114.944', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_time_signature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - '3/4', - '4/4', - '5/4', - '6/4', - '7/4', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_time_signature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Song time signature', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'time_signature', - 'unique_id': '1112264111_time_signature', - 'unit_of_measurement': None, - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_time_signature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Spotify spotify_1 Song time signature', - 'options': list([ - '3/4', - '4/4', - '5/4', - '6/4', - '7/4', - ]), - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_time_signature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4/4', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_valence-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_valence', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Song valence', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'valence', - 'unique_id': '1112264111_valence', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_valence-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spotify 
spotify_1 Song valence', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_valence', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '62.5', - }) -# --- diff --git a/tests/components/spotify/test_sensor.py b/tests/components/spotify/test_sensor.py deleted file mode 100644 index 11ce361034a..00000000000 --- a/tests/components/spotify/test_sensor.py +++ /dev/null @@ -1,66 +0,0 @@ -"""Tests for the Spotify sensor platform.""" - -from unittest.mock import MagicMock, patch - -import pytest -from spotifyaio import PlaybackState -from syrupy import SnapshotAssertion - -from homeassistant.components.spotify import DOMAIN -from homeassistant.const import STATE_UNKNOWN, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry, load_fixture, snapshot_platform - - -@pytest.mark.usefixtures("setup_credentials") -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_entities( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test the Spotify entities.""" - with patch("homeassistant.components.spotify.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -@pytest.mark.usefixtures("setup_credentials") -async def test_audio_features_unavailable( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test the Spotify entities.""" - mock_spotify.return_value.get_audio_features.return_value = None - - await setup_integration(hass, mock_config_entry) - - assert hass.states.get("sensor.spotify_spotify_1_song_tempo").state == STATE_UNKNOWN - - -@pytest.mark.usefixtures("setup_credentials") -async def test_audio_features_unknown_during_podcast( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test the Spotify audio features sensor during a podcast.""" - mock_spotify.return_value.get_playback.return_value = PlaybackState.from_json( - load_fixture("playback_episode.json", DOMAIN) - ) - - await setup_integration(hass, mock_config_entry) - - assert hass.states.get("sensor.spotify_spotify_1_song_tempo").state == STATE_UNKNOWN From 17236a56989ff82f6aba9a2231cfc88741709baf Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Thu, 28 Nov 2024 08:08:00 +0100 Subject: [PATCH 0015/1198] Remove unreachable code in Habitica (#131778) --- homeassistant/components/habitica/button.py | 11 +++++------ homeassistant/components/habitica/calendar.py | 3 --- 2 files changed, 5 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/habitica/button.py b/homeassistant/components/habitica/button.py index 30e326f79a0..2b9a4199133 100644 --- a/homeassistant/components/habitica/button.py +++ b/homeassistant/components/habitica/button.py @@ -33,7 +33,7 @@ class HabiticaButtonEntityDescription(ButtonEntityDescription): """Describes Habitica button entity.""" press_fn: Callable[[HabiticaDataUpdateCoordinator], Any] - available_fn: Callable[[HabiticaData], bool] | None = None + available_fn: 
Callable[[HabiticaData], bool] class_needed: str | None = None entity_picture: str | None = None @@ -343,11 +343,10 @@ class HabiticaButton(HabiticaBase, ButtonEntity): @property def available(self) -> bool: """Is entity available.""" - if not super().available: - return False - if self.entity_description.available_fn: - return self.entity_description.available_fn(self.coordinator.data) - return True + + return super().available and self.entity_description.available_fn( + self.coordinator.data + ) @property def entity_picture(self) -> str | None: diff --git a/homeassistant/components/habitica/calendar.py b/homeassistant/components/habitica/calendar.py index be4433cb355..6de22a0314a 100644 --- a/homeassistant/components/habitica/calendar.py +++ b/homeassistant/components/habitica/calendar.py @@ -374,9 +374,6 @@ class HabiticaDailyRemindersCalendarEntity(HabiticaCalendarEntity): # Event ends before date range continue - if end_date and start > end_date: - # Event starts after date range - continue events.append( CalendarEvent( start=start, From fe2bca51a4151ed814d2bd7e48a6ae17d62fbd06 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Thu, 28 Nov 2024 08:12:52 +0100 Subject: [PATCH 0016/1198] Add translations for units of measurement to Habitica integration (#131761) --- homeassistant/components/habitica/const.py | 2 - homeassistant/components/habitica/sensor.py | 14 +------ .../components/habitica/strings.json | 42 +++++++++++++------ 3 files changed, 30 insertions(+), 28 deletions(-) diff --git a/homeassistant/components/habitica/const.py b/homeassistant/components/habitica/const.py index dce417b60a5..42d64ca7d3f 100644 --- a/homeassistant/components/habitica/const.py +++ b/homeassistant/components/habitica/const.py @@ -25,8 +25,6 @@ ATTR_DATA = "data" MANUFACTURER = "HabitRPG, Inc." NAME = "Habitica" -UNIT_TASKS = "tasks" - ATTR_CONFIG_ENTRY = "config_entry" ATTR_SKILL = "skill" ATTR_TASK = "task" diff --git a/homeassistant/components/habitica/sensor.py b/homeassistant/components/habitica/sensor.py index 41d0168d748..bead15d109b 100644 --- a/homeassistant/components/habitica/sensor.py +++ b/homeassistant/components/habitica/sensor.py @@ -24,7 +24,7 @@ from homeassistant.helpers.issue_registry import ( ) from homeassistant.helpers.typing import StateType -from .const import ASSETS_URL, DOMAIN, UNIT_TASKS +from .const import ASSETS_URL, DOMAIN from .entity import HabiticaBase from .types import HabiticaConfigEntry from .util import entity_used_in, get_attribute_points, get_attributes_total @@ -84,40 +84,34 @@ SENSOR_DESCRIPTIONS: tuple[HabitipySensorEntityDescription, ...] 
= ( HabitipySensorEntityDescription( key=HabitipySensorEntity.HEALTH, translation_key=HabitipySensorEntity.HEALTH, - native_unit_of_measurement="HP", suggested_display_precision=0, value_fn=lambda user, _: user.get("stats", {}).get("hp"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.HEALTH_MAX, translation_key=HabitipySensorEntity.HEALTH_MAX, - native_unit_of_measurement="HP", entity_registry_enabled_default=False, value_fn=lambda user, _: user.get("stats", {}).get("maxHealth"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.MANA, translation_key=HabitipySensorEntity.MANA, - native_unit_of_measurement="MP", suggested_display_precision=0, value_fn=lambda user, _: user.get("stats", {}).get("mp"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.MANA_MAX, translation_key=HabitipySensorEntity.MANA_MAX, - native_unit_of_measurement="MP", value_fn=lambda user, _: user.get("stats", {}).get("maxMP"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.EXPERIENCE, translation_key=HabitipySensorEntity.EXPERIENCE, - native_unit_of_measurement="XP", value_fn=lambda user, _: user.get("stats", {}).get("exp"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.EXPERIENCE_MAX, translation_key=HabitipySensorEntity.EXPERIENCE_MAX, - native_unit_of_measurement="XP", value_fn=lambda user, _: user.get("stats", {}).get("toNextLevel"), ), HabitipySensorEntityDescription( @@ -128,7 +122,6 @@ SENSOR_DESCRIPTIONS: tuple[HabitipySensorEntityDescription, ...] = ( HabitipySensorEntityDescription( key=HabitipySensorEntity.GOLD, translation_key=HabitipySensorEntity.GOLD, - native_unit_of_measurement="GP", suggested_display_precision=2, value_fn=lambda user, _: user.get("stats", {}).get("gp"), ), @@ -144,7 +137,6 @@ SENSOR_DESCRIPTIONS: tuple[HabitipySensorEntityDescription, ...] = ( translation_key=HabitipySensorEntity.GEMS, value_fn=lambda user, _: user.get("balance", 0) * 4, suggested_display_precision=0, - native_unit_of_measurement="gems", entity_picture="shop_gem.png", ), HabitipySensorEntityDescription( @@ -229,20 +221,17 @@ TASK_SENSOR_DESCRIPTION: tuple[HabitipyTaskSensorEntityDescription, ...] = ( HabitipyTaskSensorEntityDescription( key=HabitipySensorEntity.HABITS, translation_key=HabitipySensorEntity.HABITS, - native_unit_of_measurement=UNIT_TASKS, value_fn=lambda tasks: [r for r in tasks if r.get("type") == "habit"], ), HabitipyTaskSensorEntityDescription( key=HabitipySensorEntity.DAILIES, translation_key=HabitipySensorEntity.DAILIES, - native_unit_of_measurement=UNIT_TASKS, value_fn=lambda tasks: [r for r in tasks if r.get("type") == "daily"], entity_registry_enabled_default=False, ), HabitipyTaskSensorEntityDescription( key=HabitipySensorEntity.TODOS, translation_key=HabitipySensorEntity.TODOS, - native_unit_of_measurement=UNIT_TASKS, value_fn=lambda tasks: [ r for r in tasks if r.get("type") == "todo" and not r.get("completed") ], @@ -251,7 +240,6 @@ TASK_SENSOR_DESCRIPTION: tuple[HabitipyTaskSensorEntityDescription, ...] 
= ( HabitipyTaskSensorEntityDescription( key=HabitipySensorEntity.REWARDS, translation_key=HabitipySensorEntity.REWARDS, - native_unit_of_measurement=UNIT_TASKS, value_fn=lambda tasks: [r for r in tasks if r.get("type") == "reward"], ), ) diff --git a/homeassistant/components/habitica/strings.json b/homeassistant/components/habitica/strings.json index 81691327aec..f1b956fe17e 100644 --- a/homeassistant/components/habitica/strings.json +++ b/homeassistant/components/habitica/strings.json @@ -2,7 +2,11 @@ "common": { "todos": "To-Do's", "dailies": "Dailies", - "config_entry_name": "Select character" + "config_entry_name": "Select character", + "unit_tasks": "tasks", + "unit_health_points": "HP", + "unit_mana_points": "MP", + "unit_experience_points": "XP" }, "config": { "abort": { @@ -135,31 +139,39 @@ "name": "Display name" }, "health": { - "name": "Health" + "name": "Health", + "unit_of_measurement": "[%key:component::habitica::common::unit_health_points%]" }, "health_max": { - "name": "Max. health" + "name": "Max. health", + "unit_of_measurement": "[%key:component::habitica::common::unit_health_points%]" }, "mana": { - "name": "Mana" + "name": "Mana", + "unit_of_measurement": "[%key:component::habitica::common::unit_mana_points%]" }, "mana_max": { - "name": "Max. mana" + "name": "Max. mana", + "unit_of_measurement": "[%key:component::habitica::common::unit_mana_points%]" }, "experience": { - "name": "Experience" + "name": "Experience", + "unit_of_measurement": "[%key:component::habitica::common::unit_experience_points%]" }, "experience_max": { - "name": "Next level" + "name": "Next level", + "unit_of_measurement": "[%key:component::habitica::common::unit_experience_points%]" }, "level": { "name": "Level" }, "gold": { - "name": "Gold" + "name": "Gold", + "unit_of_measurement": "GP" }, "gems": { - "name": "Gems" + "name": "Gems", + "unit_of_measurement": "gems" }, "trinkets": { "name": "Mystic hourglasses" @@ -174,16 +186,20 @@ } }, "todos": { - "name": "[%key:component::habitica::common::todos%]" + "name": "[%key:component::habitica::common::todos%]", + "unit_of_measurement": "[%key:component::habitica::common::unit_tasks%]" }, "dailys": { - "name": "[%key:component::habitica::common::dailies%]" + "name": "[%key:component::habitica::common::dailies%]", + "unit_of_measurement": "[%key:component::habitica::common::unit_tasks%]" }, "habits": { - "name": "Habits" + "name": "Habits", + "unit_of_measurement": "[%key:component::habitica::common::unit_tasks%]" }, "rewards": { - "name": "Rewards" + "name": "Rewards", + "unit_of_measurement": "[%key:component::habitica::common::unit_tasks%]" }, "strength": { "name": "Strength", From 42572770861ef5e72da7f9a229faac89c323f908 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Thu, 28 Nov 2024 08:13:15 +0100 Subject: [PATCH 0017/1198] Add units of measurement to Bring integration (#131763) --- homeassistant/components/bring/const.py | 1 - homeassistant/components/bring/sensor.py | 4 ---- homeassistant/components/bring/strings.json | 9 ++++++--- 3 files changed, 6 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/bring/const.py b/homeassistant/components/bring/const.py index d44b7eb9423..911c08a835d 100644 --- a/homeassistant/components/bring/const.py +++ b/homeassistant/components/bring/const.py @@ -9,4 +9,3 @@ ATTR_ITEM_NAME: Final = "item" ATTR_NOTIFICATION_TYPE: Final = "message" SERVICE_PUSH_NOTIFICATION = "send_message" -UNIT_ITEMS = "items" diff --git a/homeassistant/components/bring/sensor.py 
b/homeassistant/components/bring/sensor.py index 746ed397e1b..eddee46f3bc 100644 --- a/homeassistant/components/bring/sensor.py +++ b/homeassistant/components/bring/sensor.py @@ -20,7 +20,6 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from . import BringConfigEntry -from .const import UNIT_ITEMS from .coordinator import BringData, BringDataUpdateCoordinator from .entity import BringBaseEntity from .util import list_language, sum_attributes @@ -48,19 +47,16 @@ SENSOR_DESCRIPTIONS: tuple[BringSensorEntityDescription, ...] = ( key=BringSensor.URGENT, translation_key=BringSensor.URGENT, value_fn=lambda lst, _: sum_attributes(lst, "urgent"), - native_unit_of_measurement=UNIT_ITEMS, ), BringSensorEntityDescription( key=BringSensor.CONVENIENT, translation_key=BringSensor.CONVENIENT, value_fn=lambda lst, _: sum_attributes(lst, "convenient"), - native_unit_of_measurement=UNIT_ITEMS, ), BringSensorEntityDescription( key=BringSensor.DISCOUNTED, translation_key=BringSensor.DISCOUNTED, value_fn=lambda lst, _: sum_attributes(lst, "discounted"), - native_unit_of_measurement=UNIT_ITEMS, ), BringSensorEntityDescription( key=BringSensor.LIST_LANGUAGE, diff --git a/homeassistant/components/bring/strings.json b/homeassistant/components/bring/strings.json index 9a93881b5d2..defed056a3d 100644 --- a/homeassistant/components/bring/strings.json +++ b/homeassistant/components/bring/strings.json @@ -29,13 +29,16 @@ "entity": { "sensor": { "urgent": { - "name": "Urgent" + "name": "Urgent", + "unit_of_measurement": "items" }, "convenient": { - "name": "On occasion" + "name": "On occasion", + "unit_of_measurement": "items" }, "discounted": { - "name": "Discount only" + "name": "Discount only", + "unit_of_measurement": "items" }, "list_language": { "name": "Region & language", From d26c7a0536983847832352f58a3e877b95dab4e4 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Thu, 28 Nov 2024 08:27:24 +0100 Subject: [PATCH 0018/1198] Log warning if via_device reference not exists when creating or updating a device registry entry (#131746) --- homeassistant/helpers/device_registry.py | 11 +++++- tests/helpers/test_device_registry.py | 50 +++++++++++++++++++++++- 2 files changed, 58 insertions(+), 3 deletions(-) diff --git a/homeassistant/helpers/device_registry.py b/homeassistant/helpers/device_registry.py index 0e56adc7377..5dfd5d9e8a9 100644 --- a/homeassistant/helpers/device_registry.py +++ b/homeassistant/helpers/device_registry.py @@ -38,6 +38,7 @@ from .deprecation import ( check_if_deprecated_constant, dir_with_deprecated_constants, ) +from .frame import ReportBehavior, report_usage from .json import JSON_DUMP, find_paths_unserializable_data, json_bytes, json_fragment from .registry import BaseRegistry, BaseRegistryItems, RegistryIndexType from .singleton import singleton @@ -821,7 +822,15 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): name = default_name if via_device is not None and via_device is not UNDEFINED: - via = self.async_get_device(identifiers={via_device}) + if (via := self.async_get_device(identifiers={via_device})) is None: + report_usage( + "calls `device_registry.async_get_or_create` referencing a " + f"non existing `via_device` {via_device}, " + f"with device info: {device_info}", + core_behavior=ReportBehavior.LOG, + breaks_in_ha_version="2025.12.0", + ) + via_device_id: str | UndefinedType = via.id if via else UNDEFINED else: via_device_id = UNDEFINED diff --git 
a/tests/helpers/test_device_registry.py b/tests/helpers/test_device_registry.py index 837400d502d..2335b1b93bd 100644 --- a/tests/helpers/test_device_registry.py +++ b/tests/helpers/test_device_registry.py @@ -1482,7 +1482,9 @@ async def test_removing_area_id( async def test_specifying_via_device_create( - hass: HomeAssistant, device_registry: dr.DeviceRegistry + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + caplog: pytest.LogCaptureFixture, ) -> None: """Test specifying a via_device and removal of the hub device.""" config_entry_1 = MockConfigEntry() @@ -1513,9 +1515,32 @@ async def test_specifying_via_device_create( light = device_registry.async_get_device(identifiers={("hue", "456")}) assert light.via_device_id is None + # A device with a non existing via_device reference should create + light_via_nonexisting_parent_device = device_registry.async_get_or_create( + config_entry_id=config_entry_2.entry_id, + connections=set(), + identifiers={("hue", "789")}, + manufacturer="manufacturer", + model="light", + via_device=("hue", "non_existing_123"), + ) + assert { + "calls `device_registry.async_get_or_create` " + "referencing a non existing `via_device` " + '("hue","non_existing_123")' in caplog.text + } + assert light_via_nonexisting_parent_device is not None + assert light_via_nonexisting_parent_device.via_device_id is None + nonexisting_parent_device = device_registry.async_get_device( + identifiers={("hue", "non_existing_123")} + ) + assert nonexisting_parent_device is None + async def test_specifying_via_device_update( - hass: HomeAssistant, device_registry: dr.DeviceRegistry + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + caplog: pytest.LogCaptureFixture, ) -> None: """Test specifying a via_device and updating.""" config_entry_1 = MockConfigEntry() @@ -1529,6 +1554,7 @@ async def test_specifying_via_device_update( identifiers={("hue", "456")}, manufacturer="manufacturer", model="light", + name="Light", via_device=("hue", "0123"), ) @@ -1552,6 +1578,26 @@ async def test_specifying_via_device_update( ) assert light.via_device_id == via.id + assert light.name == "Light" + + # Try updating with a non existing via device + light = device_registry.async_get_or_create( + config_entry_id=config_entry_2.entry_id, + connections=set(), + identifiers={("hue", "456")}, + manufacturer="manufacturer", + model="light", + name="New light", + via_device=("hue", "non_existing_abc"), + ) + assert { + "calls `device_registry.async_get_or_create` " + "referencing a non existing `via_device` " + '("hue","non_existing_123")' in caplog.text + } + # Assert the name was updated correctly + assert light.via_device_id == via.id + assert light.name == "New light" async def test_loading_saving_data( From a831c375110019f3bd1f1140b05483ae38411dcd Mon Sep 17 00:00:00 2001 From: David Knowles Date: Thu, 28 Nov 2024 02:29:15 -0500 Subject: [PATCH 0019/1198] Enable strict typing for Schlage (#131734) --- .strict-typing | 1 + homeassistant/components/schlage/coordinator.py | 4 +--- mypy.ini | 10 ++++++++++ 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/.strict-typing b/.strict-typing index cb0cab984ee..f1383fa3528 100644 --- a/.strict-typing +++ b/.strict-typing @@ -405,6 +405,7 @@ homeassistant.components.ruuvitag_ble.* homeassistant.components.samsungtv.* homeassistant.components.scene.* homeassistant.components.schedule.* +homeassistant.components.schlage.* homeassistant.components.scrape.* homeassistant.components.script.* homeassistant.components.search.* diff --git 
a/homeassistant/components/schlage/coordinator.py b/homeassistant/components/schlage/coordinator.py index 53bb43751a9..5d525e3c842 100644 --- a/homeassistant/components/schlage/coordinator.py +++ b/homeassistant/components/schlage/coordinator.py @@ -44,6 +44,7 @@ class SchlageDataUpdateCoordinator(DataUpdateCoordinator[SchlageData]): super().__init__( hass, LOGGER, name=f"{DOMAIN} ({username})", update_interval=UPDATE_INTERVAL ) + self.data = SchlageData(locks={}) self.api = api self.new_locks_callbacks: list[Callable[[dict[str, LockData]], None]] = [] self.async_add_listener(self._add_remove_locks) @@ -83,9 +84,6 @@ class SchlageDataUpdateCoordinator(DataUpdateCoordinator[SchlageData]): @callback def _add_remove_locks(self) -> None: """Add newly discovered locks and remove nonexistent locks.""" - if self.data is None: - return - device_registry = dr.async_get(self.hass) devices = dr.async_entries_for_config_entry( device_registry, self.config_entry.entry_id diff --git a/mypy.ini b/mypy.ini index a71f980dac9..aa9a2b81095 100644 --- a/mypy.ini +++ b/mypy.ini @@ -3806,6 +3806,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.schlage.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.scrape.*] check_untyped_defs = true disallow_incomplete_defs = true From a0ea9a1e83b496f1028aaf60d7bd4263db5a513f Mon Sep 17 00:00:00 2001 From: David Knowles Date: Thu, 28 Nov 2024 02:29:29 -0500 Subject: [PATCH 0020/1198] Store Schlage runtime data in entry.runtime_data (#131731) --- homeassistant/components/schlage/__init__.py | 14 +++++------- .../components/schlage/binary_sensor.py | 7 +++--- .../components/schlage/diagnostics.py | 8 +++---- homeassistant/components/schlage/lock.py | 7 +++--- homeassistant/components/schlage/select.py | 7 +++--- homeassistant/components/schlage/sensor.py | 3 +-- homeassistant/components/schlage/switch.py | 3 +-- tests/components/schlage/__init__.py | 6 +++++ tests/components/schlage/conftest.py | 8 ++++--- .../components/schlage/test_binary_sensor.py | 7 +++--- tests/components/schlage/test_config_flow.py | 13 +++++------ tests/components/schlage/test_diagnostics.py | 5 +++-- tests/components/schlage/test_init.py | 22 ++++++++++--------- tests/components/schlage/test_lock.py | 11 ++++++---- tests/components/schlage/test_select.py | 7 ++++-- tests/components/schlage/test_sensor.py | 5 +++-- tests/components/schlage/test_switch.py | 11 +++++++--- 17 files changed, 78 insertions(+), 66 deletions(-) diff --git a/homeassistant/components/schlage/__init__.py b/homeassistant/components/schlage/__init__.py index e9fb24f1309..6eae69d9542 100644 --- a/homeassistant/components/schlage/__init__.py +++ b/homeassistant/components/schlage/__init__.py @@ -10,7 +10,6 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed -from .const import DOMAIN from .coordinator import SchlageDataUpdateCoordinator PLATFORMS: list[Platform] = [ @@ -21,8 +20,10 @@ PLATFORMS: list[Platform] = [ Platform.SWITCH, ] +type SchlageConfigEntry = ConfigEntry[SchlageDataUpdateCoordinator] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + +async def async_setup_entry(hass: 
HomeAssistant, entry: SchlageConfigEntry) -> bool: """Set up Schlage from a config entry.""" username = entry.data[CONF_USERNAME] password = entry.data[CONF_PASSWORD] @@ -32,15 +33,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: raise ConfigEntryAuthFailed from ex coordinator = SchlageDataUpdateCoordinator(hass, username, pyschlage.Schlage(auth)) - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await coordinator.async_config_entry_first_refresh() await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: SchlageConfigEntry) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/schlage/binary_sensor.py b/homeassistant/components/schlage/binary_sensor.py index bc1ee666f9e..f928d42b3ee 100644 --- a/homeassistant/components/schlage/binary_sensor.py +++ b/homeassistant/components/schlage/binary_sensor.py @@ -10,12 +10,11 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import SchlageConfigEntry from .coordinator import LockData, SchlageDataUpdateCoordinator from .entity import SchlageEntity @@ -40,11 +39,11 @@ _DESCRIPTIONS: tuple[SchlageBinarySensorEntityDescription] = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: SchlageConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up binary_sensors based on a config entry.""" - coordinator: SchlageDataUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = config_entry.runtime_data def _add_new_locks(locks: dict[str, LockData]) -> None: async_add_entities( diff --git a/homeassistant/components/schlage/diagnostics.py b/homeassistant/components/schlage/diagnostics.py index af1bf311676..ec4d9c489e3 100644 --- a/homeassistant/components/schlage/diagnostics.py +++ b/homeassistant/components/schlage/diagnostics.py @@ -4,19 +4,17 @@ from __future__ import annotations from typing import Any -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .coordinator import SchlageDataUpdateCoordinator +from . import SchlageConfigEntry async def async_get_config_entry_diagnostics( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: SchlageConfigEntry, ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: SchlageDataUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = config_entry.runtime_data # NOTE: Schlage diagnostics are already redacted. 
return { "locks": [ld.lock.get_diagnostics() for ld in coordinator.data.locks.values()] diff --git a/homeassistant/components/schlage/lock.py b/homeassistant/components/schlage/lock.py index 97dbfc78d41..d203913191d 100644 --- a/homeassistant/components/schlage/lock.py +++ b/homeassistant/components/schlage/lock.py @@ -5,22 +5,21 @@ from __future__ import annotations from typing import Any from homeassistant.components.lock import LockEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import SchlageConfigEntry from .coordinator import LockData, SchlageDataUpdateCoordinator from .entity import SchlageEntity async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: SchlageConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Schlage WiFi locks based on a config entry.""" - coordinator: SchlageDataUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = config_entry.runtime_data def _add_new_locks(locks: dict[str, LockData]) -> None: async_add_entities( diff --git a/homeassistant/components/schlage/select.py b/homeassistant/components/schlage/select.py index 6d93eccaa85..6cf0853835f 100644 --- a/homeassistant/components/schlage/select.py +++ b/homeassistant/components/schlage/select.py @@ -3,12 +3,11 @@ from __future__ import annotations from homeassistant.components.select import SelectEntity, SelectEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . 
import SchlageConfigEntry from .coordinator import LockData, SchlageDataUpdateCoordinator from .entity import SchlageEntity @@ -33,11 +32,11 @@ _DESCRIPTIONS = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: SchlageConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up selects based on a config entry.""" - coordinator: SchlageDataUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = config_entry.runtime_data def _add_new_locks(locks: dict[str, LockData]) -> None: async_add_entities( diff --git a/homeassistant/components/schlage/sensor.py b/homeassistant/components/schlage/sensor.py index 115412882a2..a15d1740b91 100644 --- a/homeassistant/components/schlage/sensor.py +++ b/homeassistant/components/schlage/sensor.py @@ -13,7 +13,6 @@ from homeassistant.const import PERCENTAGE, EntityCategory from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN from .coordinator import LockData, SchlageDataUpdateCoordinator from .entity import SchlageEntity @@ -34,7 +33,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up sensors based on a config entry.""" - coordinator: SchlageDataUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = config_entry.runtime_data def _add_new_locks(locks: dict[str, LockData]) -> None: async_add_entities( diff --git a/homeassistant/components/schlage/switch.py b/homeassistant/components/schlage/switch.py index aaed57fc741..39fe6dbbc99 100644 --- a/homeassistant/components/schlage/switch.py +++ b/homeassistant/components/schlage/switch.py @@ -19,7 +19,6 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN from .coordinator import LockData, SchlageDataUpdateCoordinator from .entity import SchlageEntity @@ -61,7 +60,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up switches based on a config entry.""" - coordinator: SchlageDataUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = config_entry.runtime_data def _add_new_locks(locks: dict[str, LockData]) -> None: async_add_entities( diff --git a/tests/components/schlage/__init__.py b/tests/components/schlage/__init__.py index c6cd3fec0bc..613621b2fb8 100644 --- a/tests/components/schlage/__init__.py +++ b/tests/components/schlage/__init__.py @@ -1 +1,7 @@ """Tests for the Schlage integration.""" + +from homeassistant.components.schlage.coordinator import SchlageDataUpdateCoordinator + +from tests.common import MockConfigEntry + +type MockSchlageConfigEntry = MockConfigEntry[SchlageDataUpdateCoordinator] diff --git a/tests/components/schlage/conftest.py b/tests/components/schlage/conftest.py index f774b8cfb89..6695191dcf0 100644 --- a/tests/components/schlage/conftest.py +++ b/tests/components/schlage/conftest.py @@ -11,11 +11,13 @@ from homeassistant.components.schlage.const import DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant +from . 
import MockSchlageConfigEntry + from tests.common import MockConfigEntry @pytest.fixture -def mock_config_entry() -> MockConfigEntry: +def mock_config_entry() -> MockSchlageConfigEntry: """Mock ConfigEntry.""" return MockConfigEntry( title="asdf@asdf.com", @@ -31,11 +33,11 @@ def mock_config_entry() -> MockConfigEntry: @pytest.fixture async def mock_added_config_entry( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, + mock_config_entry: MockSchlageConfigEntry, mock_pyschlage_auth: Mock, mock_schlage: Mock, mock_lock: Mock, -) -> MockConfigEntry: +) -> MockSchlageConfigEntry: """Mock ConfigEntry that's been added to HA.""" mock_schlage.locks.return_value = [mock_lock] mock_schlage.users.return_value = [] diff --git a/tests/components/schlage/test_binary_sensor.py b/tests/components/schlage/test_binary_sensor.py index 91bd996ba5b..a073097f755 100644 --- a/tests/components/schlage/test_binary_sensor.py +++ b/tests/components/schlage/test_binary_sensor.py @@ -7,10 +7,11 @@ from freezegun.api import FrozenDateTimeFactory from pyschlage.exceptions import UnknownError from homeassistant.components.binary_sensor import BinarySensorDeviceClass -from homeassistant.config_entries import ConfigEntry from homeassistant.const import STATE_ON from homeassistant.core import HomeAssistant +from . import MockSchlageConfigEntry + from tests.common import async_fire_time_changed @@ -18,7 +19,7 @@ async def test_keypad_disabled_binary_sensor( hass: HomeAssistant, mock_schlage: Mock, mock_lock: Mock, - mock_added_config_entry: ConfigEntry, + mock_added_config_entry: MockSchlageConfigEntry, freezer: FrozenDateTimeFactory, ) -> None: """Test the keypad_disabled binary_sensor.""" @@ -42,7 +43,7 @@ async def test_keypad_disabled_binary_sensor_use_previous_logs_on_failure( hass: HomeAssistant, mock_schlage: Mock, mock_lock: Mock, - mock_added_config_entry: ConfigEntry, + mock_added_config_entry: MockSchlageConfigEntry, freezer: FrozenDateTimeFactory, ) -> None: """Test the keypad_disabled binary_sensor.""" diff --git a/tests/components/schlage/test_config_flow.py b/tests/components/schlage/test_config_flow.py index 7f4a40f9b53..88b5f113863 100644 --- a/tests/components/schlage/test_config_flow.py +++ b/tests/components/schlage/test_config_flow.py @@ -10,7 +10,7 @@ from homeassistant.components.schlage.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from tests.common import MockConfigEntry +from . 
import MockSchlageConfigEntry pytestmark = pytest.mark.usefixtures("mock_setup_entry") @@ -95,8 +95,7 @@ async def test_form_unknown(hass: HomeAssistant, mock_pyschlage_auth: Mock) -> N async def test_reauth( hass: HomeAssistant, - mock_added_config_entry: MockConfigEntry, - mock_setup_entry: AsyncMock, + mock_added_config_entry: MockSchlageConfigEntry, mock_pyschlage_auth: Mock, ) -> None: """Test reauth flow.""" @@ -104,8 +103,7 @@ async def test_reauth( await hass.async_block_till_done() flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - [result] = flows + result = flows[-1] assert result["step_id"] == "reauth_confirm" result2 = await hass.config_entries.flow.async_configure( @@ -121,12 +119,11 @@ async def test_reauth( "username": "asdf@asdf.com", "password": "new-password", } - assert len(mock_setup_entry.mock_calls) == 1 async def test_reauth_invalid_auth( hass: HomeAssistant, - mock_added_config_entry: MockConfigEntry, + mock_added_config_entry: MockSchlageConfigEntry, mock_setup_entry: AsyncMock, mock_pyschlage_auth: Mock, ) -> None: @@ -154,7 +151,7 @@ async def test_reauth_invalid_auth( async def test_reauth_wrong_account( hass: HomeAssistant, - mock_added_config_entry: MockConfigEntry, + mock_added_config_entry: MockSchlageConfigEntry, mock_setup_entry: AsyncMock, mock_pyschlage_auth: Mock, ) -> None: diff --git a/tests/components/schlage/test_diagnostics.py b/tests/components/schlage/test_diagnostics.py index 15b2316bf38..0b0dc856c1a 100644 --- a/tests/components/schlage/test_diagnostics.py +++ b/tests/components/schlage/test_diagnostics.py @@ -4,7 +4,8 @@ from unittest.mock import Mock from homeassistant.core import HomeAssistant -from tests.common import MockConfigEntry +from . import MockSchlageConfigEntry + from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -12,7 +13,7 @@ from tests.typing import ClientSessionGenerator async def test_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, - mock_added_config_entry: MockConfigEntry, + mock_added_config_entry: MockSchlageConfigEntry, mock_lock: Mock, ) -> None: """Test Schlage diagnostics.""" diff --git a/tests/components/schlage/test_init.py b/tests/components/schlage/test_init.py index e40fc83a7ac..57a139e582e 100644 --- a/tests/components/schlage/test_init.py +++ b/tests/components/schlage/test_init.py @@ -10,12 +10,14 @@ from pyschlage.lock import Lock from syrupy.assertion import SnapshotAssertion from homeassistant.components.schlage.const import DOMAIN, UPDATE_INTERVAL -from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant import homeassistant.helpers.device_registry as dr from homeassistant.helpers.device_registry import DeviceRegistry -from tests.common import MockConfigEntry, async_fire_time_changed +from . 
import MockSchlageConfigEntry + +from tests.common import async_fire_time_changed @patch( @@ -23,7 +25,7 @@ from tests.common import MockConfigEntry, async_fire_time_changed side_effect=WarrantException, ) async def test_auth_failed( - mock_auth: Mock, hass: HomeAssistant, mock_config_entry: MockConfigEntry + mock_auth: Mock, hass: HomeAssistant, mock_config_entry: MockSchlageConfigEntry ) -> None: """Test failed auth on setup.""" mock_config_entry.add_to_hass(hass) @@ -36,7 +38,7 @@ async def test_auth_failed( async def test_update_data_fails( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, + mock_config_entry: MockSchlageConfigEntry, mock_pyschlage_auth: Mock, mock_schlage: Mock, ) -> None: @@ -52,7 +54,7 @@ async def test_update_data_fails( async def test_update_data_auth_error( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, + mock_config_entry: MockSchlageConfigEntry, mock_pyschlage_auth: Mock, mock_schlage: Mock, ) -> None: @@ -68,7 +70,7 @@ async def test_update_data_auth_error( async def test_update_data_get_logs_auth_error( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, + mock_config_entry: MockSchlageConfigEntry, mock_pyschlage_auth: Mock, mock_schlage: Mock, mock_lock: Mock, @@ -87,7 +89,7 @@ async def test_update_data_get_logs_auth_error( async def test_load_unload_config_entry( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, + mock_config_entry: MockSchlageConfigEntry, mock_pyschlage_auth: Mock, mock_schlage: Mock, ) -> None: @@ -106,7 +108,7 @@ async def test_load_unload_config_entry( async def test_lock_device_registry( hass: HomeAssistant, device_registry: DeviceRegistry, - mock_added_config_entry: ConfigEntry, + mock_added_config_entry: MockSchlageConfigEntry, snapshot: SnapshotAssertion, ) -> None: """Test lock is added to device registry.""" @@ -117,7 +119,7 @@ async def test_lock_device_registry( async def test_auto_add_device( hass: HomeAssistant, device_registry: DeviceRegistry, - mock_added_config_entry: ConfigEntry, + mock_added_config_entry: MockSchlageConfigEntry, mock_schlage: Mock, mock_lock: Mock, mock_lock_attrs: dict[str, Any], @@ -153,7 +155,7 @@ async def test_auto_add_device( async def test_auto_remove_device( hass: HomeAssistant, device_registry: DeviceRegistry, - mock_added_config_entry: ConfigEntry, + mock_added_config_entry: MockSchlageConfigEntry, mock_schlage: Mock, freezer: FrozenDateTimeFactory, ) -> None: diff --git a/tests/components/schlage/test_lock.py b/tests/components/schlage/test_lock.py index 518c723d581..6a3bb799213 100644 --- a/tests/components/schlage/test_lock.py +++ b/tests/components/schlage/test_lock.py @@ -6,16 +6,17 @@ from unittest.mock import Mock from freezegun.api import FrozenDateTimeFactory from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_ENTITY_ID, SERVICE_LOCK, SERVICE_UNLOCK from homeassistant.core import HomeAssistant +from . 
import MockSchlageConfigEntry + from tests.common import async_fire_time_changed async def test_lock_attributes( hass: HomeAssistant, - mock_added_config_entry: ConfigEntry, + mock_added_config_entry: MockSchlageConfigEntry, mock_schlage: Mock, mock_lock: Mock, freezer: FrozenDateTimeFactory, @@ -38,7 +39,9 @@ async def test_lock_attributes( async def test_lock_services( - hass: HomeAssistant, mock_lock: Mock, mock_added_config_entry: ConfigEntry + hass: HomeAssistant, + mock_lock: Mock, + mock_added_config_entry: MockSchlageConfigEntry, ) -> None: """Test lock services.""" await hass.services.async_call( @@ -65,7 +68,7 @@ async def test_lock_services( async def test_changed_by( hass: HomeAssistant, mock_lock: Mock, - mock_added_config_entry: ConfigEntry, + mock_added_config_entry: MockSchlageConfigEntry, freezer: FrozenDateTimeFactory, ) -> None: """Test population of the changed_by attribute.""" diff --git a/tests/components/schlage/test_select.py b/tests/components/schlage/test_select.py index c27fd4c8813..59ff065d449 100644 --- a/tests/components/schlage/test_select.py +++ b/tests/components/schlage/test_select.py @@ -7,13 +7,16 @@ from homeassistant.components.select import ( DOMAIN as SELECT_DOMAIN, SERVICE_SELECT_OPTION, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant +from . import MockSchlageConfigEntry + async def test_select( - hass: HomeAssistant, mock_lock: Mock, mock_added_config_entry: ConfigEntry + hass: HomeAssistant, + mock_lock: Mock, + mock_added_config_entry: MockSchlageConfigEntry, ) -> None: """Test the auto-lock time select entity.""" entity_id = "select.vault_door_auto_lock_time" diff --git a/tests/components/schlage/test_sensor.py b/tests/components/schlage/test_sensor.py index 9fa90edecbb..9a489f6ff73 100644 --- a/tests/components/schlage/test_sensor.py +++ b/tests/components/schlage/test_sensor.py @@ -1,13 +1,14 @@ """Test schlage sensor.""" from homeassistant.components.sensor import SensorDeviceClass -from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE from homeassistant.core import HomeAssistant +from . import MockSchlageConfigEntry + async def test_battery_sensor( - hass: HomeAssistant, mock_added_config_entry: ConfigEntry + hass: HomeAssistant, mock_added_config_entry: MockSchlageConfigEntry ) -> None: """Test the battery sensor.""" battery_sensor = hass.states.get("sensor.vault_door_battery") diff --git a/tests/components/schlage/test_switch.py b/tests/components/schlage/test_switch.py index 52b8da81670..fc5acc4399f 100644 --- a/tests/components/schlage/test_switch.py +++ b/tests/components/schlage/test_switch.py @@ -3,13 +3,16 @@ from unittest.mock import Mock from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON from homeassistant.core import HomeAssistant +from . 
import MockSchlageConfigEntry + async def test_beeper_services( - hass: HomeAssistant, mock_lock: Mock, mock_added_config_entry: ConfigEntry + hass: HomeAssistant, + mock_lock: Mock, + mock_added_config_entry: MockSchlageConfigEntry, ) -> None: """Test BeeperSwitch services.""" await hass.services.async_call( @@ -35,7 +38,9 @@ async def test_beeper_services( async def test_lock_and_leave_services( - hass: HomeAssistant, mock_lock: Mock, mock_added_config_entry: ConfigEntry + hass: HomeAssistant, + mock_lock: Mock, + mock_added_config_entry: MockSchlageConfigEntry, ) -> None: """Test LockAndLeaveSwitch services.""" await hass.services.async_call( From 2fcd9be3f2d5ff71d7963571d27a96341b3db8cd Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Thu, 28 Nov 2024 08:48:15 +0100 Subject: [PATCH 0021/1198] Set parallel updates in IronOS integration (#131721) --- homeassistant/components/iron_os/number.py | 2 ++ homeassistant/components/iron_os/update.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/homeassistant/components/iron_os/number.py b/homeassistant/components/iron_os/number.py index 9230faec1f1..2da80aac327 100644 --- a/homeassistant/components/iron_os/number.py +++ b/homeassistant/components/iron_os/number.py @@ -23,6 +23,8 @@ from . import IronOSConfigEntry from .const import DOMAIN, MAX_TEMP, MIN_TEMP from .entity import IronOSBaseEntity +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class IronOSNumberEntityDescription(NumberEntityDescription): diff --git a/homeassistant/components/iron_os/update.py b/homeassistant/components/iron_os/update.py index 786ba86f730..0da0786821e 100644 --- a/homeassistant/components/iron_os/update.py +++ b/homeassistant/components/iron_os/update.py @@ -15,6 +15,8 @@ from . 
import IRON_OS_KEY, IronOSConfigEntry, IronOSLiveDataCoordinator from .coordinator import IronOSFirmwareUpdateCoordinator from .entity import IronOSBaseEntity +PARALLEL_UPDATES = 0 + UPDATE_DESCRIPTION = UpdateEntityDescription( key="firmware", device_class=UpdateDeviceClass.FIRMWARE, From 5972da495a706f62950e79de2961a62764c7883d Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 28 Nov 2024 09:18:00 +0100 Subject: [PATCH 0022/1198] Bump samsungtvws to 2.7.1 (#131784) --- homeassistant/components/samsungtv/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/samsungtv/manifest.json b/homeassistant/components/samsungtv/manifest.json index d25501b356d..041e9b8fe9b 100644 --- a/homeassistant/components/samsungtv/manifest.json +++ b/homeassistant/components/samsungtv/manifest.json @@ -37,7 +37,7 @@ "requirements": [ "getmac==0.9.4", "samsungctl[websocket]==0.7.1", - "samsungtvws[async,encrypted]==2.7.0", + "samsungtvws[async,encrypted]==2.7.1", "wakeonlan==2.1.0", "async-upnp-client==0.41.0" ], diff --git a/requirements_all.txt b/requirements_all.txt index cc6ada55f72..279788e8bbf 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2610,7 +2610,7 @@ rxv==0.7.0 samsungctl[websocket]==0.7.1 # homeassistant.components.samsungtv -samsungtvws[async,encrypted]==2.7.0 +samsungtvws[async,encrypted]==2.7.1 # homeassistant.components.sanix sanix==1.0.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 452e6143c34..956ced46bcd 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2086,7 +2086,7 @@ rxv==0.7.0 samsungctl[websocket]==0.7.1 # homeassistant.components.samsungtv -samsungtvws[async,encrypted]==2.7.0 +samsungtvws[async,encrypted]==2.7.1 # homeassistant.components.sanix sanix==1.0.6 From 717f2ee20646e46b4e282a0cf586f38cb436fafe Mon Sep 17 00:00:00 2001 From: Richard Kroegel <42204099+rikroe@users.noreply.github.com> Date: Thu, 28 Nov 2024 09:58:16 +0100 Subject: [PATCH 0023/1198] Bump bimmer_connected to 0.17.0 (#131352) --- homeassistant/components/bmw_connected_drive/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/bmw_connected_drive/manifest.json b/homeassistant/components/bmw_connected_drive/manifest.json index ed0919a5dcf..d1ca735ce55 100644 --- a/homeassistant/components/bmw_connected_drive/manifest.json +++ b/homeassistant/components/bmw_connected_drive/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/bmw_connected_drive", "iot_class": "cloud_polling", "loggers": ["bimmer_connected"], - "requirements": ["bimmer-connected[china]==0.16.4"] + "requirements": ["bimmer-connected[china]==0.17.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 279788e8bbf..fc13e72c128 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -582,7 +582,7 @@ beautifulsoup4==4.12.3 # beewi-smartclim==0.0.10 # homeassistant.components.bmw_connected_drive -bimmer-connected[china]==0.16.4 +bimmer-connected[china]==0.17.0 # homeassistant.components.bizkaibus bizkaibus==0.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 956ced46bcd..0923b497575 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -516,7 +516,7 @@ base36==0.1.1 beautifulsoup4==4.12.3 # 
homeassistant.components.bmw_connected_drive -bimmer-connected[china]==0.16.4 +bimmer-connected[china]==0.17.0 # homeassistant.components.eq3btsmart # homeassistant.components.esphome From 28ec8272ee4b1a6d0c74e49be089ff6a2c4e42ef Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Thu, 28 Nov 2024 11:05:45 +0100 Subject: [PATCH 0024/1198] Remove deprecated camera constants (#131796) --- homeassistant/components/camera/__init__.py | 14 +-------- homeassistant/components/camera/const.py | 21 ------------- tests/components/camera/test_init.py | 35 +-------------------- 3 files changed, 2 insertions(+), 68 deletions(-) diff --git a/homeassistant/components/camera/__init__.py b/homeassistant/components/camera/__init__.py index 781388f12d6..4d718433fca 100644 --- a/homeassistant/components/camera/__init__.py +++ b/homeassistant/components/camera/__init__.py @@ -67,9 +67,7 @@ from homeassistant.helpers.template import Template from homeassistant.helpers.typing import ConfigType, VolDictType from homeassistant.loader import bind_hass -from .const import ( # noqa: F401 - _DEPRECATED_STREAM_TYPE_HLS, - _DEPRECATED_STREAM_TYPE_WEB_RTC, +from .const import ( CAMERA_IMAGE_TIMEOUT, CAMERA_STREAM_SOURCE_TIMEOUT, CONF_DURATION, @@ -135,16 +133,6 @@ class CameraEntityFeature(IntFlag): STREAM = 2 -# These SUPPORT_* constants are deprecated as of Home Assistant 2022.5. -# Pleease use the CameraEntityFeature enum instead. -_DEPRECATED_SUPPORT_ON_OFF: Final = DeprecatedConstantEnum( - CameraEntityFeature.ON_OFF, "2025.1" -) -_DEPRECATED_SUPPORT_STREAM: Final = DeprecatedConstantEnum( - CameraEntityFeature.STREAM, "2025.1" -) - - DEFAULT_CONTENT_TYPE: Final = "image/jpeg" ENTITY_IMAGE_URL: Final = "/api/camera_proxy/{0}?token={1}" diff --git a/homeassistant/components/camera/const.py b/homeassistant/components/camera/const.py index 7e4633d410a..65862e66dab 100644 --- a/homeassistant/components/camera/const.py +++ b/homeassistant/components/camera/const.py @@ -3,15 +3,8 @@ from __future__ import annotations from enum import StrEnum -from functools import partial from typing import TYPE_CHECKING, Final -from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.util.hass_dict import HassKey if TYPE_CHECKING: @@ -58,17 +51,3 @@ class StreamType(StrEnum): HLS = "hls" WEB_RTC = "web_rtc" - - -# These constants are deprecated as of Home Assistant 2022.5 -# Please use the StreamType enum instead. 
-_DEPRECATED_STREAM_TYPE_HLS = DeprecatedConstantEnum(StreamType.HLS, "2025.1") -_DEPRECATED_STREAM_TYPE_WEB_RTC = DeprecatedConstantEnum(StreamType.WEB_RTC, "2025.1") - - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/tests/components/camera/test_init.py b/tests/components/camera/test_init.py index f9d30c240db..32520fcad23 100644 --- a/tests/components/camera/test_init.py +++ b/tests/components/camera/test_init.py @@ -802,32 +802,13 @@ async def test_use_stream_for_stills( @pytest.mark.parametrize( "module", - [camera, camera.const], + [camera], ) def test_all(module: ModuleType) -> None: """Test module.__all__ is correctly set.""" help_test_all(module) -@pytest.mark.parametrize( - "enum", - list(camera.const.StreamType), -) -@pytest.mark.parametrize( - "module", - [camera, camera.const], -) -def test_deprecated_stream_type_constants( - caplog: pytest.LogCaptureFixture, - enum: camera.const.StreamType, - module: ModuleType, -) -> None: - """Test deprecated stream type constants.""" - import_and_test_deprecated_constant_enum( - caplog, module, enum, "STREAM_TYPE_", "2025.1" - ) - - @pytest.mark.parametrize( "enum", list(camera.const.CameraState), @@ -845,20 +826,6 @@ def test_deprecated_state_constants( import_and_test_deprecated_constant_enum(caplog, module, enum, "STATE_", "2025.10") -@pytest.mark.parametrize( - "entity_feature", - list(camera.CameraEntityFeature), -) -def test_deprecated_support_constants( - caplog: pytest.LogCaptureFixture, - entity_feature: camera.CameraEntityFeature, -) -> None: - """Test deprecated support constants.""" - import_and_test_deprecated_constant_enum( - caplog, camera, entity_feature, "SUPPORT_", "2025.1" - ) - - def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: """Test deprecated supported features ints.""" From be81fd86d3c8094ca2f5f4dd7434032c9037ef66 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Thu, 28 Nov 2024 11:06:04 +0100 Subject: [PATCH 0025/1198] Remvove deprecated core constants (#131803) --- homeassistant/core.py | 9 --------- tests/test_core.py | 17 ----------------- 2 files changed, 26 deletions(-) diff --git a/homeassistant/core.py b/homeassistant/core.py index f4c819c1262..0640664d64f 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -84,7 +84,6 @@ from .exceptions import ( ) from .helpers.deprecation import ( DeferredDeprecatedAlias, - DeprecatedConstantEnum, EnumWithDeprecatedMembers, all_with_deprecated_constants, check_if_deprecated_constant, @@ -177,14 +176,6 @@ class EventStateReportedData(EventStateEventData): old_last_reported: datetime.datetime -# SOURCE_* are deprecated as of Home Assistant 2022.2, use ConfigSource instead -_DEPRECATED_SOURCE_DISCOVERED = DeprecatedConstantEnum( - ConfigSource.DISCOVERED, "2025.1" -) -_DEPRECATED_SOURCE_STORAGE = DeprecatedConstantEnum(ConfigSource.STORAGE, "2025.1") -_DEPRECATED_SOURCE_YAML = DeprecatedConstantEnum(ConfigSource.YAML, "2025.1") - - def _deprecated_core_config() -> Any: # pylint: disable-next=import-outside-toplevel from . 
import core_config diff --git a/tests/test_core.py b/tests/test_core.py index df2d916e166..0100c35055e 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -68,7 +68,6 @@ from .common import ( async_mock_service, help_test_all, import_and_test_deprecated_alias, - import_and_test_deprecated_constant_enum, ) PST = dt_util.get_time_zone("America/Los_Angeles") @@ -2980,22 +2979,6 @@ def test_all() -> None: help_test_all(ha) -@pytest.mark.parametrize( - ("enum"), - [ - ha.ConfigSource.DISCOVERED, - ha.ConfigSource.YAML, - ha.ConfigSource.STORAGE, - ], -) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: ha.ConfigSource, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum(caplog, ha, enum, "SOURCE_", "2025.1") - - def test_deprecated_config(caplog: pytest.LogCaptureFixture) -> None: """Test deprecated Config class.""" import_and_test_deprecated_alias(caplog, ha, "Config", Config, "2025.11") From fb152c7d220ec9c7227e9ea495699db1937aee8f Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Thu, 28 Nov 2024 11:07:00 +0100 Subject: [PATCH 0026/1198] Remove deprecated automation constants (#131792) --- .../components/automation/__init__.py | 37 +------------------ tests/components/automation/test_init.py | 27 -------------- 2 files changed, 2 insertions(+), 62 deletions(-) diff --git a/homeassistant/components/automation/__init__.py b/homeassistant/components/automation/__init__.py index 4fcd8a1416d..bd8af526d75 100644 --- a/homeassistant/components/automation/__init__.py +++ b/homeassistant/components/automation/__init__.py @@ -6,7 +6,6 @@ from abc import ABC, abstractmethod import asyncio from collections.abc import Callable, Mapping from dataclasses import dataclass -from functools import partial import logging from typing import Any, Protocol, cast @@ -51,12 +50,6 @@ from homeassistant.core import ( from homeassistant.exceptions import HomeAssistantError, ServiceNotFound, TemplateError from homeassistant.helpers import condition import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.deprecation import ( - DeprecatedConstant, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.helpers.entity import ToggleEntity from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.issue_registry import ( @@ -86,12 +79,7 @@ from homeassistant.helpers.trace import ( trace_get, trace_path, ) -from homeassistant.helpers.trigger import ( - TriggerActionType, - TriggerData, - TriggerInfo, - async_initialize_triggers, -) +from homeassistant.helpers.trigger import async_initialize_triggers from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass from homeassistant.util.dt import parse_datetime @@ -137,20 +125,6 @@ class IfAction(Protocol): """AND all conditions.""" -# AutomationActionType, AutomationTriggerData, -# and AutomationTriggerInfo are deprecated as of 2022.9. -# Can be removed in 2025.1 -_DEPRECATED_AutomationActionType = DeprecatedConstant( - TriggerActionType, "TriggerActionType", "2025.1" -) -_DEPRECATED_AutomationTriggerData = DeprecatedConstant( - TriggerData, "TriggerData", "2025.1" -) -_DEPRECATED_AutomationTriggerInfo = DeprecatedConstant( - TriggerInfo, "TriggerInfo", "2025.1" -) - - @bind_hass def is_on(hass: HomeAssistant, entity_id: str) -> bool: """Return true if specified automation entity_id is on. 
@@ -477,6 +451,7 @@ class UnavailableAutomationEntity(BaseAutomationEntity): ) async def async_will_remove_from_hass(self) -> None: + """Run when entity will be removed from hass.""" await super().async_will_remove_from_hass() async_delete_issue( self.hass, DOMAIN, f"{self.entity_id}_validation_{self._validation_status}" @@ -1219,11 +1194,3 @@ def websocket_config( "config": automation.raw_config, }, ) - - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/tests/components/automation/test_init.py b/tests/components/automation/test_init.py index 2bdc0f7516b..98d8bf0396e 100644 --- a/tests/components/automation/test_init.py +++ b/tests/components/automation/test_init.py @@ -50,7 +50,6 @@ from homeassistant.helpers.script import ( SCRIPT_MODE_SINGLE, _async_stop_scripts_at_shutdown, ) -from homeassistant.helpers.trigger import TriggerActionType, TriggerData, TriggerInfo from homeassistant.setup import async_setup_component from homeassistant.util import yaml import homeassistant.util.dt as dt_util @@ -62,8 +61,6 @@ from tests.common import ( async_capture_events, async_fire_time_changed, async_mock_service, - help_test_all, - import_and_test_deprecated_constant, mock_restore_cache, ) from tests.components.logbook.common import MockRow, mock_humanify @@ -3153,30 +3150,6 @@ async def test_websocket_config( assert msg["error"]["code"] == "not_found" -def test_all() -> None: - """Test module.__all__ is correctly set.""" - help_test_all(automation) - - -@pytest.mark.parametrize( - ("constant_name", "replacement"), - [ - ("AutomationActionType", TriggerActionType), - ("AutomationTriggerData", TriggerData), - ("AutomationTriggerInfo", TriggerInfo), - ], -) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - constant_name: str, - replacement: Any, -) -> None: - """Test deprecated automation constants.""" - import_and_test_deprecated_constant( - caplog, automation, constant_name, replacement.__name__, replacement, "2025.1" - ) - - async def test_automation_turns_off_other_automation(hass: HomeAssistant) -> None: """Test an automation that turns off another automation.""" hass.set_state(CoreState.not_running) From b28f3529029b55ac5d02b28c50bab49c67cf7e2c Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Thu, 28 Nov 2024 11:08:18 +0100 Subject: [PATCH 0027/1198] Remove deprecated binary sensor constants (#131793) --- .../components/binary_sensor/__init__.py | 102 ------------------ tests/components/binary_sensor/test_init.py | 21 ---- 2 files changed, 123 deletions(-) diff --git a/homeassistant/components/binary_sensor/__init__.py b/homeassistant/components/binary_sensor/__init__.py index baf6bf98547..f31c3d102b0 100644 --- a/homeassistant/components/binary_sensor/__init__.py +++ b/homeassistant/components/binary_sensor/__init__.py @@ -4,7 +4,6 @@ from __future__ import annotations from datetime import timedelta from enum import StrEnum -from functools import partial import logging from typing import Literal, final @@ -16,12 +15,6 @@ from homeassistant.const import STATE_OFF, STATE_ON, EntityCategory from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.deprecation import ( - 
DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType @@ -126,94 +119,7 @@ class BinarySensorDeviceClass(StrEnum): DEVICE_CLASSES_SCHEMA = vol.All(vol.Lower, vol.Coerce(BinarySensorDeviceClass)) - -# DEVICE_CLASS* below are deprecated as of 2021.12 -# use the BinarySensorDeviceClass enum instead. DEVICE_CLASSES = [cls.value for cls in BinarySensorDeviceClass] -_DEPRECATED_DEVICE_CLASS_BATTERY = DeprecatedConstantEnum( - BinarySensorDeviceClass.BATTERY, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_BATTERY_CHARGING = DeprecatedConstantEnum( - BinarySensorDeviceClass.BATTERY_CHARGING, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_CO = DeprecatedConstantEnum( - BinarySensorDeviceClass.CO, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_COLD = DeprecatedConstantEnum( - BinarySensorDeviceClass.COLD, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_CONNECTIVITY = DeprecatedConstantEnum( - BinarySensorDeviceClass.CONNECTIVITY, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_DOOR = DeprecatedConstantEnum( - BinarySensorDeviceClass.DOOR, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_GARAGE_DOOR = DeprecatedConstantEnum( - BinarySensorDeviceClass.GARAGE_DOOR, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_GAS = DeprecatedConstantEnum( - BinarySensorDeviceClass.GAS, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_HEAT = DeprecatedConstantEnum( - BinarySensorDeviceClass.HEAT, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_LIGHT = DeprecatedConstantEnum( - BinarySensorDeviceClass.LIGHT, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_LOCK = DeprecatedConstantEnum( - BinarySensorDeviceClass.LOCK, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_MOISTURE = DeprecatedConstantEnum( - BinarySensorDeviceClass.MOISTURE, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_MOTION = DeprecatedConstantEnum( - BinarySensorDeviceClass.MOTION, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_MOVING = DeprecatedConstantEnum( - BinarySensorDeviceClass.MOVING, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_OCCUPANCY = DeprecatedConstantEnum( - BinarySensorDeviceClass.OCCUPANCY, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_OPENING = DeprecatedConstantEnum( - BinarySensorDeviceClass.OPENING, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_PLUG = DeprecatedConstantEnum( - BinarySensorDeviceClass.PLUG, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_POWER = DeprecatedConstantEnum( - BinarySensorDeviceClass.POWER, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_PRESENCE = DeprecatedConstantEnum( - BinarySensorDeviceClass.PRESENCE, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_PROBLEM = DeprecatedConstantEnum( - BinarySensorDeviceClass.PROBLEM, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_RUNNING = DeprecatedConstantEnum( - BinarySensorDeviceClass.RUNNING, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_SAFETY = DeprecatedConstantEnum( - BinarySensorDeviceClass.SAFETY, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_SMOKE = DeprecatedConstantEnum( - BinarySensorDeviceClass.SMOKE, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_SOUND = DeprecatedConstantEnum( - BinarySensorDeviceClass.SOUND, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_TAMPER = DeprecatedConstantEnum( - BinarySensorDeviceClass.TAMPER, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_UPDATE = DeprecatedConstantEnum( - BinarySensorDeviceClass.UPDATE, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_VIBRATION = DeprecatedConstantEnum( - BinarySensorDeviceClass.VIBRATION, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_WINDOW = DeprecatedConstantEnum( - 
BinarySensorDeviceClass.WINDOW, "2025.1" -) # mypy: disallow-any-generics @@ -294,11 +200,3 @@ class BinarySensorEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_) if (is_on := self.is_on) is None: return None return STATE_ON if is_on else STATE_OFF - - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/tests/components/binary_sensor/test_init.py b/tests/components/binary_sensor/test_init.py index ea0ad05a0db..26b8d919d72 100644 --- a/tests/components/binary_sensor/test_init.py +++ b/tests/components/binary_sensor/test_init.py @@ -17,8 +17,6 @@ from tests.common import ( MockConfigEntry, MockModule, MockPlatform, - help_test_all, - import_and_test_deprecated_constant_enum, mock_config_flow, mock_integration, mock_platform, @@ -198,22 +196,3 @@ async def test_entity_category_config_raises_error( "Entity binary_sensor.test2 cannot be added as the entity category is set to config" in caplog.text ) - - -def test_all() -> None: - """Test module.__all__ is correctly set.""" - help_test_all(binary_sensor) - - -@pytest.mark.parametrize( - "device_class", - list(binary_sensor.BinarySensorDeviceClass), -) -def test_deprecated_constant_device_class( - caplog: pytest.LogCaptureFixture, - device_class: binary_sensor.BinarySensorDeviceClass, -) -> None: - """Test deprecated binary sensor device classes.""" - import_and_test_deprecated_constant_enum( - caplog, binary_sensor, device_class, "DEVICE_CLASS_", "2025.1" - ) From fd14add67beb8f3b62c6cc999fc1f34ffb64eff7 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Thu, 28 Nov 2024 11:20:44 +0100 Subject: [PATCH 0028/1198] Remove deprecated device registry constants (#131802) --- homeassistant/helpers/device_registry.py | 26 +----------------------- tests/helpers/test_device_registry.py | 22 +------------------- 2 files changed, 2 insertions(+), 46 deletions(-) diff --git a/homeassistant/helpers/device_registry.py b/homeassistant/helpers/device_registry.py index 5dfd5d9e8a9..981430f192d 100644 --- a/homeassistant/helpers/device_registry.py +++ b/homeassistant/helpers/device_registry.py @@ -6,7 +6,7 @@ from collections import defaultdict from collections.abc import Mapping from datetime import datetime from enum import StrEnum -from functools import lru_cache, partial +from functools import lru_cache import logging import time from typing import TYPE_CHECKING, Any, Literal, TypedDict @@ -32,12 +32,6 @@ import homeassistant.util.uuid as uuid_util from . 
import storage, translation from .debounce import Debouncer -from .deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from .frame import ReportBehavior, report_usage from .json import JSON_DUMP, find_paths_unserializable_data, json_bytes, json_fragment from .registry import BaseRegistry, BaseRegistryItems, RegistryIndexType @@ -86,16 +80,6 @@ class DeviceEntryDisabler(StrEnum): USER = "user" -# DISABLED_* are deprecated, to be removed in 2022.3 -_DEPRECATED_DISABLED_CONFIG_ENTRY = DeprecatedConstantEnum( - DeviceEntryDisabler.CONFIG_ENTRY, "2025.1" -) -_DEPRECATED_DISABLED_INTEGRATION = DeprecatedConstantEnum( - DeviceEntryDisabler.INTEGRATION, "2025.1" -) -_DEPRECATED_DISABLED_USER = DeprecatedConstantEnum(DeviceEntryDisabler.USER, "2025.1") - - class DeviceInfo(TypedDict, total=False): """Entity device information for device registry.""" @@ -1480,11 +1464,3 @@ def _normalize_connections(connections: set[tuple[str, str]]) -> set[tuple[str, (key, format_mac(value)) if key == CONNECTION_NETWORK_MAC else (key, value) for key, value in connections } - - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/tests/helpers/test_device_registry.py b/tests/helpers/test_device_registry.py index 2335b1b93bd..cf7bbe7d1e2 100644 --- a/tests/helpers/test_device_registry.py +++ b/tests/helpers/test_device_registry.py @@ -23,13 +23,7 @@ from homeassistant.helpers import ( ) from homeassistant.util.dt import utcnow -from tests.common import ( - MockConfigEntry, - async_capture_events, - flush_store, - help_test_all, - import_and_test_deprecated_constant_enum, -) +from tests.common import MockConfigEntry, async_capture_events, flush_store @pytest.fixture @@ -2904,20 +2898,6 @@ async def test_loading_invalid_configuration_url_from_storage( assert entry.configuration_url == "invalid" -def test_all() -> None: - """Test module.__all__ is correctly set.""" - help_test_all(dr) - - -@pytest.mark.parametrize(("enum"), list(dr.DeviceEntryDisabler)) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: dr.DeviceEntryDisabler, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum(caplog, dr, enum, "DISABLED_", "2025.1") - - async def test_removing_labels( hass: HomeAssistant, device_registry: dr.DeviceRegistry ) -> None: From 54ff6feadcfb4be0c6450530ffb96ee1f7db5931 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Thu, 28 Nov 2024 12:11:08 +0100 Subject: [PATCH 0029/1198] Remove deprecated alarm control panel constants (#131790) --- .../alarm_control_panel/__init__.py | 26 +--------- .../components/alarm_control_panel/const.py | 42 ---------------- .../alarm_control_panel/test_init.py | 50 ------------------- 3 files changed, 1 insertion(+), 117 deletions(-) diff --git a/homeassistant/components/alarm_control_panel/__init__.py b/homeassistant/components/alarm_control_panel/__init__.py index 4bcd2adb60f..5bb00360177 100644 --- a/homeassistant/components/alarm_control_panel/__init__.py +++ b/homeassistant/components/alarm_control_panel/__init__.py @@ -4,7 +4,6 @@ from __future__ import annotations import asyncio from datetime import timedelta -from functools import partial import logging from typing import 
TYPE_CHECKING, Any, Final, final @@ -27,11 +26,6 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ServiceValidationError import homeassistant.helpers.config_validation as cv from homeassistant.helpers.config_validation import make_entity_service_schema -from homeassistant.helpers.deprecation import ( - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.entity_platform import EntityPlatform @@ -39,15 +33,7 @@ from homeassistant.helpers.frame import ReportBehavior, report_usage from homeassistant.helpers.typing import ConfigType from homeassistant.util.hass_dict import HassKey -from .const import ( # noqa: F401 - _DEPRECATED_FORMAT_NUMBER, - _DEPRECATED_FORMAT_TEXT, - _DEPRECATED_SUPPORT_ALARM_ARM_AWAY, - _DEPRECATED_SUPPORT_ALARM_ARM_CUSTOM_BYPASS, - _DEPRECATED_SUPPORT_ALARM_ARM_HOME, - _DEPRECATED_SUPPORT_ALARM_ARM_NIGHT, - _DEPRECATED_SUPPORT_ALARM_ARM_VACATION, - _DEPRECATED_SUPPORT_ALARM_TRIGGER, +from .const import ( ATTR_CHANGED_BY, ATTR_CODE_ARM_REQUIRED, DOMAIN, @@ -412,13 +398,3 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A self._alarm_control_panel_option_default_code = default_code return self._alarm_control_panel_option_default_code = None - - -# As we import constants of the const module here, we need to add the following -# functions to check for deprecated constants again -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/alarm_control_panel/const.py b/homeassistant/components/alarm_control_panel/const.py index f3218626ead..f9a5887513c 100644 --- a/homeassistant/components/alarm_control_panel/const.py +++ b/homeassistant/components/alarm_control_panel/const.py @@ -1,16 +1,8 @@ """Provides the constants needed for component.""" from enum import IntFlag, StrEnum -from functools import partial from typing import Final -from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) - DOMAIN: Final = "alarm_control_panel" ATTR_CHANGED_BY: Final = "changed_by" @@ -39,12 +31,6 @@ class CodeFormat(StrEnum): NUMBER = "number" -# These constants are deprecated as of Home Assistant 2022.5, can be removed in 2025.1 -# Please use the CodeFormat enum instead. -_DEPRECATED_FORMAT_TEXT: Final = DeprecatedConstantEnum(CodeFormat.TEXT, "2025.1") -_DEPRECATED_FORMAT_NUMBER: Final = DeprecatedConstantEnum(CodeFormat.NUMBER, "2025.1") - - class AlarmControlPanelEntityFeature(IntFlag): """Supported features of the alarm control panel entity.""" @@ -56,27 +42,6 @@ class AlarmControlPanelEntityFeature(IntFlag): ARM_VACATION = 32 -# These constants are deprecated as of Home Assistant 2022.5 -# Please use the AlarmControlPanelEntityFeature enum instead. 
-_DEPRECATED_SUPPORT_ALARM_ARM_HOME: Final = DeprecatedConstantEnum( - AlarmControlPanelEntityFeature.ARM_HOME, "2025.1" -) -_DEPRECATED_SUPPORT_ALARM_ARM_AWAY: Final = DeprecatedConstantEnum( - AlarmControlPanelEntityFeature.ARM_AWAY, "2025.1" -) -_DEPRECATED_SUPPORT_ALARM_ARM_NIGHT: Final = DeprecatedConstantEnum( - AlarmControlPanelEntityFeature.ARM_NIGHT, "2025.1" -) -_DEPRECATED_SUPPORT_ALARM_TRIGGER: Final = DeprecatedConstantEnum( - AlarmControlPanelEntityFeature.TRIGGER, "2025.1" -) -_DEPRECATED_SUPPORT_ALARM_ARM_CUSTOM_BYPASS: Final = DeprecatedConstantEnum( - AlarmControlPanelEntityFeature.ARM_CUSTOM_BYPASS, "2025.1" -) -_DEPRECATED_SUPPORT_ALARM_ARM_VACATION: Final = DeprecatedConstantEnum( - AlarmControlPanelEntityFeature.ARM_VACATION, "2025.1" -) - CONDITION_TRIGGERED: Final = "is_triggered" CONDITION_DISARMED: Final = "is_disarmed" CONDITION_ARMED_HOME: Final = "is_armed_home" @@ -84,10 +49,3 @@ CONDITION_ARMED_AWAY: Final = "is_armed_away" CONDITION_ARMED_NIGHT: Final = "is_armed_night" CONDITION_ARMED_VACATION: Final = "is_armed_vacation" CONDITION_ARMED_CUSTOM_BYPASS: Final = "is_armed_custom_bypass" - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/tests/components/alarm_control_panel/test_init.py b/tests/components/alarm_control_panel/test_init.py index 58f585b40ea..84d27a96db2 100644 --- a/tests/components/alarm_control_panel/test_init.py +++ b/tests/components/alarm_control_panel/test_init.py @@ -1,6 +1,5 @@ """Test for the alarm control panel const module.""" -from types import ModuleType from typing import Any from unittest.mock import patch @@ -33,8 +32,6 @@ from .conftest import MockAlarmControlPanel from tests.common import ( MockConfigEntry, MockModule, - help_test_all, - import_and_test_deprecated_constant_enum, mock_integration, setup_test_component_platform, ) @@ -57,53 +54,6 @@ async def help_test_async_alarm_control_panel_service( await hass.async_block_till_done() -@pytest.mark.parametrize( - "module", - [alarm_control_panel, alarm_control_panel.const], -) -def test_all(module: ModuleType) -> None: - """Test module.__all__ is correctly set.""" - help_test_all(module) - - -@pytest.mark.parametrize( - "code_format", - list(alarm_control_panel.CodeFormat), -) -@pytest.mark.parametrize( - "module", - [alarm_control_panel, alarm_control_panel.const], -) -def test_deprecated_constant_code_format( - caplog: pytest.LogCaptureFixture, - code_format: alarm_control_panel.CodeFormat, - module: ModuleType, -) -> None: - """Test deprecated format constants.""" - import_and_test_deprecated_constant_enum( - caplog, module, code_format, "FORMAT_", "2025.1" - ) - - -@pytest.mark.parametrize( - "entity_feature", - list(alarm_control_panel.AlarmControlPanelEntityFeature), -) -@pytest.mark.parametrize( - "module", - [alarm_control_panel, alarm_control_panel.const], -) -def test_deprecated_support_alarm_constants( - caplog: pytest.LogCaptureFixture, - entity_feature: alarm_control_panel.AlarmControlPanelEntityFeature, - module: ModuleType, -) -> None: - """Test deprecated support alarm constants.""" - import_and_test_deprecated_constant_enum( - caplog, module, entity_feature, "SUPPORT_ALARM_", "2025.1" - ) - - def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: """Test deprecated 
supported features ints.""" From a67045ee6ce4d8d562dad4bf26f3d2c4b37c7aad Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Thu, 28 Nov 2024 12:12:37 +0100 Subject: [PATCH 0030/1198] Remove deprecated home assistant const constants (#131799) --- homeassistant/components/sensor/__init__.py | 28 - homeassistant/const.py | 677 -------------------- tests/components/sensor/test_init.py | 43 -- tests/test_const.py | 157 +---- 4 files changed, 1 insertion(+), 904 deletions(-) diff --git a/homeassistant/components/sensor/__init__.py b/homeassistant/components/sensor/__init__.py index f1864458ce8..3c92506a45e 100644 --- a/homeassistant/components/sensor/__init__.py +++ b/homeassistant/components/sensor/__init__.py @@ -17,34 +17,6 @@ from propcache import cached_property from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( # noqa: F401 - _DEPRECATED_DEVICE_CLASS_AQI, - _DEPRECATED_DEVICE_CLASS_BATTERY, - _DEPRECATED_DEVICE_CLASS_CO, - _DEPRECATED_DEVICE_CLASS_CO2, - _DEPRECATED_DEVICE_CLASS_CURRENT, - _DEPRECATED_DEVICE_CLASS_DATE, - _DEPRECATED_DEVICE_CLASS_ENERGY, - _DEPRECATED_DEVICE_CLASS_FREQUENCY, - _DEPRECATED_DEVICE_CLASS_GAS, - _DEPRECATED_DEVICE_CLASS_HUMIDITY, - _DEPRECATED_DEVICE_CLASS_ILLUMINANCE, - _DEPRECATED_DEVICE_CLASS_MONETARY, - _DEPRECATED_DEVICE_CLASS_NITROGEN_DIOXIDE, - _DEPRECATED_DEVICE_CLASS_NITROGEN_MONOXIDE, - _DEPRECATED_DEVICE_CLASS_NITROUS_OXIDE, - _DEPRECATED_DEVICE_CLASS_OZONE, - _DEPRECATED_DEVICE_CLASS_PM1, - _DEPRECATED_DEVICE_CLASS_PM10, - _DEPRECATED_DEVICE_CLASS_PM25, - _DEPRECATED_DEVICE_CLASS_POWER, - _DEPRECATED_DEVICE_CLASS_POWER_FACTOR, - _DEPRECATED_DEVICE_CLASS_PRESSURE, - _DEPRECATED_DEVICE_CLASS_SIGNAL_STRENGTH, - _DEPRECATED_DEVICE_CLASS_SULPHUR_DIOXIDE, - _DEPRECATED_DEVICE_CLASS_TEMPERATURE, - _DEPRECATED_DEVICE_CLASS_TIMESTAMP, - _DEPRECATED_DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS, - _DEPRECATED_DEVICE_CLASS_VOLTAGE, ATTR_UNIT_OF_MEASUREMENT, CONF_UNIT_OF_MEASUREMENT, EntityCategory, diff --git a/homeassistant/const.py b/homeassistant/const.py index 7d17e8f7779..2eb4194ad15 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -336,133 +336,6 @@ EVENT_RECORDER_HOURLY_STATISTICS_GENERATED: Final = ( ) EVENT_SHOPPING_LIST_UPDATED: Final = "shopping_list_updated" -# #### DEVICE CLASSES #### -# DEVICE_CLASS_* below are deprecated as of 2021.12 -# use the SensorDeviceClass enum instead. 
-_DEPRECATED_DEVICE_CLASS_AQI: Final = DeprecatedConstant( - "aqi", "SensorDeviceClass.AQI", "2025.1" -) -_DEPRECATED_DEVICE_CLASS_BATTERY: Final = DeprecatedConstant( - "battery", - "SensorDeviceClass.BATTERY", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_CO: Final = DeprecatedConstant( - "carbon_monoxide", - "SensorDeviceClass.CO", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_CO2: Final = DeprecatedConstant( - "carbon_dioxide", - "SensorDeviceClass.CO2", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_CURRENT: Final = DeprecatedConstant( - "current", - "SensorDeviceClass.CURRENT", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_DATE: Final = DeprecatedConstant( - "date", "SensorDeviceClass.DATE", "2025.1" -) -_DEPRECATED_DEVICE_CLASS_ENERGY: Final = DeprecatedConstant( - "energy", - "SensorDeviceClass.ENERGY", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_FREQUENCY: Final = DeprecatedConstant( - "frequency", - "SensorDeviceClass.FREQUENCY", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_GAS: Final = DeprecatedConstant( - "gas", "SensorDeviceClass.GAS", "2025.1" -) -_DEPRECATED_DEVICE_CLASS_HUMIDITY: Final = DeprecatedConstant( - "humidity", - "SensorDeviceClass.HUMIDITY", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_ILLUMINANCE: Final = DeprecatedConstant( - "illuminance", - "SensorDeviceClass.ILLUMINANCE", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_MONETARY: Final = DeprecatedConstant( - "monetary", - "SensorDeviceClass.MONETARY", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_NITROGEN_DIOXIDE: Final = DeprecatedConstant( - "nitrogen_dioxide", - "SensorDeviceClass.NITROGEN_DIOXIDE", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_NITROGEN_MONOXIDE: Final = DeprecatedConstant( - "nitrogen_monoxide", - "SensorDeviceClass.NITROGEN_MONOXIDE", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_NITROUS_OXIDE: Final = DeprecatedConstant( - "nitrous_oxide", - "SensorDeviceClass.NITROUS_OXIDE", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_OZONE: Final = DeprecatedConstant( - "ozone", "SensorDeviceClass.OZONE", "2025.1" -) -_DEPRECATED_DEVICE_CLASS_PM1: Final = DeprecatedConstant( - "pm1", "SensorDeviceClass.PM1", "2025.1" -) -_DEPRECATED_DEVICE_CLASS_PM10: Final = DeprecatedConstant( - "pm10", "SensorDeviceClass.PM10", "2025.1" -) -_DEPRECATED_DEVICE_CLASS_PM25: Final = DeprecatedConstant( - "pm25", "SensorDeviceClass.PM25", "2025.1" -) -_DEPRECATED_DEVICE_CLASS_POWER_FACTOR: Final = DeprecatedConstant( - "power_factor", - "SensorDeviceClass.POWER_FACTOR", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_POWER: Final = DeprecatedConstant( - "power", "SensorDeviceClass.POWER", "2025.1" -) -_DEPRECATED_DEVICE_CLASS_PRESSURE: Final = DeprecatedConstant( - "pressure", - "SensorDeviceClass.PRESSURE", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_SIGNAL_STRENGTH: Final = DeprecatedConstant( - "signal_strength", - "SensorDeviceClass.SIGNAL_STRENGTH", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_SULPHUR_DIOXIDE: Final = DeprecatedConstant( - "sulphur_dioxide", - "SensorDeviceClass.SULPHUR_DIOXIDE", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_TEMPERATURE: Final = DeprecatedConstant( - "temperature", - "SensorDeviceClass.TEMPERATURE", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_TIMESTAMP: Final = DeprecatedConstant( - "timestamp", - "SensorDeviceClass.TIMESTAMP", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS: Final = DeprecatedConstant( - "volatile_organic_compounds", - "SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_VOLTAGE: Final = DeprecatedConstant( - "voltage", - "SensorDeviceClass.VOLTAGE", - "2025.1", -) # #### STATES #### 
STATE_ON: Final = "on" @@ -712,13 +585,6 @@ class UnitOfApparentPower(StrEnum): VOLT_AMPERE = "VA" -_DEPRECATED_POWER_VOLT_AMPERE: Final = DeprecatedConstantEnum( - UnitOfApparentPower.VOLT_AMPERE, - "2025.1", -) -"""Deprecated: please use UnitOfApparentPower.VOLT_AMPERE.""" - - # Power units class UnitOfPower(StrEnum): """Power units.""" @@ -731,23 +597,6 @@ class UnitOfPower(StrEnum): BTU_PER_HOUR = "BTU/h" -_DEPRECATED_POWER_WATT: Final = DeprecatedConstantEnum( - UnitOfPower.WATT, - "2025.1", -) -"""Deprecated: please use UnitOfPower.WATT.""" -_DEPRECATED_POWER_KILO_WATT: Final = DeprecatedConstantEnum( - UnitOfPower.KILO_WATT, - "2025.1", -) -"""Deprecated: please use UnitOfPower.KILO_WATT.""" -_DEPRECATED_POWER_BTU_PER_HOUR: Final = DeprecatedConstantEnum( - UnitOfPower.BTU_PER_HOUR, - "2025.1", -) -"""Deprecated: please use UnitOfPower.BTU_PER_HOUR.""" - - # Reactive power units class UnitOfReactivePower(StrEnum): """Reactive power units.""" @@ -781,23 +630,6 @@ class UnitOfEnergy(StrEnum): GIGA_CALORIE = "Gcal" -_DEPRECATED_ENERGY_KILO_WATT_HOUR: Final = DeprecatedConstantEnum( - UnitOfEnergy.KILO_WATT_HOUR, - "2025.1", -) -"""Deprecated: please use UnitOfEnergy.KILO_WATT_HOUR.""" -_DEPRECATED_ENERGY_MEGA_WATT_HOUR: Final = DeprecatedConstantEnum( - UnitOfEnergy.MEGA_WATT_HOUR, - "2025.1", -) -"""Deprecated: please use UnitOfEnergy.MEGA_WATT_HOUR.""" -_DEPRECATED_ENERGY_WATT_HOUR: Final = DeprecatedConstantEnum( - UnitOfEnergy.WATT_HOUR, - "2025.1", -) -"""Deprecated: please use UnitOfEnergy.WATT_HOUR.""" - - # Electric_current units class UnitOfElectricCurrent(StrEnum): """Electric current units.""" @@ -806,18 +638,6 @@ class UnitOfElectricCurrent(StrEnum): AMPERE = "A" -_DEPRECATED_ELECTRIC_CURRENT_MILLIAMPERE: Final = DeprecatedConstantEnum( - UnitOfElectricCurrent.MILLIAMPERE, - "2025.1", -) -"""Deprecated: please use UnitOfElectricCurrent.MILLIAMPERE.""" -_DEPRECATED_ELECTRIC_CURRENT_AMPERE: Final = DeprecatedConstantEnum( - UnitOfElectricCurrent.AMPERE, - "2025.1", -) -"""Deprecated: please use UnitOfElectricCurrent.AMPERE.""" - - # Electric_potential units class UnitOfElectricPotential(StrEnum): """Electric potential units.""" @@ -827,17 +647,6 @@ class UnitOfElectricPotential(StrEnum): VOLT = "V" -_DEPRECATED_ELECTRIC_POTENTIAL_MILLIVOLT: Final = DeprecatedConstantEnum( - UnitOfElectricPotential.MILLIVOLT, - "2025.1", -) -"""Deprecated: please use UnitOfElectricPotential.MILLIVOLT.""" -_DEPRECATED_ELECTRIC_POTENTIAL_VOLT: Final = DeprecatedConstantEnum( - UnitOfElectricPotential.VOLT, - "2025.1", -) -"""Deprecated: please use UnitOfElectricPotential.VOLT.""" - # Degree units DEGREE: Final = "°" @@ -856,23 +665,6 @@ class UnitOfTemperature(StrEnum): KELVIN = "K" -_DEPRECATED_TEMP_CELSIUS: Final = DeprecatedConstantEnum( - UnitOfTemperature.CELSIUS, - "2025.1", -) -"""Deprecated: please use UnitOfTemperature.CELSIUS""" -_DEPRECATED_TEMP_FAHRENHEIT: Final = DeprecatedConstantEnum( - UnitOfTemperature.FAHRENHEIT, - "2025.1", -) -"""Deprecated: please use UnitOfTemperature.FAHRENHEIT""" -_DEPRECATED_TEMP_KELVIN: Final = DeprecatedConstantEnum( - UnitOfTemperature.KELVIN, - "2025.1", -) -"""Deprecated: please use UnitOfTemperature.KELVIN""" - - # Time units class UnitOfTime(StrEnum): """Time units.""" @@ -888,53 +680,6 @@ class UnitOfTime(StrEnum): YEARS = "y" -_DEPRECATED_TIME_MICROSECONDS: Final = DeprecatedConstantEnum( - UnitOfTime.MICROSECONDS, - "2025.1", -) -"""Deprecated: please use UnitOfTime.MICROSECONDS.""" -_DEPRECATED_TIME_MILLISECONDS: Final = 
DeprecatedConstantEnum( - UnitOfTime.MILLISECONDS, - "2025.1", -) -"""Deprecated: please use UnitOfTime.MILLISECONDS.""" -_DEPRECATED_TIME_SECONDS: Final = DeprecatedConstantEnum( - UnitOfTime.SECONDS, - "2025.1", -) -"""Deprecated: please use UnitOfTime.SECONDS.""" -_DEPRECATED_TIME_MINUTES: Final = DeprecatedConstantEnum( - UnitOfTime.MINUTES, - "2025.1", -) -"""Deprecated: please use UnitOfTime.MINUTES.""" -_DEPRECATED_TIME_HOURS: Final = DeprecatedConstantEnum( - UnitOfTime.HOURS, - "2025.1", -) -"""Deprecated: please use UnitOfTime.HOURS.""" -_DEPRECATED_TIME_DAYS: Final = DeprecatedConstantEnum( - UnitOfTime.DAYS, - "2025.1", -) -"""Deprecated: please use UnitOfTime.DAYS.""" -_DEPRECATED_TIME_WEEKS: Final = DeprecatedConstantEnum( - UnitOfTime.WEEKS, - "2025.1", -) -"""Deprecated: please use UnitOfTime.WEEKS.""" -_DEPRECATED_TIME_MONTHS: Final = DeprecatedConstantEnum( - UnitOfTime.MONTHS, - "2025.1", -) -"""Deprecated: please use UnitOfTime.MONTHS.""" -_DEPRECATED_TIME_YEARS: Final = DeprecatedConstantEnum( - UnitOfTime.YEARS, - "2025.1", -) -"""Deprecated: please use UnitOfTime.YEARS.""" - - # Length units class UnitOfLength(StrEnum): """Length units.""" @@ -950,48 +695,6 @@ class UnitOfLength(StrEnum): NAUTICAL_MILES = "nmi" -_DEPRECATED_LENGTH_MILLIMETERS: Final = DeprecatedConstantEnum( - UnitOfLength.MILLIMETERS, - "2025.1", -) -"""Deprecated: please use UnitOfLength.MILLIMETERS.""" -_DEPRECATED_LENGTH_CENTIMETERS: Final = DeprecatedConstantEnum( - UnitOfLength.CENTIMETERS, - "2025.1", -) -"""Deprecated: please use UnitOfLength.CENTIMETERS.""" -_DEPRECATED_LENGTH_METERS: Final = DeprecatedConstantEnum( - UnitOfLength.METERS, - "2025.1", -) -"""Deprecated: please use UnitOfLength.METERS.""" -_DEPRECATED_LENGTH_KILOMETERS: Final = DeprecatedConstantEnum( - UnitOfLength.KILOMETERS, - "2025.1", -) -"""Deprecated: please use UnitOfLength.KILOMETERS.""" -_DEPRECATED_LENGTH_INCHES: Final = DeprecatedConstantEnum( - UnitOfLength.INCHES, - "2025.1", -) -"""Deprecated: please use UnitOfLength.INCHES.""" -_DEPRECATED_LENGTH_FEET: Final = DeprecatedConstantEnum( - UnitOfLength.FEET, - "2025.1", -) -"""Deprecated: please use UnitOfLength.FEET.""" -_DEPRECATED_LENGTH_YARD: Final = DeprecatedConstantEnum( - UnitOfLength.YARDS, - "2025.1", -) -"""Deprecated: please use UnitOfLength.YARDS.""" -_DEPRECATED_LENGTH_MILES: Final = DeprecatedConstantEnum( - UnitOfLength.MILES, - "2025.1", -) -"""Deprecated: please use UnitOfLength.MILES.""" - - # Frequency units class UnitOfFrequency(StrEnum): """Frequency units.""" @@ -1002,28 +705,6 @@ class UnitOfFrequency(StrEnum): GIGAHERTZ = "GHz" -_DEPRECATED_FREQUENCY_HERTZ: Final = DeprecatedConstantEnum( - UnitOfFrequency.HERTZ, - "2025.1", -) -"""Deprecated: please use UnitOfFrequency.HERTZ""" -_DEPRECATED_FREQUENCY_KILOHERTZ: Final = DeprecatedConstantEnum( - UnitOfFrequency.KILOHERTZ, - "2025.1", -) -"""Deprecated: please use UnitOfFrequency.KILOHERTZ""" -_DEPRECATED_FREQUENCY_MEGAHERTZ: Final = DeprecatedConstantEnum( - UnitOfFrequency.MEGAHERTZ, - "2025.1", -) -"""Deprecated: please use UnitOfFrequency.MEGAHERTZ""" -_DEPRECATED_FREQUENCY_GIGAHERTZ: Final = DeprecatedConstantEnum( - UnitOfFrequency.GIGAHERTZ, - "2025.1", -) -"""Deprecated: please use UnitOfFrequency.GIGAHERTZ""" - - # Pressure units class UnitOfPressure(StrEnum): """Pressure units.""" @@ -1039,53 +720,6 @@ class UnitOfPressure(StrEnum): PSI = "psi" -_DEPRECATED_PRESSURE_PA: Final = DeprecatedConstantEnum( - UnitOfPressure.PA, - "2025.1", -) -"""Deprecated: please use 
UnitOfPressure.PA""" -_DEPRECATED_PRESSURE_HPA: Final = DeprecatedConstantEnum( - UnitOfPressure.HPA, - "2025.1", -) -"""Deprecated: please use UnitOfPressure.HPA""" -_DEPRECATED_PRESSURE_KPA: Final = DeprecatedConstantEnum( - UnitOfPressure.KPA, - "2025.1", -) -"""Deprecated: please use UnitOfPressure.KPA""" -_DEPRECATED_PRESSURE_BAR: Final = DeprecatedConstantEnum( - UnitOfPressure.BAR, - "2025.1", -) -"""Deprecated: please use UnitOfPressure.BAR""" -_DEPRECATED_PRESSURE_CBAR: Final = DeprecatedConstantEnum( - UnitOfPressure.CBAR, - "2025.1", -) -"""Deprecated: please use UnitOfPressure.CBAR""" -_DEPRECATED_PRESSURE_MBAR: Final = DeprecatedConstantEnum( - UnitOfPressure.MBAR, - "2025.1", -) -"""Deprecated: please use UnitOfPressure.MBAR""" -_DEPRECATED_PRESSURE_MMHG: Final = DeprecatedConstantEnum( - UnitOfPressure.MMHG, - "2025.1", -) -"""Deprecated: please use UnitOfPressure.MMHG""" -_DEPRECATED_PRESSURE_INHG: Final = DeprecatedConstantEnum( - UnitOfPressure.INHG, - "2025.1", -) -"""Deprecated: please use UnitOfPressure.INHG""" -_DEPRECATED_PRESSURE_PSI: Final = DeprecatedConstantEnum( - UnitOfPressure.PSI, - "2025.1", -) -"""Deprecated: please use UnitOfPressure.PSI""" - - # Sound pressure units class UnitOfSoundPressure(StrEnum): """Sound pressure units.""" @@ -1094,18 +728,6 @@ class UnitOfSoundPressure(StrEnum): WEIGHTED_DECIBEL_A = "dBA" -_DEPRECATED_SOUND_PRESSURE_DB: Final = DeprecatedConstantEnum( - UnitOfSoundPressure.DECIBEL, - "2025.1", -) -"""Deprecated: please use UnitOfSoundPressure.DECIBEL""" -_DEPRECATED_SOUND_PRESSURE_WEIGHTED_DBA: Final = DeprecatedConstantEnum( - UnitOfSoundPressure.WEIGHTED_DECIBEL_A, - "2025.1", -) -"""Deprecated: please use UnitOfSoundPressure.WEIGHTED_DECIBEL_A""" - - # Volume units class UnitOfVolume(StrEnum): """Volume units.""" @@ -1125,39 +747,6 @@ class UnitOfVolume(StrEnum): British/Imperial fluid ounces are not yet supported""" -_DEPRECATED_VOLUME_LITERS: Final = DeprecatedConstantEnum( - UnitOfVolume.LITERS, - "2025.1", -) -"""Deprecated: please use UnitOfVolume.LITERS""" -_DEPRECATED_VOLUME_MILLILITERS: Final = DeprecatedConstantEnum( - UnitOfVolume.MILLILITERS, - "2025.1", -) -"""Deprecated: please use UnitOfVolume.MILLILITERS""" -_DEPRECATED_VOLUME_CUBIC_METERS: Final = DeprecatedConstantEnum( - UnitOfVolume.CUBIC_METERS, - "2025.1", -) -"""Deprecated: please use UnitOfVolume.CUBIC_METERS""" -_DEPRECATED_VOLUME_CUBIC_FEET: Final = DeprecatedConstantEnum( - UnitOfVolume.CUBIC_FEET, - "2025.1", -) -"""Deprecated: please use UnitOfVolume.CUBIC_FEET""" - -_DEPRECATED_VOLUME_GALLONS: Final = DeprecatedConstantEnum( - UnitOfVolume.GALLONS, - "2025.1", -) -"""Deprecated: please use UnitOfVolume.GALLONS""" -_DEPRECATED_VOLUME_FLUID_OUNCE: Final = DeprecatedConstantEnum( - UnitOfVolume.FLUID_OUNCES, - "2025.1", -) -"""Deprecated: please use UnitOfVolume.FLUID_OUNCES""" - - # Volume Flow Rate units class UnitOfVolumeFlowRate(StrEnum): """Volume flow rate units.""" @@ -1169,18 +758,6 @@ class UnitOfVolumeFlowRate(StrEnum): MILLILITERS_PER_SECOND = "mL/s" -_DEPRECATED_VOLUME_FLOW_RATE_CUBIC_METERS_PER_HOUR: Final = DeprecatedConstantEnum( - UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR, - "2025.1", -) -"""Deprecated: please use UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR""" -_DEPRECATED_VOLUME_FLOW_RATE_CUBIC_FEET_PER_MINUTE: Final = DeprecatedConstantEnum( - UnitOfVolumeFlowRate.CUBIC_FEET_PER_MINUTE, - "2025.1", -) -"""Deprecated: please use UnitOfVolumeFlowRate.CUBIC_FEET_PER_MINUTE""" - - class UnitOfArea(StrEnum): """Area units.""" @@ -1216,38 
+793,6 @@ class UnitOfMass(StrEnum): STONES = "st" -_DEPRECATED_MASS_GRAMS: Final = DeprecatedConstantEnum( - UnitOfMass.GRAMS, - "2025.1", -) -"""Deprecated: please use UnitOfMass.GRAMS""" -_DEPRECATED_MASS_KILOGRAMS: Final = DeprecatedConstantEnum( - UnitOfMass.KILOGRAMS, - "2025.1", -) -"""Deprecated: please use UnitOfMass.KILOGRAMS""" -_DEPRECATED_MASS_MILLIGRAMS: Final = DeprecatedConstantEnum( - UnitOfMass.MILLIGRAMS, - "2025.1", -) -"""Deprecated: please use UnitOfMass.MILLIGRAMS""" -_DEPRECATED_MASS_MICROGRAMS: Final = DeprecatedConstantEnum( - UnitOfMass.MICROGRAMS, - "2025.1", -) -"""Deprecated: please use UnitOfMass.MICROGRAMS""" -_DEPRECATED_MASS_OUNCES: Final = DeprecatedConstantEnum( - UnitOfMass.OUNCES, - "2025.1", -) -"""Deprecated: please use UnitOfMass.OUNCES""" -_DEPRECATED_MASS_POUNDS: Final = DeprecatedConstantEnum( - UnitOfMass.POUNDS, - "2025.1", -) -"""Deprecated: please use UnitOfMass.POUNDS""" - - class UnitOfConductivity( StrEnum, metaclass=EnumWithDeprecatedMembers, @@ -1299,19 +844,6 @@ class UnitOfIrradiance(StrEnum): BTUS_PER_HOUR_SQUARE_FOOT = "BTU/(h⋅ft²)" -# Irradiation units -_DEPRECATED_IRRADIATION_WATTS_PER_SQUARE_METER: Final = DeprecatedConstantEnum( - UnitOfIrradiance.WATTS_PER_SQUARE_METER, - "2025.1", -) -"""Deprecated: please use UnitOfIrradiance.WATTS_PER_SQUARE_METER""" -_DEPRECATED_IRRADIATION_BTUS_PER_HOUR_SQUARE_FOOT: Final = DeprecatedConstantEnum( - UnitOfIrradiance.BTUS_PER_HOUR_SQUARE_FOOT, - "2025.1", -) -"""Deprecated: please use UnitOfIrradiance.BTUS_PER_HOUR_SQUARE_FOOT""" - - class UnitOfVolumetricFlux(StrEnum): """Volumetric flux, commonly used for precipitation intensity. @@ -1349,27 +881,6 @@ class UnitOfPrecipitationDepth(StrEnum): """Derived from cm³/cm²""" -# Precipitation units -_DEPRECATED_PRECIPITATION_INCHES: Final = DeprecatedConstantEnum( - UnitOfPrecipitationDepth.INCHES, "2025.1" -) -"""Deprecated: please use UnitOfPrecipitationDepth.INCHES""" -_DEPRECATED_PRECIPITATION_MILLIMETERS: Final = DeprecatedConstantEnum( - UnitOfPrecipitationDepth.MILLIMETERS, - "2025.1", -) -"""Deprecated: please use UnitOfPrecipitationDepth.MILLIMETERS""" -_DEPRECATED_PRECIPITATION_MILLIMETERS_PER_HOUR: Final = DeprecatedConstantEnum( - UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR, - "2025.1", -) -"""Deprecated: please use UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR""" -_DEPRECATED_PRECIPITATION_INCHES_PER_HOUR: Final = DeprecatedConstantEnum( - UnitOfVolumetricFlux.INCHES_PER_HOUR, - "2025.1", -) -"""Deprecated: please use UnitOfVolumetricFlux.INCHES_PER_HOUR""" - # Concentration units CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: Final = "µg/m³" CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER: Final = "mg/m³" @@ -1400,45 +911,6 @@ class UnitOfSpeed(StrEnum): MILLIMETERS_PER_SECOND = "mm/s" -_DEPRECATED_SPEED_FEET_PER_SECOND: Final = DeprecatedConstantEnum( - UnitOfSpeed.FEET_PER_SECOND, "2025.1" -) -"""Deprecated: please use UnitOfSpeed.FEET_PER_SECOND""" -_DEPRECATED_SPEED_METERS_PER_SECOND: Final = DeprecatedConstantEnum( - UnitOfSpeed.METERS_PER_SECOND, "2025.1" -) -"""Deprecated: please use UnitOfSpeed.METERS_PER_SECOND""" -_DEPRECATED_SPEED_KILOMETERS_PER_HOUR: Final = DeprecatedConstantEnum( - UnitOfSpeed.KILOMETERS_PER_HOUR, - "2025.1", -) -"""Deprecated: please use UnitOfSpeed.KILOMETERS_PER_HOUR""" -_DEPRECATED_SPEED_KNOTS: Final = DeprecatedConstantEnum(UnitOfSpeed.KNOTS, "2025.1") -"""Deprecated: please use UnitOfSpeed.KNOTS""" -_DEPRECATED_SPEED_MILES_PER_HOUR: Final = DeprecatedConstantEnum( - UnitOfSpeed.MILES_PER_HOUR, "2025.1" -) 
-"""Deprecated: please use UnitOfSpeed.MILES_PER_HOUR""" - -_DEPRECATED_SPEED_MILLIMETERS_PER_DAY: Final = DeprecatedConstantEnum( - UnitOfVolumetricFlux.MILLIMETERS_PER_DAY, - "2025.1", -) -"""Deprecated: please use UnitOfVolumetricFlux.MILLIMETERS_PER_DAY""" - -_DEPRECATED_SPEED_INCHES_PER_DAY: Final = DeprecatedConstantEnum( - UnitOfVolumetricFlux.INCHES_PER_DAY, - "2025.1", -) -"""Deprecated: please use UnitOfVolumetricFlux.INCHES_PER_DAY""" - -_DEPRECATED_SPEED_INCHES_PER_HOUR: Final = DeprecatedConstantEnum( - UnitOfVolumetricFlux.INCHES_PER_HOUR, - "2025.1", -) -"""Deprecated: please use UnitOfVolumetricFlux.INCHES_PER_HOUR""" - - # Signal_strength units SIGNAL_STRENGTH_DECIBELS: Final = "dB" SIGNAL_STRENGTH_DECIBELS_MILLIWATT: Final = "dBm" @@ -1471,90 +943,6 @@ class UnitOfInformation(StrEnum): YOBIBYTES = "YiB" -_DEPRECATED_DATA_BITS: Final = DeprecatedConstantEnum(UnitOfInformation.BITS, "2025.1") -"""Deprecated: please use UnitOfInformation.BITS""" -_DEPRECATED_DATA_KILOBITS: Final = DeprecatedConstantEnum( - UnitOfInformation.KILOBITS, "2025.1" -) -"""Deprecated: please use UnitOfInformation.KILOBITS""" -_DEPRECATED_DATA_MEGABITS: Final = DeprecatedConstantEnum( - UnitOfInformation.MEGABITS, "2025.1" -) -"""Deprecated: please use UnitOfInformation.MEGABITS""" -_DEPRECATED_DATA_GIGABITS: Final = DeprecatedConstantEnum( - UnitOfInformation.GIGABITS, "2025.1" -) -"""Deprecated: please use UnitOfInformation.GIGABITS""" -_DEPRECATED_DATA_BYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.BYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.BYTES""" -_DEPRECATED_DATA_KILOBYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.KILOBYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.KILOBYTES""" -_DEPRECATED_DATA_MEGABYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.MEGABYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.MEGABYTES""" -_DEPRECATED_DATA_GIGABYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.GIGABYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.GIGABYTES""" -_DEPRECATED_DATA_TERABYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.TERABYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.TERABYTES""" -_DEPRECATED_DATA_PETABYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.PETABYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.PETABYTES""" -_DEPRECATED_DATA_EXABYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.EXABYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.EXABYTES""" -_DEPRECATED_DATA_ZETTABYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.ZETTABYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.ZETTABYTES""" -_DEPRECATED_DATA_YOTTABYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.YOTTABYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.YOTTABYTES""" -_DEPRECATED_DATA_KIBIBYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.KIBIBYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.KIBIBYTES""" -_DEPRECATED_DATA_MEBIBYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.MEBIBYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.MEBIBYTES""" -_DEPRECATED_DATA_GIBIBYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.GIBIBYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.GIBIBYTES""" -_DEPRECATED_DATA_TEBIBYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.TEBIBYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.TEBIBYTES""" 
-_DEPRECATED_DATA_PEBIBYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.PEBIBYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.PEBIBYTES""" -_DEPRECATED_DATA_EXBIBYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.EXBIBYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.EXBIBYTES""" -_DEPRECATED_DATA_ZEBIBYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.ZEBIBYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.ZEBIBYTES""" -_DEPRECATED_DATA_YOBIBYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.YOBIBYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.YOBIBYTES""" - - # Data_rate units class UnitOfDataRate(StrEnum): """Data rate units.""" @@ -1572,63 +960,6 @@ class UnitOfDataRate(StrEnum): GIBIBYTES_PER_SECOND = "GiB/s" -_DEPRECATED_DATA_RATE_BITS_PER_SECOND: Final = DeprecatedConstantEnum( - UnitOfDataRate.BITS_PER_SECOND, - "2025.1", -) -"""Deprecated: please use UnitOfDataRate.BITS_PER_SECOND""" -_DEPRECATED_DATA_RATE_KILOBITS_PER_SECOND: Final = DeprecatedConstantEnum( - UnitOfDataRate.KILOBITS_PER_SECOND, - "2025.1", -) -"""Deprecated: please use UnitOfDataRate.KILOBITS_PER_SECOND""" -_DEPRECATED_DATA_RATE_MEGABITS_PER_SECOND: Final = DeprecatedConstantEnum( - UnitOfDataRate.MEGABITS_PER_SECOND, - "2025.1", -) -"""Deprecated: please use UnitOfDataRate.MEGABITS_PER_SECOND""" -_DEPRECATED_DATA_RATE_GIGABITS_PER_SECOND: Final = DeprecatedConstantEnum( - UnitOfDataRate.GIGABITS_PER_SECOND, - "2025.1", -) -"""Deprecated: please use UnitOfDataRate.GIGABITS_PER_SECOND""" -_DEPRECATED_DATA_RATE_BYTES_PER_SECOND: Final = DeprecatedConstantEnum( - UnitOfDataRate.BYTES_PER_SECOND, - "2025.1", -) -"""Deprecated: please use UnitOfDataRate.BYTES_PER_SECOND""" -_DEPRECATED_DATA_RATE_KILOBYTES_PER_SECOND: Final = DeprecatedConstantEnum( - UnitOfDataRate.KILOBYTES_PER_SECOND, - "2025.1", -) -"""Deprecated: please use UnitOfDataRate.KILOBYTES_PER_SECOND""" -_DEPRECATED_DATA_RATE_MEGABYTES_PER_SECOND: Final = DeprecatedConstantEnum( - UnitOfDataRate.MEGABYTES_PER_SECOND, - "2025.1", -) -"""Deprecated: please use UnitOfDataRate.MEGABYTES_PER_SECOND""" -_DEPRECATED_DATA_RATE_GIGABYTES_PER_SECOND: Final = DeprecatedConstantEnum( - UnitOfDataRate.GIGABYTES_PER_SECOND, - "2025.1", -) -"""Deprecated: please use UnitOfDataRate.GIGABYTES_PER_SECOND""" -_DEPRECATED_DATA_RATE_KIBIBYTES_PER_SECOND: Final = DeprecatedConstantEnum( - UnitOfDataRate.KIBIBYTES_PER_SECOND, - "2025.1", -) -"""Deprecated: please use UnitOfDataRate.KIBIBYTES_PER_SECOND""" -_DEPRECATED_DATA_RATE_MEBIBYTES_PER_SECOND: Final = DeprecatedConstantEnum( - UnitOfDataRate.MEBIBYTES_PER_SECOND, - "2025.1", -) -"""Deprecated: please use UnitOfDataRate.MEBIBYTES_PER_SECOND""" -_DEPRECATED_DATA_RATE_GIBIBYTES_PER_SECOND: Final = DeprecatedConstantEnum( - UnitOfDataRate.GIBIBYTES_PER_SECOND, - "2025.1", -) -"""Deprecated: please use UnitOfDataRate.GIBIBYTES_PER_SECOND""" - - # States COMPRESSED_STATE_STATE: Final = "s" COMPRESSED_STATE_ATTRIBUTES: Final = "a" @@ -1762,14 +1093,6 @@ class EntityCategory(StrEnum): DIAGNOSTIC = "diagnostic" -# ENTITY_CATEGOR* below are deprecated as of 2021.12 -# use the EntityCategory enum instead. 
-_DEPRECATED_ENTITY_CATEGORY_CONFIG: Final = DeprecatedConstantEnum( - EntityCategory.CONFIG, "2025.1" -) -_DEPRECATED_ENTITY_CATEGORY_DIAGNOSTIC: Final = DeprecatedConstantEnum( - EntityCategory.DIAGNOSTIC, "2025.1" -) ENTITY_CATEGORIES: Final[list[str]] = [cls.value for cls in EntityCategory] # The ID of the Home Assistant Media Player Cast App diff --git a/tests/components/sensor/test_init.py b/tests/components/sensor/test_init.py index 19c25d819b6..6c2d73cb68c 100644 --- a/tests/components/sensor/test_init.py +++ b/tests/components/sensor/test_init.py @@ -2665,49 +2665,6 @@ def test_deprecated_constants( ) -@pytest.mark.parametrize( - ("enum"), - [ - sensor.SensorDeviceClass.AQI, - sensor.SensorDeviceClass.BATTERY, - sensor.SensorDeviceClass.CO, - sensor.SensorDeviceClass.CO2, - sensor.SensorDeviceClass.CURRENT, - sensor.SensorDeviceClass.DATE, - sensor.SensorDeviceClass.ENERGY, - sensor.SensorDeviceClass.FREQUENCY, - sensor.SensorDeviceClass.GAS, - sensor.SensorDeviceClass.HUMIDITY, - sensor.SensorDeviceClass.ILLUMINANCE, - sensor.SensorDeviceClass.MONETARY, - sensor.SensorDeviceClass.NITROGEN_DIOXIDE, - sensor.SensorDeviceClass.NITROGEN_MONOXIDE, - sensor.SensorDeviceClass.NITROUS_OXIDE, - sensor.SensorDeviceClass.OZONE, - sensor.SensorDeviceClass.PM1, - sensor.SensorDeviceClass.PM10, - sensor.SensorDeviceClass.PM25, - sensor.SensorDeviceClass.POWER_FACTOR, - sensor.SensorDeviceClass.POWER, - sensor.SensorDeviceClass.PRESSURE, - sensor.SensorDeviceClass.SIGNAL_STRENGTH, - sensor.SensorDeviceClass.SULPHUR_DIOXIDE, - sensor.SensorDeviceClass.TEMPERATURE, - sensor.SensorDeviceClass.TIMESTAMP, - sensor.SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS, - sensor.SensorDeviceClass.VOLTAGE, - ], -) -def test_deprecated_constants_sensor_device_class( - caplog: pytest.LogCaptureFixture, - enum: sensor.SensorStateClass, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, sensor, enum, "DEVICE_CLASS_", "2025.1" - ) - - @pytest.mark.parametrize( ("device_class", "native_unit"), [ diff --git a/tests/test_const.py b/tests/test_const.py index ca598de39e1..a039545a004 100644 --- a/tests/test_const.py +++ b/tests/test_const.py @@ -8,7 +8,7 @@ from unittest.mock import Mock, patch import pytest from homeassistant import const -from homeassistant.components import alarm_control_panel, lock, sensor +from homeassistant.components import alarm_control_panel, lock from .common import ( extract_stack_to_frame, @@ -29,164 +29,9 @@ def test_all() -> None: help_test_all(const) -@pytest.mark.parametrize( - ("enum", "constant_prefix"), - _create_tuples(const.EntityCategory, "ENTITY_CATEGORY_") - + _create_tuples( - [ - sensor.SensorDeviceClass.AQI, - sensor.SensorDeviceClass.BATTERY, - sensor.SensorDeviceClass.CO, - sensor.SensorDeviceClass.CO2, - sensor.SensorDeviceClass.CURRENT, - sensor.SensorDeviceClass.DATE, - sensor.SensorDeviceClass.ENERGY, - sensor.SensorDeviceClass.FREQUENCY, - sensor.SensorDeviceClass.GAS, - sensor.SensorDeviceClass.HUMIDITY, - sensor.SensorDeviceClass.ILLUMINANCE, - sensor.SensorDeviceClass.MONETARY, - sensor.SensorDeviceClass.NITROGEN_DIOXIDE, - sensor.SensorDeviceClass.NITROGEN_MONOXIDE, - sensor.SensorDeviceClass.NITROUS_OXIDE, - sensor.SensorDeviceClass.OZONE, - sensor.SensorDeviceClass.PM1, - sensor.SensorDeviceClass.PM10, - sensor.SensorDeviceClass.PM25, - sensor.SensorDeviceClass.POWER_FACTOR, - sensor.SensorDeviceClass.POWER, - sensor.SensorDeviceClass.PRESSURE, - sensor.SensorDeviceClass.SIGNAL_STRENGTH, - 
sensor.SensorDeviceClass.SULPHUR_DIOXIDE, - sensor.SensorDeviceClass.TEMPERATURE, - sensor.SensorDeviceClass.TIMESTAMP, - sensor.SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS, - sensor.SensorDeviceClass.VOLTAGE, - ], - "DEVICE_CLASS_", - ) - + _create_tuples(const.UnitOfApparentPower, "POWER_") - + _create_tuples( - [ - const.UnitOfPower.WATT, - const.UnitOfPower.KILO_WATT, - const.UnitOfPower.BTU_PER_HOUR, - ], - "POWER_", - ) - + _create_tuples( - [ - const.UnitOfEnergy.KILO_WATT_HOUR, - const.UnitOfEnergy.MEGA_WATT_HOUR, - const.UnitOfEnergy.WATT_HOUR, - ], - "ENERGY_", - ) - + _create_tuples(const.UnitOfElectricCurrent, "ELECTRIC_CURRENT_") - + _create_tuples( - [ - const.UnitOfElectricPotential.MILLIVOLT, - const.UnitOfElectricPotential.VOLT, - ], - "ELECTRIC_POTENTIAL_", - ) - + _create_tuples(const.UnitOfTemperature, "TEMP_") - + _create_tuples(const.UnitOfTime, "TIME_") - + _create_tuples( - [ - const.UnitOfLength.MILLIMETERS, - const.UnitOfLength.CENTIMETERS, - const.UnitOfLength.METERS, - const.UnitOfLength.KILOMETERS, - const.UnitOfLength.INCHES, - const.UnitOfLength.FEET, - const.UnitOfLength.MILES, - ], - "LENGTH_", - ) - + _create_tuples(const.UnitOfFrequency, "FREQUENCY_") - + _create_tuples(const.UnitOfPressure, "PRESSURE_") - + _create_tuples( - [ - const.UnitOfVolume.CUBIC_FEET, - const.UnitOfVolume.CUBIC_METERS, - const.UnitOfVolume.LITERS, - const.UnitOfVolume.MILLILITERS, - const.UnitOfVolume.GALLONS, - ], - "VOLUME_", - ) - + _create_tuples( - [ - const.UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR, - const.UnitOfVolumeFlowRate.CUBIC_FEET_PER_MINUTE, - ], - "VOLUME_FLOW_RATE_", - ) - + _create_tuples( - [ - const.UnitOfMass.GRAMS, - const.UnitOfMass.KILOGRAMS, - const.UnitOfMass.MILLIGRAMS, - const.UnitOfMass.MICROGRAMS, - const.UnitOfMass.OUNCES, - const.UnitOfMass.POUNDS, - ], - "MASS_", - ) - + _create_tuples(const.UnitOfIrradiance, "IRRADIATION_") - + _create_tuples( - [ - const.UnitOfPrecipitationDepth.INCHES, - const.UnitOfPrecipitationDepth.MILLIMETERS, - const.UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR, - const.UnitOfVolumetricFlux.INCHES_PER_HOUR, - ], - "PRECIPITATION_", - ) - + _create_tuples( - [ - const.UnitOfSpeed.FEET_PER_SECOND, - const.UnitOfSpeed.METERS_PER_SECOND, - const.UnitOfSpeed.KILOMETERS_PER_HOUR, - const.UnitOfSpeed.KNOTS, - const.UnitOfSpeed.MILES_PER_HOUR, - ], - "SPEED_", - ) - + _create_tuples( - [ - const.UnitOfVolumetricFlux.MILLIMETERS_PER_DAY, - const.UnitOfVolumetricFlux.INCHES_PER_DAY, - const.UnitOfVolumetricFlux.INCHES_PER_HOUR, - ], - "SPEED_", - ) - + _create_tuples(const.UnitOfInformation, "DATA_") - + _create_tuples(const.UnitOfDataRate, "DATA_RATE_"), -) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: Enum, - constant_prefix: str, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, const, enum, constant_prefix, "2025.1" - ) - - @pytest.mark.parametrize( ("replacement", "constant_name", "breaks_in_version"), [ - (const.UnitOfLength.YARDS, "LENGTH_YARD", "2025.1"), - (const.UnitOfSoundPressure.DECIBEL, "SOUND_PRESSURE_DB", "2025.1"), - ( - const.UnitOfSoundPressure.WEIGHTED_DECIBEL_A, - "SOUND_PRESSURE_WEIGHTED_DBA", - "2025.1", - ), - (const.UnitOfVolume.FLUID_OUNCES, "VOLUME_FLUID_OUNCE", "2025.1"), (const.UnitOfArea.SQUARE_METERS, "AREA_SQUARE_METERS", "2025.12"), ], ) From 3866176e1d13440630cc8e5e4989b90359f2a523 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Thu, 28 Nov 2024 12:13:03 +0100 Subject: [PATCH 0031/1198] Remove deprecated water heater 
constants (#131805) --- .../components/water_heater/__init__.py | 26 ------------------- tests/components/water_heater/test_init.py | 26 ------------------- 2 files changed, 52 deletions(-) diff --git a/homeassistant/components/water_heater/__init__.py b/homeassistant/components/water_heater/__init__.py index 4bfe1ce4481..dbd697f2367 100644 --- a/homeassistant/components/water_heater/__init__.py +++ b/homeassistant/components/water_heater/__init__.py @@ -25,12 +25,6 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.temperature import display_temp as show_temp @@ -70,18 +64,6 @@ class WaterHeaterEntityFeature(IntFlag): ON_OFF = 8 -# These SUPPORT_* constants are deprecated as of Home Assistant 2022.5. -# Please use the WaterHeaterEntityFeature enum instead. -_DEPRECATED_SUPPORT_TARGET_TEMPERATURE = DeprecatedConstantEnum( - WaterHeaterEntityFeature.TARGET_TEMPERATURE, "2025.1" -) -_DEPRECATED_SUPPORT_OPERATION_MODE = DeprecatedConstantEnum( - WaterHeaterEntityFeature.OPERATION_MODE, "2025.1" -) -_DEPRECATED_SUPPORT_AWAY_MODE = DeprecatedConstantEnum( - WaterHeaterEntityFeature.AWAY_MODE, "2025.1" -) - ATTR_MAX_TEMP = "max_temp" ATTR_MIN_TEMP = "min_temp" ATTR_AWAY_MODE = "away_mode" @@ -437,11 +419,3 @@ async def async_service_temperature_set( kwargs[value] = temp await entity.async_set_temperature(**kwargs) - - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = ft.partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = ft.partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/tests/components/water_heater/test_init.py b/tests/components/water_heater/test_init.py index 4e0f860366c..0c5651058ed 100644 --- a/tests/components/water_heater/test_init.py +++ b/tests/components/water_heater/test_init.py @@ -8,7 +8,6 @@ from unittest.mock import AsyncMock, MagicMock import pytest import voluptuous as vol -from homeassistant.components import water_heater from homeassistant.components.water_heater import ( ATTR_OPERATION_LIST, ATTR_OPERATION_MODE, @@ -30,8 +29,6 @@ from tests.common import ( MockModule, MockPlatform, async_mock_service, - help_test_all, - import_and_test_deprecated_constant_enum, mock_integration, mock_platform, ) @@ -211,29 +208,6 @@ async def test_operation_mode_validation( water_heater_entity.set_operation_mode.assert_has_calls([mock.call("eco")]) -def test_all() -> None: - """Test module.__all__ is correctly set.""" - help_test_all(water_heater) - - -@pytest.mark.parametrize( - ("enum"), - [ - WaterHeaterEntityFeature.TARGET_TEMPERATURE, - WaterHeaterEntityFeature.OPERATION_MODE, - WaterHeaterEntityFeature.AWAY_MODE, - ], -) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: WaterHeaterEntityFeature, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, water_heater, enum, "SUPPORT_", "2025.1" - ) - - def test_deprecated_supported_features_ints( hass: HomeAssistant, 
caplog: pytest.LogCaptureFixture ) -> None: From c5f68bcc58e8f1034bad2536e9c9b4c5fe9e8558 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Thu, 28 Nov 2024 12:14:06 +0100 Subject: [PATCH 0032/1198] Remove deprecated remote constants (#131809) --- homeassistant/components/remote/__init__.py | 27 --------------------- tests/components/remote/test_init.py | 20 +-------------- 2 files changed, 1 insertion(+), 46 deletions(-) diff --git a/homeassistant/components/remote/__init__.py b/homeassistant/components/remote/__init__.py index 6a007bde0b4..9c54a40be70 100644 --- a/homeassistant/components/remote/__init__.py +++ b/homeassistant/components/remote/__init__.py @@ -22,12 +22,6 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.helpers.entity import ToggleEntity, ToggleEntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType @@ -74,19 +68,6 @@ class RemoteEntityFeature(IntFlag): ACTIVITY = 4 -# These SUPPORT_* constants are deprecated as of Home Assistant 2022.5. -# Please use the RemoteEntityFeature enum instead. -_DEPRECATED_SUPPORT_LEARN_COMMAND = DeprecatedConstantEnum( - RemoteEntityFeature.LEARN_COMMAND, "2025.1" -) -_DEPRECATED_SUPPORT_DELETE_COMMAND = DeprecatedConstantEnum( - RemoteEntityFeature.DELETE_COMMAND, "2025.1" -) -_DEPRECATED_SUPPORT_ACTIVITY = DeprecatedConstantEnum( - RemoteEntityFeature.ACTIVITY, "2025.1" -) - - REMOTE_SERVICE_ACTIVITY_SCHEMA = cv.make_entity_service_schema( {vol.Optional(ATTR_ACTIVITY): cv.string} ) @@ -251,11 +232,3 @@ class RemoteEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_) await self.hass.async_add_executor_job( ft.partial(self.delete_command, **kwargs) ) - - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = ft.partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = ft.partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/tests/components/remote/test_init.py b/tests/components/remote/test_init.py index 575e69015fe..27219788906 100644 --- a/tests/components/remote/test_init.py +++ b/tests/components/remote/test_init.py @@ -23,11 +23,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant -from tests.common import ( - async_mock_service, - help_test_all, - import_and_test_deprecated_constant_enum, -) +from tests.common import async_mock_service TEST_PLATFORM = {DOMAIN: {CONF_PLATFORM: "test"}} SERVICE_SEND_COMMAND = "send_command" @@ -148,20 +144,6 @@ async def test_delete_command(hass: HomeAssistant) -> None: assert call.data[ATTR_ENTITY_ID] == ENTITY_ID -def test_all() -> None: - """Test module.__all__ is correctly set.""" - help_test_all(remote) - - -@pytest.mark.parametrize(("enum"), list(remote.RemoteEntityFeature)) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: remote.RemoteEntityFeature, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum(caplog, remote, enum, "SUPPORT_", "2025.1") - - def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: """Test deprecated supported features 
ints.""" From 4d27a32905eb2e543846eef51c37e125b2b128c4 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Thu, 28 Nov 2024 12:14:25 +0100 Subject: [PATCH 0033/1198] Remove deprecated cover constants (#131797) --- homeassistant/components/cover/__init__.py | 51 +--------------------- tests/components/cover/test_init.py | 22 +--------- 2 files changed, 2 insertions(+), 71 deletions(-) diff --git a/homeassistant/components/cover/__init__.py b/homeassistant/components/cover/__init__.py index ea11761a753..001bff51991 100644 --- a/homeassistant/components/cover/__init__.py +++ b/homeassistant/components/cover/__init__.py @@ -89,36 +89,8 @@ class CoverDeviceClass(StrEnum): DEVICE_CLASSES_SCHEMA = vol.All(vol.Lower, vol.Coerce(CoverDeviceClass)) - -# DEVICE_CLASS* below are deprecated as of 2021.12 -# use the CoverDeviceClass enum instead. DEVICE_CLASSES = [cls.value for cls in CoverDeviceClass] -_DEPRECATED_DEVICE_CLASS_AWNING = DeprecatedConstantEnum( - CoverDeviceClass.AWNING, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_BLIND = DeprecatedConstantEnum( - CoverDeviceClass.BLIND, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_CURTAIN = DeprecatedConstantEnum( - CoverDeviceClass.CURTAIN, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_DAMPER = DeprecatedConstantEnum( - CoverDeviceClass.DAMPER, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_DOOR = DeprecatedConstantEnum(CoverDeviceClass.DOOR, "2025.1") -_DEPRECATED_DEVICE_CLASS_GARAGE = DeprecatedConstantEnum( - CoverDeviceClass.GARAGE, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_GATE = DeprecatedConstantEnum(CoverDeviceClass.GATE, "2025.1") -_DEPRECATED_DEVICE_CLASS_SHADE = DeprecatedConstantEnum( - CoverDeviceClass.SHADE, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_SHUTTER = DeprecatedConstantEnum( - CoverDeviceClass.SHUTTER, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_WINDOW = DeprecatedConstantEnum( - CoverDeviceClass.WINDOW, "2025.1" -) + # mypy: disallow-any-generics @@ -136,27 +108,6 @@ class CoverEntityFeature(IntFlag): SET_TILT_POSITION = 128 -# These SUPPORT_* constants are deprecated as of Home Assistant 2022.5. -# Please use the CoverEntityFeature enum instead. 
-_DEPRECATED_SUPPORT_OPEN = DeprecatedConstantEnum(CoverEntityFeature.OPEN, "2025.1") -_DEPRECATED_SUPPORT_CLOSE = DeprecatedConstantEnum(CoverEntityFeature.CLOSE, "2025.1") -_DEPRECATED_SUPPORT_SET_POSITION = DeprecatedConstantEnum( - CoverEntityFeature.SET_POSITION, "2025.1" -) -_DEPRECATED_SUPPORT_STOP = DeprecatedConstantEnum(CoverEntityFeature.STOP, "2025.1") -_DEPRECATED_SUPPORT_OPEN_TILT = DeprecatedConstantEnum( - CoverEntityFeature.OPEN_TILT, "2025.1" -) -_DEPRECATED_SUPPORT_CLOSE_TILT = DeprecatedConstantEnum( - CoverEntityFeature.CLOSE_TILT, "2025.1" -) -_DEPRECATED_SUPPORT_STOP_TILT = DeprecatedConstantEnum( - CoverEntityFeature.STOP_TILT, "2025.1" -) -_DEPRECATED_SUPPORT_SET_TILT_POSITION = DeprecatedConstantEnum( - CoverEntityFeature.SET_TILT_POSITION, "2025.1" -) - ATTR_CURRENT_POSITION = "current_position" ATTR_CURRENT_TILT_POSITION = "current_tilt_position" ATTR_POSITION = "position" diff --git a/tests/components/cover/test_init.py b/tests/components/cover/test_init.py index 6b80dd1ab9a..646c44e4ac2 100644 --- a/tests/components/cover/test_init.py +++ b/tests/components/cover/test_init.py @@ -13,11 +13,7 @@ from homeassistant.setup import async_setup_component from .common import MockCover -from tests.common import ( - help_test_all, - import_and_test_deprecated_constant_enum, - setup_test_component_platform, -) +from tests.common import help_test_all, setup_test_component_platform async def test_services( @@ -161,22 +157,6 @@ def test_all() -> None: help_test_all(cover) -@pytest.mark.parametrize( - ("enum", "constant_prefix"), - _create_tuples(cover.CoverEntityFeature, "SUPPORT_") - + _create_tuples(cover.CoverDeviceClass, "DEVICE_CLASS_"), -) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: Enum, - constant_prefix: str, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, cover, enum, constant_prefix, "2025.1" - ) - - def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: """Test deprecated supported features ints.""" From 3e0326dd66dd97e4f9977af2e23937426e706e3e Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Thu, 28 Nov 2024 12:14:43 +0100 Subject: [PATCH 0034/1198] Remove deprecated siren constants (#131807) --- homeassistant/components/siren/__init__.py | 23 +-------------- homeassistant/components/siren/const.py | 34 ---------------------- tests/components/siren/test_init.py | 23 --------------- 3 files changed, 1 insertion(+), 79 deletions(-) diff --git a/homeassistant/components/siren/__init__.py b/homeassistant/components/siren/__init__.py index 91456d6fa3b..8fab0dfe96d 100644 --- a/homeassistant/components/siren/__init__.py +++ b/homeassistant/components/siren/__init__.py @@ -3,7 +3,6 @@ from __future__ import annotations from datetime import timedelta -from functools import partial import logging from typing import Any, TypedDict, cast, final @@ -14,22 +13,12 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.deprecation import ( - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.helpers.entity import ToggleEntity, ToggleEntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType, 
VolDictType from homeassistant.util.hass_dict import HassKey -from .const import ( # noqa: F401 - _DEPRECATED_SUPPORT_DURATION, - _DEPRECATED_SUPPORT_TONES, - _DEPRECATED_SUPPORT_TURN_OFF, - _DEPRECATED_SUPPORT_TURN_ON, - _DEPRECATED_SUPPORT_VOLUME_SET, +from .const import ( ATTR_AVAILABLE_TONES, ATTR_DURATION, ATTR_TONE, @@ -208,13 +197,3 @@ class SirenEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): self._report_deprecated_supported_features_values(new_features) return new_features return features - - -# As we import deprecated constants from the const module, we need to add these two functions -# otherwise this module will be logged for using deprecated constants and not the custom component -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/siren/const.py b/homeassistant/components/siren/const.py index 9e46d8dc997..26a158bd8ea 100644 --- a/homeassistant/components/siren/const.py +++ b/homeassistant/components/siren/const.py @@ -1,16 +1,8 @@ """Constants for the siren component.""" from enum import IntFlag -from functools import partial from typing import Final -from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) - DOMAIN: Final = "siren" ATTR_TONE: Final = "tone" @@ -28,29 +20,3 @@ class SirenEntityFeature(IntFlag): TONES = 4 VOLUME_SET = 8 DURATION = 16 - - -# These constants are deprecated as of Home Assistant 2022.5 -# Please use the SirenEntityFeature enum instead. 
-_DEPRECATED_SUPPORT_TURN_ON: Final = DeprecatedConstantEnum( - SirenEntityFeature.TURN_ON, "2025.1" -) -_DEPRECATED_SUPPORT_TURN_OFF: Final = DeprecatedConstantEnum( - SirenEntityFeature.TURN_OFF, "2025.1" -) -_DEPRECATED_SUPPORT_TONES: Final = DeprecatedConstantEnum( - SirenEntityFeature.TONES, "2025.1" -) -_DEPRECATED_SUPPORT_VOLUME_SET: Final = DeprecatedConstantEnum( - SirenEntityFeature.VOLUME_SET, "2025.1" -) -_DEPRECATED_SUPPORT_DURATION: Final = DeprecatedConstantEnum( - SirenEntityFeature.DURATION, "2025.1" -) - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/tests/components/siren/test_init.py b/tests/components/siren/test_init.py index 475b32540b4..68a4eb03998 100644 --- a/tests/components/siren/test_init.py +++ b/tests/components/siren/test_init.py @@ -1,6 +1,5 @@ """The tests for the siren component.""" -from types import ModuleType from unittest.mock import MagicMock import pytest @@ -14,8 +13,6 @@ from homeassistant.components.siren import ( from homeassistant.components.siren.const import SirenEntityFeature from homeassistant.core import HomeAssistant -from tests.common import help_test_all, import_and_test_deprecated_constant_enum - class MockSirenEntity(SirenEntity): """Mock siren device to use in tests.""" @@ -111,26 +108,6 @@ async def test_missing_tones_dict(hass: HomeAssistant) -> None: process_turn_on_params(siren, {"tone": 3}) -@pytest.mark.parametrize( - "module", - [siren, siren.const], -) -def test_all(module: ModuleType) -> None: - """Test module.__all__ is correctly set.""" - help_test_all(module) - - -@pytest.mark.parametrize(("enum"), list(SirenEntityFeature)) -@pytest.mark.parametrize(("module"), [siren, siren.const]) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: SirenEntityFeature, - module: ModuleType, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum(caplog, module, enum, "SUPPORT_", "2025.1") - - def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: """Test deprecated supported features ints.""" From a01e7cd6cfeed033c5f440ca619efa6e61daf133 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Thu, 28 Nov 2024 12:20:43 +0100 Subject: [PATCH 0035/1198] Remove deprecated number constants (#131810) --- homeassistant/components/number/const.py | 20 -------------------- tests/components/number/test_const.py | 21 --------------------- 2 files changed, 41 deletions(-) delete mode 100644 tests/components/number/test_const.py diff --git a/homeassistant/components/number/const.py b/homeassistant/components/number/const.py index 7330b781e75..5a2f4c8675c 100644 --- a/homeassistant/components/number/const.py +++ b/homeassistant/components/number/const.py @@ -3,7 +3,6 @@ from __future__ import annotations from enum import StrEnum -from functools import partial from typing import Final import voluptuous as vol @@ -41,12 +40,6 @@ from homeassistant.const import ( UnitOfVolumeFlowRate, UnitOfVolumetricFlux, ) -from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.util.unit_conversion import ( BaseUnitConverter, TemperatureConverter, @@ -76,12 +69,6 @@ class 
NumberMode(StrEnum): SLIDER = "slider" -# MODE_* are deprecated as of 2021.12, use the NumberMode enum instead. -_DEPRECATED_MODE_AUTO: Final = DeprecatedConstantEnum(NumberMode.AUTO, "2025.1") -_DEPRECATED_MODE_BOX: Final = DeprecatedConstantEnum(NumberMode.BOX, "2025.1") -_DEPRECATED_MODE_SLIDER: Final = DeprecatedConstantEnum(NumberMode.SLIDER, "2025.1") - - class NumberDeviceClass(StrEnum): """Device class for numbers.""" @@ -519,10 +506,3 @@ UNIT_CONVERTERS: dict[NumberDeviceClass, type[BaseUnitConverter]] = { NumberDeviceClass.TEMPERATURE: TemperatureConverter, NumberDeviceClass.VOLUME_FLOW_RATE: VolumeFlowRateConverter, } - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/tests/components/number/test_const.py b/tests/components/number/test_const.py deleted file mode 100644 index 13d94e2eeaf..00000000000 --- a/tests/components/number/test_const.py +++ /dev/null @@ -1,21 +0,0 @@ -"""Test the number const module.""" - -import pytest - -from homeassistant.components.number import const - -from tests.common import help_test_all, import_and_test_deprecated_constant_enum - - -def test_all() -> None: - """Test module.__all__ is correctly set.""" - help_test_all(const) - - -@pytest.mark.parametrize(("enum"), list(const.NumberMode)) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: const.NumberMode, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum(caplog, const, enum, "MODE_", "2025.1") From 1d09a5bf89ae5cedfa84ca91db994c7405e6e427 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Thu, 28 Nov 2024 12:21:13 +0100 Subject: [PATCH 0036/1198] Remove deprecated lock constants (#131812) --- homeassistant/components/lock/__init__.py | 5 ----- tests/components/lock/test_init.py | 3 +-- 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/homeassistant/components/lock/__init__.py b/homeassistant/components/lock/__init__.py index fad87145e00..9363d388637 100644 --- a/homeassistant/components/lock/__init__.py +++ b/homeassistant/components/lock/__init__.py @@ -31,7 +31,6 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ServiceValidationError import homeassistant.helpers.config_validation as cv from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, all_with_deprecated_constants, check_if_deprecated_constant, dir_with_deprecated_constants, @@ -67,10 +66,6 @@ class LockEntityFeature(IntFlag): OPEN = 1 -# The SUPPORT_OPEN constant is deprecated as of Home Assistant 2022.5. -# Please use the LockEntityFeature enum instead. 
-_DEPRECATED_SUPPORT_OPEN = DeprecatedConstantEnum(LockEntityFeature.OPEN, "2025.1") - PROP_TO_ATTR = {"changed_by": ATTR_CHANGED_BY, "code_format": ATTR_CODE_FORMAT} # mypy: disallow-any-generics diff --git a/tests/components/lock/test_init.py b/tests/components/lock/test_init.py index a80aa78cec2..a1fed9fe7e1 100644 --- a/tests/components/lock/test_init.py +++ b/tests/components/lock/test_init.py @@ -405,8 +405,7 @@ def _create_tuples( @pytest.mark.parametrize( ("enum", "constant_prefix", "remove_in_version"), - _create_tuples(lock.LockEntityFeature, "SUPPORT_", "2025.1") - + _create_tuples(lock.LockState, "STATE_", "2025.10"), + _create_tuples(lock.LockState, "STATE_", "2025.10"), ) def test_deprecated_constants( caplog: pytest.LogCaptureFixture, From 9d387acb97399b199859ab37fa87ca6f4cedd530 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 28 Nov 2024 12:25:16 +0100 Subject: [PATCH 0037/1198] Ensure custom integrations are assigned the custom IQS scale (#131795) --- homeassistant/loader.py | 3 +++ tests/test_loader.py | 4 ++++ 2 files changed, 7 insertions(+) diff --git a/homeassistant/loader.py b/homeassistant/loader.py index 4313cd2d6e0..1fa9d0cd49d 100644 --- a/homeassistant/loader.py +++ b/homeassistant/loader.py @@ -830,6 +830,9 @@ class Integration: @cached_property def quality_scale(self) -> str | None: """Return Integration Quality Scale.""" + # Custom integrations default to "custom" quality scale. + if not self.is_built_in or self.overwrites_built_in: + return "custom" return self.manifest.get("quality_scale") @cached_property diff --git a/tests/test_loader.py b/tests/test_loader.py index a39bd63ad0d..4c3c4eb309f 100644 --- a/tests/test_loader.py +++ b/tests/test_loader.py @@ -547,6 +547,7 @@ def test_integration_properties(hass: HomeAssistant) -> None: ], "mqtt": ["hue/discovery"], "version": "1.0.0", + "quality_scale": "gold", }, ) assert integration.name == "Philips Hue" @@ -585,6 +586,7 @@ def test_integration_properties(hass: HomeAssistant) -> None: assert integration.is_built_in is True assert integration.overwrites_built_in is False assert integration.version == "1.0.0" + assert integration.quality_scale == "gold" integration = loader.Integration( hass, @@ -595,6 +597,7 @@ def test_integration_properties(hass: HomeAssistant) -> None: "domain": "hue", "dependencies": ["test-dep"], "requirements": ["test-req==1.0.0"], + "quality_scale": "gold", }, ) assert integration.is_built_in is False @@ -607,6 +610,7 @@ def test_integration_properties(hass: HomeAssistant) -> None: assert integration.ssdp is None assert integration.mqtt is None assert integration.version is None + assert integration.quality_scale == "custom" integration = loader.Integration( hass, From 6ce5c897116b2b7a4c0e1b89fade20388bc10f4d Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 28 Nov 2024 12:29:38 +0100 Subject: [PATCH 0038/1198] Fix group flaky test (#131815) --- tests/components/group/test_notify.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/components/group/test_notify.py b/tests/components/group/test_notify.py index bbf2d98b492..e3a01c05eca 100644 --- a/tests/components/group/test_notify.py +++ b/tests/components/group/test_notify.py @@ -161,7 +161,8 @@ async def test_send_message_with_data(hass: HomeAssistant, tmp_path: Path) -> No "data": {"hello": "world", "test": "message", "default": "default"}, }, ), - ] + ], + any_order=True, ) send_message_mock.reset_mock() From 3a76bfb8578d9279425b6eed72098a948c26968d Mon Sep 17 
00:00:00 2001 From: Joost Lekkerkerker Date: Thu, 28 Nov 2024 12:34:06 +0100 Subject: [PATCH 0039/1198] Remove Spotify featured playlists and categories from media browser (#131758) --- .../components/spotify/browse_media.py | 72 ---------- tests/components/spotify/conftest.py | 14 -- .../spotify/fixtures/categories.json | 36 ----- .../components/spotify/fixtures/category.json | 12 -- .../spotify/fixtures/category_playlists.json | 84 ------------ .../spotify/fixtures/featured_playlists.json | 85 ------------ .../spotify/snapshots/test_media_browser.ambr | 125 ------------------ .../components/spotify/test_media_browser.py | 3 - 8 files changed, 431 deletions(-) delete mode 100644 tests/components/spotify/fixtures/categories.json delete mode 100644 tests/components/spotify/fixtures/category.json delete mode 100644 tests/components/spotify/fixtures/category_playlists.json delete mode 100644 tests/components/spotify/fixtures/featured_playlists.json diff --git a/homeassistant/components/spotify/browse_media.py b/homeassistant/components/spotify/browse_media.py index 403ec608a7c..1ae5804ea66 100644 --- a/homeassistant/components/spotify/browse_media.py +++ b/homeassistant/components/spotify/browse_media.py @@ -101,8 +101,6 @@ class BrowsableMedia(StrEnum): CURRENT_USER_RECENTLY_PLAYED = "current_user_recently_played" CURRENT_USER_TOP_ARTISTS = "current_user_top_artists" CURRENT_USER_TOP_TRACKS = "current_user_top_tracks" - CATEGORIES = "categories" - FEATURED_PLAYLISTS = "featured_playlists" NEW_RELEASES = "new_releases" @@ -115,8 +113,6 @@ LIBRARY_MAP = { BrowsableMedia.CURRENT_USER_RECENTLY_PLAYED.value: "Recently played", BrowsableMedia.CURRENT_USER_TOP_ARTISTS.value: "Top Artists", BrowsableMedia.CURRENT_USER_TOP_TRACKS.value: "Top Tracks", - BrowsableMedia.CATEGORIES.value: "Categories", - BrowsableMedia.FEATURED_PLAYLISTS.value: "Featured Playlists", BrowsableMedia.NEW_RELEASES.value: "New Releases", } @@ -153,18 +149,6 @@ CONTENT_TYPE_MEDIA_CLASS: dict[str, Any] = { "parent": MediaClass.DIRECTORY, "children": MediaClass.TRACK, }, - BrowsableMedia.FEATURED_PLAYLISTS.value: { - "parent": MediaClass.DIRECTORY, - "children": MediaClass.PLAYLIST, - }, - BrowsableMedia.CATEGORIES.value: { - "parent": MediaClass.DIRECTORY, - "children": MediaClass.GENRE, - }, - "category_playlists": { - "parent": MediaClass.DIRECTORY, - "children": MediaClass.PLAYLIST, - }, BrowsableMedia.NEW_RELEASES.value: { "parent": MediaClass.DIRECTORY, "children": MediaClass.ALBUM, @@ -354,32 +338,6 @@ async def build_item_response( # noqa: C901 elif media_content_type == BrowsableMedia.CURRENT_USER_TOP_TRACKS: if top_tracks := await spotify.get_top_tracks(): items = [_get_track_item_payload(track) for track in top_tracks] - elif media_content_type == BrowsableMedia.FEATURED_PLAYLISTS: - if featured_playlists := await spotify.get_featured_playlists(): - items = [ - _get_playlist_item_payload(playlist) for playlist in featured_playlists - ] - elif media_content_type == BrowsableMedia.CATEGORIES: - if categories := await spotify.get_categories(): - items = [ - { - "id": category.category_id, - "name": category.name, - "type": "category_playlists", - "uri": category.category_id, - "thumbnail": category.icons[0].url if category.icons else None, - } - for category in categories - ] - elif media_content_type == "category_playlists": - if ( - playlists := await spotify.get_category_playlists( - category_id=media_content_id - ) - ) and (category := await spotify.get_category(media_content_id)): - title = category.name - image = 
category.icons[0].url if category.icons else None - items = [_get_playlist_item_payload(playlist) for playlist in playlists] elif media_content_type == BrowsableMedia.NEW_RELEASES: if new_releases := await spotify.get_new_releases(): items = [_get_album_item_payload(album) for album in new_releases] @@ -429,36 +387,6 @@ async def build_item_response( # noqa: C901 _LOGGER.debug("Unknown media type received: %s", media_content_type) return None - if media_content_type == BrowsableMedia.CATEGORIES: - media_item = BrowseMedia( - can_expand=True, - can_play=False, - children_media_class=media_class["children"], - media_class=media_class["parent"], - media_content_id=media_content_id, - media_content_type=f"{MEDIA_PLAYER_PREFIX}{media_content_type}", - title=LIBRARY_MAP.get(media_content_id, "Unknown"), - ) - - media_item.children = [] - for item in items: - if (item_id := item["id"]) is None: - _LOGGER.debug("Missing ID for media item: %s", item) - continue - media_item.children.append( - BrowseMedia( - can_expand=True, - can_play=False, - children_media_class=MediaClass.TRACK, - media_class=MediaClass.PLAYLIST, - media_content_id=item_id, - media_content_type=f"{MEDIA_PLAYER_PREFIX}category_playlists", - thumbnail=item["thumbnail"], - title=item["name"], - ) - ) - return media_item - if title is None: title = LIBRARY_MAP.get(media_content_id, "Unknown") diff --git a/tests/components/spotify/conftest.py b/tests/components/spotify/conftest.py index cc1f423246c..67d4eac3960 100644 --- a/tests/components/spotify/conftest.py +++ b/tests/components/spotify/conftest.py @@ -9,11 +9,7 @@ from spotifyaio.models import ( Album, Artist, ArtistResponse, - CategoriesResponse, - Category, - CategoryPlaylistResponse, Devices, - FeaturedPlaylistResponse, NewReleasesResponse, NewReleasesResponseInner, PlaybackState, @@ -134,7 +130,6 @@ def mock_spotify() -> Generator[AsyncMock]: PlaybackState, ), ("current_user.json", "get_current_user", UserProfile), - ("category.json", "get_category", Category), ("playlist.json", "get_playlist", Playlist), ("album.json", "get_album", Album), ("artist.json", "get_artist", Artist), @@ -146,15 +141,6 @@ def mock_spotify() -> Generator[AsyncMock]: client.get_followed_artists.return_value = ArtistResponse.from_json( load_fixture("followed_artists.json", DOMAIN) ).artists.items - client.get_featured_playlists.return_value = FeaturedPlaylistResponse.from_json( - load_fixture("featured_playlists.json", DOMAIN) - ).playlists.items - client.get_categories.return_value = CategoriesResponse.from_json( - load_fixture("categories.json", DOMAIN) - ).categories.items - client.get_category_playlists.return_value = CategoryPlaylistResponse.from_json( - load_fixture("category_playlists.json", DOMAIN) - ).playlists.items client.get_new_releases.return_value = NewReleasesResponse.from_json( load_fixture("new_releases.json", DOMAIN) ).albums.items diff --git a/tests/components/spotify/fixtures/categories.json b/tests/components/spotify/fixtures/categories.json deleted file mode 100644 index ed873c95c30..00000000000 --- a/tests/components/spotify/fixtures/categories.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "categories": { - "href": "https://api.spotify.com/v1/browse/categories?offset=0&limit=20&locale=en-US,en;q%3D0.5", - "items": [ - { - "href": "https://api.spotify.com/v1/browse/categories/0JQ5DAt0tbjZptfcdMSKl3", - "id": "0JQ5DAt0tbjZptfcdMSKl3", - "icons": [ - { - "height": 274, - "url": "https://t.scdn.co/images/728ed47fc1674feb95f7ac20236eb6d7.jpeg", - "width": 274 - } - ], - "name": "Made 
For You" - }, - { - "href": "https://api.spotify.com/v1/browse/categories/0JQ5DAqbMKFz6FAsUtgAab", - "id": "0JQ5DAqbMKFz6FAsUtgAab", - "icons": [ - { - "height": 274, - "url": "https://t.scdn.co/images/728ed47fc1674feb95f7ac20236eb6d7.jpeg", - "width": 274 - } - ], - "name": "New Releases" - } - ], - "limit": 20, - "next": "https://api.spotify.com/v1/browse/categories?offset=20&limit=20&locale=en-US,en;q%3D0.5", - "offset": 0, - "previous": null, - "total": 56 - } -} diff --git a/tests/components/spotify/fixtures/category.json b/tests/components/spotify/fixtures/category.json deleted file mode 100644 index d60605cf94f..00000000000 --- a/tests/components/spotify/fixtures/category.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "href": "https://api.spotify.com/v1/browse/categories/0JQ5DAqbMKFRY5ok2pxXJ0", - "id": "0JQ5DAqbMKFRY5ok2pxXJ0", - "icons": [ - { - "height": 274, - "url": "https://t.scdn.co/media/original/dinner_1b6506abba0ba52c54e6d695c8571078_274x274.jpg", - "width": 274 - } - ], - "name": "Cooking & Dining" -} diff --git a/tests/components/spotify/fixtures/category_playlists.json b/tests/components/spotify/fixtures/category_playlists.json deleted file mode 100644 index c2262708d5a..00000000000 --- a/tests/components/spotify/fixtures/category_playlists.json +++ /dev/null @@ -1,84 +0,0 @@ -{ - "playlists": { - "href": "https://api.spotify.com/v1/browse/categories/0JQ5DAqbMKFRY5ok2pxXJ0/playlists?country=NL&offset=0&limit=20", - "items": [ - { - "collaborative": false, - "description": "Lekker eten en lang natafelen? Daar hoort muziek bij.", - "external_urls": { - "spotify": "https://open.spotify.com/playlist/37i9dQZF1DX7yhuKT9G4qk" - }, - "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DX7yhuKT9G4qk", - "id": "37i9dQZF1DX7yhuKT9G4qk", - "images": [ - { - "height": null, - "url": "https://i.scdn.co/image/ab67706f0000000343319faa9428405f3312b588", - "width": null - } - ], - "name": "eten met vrienden", - "owner": { - "display_name": "Spotify", - "external_urls": { - "spotify": "https://open.spotify.com/user/spotify" - }, - "href": "https://api.spotify.com/v1/users/spotify", - "id": "spotify", - "type": "user", - "uri": "spotify:user:spotify" - }, - "primary_color": null, - "public": null, - "snapshot_id": "MTcwMTY5Njk3NywwMDAwMDAwMDkyY2JjZDA1MjA2YTBmNzMxMmFlNGI0YzRhMjg0ZWZl", - "tracks": { - "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DX7yhuKT9G4qk/tracks", - "total": 313 - }, - "type": "playlist", - "uri": "spotify:playlist:37i9dQZF1DX7yhuKT9G4qk" - }, - { - "collaborative": false, - "description": "From new retro to classic country blues, honky tonk, rockabilly, and more.", - "external_urls": { - "spotify": "https://open.spotify.com/playlist/37i9dQZF1DXbvE0SE0Cczh" - }, - "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DXbvE0SE0Cczh", - "id": "37i9dQZF1DXbvE0SE0Cczh", - "images": [ - { - "height": null, - "url": "https://i.scdn.co/image/ab67706f00000003b93c270883619dde61725fc8", - "width": null - } - ], - "name": "Jukebox Joint", - "owner": { - "display_name": "Spotify", - "external_urls": { - "spotify": "https://open.spotify.com/user/spotify" - }, - "href": "https://api.spotify.com/v1/users/spotify", - "id": "spotify", - "type": "user", - "uri": "spotify:user:spotify" - }, - "primary_color": null, - "public": null, - "snapshot_id": "MTY4NjkxODgwMiwwMDAwMDAwMGUwNWRkNjY5N2UzM2Q4NzI4NzRiZmNhMGVmMzAyZTA5", - "tracks": { - "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DXbvE0SE0Cczh/tracks", - "total": 60 - }, - "type": "playlist", - "uri": 
"spotify:playlist:37i9dQZF1DXbvE0SE0Cczh" - } - ], - "limit": 20, - "next": "https://api.spotify.com/v1/browse/categories/0JQ5DAqbMKFRY5ok2pxXJ0/playlists?country=NL&offset=20&limit=20", - "offset": 0, - "previous": null, - "total": 46 - } -} diff --git a/tests/components/spotify/fixtures/featured_playlists.json b/tests/components/spotify/fixtures/featured_playlists.json deleted file mode 100644 index 5e6e53a7ee1..00000000000 --- a/tests/components/spotify/fixtures/featured_playlists.json +++ /dev/null @@ -1,85 +0,0 @@ -{ - "message": "Popular Playlists", - "playlists": { - "href": "https://api.spotify.com/v1/browse/featured-playlists?country=NL×tamp=2023-12-18T18%3A35%3A35&offset=0&limit=20", - "items": [ - { - "collaborative": false, - "description": "De ideale playlist voor het fijne kerstgevoel bij de boom!", - "external_urls": { - "spotify": "https://open.spotify.com/playlist/37i9dQZF1DX4dopZ9vOp1t" - }, - "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DX4dopZ9vOp1t", - "id": "37i9dQZF1DX4dopZ9vOp1t", - "images": [ - { - "height": null, - "url": "https://i.scdn.co/image/ab67706f000000037d14c267b8ee5fea2246a8fe", - "width": null - } - ], - "name": "Kerst Hits 2023", - "owner": { - "display_name": "Spotify", - "external_urls": { - "spotify": "https://open.spotify.com/user/spotify" - }, - "href": "https://api.spotify.com/v1/users/spotify", - "id": "spotify", - "type": "user", - "uri": "spotify:user:spotify" - }, - "primary_color": null, - "public": null, - "snapshot_id": "MTcwMjU2ODI4MSwwMDAwMDAwMDE1ZGRiNzI3OGY4OGU2MzA1MWNkZGMyNTdmNDUwMTc1", - "tracks": { - "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DX4dopZ9vOp1t/tracks", - "total": 298 - }, - "type": "playlist", - "uri": "spotify:playlist:37i9dQZF1DX4dopZ9vOp1t" - }, - { - "collaborative": false, - "description": "De 50 populairste hits van Nederland. 
Cover: Jack Harlow", - "external_urls": { - "spotify": "https://open.spotify.com/playlist/37i9dQZF1DWSBi5svWQ9Nk" - }, - "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DWSBi5svWQ9Nk", - "id": "37i9dQZF1DWSBi5svWQ9Nk", - "images": [ - { - "height": null, - "url": "https://i.scdn.co/image/ab67706f00000003f7b99051789611a49101c1cf", - "width": null - } - ], - "name": "Top Hits NL", - "owner": { - "display_name": "Spotify", - "external_urls": { - "spotify": "https://open.spotify.com/user/spotify" - }, - "href": "https://api.spotify.com/v1/users/spotify", - "id": "spotify", - "type": "user", - "uri": "spotify:user:spotify" - }, - "primary_color": null, - "public": null, - "snapshot_id": "MTcwMjU5NDgwMCwwMDAwMDAwMDU4NWY2MTE4NmU4NmIwMDdlMGE4ZGRkOTZkN2U2MzAx", - "tracks": { - "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DWSBi5svWQ9Nk/tracks", - "total": 50 - }, - "type": "playlist", - "uri": "spotify:playlist:37i9dQZF1DWSBi5svWQ9Nk" - } - ], - "limit": 20, - "next": "https://api.spotify.com/v1/browse/featured-playlists?country=NL×tamp=2023-12-18T18%3A35%3A35&offset=20&limit=20", - "offset": 0, - "previous": null, - "total": 24 - } -} diff --git a/tests/components/spotify/snapshots/test_media_browser.ambr b/tests/components/spotify/snapshots/test_media_browser.ambr index e1ff42cb7c8..764dc7a10e1 100644 --- a/tests/components/spotify/snapshots/test_media_browser.ambr +++ b/tests/components/spotify/snapshots/test_media_browser.ambr @@ -84,26 +84,6 @@ 'thumbnail': None, 'title': 'Top Tracks', }), - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/categories', - 'media_content_type': 'spotify://categories', - 'thumbnail': None, - 'title': 'Categories', - }), - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/featured_playlists', - 'media_content_type': 'spotify://featured_playlists', - 'thumbnail': None, - 'title': 'Featured Playlists', - }), dict({ 'can_expand': True, 'can_play': False, @@ -299,76 +279,6 @@ 'title': 'Pitbull', }) # --- -# name: test_browsing[categories-categories] - dict({ - 'can_expand': True, - 'can_play': False, - 'children': list([ - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/0JQ5DAt0tbjZptfcdMSKl3', - 'media_content_type': 'spotify://category_playlists', - 'thumbnail': 'https://t.scdn.co/images/728ed47fc1674feb95f7ac20236eb6d7.jpeg', - 'title': 'Made For You', - }), - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/0JQ5DAqbMKFz6FAsUtgAab', - 'media_content_type': 'spotify://category_playlists', - 'thumbnail': 'https://t.scdn.co/images/728ed47fc1674feb95f7ac20236eb6d7.jpeg', - 'title': 'New Releases', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/categories', - 'media_content_type': 'spotify://categories', - 'not_shown': 0, - 'thumbnail': None, - 'title': 'Categories', - }) -# --- -# name: test_browsing[category_playlists-dinner] - dict({ - 'can_expand': True, - 'can_play': False, - 'children': list([ - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 
'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:37i9dQZF1DX7yhuKT9G4qk', - 'media_content_type': 'spotify://playlist', - 'thumbnail': 'https://i.scdn.co/image/ab67706f0000000343319faa9428405f3312b588', - 'title': 'eten met vrienden', - }), - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:37i9dQZF1DXbvE0SE0Cczh', - 'media_content_type': 'spotify://playlist', - 'thumbnail': 'https://i.scdn.co/image/ab67706f00000003b93c270883619dde61725fc8', - 'title': 'Jukebox Joint', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/dinner', - 'media_content_type': 'spotify://category_playlists', - 'not_shown': 0, - 'thumbnail': 'https://t.scdn.co/media/original/dinner_1b6506abba0ba52c54e6d695c8571078_274x274.jpg', - 'title': 'Cooking & Dining', - }) -# --- # name: test_browsing[current_user_followed_artists-current_user_followed_artists] dict({ 'can_expand': True, @@ -649,41 +559,6 @@ 'title': 'Top Tracks', }) # --- -# name: test_browsing[featured_playlists-featured_playlists] - dict({ - 'can_expand': True, - 'can_play': False, - 'children': list([ - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:37i9dQZF1DX4dopZ9vOp1t', - 'media_content_type': 'spotify://playlist', - 'thumbnail': 'https://i.scdn.co/image/ab67706f000000037d14c267b8ee5fea2246a8fe', - 'title': 'Kerst Hits 2023', - }), - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:37i9dQZF1DWSBi5svWQ9Nk', - 'media_content_type': 'spotify://playlist', - 'thumbnail': 'https://i.scdn.co/image/ab67706f00000003f7b99051789611a49101c1cf', - 'title': 'Top Hits NL', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/featured_playlists', - 'media_content_type': 'spotify://featured_playlists', - 'not_shown': 0, - 'thumbnail': None, - 'title': 'Featured Playlists', - }) -# --- # name: test_browsing[new_releases-new_releases] dict({ 'can_expand': True, diff --git a/tests/components/spotify/test_media_browser.py b/tests/components/spotify/test_media_browser.py index dcacc23bbee..ff3404dcfe9 100644 --- a/tests/components/spotify/test_media_browser.py +++ b/tests/components/spotify/test_media_browser.py @@ -112,9 +112,6 @@ async def test_browse_media_playlists( ("current_user_recently_played", "current_user_recently_played"), ("current_user_top_artists", "current_user_top_artists"), ("current_user_top_tracks", "current_user_top_tracks"), - ("featured_playlists", "featured_playlists"), - ("categories", "categories"), - ("category_playlists", "dinner"), ("new_releases", "new_releases"), ("playlist", "spotify:playlist:3cEYpjA9oz9GiPac4AsH4n"), ("album", "spotify:album:3IqzqH6ShrRtie9Yd2ODyG"), From f41bc98fe2dad97e3008a6f5d955808900800d90 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 28 Nov 2024 12:40:34 +0100 Subject: [PATCH 0040/1198] Cleanup deprecated exception in websocket tests (#131808) --- tests/components/samsungtv/test_config_flow.py | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/tests/components/samsungtv/test_config_flow.py b/tests/components/samsungtv/test_config_flow.py 
index 32e169ffb24..3a849c9d4b1 100644 --- a/tests/components/samsungtv/test_config_flow.py +++ b/tests/components/samsungtv/test_config_flow.py @@ -15,13 +15,8 @@ from samsungtvws.exceptions import ( ) from websockets import frames -# WebSocketProtocolError was deprecated in websockets '14.0' # pylint: disable-next=no-name-in-module -from websockets.exceptions import ( - ConnectionClosedError, - WebSocketException, - WebSocketProtocolError, -) +from websockets.exceptions import ConnectionClosedError, WebSocketException from homeassistant import config_entries from homeassistant.components import dhcp, ssdp, zeroconf @@ -404,7 +399,7 @@ async def test_user_websocket_not_supported(hass: HomeAssistant) -> None: ), patch( "homeassistant.components.samsungtv.bridge.SamsungTVWSAsyncRemote.open", - side_effect=WebSocketProtocolError("Boom"), + side_effect=WebSocketException("Boom"), ), ): # websocket device not supported @@ -787,12 +782,12 @@ async def test_ssdp_websocket_cannot_connect(hass: HomeAssistant) -> None: ), patch( "homeassistant.components.samsungtv.bridge.SamsungTVEncryptedWSAsyncRemote.start_listening", - side_effect=WebSocketProtocolError("Boom"), + side_effect=WebSocketException("Boom"), ), patch( "homeassistant.components.samsungtv.bridge.SamsungTVWSAsyncRemote", ) as remotews, - patch.object(remotews, "open", side_effect=WebSocketProtocolError("Boom")), + patch.object(remotews, "open", side_effect=WebSocketException("Boom")), ): # device not supported result = await hass.config_entries.flow.async_init( @@ -1742,7 +1737,7 @@ async def test_update_legacy_missing_mac_from_dhcp_no_unique_id( ), patch( "homeassistant.components.samsungtv.bridge.SamsungTVEncryptedWSAsyncRemote.start_listening", - side_effect=WebSocketProtocolError("Boom"), + side_effect=WebSocketException("Boom"), ), ): result = await hass.config_entries.flow.async_init( From d9832f8c3ac731555e72ef880c294d4932667a48 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 28 Nov 2024 13:26:58 +0100 Subject: [PATCH 0041/1198] Rename constant in tests/components/recorder/test_migration_from_schema_32.py (#131819) --- .../recorder/test_migration_from_schema_32.py | 64 +++++++++---------- 1 file changed, 32 insertions(+), 32 deletions(-) diff --git a/tests/components/recorder/test_migration_from_schema_32.py b/tests/components/recorder/test_migration_from_schema_32.py index dcf2d792407..6ef97f3bbd1 100644 --- a/tests/components/recorder/test_migration_from_schema_32.py +++ b/tests/components/recorder/test_migration_from_schema_32.py @@ -55,7 +55,7 @@ from tests.common import async_test_home_assistant from tests.typing import RecorderInstanceGenerator CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine" -SCHEMA_MODULE = "tests.components.recorder.db_schema_32" +SCHEMA_MODULE_32 = "tests.components.recorder.db_schema_32" @pytest.fixture @@ -81,8 +81,8 @@ def _create_engine_test(*args, **kwargs): This simulates an existing db with the old schema. 
""" - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] engine = create_engine(*args, **kwargs) old_db_schema.Base.metadata.create_all(engine) with Session(engine) as session: @@ -101,8 +101,8 @@ def _create_engine_test(*args, **kwargs): @pytest.fixture def db_schema_32(): """Fixture to initialize the db with the old schema.""" - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] with ( patch.object(recorder, "db_schema", old_db_schema), @@ -127,8 +127,8 @@ async def test_migrate_events_context_ids( async_test_recorder: RecorderInstanceGenerator, ) -> None: """Test we can migrate old uuid context ids and ulid context ids to binary format.""" - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] test_uuid = uuid.uuid4() uuid_hex = test_uuid.hex @@ -386,8 +386,8 @@ async def test_finish_migrate_events_context_ids( mark the migration as done before ensuring unused indices were dropped. This test makes sure we drop the unused indices. """ - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] def _insert_migration(): with session_scope(hass=hass) as session: @@ -489,8 +489,8 @@ async def test_migrate_states_context_ids( async_test_recorder: RecorderInstanceGenerator, ) -> None: """Test we can migrate old uuid context ids and ulid context ids to binary format.""" - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] test_uuid = uuid.uuid4() uuid_hex = test_uuid.hex @@ -730,8 +730,8 @@ async def test_finish_migrate_states_context_ids( mark the migration as done before ensuring unused indices were dropped. This test makes sure we drop the unused indices. 
""" - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] def _insert_migration(): with session_scope(hass=hass) as session: @@ -833,8 +833,8 @@ async def test_migrate_event_type_ids( ) -> None: """Test we can migrate event_types to the EventTypes table.""" await async_wait_recording_done(hass) - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] def _insert_events(): with session_scope(hass=hass) as session: @@ -924,8 +924,8 @@ async def test_migrate_event_type_ids( async def test_migrate_entity_ids(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test we can migrate entity_ids to the StatesMeta table.""" await async_wait_recording_done(hass) - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] def _insert_states(): with session_scope(hass=hass) as session: @@ -1004,8 +1004,8 @@ async def test_post_migrate_entity_ids( ) -> None: """Test we can migrate entity_ids to the StatesMeta table.""" await async_wait_recording_done(hass) - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] def _insert_events(): with session_scope(hass=hass) as session: @@ -1060,8 +1060,8 @@ async def test_migrate_null_entity_ids( ) -> None: """Test we can migrate entity_ids to the StatesMeta table.""" await async_wait_recording_done(hass) - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] def _insert_states(): with session_scope(hass=hass) as session: @@ -1145,8 +1145,8 @@ async def test_migrate_null_event_type_ids( ) -> None: """Test we can migrate event_types to the EventTypes table when the event_type is NULL.""" await async_wait_recording_done(hass) - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] def _insert_events(): with session_scope(hass=hass) as session: @@ -1233,8 +1233,8 @@ async def test_stats_timestamp_conversion_is_reentrant( """Test stats migration is reentrant.""" await async_wait_recording_done(hass) await async_attach_db_engine(hass) - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] now = dt_util.utcnow() one_year_ago = now - datetime.timedelta(days=365) six_months_ago = now - datetime.timedelta(days=180) @@ -1386,8 +1386,8 @@ async def test_stats_timestamp_with_one_by_one( """Test stats migration with one by one.""" await async_wait_recording_done(hass) await async_attach_db_engine(hass) - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] now = dt_util.utcnow() one_year_ago = now - datetime.timedelta(days=365) six_months_ago = now - datetime.timedelta(days=180) @@ -1606,8 +1606,8 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( """Test stats migration with one by one removes 
duplicates.""" await async_wait_recording_done(hass) await async_attach_db_engine(hass) - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] now = dt_util.utcnow() one_year_ago = now - datetime.timedelta(days=365) six_months_ago = now - datetime.timedelta(days=180) @@ -1798,13 +1798,13 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage -async def test_migrate_times( +async def test_stats_migrate_times( async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, ) -> None: """Test we can migrate times in the statistics tables.""" - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] now = dt_util.utcnow() now_timestamp = now.timestamp() From c4e5b59326b38f21396ec04c07c0002f70fd5d44 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 28 Nov 2024 13:41:30 +0100 Subject: [PATCH 0042/1198] Fix more flaky translation checks (#131824) --- tests/components/stt/test_init.py | 4 ++++ tests/components/tts/test_init.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/components/stt/test_init.py b/tests/components/stt/test_init.py index 92225123995..3d5daab2bec 100644 --- a/tests/components/stt/test_init.py +++ b/tests/components/stt/test_init.py @@ -34,6 +34,7 @@ from tests.common import ( mock_integration, mock_platform, mock_restore_cache, + reset_translation_cache, ) from tests.typing import ClientSessionGenerator, WebSocketGenerator @@ -518,6 +519,9 @@ async def test_default_engine_prefer_cloud_entity( assert provider_engine.name == "test" assert async_default_engine(hass) == "stt.cloud_stt_entity" + # Reset the `cloud` translations cache to avoid flaky translation checks + reset_translation_cache(hass, ["cloud"]) + async def test_get_engine_legacy( hass: HomeAssistant, tmp_path: Path, mock_provider: MockSTTProvider diff --git a/tests/components/tts/test_init.py b/tests/components/tts/test_init.py index 9d8dbf3ef94..0b01a24720d 100644 --- a/tests/components/tts/test_init.py +++ b/tests/components/tts/test_init.py @@ -1990,5 +1990,5 @@ async def test_default_engine_prefer_cloud_entity( assert provider_engine == "test" assert tts.async_default_engine(hass) == "tts.cloud_tts_entity" - # Reset the `cloud` translations cache + # Reset the `cloud` translations cache to avoid flaky translation checks reset_translation_cache(hass, ["cloud"]) From 00d82363feecf975508fef6c267aa4729456d3bb Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 28 Nov 2024 13:44:02 +0100 Subject: [PATCH 0043/1198] Delay "Split tests for full run" in CI (#131813) Adjust split tests requirements in CI --- .github/workflows/ci.yaml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index a1840dc4ead..a0ac973e960 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -819,6 +819,12 @@ jobs: needs: - info - base + - gen-requirements-all + - hassfest + - lint-other + - lint-ruff + - lint-ruff-format + - mypy name: Split tests for full run steps: - name: Install additional OS dependencies From 96dfa0e0cf283c83c2089fceba151340301ef473 
Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Thu, 28 Nov 2024 13:44:40 +0100 Subject: [PATCH 0044/1198] Remove wrong plural "s" in 'todo.remove_item' action (#131814) --- homeassistant/components/todo/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/todo/strings.json b/homeassistant/components/todo/strings.json index 45e378c3de5..245e5c82fc8 100644 --- a/homeassistant/components/todo/strings.json +++ b/homeassistant/components/todo/strings.json @@ -78,7 +78,7 @@ "fields": { "item": { "name": "Item name", - "description": "The name for the to-do list items." + "description": "The name for the to-do list item." } } } From a0584a0516459b88473f7ee90421f676ae65b352 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Thu, 28 Nov 2024 13:45:00 +0100 Subject: [PATCH 0045/1198] Remove deprecated switch constants (#131806) * Remove deprecated switch constants * Fix --- homeassistant/components/switch/__init__.py | 25 +-------------------- tests/components/switch/test_init.py | 23 +------------------ 2 files changed, 2 insertions(+), 46 deletions(-) diff --git a/homeassistant/components/switch/__init__.py b/homeassistant/components/switch/__init__.py index 9838d9501f7..61ee2908009 100644 --- a/homeassistant/components/switch/__init__.py +++ b/homeassistant/components/switch/__init__.py @@ -4,7 +4,6 @@ from __future__ import annotations from datetime import timedelta from enum import StrEnum -from functools import partial import logging from propcache import cached_property @@ -19,12 +18,6 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.helpers.entity import ToggleEntity, ToggleEntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType @@ -52,16 +45,8 @@ class SwitchDeviceClass(StrEnum): DEVICE_CLASSES_SCHEMA = vol.All(vol.Lower, vol.Coerce(SwitchDeviceClass)) - -# DEVICE_CLASS* below are deprecated as of 2021.12 -# use the SwitchDeviceClass enum instead. DEVICE_CLASSES = [cls.value for cls in SwitchDeviceClass] -_DEPRECATED_DEVICE_CLASS_OUTLET = DeprecatedConstantEnum( - SwitchDeviceClass.OUTLET, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_SWITCH = DeprecatedConstantEnum( - SwitchDeviceClass.SWITCH, "2025.1" -) + # mypy: disallow-any-generics @@ -124,11 +109,3 @@ class SwitchEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_) if hasattr(self, "entity_description"): return self.entity_description.device_class return None - - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/tests/components/switch/test_init.py b/tests/components/switch/test_init.py index 989b10c11d6..f52c455dabd 100644 --- a/tests/components/switch/test_init.py +++ b/tests/components/switch/test_init.py @@ -11,12 +11,7 @@ from homeassistant.setup import async_setup_component from . 
import common from .common import MockSwitch -from tests.common import ( - MockUser, - help_test_all, - import_and_test_deprecated_constant_enum, - setup_test_component_platform, -) +from tests.common import MockUser, setup_test_component_platform @pytest.fixture(autouse=True) @@ -87,19 +82,3 @@ async def test_switch_context( assert state2 is not None assert state.state != state2.state assert state2.context.user_id == hass_admin_user.id - - -def test_all() -> None: - """Test module.__all__ is correctly set.""" - help_test_all(switch) - - -@pytest.mark.parametrize(("enum"), list(switch.SwitchDeviceClass)) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: switch.SwitchDeviceClass, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, switch, enum, "DEVICE_CLASS_", "2025.1" - ) From dc064237ca4eef1571d03213079591ac83bbe13d Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Thu, 28 Nov 2024 13:45:10 +0100 Subject: [PATCH 0046/1198] Bump spotifyaio to 0.8.10 (#131827) --- .../components/spotify/browse_media.py | 35 +- .../components/spotify/manifest.json | 4 +- .../components/spotify/media_player.py | 2 + requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../components/spotify/fixtures/playlist.json | 466 ++++++++++++++++++ .../spotify/snapshots/test_diagnostics.ambr | 63 +++ .../spotify/snapshots/test_media_browser.ambr | 10 + 8 files changed, 566 insertions(+), 18 deletions(-) diff --git a/homeassistant/components/spotify/browse_media.py b/homeassistant/components/spotify/browse_media.py index 1ae5804ea66..81cdfdfb3cf 100644 --- a/homeassistant/components/spotify/browse_media.py +++ b/homeassistant/components/spotify/browse_media.py @@ -14,6 +14,7 @@ from spotifyaio import ( SpotifyClient, Track, ) +from spotifyaio.models import ItemType, SimplifiedEpisode import yarl from homeassistant.components.media_player import ( @@ -90,6 +91,16 @@ def _get_track_item_payload( } +def _get_episode_item_payload(episode: SimplifiedEpisode) -> ItemPayload: + return { + "id": episode.episode_id, + "name": episode.name, + "type": MediaType.EPISODE, + "uri": episode.uri, + "thumbnail": fetch_image_url(episode.images), + } + + class BrowsableMedia(StrEnum): """Enum of browsable media.""" @@ -345,10 +356,15 @@ async def build_item_response( # noqa: C901 if playlist := await spotify.get_playlist(media_content_id): title = playlist.name image = playlist.images[0].url if playlist.images else None - items = [ - _get_track_item_payload(playlist_track.track) - for playlist_track in playlist.tracks.items - ] + for playlist_item in playlist.tracks.items: + if playlist_item.track.type is ItemType.TRACK: + if TYPE_CHECKING: + assert isinstance(playlist_item.track, Track) + items.append(_get_track_item_payload(playlist_item.track)) + elif playlist_item.track.type is ItemType.EPISODE: + if TYPE_CHECKING: + assert isinstance(playlist_item.track, SimplifiedEpisode) + items.append(_get_episode_item_payload(playlist_item.track)) elif media_content_type == MediaType.ALBUM: if album := await spotify.get_album(media_content_id): title = album.name @@ -370,16 +386,7 @@ async def build_item_response( # noqa: C901 ): title = show.name image = show.images[0].url if show.images else None - items = [ - { - "id": episode.episode_id, - "name": episode.name, - "type": MediaType.EPISODE, - "uri": episode.uri, - "thumbnail": fetch_image_url(episode.images), - } - for episode in show_episodes - ] + items = [_get_episode_item_payload(episode) for episode 
in show_episodes] try: media_class = CONTENT_TYPE_MEDIA_CLASS[media_content_type] diff --git a/homeassistant/components/spotify/manifest.json b/homeassistant/components/spotify/manifest.json index e7b24cb3e1d..6c5b7382bbb 100644 --- a/homeassistant/components/spotify/manifest.json +++ b/homeassistant/components/spotify/manifest.json @@ -7,7 +7,7 @@ "documentation": "https://www.home-assistant.io/integrations/spotify", "integration_type": "service", "iot_class": "cloud_polling", - "loggers": ["spotipy"], - "requirements": ["spotifyaio==0.8.8"], + "loggers": ["spotifyaio"], + "requirements": ["spotifyaio==0.8.10"], "zeroconf": ["_spotify-connect._tcp.local."] } diff --git a/homeassistant/components/spotify/media_player.py b/homeassistant/components/spotify/media_player.py index 7687936fe4c..20a634efb42 100644 --- a/homeassistant/components/spotify/media_player.py +++ b/homeassistant/components/spotify/media_player.py @@ -361,6 +361,8 @@ class SpotifyMediaPlayer(SpotifyEntity, MediaPlayerEntity): """Select playback device.""" for device in self.devices.data: if device.name == source: + if TYPE_CHECKING: + assert device.device_id is not None await self.coordinator.client.transfer_playback(device.device_id) return diff --git a/requirements_all.txt b/requirements_all.txt index fc13e72c128..9a4c93ee96e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2719,7 +2719,7 @@ speak2mary==1.4.0 speedtest-cli==2.1.3 # homeassistant.components.spotify -spotifyaio==0.8.8 +spotifyaio==0.8.10 # homeassistant.components.sql sqlparse==0.5.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 0923b497575..b17bd38a849 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2171,7 +2171,7 @@ speak2mary==1.4.0 speedtest-cli==2.1.3 # homeassistant.components.spotify -spotifyaio==0.8.8 +spotifyaio==0.8.10 # homeassistant.components.sql sqlparse==0.5.0 diff --git a/tests/components/spotify/fixtures/playlist.json b/tests/components/spotify/fixtures/playlist.json index 36c28cc814b..5680ac9109c 100644 --- a/tests/components/spotify/fixtures/playlist.json +++ b/tests/components/spotify/fixtures/playlist.json @@ -514,6 +514,472 @@ "uri": "spotify:track:2E2znCPaS8anQe21GLxcvJ", "is_local": false } + }, + { + "added_at": "2024-11-28T11:20:58Z", + "added_by": { + "external_urls": { + "spotify": "https://open.spotify.com/user/1112264649" + }, + "href": "https://api.spotify.com/v1/users/1112264649", + "id": "1112264649", + "type": "user", + "uri": "spotify:user:1112264649" + }, + "is_local": false, + "primary_color": null, + "track": { + "explicit": false, + "audio_preview_url": "https://podz-content.spotifycdn.com/audio/clips/06lRxUmh8UNVTByuyxLYqh/clip_132296_192296.mp3", + "description": "Patreon: https://www.patreon.com/safetythirdMerch: https://safetythird.shopYouTube: https://www.youtube.com/@safetythird/Advertising Inquiries: https://redcircle.com/brandsPrivacy & Opt-Out: https://redcircle.com/privacy", + "duration_ms": 3690161, + "episode": true, + "external_urls": { + "spotify": "https://open.spotify.com/episode/3o0RYoo5iOMKSmEbunsbvW" + }, + "href": "https://api.spotify.com/v1/episodes/3o0RYoo5iOMKSmEbunsbvW", + "html_description": "
Patreon: https://www.patreon.com/safetythird Merch: https://safetythird.shop YouTube: https://www.youtube.com/@safetythird/ Advertising Inquiries: https://redcircle.com/brands
Privacy & Opt-Out: https://redcircle.com/privacy", + "id": "3o0RYoo5iOMKSmEbunsbvW", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a", + "width": 64 + } + ], + "is_externally_hosted": false, + "language": "en-US", + "languages": ["en-US"], + "name": "My Squirrel Has Brain Damage - Safety Third 119", + "release_date": "2024-07-26", + "release_date_precision": "day", + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "show": { + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", 
+ "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "copyrights": [], + "description": "Safety Third is a weekly show hosted by William Osman, NileRed, The Backyard Scientist, Allen Pan, and a couple other YouTube \"Scientists\". Sometimes we have guests, sometimes it's just us, but always: safety is our number three priority.", + "explicit": true, + "external_urls": { + "spotify": "https://open.spotify.com/show/1Y9ExMgMxoBVrgrfU7u0nD" + }, + "href": "https://api.spotify.com/v1/shows/1Y9ExMgMxoBVrgrfU7u0nD", + "html_description": "
Safety Third is a weekly show hosted by William Osman, NileRed, The Backyard Scientist, Allen Pan, and a couple other YouTube "Scientists". Sometimes we have guests, sometimes it's just us, but always: safety is our number three priority.
", + "id": "1Y9ExMgMxoBVrgrfU7u0nD", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a", + "width": 64 + } + ], + "is_externally_hosted": false, + "languages": ["en-US"], + "media_type": "audio", + "name": "Safety Third", + "publisher": "Safety Third ", + "total_episodes": 120, + "type": "show", + "uri": "spotify:show:1Y9ExMgMxoBVrgrfU7u0nD" + }, + "track": false, + "type": "episode", + "uri": "spotify:episode:3o0RYoo5iOMKSmEbunsbvW" + }, + "video_thumbnail": { + "url": null + } } ] } diff --git a/tests/components/spotify/snapshots/test_diagnostics.ambr b/tests/components/spotify/snapshots/test_diagnostics.ambr index 40502562da3..0ac375d18e3 100644 --- a/tests/components/spotify/snapshots/test_diagnostics.ambr +++ b/tests/components/spotify/snapshots/test_diagnostics.ambr @@ -409,6 +409,69 @@ 'uri': 'spotify:track:2E2znCPaS8anQe21GLxcvJ', }), }), + dict({ + 'track': dict({ + 'description': 'Patreon: https://www.patreon.com/safetythirdMerch: https://safetythird.shopYouTube: https://www.youtube.com/@safetythird/Advertising Inquiries: https://redcircle.com/brandsPrivacy & Opt-Out: https://redcircle.com/privacy', + 'duration_ms': 3690161, + 'episode_id': '3o0RYoo5iOMKSmEbunsbvW', + 'explicit': False, + 'external_urls': dict({ + 'spotify': 'https://open.spotify.com/episode/3o0RYoo5iOMKSmEbunsbvW', + }), + 'href': 'https://api.spotify.com/v1/episodes/3o0RYoo5iOMKSmEbunsbvW', + 'images': list([ + dict({ + 'height': 640, + 'url': 'https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a', + 'width': 640, + }), + dict({ + 'height': 300, + 'url': 'https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a', + 'width': 300, + }), + dict({ + 'height': 64, + 'url': 'https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a', + 'width': 64, + }), + ]), + 'name': 'My Squirrel Has Brain Damage - Safety Third 119', + 'release_date': '2024-07-26', + 'release_date_precision': 'day', + 'show': dict({ + 'description': 'Safety Third is a weekly show hosted by William Osman, NileRed, The Backyard Scientist, Allen Pan, and a couple other YouTube "Scientists". 
Sometimes we have guests, sometimes it\'s just us, but always: safety is our number three priority.', + 'external_urls': dict({ + 'spotify': 'https://open.spotify.com/show/1Y9ExMgMxoBVrgrfU7u0nD', + }), + 'href': 'https://api.spotify.com/v1/shows/1Y9ExMgMxoBVrgrfU7u0nD', + 'images': list([ + dict({ + 'height': 640, + 'url': 'https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a', + 'width': 640, + }), + dict({ + 'height': 300, + 'url': 'https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a', + 'width': 300, + }), + dict({ + 'height': 64, + 'url': 'https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a', + 'width': 64, + }), + ]), + 'name': 'Safety Third', + 'publisher': 'Safety Third ', + 'show_id': '1Y9ExMgMxoBVrgrfU7u0nD', + 'total_episodes': 120, + 'uri': 'spotify:show:1Y9ExMgMxoBVrgrfU7u0nD', + }), + 'type': 'episode', + 'uri': 'spotify:episode:3o0RYoo5iOMKSmEbunsbvW', + }), + }), ]), }), 'uri': 'spotify:playlist:3cEYpjA9oz9GiPac4AsH4n', diff --git a/tests/components/spotify/snapshots/test_media_browser.ambr b/tests/components/spotify/snapshots/test_media_browser.ambr index 764dc7a10e1..6b217977227 100644 --- a/tests/components/spotify/snapshots/test_media_browser.ambr +++ b/tests/components/spotify/snapshots/test_media_browser.ambr @@ -649,6 +649,16 @@ 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b27304e57d181ff062f8339d6c71', 'title': 'You Are So Beautiful', }), + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:episode:3o0RYoo5iOMKSmEbunsbvW', + 'media_content_type': 'spotify://episode', + 'thumbnail': 'https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a', + 'title': 'My Squirrel Has Brain Damage - Safety Third 119', + }), ]), 'children_media_class': , 'media_class': , From 474544abd8a27d1410fd5cfdedab22e60cadd2cc Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Thu, 28 Nov 2024 13:45:51 +0100 Subject: [PATCH 0047/1198] Make wake word selection part of configuration (#131832) --- homeassistant/components/esphome/select.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/esphome/select.py b/homeassistant/components/esphome/select.py index ab7654478a7..71a21186d3d 100644 --- a/homeassistant/components/esphome/select.py +++ b/homeassistant/components/esphome/select.py @@ -10,6 +10,7 @@ from homeassistant.components.assist_pipeline.select import ( ) from homeassistant.components.assist_satellite import AssistSatelliteConfiguration from homeassistant.components.select import SelectEntity, SelectEntityDescription +from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import restore_state from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -100,7 +101,9 @@ class EsphomeAssistSatelliteWakeWordSelect( """Wake word selector for esphome devices.""" entity_description = SelectEntityDescription( - key="wake_word", translation_key="wake_word" + key="wake_word", + translation_key="wake_word", + entity_category=EntityCategory.CONFIG, ) _attr_should_poll = False _attr_current_option: str | None = None From 3071aa2da1ab20a9df29c135d263006b430f6e82 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Thu, 28 Nov 2024 14:59:16 +0100 Subject: [PATCH 0048/1198] Use common string for items unit in Bring (#131834) --- homeassistant/components/bring/strings.json 
| 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/bring/strings.json b/homeassistant/components/bring/strings.json index defed056a3d..c8c12090118 100644 --- a/homeassistant/components/bring/strings.json +++ b/homeassistant/components/bring/strings.json @@ -1,4 +1,7 @@ { + "common": { + "shopping_list_items": "items" + }, "config": { "step": { "user": { @@ -30,15 +33,15 @@ "sensor": { "urgent": { "name": "Urgent", - "unit_of_measurement": "items" + "unit_of_measurement": "[%key:component::bring::common::shopping_list_items%]" }, "convenient": { "name": "On occasion", - "unit_of_measurement": "items" + "unit_of_measurement": "[%key:component::bring::common::shopping_list_items%]" }, "discounted": { "name": "Discount only", - "unit_of_measurement": "items" + "unit_of_measurement": "[%key:component::bring::common::shopping_list_items%]" }, "list_language": { "name": "Region & language", From f7d2d06c9b267be8d5e7fa12ac049a4813ee9376 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 28 Nov 2024 16:22:56 +0100 Subject: [PATCH 0049/1198] Add comments in homeassistant/components/recorder/migration.py (#131820) * Add comments in homeassistant/components/recorder/migration.py * Update homeassistant/components/recorder/migration.py --- homeassistant/components/recorder/migration.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 02ab05288c5..c9e36f47218 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -2742,7 +2742,10 @@ class EventIDPostMigration(BaseRunTimeMigration): class EntityIDPostMigration(BaseMigrationWithQuery, BaseRunTimeMigration): - """Migration to remove old entity_id strings from states.""" + """Migration to remove old entity_id strings from states. + + Introduced in HA Core 2023.4 by PR #89557. 
+ """ migration_id = "entity_id_post_migration" task = MigrationTask @@ -2764,9 +2767,9 @@ NON_LIVE_DATA_MIGRATORS = ( ) LIVE_DATA_MIGRATORS = ( - EventTypeIDMigration, - EntityIDMigration, - EventIDPostMigration, + EventTypeIDMigration, # Introduced in HA Core 2023.4 by PR #89465 + EntityIDMigration, # Introduced in HA Core 2023.4 by PR #89557 + EventIDPostMigration, # Introduced in HA Core 2023.4 by PR #89901 ) From ed408eb1a10ff93f15984b6cd9e378b32c5da18b Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Thu, 28 Nov 2024 16:54:23 +0100 Subject: [PATCH 0050/1198] Remove deprecated device tracker constants (#131846) --- .../components/device_tracker/__init__.py | 21 -------------- .../components/device_tracker/const.py | 27 ------------------ tests/components/device_tracker/test_init.py | 28 ------------------- 3 files changed, 76 deletions(-) diff --git a/homeassistant/components/device_tracker/__init__.py b/homeassistant/components/device_tracker/__init__.py index 28991483cda..313373e3181 100644 --- a/homeassistant/components/device_tracker/__init__.py +++ b/homeassistant/components/device_tracker/__init__.py @@ -2,15 +2,8 @@ from __future__ import annotations -from functools import partial - from homeassistant.const import ATTR_GPS_ACCURACY, STATE_HOME # noqa: F401 from homeassistant.core import HomeAssistant -from homeassistant.helpers.deprecation import ( - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass @@ -23,10 +16,6 @@ from .config_entry import ( # noqa: F401 async_unload_entry, ) from .const import ( # noqa: F401 - _DEPRECATED_SOURCE_TYPE_BLUETOOTH, - _DEPRECATED_SOURCE_TYPE_BLUETOOTH_LE, - _DEPRECATED_SOURCE_TYPE_GPS, - _DEPRECATED_SOURCE_TYPE_ROUTER, ATTR_ATTRIBUTES, ATTR_BATTERY, ATTR_DEV_ID, @@ -72,13 +61,3 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the device tracker.""" async_setup_legacy_integration(hass, config) return True - - -# As we import deprecated constants from the const module, we need to add these two functions -# otherwise this module will be logged for using deprecated constants and not the custom component -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/device_tracker/const.py b/homeassistant/components/device_tracker/const.py index 964b7faab9b..c9e4d4e910a 100644 --- a/homeassistant/components/device_tracker/const.py +++ b/homeassistant/components/device_tracker/const.py @@ -4,16 +4,9 @@ from __future__ import annotations from datetime import timedelta from enum import StrEnum -from functools import partial import logging from typing import Final -from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.util.signal_type import SignalType LOGGER: Final = logging.getLogger(__package__) @@ -34,19 +27,6 @@ class SourceType(StrEnum): BLUETOOTH_LE = "bluetooth_le" -# SOURCE_TYPE_* below are deprecated as of 2022.9 -# use the SourceType enum instead. 
-_DEPRECATED_SOURCE_TYPE_GPS: Final = DeprecatedConstantEnum(SourceType.GPS, "2025.1") -_DEPRECATED_SOURCE_TYPE_ROUTER: Final = DeprecatedConstantEnum( - SourceType.ROUTER, "2025.1" -) -_DEPRECATED_SOURCE_TYPE_BLUETOOTH: Final = DeprecatedConstantEnum( - SourceType.BLUETOOTH, "2025.1" -) -_DEPRECATED_SOURCE_TYPE_BLUETOOTH_LE: Final = DeprecatedConstantEnum( - SourceType.BLUETOOTH_LE, "2025.1" -) - CONF_SCAN_INTERVAL: Final = "interval_seconds" SCAN_INTERVAL: Final = timedelta(seconds=12) @@ -72,10 +52,3 @@ ATTR_IP: Final = "ip" CONNECTED_DEVICE_REGISTERED = SignalType[dict[str, str | None]]( "device_tracker_connected_device_registered" ) - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/tests/components/device_tracker/test_init.py b/tests/components/device_tracker/test_init.py index 362258b035a..e73c18919c5 100644 --- a/tests/components/device_tracker/test_init.py +++ b/tests/components/device_tracker/test_init.py @@ -5,7 +5,6 @@ from datetime import datetime, timedelta import json import logging import os -from types import ModuleType from unittest.mock import call, patch import pytest @@ -37,8 +36,6 @@ from .common import MockScanner, mock_legacy_device_tracker_setup from tests.common import ( assert_setup_component, async_fire_time_changed, - help_test_all, - import_and_test_deprecated_constant_enum, mock_registry, mock_restore_cache, patch_yaml_files, @@ -739,28 +736,3 @@ def test_see_schema_allowing_ios_calls() -> None: "hostname": "beer", } ) - - -@pytest.mark.parametrize( - "module", - [device_tracker, device_tracker.const], -) -def test_all(module: ModuleType) -> None: - """Test module.__all__ is correctly set.""" - help_test_all(module) - - -@pytest.mark.parametrize(("enum"), list(SourceType)) -@pytest.mark.parametrize( - "module", - [device_tracker, device_tracker.const], -) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: SourceType, - module: ModuleType, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, module, enum, "SOURCE_TYPE_", "2025.1" - ) From 57b099c2aafdd15a5b51bea302b424cc3464c8b5 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Thu, 28 Nov 2024 16:55:07 +0100 Subject: [PATCH 0051/1198] Add unit translations to Ista EcoTrend integration (#131768) --- homeassistant/components/ista_ecotrend/sensor.py | 1 - homeassistant/components/ista_ecotrend/strings.json | 3 ++- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/ista_ecotrend/sensor.py b/homeassistant/components/ista_ecotrend/sensor.py index 7aa1adfe4c9..779a5d5c55f 100644 --- a/homeassistant/components/ista_ecotrend/sensor.py +++ b/homeassistant/components/ista_ecotrend/sensor.py @@ -71,7 +71,6 @@ SENSOR_DESCRIPTIONS: tuple[IstaSensorEntityDescription, ...] 
= ( translation_key=IstaSensorEntity.HEATING, suggested_display_precision=0, consumption_type=IstaConsumptionType.HEATING, - native_unit_of_measurement="units", state_class=SensorStateClass.TOTAL, ), IstaSensorEntityDescription( diff --git a/homeassistant/components/ista_ecotrend/strings.json b/homeassistant/components/ista_ecotrend/strings.json index 0757977a8ea..e7c37461b19 100644 --- a/homeassistant/components/ista_ecotrend/strings.json +++ b/homeassistant/components/ista_ecotrend/strings.json @@ -38,7 +38,8 @@ "entity": { "sensor": { "heating": { - "name": "Heating" + "name": "Heating", + "unit_of_measurement": "units" }, "heating_cost": { "name": "Heating cost" From 0c5c09390c49e802c0bb1331ab447b566f6f1f9e Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Thu, 28 Nov 2024 16:56:04 +0100 Subject: [PATCH 0052/1198] Remove deprecated fan constants (#131845) --- homeassistant/components/fan/__init__.py | 29 ------------------------ tests/components/fan/test_init.py | 20 ---------------- 2 files changed, 49 deletions(-) diff --git a/homeassistant/components/fan/__init__.py b/homeassistant/components/fan/__init__.py index b31a18d0eac..71fb9c53353 100644 --- a/homeassistant/components/fan/__init__.py +++ b/homeassistant/components/fan/__init__.py @@ -23,12 +23,6 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.helpers.entity import ToggleEntity, ToggleEntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.entity_platform import EntityPlatform @@ -61,21 +55,6 @@ class FanEntityFeature(IntFlag): TURN_ON = 32 -# These SUPPORT_* constants are deprecated as of Home Assistant 2022.5. -# Please use the FanEntityFeature enum instead. -_DEPRECATED_SUPPORT_SET_SPEED = DeprecatedConstantEnum( - FanEntityFeature.SET_SPEED, "2025.1" -) -_DEPRECATED_SUPPORT_OSCILLATE = DeprecatedConstantEnum( - FanEntityFeature.OSCILLATE, "2025.1" -) -_DEPRECATED_SUPPORT_DIRECTION = DeprecatedConstantEnum( - FanEntityFeature.DIRECTION, "2025.1" -) -_DEPRECATED_SUPPORT_PRESET_MODE = DeprecatedConstantEnum( - FanEntityFeature.PRESET_MODE, "2025.1" -) - SERVICE_INCREASE_SPEED = "increase_speed" SERVICE_DECREASE_SPEED = "decrease_speed" SERVICE_OSCILLATE = "oscillate" @@ -543,11 +522,3 @@ class FanEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): Requires FanEntityFeature.SET_SPEED. 
""" return self._attr_preset_modes - - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = ft.partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = ft.partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/tests/components/fan/test_init.py b/tests/components/fan/test_init.py index a7dc544a97a..fbb09ab879c 100644 --- a/tests/components/fan/test_init.py +++ b/tests/components/fan/test_init.py @@ -4,7 +4,6 @@ from unittest.mock import patch import pytest -from homeassistant.components import fan from homeassistant.components.fan import ( ATTR_PRESET_MODE, ATTR_PRESET_MODES, @@ -27,8 +26,6 @@ from tests.common import ( MockConfigEntry, MockModule, MockPlatform, - help_test_all, - import_and_test_deprecated_constant_enum, mock_integration, mock_platform, setup_test_component_platform, @@ -166,23 +163,6 @@ async def test_preset_mode_validation( assert exc.value.translation_key == "not_valid_preset_mode" -def test_all() -> None: - """Test module.__all__ is correctly set.""" - help_test_all(fan) - - -@pytest.mark.parametrize(("enum"), list(fan.FanEntityFeature)) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: fan.FanEntityFeature, -) -> None: - """Test deprecated constants.""" - if not FanEntityFeature.TURN_OFF and not FanEntityFeature.TURN_ON: - import_and_test_deprecated_constant_enum( - caplog, fan, enum, "SUPPORT_", "2025.1" - ) - - def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: """Test deprecated supported features ints.""" From 0389800e2adb185766c4b40c5b2688ef92a9b6b3 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Thu, 28 Nov 2024 16:59:11 +0100 Subject: [PATCH 0053/1198] Remove deprecated humidifier constants (#131844) --- .../components/humidifier/__init__.py | 19 --------- homeassistant/components/humidifier/const.py | 32 -------------- tests/components/humidifier/test_init.py | 42 +------------------ 3 files changed, 1 insertion(+), 92 deletions(-) diff --git a/homeassistant/components/humidifier/__init__.py b/homeassistant/components/humidifier/__init__.py index b556a6961bb..1498c4f6e3d 100644 --- a/homeassistant/components/humidifier/__init__.py +++ b/homeassistant/components/humidifier/__init__.py @@ -4,7 +4,6 @@ from __future__ import annotations from datetime import timedelta from enum import StrEnum -from functools import partial import logging from typing import Any, final @@ -22,11 +21,6 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.deprecation import ( - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.helpers.entity import ToggleEntity, ToggleEntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType @@ -34,9 +28,6 @@ from homeassistant.loader import bind_hass from homeassistant.util.hass_dict import HassKey from .const import ( # noqa: F401 - _DEPRECATED_DEVICE_CLASS_DEHUMIDIFIER, - _DEPRECATED_DEVICE_CLASS_HUMIDIFIER, - _DEPRECATED_SUPPORT_MODES, ATTR_ACTION, ATTR_AVAILABLE_MODES, ATTR_CURRENT_HUMIDITY, @@ -314,13 +305,3 @@ async def async_service_humidity_set( ) await entity.async_set_humidity(humidity) - - -# As we 
import deprecated constants from the const module, we need to add these two functions -# otherwise this module will be logged for using deprecated constants and not the custom component -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/humidifier/const.py b/homeassistant/components/humidifier/const.py index 03ff0774ca0..ceef0c5a890 100644 --- a/homeassistant/components/humidifier/const.py +++ b/homeassistant/components/humidifier/const.py @@ -1,15 +1,6 @@ """Provides the constants needed for component.""" from enum import IntFlag, StrEnum -from functools import partial - -from homeassistant.helpers.deprecation import ( - DeprecatedConstant, - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) MODE_NORMAL = "normal" MODE_ECO = "eco" @@ -43,15 +34,6 @@ DEFAULT_MAX_HUMIDITY = 100 DOMAIN = "humidifier" -# DEVICE_CLASS_* below are deprecated as of 2021.12 -# use the HumidifierDeviceClass enum instead. -_DEPRECATED_DEVICE_CLASS_HUMIDIFIER = DeprecatedConstant( - "humidifier", "HumidifierDeviceClass.HUMIDIFIER", "2025.1" -) -_DEPRECATED_DEVICE_CLASS_DEHUMIDIFIER = DeprecatedConstant( - "dehumidifier", "HumidifierDeviceClass.DEHUMIDIFIER", "2025.1" -) - SERVICE_SET_MODE = "set_mode" SERVICE_SET_HUMIDITY = "set_humidity" @@ -60,17 +42,3 @@ class HumidifierEntityFeature(IntFlag): """Supported features of the humidifier entity.""" MODES = 1 - - -# The SUPPORT_MODES constant is deprecated as of Home Assistant 2022.5. -# Please use the HumidifierEntityFeature enum instead. 
-_DEPRECATED_SUPPORT_MODES = DeprecatedConstantEnum( - HumidifierEntityFeature.MODES, "2025.1" -) - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/tests/components/humidifier/test_init.py b/tests/components/humidifier/test_init.py index 2725f942576..9c10d5e39e1 100644 --- a/tests/components/humidifier/test_init.py +++ b/tests/components/humidifier/test_init.py @@ -1,12 +1,9 @@ """The tests for the humidifier component.""" -from enum import Enum -from types import ModuleType from unittest.mock import MagicMock import pytest -from homeassistant.components import humidifier from homeassistant.components.humidifier import ( ATTR_HUMIDITY, ATTR_MODE, @@ -20,13 +17,7 @@ from homeassistant.components.humidifier import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError -from tests.common import ( - MockConfigEntry, - MockEntity, - help_test_all, - import_and_test_deprecated_constant_enum, - setup_test_component_platform, -) +from tests.common import MockConfigEntry, MockEntity, setup_test_component_platform class MockHumidifierEntity(MockEntity, HumidifierEntity): @@ -60,37 +51,6 @@ async def test_sync_turn_off(hass: HomeAssistant) -> None: assert humidifier.turn_off.called -def _create_tuples(enum: type[Enum], constant_prefix: str) -> list[tuple[Enum, str]]: - return [(enum_field, constant_prefix) for enum_field in enum] - - -@pytest.mark.parametrize( - "module", - [humidifier, humidifier.const], -) -def test_all(module: ModuleType) -> None: - """Test module.__all__ is correctly set.""" - help_test_all(module) - - -@pytest.mark.parametrize( - ("enum", "constant_prefix"), - _create_tuples(humidifier.HumidifierEntityFeature, "SUPPORT_") - + _create_tuples(humidifier.HumidifierDeviceClass, "DEVICE_CLASS_"), -) -@pytest.mark.parametrize(("module"), [humidifier, humidifier.const]) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: Enum, - constant_prefix: str, - module: ModuleType, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, module, enum, constant_prefix, "2025.1" - ) - - def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: """Test deprecated supported features ints.""" From bbce183faffc8d5c2f1e1b897e101172a2de8530 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 28 Nov 2024 17:00:20 +0100 Subject: [PATCH 0054/1198] Deprecate dt_util.utc_to_timestamp (#131787) --- .../components/recorder/history/legacy.py | 4 +- .../components/recorder/history/modern.py | 4 +- .../components/recorder/models/legacy.py | 4 +- homeassistant/util/dt.py | 3 + tests/common.py | 2 +- tests/components/logbook/common.py | 2 +- tests/components/logbook/test_init.py | 2 +- tests/components/recorder/db_schema_32.py | 8 +- tests/components/recorder/db_schema_42.py | 2 +- tests/components/recorder/db_schema_43.py | 2 +- tests/components/recorder/test_purge.py | 96 +++++++++---------- .../recorder/test_purge_v32_schema.py | 32 +++---- tests/util/test_dt.py | 6 +- 13 files changed, 87 insertions(+), 80 deletions(-) diff --git a/homeassistant/components/recorder/history/legacy.py b/homeassistant/components/recorder/history/legacy.py index 3a0fe79455b..dc49ebb9768 100644 --- 
a/homeassistant/components/recorder/history/legacy.py +++ b/homeassistant/components/recorder/history/legacy.py @@ -447,7 +447,7 @@ def _get_states_for_entities_stmt( ) # We got an include-list of entities, accelerate the query by filtering already # in the inner query. - utc_point_in_time_ts = dt_util.utc_to_timestamp(utc_point_in_time) + utc_point_in_time_ts = utc_point_in_time.timestamp() stmt += lambda q: q.join( ( most_recent_states_for_entities_by_date := ( @@ -518,7 +518,7 @@ def _get_single_entity_states_stmt( stmt, join_attributes = _lambda_stmt_and_join_attributes( no_attributes, include_last_changed=True ) - utc_point_in_time_ts = dt_util.utc_to_timestamp(utc_point_in_time) + utc_point_in_time_ts = utc_point_in_time.timestamp() stmt += ( lambda q: q.filter( States.last_updated_ts < utc_point_in_time_ts, diff --git a/homeassistant/components/recorder/history/modern.py b/homeassistant/components/recorder/history/modern.py index 902f1b5dc24..01551de1f28 100644 --- a/homeassistant/components/recorder/history/modern.py +++ b/homeassistant/components/recorder/history/modern.py @@ -250,7 +250,7 @@ def get_significant_states_with_session( oldest_ts := _get_oldest_possible_ts(hass, start_time) ): include_start_time_state = False - start_time_ts = dt_util.utc_to_timestamp(start_time) + start_time_ts = start_time.timestamp() end_time_ts = datetime_to_timestamp_or_none(end_time) single_metadata_id = metadata_ids[0] if len(metadata_ids) == 1 else None stmt = lambda_stmt( @@ -415,7 +415,7 @@ def state_changes_during_period( oldest_ts := _get_oldest_possible_ts(hass, start_time) ): include_start_time_state = False - start_time_ts = dt_util.utc_to_timestamp(start_time) + start_time_ts = start_time.timestamp() end_time_ts = datetime_to_timestamp_or_none(end_time) stmt = lambda_stmt( lambda: _state_changed_during_period_stmt( diff --git a/homeassistant/components/recorder/models/legacy.py b/homeassistant/components/recorder/models/legacy.py index 21a8a39ba0f..a469aa49ab2 100644 --- a/homeassistant/components/recorder/models/legacy.py +++ b/homeassistant/components/recorder/models/legacy.py @@ -46,7 +46,7 @@ class LegacyLazyState(State): self.state = self._row.state or "" self._attributes: dict[str, Any] | None = None self._last_updated_ts: float | None = self._row.last_updated_ts or ( - dt_util.utc_to_timestamp(start_time) if start_time else None + start_time.timestamp() if start_time else None ) self._last_changed_ts: float | None = ( self._row.last_changed_ts or self._last_updated_ts @@ -146,7 +146,7 @@ def legacy_row_to_compressed_state( COMPRESSED_STATE_ATTRIBUTES: decode_attributes_from_row_legacy(row, attr_cache), } if start_time: - comp_state[COMPRESSED_STATE_LAST_UPDATED] = dt_util.utc_to_timestamp(start_time) + comp_state[COMPRESSED_STATE_LAST_UPDATED] = start_time.timestamp() else: row_last_updated_ts: float = row.last_updated_ts comp_state[COMPRESSED_STATE_LAST_UPDATED] = row_last_updated_ts diff --git a/homeassistant/util/dt.py b/homeassistant/util/dt.py index ee2b6c762d8..eb898e4b544 100644 --- a/homeassistant/util/dt.py +++ b/homeassistant/util/dt.py @@ -13,6 +13,8 @@ import zoneinfo from aiozoneinfo import async_get_time_zone as _async_get_time_zone import ciso8601 +from homeassistant.helpers.deprecation import deprecated_function + DATE_STR_FORMAT = "%Y-%m-%d" UTC = dt.UTC DEFAULT_TIME_ZONE: dt.tzinfo = dt.UTC @@ -170,6 +172,7 @@ utc_from_timestamp = partial(dt.datetime.fromtimestamp, tz=UTC) """Return a UTC time from a timestamp.""" +@deprecated_function("datetime.timestamp", 
breaks_in_ha_version="2026.1") def utc_to_timestamp(utc_dt: dt.datetime) -> float: """Fast conversion of a datetime in UTC to a timestamp.""" # Taken from diff --git a/tests/common.py b/tests/common.py index 3ec3f6d844c..ac6f10b8c44 100644 --- a/tests/common.py +++ b/tests/common.py @@ -491,7 +491,7 @@ _MONOTONIC_RESOLUTION = time.get_clock_info("monotonic").resolution def _async_fire_time_changed( hass: HomeAssistant, utc_datetime: datetime | None, fire_all: bool ) -> None: - timestamp = dt_util.utc_to_timestamp(utc_datetime) + timestamp = utc_datetime.timestamp() for task in list(get_scheduled_timer_handles(hass.loop)): if not isinstance(task, asyncio.TimerHandle): continue diff --git a/tests/components/logbook/common.py b/tests/components/logbook/common.py index afa8b7fcde5..abb118467f4 100644 --- a/tests/components/logbook/common.py +++ b/tests/components/logbook/common.py @@ -35,7 +35,7 @@ class MockRow: self.event_data = json.dumps(data, cls=JSONEncoder) self.data = data self.time_fired = dt_util.utcnow() - self.time_fired_ts = dt_util.utc_to_timestamp(self.time_fired) + self.time_fired_ts = self.time_fired.timestamp() self.context_parent_id_bin = ( ulid_to_bytes_or_none(context.parent_id) if context else None ) diff --git a/tests/components/logbook/test_init.py b/tests/components/logbook/test_init.py index 8ac7dde67ab..841c8ed1247 100644 --- a/tests/components/logbook/test_init.py +++ b/tests/components/logbook/test_init.py @@ -330,7 +330,7 @@ def create_state_changed_event_from_old_new( row_id=1, event_type=PSEUDO_EVENT_STATE_CHANGED, event_data="{}", - time_fired_ts=dt_util.utc_to_timestamp(event_time_fired), + time_fired_ts=event_time_fired.timestamp(), context_id_bin=None, context_user_id_bin=None, context_parent_id_bin=None, diff --git a/tests/components/recorder/db_schema_32.py b/tests/components/recorder/db_schema_32.py index 6da0272da87..39ddb8e3148 100644 --- a/tests/components/recorder/db_schema_32.py +++ b/tests/components/recorder/db_schema_32.py @@ -254,7 +254,7 @@ class Events(Base): # type: ignore[misc,valid-type] event_data=None, origin_idx=EVENT_ORIGIN_TO_IDX.get(event.origin), time_fired=None, - time_fired_ts=dt_util.utc_to_timestamp(event.time_fired), + time_fired_ts=event.time_fired.timestamp(), context_id=event.context.id, context_user_id=event.context.user_id, context_parent_id=event.context.parent_id, @@ -429,16 +429,16 @@ class States(Base): # type: ignore[misc,valid-type] # None state means the state was removed from the state machine if state is None: dbstate.state = "" - dbstate.last_updated_ts = dt_util.utc_to_timestamp(event.time_fired) + dbstate.last_updated_ts = event.time_fired.timestamp() dbstate.last_changed_ts = None return dbstate dbstate.state = state.state - dbstate.last_updated_ts = dt_util.utc_to_timestamp(state.last_updated) + dbstate.last_updated_ts = state.last_updated.timestamp() if state.last_updated == state.last_changed: dbstate.last_changed_ts = None else: - dbstate.last_changed_ts = dt_util.utc_to_timestamp(state.last_changed) + dbstate.last_changed_ts = state.last_changed.timestamp() return dbstate diff --git a/tests/components/recorder/db_schema_42.py b/tests/components/recorder/db_schema_42.py index 99bdbb28f2c..efeade46562 100644 --- a/tests/components/recorder/db_schema_42.py +++ b/tests/components/recorder/db_schema_42.py @@ -687,7 +687,7 @@ class StatisticsBase: created=None, created_ts=time.time(), start=None, - start_ts=dt_util.utc_to_timestamp(stats["start"]), + start_ts=stats["start"].timestamp(), mean=stats.get("mean"), 
min=stats.get("min"), max=stats.get("max"), diff --git a/tests/components/recorder/db_schema_43.py b/tests/components/recorder/db_schema_43.py index 26d8ecd6856..8e77e8782ee 100644 --- a/tests/components/recorder/db_schema_43.py +++ b/tests/components/recorder/db_schema_43.py @@ -697,7 +697,7 @@ class StatisticsBase: created=None, created_ts=time.time(), start=None, - start_ts=dt_util.utc_to_timestamp(stats["start"]), + start_ts=stats["start"].timestamp(), mean=stats.get("mean"), min=stats.get("min"), max=stats.get("max"), diff --git a/tests/components/recorder/test_purge.py b/tests/components/recorder/test_purge.py index f721a260c14..076f6ae8bab 100644 --- a/tests/components/recorder/test_purge.py +++ b/tests/components/recorder/test_purge.py @@ -564,7 +564,7 @@ async def test_purge_edge_case( event_type="EVENT_TEST_PURGE", event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp), + time_fired_ts=timestamp.timestamp(), ) ) session.add( @@ -572,8 +572,8 @@ async def test_purge_edge_case( entity_id="test.recorder2", state="purgeme", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), event_id=1001, attributes_id=1002, ) @@ -635,7 +635,7 @@ async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) - event_type="KEEP", event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp_keep), + time_fired_ts=timestamp_keep.timestamp(), ) ) session.add( @@ -643,8 +643,8 @@ async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) - entity_id="test.cutoff", state="keep", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp_keep), - last_updated_ts=dt_util.utc_to_timestamp(timestamp_keep), + last_changed_ts=timestamp_keep.timestamp(), + last_updated_ts=timestamp_keep.timestamp(), event_id=1000, attributes_id=1000, ) @@ -663,7 +663,7 @@ async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) - event_type="PURGE", event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp_purge), + time_fired_ts=timestamp_purge.timestamp(), ) ) session.add( @@ -671,8 +671,8 @@ async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) - entity_id="test.cutoff", state="purge", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp_purge), - last_updated_ts=dt_util.utc_to_timestamp(timestamp_purge), + last_changed_ts=timestamp_purge.timestamp(), + last_updated_ts=timestamp_purge.timestamp(), event_id=1000 + row, attributes_id=1000 + row, ) @@ -821,8 +821,8 @@ async def test_purge_filtered_states( entity_id="sensor.excluded", state="purgeme", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), ) ) # Add states and state_changed events that should be keeped @@ -847,8 +847,8 @@ async def test_purge_filtered_states( entity_id="sensor.linked_old_state_id", state="keep", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), old_state_id=1, state_attributes=state_attrs, ) @@ -857,8 +857,8 @@ async def test_purge_filtered_states( entity_id="sensor.linked_old_state_id", 
state="keep", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), old_state_id=2, state_attributes=state_attrs, ) @@ -866,8 +866,8 @@ async def test_purge_filtered_states( entity_id="sensor.linked_old_state_id", state="keep", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), old_state_id=62, # keep state_attributes=state_attrs, ) @@ -879,7 +879,7 @@ async def test_purge_filtered_states( event_type="EVENT_KEEP", event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp), + time_fired_ts=timestamp.timestamp(), ) ) convert_pending_states_to_meta(recorder_mock, session) @@ -1016,8 +1016,8 @@ async def test_purge_filtered_states_multiple_rounds( entity_id="sensor.excluded", state="purgeme", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), ) ) # Add states and state_changed events that should be keeped @@ -1042,8 +1042,8 @@ async def test_purge_filtered_states_multiple_rounds( entity_id="sensor.linked_old_state_id", state="keep", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), old_state_id=1, state_attributes=state_attrs, ) @@ -1052,8 +1052,8 @@ async def test_purge_filtered_states_multiple_rounds( entity_id="sensor.linked_old_state_id", state="keep", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), old_state_id=2, state_attributes=state_attrs, ) @@ -1061,8 +1061,8 @@ async def test_purge_filtered_states_multiple_rounds( entity_id="sensor.linked_old_state_id", state="keep", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), old_state_id=62, # keep state_attributes=state_attrs, ) @@ -1074,7 +1074,7 @@ async def test_purge_filtered_states_multiple_rounds( event_type="EVENT_KEEP", event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp), + time_fired_ts=timestamp.timestamp(), ) ) convert_pending_states_to_meta(recorder_mock, session) @@ -1228,8 +1228,8 @@ async def test_purge_without_state_attributes_filtered_states_to_empty( entity_id="sensor.old_format", state=STATE_ON, attributes=json.dumps({"old": "not_using_state_attributes"}), - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), event_id=event_id, state_attributes=None, ) @@ -1240,7 +1240,7 @@ async def test_purge_without_state_attributes_filtered_states_to_empty( event_type=EVENT_STATE_CHANGED, event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp), + time_fired_ts=timestamp.timestamp(), ) ) session.add( @@ -1249,7 +1249,7 @@ async def 
test_purge_without_state_attributes_filtered_states_to_empty( event_type=EVENT_THEMES_UPDATED, event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp), + time_fired_ts=timestamp.timestamp(), ) ) convert_pending_states_to_meta(recorder_mock, session) @@ -1304,7 +1304,7 @@ async def test_purge_filtered_events( event_type="EVENT_PURGE", event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp), + time_fired_ts=timestamp.timestamp(), ) ) @@ -1411,7 +1411,7 @@ async def test_purge_filtered_events_state_changed( event_type="EVENT_KEEP", event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp), + time_fired_ts=timestamp.timestamp(), ) ) # Add states with linked old_state_ids that need to be handled @@ -1420,8 +1420,8 @@ async def test_purge_filtered_events_state_changed( entity_id="sensor.linked_old_state_id", state="keep", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), old_state_id=1, ) timestamp = dt_util.utcnow() - timedelta(days=4) @@ -1429,16 +1429,16 @@ async def test_purge_filtered_events_state_changed( entity_id="sensor.linked_old_state_id", state="keep", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), old_state_id=2, ) state_3 = States( entity_id="sensor.linked_old_state_id", state="keep", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), old_state_id=62, # keep ) session.add_all((state_1, state_2, state_3)) @@ -1448,7 +1448,7 @@ async def test_purge_filtered_events_state_changed( event_type="excluded_event", event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp), + time_fired_ts=timestamp.timestamp(), ) ) session.add( @@ -1456,8 +1456,8 @@ async def test_purge_filtered_events_state_changed( entity_id="sensor.old_format", state="remove", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), ) ) convert_pending_events_to_event_types(recorder_mock, session) @@ -1823,8 +1823,8 @@ def _add_state_without_event_linkage( entity_id=entity_id, state=state, attributes=None, - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), event_id=None, state_attributes=state_attrs, ) @@ -1848,8 +1848,8 @@ def _add_state_with_state_attributes( entity_id=entity_id, state=state, attributes=None, - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), event_id=event_id, state_attributes=state_attrs, ) @@ -1971,7 +1971,7 @@ async def test_purge_old_events_purges_the_event_type_ids( Events( event_type=None, event_type_id=event_type.event_type_id, - time_fired_ts=dt_util.utc_to_timestamp(timestamp), + time_fired_ts=timestamp.timestamp(), ) ) return recorder_mock.event_type_manager.get_many( @@ -2101,7 +2101,7 
@@ async def test_purge_old_states_purges_the_state_metadata_ids( States( metadata_id=metadata_id, state="any", - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_updated_ts=timestamp.timestamp(), ) ) return recorder_mock.states_meta_manager.get_many( diff --git a/tests/components/recorder/test_purge_v32_schema.py b/tests/components/recorder/test_purge_v32_schema.py index 468fd38c855..2bd1e7fd7f7 100644 --- a/tests/components/recorder/test_purge_v32_schema.py +++ b/tests/components/recorder/test_purge_v32_schema.py @@ -509,7 +509,7 @@ async def test_purge_edge_case(hass: HomeAssistant, use_sqlite: bool) -> None: event_type="EVENT_TEST_PURGE", event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp), + time_fired_ts=timestamp.timestamp(), ) ) session.add( @@ -517,8 +517,8 @@ async def test_purge_edge_case(hass: HomeAssistant, use_sqlite: bool) -> None: entity_id="test.recorder2", state="purgeme", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), event_id=1001, attributes_id=1002, ) @@ -576,7 +576,7 @@ async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) - event_type="KEEP", event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp_keep), + time_fired_ts=timestamp_keep.timestamp(), ) ) session.add( @@ -584,8 +584,8 @@ async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) - entity_id="test.cutoff", state="keep", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp_keep), - last_updated_ts=dt_util.utc_to_timestamp(timestamp_keep), + last_changed_ts=timestamp_keep.timestamp(), + last_updated_ts=timestamp_keep.timestamp(), event_id=1000, attributes_id=1000, ) @@ -604,7 +604,7 @@ async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) - event_type="PURGE", event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp_purge), + time_fired_ts=timestamp_purge.timestamp(), ) ) session.add( @@ -612,8 +612,8 @@ async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) - entity_id="test.cutoff", state="purge", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp_purge), - last_updated_ts=dt_util.utc_to_timestamp(timestamp_purge), + last_changed_ts=timestamp_purge.timestamp(), + last_updated_ts=timestamp_purge.timestamp(), event_id=1000 + row, attributes_id=1000 + row, ) @@ -771,7 +771,7 @@ async def _add_test_events(hass: HomeAssistant, iterations: int = 1): event_type=event_type, event_data=json.dumps(event_data), origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp), + time_fired_ts=timestamp.timestamp(), ) ) @@ -808,7 +808,7 @@ async def _add_events_with_event_data(hass: HomeAssistant, iterations: int = 1): Events( event_type=event_type, origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp), + time_fired_ts=timestamp.timestamp(), event_data_rel=event_data, ) ) @@ -910,8 +910,8 @@ def _add_state_without_event_linkage( entity_id=entity_id, state=state, attributes=None, - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), event_id=None, state_attributes=state_attrs, ) @@ -935,8 +935,8 @@ def _add_state_and_state_changed_event( entity_id=entity_id, state=state, 
attributes=None, - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), event_id=event_id, state_attributes=state_attrs, ) @@ -947,7 +947,7 @@ def _add_state_and_state_changed_event( event_type=EVENT_STATE_CHANGED, event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp), + time_fired_ts=timestamp.timestamp(), ) ) diff --git a/tests/util/test_dt.py b/tests/util/test_dt.py index 0e8432bbb83..347e92d6056 100644 --- a/tests/util/test_dt.py +++ b/tests/util/test_dt.py @@ -116,10 +116,14 @@ def test_utc_from_timestamp() -> None: ) -def test_timestamp_to_utc() -> None: +def test_timestamp_to_utc(caplog: pytest.LogCaptureFixture) -> None: """Test we can convert a utc datetime to a timestamp.""" utc_now = dt_util.utcnow() assert dt_util.utc_to_timestamp(utc_now) == utc_now.timestamp() + assert ( + "utc_to_timestamp is a deprecated function which will be removed " + "in HA Core 2026.1. Use datetime.timestamp instead" in caplog.text + ) def test_as_timestamp() -> None: From 62e788c7da921e7ac81b6830a5f002fc10b3ee7d Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 28 Nov 2024 17:58:56 +0100 Subject: [PATCH 0055/1198] Add config flow rules to quality_scale hassfest validation (#131791) * Add config flow rules to quality_scale hassfest validation * Use integration.config_flow property --- .../components/mqtt/quality_scale.yaml | 5 +++- script/hassfest/quality_scale.py | 16 +++++++--- .../config_entry_unloading.py | 5 +++- .../quality_scale_validation/config_flow.py | 24 +++++++++++++++ .../reauthentication_flow.py | 30 +++++++++++++++++++ .../reconfiguration_flow.py | 30 +++++++++++++++++++ 6 files changed, 104 insertions(+), 6 deletions(-) create mode 100644 script/hassfest/quality_scale_validation/config_flow.py create mode 100644 script/hassfest/quality_scale_validation/reauthentication_flow.py create mode 100644 script/hassfest/quality_scale_validation/reconfiguration_flow.py diff --git a/homeassistant/components/mqtt/quality_scale.yaml b/homeassistant/components/mqtt/quality_scale.yaml index b3084f67da3..d459f0420f1 100644 --- a/homeassistant/components/mqtt/quality_scale.yaml +++ b/homeassistant/components/mqtt/quality_scale.yaml @@ -86,7 +86,10 @@ rules: comment: > This is not possible because the integrations generates entities based on a user supplied config or discovery. - reconfiguration-flow: done + reconfiguration-flow: + status: exempt + comment: > + This integration is reconfigured via options flow. 
dynamic-devices: status: done comment: | diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 980d659b03e..3e8d25c1f32 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -12,7 +12,13 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.util.yaml import load_yaml_dict from .model import Config, Integration, ScaledQualityScaleTiers -from .quality_scale_validation import RuleValidationProtocol, config_entry_unloading +from .quality_scale_validation import ( + RuleValidationProtocol, + config_entry_unloading, + config_flow, + reauthentication_flow, + reconfiguration_flow, +) QUALITY_SCALE_TIERS = {value.name.lower(): value for value in ScaledQualityScaleTiers} @@ -32,7 +38,7 @@ ALL_RULES = [ Rule("appropriate-polling", ScaledQualityScaleTiers.BRONZE), Rule("brands", ScaledQualityScaleTiers.BRONZE), Rule("common-modules", ScaledQualityScaleTiers.BRONZE), - Rule("config-flow", ScaledQualityScaleTiers.BRONZE), + Rule("config-flow", ScaledQualityScaleTiers.BRONZE, config_flow), Rule("config-flow-test-coverage", ScaledQualityScaleTiers.BRONZE), Rule("dependency-transparency", ScaledQualityScaleTiers.BRONZE), Rule("docs-actions", ScaledQualityScaleTiers.BRONZE), @@ -57,7 +63,9 @@ ALL_RULES = [ Rule("integration-owner", ScaledQualityScaleTiers.SILVER), Rule("log-when-unavailable", ScaledQualityScaleTiers.SILVER), Rule("parallel-updates", ScaledQualityScaleTiers.SILVER), - Rule("reauthentication-flow", ScaledQualityScaleTiers.SILVER), + Rule( + "reauthentication-flow", ScaledQualityScaleTiers.SILVER, reauthentication_flow + ), Rule("test-coverage", ScaledQualityScaleTiers.SILVER), # GOLD: [ Rule("devices", ScaledQualityScaleTiers.GOLD), @@ -78,7 +86,7 @@ ALL_RULES = [ Rule("entity-translations", ScaledQualityScaleTiers.GOLD), Rule("exception-translations", ScaledQualityScaleTiers.GOLD), Rule("icon-translations", ScaledQualityScaleTiers.GOLD), - Rule("reconfiguration-flow", ScaledQualityScaleTiers.GOLD), + Rule("reconfiguration-flow", ScaledQualityScaleTiers.GOLD, reconfiguration_flow), Rule("repair-issues", ScaledQualityScaleTiers.GOLD), Rule("stale-devices", ScaledQualityScaleTiers.GOLD), # PLATINUM diff --git a/script/hassfest/quality_scale_validation/config_entry_unloading.py b/script/hassfest/quality_scale_validation/config_entry_unloading.py index 42134e0391e..63b0117498e 100644 --- a/script/hassfest/quality_scale_validation/config_entry_unloading.py +++ b/script/hassfest/quality_scale_validation/config_entry_unloading.py @@ -1,4 +1,7 @@ -"""Enforce that the integration implements entry unloading.""" +"""Enforce that the integration implements entry unloading. + +https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/config-entry-unloading/ +""" import ast diff --git a/script/hassfest/quality_scale_validation/config_flow.py b/script/hassfest/quality_scale_validation/config_flow.py new file mode 100644 index 00000000000..e1361d6550f --- /dev/null +++ b/script/hassfest/quality_scale_validation/config_flow.py @@ -0,0 +1,24 @@ +"""Enforce that the integration implements config flow. 
+ +https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/config-flow/ +""" + +from script.hassfest.model import Integration + + +def validate(integration: Integration) -> list[str] | None: + """Validate that the integration implements config flow.""" + + if not integration.config_flow: + return [ + "Integration does not set config_flow in its manifest " + f"homeassistant/components/{integration.domain}/manifest.json", + ] + + config_flow_file = integration.path / "config_flow.py" + if not config_flow_file.exists(): + return [ + "Integration does not implement config flow (is missing config_flow.py)", + ] + + return None diff --git a/script/hassfest/quality_scale_validation/reauthentication_flow.py b/script/hassfest/quality_scale_validation/reauthentication_flow.py new file mode 100644 index 00000000000..d4bc8ed6e96 --- /dev/null +++ b/script/hassfest/quality_scale_validation/reauthentication_flow.py @@ -0,0 +1,30 @@ +"""Enforce that the integration implements reauthentication flow. + +https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/reauthentication-flow/ +""" + +import ast + +from script.hassfest.model import Integration + + +def _has_async_function(module: ast.Module, name: str) -> bool: + """Test if the module defines a function.""" + return any( + type(item) is ast.AsyncFunctionDef and item.name == name + for item in ast.walk(module) + ) + + +def validate(integration: Integration) -> list[str] | None: + """Validate that the integration has a reauthentication flow.""" + + config_flow_file = integration.path / "config_flow.py" + config_flow = ast.parse(config_flow_file.read_text()) + + if not _has_async_function(config_flow, "async_step_reauth"): + return [ + "Integration does not support a reauthentication flow " + f"(is missing `async_step_reauth` in {config_flow_file})" + ] + return None diff --git a/script/hassfest/quality_scale_validation/reconfiguration_flow.py b/script/hassfest/quality_scale_validation/reconfiguration_flow.py new file mode 100644 index 00000000000..94547e95625 --- /dev/null +++ b/script/hassfest/quality_scale_validation/reconfiguration_flow.py @@ -0,0 +1,30 @@ +"""Enforce that the integration implements reconfiguration flow. 
+ +https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/reconfiguration-flow/ +""" + +import ast + +from script.hassfest.model import Integration + + +def _has_async_function(module: ast.Module, name: str) -> bool: + """Test if the module defines a function.""" + return any( + type(item) is ast.AsyncFunctionDef and item.name == name + for item in ast.walk(module) + ) + + +def validate(integration: Integration) -> list[str] | None: + """Validate that the integration has a reconfiguration flow.""" + + config_flow_file = integration.path / "config_flow.py" + config_flow = ast.parse(config_flow_file.read_text()) + + if not _has_async_function(config_flow, "async_step_reconfigure"): + return [ + "Integration does not support a reconfiguration flow " + f"(is missing `async_step_reconfigure` in {config_flow_file})" + ] + return None From 87320609dc0cab6c6e617a0571dd89cebf8ea424 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Thu, 28 Nov 2024 18:04:00 +0100 Subject: [PATCH 0056/1198] Bump pyatv to 0.16.0 (#131852) --- homeassistant/components/apple_tv/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/apple_tv/manifest.json b/homeassistant/components/apple_tv/manifest.json index b4e1b354878..b10a14af32b 100644 --- a/homeassistant/components/apple_tv/manifest.json +++ b/homeassistant/components/apple_tv/manifest.json @@ -7,7 +7,7 @@ "documentation": "https://www.home-assistant.io/integrations/apple_tv", "iot_class": "local_push", "loggers": ["pyatv", "srptools"], - "requirements": ["pyatv==0.15.1"], + "requirements": ["pyatv==0.16.0"], "zeroconf": [ "_mediaremotetv._tcp.local.", "_companion-link._tcp.local.", diff --git a/requirements_all.txt b/requirements_all.txt index 9a4c93ee96e..535a5bbc34b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1778,7 +1778,7 @@ pyatag==0.3.5.3 pyatmo==8.1.0 # homeassistant.components.apple_tv -pyatv==0.15.1 +pyatv==0.16.0 # homeassistant.components.aussie_broadband pyaussiebb==0.0.15 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b17bd38a849..077847d260b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1449,7 +1449,7 @@ pyatag==0.3.5.3 pyatmo==8.1.0 # homeassistant.components.apple_tv -pyatv==0.15.1 +pyatv==0.16.0 # homeassistant.components.aussie_broadband pyaussiebb==0.0.15 From 8862c5c4d85e2a049999e56d5db330f3414df8e0 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Thu, 28 Nov 2024 09:16:58 -0800 Subject: [PATCH 0057/1198] Remove unnecessary hass.data defaults from Rainbird (#131858) --- homeassistant/components/rainbird/__init__.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/homeassistant/components/rainbird/__init__.py b/homeassistant/components/rainbird/__init__.py index 97dec9a681e..db88902bc3e 100644 --- a/homeassistant/components/rainbird/__init__.py +++ b/homeassistant/components/rainbird/__init__.py @@ -65,8 +65,6 @@ def _async_register_clientsession_shutdown( async def async_setup_entry(hass: HomeAssistant, entry: RainbirdConfigEntry) -> bool: """Set up the config entry for Rain Bird.""" - hass.data.setdefault(DOMAIN, {}) - clientsession = async_create_clientsession() _async_register_clientsession_shutdown(hass, entry, clientsession) From 1a9ab0774215169270c3b7d003b0357c7ea06376 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Thu, 28 Nov 2024 18:30:05 +0100 Subject: [PATCH 0058/1198] 
Allow empty trigger sentence responses in conversations (#131849) allow empty trigger sentence responses --- homeassistant/components/assist_pipeline/pipeline.py | 2 +- tests/components/conversation/test_init.py | 10 +++++++--- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/assist_pipeline/pipeline.py b/homeassistant/components/assist_pipeline/pipeline.py index 96beaf792a7..5bbc81adb86 100644 --- a/homeassistant/components/assist_pipeline/pipeline.py +++ b/homeassistant/components/assist_pipeline/pipeline.py @@ -1040,7 +1040,7 @@ class PipelineRun: := await conversation.async_handle_sentence_triggers( self.hass, user_input ) - ): + ) is not None: # Sentence trigger matched trigger_response = intent.IntentResponse( self.pipeline.conversation_language diff --git a/tests/components/conversation/test_init.py b/tests/components/conversation/test_init.py index 0100e62cf81..6900ba2d419 100644 --- a/tests/components/conversation/test_init.py +++ b/tests/components/conversation/test_init.py @@ -236,12 +236,17 @@ async def test_prepare_agent( assert len(mock_prepare.mock_calls) == 1 -async def test_async_handle_sentence_triggers(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("response_template", "expected_response"), + [("response {{ trigger.device_id }}", "response 1234"), ("", "")], +) +async def test_async_handle_sentence_triggers( + hass: HomeAssistant, response_template: str, expected_response: str +) -> None: """Test handling sentence triggers with async_handle_sentence_triggers.""" assert await async_setup_component(hass, "homeassistant", {}) assert await async_setup_component(hass, "conversation", {}) - response_template = "response {{ trigger.device_id }}" assert await async_setup_component( hass, "automation", @@ -260,7 +265,6 @@ async def test_async_handle_sentence_triggers(hass: HomeAssistant) -> None: # Device id will be available in response template device_id = "1234" - expected_response = f"response {device_id}" actual_response = await async_handle_sentence_triggers( hass, ConversationInput( From 837716b69e3082940e89aa1b7aab71c5830c1a65 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 28 Nov 2024 19:42:31 +0100 Subject: [PATCH 0059/1198] Add diagnostics rule to quality_scale hassfest validation (#131859) --- script/hassfest/quality_scale.py | 3 +- .../config_entry_unloading.py | 9 ++-- .../quality_scale_validation/diagnostics.py | 42 +++++++++++++++++++ .../reauthentication_flow.py | 8 ++-- .../reconfiguration_flow.py | 6 +-- 5 files changed, 56 insertions(+), 12 deletions(-) create mode 100644 script/hassfest/quality_scale_validation/diagnostics.py diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 3e8d25c1f32..9d4c236b7b6 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -16,6 +16,7 @@ from .quality_scale_validation import ( RuleValidationProtocol, config_entry_unloading, config_flow, + diagnostics, reauthentication_flow, reconfiguration_flow, ) @@ -69,7 +70,7 @@ ALL_RULES = [ Rule("test-coverage", ScaledQualityScaleTiers.SILVER), # GOLD: [ Rule("devices", ScaledQualityScaleTiers.GOLD), - Rule("diagnostics", ScaledQualityScaleTiers.GOLD), + Rule("diagnostics", ScaledQualityScaleTiers.GOLD, diagnostics), Rule("discovery", ScaledQualityScaleTiers.GOLD), Rule("discovery-update-info", ScaledQualityScaleTiers.GOLD), Rule("docs-data-update", ScaledQualityScaleTiers.GOLD), diff --git 
a/script/hassfest/quality_scale_validation/config_entry_unloading.py b/script/hassfest/quality_scale_validation/config_entry_unloading.py index 63b0117498e..50f42752bf6 100644 --- a/script/hassfest/quality_scale_validation/config_entry_unloading.py +++ b/script/hassfest/quality_scale_validation/config_entry_unloading.py @@ -8,10 +8,11 @@ import ast from script.hassfest.model import Integration -def _has_async_function(module: ast.Module, name: str) -> bool: - """Test if the module defines a function.""" +def _has_unload_entry_function(module: ast.Module) -> bool: + """Test if the module defines `async_unload_entry` function.""" return any( - type(item) is ast.AsyncFunctionDef and item.name == name for item in module.body + type(item) is ast.AsyncFunctionDef and item.name == "async_unload_entry" + for item in module.body ) @@ -21,7 +22,7 @@ def validate(integration: Integration) -> list[str] | None: init_file = integration.path / "__init__.py" init = ast.parse(init_file.read_text()) - if not _has_async_function(init, "async_unload_entry"): + if not _has_unload_entry_function(init): return [ "Integration does not support config entry unloading " "(is missing `async_unload_entry` in __init__.py)" diff --git a/script/hassfest/quality_scale_validation/diagnostics.py b/script/hassfest/quality_scale_validation/diagnostics.py new file mode 100644 index 00000000000..99f067d6500 --- /dev/null +++ b/script/hassfest/quality_scale_validation/diagnostics.py @@ -0,0 +1,42 @@ +"""Enforce that the integration implements diagnostics. + +https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/diagnostics/ +""" + +import ast + +from script.hassfest.model import Integration + +DIAGNOSTICS_FUNCTIONS = { + "async_get_config_entry_diagnostics", + "async_get_device_diagnostics", +} + + +def _has_diagnostics_function(module: ast.Module) -> bool: + """Test if the module defines at least one of diagnostic functions.""" + return any( + type(item) is ast.AsyncFunctionDef and item.name in DIAGNOSTICS_FUNCTIONS + for item in ast.walk(module) + ) + + +def validate(integration: Integration) -> list[str] | None: + """Validate that the integration implements diagnostics.""" + + diagnostics_file = integration.path / "diagnostics.py" + if not diagnostics_file.exists(): + return [ + "Integration does implement diagnostics platform " + "(is missing diagnostics.py)", + ] + + diagnostics = ast.parse(diagnostics_file.read_text()) + + if not _has_diagnostics_function(diagnostics): + return [ + f"Integration is missing one of {DIAGNOSTICS_FUNCTIONS} " + f"in {diagnostics_file}" + ] + + return None diff --git a/script/hassfest/quality_scale_validation/reauthentication_flow.py b/script/hassfest/quality_scale_validation/reauthentication_flow.py index d4bc8ed6e96..311f8a2429d 100644 --- a/script/hassfest/quality_scale_validation/reauthentication_flow.py +++ b/script/hassfest/quality_scale_validation/reauthentication_flow.py @@ -8,10 +8,10 @@ import ast from script.hassfest.model import Integration -def _has_async_function(module: ast.Module, name: str) -> bool: - """Test if the module defines a function.""" +def _has_step_reauth_function(module: ast.Module) -> bool: + """Test if the module defines `async_step_reauth` function.""" return any( - type(item) is ast.AsyncFunctionDef and item.name == name + type(item) is ast.AsyncFunctionDef and item.name == "async_step_reauth" for item in ast.walk(module) ) @@ -22,7 +22,7 @@ def validate(integration: Integration) -> list[str] | None: config_flow_file = integration.path 
/ "config_flow.py" config_flow = ast.parse(config_flow_file.read_text()) - if not _has_async_function(config_flow, "async_step_reauth"): + if not _has_step_reauth_function(config_flow): return [ "Integration does not support a reauthentication flow " f"(is missing `async_step_reauth` in {config_flow_file})" diff --git a/script/hassfest/quality_scale_validation/reconfiguration_flow.py b/script/hassfest/quality_scale_validation/reconfiguration_flow.py index 94547e95625..de3b5dcba62 100644 --- a/script/hassfest/quality_scale_validation/reconfiguration_flow.py +++ b/script/hassfest/quality_scale_validation/reconfiguration_flow.py @@ -8,10 +8,10 @@ import ast from script.hassfest.model import Integration -def _has_async_function(module: ast.Module, name: str) -> bool: +def _has_step_reconfigure_function(module: ast.Module) -> bool: """Test if the module defines a function.""" return any( - type(item) is ast.AsyncFunctionDef and item.name == name + type(item) is ast.AsyncFunctionDef and item.name == "async_step_reconfigure" for item in ast.walk(module) ) @@ -22,7 +22,7 @@ def validate(integration: Integration) -> list[str] | None: config_flow_file = integration.path / "config_flow.py" config_flow = ast.parse(config_flow_file.read_text()) - if not _has_async_function(config_flow, "async_step_reconfigure"): + if not _has_step_reconfigure_function(config_flow): return [ "Integration does not support a reconfiguration flow " f"(is missing `async_step_reconfigure` in {config_flow_file})" From 0b36a6d7f3152ebf37a5e2c3c17eb55eb65e2ac1 Mon Sep 17 00:00:00 2001 From: Madhan Date: Thu, 28 Nov 2024 18:48:38 +0000 Subject: [PATCH 0060/1198] Bump PyMetEireann to 2024.11.0 (#131860) Co-authored-by: Joostlek --- homeassistant/components/met_eireann/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/met_eireann/manifest.json b/homeassistant/components/met_eireann/manifest.json index 72afc6977dd..7b913df4d3c 100644 --- a/homeassistant/components/met_eireann/manifest.json +++ b/homeassistant/components/met_eireann/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/met_eireann", "iot_class": "cloud_polling", "loggers": ["meteireann"], - "requirements": ["PyMetEireann==2021.8.0"] + "requirements": ["PyMetEireann==2024.11.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 535a5bbc34b..a089fbc8b07 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -60,7 +60,7 @@ PyFronius==0.7.3 PyLoadAPI==1.3.2 # homeassistant.components.met_eireann -PyMetEireann==2021.8.0 +PyMetEireann==2024.11.0 # homeassistant.components.met # homeassistant.components.norway_air diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 077847d260b..3a2cfb9d620 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -57,7 +57,7 @@ PyFronius==0.7.3 PyLoadAPI==1.3.2 # homeassistant.components.met_eireann -PyMetEireann==2021.8.0 +PyMetEireann==2024.11.0 # homeassistant.components.met # homeassistant.components.norway_air From 8feb6c7e061af9dedbfb6c8e1b340bd2ac855ddc Mon Sep 17 00:00:00 2001 From: rd-blue <170836573+rd-blue@users.noreply.github.com> Date: Thu, 28 Nov 2024 19:58:38 +0100 Subject: [PATCH 0061/1198] Correction of prices update time in Tibber integration (with CLA now) (#131861) correction of prices update time --- homeassistant/components/tibber/sensor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/homeassistant/components/tibber/sensor.py b/homeassistant/components/tibber/sensor.py index 125dc8eae6f..c1ec7bf2a9e 100644 --- a/homeassistant/components/tibber/sensor.py +++ b/homeassistant/components/tibber/sensor.py @@ -397,7 +397,7 @@ class TibberSensorElPrice(TibberSensor): if ( not self._tibber_home.last_data_timestamp or (self._tibber_home.last_data_timestamp - now).total_seconds() - < 11 * 3600 + self._spread_load_constant + < 10 * 3600 - self._spread_load_constant or not self.available ): _LOGGER.debug("Asking for new data") From 4d32fe97c34c7242b55cf0fb03ddee0b0757412c Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Thu, 28 Nov 2024 11:45:27 -0800 Subject: [PATCH 0062/1198] Use ConfigEntry.runtime_data in Nest (#131871) --- homeassistant/components/nest/__init__.py | 43 ++++++++------------ homeassistant/components/nest/camera.py | 11 ++--- homeassistant/components/nest/climate.py | 12 ++---- homeassistant/components/nest/const.py | 2 - homeassistant/components/nest/device_info.py | 17 ++++---- homeassistant/components/nest/diagnostics.py | 40 +++++------------- homeassistant/components/nest/event.py | 12 ++---- homeassistant/components/nest/sensor.py | 11 ++--- homeassistant/components/nest/types.py | 19 +++++++++ 9 files changed, 66 insertions(+), 101 deletions(-) create mode 100644 homeassistant/components/nest/types.py diff --git a/homeassistant/components/nest/__init__.py b/homeassistant/components/nest/__init__.py index e89969cbe16..0bd2891914f 100644 --- a/homeassistant/components/nest/__init__.py +++ b/homeassistant/components/nest/__init__.py @@ -59,9 +59,7 @@ from .const import ( CONF_SUBSCRIBER_ID, CONF_SUBSCRIBER_ID_IMPORTED, CONF_SUBSCRIPTION_NAME, - DATA_DEVICE_MANAGER, DATA_SDM, - DATA_SUBSCRIBER, DOMAIN, ) from .events import EVENT_NAME_MAP, NEST_EVENT @@ -72,6 +70,7 @@ from .media_source import ( async_get_media_source_devices, async_get_transcoder, ) +from .types import NestConfigEntry, NestData _LOGGER = logging.getLogger(__name__) @@ -113,11 +112,8 @@ THUMBNAIL_SIZE_PX = 175 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up Nest components with dispatch between old/new flows.""" - hass.data[DOMAIN] = {} - hass.http.register_view(NestEventMediaView(hass)) hass.http.register_view(NestEventMediaThumbnailView(hass)) - return True @@ -128,12 +124,12 @@ class SignalUpdateCallback: self, hass: HomeAssistant, config_reload_cb: Callable[[], Awaitable[None]], - config_entry_id: str, + config_entry: NestConfigEntry, ) -> None: """Initialize EventCallback.""" self._hass = hass self._config_reload_cb = config_reload_cb - self._config_entry_id = config_entry_id + self._config_entry = config_entry async def async_handle_event(self, event_message: EventMessage) -> None: """Process an incoming EventMessage.""" @@ -181,17 +177,17 @@ class SignalUpdateCallback: message["zones"] = image_event.zones self._hass.bus.async_fire(NEST_EVENT, message) - def _supported_traits(self, device_id: str) -> list[TraitType]: - if not ( - device_manager := self._hass.data[DOMAIN] - .get(self._config_entry_id, {}) - .get(DATA_DEVICE_MANAGER) - ) or not (device := device_manager.devices.get(device_id)): + def _supported_traits(self, device_id: str) -> list[str]: + if ( + not self._config_entry.runtime_data + or not (device_manager := self._config_entry.runtime_data.device_manager) + or not (device := device_manager.devices.get(device_id)) + ): return [] return list(device.traits) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: 
+async def async_setup_entry(hass: HomeAssistant, entry: NestConfigEntry) -> bool: """Set up Nest from a config entry with dispatch between old/new flows.""" if DATA_SDM not in entry.data: hass.async_create_task(hass.config_entries.async_remove(entry.entry_id)) @@ -215,7 +211,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_config_reload() -> None: await hass.config_entries.async_reload(entry.entry_id) - update_callback = SignalUpdateCallback(hass, async_config_reload, entry.entry_id) + update_callback = SignalUpdateCallback(hass, async_config_reload, entry) subscriber.set_update_callback(update_callback.async_handle_event) try: await subscriber.start_async() @@ -245,11 +241,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry.async_on_unload( hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_hass_stop) ) - - hass.data[DOMAIN][entry.entry_id] = { - DATA_SUBSCRIBER: subscriber, - DATA_DEVICE_MANAGER: device_manager, - } + entry.runtime_data = NestData( + subscriber=subscriber, + device_manager=device_manager, + ) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -262,13 +257,9 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: # Legacy API return True _LOGGER.debug("Stopping nest subscriber") - subscriber = hass.data[DOMAIN][entry.entry_id][DATA_SUBSCRIBER] + subscriber = entry.runtime_data.subscriber subscriber.stop_async() - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: diff --git a/homeassistant/components/nest/camera.py b/homeassistant/components/nest/camera.py index b7e0f210741..df02f17444f 100644 --- a/homeassistant/components/nest/camera.py +++ b/homeassistant/components/nest/camera.py @@ -17,7 +17,6 @@ from google_nest_sdm.camera_traits import ( WebRtcStream, ) from google_nest_sdm.device import Device -from google_nest_sdm.device_manager import DeviceManager from google_nest_sdm.exceptions import ApiException from webrtc_models import RTCIceCandidateInit @@ -29,15 +28,14 @@ from homeassistant.components.camera import ( WebRTCSendMessage, ) from homeassistant.components.stream import CONF_EXTRA_PART_WAIT_TIME -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_track_point_in_utc_time from homeassistant.util.dt import utcnow -from .const import DATA_DEVICE_MANAGER, DOMAIN from .device_info import NestDeviceInfo +from .types import NestConfigEntry _LOGGER = logging.getLogger(__name__) @@ -53,15 +51,12 @@ BACKOFF_MULTIPLIER = 1.5 async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, entry: NestConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the cameras.""" - device_manager: DeviceManager = hass.data[DOMAIN][entry.entry_id][ - DATA_DEVICE_MANAGER - ] entities: list[NestCameraBaseEntity] = [] - for device in device_manager.devices.values(): + for device in entry.runtime_data.device_manager.devices.values(): if (live_stream := 
device.traits.get(CameraLiveStreamTrait.NAME)) is None: continue if StreamingProtocol.WEB_RTC in live_stream.supported_protocols: diff --git a/homeassistant/components/nest/climate.py b/homeassistant/components/nest/climate.py index 03fb641d0e5..1e2727bfab7 100644 --- a/homeassistant/components/nest/climate.py +++ b/homeassistant/components/nest/climate.py @@ -5,7 +5,6 @@ from __future__ import annotations from typing import Any, cast from google_nest_sdm.device import Device -from google_nest_sdm.device_manager import DeviceManager from google_nest_sdm.device_traits import FanTrait, TemperatureTrait from google_nest_sdm.exceptions import ApiException from google_nest_sdm.thermostat_traits import ( @@ -28,14 +27,13 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DATA_DEVICE_MANAGER, DOMAIN from .device_info import NestDeviceInfo +from .types import NestConfigEntry # Mapping for sdm.devices.traits.ThermostatMode mode field THERMOSTAT_MODE_MAP: dict[str, HVACMode] = { @@ -78,17 +76,13 @@ MIN_TEMP_RANGE = 1.66667 async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, entry: NestConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the client entities.""" - device_manager: DeviceManager = hass.data[DOMAIN][entry.entry_id][ - DATA_DEVICE_MANAGER - ] - async_add_entities( ThermostatEntity(device) - for device in device_manager.devices.values() + for device in entry.runtime_data.device_manager.devices.values() if ThermostatHvacTrait.NAME in device.traits ) diff --git a/homeassistant/components/nest/const.py b/homeassistant/components/nest/const.py index 0a828dcbf78..9950d1d5c2a 100644 --- a/homeassistant/components/nest/const.py +++ b/homeassistant/components/nest/const.py @@ -2,8 +2,6 @@ DOMAIN = "nest" DATA_SDM = "sdm" -DATA_SUBSCRIBER = "subscriber" -DATA_DEVICE_MANAGER = "device_manager" WEB_AUTH_DOMAIN = DOMAIN INSTALLED_AUTH_DOMAIN = f"{DOMAIN}.installed" diff --git a/homeassistant/components/nest/device_info.py b/homeassistant/components/nest/device_info.py index 33793fe836b..facd429b139 100644 --- a/homeassistant/components/nest/device_info.py +++ b/homeassistant/components/nest/device_info.py @@ -7,11 +7,12 @@ from collections.abc import Mapping from google_nest_sdm.device import Device from google_nest_sdm.device_traits import ConnectivityTrait, InfoTrait +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import device_registry as dr from homeassistant.helpers.device_registry import DeviceInfo -from .const import CONNECTIVITY_TRAIT_OFFLINE, DATA_DEVICE_MANAGER, DOMAIN +from .const import CONNECTIVITY_TRAIT_OFFLINE, DOMAIN DEVICE_TYPE_MAP: dict[str, str] = { "sdm.devices.types.CAMERA": "Camera", @@ -81,14 +82,12 @@ class NestDeviceInfo: @callback def async_nest_devices(hass: HomeAssistant) -> Mapping[str, Device]: """Return a mapping of all nest devices for all config entries.""" - devices = {} - for entry_id in hass.data[DOMAIN]: - if not (device_manager := hass.data[DOMAIN][entry_id].get(DATA_DEVICE_MANAGER)): - continue - devices.update( - {device.name: device for 
device in device_manager.devices.values()} - ) - return devices + return { + device.name: device + for config_entry in hass.config_entries.async_entries(DOMAIN) + if config_entry.state == ConfigEntryState.LOADED + for device in config_entry.runtime_data.device_manager.devices.values() + } @callback diff --git a/homeassistant/components/nest/diagnostics.py b/homeassistant/components/nest/diagnostics.py index 57ce4291cc6..345e15b0593 100644 --- a/homeassistant/components/nest/diagnostics.py +++ b/homeassistant/components/nest/diagnostics.py @@ -5,46 +5,26 @@ from __future__ import annotations from typing import Any from google_nest_sdm import diagnostics -from google_nest_sdm.device import Device -from google_nest_sdm.device_manager import DeviceManager from google_nest_sdm.device_traits import InfoTrait from homeassistant.components.camera import diagnostics as camera_diagnostics -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntry -from .const import DATA_DEVICE_MANAGER, DATA_SDM, DOMAIN +from .types import NestConfigEntry REDACT_DEVICE_TRAITS = {InfoTrait.NAME} -@callback -def _async_get_nest_devices( - hass: HomeAssistant, config_entry: ConfigEntry -) -> dict[str, Device]: - """Return dict of available devices.""" - if DATA_SDM not in config_entry.data: - return {} - - if ( - config_entry.entry_id not in hass.data[DOMAIN] - or DATA_DEVICE_MANAGER not in hass.data[DOMAIN][config_entry.entry_id] - ): - return {} - - device_manager: DeviceManager = hass.data[DOMAIN][config_entry.entry_id][ - DATA_DEVICE_MANAGER - ] - return device_manager.devices - - async def async_get_config_entry_diagnostics( - hass: HomeAssistant, config_entry: ConfigEntry + hass: HomeAssistant, config_entry: NestConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - nest_devices = _async_get_nest_devices(hass, config_entry) - if not nest_devices: + if ( + not hasattr(config_entry, "runtime_data") + or not config_entry.runtime_data + or not (nest_devices := config_entry.runtime_data.device_manager.devices) + ): return {} data: dict[str, Any] = { **diagnostics.get_diagnostics(), @@ -62,11 +42,11 @@ async def async_get_config_entry_diagnostics( async def async_get_device_diagnostics( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: NestConfigEntry, device: DeviceEntry, ) -> dict[str, Any]: """Return diagnostics for a device.""" - nest_devices = _async_get_nest_devices(hass, config_entry) + nest_devices = config_entry.runtime_data.device_manager.devices nest_device_id = next(iter(device.identifiers))[1] nest_device = nest_devices.get(nest_device_id) return nest_device.get_diagnostics() if nest_device else {} diff --git a/homeassistant/components/nest/event.py b/homeassistant/components/nest/event.py index a6d70fe86d5..1a2c0317496 100644 --- a/homeassistant/components/nest/event.py +++ b/homeassistant/components/nest/event.py @@ -4,7 +4,6 @@ from dataclasses import dataclass import logging from google_nest_sdm.device import Device -from google_nest_sdm.device_manager import DeviceManager from google_nest_sdm.event import EventMessage, EventType from google_nest_sdm.traits import TraitType @@ -13,11 +12,9 @@ from homeassistant.components.event import ( EventEntity, EventEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from 
homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DATA_DEVICE_MANAGER, DOMAIN from .device_info import NestDeviceInfo from .events import ( EVENT_CAMERA_MOTION, @@ -26,6 +23,7 @@ from .events import ( EVENT_DOORBELL_CHIME, EVENT_NAME_MAP, ) +from .types import NestConfigEntry _LOGGER = logging.getLogger(__name__) @@ -68,16 +66,12 @@ ENTITY_DESCRIPTIONS = [ async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, entry: NestConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the sensors.""" - - device_manager: DeviceManager = hass.data[DOMAIN][entry.entry_id][ - DATA_DEVICE_MANAGER - ] async_add_entities( NestTraitEventEntity(desc, device) - for device in device_manager.devices.values() + for device in entry.runtime_data.device_manager.devices.values() for desc in ENTITY_DESCRIPTIONS if any(trait in device.traits for trait in desc.trait_types) ) diff --git a/homeassistant/components/nest/sensor.py b/homeassistant/components/nest/sensor.py index edd359619fd..02a0e305813 100644 --- a/homeassistant/components/nest/sensor.py +++ b/homeassistant/components/nest/sensor.py @@ -5,7 +5,6 @@ from __future__ import annotations import logging from google_nest_sdm.device import Device -from google_nest_sdm.device_manager import DeviceManager from google_nest_sdm.device_traits import HumidityTrait, TemperatureTrait from homeassistant.components.sensor import ( @@ -13,13 +12,12 @@ from homeassistant.components.sensor import ( SensorEntity, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DATA_DEVICE_MANAGER, DOMAIN from .device_info import NestDeviceInfo +from .types import NestConfigEntry _LOGGER = logging.getLogger(__name__) @@ -33,15 +31,12 @@ DEVICE_TYPE_MAP = { async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, entry: NestConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the sensors.""" - device_manager: DeviceManager = hass.data[DOMAIN][entry.entry_id][ - DATA_DEVICE_MANAGER - ] entities: list[SensorEntity] = [] - for device in device_manager.devices.values(): + for device in entry.runtime_data.device_manager.devices.values(): if TemperatureTrait.NAME in device.traits: entities.append(TemperatureSensor(device)) if HumidityTrait.NAME in device.traits: diff --git a/homeassistant/components/nest/types.py b/homeassistant/components/nest/types.py new file mode 100644 index 00000000000..bd6cd5cd887 --- /dev/null +++ b/homeassistant/components/nest/types.py @@ -0,0 +1,19 @@ +"""Type definitions for Nest.""" + +from dataclasses import dataclass + +from google_nest_sdm.device_manager import DeviceManager +from google_nest_sdm.google_nest_subscriber import GoogleNestSubscriber + +from homeassistant.config_entries import ConfigEntry + + +@dataclass +class NestData: + """Data for the Nest integration.""" + + subscriber: GoogleNestSubscriber + device_manager: DeviceManager + + +type NestConfigEntry = ConfigEntry[NestData] From 1f9ecfe8390e1b3382b5c095d05eb5ca20ca5c9f Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Thu, 28 Nov 2024 20:49:49 +0100 Subject: [PATCH 0063/1198] Remove deprecated sensor constants (#131843) --- 
homeassistant/components/sensor/__init__.py | 19 ---------------- homeassistant/components/sensor/const.py | 25 --------------------- tests/components/sensor/test_init.py | 25 --------------------- 3 files changed, 69 deletions(-) diff --git a/homeassistant/components/sensor/__init__.py b/homeassistant/components/sensor/__init__.py index 3c92506a45e..6b264efdd46 100644 --- a/homeassistant/components/sensor/__init__.py +++ b/homeassistant/components/sensor/__init__.py @@ -8,7 +8,6 @@ from contextlib import suppress from dataclasses import dataclass from datetime import UTC, date, datetime, timedelta from decimal import Decimal, InvalidOperation as DecimalInvalidOperation -from functools import partial import logging from math import ceil, floor, isfinite, log10 from typing import Any, Final, Self, cast, final, override @@ -25,11 +24,6 @@ from homeassistant.const import ( # noqa: F401 from homeassistant.core import HomeAssistant, State, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv, entity_registry as er -from homeassistant.helpers.deprecation import ( - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.entity_platform import EntityPlatform @@ -40,9 +34,6 @@ from homeassistant.util.enum import try_parse_enum from homeassistant.util.hass_dict import HassKey from .const import ( # noqa: F401 - _DEPRECATED_STATE_CLASS_MEASUREMENT, - _DEPRECATED_STATE_CLASS_TOTAL, - _DEPRECATED_STATE_CLASS_TOTAL_INCREASING, ATTR_LAST_RESET, ATTR_OPTIONS, ATTR_STATE_CLASS, @@ -962,13 +953,3 @@ def async_rounded_state(hass: HomeAssistant, entity_id: str, state: State) -> st value = f"{numerical_value:z.{precision}f}" return value - - -# As we import deprecated constants from the const module, we need to add these two functions -# otherwise this module will be logged for using deprecated constants and not the custom component -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/sensor/const.py b/homeassistant/components/sensor/const.py index 87012c3631a..4d0454cbff3 100644 --- a/homeassistant/components/sensor/const.py +++ b/homeassistant/components/sensor/const.py @@ -3,7 +3,6 @@ from __future__ import annotations from enum import StrEnum -from functools import partial from typing import Final import voluptuous as vol @@ -41,12 +40,6 @@ from homeassistant.const import ( UnitOfVolumeFlowRate, UnitOfVolumetricFlux, ) -from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.util.unit_conversion import ( AreaConverter, BaseUnitConverter, @@ -494,17 +487,6 @@ class SensorStateClass(StrEnum): STATE_CLASSES_SCHEMA: Final = vol.All(vol.Lower, vol.Coerce(SensorStateClass)) -# STATE_CLASS* is deprecated as of 2021.12 -# use the SensorStateClass enum instead. 
-_DEPRECATED_STATE_CLASS_MEASUREMENT: Final = DeprecatedConstantEnum( - SensorStateClass.MEASUREMENT, "2025.1" -) -_DEPRECATED_STATE_CLASS_TOTAL: Final = DeprecatedConstantEnum( - SensorStateClass.TOTAL, "2025.1" -) -_DEPRECATED_STATE_CLASS_TOTAL_INCREASING: Final = DeprecatedConstantEnum( - SensorStateClass.TOTAL_INCREASING, "2025.1" -) STATE_CLASSES: Final[list[str]] = [cls.value for cls in SensorStateClass] UNIT_CONVERTERS: dict[SensorDeviceClass | str | None, type[BaseUnitConverter]] = { @@ -683,10 +665,3 @@ DEVICE_CLASS_STATE_CLASSES: dict[SensorDeviceClass, set[SensorStateClass]] = { }, SensorDeviceClass.WIND_SPEED: {SensorStateClass.MEASUREMENT}, } - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/tests/components/sensor/test_init.py b/tests/components/sensor/test_init.py index 6c2d73cb68c..44ad076807c 100644 --- a/tests/components/sensor/test_init.py +++ b/tests/components/sensor/test_init.py @@ -5,7 +5,6 @@ from __future__ import annotations from collections.abc import Generator from datetime import UTC, date, datetime from decimal import Decimal -from types import ModuleType from typing import Any from unittest.mock import patch @@ -60,8 +59,6 @@ from tests.common import ( MockModule, MockPlatform, async_mock_restore_state_shutdown_restart, - help_test_all, - import_and_test_deprecated_constant_enum, mock_config_flow, mock_integration, mock_platform, @@ -2643,28 +2640,6 @@ async def test_entity_category_config_raises_error( assert not hass.states.get("sensor.test") -@pytest.mark.parametrize( - "module", - [sensor, sensor.const], -) -def test_all(module: ModuleType) -> None: - """Test module.__all__ is correctly set.""" - help_test_all(module) - - -@pytest.mark.parametrize(("enum"), list(sensor.SensorStateClass)) -@pytest.mark.parametrize(("module"), [sensor, sensor.const]) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: sensor.SensorStateClass, - module: ModuleType, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, module, enum, "STATE_CLASS_", "2025.1" - ) - - @pytest.mark.parametrize( ("device_class", "native_unit"), [ From 18db16b82ccd10113d620f409652dd1dc4a41193 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Thu, 28 Nov 2024 20:50:53 +0100 Subject: [PATCH 0064/1198] Update frontend to 20241127.1 (#131855) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 3063d3d8440..7bd500f17ea 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20241127.0"] + "requirements": ["home-assistant-frontend==20241127.1"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 691d80f31bf..cb3f51476c8 100644 --- a/homeassistant/package_constraints.txt +++ 
b/homeassistant/package_constraints.txt @@ -34,7 +34,7 @@ habluetooth==3.6.0 hass-nabucasa==0.85.0 hassil==2.0.4 home-assistant-bluetooth==1.13.0 -home-assistant-frontend==20241127.0 +home-assistant-frontend==20241127.1 home-assistant-intents==2024.11.27 httpx==0.27.2 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index a089fbc8b07..0226fa8d924 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1130,7 +1130,7 @@ hole==0.8.0 holidays==0.61 # homeassistant.components.frontend -home-assistant-frontend==20241127.0 +home-assistant-frontend==20241127.1 # homeassistant.components.conversation home-assistant-intents==2024.11.27 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3a2cfb9d620..ac180f8c650 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -956,7 +956,7 @@ hole==0.8.0 holidays==0.61 # homeassistant.components.frontend -home-assistant-frontend==20241127.0 +home-assistant-frontend==20241127.1 # homeassistant.components.conversation home-assistant-intents==2024.11.27 From 889ac1552b3688481181cdd28b6cfe5b4b245e06 Mon Sep 17 00:00:00 2001 From: karwosts <32912880+karwosts@users.noreply.github.com> Date: Thu, 28 Nov 2024 11:51:23 -0800 Subject: [PATCH 0065/1198] Fix flaky test in history stats (#131869) --- tests/components/history_stats/test_sensor.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/components/history_stats/test_sensor.py b/tests/components/history_stats/test_sensor.py index 694c5c20707..d60203676e6 100644 --- a/tests/components/history_stats/test_sensor.py +++ b/tests/components/history_stats/test_sensor.py @@ -459,7 +459,11 @@ async def test_async_on_entire_period( def _fake_states(*args, **kwargs): return { "binary_sensor.test_on_id": [ - ha.State("binary_sensor.test_on_id", "on", last_changed=start_time), + ha.State( + "binary_sensor.test_on_id", + "on", + last_changed=(start_time - timedelta(seconds=10)), + ), ha.State("binary_sensor.test_on_id", "on", last_changed=t0), ha.State("binary_sensor.test_on_id", "on", last_changed=t1), ha.State("binary_sensor.test_on_id", "on", last_changed=t2), From 9db6f0ffc483e7fb3508c04704dc37552127d8b2 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Thu, 28 Nov 2024 20:52:51 +0100 Subject: [PATCH 0066/1198] Only download translation strings we have defined (#131864) --- script/translations/deduplicate.py | 3 +-- script/translations/develop.py | 25 +------------------- script/translations/download.py | 37 +++++++++++++++++++++++++++++- script/translations/util.py | 23 +++++++++++++++++++ 4 files changed, 61 insertions(+), 27 deletions(-) diff --git a/script/translations/deduplicate.py b/script/translations/deduplicate.py index 8cc4cee3b10..f92f90115ce 100644 --- a/script/translations/deduplicate.py +++ b/script/translations/deduplicate.py @@ -7,8 +7,7 @@ from pathlib import Path from homeassistant.const import Platform from . import upload -from .develop import flatten_translations -from .util import get_base_arg_parser, load_json_from_path +from .util import flatten_translations, get_base_arg_parser, load_json_from_path def get_arguments() -> argparse.Namespace: diff --git a/script/translations/develop.py b/script/translations/develop.py index 00465e1bc24..9e3a2ded046 100644 --- a/script/translations/develop.py +++ b/script/translations/develop.py @@ -9,7 +9,7 @@ import sys from . 
import download, upload from .const import INTEGRATIONS_DIR -from .util import get_base_arg_parser +from .util import flatten_translations, get_base_arg_parser def valid_integration(integration): @@ -32,29 +32,6 @@ def get_arguments() -> argparse.Namespace: return parser.parse_args() -def flatten_translations(translations): - """Flatten all translations.""" - stack = [iter(translations.items())] - key_stack = [] - flattened_translations = {} - while stack: - for k, v in stack[-1]: - key_stack.append(k) - if isinstance(v, dict): - stack.append(iter(v.items())) - break - if isinstance(v, str): - common_key = "::".join(key_stack) - flattened_translations[common_key] = v - key_stack.pop() - else: - stack.pop() - if key_stack: - key_stack.pop() - - return flattened_translations - - def substitute_translation_references(integration_strings, flattened_translations): """Recursively processes all translation strings for the integration.""" result = {} diff --git a/script/translations/download.py b/script/translations/download.py index 756de46fb61..3fa7065d058 100755 --- a/script/translations/download.py +++ b/script/translations/download.py @@ -7,10 +7,11 @@ import json from pathlib import Path import re import subprocess +from typing import Any from .const import CLI_2_DOCKER_IMAGE, CORE_PROJECT_ID, INTEGRATIONS_DIR from .error import ExitApp -from .util import get_lokalise_token, load_json_from_path +from .util import flatten_translations, get_lokalise_token, load_json_from_path FILENAME_FORMAT = re.compile(r"strings\.(?P\w+)\.json") DOWNLOAD_DIR = Path("build/translations-download").absolute() @@ -103,7 +104,15 @@ def save_language_translations(lang, translations): f"Skipping {lang} for {component}, as the integration doesn't seem to exist." ) continue + if not ( + Path("homeassistant") / "components" / component / "strings.json" + ).exists(): + print( + f"Skipping {lang} for {component}, as the integration doesn't have a strings.json file." 
+ ) + continue path.parent.mkdir(parents=True, exist_ok=True) + base_translations = pick_keys(component, base_translations) save_json(path, base_translations) if "platform" not in component_translations: @@ -131,6 +140,32 @@ def delete_old_translations(): fil.unlink() +def get_current_keys(component: str) -> dict[str, Any]: + """Get the current keys for a component.""" + strings_path = Path("homeassistant") / "components" / component / "strings.json" + return load_json_from_path(strings_path) + + +def pick_keys(component: str, translations: dict[str, Any]) -> dict[str, Any]: + """Pick the keys that are in the current strings.""" + flat_translations = flatten_translations(translations) + flat_current_keys = flatten_translations(get_current_keys(component)) + flatten_result = {} + for key in flat_current_keys: + if key in flat_translations: + flatten_result[key] = flat_translations[key] + result = {} + for key, value in flatten_result.items(): + parts = key.split("::") + d = result + for part in parts[:-1]: + if part not in d: + d[part] = {} + d = d[part] + d[parts[-1]] = value + return result + + def run(): """Run the script.""" DOWNLOAD_DIR.mkdir(parents=True, exist_ok=True) diff --git a/script/translations/util.py b/script/translations/util.py index 8892bb46b7a..d78b2c4faff 100644 --- a/script/translations/util.py +++ b/script/translations/util.py @@ -66,3 +66,26 @@ def load_json_from_path(path: pathlib.Path) -> Any: return json.loads(path.read_text()) except json.JSONDecodeError as err: raise JSONDecodeErrorWithPath(err.msg, err.doc, err.pos, path) from err + + +def flatten_translations(translations): + """Flatten all translations.""" + stack = [iter(translations.items())] + key_stack = [] + flattened_translations = {} + while stack: + for k, v in stack[-1]: + key_stack.append(k) + if isinstance(v, dict): + stack.append(iter(v.items())) + break + if isinstance(v, str): + common_key = "::".join(key_stack) + flattened_translations[common_key] = v + key_stack.pop() + else: + stack.pop() + if key_stack: + key_stack.pop() + + return flattened_translations From 6dd93253c6b1f98c87ee3e18c524c4d899abe4aa Mon Sep 17 00:00:00 2001 From: Richard Kroegel <42204099+rikroe@users.noreply.github.com> Date: Thu, 28 Nov 2024 21:01:00 +0100 Subject: [PATCH 0067/1198] Add captcha to BMW ConfigFlow (#131351) Co-authored-by: Franck Nijhof --- .../bmw_connected_drive/config_flow.py | 71 ++++++++-- .../components/bmw_connected_drive/const.py | 5 + .../bmw_connected_drive/coordinator.py | 5 - .../bmw_connected_drive/strings.json | 10 ++ .../bmw_connected_drive/__init__.py | 9 +- .../snapshots/test_diagnostics.ambr | 6 +- .../bmw_connected_drive/test_config_flow.py | 121 ++++++++++-------- 7 files changed, 153 insertions(+), 74 deletions(-) diff --git a/homeassistant/components/bmw_connected_drive/config_flow.py b/homeassistant/components/bmw_connected_drive/config_flow.py index 409bfdca6f1..8831895c71e 100644 --- a/homeassistant/components/bmw_connected_drive/config_flow.py +++ b/homeassistant/components/bmw_connected_drive/config_flow.py @@ -27,9 +27,18 @@ from homeassistant.const import CONF_PASSWORD, CONF_REGION, CONF_SOURCE, CONF_US from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig +from homeassistant.util.ssl import get_default_context from . 
import DOMAIN -from .const import CONF_ALLOWED_REGIONS, CONF_GCID, CONF_READ_ONLY, CONF_REFRESH_TOKEN +from .const import ( + CONF_ALLOWED_REGIONS, + CONF_CAPTCHA_REGIONS, + CONF_CAPTCHA_TOKEN, + CONF_CAPTCHA_URL, + CONF_GCID, + CONF_READ_ONLY, + CONF_REFRESH_TOKEN, +) DATA_SCHEMA = vol.Schema( { @@ -41,7 +50,14 @@ DATA_SCHEMA = vol.Schema( translation_key="regions", ) ), - } + }, + extra=vol.REMOVE_EXTRA, +) +CAPTCHA_SCHEMA = vol.Schema( + { + vol.Required(CONF_CAPTCHA_TOKEN): str, + }, + extra=vol.REMOVE_EXTRA, ) @@ -54,6 +70,8 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, data[CONF_USERNAME], data[CONF_PASSWORD], get_region_from_name(data[CONF_REGION]), + hcaptcha_token=data.get(CONF_CAPTCHA_TOKEN), + verify=get_default_context(), ) try: @@ -79,15 +97,17 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + data: dict[str, Any] = {} + _existing_entry_data: Mapping[str, Any] | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - errors: dict[str, str] = {} + errors: dict[str, str] = self.data.pop("errors", {}) - if user_input is not None: + if user_input is not None and not errors: unique_id = f"{user_input[CONF_REGION]}-{user_input[CONF_USERNAME]}" await self.async_set_unique_id(unique_id) @@ -96,22 +116,35 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN): else: self._abort_if_unique_id_configured() + # Store user input for later use + self.data.update(user_input) + + # North America and Rest of World require captcha token + if ( + self.data.get(CONF_REGION) in CONF_CAPTCHA_REGIONS + and CONF_CAPTCHA_TOKEN not in self.data + ): + return await self.async_step_captcha() + info = None try: - info = await validate_input(self.hass, user_input) - entry_data = { - **user_input, - CONF_REFRESH_TOKEN: info.get(CONF_REFRESH_TOKEN), - CONF_GCID: info.get(CONF_GCID), - } + info = await validate_input(self.hass, self.data) except MissingCaptcha: errors["base"] = "missing_captcha" except CannotConnect: errors["base"] = "cannot_connect" except InvalidAuth: errors["base"] = "invalid_auth" + finally: + self.data.pop(CONF_CAPTCHA_TOKEN, None) if info: + entry_data = { + **self.data, + CONF_REFRESH_TOKEN: info.get(CONF_REFRESH_TOKEN), + CONF_GCID: info.get(CONF_GCID), + } + if self.source == SOURCE_REAUTH: return self.async_update_reload_and_abort( self._get_reauth_entry(), data=entry_data @@ -128,7 +161,7 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN): schema = self.add_suggested_values_to_schema( DATA_SCHEMA, - self._existing_entry_data, + self._existing_entry_data or self.data, ) return self.async_show_form(step_id="user", data_schema=schema, errors=errors) @@ -147,6 +180,22 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN): self._existing_entry_data = self._get_reconfigure_entry().data return await self.async_step_user() + async def async_step_captcha( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Show captcha form.""" + if user_input and user_input.get(CONF_CAPTCHA_TOKEN): + self.data[CONF_CAPTCHA_TOKEN] = user_input[CONF_CAPTCHA_TOKEN].strip() + return await self.async_step_user(self.data) + + return self.async_show_form( + step_id="captcha", + data_schema=CAPTCHA_SCHEMA, + description_placeholders={ + "captcha_url": CONF_CAPTCHA_URL.format(region=self.data[CONF_REGION]) + }, + ) + @staticmethod @callback def async_get_options_flow( diff --git a/homeassistant/components/bmw_connected_drive/const.py 
b/homeassistant/components/bmw_connected_drive/const.py index 98d4acbfc91..750289e9d0a 100644 --- a/homeassistant/components/bmw_connected_drive/const.py +++ b/homeassistant/components/bmw_connected_drive/const.py @@ -8,10 +8,15 @@ ATTR_DIRECTION = "direction" ATTR_VIN = "vin" CONF_ALLOWED_REGIONS = ["china", "north_america", "rest_of_world"] +CONF_CAPTCHA_REGIONS = ["north_america", "rest_of_world"] CONF_READ_ONLY = "read_only" CONF_ACCOUNT = "account" CONF_REFRESH_TOKEN = "refresh_token" CONF_GCID = "gcid" +CONF_CAPTCHA_TOKEN = "captcha_token" +CONF_CAPTCHA_URL = ( + "https://bimmer-connected.readthedocs.io/en/stable/captcha/{region}.html" +) DATA_HASS_CONFIG = "hass_config" diff --git a/homeassistant/components/bmw_connected_drive/coordinator.py b/homeassistant/components/bmw_connected_drive/coordinator.py index d38b7ffacc2..4f560d16f9c 100644 --- a/homeassistant/components/bmw_connected_drive/coordinator.py +++ b/homeassistant/components/bmw_connected_drive/coordinator.py @@ -84,11 +84,6 @@ class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]): if self.account.refresh_token != old_refresh_token: self._update_config_entry_refresh_token(self.account.refresh_token) - _LOGGER.debug( - "bimmer_connected: refresh token %s > %s", - old_refresh_token, - self.account.refresh_token, - ) def _update_config_entry_refresh_token(self, refresh_token: str | None) -> None: """Update or delete the refresh_token in the Config Entry.""" diff --git a/homeassistant/components/bmw_connected_drive/strings.json b/homeassistant/components/bmw_connected_drive/strings.json index 0e7a4a32ef4..8078971acd1 100644 --- a/homeassistant/components/bmw_connected_drive/strings.json +++ b/homeassistant/components/bmw_connected_drive/strings.json @@ -7,6 +7,16 @@ "password": "[%key:common::config_flow::data::password%]", "region": "ConnectedDrive Region" } + }, + "captcha": { + "title": "Are you a robot?", + "description": "A captcha is required for BMW login. Visit the external website to complete the challenge and submit the form. Copy the resulting token into the field below.\n\n{captcha_url}\n\nNo data will be exposed outside of your Home Assistant instance.", + "data": { + "captcha_token": "Captcha token" + }, + "data_description": { + "captcha_token": "One-time token retrieved from the captcha challenge." 
+ } } }, "error": { diff --git a/tests/components/bmw_connected_drive/__init__.py b/tests/components/bmw_connected_drive/__init__.py index 4d280a1d0e5..f490b854749 100644 --- a/tests/components/bmw_connected_drive/__init__.py +++ b/tests/components/bmw_connected_drive/__init__.py @@ -9,6 +9,7 @@ import respx from homeassistant import config_entries from homeassistant.components.bmw_connected_drive.const import ( + CONF_CAPTCHA_TOKEN, CONF_GCID, CONF_READ_ONLY, CONF_REFRESH_TOKEN, @@ -24,8 +25,12 @@ FIXTURE_USER_INPUT = { CONF_PASSWORD: "p4ssw0rd", CONF_REGION: "rest_of_world", } -FIXTURE_REFRESH_TOKEN = "SOME_REFRESH_TOKEN" -FIXTURE_GCID = "SOME_GCID" +FIXTURE_CAPTCHA_INPUT = { + CONF_CAPTCHA_TOKEN: "captcha_token", +} +FIXTURE_USER_INPUT_W_CAPTCHA = FIXTURE_USER_INPUT | FIXTURE_CAPTCHA_INPUT +FIXTURE_REFRESH_TOKEN = "another_token_string" +FIXTURE_GCID = "DUMMY" FIXTURE_CONFIG_ENTRY = { "entry_id": "1", diff --git a/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr b/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr index 81ef1220069..b87da22a332 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr @@ -4833,7 +4833,7 @@ }), ]), 'info': dict({ - 'gcid': 'SOME_GCID', + 'gcid': 'DUMMY', 'password': '**REDACTED**', 'refresh_token': '**REDACTED**', 'region': 'rest_of_world', @@ -7202,7 +7202,7 @@ }), ]), 'info': dict({ - 'gcid': 'SOME_GCID', + 'gcid': 'DUMMY', 'password': '**REDACTED**', 'refresh_token': '**REDACTED**', 'region': 'rest_of_world', @@ -8925,7 +8925,7 @@ }), ]), 'info': dict({ - 'gcid': 'SOME_GCID', + 'gcid': 'DUMMY', 'password': '**REDACTED**', 'refresh_token': '**REDACTED**', 'region': 'rest_of_world', diff --git a/tests/components/bmw_connected_drive/test_config_flow.py b/tests/components/bmw_connected_drive/test_config_flow.py index f57f1a304ac..8fa9d9be22b 100644 --- a/tests/components/bmw_connected_drive/test_config_flow.py +++ b/tests/components/bmw_connected_drive/test_config_flow.py @@ -4,17 +4,14 @@ from copy import deepcopy from unittest.mock import patch from bimmer_connected.api.authentication import MyBMWAuthentication -from bimmer_connected.models import ( - MyBMWAPIError, - MyBMWAuthError, - MyBMWCaptchaMissingError, -) +from bimmer_connected.models import MyBMWAPIError, MyBMWAuthError from httpx import RequestError import pytest from homeassistant import config_entries from homeassistant.components.bmw_connected_drive.config_flow import DOMAIN from homeassistant.components.bmw_connected_drive.const import ( + CONF_CAPTCHA_TOKEN, CONF_READ_ONLY, CONF_REFRESH_TOKEN, ) @@ -23,10 +20,12 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from . 
import ( + FIXTURE_CAPTCHA_INPUT, FIXTURE_CONFIG_ENTRY, FIXTURE_GCID, FIXTURE_REFRESH_TOKEN, FIXTURE_USER_INPUT, + FIXTURE_USER_INPUT_W_CAPTCHA, ) from tests.common import MockConfigEntry @@ -61,7 +60,7 @@ async def test_authentication_error(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, - data=FIXTURE_USER_INPUT, + data=deepcopy(FIXTURE_USER_INPUT_W_CAPTCHA), ) assert result["type"] is FlowResultType.FORM @@ -79,7 +78,7 @@ async def test_connection_error(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, - data=FIXTURE_USER_INPUT, + data=deepcopy(FIXTURE_USER_INPUT_W_CAPTCHA), ) assert result["type"] is FlowResultType.FORM @@ -97,7 +96,7 @@ async def test_api_error(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, - data=deepcopy(FIXTURE_USER_INPUT), + data=deepcopy(FIXTURE_USER_INPUT_W_CAPTCHA), ) assert result["type"] is FlowResultType.FORM @@ -105,6 +104,28 @@ async def test_api_error(hass: HomeAssistant) -> None: assert result["errors"] == {"base": "cannot_connect"} +@pytest.mark.usefixtures("bmw_fixture") +async def test_captcha_flow_missing_error(hass: HomeAssistant) -> None: + """Test the external flow with captcha failing once and succeeding the second time.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_USER}, + data=deepcopy(FIXTURE_USER_INPUT), + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "captcha" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_CAPTCHA_TOKEN: " "} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "missing_captcha"} + + async def test_full_user_flow_implementation(hass: HomeAssistant) -> None: """Test registering an integration and finishing flow works.""" with ( @@ -118,14 +139,22 @@ async def test_full_user_flow_implementation(hass: HomeAssistant) -> None: return_value=True, ) as mock_setup_entry, ): - result2 = await hass.config_entries.flow.async_init( + result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data=deepcopy(FIXTURE_USER_INPUT), ) - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == FIXTURE_COMPLETE_ENTRY[CONF_USERNAME] - assert result2["data"] == FIXTURE_COMPLETE_ENTRY + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "captcha" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], FIXTURE_CAPTCHA_INPUT + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == FIXTURE_COMPLETE_ENTRY[CONF_USERNAME] + assert result["data"] == FIXTURE_COMPLETE_ENTRY assert len(mock_setup_entry.mock_calls) == 1 @@ -206,13 +235,20 @@ async def test_reauth(hass: HomeAssistant) -> None: assert suggested_values[CONF_PASSWORD] == wrong_password assert suggested_values[CONF_REGION] == FIXTURE_USER_INPUT[CONF_REGION] - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], FIXTURE_USER_INPUT + result = await hass.config_entries.flow.async_configure( + result["flow_id"], deepcopy(FIXTURE_USER_INPUT) ) await hass.async_block_till_done() - assert result2["type"] is FlowResultType.ABORT - assert 
result2["reason"] == "reauth_successful" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "captcha" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], FIXTURE_CAPTCHA_INPUT + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" assert config_entry.data == FIXTURE_COMPLETE_ENTRY assert len(mock_setup_entry.mock_calls) == 2 @@ -243,13 +279,13 @@ async def test_reauth_unique_id_abort(hass: HomeAssistant) -> None: assert result["step_id"] == "user" assert result["errors"] == {} - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], {**FIXTURE_USER_INPUT, CONF_REGION: "north_america"} ) await hass.async_block_till_done() - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "account_mismatch" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "account_mismatch" assert config_entry.data == config_entry_with_wrong_password["data"] @@ -279,13 +315,20 @@ async def test_reconfigure(hass: HomeAssistant) -> None: assert suggested_values[CONF_PASSWORD] == FIXTURE_USER_INPUT[CONF_PASSWORD] assert suggested_values[CONF_REGION] == FIXTURE_USER_INPUT[CONF_REGION] - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], FIXTURE_USER_INPUT ) await hass.async_block_till_done() - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "reconfigure_successful" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "captcha" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], FIXTURE_CAPTCHA_INPUT + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" assert config_entry.data == FIXTURE_COMPLETE_ENTRY @@ -307,40 +350,12 @@ async def test_reconfigure_unique_id_abort(hass: HomeAssistant) -> None: assert result["step_id"] == "user" assert result["errors"] == {} - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], {**FIXTURE_USER_INPUT, CONF_USERNAME: "somebody@email.com"}, ) await hass.async_block_till_done() - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "account_mismatch" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "account_mismatch" assert config_entry.data == FIXTURE_COMPLETE_ENTRY - - -@pytest.mark.usefixtures("bmw_fixture") -async def test_captcha_flow_not_set(hass: HomeAssistant) -> None: - """Test the external flow with captcha failing once and succeeding the second time.""" - - TEST_REGION = "north_america" - - # Start flow and open form - # Start flow and open form - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - # Add login data - with patch( - "bimmer_connected.api.authentication.MyBMWAuthentication._login_row_na", - side_effect=MyBMWCaptchaMissingError( - "Missing hCaptcha token for North America login" - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={**FIXTURE_USER_INPUT, CONF_REGION: TEST_REGION}, - ) - assert result["errors"]["base"] == "missing_captcha" From 8b467268df7ddb8bc581babe669cd5a2bf473fad Mon Sep 
17 00:00:00 2001 From: IceBotYT <34712694+IceBotYT@users.noreply.github.com> Date: Thu, 28 Nov 2024 15:09:01 -0500 Subject: [PATCH 0068/1198] Add data descriptions to Nice G.O. config flow (#131865) * Add data descriptions to Nice G.O. config flow * Reference other strings instead --- homeassistant/components/nice_go/strings.json | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/homeassistant/components/nice_go/strings.json b/homeassistant/components/nice_go/strings.json index 07dabf7d39f..224996e6408 100644 --- a/homeassistant/components/nice_go/strings.json +++ b/homeassistant/components/nice_go/strings.json @@ -6,12 +6,20 @@ "data": { "email": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "email": "[%key:component::nice_go::config::step::user::data_description::email%]", + "password": "[%key:component::nice_go::config::step::user::data_description::password%]" } }, "user": { "data": { "email": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "email": "The email address used to log in to the Nice G.O. app", + "password": "The password used to log in to the Nice G.O. app" } } }, From d596b4169dc909b709fa1047f0a7c866a1e6011c Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 28 Nov 2024 22:05:34 +0100 Subject: [PATCH 0069/1198] Add strict_typing rule to quality_scale hassfest validation (#131877) * Add strict_typing rule to quality_scale hassfest validation * Add acaia to .strict-typing --- .strict-typing | 1 + mypy.ini | 10 ++++++ script/hassfest/quality_scale.py | 3 +- .../quality_scale_validation/strict_typing.py | 35 +++++++++++++++++++ 4 files changed, 48 insertions(+), 1 deletion(-) create mode 100644 script/hassfest/quality_scale_validation/strict_typing.py diff --git a/.strict-typing b/.strict-typing index f1383fa3528..ed698c26ea0 100644 --- a/.strict-typing +++ b/.strict-typing @@ -41,6 +41,7 @@ homeassistant.util.unit_system # --- Add components below this line --- homeassistant.components homeassistant.components.abode.* +homeassistant.components.acaia.* homeassistant.components.accuweather.* homeassistant.components.acer_projector.* homeassistant.components.acmeda.* diff --git a/mypy.ini b/mypy.ini index aa9a2b81095..22e85244843 100644 --- a/mypy.ini +++ b/mypy.ini @@ -165,6 +165,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.acaia.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.accuweather.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 9d4c236b7b6..1a665df19f5 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -19,6 +19,7 @@ from .quality_scale_validation import ( diagnostics, reauthentication_flow, reconfiguration_flow, + strict_typing, ) QUALITY_SCALE_TIERS = {value.name.lower(): value for value in ScaledQualityScaleTiers} @@ -93,7 +94,7 @@ ALL_RULES = [ # PLATINUM Rule("async-dependency", ScaledQualityScaleTiers.PLATINUM), Rule("inject-websession", ScaledQualityScaleTiers.PLATINUM), - Rule("strict-typing", 
ScaledQualityScaleTiers.PLATINUM), + Rule("strict-typing", ScaledQualityScaleTiers.PLATINUM, strict_typing), ] SCALE_RULES = { diff --git a/script/hassfest/quality_scale_validation/strict_typing.py b/script/hassfest/quality_scale_validation/strict_typing.py new file mode 100644 index 00000000000..285746a9eb6 --- /dev/null +++ b/script/hassfest/quality_scale_validation/strict_typing.py @@ -0,0 +1,35 @@ +"""Enforce that the integration has strict typing enabled. + +https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/strict-typing/ +""" + +from functools import lru_cache +from pathlib import Path +import re + +from script.hassfest.model import Integration + +_STRICT_TYPING_FILE = Path(".strict-typing") +_COMPONENT_REGEX = r"homeassistant.components.([^.]+).*" + + +@lru_cache +def _strict_typing_components() -> set[str]: + return set( + { + match.group(1) + for line in _STRICT_TYPING_FILE.read_text(encoding="utf-8").splitlines() + if (match := re.match(_COMPONENT_REGEX, line)) is not None + } + ) + + +def validate(integration: Integration) -> list[str] | None: + """Validate that the integration has strict typing enabled.""" + + if integration.domain not in _strict_typing_components(): + return [ + "Integration does not have strict typing enabled " + "(is missing from .strict-typing)" + ] + return None From a68cf21179758226f3be3eff3b5c2424dde1d36b Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Fri, 29 Nov 2024 00:37:26 +0100 Subject: [PATCH 0070/1198] Remove deprecated data entry flow constants (#131800) * Remove deprecated data entry flow constants * Fix * Fix * Fix * Fix --------- Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- .../yamaha_musiccast/config_flow.py | 15 +++----- homeassistant/data_entry_flow.py | 36 +------------------ .../azure_data_explorer/test_config_flow.py | 8 ++--- tests/test_data_entry_flow.py | 22 +----------- 4 files changed, 11 insertions(+), 70 deletions(-) diff --git a/homeassistant/components/yamaha_musiccast/config_flow.py b/homeassistant/components/yamaha_musiccast/config_flow.py index a074f34c782..d6ad54c4a3d 100644 --- a/homeassistant/components/yamaha_musiccast/config_flow.py +++ b/homeassistant/components/yamaha_musiccast/config_flow.py @@ -10,9 +10,8 @@ from aiohttp import ClientConnectorError from aiomusiccast import MusicCastConnectionException, MusicCastDevice import voluptuous as vol -from homeassistant import data_entry_flow from homeassistant.components import ssdp -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -33,7 +32,7 @@ class MusicCastFlowHandler(ConfigFlow, domain=DOMAIN): async def async_step_user( self, user_input: dict[str, Any] | None = None - ) -> data_entry_flow.ConfigFlowResult: + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" # Request user input, unless we are preparing discovery flow if user_input is None: @@ -73,9 +72,7 @@ class MusicCastFlowHandler(ConfigFlow, domain=DOMAIN): return self._show_setup_form(errors) - def _show_setup_form( - self, errors: dict | None = None - ) -> data_entry_flow.ConfigFlowResult: + def _show_setup_form(self, errors: dict | None = None) -> ConfigFlowResult: """Show the setup form to the user.""" return self.async_show_form( step_id="user", @@ -85,7 +82,7 @@ class MusicCastFlowHandler(ConfigFlow, domain=DOMAIN): async def async_step_ssdp( self, 
discovery_info: ssdp.SsdpServiceInfo - ) -> data_entry_flow.ConfigFlowResult: + ) -> ConfigFlowResult: """Handle ssdp discoveries.""" if not await MusicCastDevice.check_yamaha_ssdp( discovery_info.ssdp_location, async_get_clientsession(self.hass) @@ -117,9 +114,7 @@ class MusicCastFlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_confirm() - async def async_step_confirm( - self, user_input=None - ) -> data_entry_flow.ConfigFlowResult: + async def async_step_confirm(self, user_input=None) -> ConfigFlowResult: """Allow the user to confirm adding the device.""" if user_input is not None: return self.async_create_entry( diff --git a/homeassistant/data_entry_flow.py b/homeassistant/data_entry_flow.py index 338b5f3992f..6df77443e7e 100644 --- a/homeassistant/data_entry_flow.py +++ b/homeassistant/data_entry_flow.py @@ -10,7 +10,6 @@ from contextlib import suppress import copy from dataclasses import dataclass from enum import StrEnum -from functools import partial import logging from types import MappingProxyType from typing import Any, Generic, Required, TypedDict, cast @@ -20,12 +19,6 @@ import voluptuous as vol from .core import HomeAssistant, callback from .exceptions import HomeAssistantError -from .helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from .helpers.frame import ReportBehavior, report_usage from .loader import async_suggest_report_issue from .util import uuid as uuid_util @@ -46,26 +39,6 @@ class FlowResultType(StrEnum): MENU = "menu" -# RESULT_TYPE_* is deprecated, to be removed in 2025.1 -_DEPRECATED_RESULT_TYPE_FORM = DeprecatedConstantEnum(FlowResultType.FORM, "2025.1") -_DEPRECATED_RESULT_TYPE_CREATE_ENTRY = DeprecatedConstantEnum( - FlowResultType.CREATE_ENTRY, "2025.1" -) -_DEPRECATED_RESULT_TYPE_ABORT = DeprecatedConstantEnum(FlowResultType.ABORT, "2025.1") -_DEPRECATED_RESULT_TYPE_EXTERNAL_STEP = DeprecatedConstantEnum( - FlowResultType.EXTERNAL_STEP, "2025.1" -) -_DEPRECATED_RESULT_TYPE_EXTERNAL_STEP_DONE = DeprecatedConstantEnum( - FlowResultType.EXTERNAL_STEP_DONE, "2025.1" -) -_DEPRECATED_RESULT_TYPE_SHOW_PROGRESS = DeprecatedConstantEnum( - FlowResultType.SHOW_PROGRESS, "2025.1" -) -_DEPRECATED_RESULT_TYPE_SHOW_PROGRESS_DONE = DeprecatedConstantEnum( - FlowResultType.SHOW_PROGRESS_DONE, "2025.1" -) -_DEPRECATED_RESULT_TYPE_MENU = DeprecatedConstantEnum(FlowResultType.MENU, "2025.1") - # Event that is fired when a flow is progressed via external or progress source. 
EVENT_DATA_ENTRY_FLOW_PROGRESSED = "data_entry_flow_progressed" @@ -126,6 +99,7 @@ class InvalidData(vol.Invalid): schema_errors: dict[str, Any], **kwargs: Any, ) -> None: + """Initialize an invalid data exception.""" super().__init__(message, path, error_message, **kwargs) self.schema_errors = schema_errors @@ -929,11 +903,3 @@ class section: def __call__(self, value: Any) -> Any: """Validate input.""" return self.schema(value) - - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/tests/components/azure_data_explorer/test_config_flow.py b/tests/components/azure_data_explorer/test_config_flow.py index a700299be33..13ff6a8bb13 100644 --- a/tests/components/azure_data_explorer/test_config_flow.py +++ b/tests/components/azure_data_explorer/test_config_flow.py @@ -25,7 +25,7 @@ async def test_config_flow(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> BASE_CONFIG.copy(), ) - assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY + assert result2["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY assert result2["title"] == "cluster.region.kusto.windows.net" mock_setup_entry.assert_called_once() @@ -59,12 +59,12 @@ async def test_config_flow_errors( result["flow_id"], BASE_CONFIG.copy(), ) - assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result2["type"] == data_entry_flow.FlowResultType.FORM assert result2["errors"] == {"base": expected} await hass.async_block_till_done() - assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result2["type"] == data_entry_flow.FlowResultType.FORM # Retest error handling if error is corrected and connection is successful @@ -77,4 +77,4 @@ async def test_config_flow_errors( await hass.async_block_till_done() - assert result3["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY + assert result3["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY diff --git a/tests/test_data_entry_flow.py b/tests/test_data_entry_flow.py index 32020ac0d76..74a55cb4989 100644 --- a/tests/test_data_entry_flow.py +++ b/tests/test_data_entry_flow.py @@ -13,11 +13,7 @@ from homeassistant.core import Event, HomeAssistant, callback from homeassistant.helpers import config_validation as cv from homeassistant.util.decorator import Registry -from .common import ( - async_capture_events, - help_test_all, - import_and_test_deprecated_constant_enum, -) +from .common import async_capture_events class MockFlowManager(data_entry_flow.FlowManager): @@ -985,22 +981,6 @@ async def test_find_flows_by_init_data_type(manager: MockFlowManager) -> None: assert len(manager.async_progress()) == 0 -def test_all() -> None: - """Test module.__all__ is correctly set.""" - help_test_all(data_entry_flow) - - -@pytest.mark.parametrize(("enum"), list(data_entry_flow.FlowResultType)) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: data_entry_flow.FlowResultType, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, data_entry_flow, enum, "RESULT_TYPE_", "2025.1" - ) - - def test_section_in_serializer() -> None: """Test section with custom_serializer.""" assert cv.custom_serializer( From 5c8fb5ec2ce4d6f044cd42b3bdbc862330e82ef6 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Fri, 29 Nov 2024 00:38:05 +0100 Subject: [PATCH 
0071/1198] Remove deprecated climate constants (#131798) * Remove deprecated climate constants * Fix * Fix * Fix --------- Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- homeassistant/components/climate/__init__.py | 29 --------- homeassistant/components/climate/const.py | 57 ----------------- .../components/climate/test_device_trigger.py | 2 +- tests/components/climate/test_init.py | 62 ------------------- 4 files changed, 1 insertion(+), 149 deletions(-) diff --git a/homeassistant/components/climate/__init__.py b/homeassistant/components/climate/__init__.py index de9c90c81b8..045003dcd0f 100644 --- a/homeassistant/components/climate/__init__.py +++ b/homeassistant/components/climate/__init__.py @@ -26,11 +26,6 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import config_validation as cv, issue_registry as ir -from homeassistant.helpers.deprecation import ( - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.entity_platform import EntityPlatform @@ -41,20 +36,6 @@ from homeassistant.util.hass_dict import HassKey from homeassistant.util.unit_conversion import TemperatureConverter from .const import ( # noqa: F401 - _DEPRECATED_HVAC_MODE_AUTO, - _DEPRECATED_HVAC_MODE_COOL, - _DEPRECATED_HVAC_MODE_DRY, - _DEPRECATED_HVAC_MODE_FAN_ONLY, - _DEPRECATED_HVAC_MODE_HEAT, - _DEPRECATED_HVAC_MODE_HEAT_COOL, - _DEPRECATED_HVAC_MODE_OFF, - _DEPRECATED_SUPPORT_AUX_HEAT, - _DEPRECATED_SUPPORT_FAN_MODE, - _DEPRECATED_SUPPORT_PRESET_MODE, - _DEPRECATED_SUPPORT_SWING_MODE, - _DEPRECATED_SUPPORT_TARGET_HUMIDITY, - _DEPRECATED_SUPPORT_TARGET_TEMPERATURE, - _DEPRECATED_SUPPORT_TARGET_TEMPERATURE_RANGE, ATTR_AUX_HEAT, ATTR_CURRENT_HUMIDITY, ATTR_CURRENT_TEMPERATURE, @@ -1082,13 +1063,3 @@ async def async_service_temperature_set( kwargs[value] = temp await entity.async_set_temperature(**kwargs) - - -# As we import deprecated constants from the const module, we need to add these two functions -# otherwise this module will be logged for using deprecated constants and not the custom component -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = ft.partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = ft.partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/climate/const.py b/homeassistant/components/climate/const.py index b22d5df93ba..111401a2251 100644 --- a/homeassistant/components/climate/const.py +++ b/homeassistant/components/climate/const.py @@ -1,14 +1,6 @@ """Provides the constants needed for component.""" from enum import IntFlag, StrEnum -from functools import partial - -from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) class HVACMode(StrEnum): @@ -37,15 +29,6 @@ class HVACMode(StrEnum): FAN_ONLY = "fan_only" -# These HVAC_MODE_* constants are deprecated as of Home Assistant 2022.5. -# Please use the HVACMode enum instead. 
-_DEPRECATED_HVAC_MODE_OFF = DeprecatedConstantEnum(HVACMode.OFF, "2025.1") -_DEPRECATED_HVAC_MODE_HEAT = DeprecatedConstantEnum(HVACMode.HEAT, "2025.1") -_DEPRECATED_HVAC_MODE_COOL = DeprecatedConstantEnum(HVACMode.COOL, "2025.1") -_DEPRECATED_HVAC_MODE_HEAT_COOL = DeprecatedConstantEnum(HVACMode.HEAT_COOL, "2025.1") -_DEPRECATED_HVAC_MODE_AUTO = DeprecatedConstantEnum(HVACMode.AUTO, "2025.1") -_DEPRECATED_HVAC_MODE_DRY = DeprecatedConstantEnum(HVACMode.DRY, "2025.1") -_DEPRECATED_HVAC_MODE_FAN_ONLY = DeprecatedConstantEnum(HVACMode.FAN_ONLY, "2025.1") HVAC_MODES = [cls.value for cls in HVACMode] # No preset is active @@ -110,14 +93,6 @@ class HVACAction(StrEnum): PREHEATING = "preheating" -# These CURRENT_HVAC_* constants are deprecated as of Home Assistant 2022.5. -# Please use the HVACAction enum instead. -_DEPRECATED_CURRENT_HVAC_OFF = DeprecatedConstantEnum(HVACAction.OFF, "2025.1") -_DEPRECATED_CURRENT_HVAC_HEAT = DeprecatedConstantEnum(HVACAction.HEATING, "2025.1") -_DEPRECATED_CURRENT_HVAC_COOL = DeprecatedConstantEnum(HVACAction.COOLING, "2025.1") -_DEPRECATED_CURRENT_HVAC_DRY = DeprecatedConstantEnum(HVACAction.DRYING, "2025.1") -_DEPRECATED_CURRENT_HVAC_IDLE = DeprecatedConstantEnum(HVACAction.IDLE, "2025.1") -_DEPRECATED_CURRENT_HVAC_FAN = DeprecatedConstantEnum(HVACAction.FAN, "2025.1") CURRENT_HVAC_ACTIONS = [cls.value for cls in HVACAction] @@ -176,35 +151,3 @@ class ClimateEntityFeature(IntFlag): TURN_OFF = 128 TURN_ON = 256 SWING_HORIZONTAL_MODE = 512 - - -# These SUPPORT_* constants are deprecated as of Home Assistant 2022.5. -# Please use the ClimateEntityFeature enum instead. -_DEPRECATED_SUPPORT_TARGET_TEMPERATURE = DeprecatedConstantEnum( - ClimateEntityFeature.TARGET_TEMPERATURE, "2025.1" -) -_DEPRECATED_SUPPORT_TARGET_TEMPERATURE_RANGE = DeprecatedConstantEnum( - ClimateEntityFeature.TARGET_TEMPERATURE_RANGE, "2025.1" -) -_DEPRECATED_SUPPORT_TARGET_HUMIDITY = DeprecatedConstantEnum( - ClimateEntityFeature.TARGET_HUMIDITY, "2025.1" -) -_DEPRECATED_SUPPORT_FAN_MODE = DeprecatedConstantEnum( - ClimateEntityFeature.FAN_MODE, "2025.1" -) -_DEPRECATED_SUPPORT_PRESET_MODE = DeprecatedConstantEnum( - ClimateEntityFeature.PRESET_MODE, "2025.1" -) -_DEPRECATED_SUPPORT_SWING_MODE = DeprecatedConstantEnum( - ClimateEntityFeature.SWING_MODE, "2025.1" -) -_DEPRECATED_SUPPORT_AUX_HEAT = DeprecatedConstantEnum( - ClimateEntityFeature.AUX_HEAT, "2025.1" -) - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/tests/components/climate/test_device_trigger.py b/tests/components/climate/test_device_trigger.py index a492d9805b5..4b5a578ecc4 100644 --- a/tests/components/climate/test_device_trigger.py +++ b/tests/components/climate/test_device_trigger.py @@ -48,7 +48,7 @@ async def test_get_triggers( ) hass.states.async_set( entity_entry.entity_id, - const.HVAC_MODE_COOL, + HVACMode.COOL, { const.ATTR_HVAC_ACTION: HVACAction.IDLE, const.ATTR_CURRENT_HUMIDITY: 23, diff --git a/tests/components/climate/test_init.py b/tests/components/climate/test_init.py index 254fb26a471..a7f47668612 100644 --- a/tests/components/climate/test_init.py +++ b/tests/components/climate/test_init.py @@ -3,14 +3,12 @@ from __future__ import annotations from enum import Enum -from types import ModuleType from typing import Any from unittest.mock import 
MagicMock, Mock, patch import pytest import voluptuous as vol -from homeassistant.components import climate from homeassistant.components.climate import ( DOMAIN, SET_TEMPERATURE_SCHEMA, @@ -58,9 +56,6 @@ from tests.common import ( MockModule, MockPlatform, async_mock_service, - help_test_all, - import_and_test_deprecated_constant, - import_and_test_deprecated_constant_enum, mock_integration, mock_platform, setup_test_component_platform, @@ -213,63 +208,6 @@ def _create_tuples(enum: type[Enum], constant_prefix: str) -> list[tuple[Enum, s ] -@pytest.mark.parametrize( - "module", - [climate, climate.const], -) -def test_all(module: ModuleType) -> None: - """Test module.__all__ is correctly set.""" - help_test_all(module) - - -@pytest.mark.parametrize( - ("enum", "constant_prefix"), - _create_tuples(climate.ClimateEntityFeature, "SUPPORT_") - + _create_tuples(climate.HVACMode, "HVAC_MODE_"), -) -@pytest.mark.parametrize( - "module", - [climate, climate.const], -) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: Enum, - constant_prefix: str, - module: ModuleType, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, module, enum, constant_prefix, "2025.1" - ) - - -@pytest.mark.parametrize( - ("enum", "constant_postfix"), - [ - (climate.HVACAction.OFF, "OFF"), - (climate.HVACAction.HEATING, "HEAT"), - (climate.HVACAction.COOLING, "COOL"), - (climate.HVACAction.DRYING, "DRY"), - (climate.HVACAction.IDLE, "IDLE"), - (climate.HVACAction.FAN, "FAN"), - ], -) -def test_deprecated_current_constants( - caplog: pytest.LogCaptureFixture, - enum: climate.HVACAction, - constant_postfix: str, -) -> None: - """Test deprecated current constants.""" - import_and_test_deprecated_constant( - caplog, - climate.const, - "CURRENT_HVAC_" + constant_postfix, - f"{enum.__class__.__name__}.{enum.name}", - enum, - "2025.1", - ) - - async def test_temperature_features_is_valid( hass: HomeAssistant, register_test_integration: MockConfigEntry, From 8e12fbff88da5e783da88e8579f4fc645a64232b Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Fri, 29 Nov 2024 03:31:38 +0100 Subject: [PATCH 0072/1198] Refactor calendars in Habitica (#131020) * Refactor calendars * changes --- homeassistant/components/habitica/calendar.py | 159 +++++++----------- 1 file changed, 63 insertions(+), 96 deletions(-) diff --git a/homeassistant/components/habitica/calendar.py b/homeassistant/components/habitica/calendar.py index 6de22a0314a..ff483b71fd8 100644 --- a/homeassistant/components/habitica/calendar.py +++ b/homeassistant/components/habitica/calendar.py @@ -2,6 +2,7 @@ from __future__ import annotations +from abc import abstractmethod from datetime import date, datetime, timedelta from enum import StrEnum @@ -60,6 +61,43 @@ class HabiticaCalendarEntity(HabiticaBase, CalendarEntity): """Initialize calendar entity.""" super().__init__(coordinator, self.entity_description) + @abstractmethod + def get_events( + self, start_date: datetime, end_date: datetime | None = None + ) -> list[CalendarEvent]: + """Return events.""" + + @property + def event(self) -> CalendarEvent | None: + """Return the current or next upcoming event.""" + + return next(iter(self.get_events(dt_util.now())), None) + + async def async_get_events( + self, hass: HomeAssistant, start_date: datetime, end_date: datetime + ) -> list[CalendarEvent]: + """Return calendar events within a datetime range.""" + + return self.get_events(start_date, end_date) + + @property + def 
start_of_today(self) -> datetime: + """Habitica daystart.""" + return dt_util.start_of_local_day( + datetime.fromisoformat(self.coordinator.data.user["lastCron"]) + ) + + def get_recurrence_dates( + self, recurrences: rrule, start_date: datetime, end_date: datetime | None = None + ) -> list[datetime]: + """Calculate recurrence dates based on start_date and end_date.""" + if end_date: + return recurrences.between( + start_date, end_date - timedelta(days=1), inc=True + ) + # if no end_date is given, return only the next recurrence + return [recurrences.after(start_date, inc=True)] + class HabiticaTodosCalendarEntity(HabiticaCalendarEntity): """Habitica todos calendar entity.""" @@ -69,7 +107,7 @@ class HabiticaTodosCalendarEntity(HabiticaCalendarEntity): translation_key=HabiticaCalendar.TODOS, ) - def dated_todos( + def get_events( self, start_date: datetime, end_date: datetime | None = None ) -> list[CalendarEvent]: """Get all dated todos.""" @@ -112,18 +150,6 @@ class HabiticaTodosCalendarEntity(HabiticaCalendarEntity): ), ) - @property - def event(self) -> CalendarEvent | None: - """Return the current or next upcoming event.""" - - return next(iter(self.dated_todos(dt_util.now())), None) - - async def async_get_events( - self, hass: HomeAssistant, start_date: datetime, end_date: datetime - ) -> list[CalendarEvent]: - """Return calendar events within a datetime range.""" - return self.dated_todos(start_date, end_date) - class HabiticaDailiesCalendarEntity(HabiticaCalendarEntity): """Habitica dailies calendar entity.""" @@ -133,13 +159,6 @@ class HabiticaDailiesCalendarEntity(HabiticaCalendarEntity): translation_key=HabiticaCalendar.DAILIES, ) - @property - def today(self) -> datetime: - """Habitica daystart.""" - return dt_util.start_of_local_day( - datetime.fromisoformat(self.coordinator.data.user["lastCron"]) - ) - def end_date(self, recurrence: datetime, end: datetime | None = None) -> date: """Calculate the end date for a yesterdaily. 
@@ -152,29 +171,20 @@ class HabiticaDailiesCalendarEntity(HabiticaCalendarEntity): if end: return recurrence.date() + timedelta(days=1) return ( - dt_util.start_of_local_day() if recurrence == self.today else recurrence + dt_util.start_of_local_day() + if recurrence == self.start_of_today + else recurrence ).date() + timedelta(days=1) - def get_recurrence_dates( - self, recurrences: rrule, start_date: datetime, end_date: datetime | None = None - ) -> list[datetime]: - """Calculate recurrence dates based on start_date and end_date.""" - if end_date: - return recurrences.between( - start_date, end_date - timedelta(days=1), inc=True - ) - # if no end_date is given, return only the next recurrence - return [recurrences.after(self.today, inc=True)] - - def due_dailies( + def get_events( self, start_date: datetime, end_date: datetime | None = None ) -> list[CalendarEvent]: """Get dailies and recurrences for a given period or the next upcoming.""" # we only have dailies for today and future recurrences - if end_date and end_date < self.today: + if end_date and end_date < self.start_of_today: return [] - start_date = max(start_date, self.today) + start_date = max(start_date, self.start_of_today) events = [] for task in self.coordinator.data.tasks: @@ -187,10 +197,12 @@ class HabiticaDailiesCalendarEntity(HabiticaCalendarEntity): recurrences, start_date, end_date ) for recurrence in recurrence_dates: - is_future_event = recurrence > self.today - is_current_event = recurrence <= self.today and not task["completed"] + is_future_event = recurrence > self.start_of_today + is_current_event = ( + recurrence <= self.start_of_today and not task["completed"] + ) - if not (is_future_event or is_current_event): + if not is_future_event and not is_current_event: continue events.append( @@ -214,20 +226,15 @@ class HabiticaDailiesCalendarEntity(HabiticaCalendarEntity): @property def event(self) -> CalendarEvent | None: """Return the next upcoming event.""" - return next(iter(self.due_dailies(self.today)), None) - - async def async_get_events( - self, hass: HomeAssistant, start_date: datetime, end_date: datetime - ) -> list[CalendarEvent]: - """Return calendar events within a datetime range.""" - - return self.due_dailies(start_date, end_date) + return next(iter(self.get_events(self.start_of_today)), None) @property def extra_state_attributes(self) -> dict[str, bool | None] | None: """Return entity specific state attributes.""" return { - "yesterdaily": self.event.start < self.today.date() if self.event else None + "yesterdaily": self.event.start < self.start_of_today.date() + if self.event + else None } @@ -239,7 +246,7 @@ class HabiticaTodoRemindersCalendarEntity(HabiticaCalendarEntity): translation_key=HabiticaCalendar.TODO_REMINDERS, ) - def reminders( + def get_events( self, start_date: datetime, end_date: datetime | None = None ) -> list[CalendarEvent]: """Reminders for todos.""" @@ -282,18 +289,6 @@ class HabiticaTodoRemindersCalendarEntity(HabiticaCalendarEntity): key=lambda event: event.start, ) - @property - def event(self) -> CalendarEvent | None: - """Return the next upcoming event.""" - return next(iter(self.reminders(dt_util.now())), None) - - async def async_get_events( - self, hass: HomeAssistant, start_date: datetime, end_date: datetime - ) -> list[CalendarEvent]: - """Return calendar events within a datetime range.""" - - return self.reminders(start_date, end_date) - class HabiticaDailyRemindersCalendarEntity(HabiticaCalendarEntity): """Habitica daily reminders calendar entity.""" @@ -321,47 
+316,31 @@ class HabiticaDailyRemindersCalendarEntity(HabiticaCalendarEntity): tzinfo=dt_util.DEFAULT_TIME_ZONE, ) - @property - def today(self) -> datetime: - """Habitica daystart.""" - return dt_util.start_of_local_day( - datetime.fromisoformat(self.coordinator.data.user["lastCron"]) - ) - - def get_recurrence_dates( - self, recurrences: rrule, start_date: datetime, end_date: datetime | None = None - ) -> list[datetime]: - """Calculate recurrence dates based on start_date and end_date.""" - if end_date: - return recurrences.between( - start_date, end_date - timedelta(days=1), inc=True - ) - # if no end_date is given, return only the next recurrence - return [recurrences.after(self.today, inc=True)] - - def reminders( + def get_events( self, start_date: datetime, end_date: datetime | None = None ) -> list[CalendarEvent]: """Reminders for dailies.""" events = [] - if end_date and end_date < self.today: + if end_date and end_date < self.start_of_today: return [] - start_date = max(start_date, self.today) + start_date = max(start_date, self.start_of_today) for task in self.coordinator.data.tasks: if not (task["type"] == HabiticaTaskType.DAILY and task["everyX"]): continue recurrences = build_rrule(task) - recurrences_start = self.today + recurrences_start = self.start_of_today recurrence_dates = self.get_recurrence_dates( recurrences, recurrences_start, end_date ) for recurrence in recurrence_dates: - is_future_event = recurrence > self.today - is_current_event = recurrence <= self.today and not task["completed"] + is_future_event = recurrence > self.start_of_today + is_current_event = ( + recurrence <= self.start_of_today and not task["completed"] + ) if not is_future_event and not is_current_event: continue @@ -388,15 +367,3 @@ class HabiticaDailyRemindersCalendarEntity(HabiticaCalendarEntity): events, key=lambda event: event.start, ) - - @property - def event(self) -> CalendarEvent | None: - """Return the next upcoming event.""" - return next(iter(self.reminders(dt_util.now())), None) - - async def async_get_events( - self, hass: HomeAssistant, start_date: datetime, end_date: datetime - ) -> list[CalendarEvent]: - """Return calendar events within a datetime range.""" - - return self.reminders(start_date, end_date) From 24f7bae5f260c7b4417e0b21d4f742ca916b2349 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 29 Nov 2024 03:32:01 +0100 Subject: [PATCH 0073/1198] Add documentation URL to quality_scale hassfest validation (#131879) * Add documentation URL to quality_scale hassfest validation * Adjust --- script/hassfest/quality_scale.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 1a665df19f5..543bf616952 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -104,6 +104,12 @@ SCALE_RULES = { VALIDATORS = {rule.name: rule.validator for rule in ALL_RULES if rule.validator} +RULE_URL = ( + "Please check the documentation at " + "https://developers.home-assistant.io/docs/core/" + "integration-quality-scale/rules/{rule_name}/" +) + INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "abode", "accuweather", @@ -1367,6 +1373,7 @@ def validate_iqs_file(config: Config, integration: Integration) -> None: ): for error in errors: integration.add_error("quality_scale", f"[{rule_name}] {error}") + integration.add_error("quality_scale", RULE_URL.format(rule_name=rule_name)) # An integration must have all the necessary rules for the declared # quality 
scale, and all the rules below. From 28cfa372483e9eaadebceb19069a33bdb3d1df12 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 29 Nov 2024 05:08:43 +0100 Subject: [PATCH 0074/1198] Add unique_config_entry rule to quality_scale hassfest validation (#131878) * Add unique_config_entry rule to quality_scale hassfest validation * Improve message --- script/hassfest/quality_scale.py | 3 +- .../unique_config_entry.py | 49 +++++++++++++++++++ 2 files changed, 51 insertions(+), 1 deletion(-) create mode 100644 script/hassfest/quality_scale_validation/unique_config_entry.py diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 543bf616952..bb6f0cae7f0 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -20,6 +20,7 @@ from .quality_scale_validation import ( reauthentication_flow, reconfiguration_flow, strict_typing, + unique_config_entry, ) QUALITY_SCALE_TIERS = {value.name.lower(): value for value in ScaledQualityScaleTiers} @@ -53,7 +54,7 @@ ALL_RULES = [ Rule("runtime-data", ScaledQualityScaleTiers.BRONZE), Rule("test-before-configure", ScaledQualityScaleTiers.BRONZE), Rule("test-before-setup", ScaledQualityScaleTiers.BRONZE), - Rule("unique-config-entry", ScaledQualityScaleTiers.BRONZE), + Rule("unique-config-entry", ScaledQualityScaleTiers.BRONZE, unique_config_entry), # SILVER Rule("action-exceptions", ScaledQualityScaleTiers.SILVER), Rule( diff --git a/script/hassfest/quality_scale_validation/unique_config_entry.py b/script/hassfest/quality_scale_validation/unique_config_entry.py new file mode 100644 index 00000000000..eaa879bb05e --- /dev/null +++ b/script/hassfest/quality_scale_validation/unique_config_entry.py @@ -0,0 +1,49 @@ +"""Enforce that the integration prevents duplicates from being configured. 
+ +https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/unique-config-entry/ +""" + +import ast + +from script.hassfest.model import Integration + + +def _has_method_call(module: ast.Module, name: str) -> bool: + """Test if the module calls a specific method.""" + return any( + type(item.func) is ast.Attribute and item.func.attr == name + for item in ast.walk(module) + if isinstance(item, ast.Call) + ) + + +def _has_abort_entries_match(module: ast.Module) -> bool: + """Test if the module calls `_async_abort_entries_match`.""" + return _has_method_call(module, "_async_abort_entries_match") + + +def _has_abort_unique_id_configured(module: ast.Module) -> bool: + """Test if the module defines (and checks for) a unique_id.""" + return _has_method_call(module, "async_set_unique_id") and _has_method_call( + module, "_abort_if_unique_id_configured" + ) + + +def validate(integration: Integration) -> list[str] | None: + """Validate that the integration prevents duplicate devices.""" + + if integration.manifest.get("single_config_entry"): + return None + + config_flow_file = integration.path / "config_flow.py" + config_flow = ast.parse(config_flow_file.read_text()) + + if not ( + _has_abort_entries_match(config_flow) + or _has_abort_unique_id_configured(config_flow) + ): + return [ + "Integration doesn't prevent the same device or service from being " + f"set up twice in {config_flow_file}" + ] + return None From 954ac0d288d8768836803508a259060c4e22c2d3 Mon Sep 17 00:00:00 2001 From: David Knowles Date: Thu, 28 Nov 2024 23:34:20 -0500 Subject: [PATCH 0075/1198] Ensure Schlage exceptions are translated (#131733) --- homeassistant/components/schlage/coordinator.py | 4 +++- homeassistant/components/schlage/strings.json | 5 +++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/schlage/coordinator.py b/homeassistant/components/schlage/coordinator.py index 5d525e3c842..b319b21be0c 100644 --- a/homeassistant/components/schlage/coordinator.py +++ b/homeassistant/components/schlage/coordinator.py @@ -56,7 +56,9 @@ class SchlageDataUpdateCoordinator(DataUpdateCoordinator[SchlageData]): except NotAuthorizedError as ex: raise ConfigEntryAuthFailed from ex except SchlageError as ex: - raise UpdateFailed("Failed to refresh Schlage data") from ex + raise UpdateFailed( + translation_domain=DOMAIN, translation_key="schlage_refresh_failed" + ) from ex lock_data = await asyncio.gather( *( self.hass.async_add_executor_job(self._get_lock_data, lock) diff --git a/homeassistant/components/schlage/strings.json b/homeassistant/components/schlage/strings.json index 5c8cd0826a9..56e72c2d2c0 100644 --- a/homeassistant/components/schlage/strings.json +++ b/homeassistant/components/schlage/strings.json @@ -53,5 +53,10 @@ "name": "1-Touch Locking" } } + }, + "exceptions": { + "schlage_refresh_failed": { + "message": "Failed to refresh Schlage data" + } } } From 0fc365a114f4b5883bc2b6cc0f1e6fd6a3ac008d Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 29 Nov 2024 16:06:38 +0100 Subject: [PATCH 0076/1198] Add discovery rule to quality_scale hassfest validation (#131890) --- script/hassfest/quality_scale.py | 3 +- .../quality_scale_validation/discovery.py | 46 +++++++++++++++++++ 2 files changed, 48 insertions(+), 1 deletion(-) create mode 100644 script/hassfest/quality_scale_validation/discovery.py diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index bb6f0cae7f0..24c98807936 100644 ---
a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -17,6 +17,7 @@ from .quality_scale_validation import ( config_entry_unloading, config_flow, diagnostics, + discovery, reauthentication_flow, reconfiguration_flow, strict_typing, @@ -73,7 +74,7 @@ ALL_RULES = [ # GOLD: [ Rule("devices", ScaledQualityScaleTiers.GOLD), Rule("diagnostics", ScaledQualityScaleTiers.GOLD, diagnostics), - Rule("discovery", ScaledQualityScaleTiers.GOLD), + Rule("discovery", ScaledQualityScaleTiers.GOLD, discovery), Rule("discovery-update-info", ScaledQualityScaleTiers.GOLD), Rule("docs-data-update", ScaledQualityScaleTiers.GOLD), Rule("docs-examples", ScaledQualityScaleTiers.GOLD), diff --git a/script/hassfest/quality_scale_validation/discovery.py b/script/hassfest/quality_scale_validation/discovery.py new file mode 100644 index 00000000000..a4f01ce0269 --- /dev/null +++ b/script/hassfest/quality_scale_validation/discovery.py @@ -0,0 +1,46 @@ +"""Enforce that the integration supports discovery. + +https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/discovery/ +""" + +import ast + +from script.hassfest.model import Integration + +DISCOVERY_FUNCTIONS = [ + "async_step_discovery", + "async_step_bluetooth", + "async_step_hassio", + "async_step_homekit", + "async_step_mqtt", + "async_step_ssdp", + "async_step_zeroconf", + "async_step_dhcp", + "async_step_usb", +] + + +def _has_discovery_function(module: ast.Module) -> bool: + """Test if the module defines at least one of the discovery functions.""" + return any( + type(item) is ast.AsyncFunctionDef and item.name in DISCOVERY_FUNCTIONS + for item in ast.walk(module) + ) + + +def validate(integration: Integration) -> list[str] | None: + """Validate that the integration supports discovery.""" + + config_flow_file = integration.path / "config_flow.py" + if not config_flow_file.exists(): + return ["Integration is missing config_flow.py"] + + config_flow = ast.parse(config_flow_file.read_text()) + + if not _has_discovery_function(config_flow): + return [ + f"Integration is missing one of {DISCOVERY_FUNCTIONS} " + f"in {config_flow_file}" + ] + + return None From 920c958ec7e6a52c6c535113cc495a8d817827a7 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Fri, 29 Nov 2024 08:56:26 -0800 Subject: [PATCH 0077/1198] Add runtime_data rule to quality_scale hassfest validation (#131857) * Add quality scale check for runtime_data * Linter fixes * Add developer documentation link * Update script/hassfest/quality_scale_validation/runtime_data.py Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> * Update validation to check explicitly for ConfigEntry.runtime_data * Update script/hassfest/quality_scale_validation/runtime_data.py Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> * Refine check for setting attributes * Patch with changes from epenet --------- Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- script/hassfest/quality_scale.py | 3 +- .../quality_scale_validation/runtime_data.py | 53 +++++++++++++++++++ 2 files changed, 55 insertions(+), 1 deletion(-) create mode 100644 script/hassfest/quality_scale_validation/runtime_data.py diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 24c98807936..734c6d57faf 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -20,6 +20,7 @@ from .quality_scale_validation import ( discovery, reauthentication_flow, reconfiguration_flow, + runtime_data, strict_typing,
unique_config_entry, ) @@ -52,7 +53,7 @@ ALL_RULES = [ Rule("entity-event-setup", ScaledQualityScaleTiers.BRONZE), Rule("entity-unique-id", ScaledQualityScaleTiers.BRONZE), Rule("has-entity-name", ScaledQualityScaleTiers.BRONZE), - Rule("runtime-data", ScaledQualityScaleTiers.BRONZE), + Rule("runtime-data", ScaledQualityScaleTiers.BRONZE, runtime_data), Rule("test-before-configure", ScaledQualityScaleTiers.BRONZE), Rule("test-before-setup", ScaledQualityScaleTiers.BRONZE), Rule("unique-config-entry", ScaledQualityScaleTiers.BRONZE, unique_config_entry), diff --git a/script/hassfest/quality_scale_validation/runtime_data.py b/script/hassfest/quality_scale_validation/runtime_data.py new file mode 100644 index 00000000000..765db43d1e3 --- /dev/null +++ b/script/hassfest/quality_scale_validation/runtime_data.py @@ -0,0 +1,53 @@ +"""Enforce that the integration uses ConfigEntry.runtime_data to store runtime data. + +https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/runtime-data +""" + +import ast + +from script.hassfest.model import Integration + + +def _sets_runtime_data( + async_setup_entry_function: ast.AsyncFunctionDef, config_entry_argument: ast.arg +) -> bool: + """Check that `entry.runtime` gets set within `async_setup_entry`.""" + for node in ast.walk(async_setup_entry_function): + if ( + isinstance(node, ast.Attribute) + and isinstance(node.value, ast.Name) + and node.value.id == config_entry_argument.arg + and node.attr == "runtime_data" + and isinstance(node.ctx, ast.Store) + ): + return True + return False + + +def _get_setup_entry_function(module: ast.Module) -> ast.AsyncFunctionDef | None: + """Get async_setup_entry function.""" + for item in module.body: + if isinstance(item, ast.AsyncFunctionDef) and item.name == "async_setup_entry": + return item + return None + + +def validate(integration: Integration) -> list[str] | None: + """Validate correct use of ConfigEntry.runtime_data.""" + init_file = integration.path / "__init__.py" + init = ast.parse(init_file.read_text()) + + # Should not happen, but better to be safe + if not (async_setup_entry := _get_setup_entry_function(init)): + return [f"Could not find `async_setup_entry` in {init_file}"] + if len(async_setup_entry.args.args) != 2: + return [f"async_setup_entry has incorrect signature in {init_file}"] + config_entry_argument = async_setup_entry.args.args[1] + + if not _sets_runtime_data(async_setup_entry, config_entry_argument): + return [ + "Integration does not set entry.runtime_data in async_setup_entry" + f"({init_file})" + ] + + return None From 6144cc26ba80314c46c9784165f4324cbfa039f7 Mon Sep 17 00:00:00 2001 From: Jc2k Date: Fri, 29 Nov 2024 17:29:10 +0000 Subject: [PATCH 0078/1198] Bump aiohomekit to 3.2.7 (#131924) --- homeassistant/components/homekit_controller/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/homekit_controller/manifest.json b/homeassistant/components/homekit_controller/manifest.json index cddd61a12c1..b7c82b9fd51 100644 --- a/homeassistant/components/homekit_controller/manifest.json +++ b/homeassistant/components/homekit_controller/manifest.json @@ -14,6 +14,6 @@ "documentation": "https://www.home-assistant.io/integrations/homekit_controller", "iot_class": "local_push", "loggers": ["aiohomekit", "commentjson"], - "requirements": ["aiohomekit==3.2.6"], + "requirements": ["aiohomekit==3.2.7"], "zeroconf": ["_hap._tcp.local.", "_hap._udp.local."] } diff 
--git a/requirements_all.txt b/requirements_all.txt index 0226fa8d924..a49a7434ef5 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -265,7 +265,7 @@ aioharmony==0.2.10 aiohasupervisor==0.2.1 # homeassistant.components.homekit_controller -aiohomekit==3.2.6 +aiohomekit==3.2.7 # homeassistant.components.hue aiohue==4.7.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index ac180f8c650..c98d3525419 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -250,7 +250,7 @@ aioharmony==0.2.10 aiohasupervisor==0.2.1 # homeassistant.components.homekit_controller -aiohomekit==3.2.6 +aiohomekit==3.2.7 # homeassistant.components.hue aiohue==4.7.3 From c19038ced6b10a0c26c61ef6c5d4b3d9ce39d0ea Mon Sep 17 00:00:00 2001 From: Raphael Hehl <7577984+RaHehl@users.noreply.github.com> Date: Fri, 29 Nov 2024 19:47:33 +0100 Subject: [PATCH 0079/1198] Bump uiprotect to 6.6.4 (#131931) --- homeassistant/components/unifiprotect/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/unifiprotect/manifest.json b/homeassistant/components/unifiprotect/manifest.json index 9a76ba6f984..9730c1e3741 100644 --- a/homeassistant/components/unifiprotect/manifest.json +++ b/homeassistant/components/unifiprotect/manifest.json @@ -40,7 +40,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["uiprotect", "unifi_discovery"], - "requirements": ["uiprotect==6.6.3", "unifi-discovery==1.2.0"], + "requirements": ["uiprotect==6.6.4", "unifi-discovery==1.2.0"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/requirements_all.txt b/requirements_all.txt index a49a7434ef5..e63af5cede4 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2897,7 +2897,7 @@ typedmonarchmoney==0.3.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.6.3 +uiprotect==6.6.4 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c98d3525419..a0a39296b39 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2310,7 +2310,7 @@ typedmonarchmoney==0.3.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.6.3 +uiprotect==6.6.4 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 From dd62fb387e80b2fad7f85a95e95d586d18f446eb Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Fri, 29 Nov 2024 20:23:10 +0100 Subject: [PATCH 0080/1198] Bump pynecil to v1.0.1 (#131935) --- homeassistant/components/iron_os/manifest.json | 2 +- homeassistant/components/iron_os/sensor.py | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/iron_os/snapshots/test_sensor.ambr | 4 ++-- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/iron_os/manifest.json b/homeassistant/components/iron_os/manifest.json index 4ec08a43b61..3141273e3f0 100644 --- a/homeassistant/components/iron_os/manifest.json +++ b/homeassistant/components/iron_os/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/iron_os", "iot_class": "local_polling", "loggers": ["pynecil", "aiogithubapi"], - "requirements": ["pynecil==0.2.1", "aiogithubapi==24.6.0"] + "requirements": ["pynecil==1.0.1", "aiogithubapi==24.6.0"] } diff --git a/homeassistant/components/iron_os/sensor.py b/homeassistant/components/iron_os/sensor.py index 
095ffd254df..680ad9abfde 100644 --- a/homeassistant/components/iron_os/sensor.py +++ b/homeassistant/components/iron_os/sensor.py @@ -137,7 +137,7 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = ( IronOSSensorEntityDescription( key=PinecilSensor.TIP_VOLTAGE, translation_key=PinecilSensor.TIP_VOLTAGE, - native_unit_of_measurement=UnitOfElectricPotential.MILLIVOLT, + native_unit_of_measurement=UnitOfElectricPotential.MICROVOLT, device_class=SensorDeviceClass.VOLTAGE, state_class=SensorStateClass.MEASUREMENT, suggested_display_precision=3, diff --git a/requirements_all.txt b/requirements_all.txt index e63af5cede4..6e5737e3c77 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2090,7 +2090,7 @@ pymsteams==0.1.12 pymysensors==0.24.0 # homeassistant.components.iron_os -pynecil==0.2.1 +pynecil==1.0.1 # homeassistant.components.netgear pynetgear==0.10.10 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a0a39296b39..0ff38cdc407 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1686,7 +1686,7 @@ pymonoprice==0.4 pymysensors==0.24.0 # homeassistant.components.iron_os -pynecil==0.2.1 +pynecil==1.0.1 # homeassistant.components.netgear pynetgear==0.10.10 diff --git a/tests/components/iron_os/snapshots/test_sensor.ambr b/tests/components/iron_os/snapshots/test_sensor.ambr index 64cb951dacc..4149d5c9a1d 100644 --- a/tests/components/iron_os/snapshots/test_sensor.ambr +++ b/tests/components/iron_os/snapshots/test_sensor.ambr @@ -513,7 +513,7 @@ 'supported_features': 0, 'translation_key': , 'unique_id': 'c0:ff:ee:c0:ff:ee_tip_voltage', - 'unit_of_measurement': , + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.pinecil_raw_tip_voltage-state] @@ -522,7 +522,7 @@ 'device_class': 'voltage', 'friendly_name': 'Pinecil Raw tip voltage', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.pinecil_raw_tip_voltage', From 87020e89458b002f8ad9d763fb99abe497ec7047 Mon Sep 17 00:00:00 2001 From: Sid <27780930+autinerd@users.noreply.github.com> Date: Fri, 29 Nov 2024 20:23:57 +0100 Subject: [PATCH 0081/1198] Bump ruff to 0.8.1 (#131927) --- .pre-commit-config.yaml | 2 +- requirements_test_pre_commit.txt | 2 +- script/hassfest/docker/Dockerfile | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3a20276c881..9947ee05ad1 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.8.0 + rev: v0.8.1 hooks: - id: ruff args: diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index 6523c4d0e43..b263373f11d 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,5 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit codespell==2.3.0 -ruff==0.8.0 +ruff==0.8.1 yamllint==1.35.1 diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index e11ffca025d..b6fbbdd1172 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -22,7 +22,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.5.4,source=/uv,target=/bin/uv \ --no-cache \ -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ -r /usr/src/homeassistant/requirements.txt \ - stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.0 \ + stdlib-list==0.10.0 pipdeptree==2.23.4 
tqdm==4.66.5 ruff==0.8.1 \ PyTurboJPEG==1.7.5 go2rtc-client==0.1.1 ha-ffmpeg==3.2.2 hassil==2.0.4 home-assistant-intents==2024.11.27 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 LABEL "name"="hassfest" From aa206c76087715e2eef82c2ef2c9389101018f78 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 29 Nov 2024 20:28:18 +0100 Subject: [PATCH 0082/1198] Use typed ConfigEntry in discovergy (#131891) --- homeassistant/components/discovergy/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/discovergy/__init__.py b/homeassistant/components/discovergy/__init__.py index 72aa6c19a21..81c33adc052 100644 --- a/homeassistant/components/discovergy/__init__.py +++ b/homeassistant/components/discovergy/__init__.py @@ -60,11 +60,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: DiscovergyConfigEntry) - return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: DiscovergyConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) -async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def async_reload_entry(hass: HomeAssistant, entry: DiscovergyConfigEntry) -> None: """Handle an options update.""" await hass.config_entries.async_reload(entry.entry_id) From 1abd2209b3e5b1d06fd40bfa16455d1c2a97d156 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Sat, 30 Nov 2024 01:13:52 +0100 Subject: [PATCH 0083/1198] Fix KNX IP Secure tunnelling endpoint selection with keyfile (#131941) --- homeassistant/components/knx/__init__.py | 3 +++ homeassistant/components/knx/const.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/knx/__init__.py b/homeassistant/components/knx/__init__.py index 9180e287618..ea654c358e7 100644 --- a/homeassistant/components/knx/__init__.py +++ b/homeassistant/components/knx/__init__.py @@ -54,6 +54,7 @@ from .const import ( CONF_KNX_SECURE_USER_PASSWORD, CONF_KNX_STATE_UPDATER, CONF_KNX_TELEGRAM_LOG_SIZE, + CONF_KNX_TUNNEL_ENDPOINT_IA, CONF_KNX_TUNNELING, CONF_KNX_TUNNELING_TCP, CONF_KNX_TUNNELING_TCP_SECURE, @@ -352,6 +353,7 @@ class KNXModule: if _conn_type == CONF_KNX_TUNNELING_TCP: return ConnectionConfig( connection_type=ConnectionType.TUNNELING_TCP, + individual_address=self.entry.data.get(CONF_KNX_TUNNEL_ENDPOINT_IA), gateway_ip=self.entry.data[CONF_HOST], gateway_port=self.entry.data[CONF_PORT], auto_reconnect=True, @@ -364,6 +366,7 @@ class KNXModule: if _conn_type == CONF_KNX_TUNNELING_TCP_SECURE: return ConnectionConfig( connection_type=ConnectionType.TUNNELING_TCP_SECURE, + individual_address=self.entry.data.get(CONF_KNX_TUNNEL_ENDPOINT_IA), gateway_ip=self.entry.data[CONF_HOST], gateway_port=self.entry.data[CONF_PORT], secure_config=SecureConfig( diff --git a/homeassistant/components/knx/const.py b/homeassistant/components/knx/const.py index 7a9dfc34546..a946ded0359 100644 --- a/homeassistant/components/knx/const.py +++ b/homeassistant/components/knx/const.py @@ -104,7 +104,7 @@ class KNXConfigEntryData(TypedDict, total=False): route_back: bool # not required host: str # only required for tunnelling port: int # only required for tunnelling - tunnel_endpoint_ia: str | None + tunnel_endpoint_ia: str | None # tunnelling only - not required (use get()) # KNX secure user_id: int | None # not required user_password: str | None # not 
required From 24bd61be3b42f31772e76ebca01bbe2205519026 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Sat, 30 Nov 2024 04:10:12 +0100 Subject: [PATCH 0084/1198] Add missing state_class in IronOS (#131928) Add missing state class in IronOS --- homeassistant/components/iron_os/sensor.py | 1 + tests/components/iron_os/snapshots/test_sensor.ambr | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/iron_os/sensor.py b/homeassistant/components/iron_os/sensor.py index 680ad9abfde..b21fa2e5591 100644 --- a/homeassistant/components/iron_os/sensor.py +++ b/homeassistant/components/iron_os/sensor.py @@ -107,6 +107,7 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = ( native_unit_of_measurement=OHM, value_fn=lambda data: data.tip_resistance, entity_category=EntityCategory.DIAGNOSTIC, + state_class=SensorStateClass.MEASUREMENT, ), IronOSSensorEntityDescription( key=PinecilSensor.UPTIME, diff --git a/tests/components/iron_os/snapshots/test_sensor.ambr b/tests/components/iron_os/snapshots/test_sensor.ambr index 4149d5c9a1d..44a17dd6ea5 100644 --- a/tests/components/iron_os/snapshots/test_sensor.ambr +++ b/tests/components/iron_os/snapshots/test_sensor.ambr @@ -537,7 +537,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -569,6 +571,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Pinecil Tip resistance', + 'state_class': , 'unit_of_measurement': 'Ω', }), 'context': , From 8c6a24c3681c9908232a8b9d460edd4b78fbdc45 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=2E=20Diego=20Rodr=C3=ADguez=20Royo?= Date: Sat, 30 Nov 2024 04:11:15 +0100 Subject: [PATCH 0085/1198] Use HomeAssistant error in the right cases (#131923) * Use the correct exceptions * Improved exception strings --- .../components/home_connect/light.py | 14 +++++----- .../components/home_connect/number.py | 4 +-- .../components/home_connect/select.py | 4 +-- .../components/home_connect/strings.json | 26 +++++++++---------- .../components/home_connect/switch.py | 18 ++++++------- homeassistant/components/home_connect/time.py | 4 +-- tests/components/home_connect/test_light.py | 4 +-- tests/components/home_connect/test_number.py | 4 +-- tests/components/home_connect/test_select.py | 4 +-- tests/components/home_connect/test_switch.py | 8 +++--- tests/components/home_connect/test_time.py | 4 +-- 11 files changed, 47 insertions(+), 47 deletions(-) diff --git a/homeassistant/components/home_connect/light.py b/homeassistant/components/home_connect/light.py index 97efc0413ab..e33017cd51f 100644 --- a/homeassistant/components/home_connect/light.py +++ b/homeassistant/components/home_connect/light.py @@ -16,7 +16,7 @@ from homeassistant.components.light import ( LightEntityDescription, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.color as color_util @@ -150,7 +150,7 @@ class HomeConnectLight(HomeConnectEntity, LightEntity): self.device.appliance.set_setting, self.bsh_key, True ) except HomeConnectError as err: - raise ServiceValidationError( + raise HomeAssistantError( translation_domain=DOMAIN, translation_key="turn_on_light", translation_placeholders={ @@ -169,7 +169,7 @@ class 
HomeConnectLight(HomeConnectEntity, LightEntity): self._enable_custom_color_value_key, ) except HomeConnectError as err: - raise ServiceValidationError( + raise HomeAssistantError( translation_domain=DOMAIN, translation_key="select_light_custom_color", translation_placeholders={ @@ -187,7 +187,7 @@ class HomeConnectLight(HomeConnectEntity, LightEntity): f"#{hex_val}", ) except HomeConnectError as err: - raise ServiceValidationError( + raise HomeAssistantError( translation_domain=DOMAIN, translation_key="set_light_color", translation_placeholders={ @@ -219,7 +219,7 @@ class HomeConnectLight(HomeConnectEntity, LightEntity): f"#{hex_val}", ) except HomeConnectError as err: - raise ServiceValidationError( + raise HomeAssistantError( translation_domain=DOMAIN, translation_key="set_light_color", translation_placeholders={ @@ -244,7 +244,7 @@ class HomeConnectLight(HomeConnectEntity, LightEntity): self.device.appliance.set_setting, self._brightness_key, brightness ) except HomeConnectError as err: - raise ServiceValidationError( + raise HomeAssistantError( translation_domain=DOMAIN, translation_key="set_light_brightness", translation_placeholders={ @@ -263,7 +263,7 @@ class HomeConnectLight(HomeConnectEntity, LightEntity): self.device.appliance.set_setting, self.bsh_key, False ) except HomeConnectError as err: - raise ServiceValidationError( + raise HomeAssistantError( translation_domain=DOMAIN, translation_key="turn_off_light", translation_placeholders={ diff --git a/homeassistant/components/home_connect/number.py b/homeassistant/components/home_connect/number.py index d1063a2026f..fc53939b9d8 100644 --- a/homeassistant/components/home_connect/number.py +++ b/homeassistant/components/home_connect/number.py @@ -12,7 +12,7 @@ from homeassistant.components.number import ( NumberEntityDescription, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import HomeConnectConfigEntry, get_dict_from_home_connect_error @@ -117,7 +117,7 @@ class HomeConnectNumberEntity(HomeConnectEntity, NumberEntity): value, ) except HomeConnectError as err: - raise ServiceValidationError( + raise HomeAssistantError( translation_domain=DOMAIN, translation_key="set_setting", translation_placeholders={ diff --git a/homeassistant/components/home_connect/select.py b/homeassistant/components/home_connect/select.py index fdd1f38bf97..46b2bda24d6 100644 --- a/homeassistant/components/home_connect/select.py +++ b/homeassistant/components/home_connect/select.py @@ -7,7 +7,7 @@ from homeconnect.api import HomeConnectError from homeassistant.components.select import SelectEntity, SelectEntityDescription from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import ( @@ -289,7 +289,7 @@ class HomeConnectProgramSelectEntity(HomeConnectEntity, SelectEntity): translation_key = "start_program" else: translation_key = "select_program" - raise ServiceValidationError( + raise HomeAssistantError( translation_domain=DOMAIN, translation_key=translation_key, translation_placeholders={ diff --git a/homeassistant/components/home_connect/strings.json b/homeassistant/components/home_connect/strings.json index f9524763020..5f5ed3cee54 100644 --- a/homeassistant/components/home_connect/strings.json +++ b/homeassistant/components/home_connect/strings.json @@ -23,43 +23,43 @@ }, "exceptions": { "turn_on_light": { - "message": "Error while trying to turn on {entity_id}: {description}" + "message": "Error turning on {entity_id}: {description}" }, "turn_off_light": { - "message": "Error while trying to turn off {entity_id}: {description}" + "message": "Error turning off {entity_id}: {description}" }, "set_light_brightness": { - "message": "Error while trying to set brightness of {entity_id}: {description}" + "message": "Error setting brightness of {entity_id}: {description}" }, "select_light_custom_color": { - "message": "Error while trying to select custom color of {entity_id}: {description}" + "message": "Error selecting custom color of {entity_id}: {description}" }, "set_light_color": { - "message": "Error while trying to set color of {entity_id}: {description}" + "message": "Error setting color of {entity_id}: {description}" }, "set_setting": { - "message": "Error while trying to assign the value \"{value}\" to the setting \"{setting_key}\" for {entity_id}: {description}" + "message": "Error assigning the value \"{value}\" to the setting \"{setting_key}\" for {entity_id}: {description}" }, "turn_on": { - "message": "Error while trying to turn on {entity_id} ({setting_key}): {description}" + "message": "Error turning on {entity_id} ({setting_key}): {description}" }, "turn_off": { - "message": "Error while trying to turn off {entity_id} ({setting_key}): {description}" + "message": "Error turning off {entity_id} ({setting_key}): {description}" }, "select_program": { - "message": "Error while trying to select program {program}: {description}" + "message": "Error selecting program {program}: {description}" }, "start_program": { - "message": "Error while trying to start program {program}: {description}" + "message": "Error starting program {program}: {description}" }, "stop_program": { - "message": "Error while trying to stop program {program}: {description}" + "message": "Error stopping program {program}: {description}" }, "power_on": { - "message": "Error while trying to turn on {appliance_name}: {description}" + "message": "Error turning on {appliance_name}: {description}" }, "power_off": { - "message": "Error while trying to turn off {appliance_name} with value \"{value}\": {description}" + "message": "Error turning off {appliance_name} with value \"{value}\": {description}" }, "turn_off_not_supported": { "message": "{appliance_name} does not support turning off or entering standby mode." 
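Illustrative sketch (separate from the recorded diff, under stated assumptions): every entity method touched in this commit follows the same shape — run the blocking Home Connect client call in the executor and, on HomeConnectError, re-raise a HomeAssistantError whose translation key matches an entry in the strings.json exceptions block above. The snippet below shows that shape in isolation; get_dict_from_home_connect_error is imported exactly as in the modules above, the DOMAIN import path is assumed, and the `entity` argument stands in for `self` on the real entity classes.

    from homeconnect.api import HomeConnectError

    from homeassistant.exceptions import HomeAssistantError

    # Helpers provided by the home_connect integration package itself;
    # get_dict_from_home_connect_error is imported this way in the diffs above,
    # the DOMAIN import path is an assumption for this sketch.
    from . import get_dict_from_home_connect_error
    from .const import DOMAIN


    async def _async_set_setting(entity, value: float) -> None:
        """Sketch of the error-translation pattern applied in this commit."""
        try:
            # Blocking library call pushed to the executor, as in the entity methods above.
            await entity.hass.async_add_executor_job(
                entity.device.appliance.set_setting, entity.bsh_key, value
            )
        except HomeConnectError as err:
            # Re-raise as HomeAssistantError so the "set_setting" message in
            # strings.json is rendered with the placeholders it expects.
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="set_setting",
                translation_placeholders={
                    **get_dict_from_home_connect_error(err),
                    "entity_id": entity.entity_id,
                    "setting_key": entity.bsh_key,
                    "value": str(value),
                },
            ) from err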
diff --git a/homeassistant/components/home_connect/switch.py b/homeassistant/components/home_connect/switch.py index 2fe3ff0a010..7e3a285912b 100644 --- a/homeassistant/components/home_connect/switch.py +++ b/homeassistant/components/home_connect/switch.py @@ -8,7 +8,7 @@ from homeconnect.api import HomeConnectError from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import HomeConnectConfigEntry, get_dict_from_home_connect_error @@ -134,7 +134,7 @@ class HomeConnectSwitch(HomeConnectEntity, SwitchEntity): ) except HomeConnectError as err: self._attr_available = False - raise ServiceValidationError( + raise HomeAssistantError( translation_domain=DOMAIN, translation_key="turn_on", translation_placeholders={ @@ -158,7 +158,7 @@ class HomeConnectSwitch(HomeConnectEntity, SwitchEntity): except HomeConnectError as err: _LOGGER.error("Error while trying to turn off: %s", err) self._attr_available = False - raise ServiceValidationError( + raise HomeAssistantError( translation_domain=DOMAIN, translation_key="turn_off", translation_placeholders={ @@ -209,7 +209,7 @@ class HomeConnectProgramSwitch(HomeConnectEntity, SwitchEntity): self.device.appliance.start_program, self.program_name ) except HomeConnectError as err: - raise ServiceValidationError( + raise HomeAssistantError( translation_domain=DOMAIN, translation_key="start_program", translation_placeholders={ @@ -225,7 +225,7 @@ class HomeConnectProgramSwitch(HomeConnectEntity, SwitchEntity): try: await self.hass.async_add_executor_job(self.device.appliance.stop_program) except HomeConnectError as err: - raise ServiceValidationError( + raise HomeAssistantError( translation_domain=DOMAIN, translation_key="stop_program", translation_placeholders={ @@ -278,7 +278,7 @@ class HomeConnectPowerSwitch(HomeConnectEntity, SwitchEntity): ) except HomeConnectError as err: self._attr_is_on = False - raise ServiceValidationError( + raise HomeAssistantError( translation_domain=DOMAIN, translation_key="power_on", translation_placeholders={ @@ -291,7 +291,7 @@ class HomeConnectPowerSwitch(HomeConnectEntity, SwitchEntity): async def async_turn_off(self, **kwargs: Any) -> None: """Switch the device off.""" if not hasattr(self, "power_off_state"): - raise ServiceValidationError( + raise HomeAssistantError( translation_domain=DOMAIN, translation_key="unable_to_retrieve_turn_off", translation_placeholders={ @@ -300,7 +300,7 @@ class HomeConnectPowerSwitch(HomeConnectEntity, SwitchEntity): ) if self.power_off_state is None: - raise ServiceValidationError( + raise HomeAssistantError( translation_domain=DOMAIN, translation_key="turn_off_not_supported", translation_placeholders={ @@ -316,7 +316,7 @@ class HomeConnectPowerSwitch(HomeConnectEntity, SwitchEntity): ) except HomeConnectError as err: self._attr_is_on = True - raise ServiceValidationError( + raise HomeAssistantError( translation_domain=DOMAIN, translation_key="power_off", translation_placeholders={ diff --git a/homeassistant/components/home_connect/time.py b/homeassistant/components/home_connect/time.py index f28339b3595..cad16d63cb2 100644 --- a/homeassistant/components/home_connect/time.py +++ b/homeassistant/components/home_connect/time.py @@ -7,7 +7,7 @@ from homeconnect.api import HomeConnectError from homeassistant.components.time import 
TimeEntity, TimeEntityDescription from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import HomeConnectConfigEntry, get_dict_from_home_connect_error @@ -80,7 +80,7 @@ class HomeConnectTimeEntity(HomeConnectEntity, TimeEntity): time_to_seconds(value), ) except HomeConnectError as err: - raise ServiceValidationError( + raise HomeAssistantError( translation_domain=DOMAIN, translation_key="set_setting", translation_placeholders={ diff --git a/tests/components/home_connect/test_light.py b/tests/components/home_connect/test_light.py index 7a9747929c9..471ddf0ec54 100644 --- a/tests/components/home_connect/test_light.py +++ b/tests/components/home_connect/test_light.py @@ -27,7 +27,7 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError +from homeassistant.exceptions import HomeAssistantError from .conftest import get_all_appliances @@ -341,7 +341,7 @@ async def test_switch_exception_handling( problematic_appliance.status.update(status) service_data["entity_id"] = entity_id - with pytest.raises(ServiceValidationError, match=exception_match): + with pytest.raises(HomeAssistantError, match=exception_match): await hass.services.async_call( LIGHT_DOMAIN, service, service_data, blocking=True ) diff --git a/tests/components/home_connect/test_number.py b/tests/components/home_connect/test_number.py index f70e307cb41..bce19161cf8 100644 --- a/tests/components/home_connect/test_number.py +++ b/tests/components/home_connect/test_number.py @@ -24,7 +24,7 @@ from homeassistant.components.number import ( from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError +from homeassistant.exceptions import HomeAssistantError from .conftest import get_all_appliances @@ -162,7 +162,7 @@ async def test_number_entity_error( getattr(problematic_appliance, mock_attr)() with pytest.raises( - ServiceValidationError, match=r"Error.*assign.*value.*to.*setting.*" + HomeAssistantError, match=r"Error.*assign.*value.*to.*setting.*" ): await hass.services.async_call( NUMBER_DOMAIN, diff --git a/tests/components/home_connect/test_select.py b/tests/components/home_connect/test_select.py index 5939d256e0a..7d5843e9525 100644 --- a/tests/components/home_connect/test_select.py +++ b/tests/components/home_connect/test_select.py @@ -14,7 +14,7 @@ from homeassistant.components.select import ATTR_OPTION, DOMAIN as SELECT_DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID, SERVICE_SELECT_OPTION, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError +from homeassistant.exceptions import HomeAssistantError from .conftest import get_all_appliances @@ -151,7 +151,7 @@ async def test_select_exception_handling( getattr(problematic_appliance, mock_attr)() problematic_appliance.status.update(status) - with pytest.raises(ServiceValidationError, match=exception_match): + with pytest.raises(HomeAssistantError, match=exception_match): await hass.services.async_call( SELECT_DOMAIN, SERVICE_SELECT_OPTION, diff --git a/tests/components/home_connect/test_switch.py 
b/tests/components/home_connect/test_switch.py index e4f45fbcdf9..3a89005dc59 100644 --- a/tests/components/home_connect/test_switch.py +++ b/tests/components/home_connect/test_switch.py @@ -29,7 +29,7 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError +from homeassistant.exceptions import HomeAssistantError from .conftest import get_all_appliances @@ -219,7 +219,7 @@ async def test_switch_exception_handling( with pytest.raises(HomeConnectError): getattr(problematic_appliance, mock_attr)() - with pytest.raises(ServiceValidationError, match=exception_match): + with pytest.raises(HomeAssistantError, match=exception_match): await hass.services.async_call( SWITCH_DOMAIN, service, {"entity_id": entity_id}, blocking=True ) @@ -344,7 +344,7 @@ async def test_ent_desc_switch_exception_handling( getattr(problematic_appliance, mock_attr)() problematic_appliance.status.update(status) - with pytest.raises(ServiceValidationError, match=exception_match): + with pytest.raises(HomeAssistantError, match=exception_match): await hass.services.async_call( SWITCH_DOMAIN, service, {ATTR_ENTITY_ID: entity_id}, blocking=True ) @@ -502,7 +502,7 @@ async def test_power_switch_service_validation_errors( appliance.status.update({BSH_POWER_STATE: {"value": BSH_POWER_ON}}) - with pytest.raises(ServiceValidationError, match=exception_match): + with pytest.raises(HomeAssistantError, match=exception_match): await hass.services.async_call( SWITCH_DOMAIN, service, {"entity_id": entity_id}, blocking=True ) diff --git a/tests/components/home_connect/test_time.py b/tests/components/home_connect/test_time.py index 25ce39786a5..1401e07b05a 100644 --- a/tests/components/home_connect/test_time.py +++ b/tests/components/home_connect/test_time.py @@ -12,7 +12,7 @@ from homeassistant.components.time import DOMAIN as TIME_DOMAIN, SERVICE_SET_VAL from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID, ATTR_TIME, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError +from homeassistant.exceptions import HomeAssistantError from .conftest import get_all_appliances @@ -136,7 +136,7 @@ async def test_time_entity_error( getattr(problematic_appliance, mock_attr)() with pytest.raises( - ServiceValidationError, match=r"Error.*assign.*value.*to.*setting.*" + HomeAssistantError, match=r"Error.*assign.*value.*to.*setting.*" ): await hass.services.async_call( TIME_DOMAIN, From a760786faffe5cce2ed455df157c183a798762e0 Mon Sep 17 00:00:00 2001 From: Marcel van der Veldt Date: Sat, 30 Nov 2024 04:11:57 +0100 Subject: [PATCH 0086/1198] Fix media player join action for Music Assistant integration (#131910) * Fix media player join action for Music Assistant integration * Add tests for join/unjoin * add one more test --- .../music_assistant/media_player.py | 10 +-- .../music_assistant/test_media_player.py | 68 +++++++++++++++++++ 2 files changed, 73 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/music_assistant/media_player.py b/homeassistant/components/music_assistant/media_player.py index d1d707c92e1..fdf3a0c0c48 100644 --- a/homeassistant/components/music_assistant/media_player.py +++ b/homeassistant/components/music_assistant/media_player.py @@ -400,13 +400,13 @@ class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity): async def async_join_players(self, group_members: list[str]) -> None: """Join 
`group_members` as a player group with the current player.""" player_ids: list[str] = [] + entity_registry = er.async_get(self.hass) for child_entity_id in group_members: # resolve HA entity_id to MA player_id - if (hass_state := self.hass.states.get(child_entity_id)) is None: - continue - if (mass_player_id := hass_state.attributes.get("mass_player_id")) is None: - continue - player_ids.append(mass_player_id) + if not (entity_reg_entry := entity_registry.async_get(child_entity_id)): + raise HomeAssistantError(f"Entity {child_entity_id} not found") + # unique id is the MA player_id + player_ids.append(entity_reg_entry.unique_id) await self.mass.players.player_command_group_many(self.player_id, player_ids) @catch_musicassistant_error diff --git a/tests/components/music_assistant/test_media_player.py b/tests/components/music_assistant/test_media_player.py index 26ed5d1e538..13716b6a479 100644 --- a/tests/components/music_assistant/test_media_player.py +++ b/tests/components/music_assistant/test_media_player.py @@ -8,6 +8,7 @@ import pytest from syrupy import SnapshotAssertion from homeassistant.components.media_player import ( + ATTR_GROUP_MEMBERS, ATTR_MEDIA_ENQUEUE, ATTR_MEDIA_REPEAT, ATTR_MEDIA_SEEK_POSITION, @@ -16,6 +17,8 @@ from homeassistant.components.media_player import ( ATTR_MEDIA_VOLUME_MUTED, DOMAIN as MEDIA_PLAYER_DOMAIN, SERVICE_CLEAR_PLAYLIST, + SERVICE_JOIN, + SERVICE_UNJOIN, ) from homeassistant.components.music_assistant.const import DOMAIN as MASS_DOMAIN from homeassistant.components.music_assistant.media_player import ( @@ -269,6 +272,71 @@ async def test_media_player_repeat_set_action( ) +async def test_media_player_join_players_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player entity join_players action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = hass.states.get(entity_id) + assert state + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_JOIN, + { + ATTR_ENTITY_ID: entity_id, + ATTR_GROUP_MEMBERS: ["media_player.my_super_test_player_2"], + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "players/cmd/group_many", + target_player=mass_player_id, + child_player_ids=["00:00:00:00:00:02"], + ) + # test again with invalid source player + music_assistant_client.send_command.reset_mock() + with pytest.raises( + HomeAssistantError, match="Entity media_player.blah_blah not found" + ): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_JOIN, + { + ATTR_ENTITY_ID: entity_id, + ATTR_GROUP_MEMBERS: ["media_player.blah_blah"], + }, + blocking=True, + ) + + +async def test_media_player_unjoin_player_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player entity unjoin player action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = hass.states.get(entity_id) + assert state + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_UNJOIN, + { + ATTR_ENTITY_ID: entity_id, + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "players/cmd/ungroup", player_id=mass_player_id + ) + + async def 
test_media_player_clear_playlist_action( hass: HomeAssistant, music_assistant_client: MagicMock, From d9cef1e7080d69b01d6a75fca5f5c074119ba03b Mon Sep 17 00:00:00 2001 From: Josef Zweck <24647999+zweckj@users.noreply.github.com> Date: Sat, 30 Nov 2024 04:31:56 +0100 Subject: [PATCH 0087/1198] Guard against hostname change in lamarzocco discovery (#131873) * Guard against hostname change in lamarzocco discovery * switch to abort_entries_match --- .../components/lamarzocco/config_flow.py | 1 + .../components/lamarzocco/test_config_flow.py | 21 +++++++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/homeassistant/components/lamarzocco/config_flow.py b/homeassistant/components/lamarzocco/config_flow.py index 0f288e22c4a..a727e3fe357 100644 --- a/homeassistant/components/lamarzocco/config_flow.py +++ b/homeassistant/components/lamarzocco/config_flow.py @@ -291,6 +291,7 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): CONF_ADDRESS: discovery_info.macaddress, } ) + self._async_abort_entries_match({CONF_ADDRESS: discovery_info.macaddress}) _LOGGER.debug( "Discovered La Marzocco machine %s through DHCP at address %s", diff --git a/tests/components/lamarzocco/test_config_flow.py b/tests/components/lamarzocco/test_config_flow.py index f8103ac3054..b206b7b68a3 100644 --- a/tests/components/lamarzocco/test_config_flow.py +++ b/tests/components/lamarzocco/test_config_flow.py @@ -493,6 +493,27 @@ async def test_dhcp_discovery( } +async def test_dhcp_discovery_abort_on_hostname_changed( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_cloud_client: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test dhcp discovery aborts when hostname was changed manually.""" + mock_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_DHCP}, + data=DhcpServiceInfo( + ip="192.168.1.42", + hostname="custom_name", + macaddress="00:00:00:00:00:00", + ), + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + async def test_dhcp_already_configured_and_update( hass: HomeAssistant, mock_lamarzocco: MagicMock, From e8ced4fa1276153cc8017687fedce4eb54c52b2e Mon Sep 17 00:00:00 2001 From: Josef Zweck <24647999+zweckj@users.noreply.github.com> Date: Sat, 30 Nov 2024 04:32:20 +0100 Subject: [PATCH 0088/1198] Bump aioacaia to 0.1.10 (#131906) --- homeassistant/components/acaia/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/acaia/manifest.json b/homeassistant/components/acaia/manifest.json index 49b3489cf9a..3f3e1c14d58 100644 --- a/homeassistant/components/acaia/manifest.json +++ b/homeassistant/components/acaia/manifest.json @@ -25,5 +25,5 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["aioacaia"], - "requirements": ["aioacaia==0.1.9"] + "requirements": ["aioacaia==0.1.10"] } diff --git a/requirements_all.txt b/requirements_all.txt index 6e5737e3c77..2c06f7f7763 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -173,7 +173,7 @@ aio-geojson-usgs-earthquakes==0.3 aio-georss-gdacs==0.10 # homeassistant.components.acaia -aioacaia==0.1.9 +aioacaia==0.1.10 # homeassistant.components.airq aioairq==0.4.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 0ff38cdc407..8d38f0fb0a9 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -161,7 +161,7 @@ aio-geojson-usgs-earthquakes==0.3 
aio-georss-gdacs==0.10 # homeassistant.components.acaia -aioacaia==0.1.9 +aioacaia==0.1.10 # homeassistant.components.airq aioairq==0.4.3 From 2c1a754e5de421ca3d1708e1574112ac07010051 Mon Sep 17 00:00:00 2001 From: karwosts <32912880+karwosts@users.noreply.github.com> Date: Fri, 29 Nov 2024 21:25:59 -0800 Subject: [PATCH 0089/1198] Make uploaded images browsable in media (#131468) * Make uploaded images browsable in media * tests * Update homeassistant/components/image_upload/media_source.py Co-authored-by: Martin Hjelmare * use executor * more executor * use thumbnail --------- Co-authored-by: Martin Hjelmare --- .../components/image_upload/media_source.py | 76 ++++++++++++++++ .../image_upload/test_media_source.py | 90 +++++++++++++++++++ 2 files changed, 166 insertions(+) create mode 100644 homeassistant/components/image_upload/media_source.py create mode 100644 tests/components/image_upload/test_media_source.py diff --git a/homeassistant/components/image_upload/media_source.py b/homeassistant/components/image_upload/media_source.py new file mode 100644 index 00000000000..ee9511e2c36 --- /dev/null +++ b/homeassistant/components/image_upload/media_source.py @@ -0,0 +1,76 @@ +"""Expose image_upload as media sources.""" + +from __future__ import annotations + +from homeassistant.components.media_player import BrowseError, MediaClass +from homeassistant.components.media_source import ( + BrowseMediaSource, + MediaSource, + MediaSourceItem, + PlayMedia, + Unresolvable, +) +from homeassistant.core import HomeAssistant + +from .const import DOMAIN + + +async def async_get_media_source(hass: HomeAssistant) -> ImageUploadMediaSource: + """Set up image media source.""" + return ImageUploadMediaSource(hass) + + +class ImageUploadMediaSource(MediaSource): + """Provide images as media sources.""" + + name: str = "Image Upload" + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize ImageMediaSource.""" + super().__init__(DOMAIN) + self.hass = hass + + async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia: + """Resolve media to a url.""" + image = self.hass.data[DOMAIN].data.get(item.identifier) + + if not image: + raise Unresolvable(f"Could not resolve media item: {item.identifier}") + + return PlayMedia( + f"/api/image/serve/{image['id']}/original", image["content_type"] + ) + + async def async_browse_media( + self, + item: MediaSourceItem, + ) -> BrowseMediaSource: + """Return media.""" + if item.identifier: + raise BrowseError("Unknown item") + + children = [ + BrowseMediaSource( + domain=DOMAIN, + identifier=image["id"], + media_class=MediaClass.IMAGE, + media_content_type=image["content_type"], + title=image["name"], + thumbnail=f"/api/image/serve/{image['id']}/256x256", + can_play=True, + can_expand=False, + ) + for image in self.hass.data[DOMAIN].data.values() + ] + + return BrowseMediaSource( + domain=DOMAIN, + identifier=None, + media_class=MediaClass.APP, + media_content_type="", + title="Image Upload", + can_play=False, + can_expand=True, + children_media_class=MediaClass.IMAGE, + children=children, + ) diff --git a/tests/components/image_upload/test_media_source.py b/tests/components/image_upload/test_media_source.py new file mode 100644 index 00000000000..d66e099bdc9 --- /dev/null +++ b/tests/components/image_upload/test_media_source.py @@ -0,0 +1,90 @@ +"""Test image_upload media source.""" + +import tempfile +from unittest.mock import patch + +from aiohttp import ClientSession +import pytest + +from homeassistant.components import media_source +from 
homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from . import TEST_IMAGE + +from tests.typing import ClientSessionGenerator + + +@pytest.fixture(autouse=True) +async def setup_media_source(hass: HomeAssistant) -> None: + """Set up media source.""" + assert await async_setup_component(hass, "media_source", {}) + + +async def __upload_test_image( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, +) -> str: + with ( + tempfile.TemporaryDirectory() as tempdir, + patch.object(hass.config, "path", return_value=tempdir), + ): + assert await async_setup_component(hass, "image_upload", {}) + client: ClientSession = await hass_client() + + file = await hass.async_add_executor_job(TEST_IMAGE.open, "rb") + res = await client.post("/api/image/upload", data={"file": file}) + hass.async_add_executor_job(file.close) + + assert res.status == 200 + item = await res.json() + assert item["content_type"] == "image/png" + assert item["filesize"] == 38847 + return item["id"] + + +async def test_browsing( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, +) -> None: + """Test browsing image media source.""" + image_id = await __upload_test_image(hass, hass_client) + + item = await media_source.async_browse_media(hass, "media-source://image_upload") + + assert item is not None + assert item.title == "Image Upload" + assert len(item.children) == 1 + assert item.children[0].media_content_type == "image/png" + assert item.children[0].identifier == image_id + assert item.children[0].thumbnail == f"/api/image/serve/{image_id}/256x256" + + with pytest.raises( + media_source.BrowseError, + match="Unknown item", + ): + await media_source.async_browse_media( + hass, "media-source://image_upload/invalid_path" + ) + + +async def test_resolving( + hass: HomeAssistant, hass_client: ClientSessionGenerator +) -> None: + """Test resolving.""" + image_id = await __upload_test_image(hass, hass_client) + item = await media_source.async_resolve_media( + hass, f"media-source://image_upload/{image_id}", None + ) + assert item is not None + assert item.url == f"/api/image/serve/{image_id}/original" + assert item.mime_type == "image/png" + + invalid_id = "aabbccddeeff" + with pytest.raises( + media_source.Unresolvable, + match=f"Could not resolve media item: {invalid_id}", + ): + await media_source.async_resolve_media( + hass, f"media-source://image_upload/{invalid_id}", None + ) From 9209e43e4cc2e0e7c05743082ae67d03e4f27e1d Mon Sep 17 00:00:00 2001 From: karwosts <32912880+karwosts@users.noreply.github.com> Date: Fri, 29 Nov 2024 21:43:31 -0800 Subject: [PATCH 0090/1198] Fix history stats count update immediately after change (#131856) * Fix history stats count update immediately after change * rerun CI --- homeassistant/components/history_stats/data.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/history_stats/data.py b/homeassistant/components/history_stats/data.py index 40cf351fd9e..f9b79d74cb4 100644 --- a/homeassistant/components/history_stats/data.py +++ b/homeassistant/components/history_stats/data.py @@ -4,6 +4,8 @@ from __future__ import annotations from dataclasses import dataclass import datetime +import logging +import math from homeassistant.components.recorder import get_instance, history from homeassistant.core import Event, EventStateChangedData, HomeAssistant, State @@ -14,6 +16,8 @@ from .helpers import async_calculate_period, floored_timestamp MIN_TIME_UTC = 
datetime.datetime.min.replace(tzinfo=dt_util.UTC) +_LOGGER = logging.getLogger(__name__) + @dataclass class HistoryStatsState: @@ -186,8 +190,13 @@ class HistoryStats: current_state_matches = history_state.state in self._entity_states state_change_timestamp = history_state.last_changed - if state_change_timestamp > now_timestamp: + if math.floor(state_change_timestamp) > now_timestamp: # Shouldn't count states that are in the future + _LOGGER.debug( + "Skipping future timestamp %s (now %s)", + state_change_timestamp, + now_timestamp, + ) continue if previous_state_matches: From 5d71533c7b8085a6b6c5ee4cb219ad4291f50527 Mon Sep 17 00:00:00 2001 From: "Glenn Vandeuren (aka Iondependent)" Date: Sat, 30 Nov 2024 09:30:24 +0100 Subject: [PATCH 0091/1198] Fix modbus state not dumped on restart (#131319) * Fix modbus state not dumped on restart * Update test_init.py * Set event back to stop * Update test_init.py --------- Co-authored-by: VandeurenGlenn <8685280+VandeurenGlenn@users.noreply.github.com> --- homeassistant/components/modbus/modbus.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/homeassistant/components/modbus/modbus.py b/homeassistant/components/modbus/modbus.py index d85b4e0e67f..18d91f8dd3b 100644 --- a/homeassistant/components/modbus/modbus.py +++ b/homeassistant/components/modbus/modbus.py @@ -158,8 +158,6 @@ async def async_modbus_setup( async def async_stop_modbus(event: Event) -> None: """Stop Modbus service.""" - - async_dispatcher_send(hass, SIGNAL_STOP_ENTITY) for client in hub_collect.values(): await client.async_close() From 92204e6c9270d1087f2c682c30f58b0d5c549e10 Mon Sep 17 00:00:00 2001 From: Andrew Jackson Date: Sat, 30 Nov 2024 11:15:19 +0000 Subject: [PATCH 0092/1198] Bump aiomealie to 0.9.4 (#131951) --- homeassistant/components/mealie/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../mealie/snapshots/test_diagnostics.ambr | 66 +++++++++---------- .../mealie/snapshots/test_services.ambr | 36 +++++----- 5 files changed, 54 insertions(+), 54 deletions(-) diff --git a/homeassistant/components/mealie/manifest.json b/homeassistant/components/mealie/manifest.json index f594f1398e3..c555fcbc3d6 100644 --- a/homeassistant/components/mealie/manifest.json +++ b/homeassistant/components/mealie/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/mealie", "integration_type": "service", "iot_class": "local_polling", - "requirements": ["aiomealie==0.9.3"] + "requirements": ["aiomealie==0.9.4"] } diff --git a/requirements_all.txt b/requirements_all.txt index 2c06f7f7763..42146d2b116 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -298,7 +298,7 @@ aiolookin==1.0.0 aiolyric==2.0.1 # homeassistant.components.mealie -aiomealie==0.9.3 +aiomealie==0.9.4 # homeassistant.components.modern_forms aiomodernforms==0.1.8 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8d38f0fb0a9..1215799f132 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -280,7 +280,7 @@ aiolookin==1.0.0 aiolyric==2.0.1 # homeassistant.components.mealie -aiomealie==0.9.3 +aiomealie==0.9.4 # homeassistant.components.modern_forms aiomodernforms==0.1.8 diff --git a/tests/components/mealie/snapshots/test_diagnostics.ambr b/tests/components/mealie/snapshots/test_diagnostics.ambr index ecb5d1d6cd1..a694c72fcf6 100644 --- a/tests/components/mealie/snapshots/test_diagnostics.ambr +++ b/tests/components/mealie/snapshots/test_diagnostics.ambr @@ -15,7 +15,7 @@ '__type': "", 'isoformat': 
'2024-01-23', }), - 'mealplan_id': '229', + 'mealplan_id': 229, 'recipe': dict({ 'description': 'The BEST Roast Chicken recipe is simple, budget friendly, and gives you a tender, mouth-watering chicken full of flavor! Served with roasted vegetables, this recipe is simple enough for any cook!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -42,7 +42,7 @@ '__type': "", 'isoformat': '2024-01-22', }), - 'mealplan_id': '230', + 'mealplan_id': 230, 'recipe': dict({ 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -67,7 +67,7 @@ '__type': "", 'isoformat': '2024-01-23', }), - 'mealplan_id': '222', + 'mealplan_id': 222, 'recipe': dict({ 'description': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο από τον Άκη Πετρετζίκη. Φτιάξτε την πιο εύκολη μακαρονάδα με κεφτεδάκια σε μόνο ένα σκεύος.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -92,7 +92,7 @@ '__type': "", 'isoformat': '2024-01-23', }), - 'mealplan_id': '221', + 'mealplan_id': 221, 'recipe': dict({ 'description': 'Delicious Greek turkey meatballs with lemon orzo, tender veggies, and a creamy feta yogurt sauce. These healthy baked Greek turkey meatballs are filled with tons of wonderful herbs and make the perfect protein-packed weeknight meal!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -117,7 +117,7 @@ '__type': "", 'isoformat': '2024-01-23', }), - 'mealplan_id': '219', + 'mealplan_id': 219, 'recipe': dict({ 'description': 'This is a modified Pampered Chef recipe. You can use a trifle bowl or large glass punch/salad bowl to show it off. It is really easy to make and I never have any leftovers. Cook time includes chill time.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -142,7 +142,7 @@ '__type': "", 'isoformat': '2024-01-22', }), - 'mealplan_id': '217', + 'mealplan_id': 217, 'recipe': dict({ 'description': 'Cheeseburger Sliders are juicy, cheesy and beefy - everything we love about classic burgers! 
These sliders are quick and easy plus they are make-ahead and reheat really well.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -167,7 +167,7 @@ '__type': "", 'isoformat': '2024-01-23', }), - 'mealplan_id': '212', + 'mealplan_id': 212, 'recipe': dict({ 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -192,7 +192,7 @@ '__type': "", 'isoformat': '2024-01-22', }), - 'mealplan_id': '211', + 'mealplan_id': 211, 'recipe': dict({ 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -217,7 +217,7 @@ '__type': "", 'isoformat': '2024-01-23', }), - 'mealplan_id': '196', + 'mealplan_id': 196, 'recipe': dict({ 'description': 'Simple to prepare and ready in 25 minutes, this vegetarian miso noodle recipe can be eaten on its own or served as a side.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -242,7 +242,7 @@ '__type': "", 'isoformat': '2024-01-22', }), - 'mealplan_id': '195', + 'mealplan_id': 195, 'recipe': dict({ 'description': 'Avis aux nostalgiques des années 1980, la mousse de saumon est de retour dans une présentation adaptée au goût du jour. On utilise une technique sans faille : un saumon frais cuit au micro-ondes et mélangé au robot avec du fromage à la crème et de la crème sure. On obtient ainsi une texture onctueuse à tartiner, qui n’a rien à envier aux préparations gélatineuses d’antan !', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -267,7 +267,7 @@ '__type': "", 'isoformat': '2024-01-21', }), - 'mealplan_id': '1', + 'mealplan_id': 1, 'recipe': None, 'title': 'Aquavite', 'user_id': '6caa6e4d-521f-4ef4-9ed7-388bdd63f47d', @@ -283,7 +283,7 @@ '__type': "", 'isoformat': '2024-01-23', }), - 'mealplan_id': '226', + 'mealplan_id': 226, 'recipe': dict({ 'description': 'Te explicamos paso a paso, de manera sencilla, la elaboración de la receta de pollo al curry con leche de coco en 10 minutos. Ingredientes, tiempo de...', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -308,7 +308,7 @@ '__type': "", 'isoformat': '2024-01-23', }), - 'mealplan_id': '224', + 'mealplan_id': 224, 'recipe': dict({ 'description': 'bourguignon, oignon, carotte, bouquet garni, vin rouge, beurre, sel, poivre', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -333,7 +333,7 @@ '__type': "", 'isoformat': '2024-01-22', }), - 'mealplan_id': '216', + 'mealplan_id': 216, 'recipe': dict({ 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -360,7 +360,7 @@ '__type': "", 'isoformat': '2024-01-23', }), - 'mealplan_id': '220', + 'mealplan_id': 220, 'recipe': dict({ 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. 
Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -385,15 +385,15 @@ 'checked': False, 'disable_amount': True, 'display': '2 Apples', - 'food_id': 'None', + 'food_id': None, 'is_food': False, 'item_id': 'f45430f7-3edf-45a9-a50f-73bb375090be', - 'label_id': 'None', + 'label_id': None, 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', 'note': 'Apples', 'position': 0, 'quantity': 2.0, - 'unit_id': 'None', + 'unit_id': None, }), dict({ 'checked': False, @@ -402,7 +402,7 @@ 'food_id': '09322430-d24c-4b1a-abb6-22b6ed3a88f5', 'is_food': True, 'item_id': '84d8fd74-8eb0-402e-84b6-71f251bfb7cc', - 'label_id': 'None', + 'label_id': None, 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', 'note': '', 'position': 1, @@ -416,12 +416,12 @@ 'food_id': '96801494-4e26-4148-849a-8155deb76327', 'is_food': True, 'item_id': '69913b9a-7c75-4935-abec-297cf7483f88', - 'label_id': 'None', + 'label_id': None, 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', 'note': '', 'position': 2, 'quantity': 0.0, - 'unit_id': 'None', + 'unit_id': None, }), ]), 'shopping_list': dict({ @@ -435,15 +435,15 @@ 'checked': False, 'disable_amount': True, 'display': '2 Apples', - 'food_id': 'None', + 'food_id': None, 'is_food': False, 'item_id': 'f45430f7-3edf-45a9-a50f-73bb375090be', - 'label_id': 'None', + 'label_id': None, 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', 'note': 'Apples', 'position': 0, 'quantity': 2.0, - 'unit_id': 'None', + 'unit_id': None, }), dict({ 'checked': False, @@ -452,7 +452,7 @@ 'food_id': '09322430-d24c-4b1a-abb6-22b6ed3a88f5', 'is_food': True, 'item_id': '84d8fd74-8eb0-402e-84b6-71f251bfb7cc', - 'label_id': 'None', + 'label_id': None, 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', 'note': '', 'position': 1, @@ -466,12 +466,12 @@ 'food_id': '96801494-4e26-4148-849a-8155deb76327', 'is_food': True, 'item_id': '69913b9a-7c75-4935-abec-297cf7483f88', - 'label_id': 'None', + 'label_id': None, 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', 'note': '', 'position': 2, 'quantity': 0.0, - 'unit_id': 'None', + 'unit_id': None, }), ]), 'shopping_list': dict({ @@ -485,15 +485,15 @@ 'checked': False, 'disable_amount': True, 'display': '2 Apples', - 'food_id': 'None', + 'food_id': None, 'is_food': False, 'item_id': 'f45430f7-3edf-45a9-a50f-73bb375090be', - 'label_id': 'None', + 'label_id': None, 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', 'note': 'Apples', 'position': 0, 'quantity': 2.0, - 'unit_id': 'None', + 'unit_id': None, }), dict({ 'checked': False, @@ -502,7 +502,7 @@ 'food_id': '09322430-d24c-4b1a-abb6-22b6ed3a88f5', 'is_food': True, 'item_id': '84d8fd74-8eb0-402e-84b6-71f251bfb7cc', - 'label_id': 'None', + 'label_id': None, 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', 'note': '', 'position': 1, @@ -516,12 +516,12 @@ 'food_id': '96801494-4e26-4148-849a-8155deb76327', 'is_food': True, 'item_id': '69913b9a-7c75-4935-abec-297cf7483f88', - 'label_id': 'None', + 'label_id': None, 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', 'note': '', 'position': 2, 'quantity': 0.0, - 'unit_id': 'None', + 'unit_id': None, }), ]), 'shopping_list': dict({ diff --git a/tests/components/mealie/snapshots/test_services.ambr b/tests/components/mealie/snapshots/test_services.ambr index 93b5f2cad1d..4f9ee6a5c09 100644 --- a/tests/components/mealie/snapshots/test_services.ambr +++ b/tests/components/mealie/snapshots/test_services.ambr @@ -199,7 +199,7 @@ 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', 'household_id': None, 
'mealplan_date': FakeDate(2024, 1, 22), - 'mealplan_id': '230', + 'mealplan_id': 230, 'recipe': dict({ 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -221,7 +221,7 @@ 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), - 'mealplan_id': '229', + 'mealplan_id': 229, 'recipe': dict({ 'description': 'The BEST Roast Chicken recipe is simple, budget friendly, and gives you a tender, mouth-watering chicken full of flavor! Served with roasted vegetables, this recipe is simple enough for any cook!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -243,7 +243,7 @@ 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), - 'mealplan_id': '226', + 'mealplan_id': 226, 'recipe': dict({ 'description': 'Te explicamos paso a paso, de manera sencilla, la elaboración de la receta de pollo al curry con leche de coco en 10 minutos. Ingredientes, tiempo de...', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -265,7 +265,7 @@ 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), - 'mealplan_id': '224', + 'mealplan_id': 224, 'recipe': dict({ 'description': 'bourguignon, oignon, carotte, bouquet garni, vin rouge, beurre, sel, poivre', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -287,7 +287,7 @@ 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), - 'mealplan_id': '222', + 'mealplan_id': 222, 'recipe': dict({ 'description': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο από τον Άκη Πετρετζίκη. Φτιάξτε την πιο εύκολη μακαρονάδα με κεφτεδάκια σε μόνο ένα σκεύος.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -309,7 +309,7 @@ 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), - 'mealplan_id': '221', + 'mealplan_id': 221, 'recipe': dict({ 'description': 'Delicious Greek turkey meatballs with lemon orzo, tender veggies, and a creamy feta yogurt sauce. These healthy baked Greek turkey meatballs are filled with tons of wonderful herbs and make the perfect protein-packed weeknight meal!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -331,7 +331,7 @@ 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), - 'mealplan_id': '220', + 'mealplan_id': 220, 'recipe': dict({ 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. 
Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -353,7 +353,7 @@ 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), - 'mealplan_id': '219', + 'mealplan_id': 219, 'recipe': dict({ 'description': 'This is a modified Pampered Chef recipe. You can use a trifle bowl or large glass punch/salad bowl to show it off. It is really easy to make and I never have any leftovers. Cook time includes chill time.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -375,7 +375,7 @@ 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 22), - 'mealplan_id': '217', + 'mealplan_id': 217, 'recipe': dict({ 'description': 'Cheeseburger Sliders are juicy, cheesy and beefy - everything we love about classic burgers! These sliders are quick and easy plus they are make-ahead and reheat really well.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -397,7 +397,7 @@ 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 22), - 'mealplan_id': '216', + 'mealplan_id': 216, 'recipe': dict({ 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -419,7 +419,7 @@ 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), - 'mealplan_id': '212', + 'mealplan_id': 212, 'recipe': dict({ 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -441,7 +441,7 @@ 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 22), - 'mealplan_id': '211', + 'mealplan_id': 211, 'recipe': dict({ 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -463,7 +463,7 @@ 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), - 'mealplan_id': '196', + 'mealplan_id': 196, 'recipe': dict({ 'description': 'Simple to prepare and ready in 25 minutes, this vegetarian miso noodle recipe can be eaten on its own or served as a side.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -485,7 +485,7 @@ 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 22), - 'mealplan_id': '195', + 'mealplan_id': 195, 'recipe': dict({ 'description': 'Avis aux nostalgiques des années 1980, la mousse de saumon est de retour dans une présentation adaptée au goût du jour. On utilise une technique sans faille : un saumon frais cuit au micro-ondes et mélangé au robot avec du fromage à la crème et de la crème sure. 
On obtient ainsi une texture onctueuse à tartiner, qui n’a rien à envier aux préparations gélatineuses d’antan !', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -507,7 +507,7 @@ 'group_id': '3931df86-0679-4579-8c63-4bedc9ca9a85', 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 21), - 'mealplan_id': '1', + 'mealplan_id': 1, 'recipe': None, 'title': 'Aquavite', 'user_id': '6caa6e4d-521f-4ef4-9ed7-388bdd63f47d', @@ -714,7 +714,7 @@ 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', 'household_id': None, 'mealplan_date': datetime.date(2024, 1, 22), - 'mealplan_id': '230', + 'mealplan_id': 230, 'recipe': dict({ 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -740,7 +740,7 @@ 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', 'household_id': None, 'mealplan_date': datetime.date(2024, 1, 22), - 'mealplan_id': '230', + 'mealplan_id': 230, 'recipe': dict({ 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', @@ -766,7 +766,7 @@ 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', 'household_id': None, 'mealplan_date': datetime.date(2024, 1, 22), - 'mealplan_id': '230', + 'mealplan_id': 230, 'recipe': dict({ 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. 
Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', From 74522390ad36ef7a07f19b6ec6aab1b85cd97d45 Mon Sep 17 00:00:00 2001 From: "Glenn Vandeuren (aka Iondependent)" Date: Sat, 30 Nov 2024 12:16:12 +0100 Subject: [PATCH 0093/1198] Add config flow to NHC (#130554) Co-authored-by: Joost Lekkerkerker Co-authored-by: VandeurenGlenn <8685280+VandeurenGlenn@users.noreply.github.com> --- CODEOWNERS | 2 + .../components/niko_home_control/__init__.py | 84 ++++++++++- .../niko_home_control/config_flow.py | 66 +++++++++ .../components/niko_home_control/const.py | 3 + .../components/niko_home_control/light.py | 106 ++++++------- .../niko_home_control/manifest.json | 4 +- .../components/niko_home_control/strings.json | 27 ++++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 2 +- requirements_test_all.txt | 3 + .../components/niko_home_control/__init__.py | 13 ++ .../components/niko_home_control/conftest.py | 43 ++++++ .../niko_home_control/test_config_flow.py | 140 ++++++++++++++++++ 13 files changed, 440 insertions(+), 54 deletions(-) create mode 100644 homeassistant/components/niko_home_control/config_flow.py create mode 100644 homeassistant/components/niko_home_control/const.py create mode 100644 homeassistant/components/niko_home_control/strings.json create mode 100644 tests/components/niko_home_control/__init__.py create mode 100644 tests/components/niko_home_control/conftest.py create mode 100644 tests/components/niko_home_control/test_config_flow.py diff --git a/CODEOWNERS b/CODEOWNERS index ba233c0c141..7755c3eb4ae 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1004,6 +1004,8 @@ build.json @home-assistant/supervisor /tests/components/nice_go/ @IceBotYT /homeassistant/components/nightscout/ @marciogranzotto /tests/components/nightscout/ @marciogranzotto +/homeassistant/components/niko_home_control/ @VandeurenGlenn +/tests/components/niko_home_control/ @VandeurenGlenn /homeassistant/components/nilu/ @hfurubotten /homeassistant/components/nina/ @DeerMaximum /tests/components/nina/ @DeerMaximum diff --git a/homeassistant/components/niko_home_control/__init__.py b/homeassistant/components/niko_home_control/__init__.py index 2cb5c70d1dd..bdbb8d6b85f 100644 --- a/homeassistant/components/niko_home_control/__init__.py +++ b/homeassistant/components/niko_home_control/__init__.py @@ -1 +1,83 @@ -"""The niko_home_control component.""" +"""The Niko home control integration.""" + +from __future__ import annotations + +from datetime import timedelta +import logging + +from nclib.errors import NetcatError +from nikohomecontrol import NikoHomeControl + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.util import Throttle + +PLATFORMS: list[Platform] = [Platform.LIGHT] + +type NikoHomeControlConfigEntry = ConfigEntry[NikoHomeControlData] + + +_LOGGER = 
logging.getLogger(__name__) +MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=1) + + +async def async_setup_entry( + hass: HomeAssistant, entry: NikoHomeControlConfigEntry +) -> bool: + """Set Niko Home Control from a config entry.""" + try: + controller = NikoHomeControl({"ip": entry.data[CONF_HOST], "port": 8000}) + niko_data = NikoHomeControlData(hass, controller) + await niko_data.async_update() + except NetcatError as err: + raise ConfigEntryNotReady("cannot connect to controller.") from err + except OSError as err: + raise ConfigEntryNotReady( + "unknown error while connecting to controller." + ) from err + + entry.runtime_data = niko_data + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry( + hass: HomeAssistant, entry: NikoHomeControlConfigEntry +) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +class NikoHomeControlData: + """The class for handling data retrieval.""" + + def __init__(self, hass, nhc): + """Set up Niko Home Control Data object.""" + self.nhc = nhc + self.hass = hass + self.available = True + self.data = {} + self._system_info = None + + @Throttle(MIN_TIME_BETWEEN_UPDATES) + async def async_update(self): + """Get the latest data from the NikoHomeControl API.""" + _LOGGER.debug("Fetching async state in bulk") + try: + self.data = await self.hass.async_add_executor_job( + self.nhc.list_actions_raw + ) + self.available = True + except OSError as ex: + _LOGGER.error("Unable to retrieve data from Niko, %s", str(ex)) + self.available = False + + def get_state(self, aid): + """Find and filter state based on action id.""" + for state in self.data: + if state["id"] == aid: + return state["value1"] + _LOGGER.error("Failed to retrieve state off unknown light") + return None diff --git a/homeassistant/components/niko_home_control/config_flow.py b/homeassistant/components/niko_home_control/config_flow.py new file mode 100644 index 00000000000..9174a932534 --- /dev/null +++ b/homeassistant/components/niko_home_control/config_flow.py @@ -0,0 +1,66 @@ +"""Config flow for the Niko home control integration.""" + +from __future__ import annotations + +from typing import Any + +from nikohomecontrol import NikoHomeControlConnection +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_HOST + +from .const import DOMAIN + +DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_HOST): str, + } +) + + +def test_connection(host: str) -> str | None: + """Test if we can connect to the Niko Home Control controller.""" + try: + NikoHomeControlConnection(host, 8000) + except Exception: # noqa: BLE001 + return "cannot_connect" + return None + + +class NikoHomeControlConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Niko Home Control.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors = {} + + if user_input is not None: + self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]}) + error = test_connection(user_input[CONF_HOST]) + if not error: + return self.async_create_entry( + title="Niko Home Control", + data=user_input, + ) + errors["base"] = error + + return self.async_show_form( + step_id="user", data_schema=DATA_SCHEMA, errors=errors + ) + + async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult: + """Import a 
config entry.""" + self._async_abort_entries_match({CONF_HOST: import_info[CONF_HOST]}) + error = test_connection(import_info[CONF_HOST]) + + if not error: + return self.async_create_entry( + title="Niko Home Control", + data={CONF_HOST: import_info[CONF_HOST]}, + ) + return self.async_abort(reason=error) diff --git a/homeassistant/components/niko_home_control/const.py b/homeassistant/components/niko_home_control/const.py new file mode 100644 index 00000000000..202b031b9a2 --- /dev/null +++ b/homeassistant/components/niko_home_control/const.py @@ -0,0 +1,3 @@ +"""Constants for niko_home_control integration.""" + +DOMAIN = "niko_home_control" diff --git a/homeassistant/components/niko_home_control/light.py b/homeassistant/components/niko_home_control/light.py index b2d41f3a41e..f2bf302eab7 100644 --- a/homeassistant/components/niko_home_control/light.py +++ b/homeassistant/components/niko_home_control/light.py @@ -1,4 +1,4 @@ -"""Support for Niko Home Control.""" +"""Light platform Niko Home Control.""" from __future__ import annotations @@ -6,7 +6,6 @@ from datetime import timedelta import logging from typing import Any -import nikohomecontrol import voluptuous as vol from homeassistant.components.light import ( @@ -16,18 +15,22 @@ from homeassistant.components.light import ( LightEntity, brightness_supported, ) +from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import CONF_HOST -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import PlatformNotReady +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import issue_registry as ir import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.util import Throttle + +from . 
import NikoHomeControlConfigEntry +from .const import DOMAIN _LOGGER = logging.getLogger(__name__) -MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=1) SCAN_INTERVAL = timedelta(seconds=30) +# delete after 2025.7.0 PLATFORM_SCHEMA = LIGHT_PLATFORM_SCHEMA.extend({vol.Required(CONF_HOST): cv.string}) @@ -38,20 +41,56 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Niko Home Control light platform.""" - host = config[CONF_HOST] - - try: - nhc = nikohomecontrol.NikoHomeControl( - {"ip": host, "port": 8000, "timeout": 20000} + # Start import flow + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=config + ) + if ( + result.get("type") == FlowResultType.ABORT + and result.get("reason") != "already_configured" + ): + ir.async_create_issue( + hass, + DOMAIN, + f"deprecated_yaml_import_issue_{result['reason']}", + breaks_in_ha_version="2025.7.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=ir.IssueSeverity.WARNING, + translation_key=f"deprecated_yaml_import_issue_{result['reason']}", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Niko Home Control", + }, ) - niko_data = NikoHomeControlData(hass, nhc) - await niko_data.async_update() - except OSError as err: - _LOGGER.error("Unable to access %s (%s)", host, err) - raise PlatformNotReady from err + return + + ir.async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2025.7.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=ir.IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Niko Home Control", + }, + ) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: NikoHomeControlConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Niko Home Control light entry.""" + niko_data = entry.runtime_data async_add_entities( - [NikoHomeControlLight(light, niko_data) for light in nhc.list_actions()], True + NikoHomeControlLight(light, niko_data) for light in niko_data.nhc.list_actions() ) @@ -88,36 +127,3 @@ class NikoHomeControlLight(LightEntity): self._attr_is_on = state != 0 if brightness_supported(self.supported_color_modes): self._attr_brightness = state * 2.55 - - -class NikoHomeControlData: - """The class for handling data retrieval.""" - - def __init__(self, hass, nhc): - """Set up Niko Home Control Data object.""" - self._nhc = nhc - self.hass = hass - self.available = True - self.data = {} - self._system_info = None - - @Throttle(MIN_TIME_BETWEEN_UPDATES) - async def async_update(self): - """Get the latest data from the NikoHomeControl API.""" - _LOGGER.debug("Fetching async state in bulk") - try: - self.data = await self.hass.async_add_executor_job( - self._nhc.list_actions_raw - ) - self.available = True - except OSError as ex: - _LOGGER.error("Unable to retrieve data from Niko, %s", str(ex)) - self.available = False - - def get_state(self, aid): - """Find and filter state based on action id.""" - for state in self.data: - if state["id"] == aid: - return state["value1"] - _LOGGER.error("Failed to retrieve state off unknown light") - return None diff --git a/homeassistant/components/niko_home_control/manifest.json b/homeassistant/components/niko_home_control/manifest.json index 316dc1dc958..194596d534f 100644 --- a/homeassistant/components/niko_home_control/manifest.json +++ b/homeassistant/components/niko_home_control/manifest.json 
@@ -1,10 +1,10 @@ { "domain": "niko_home_control", "name": "Niko Home Control", - "codeowners": [], + "codeowners": ["@VandeurenGlenn"], + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/niko_home_control", "iot_class": "local_polling", "loggers": ["nikohomecontrol"], - "quality_scale": "legacy", "requirements": ["niko-home-control==0.2.1"] } diff --git a/homeassistant/components/niko_home_control/strings.json b/homeassistant/components/niko_home_control/strings.json new file mode 100644 index 00000000000..495dca94c0c --- /dev/null +++ b/homeassistant/components/niko_home_control/strings.json @@ -0,0 +1,27 @@ +{ + "config": { + "step": { + "user": { + "description": "Set up your Niko Home Control instance.", + "data": { + "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of the Niko Home Control controller." + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + }, + "issues": { + "deprecated_yaml_import_issue_cannot_connect": { + "title": "YAML import failed due to a connection error", + "description": "Configuring {integration_title} using YAML is being removed but there was a connect error while importing your existing configuration.\nSetup will not proceed.\n\nVerify that your {integration_title} is operating correctly and restart Home Assistant to attempt the import again.\n\nAlternatively, you may remove the `{domain}` configuration from your configuration.yaml entirely, restart Home Assistant, and add the {integration_title} integration manually." + } + } +} diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index ffe61b915c6..9a75ac32ea1 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -406,6 +406,7 @@ FLOWS = { "nibe_heatpump", "nice_go", "nightscout", + "niko_home_control", "nina", "nmap_tracker", "nobo_hub", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 8238a09072b..9fee6abb894 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -4145,7 +4145,7 @@ "niko_home_control": { "name": "Niko Home Control", "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_polling" }, "nilu": { diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 1215799f132..feeb35017f3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1222,6 +1222,9 @@ nibe==2.13.0 # homeassistant.components.nice_go nice-go==0.3.10 +# homeassistant.components.niko_home_control +niko-home-control==0.2.1 + # homeassistant.components.nfandroidtv notifications-android-tv==0.1.5 diff --git a/tests/components/niko_home_control/__init__.py b/tests/components/niko_home_control/__init__.py new file mode 100644 index 00000000000..f6e8187bf0f --- /dev/null +++ b/tests/components/niko_home_control/__init__.py @@ -0,0 +1,13 @@ +"""Tests for the niko_home_control integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Set up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await 
hass.async_block_till_done() diff --git a/tests/components/niko_home_control/conftest.py b/tests/components/niko_home_control/conftest.py new file mode 100644 index 00000000000..932480ac710 --- /dev/null +++ b/tests/components/niko_home_control/conftest.py @@ -0,0 +1,43 @@ +"""niko_home_control integration tests configuration.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.niko_home_control.const import DOMAIN +from homeassistant.const import CONF_HOST + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override integration setup.""" + with patch( + "homeassistant.components.niko_home_control.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_niko_home_control_connection() -> Generator[AsyncMock]: + """Mock a NHC client.""" + with ( + patch( + "homeassistant.components.niko_home_control.config_flow.NikoHomeControlConnection", + autospec=True, + ) as mock_client, + ): + client = mock_client.return_value + client.return_value = True + yield client + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + domain=DOMAIN, title="Niko Home Control", data={CONF_HOST: "192.168.0.123"} + ) diff --git a/tests/components/niko_home_control/test_config_flow.py b/tests/components/niko_home_control/test_config_flow.py new file mode 100644 index 00000000000..8220ee15e02 --- /dev/null +++ b/tests/components/niko_home_control/test_config_flow.py @@ -0,0 +1,140 @@ +"""Test niko_home_control config flow.""" + +from unittest.mock import AsyncMock, patch + +from homeassistant.components.niko_home_control.const import DOMAIN +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_full_flow( + hass: HomeAssistant, + mock_niko_home_control_connection: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test the full flow.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.0.123"}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Niko Home Control" + assert result["data"] == {CONF_HOST: "192.168.0.123"} + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_cannot_connect(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: + """Test the cannot connect error.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + with patch( + "homeassistant.components.niko_home_control.config_flow.NikoHomeControlConnection", + side_effect=Exception, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.0.123"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + with patch( + "homeassistant.components.niko_home_control.config_flow.NikoHomeControlConnection" + 
): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.0.123"}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_duplicate_entry( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_config_entry: MockConfigEntry +) -> None: + """Test uniqueness.""" + + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.0.123"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_import_flow( + hass: HomeAssistant, + mock_niko_home_control_connection: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test the import flow.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data={CONF_HOST: "192.168.0.123"} + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Niko Home Control" + assert result["data"] == {CONF_HOST: "192.168.0.123"} + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_import_cannot_connect( + hass: HomeAssistant, mock_setup_entry: AsyncMock +) -> None: + """Test the cannot connect error.""" + + with patch( + "homeassistant.components.niko_home_control.config_flow.NikoHomeControlConnection", + side_effect=Exception, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data={CONF_HOST: "192.168.0.123"} + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + + +async def test_duplicate_import_entry( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_config_entry: MockConfigEntry +) -> None: + """Test uniqueness.""" + + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data={CONF_HOST: "192.168.0.123"} + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" From bd29aaffb86196aeb9e476bd2021c7492732cbcf Mon Sep 17 00:00:00 2001 From: Oliver <10700296+ol-iver@users.noreply.github.com> Date: Sat, 30 Nov 2024 17:27:31 +0100 Subject: [PATCH 0094/1198] Bump denonavr to v1.0.1 (#131882) --- homeassistant/components/denonavr/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/denonavr/manifest.json b/homeassistant/components/denonavr/manifest.json index eff70b94a18..328ab504bd1 100644 --- a/homeassistant/components/denonavr/manifest.json +++ b/homeassistant/components/denonavr/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/denonavr", "iot_class": "local_push", "loggers": ["denonavr"], - "requirements": ["denonavr==1.0.0"], + "requirements": ["denonavr==1.0.1"], "ssdp": [ { "manufacturer": "Denon", diff --git a/requirements_all.txt b/requirements_all.txt index 42146d2b116..6b873506166 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -752,7 +752,7 @@ deluge-client==1.10.2 demetriek==0.4.0 # homeassistant.components.denonavr -denonavr==1.0.0 +denonavr==1.0.1 # homeassistant.components.devialet devialet==1.4.5 diff --git a/requirements_test_all.txt 
b/requirements_test_all.txt index feeb35017f3..f476607999c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -642,7 +642,7 @@ deluge-client==1.10.2 demetriek==0.4.0 # homeassistant.components.denonavr -denonavr==1.0.0 +denonavr==1.0.1 # homeassistant.components.devialet devialet==1.4.5 From 6c6980a5502aea090690c1e11c8749dc6d9b6677 Mon Sep 17 00:00:00 2001 From: Josef Zweck <24647999+zweckj@users.noreply.github.com> Date: Sat, 30 Nov 2024 17:32:41 +0100 Subject: [PATCH 0095/1198] Improvements for bluetooth device for lamarzocco (#131875) --- homeassistant/components/lamarzocco/entity.py | 26 +++++++----- .../lamarzocco/snapshots/test_init.ambr | 41 +++++++++++++++++++ .../lamarzocco/snapshots/test_switch.ambr | 36 ---------------- .../components/lamarzocco/test_config_flow.py | 20 +++++++++ tests/components/lamarzocco/test_init.py | 36 +++++++++++++++- tests/components/lamarzocco/test_switch.py | 26 +----------- 6 files changed, 112 insertions(+), 73 deletions(-) create mode 100644 tests/components/lamarzocco/snapshots/test_init.ambr diff --git a/homeassistant/components/lamarzocco/entity.py b/homeassistant/components/lamarzocco/entity.py index f0942f51ace..5542906d887 100644 --- a/homeassistant/components/lamarzocco/entity.py +++ b/homeassistant/components/lamarzocco/entity.py @@ -6,8 +6,12 @@ from dataclasses import dataclass from pylamarzocco.const import FirmwareType from pylamarzocco.lm_machine import LaMarzoccoMachine -from homeassistant.const import CONF_ADDRESS -from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo +from homeassistant.const import CONF_ADDRESS, CONF_MAC +from homeassistant.helpers.device_registry import ( + CONNECTION_BLUETOOTH, + CONNECTION_NETWORK_MAC, + DeviceInfo, +) from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -48,17 +52,17 @@ class LaMarzoccoBaseEntity( serial_number=device.serial_number, sw_version=device.firmware[FirmwareType.MACHINE].current_version, ) + connections: set[tuple[str, str]] = set() if coordinator.config_entry.data.get(CONF_ADDRESS): - self._attr_device_info.update( - DeviceInfo( - connections={ - ( - CONNECTION_NETWORK_MAC, - coordinator.config_entry.data[CONF_ADDRESS], - ) - } - ) + connections.add( + (CONNECTION_NETWORK_MAC, coordinator.config_entry.data[CONF_ADDRESS]) ) + if coordinator.config_entry.data.get(CONF_MAC): + connections.add( + (CONNECTION_BLUETOOTH, coordinator.config_entry.data[CONF_MAC]) + ) + if connections: + self._attr_device_info.update(DeviceInfo(connections=connections)) class LaMarzoccoEntity(LaMarzoccoBaseEntity): diff --git a/tests/components/lamarzocco/snapshots/test_init.ambr b/tests/components/lamarzocco/snapshots/test_init.ambr new file mode 100644 index 00000000000..519a9301bfd --- /dev/null +++ b/tests/components/lamarzocco/snapshots/test_init.ambr @@ -0,0 +1,41 @@ +# serializer version: 1 +# name: test_device + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'bluetooth', + 'aa:bb:cc:dd:ee:ff', + ), + tuple( + 'mac', + '00:00:00:00:00:00', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'lamarzocco', + 'GS012345', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'La Marzocco', + 'model': , + 'model_id': , + 'name': 'GS012345', + 'name_by_user': None, + 'primary_config_entry': , + 
'serial_number': 'GS012345', + 'suggested_area': None, + 'sw_version': '1.40', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/lamarzocco/snapshots/test_switch.ambr b/tests/components/lamarzocco/snapshots/test_switch.ambr index 084b54b3f3a..79a305c998f 100644 --- a/tests/components/lamarzocco/snapshots/test_switch.ambr +++ b/tests/components/lamarzocco/snapshots/test_switch.ambr @@ -91,42 +91,6 @@ 'state': 'on', }) # --- -# name: test_device - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - tuple( - 'mac', - '00:00:00:00:00:00', - ), - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'lamarzocco', - 'GS012345', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'La Marzocco', - 'model': , - 'model_id': , - 'name': 'GS012345', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': 'GS012345', - 'suggested_area': None, - 'sw_version': '1.40', - 'via_device_id': None, - }) -# --- # name: test_switches[-set_power-kwargs0] StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/lamarzocco/test_config_flow.py b/tests/components/lamarzocco/test_config_flow.py index b206b7b68a3..e25aab39012 100644 --- a/tests/components/lamarzocco/test_config_flow.py +++ b/tests/components/lamarzocco/test_config_flow.py @@ -381,6 +381,26 @@ async def test_bluetooth_discovery( } +async def test_bluetooth_discovery_already_configured( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_cloud_client: MagicMock, + mock_setup_entry: Generator[AsyncMock], + mock_config_entry: MockConfigEntry, +) -> None: + """Test bluetooth discovery.""" + mock_config_entry.add_to_hass(hass) + + service_info = get_bluetooth_service_info( + mock_lamarzocco.model, mock_lamarzocco.serial_number + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_BLUETOOTH}, data=service_info + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + async def test_bluetooth_discovery_errors( hass: HomeAssistant, mock_lamarzocco: MagicMock, diff --git a/tests/components/lamarzocco/test_init.py b/tests/components/lamarzocco/test_init.py index 75c3019afb4..cb6b028bda0 100644 --- a/tests/components/lamarzocco/test_init.py +++ b/tests/components/lamarzocco/test_init.py @@ -5,6 +5,7 @@ from unittest.mock import AsyncMock, MagicMock, patch from pylamarzocco.const import FirmwareType from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful import pytest +from syrupy import SnapshotAssertion from websockets.protocol import State from homeassistant.components.lamarzocco.config_flow import CONF_MACHINE @@ -19,7 +20,11 @@ from homeassistant.const import ( EVENT_HOMEASSISTANT_STOP, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir +from homeassistant.helpers import ( + device_registry as dr, + entity_registry as er, + issue_registry as ir, +) from . 
import USER_INPUT, async_init_integration, get_bluetooth_service_info @@ -220,3 +225,32 @@ async def test_gateway_version_issue( issue_registry = ir.async_get(hass) issue = issue_registry.async_get_issue(DOMAIN, "unsupported_gateway_firmware") assert (issue is not None) == issue_exists + + +async def test_device( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the device.""" + + await async_init_integration(hass, mock_config_entry) + + hass.config_entries.async_update_entry( + mock_config_entry, + data={**mock_config_entry.data, CONF_MAC: "aa:bb:cc:dd:ee:ff"}, + ) + + state = hass.states.get(f"switch.{mock_lamarzocco.serial_number}") + assert state + + entry = entity_registry.async_get(state.entity_id) + assert entry + assert entry.device_id + + device = device_registry.async_get(entry.device_id) + assert device + assert device == snapshot diff --git a/tests/components/lamarzocco/test_switch.py b/tests/components/lamarzocco/test_switch.py index 5c6d1cb1e42..9082e6f4c09 100644 --- a/tests/components/lamarzocco/test_switch.py +++ b/tests/components/lamarzocco/test_switch.py @@ -15,7 +15,7 @@ from homeassistant.components.switch import ( from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er from . import WAKE_UP_SLEEP_ENTRY_IDS, async_init_integration @@ -88,30 +88,6 @@ async def test_switches( control_fn.assert_called_with(enabled=True, **kwargs) -async def test_device( - hass: HomeAssistant, - mock_lamarzocco: MagicMock, - mock_config_entry: MockConfigEntry, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test the device for one switch.""" - - await async_init_integration(hass, mock_config_entry) - - state = hass.states.get(f"switch.{mock_lamarzocco.serial_number}") - assert state - - entry = entity_registry.async_get(state.entity_id) - assert entry - assert entry.device_id - - device = device_registry.async_get(entry.device_id) - assert device - assert device == snapshot - - async def test_auto_on_off_switches( hass: HomeAssistant, mock_lamarzocco: MagicMock, From 6da2515d7a23430851b537a41e8fec5732302d9a Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Sat, 30 Nov 2024 17:32:53 +0100 Subject: [PATCH 0096/1198] Bump reolink_aio to 0.11.4 (#131957) --- homeassistant/components/reolink/host.py | 2 ++ homeassistant/components/reolink/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 5 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/reolink/host.py b/homeassistant/components/reolink/host.py index d2b2bba6276..a8e1de07642 100644 --- a/homeassistant/components/reolink/host.py +++ b/homeassistant/components/reolink/host.py @@ -536,6 +536,8 @@ class ReolinkHost: async def renew(self) -> None: """Renew the subscription of motion events (lease time is 15 minutes).""" + await self._api.baichuan.check_subscribe_events() + if self._api.baichuan.events_active and self._api.subscribed(SubType.push): # TCP push active, unsubscribe from ONVIF push because not needed self.unregister_webhook() diff --git a/homeassistant/components/reolink/manifest.json 
b/homeassistant/components/reolink/manifest.json index 4846ec8cb94..913864a92fa 100644 --- a/homeassistant/components/reolink/manifest.json +++ b/homeassistant/components/reolink/manifest.json @@ -18,5 +18,5 @@ "documentation": "https://www.home-assistant.io/integrations/reolink", "iot_class": "local_push", "loggers": ["reolink_aio"], - "requirements": ["reolink-aio==0.11.3"] + "requirements": ["reolink-aio==0.11.4"] } diff --git a/requirements_all.txt b/requirements_all.txt index 6b873506166..d25732ee81b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2556,7 +2556,7 @@ renault-api==0.2.7 renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.11.3 +reolink-aio==0.11.4 # homeassistant.components.idteck_prox rfk101py==0.0.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f476607999c..679b8fabd45 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2050,7 +2050,7 @@ renault-api==0.2.7 renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.11.3 +reolink-aio==0.11.4 # homeassistant.components.rflink rflink==0.0.66 From bcdac7ed3785473fb8962d48eab96a6133cb41ca Mon Sep 17 00:00:00 2001 From: Andy <4983703+krauseerl@users.noreply.github.com> Date: Sat, 30 Nov 2024 20:30:21 +0100 Subject: [PATCH 0097/1198] Add support for `linked_doorbell_sensor` to HomeKit locks (#131660) Co-authored-by: J. Nick Koston --- homeassistant/components/homekit/__init__.py | 3 + homeassistant/components/homekit/doorbell.py | 121 +++++++ .../components/homekit/type_cameras.py | 88 +---- .../components/homekit/type_locks.py | 5 +- homeassistant/components/homekit/util.py | 14 +- tests/components/homekit/test_type_locks.py | 301 +++++++++++++++++- tests/components/homekit/test_util.py | 16 +- 7 files changed, 456 insertions(+), 92 deletions(-) create mode 100644 homeassistant/components/homekit/doorbell.py diff --git a/homeassistant/components/homekit/__init__.py b/homeassistant/components/homekit/__init__.py index b85308ffd66..97fb17d7db5 100644 --- a/homeassistant/components/homekit/__init__.py +++ b/homeassistant/components/homekit/__init__.py @@ -33,6 +33,7 @@ from homeassistant.components.device_automation.trigger import ( from homeassistant.components.event import DOMAIN as EVENT_DOMAIN, EventDeviceClass from homeassistant.components.http import KEY_HASS, HomeAssistantView from homeassistant.components.humidifier import DOMAIN as HUMIDIFIER_DOMAIN +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN, SensorDeviceClass from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import ( @@ -1133,6 +1134,8 @@ class HomeKit: config[entity_id].setdefault( CONF_LINKED_MOTION_SENSOR, motion_binary_sensor_entity_id ) + + if domain in (CAMERA_DOMAIN, LOCK_DOMAIN): if doorbell_event_entity_id := lookup.get(DOORBELL_EVENT_SENSOR): config[entity_id].setdefault( CONF_LINKED_DOORBELL_SENSOR, doorbell_event_entity_id diff --git a/homeassistant/components/homekit/doorbell.py b/homeassistant/components/homekit/doorbell.py new file mode 100644 index 00000000000..45bbb2ea0ca --- /dev/null +++ b/homeassistant/components/homekit/doorbell.py @@ -0,0 +1,121 @@ +"""Extend the doorbell functions.""" + +from __future__ import annotations + +import logging +from typing import Any + +from pyhap.util import callback as pyhap_callback + +from homeassistant.const import STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN +from 
homeassistant.core import ( + Event, + EventStateChangedData, + HassJobType, + State, + callback as ha_callback, +) +from homeassistant.helpers.event import async_track_state_change_event + +from .accessories import HomeAccessory +from .const import ( + CHAR_MUTE, + CHAR_PROGRAMMABLE_SWITCH_EVENT, + CONF_LINKED_DOORBELL_SENSOR, + SERV_DOORBELL, + SERV_SPEAKER, + SERV_STATELESS_PROGRAMMABLE_SWITCH, +) +from .util import state_changed_event_is_same_state + +_LOGGER = logging.getLogger(__name__) + +DOORBELL_SINGLE_PRESS = 0 +DOORBELL_DOUBLE_PRESS = 1 +DOORBELL_LONG_PRESS = 2 + + +class HomeDoorbellAccessory(HomeAccessory): + """Accessory with optional doorbell.""" + + def __init__(self, *args: Any, **kwargs: Any) -> None: + """Initialize an Accessory object with optional attached doorbell.""" + super().__init__(*args, **kwargs) + self._char_doorbell_detected = None + self._char_doorbell_detected_switch = None + linked_doorbell_sensor: str | None + linked_doorbell_sensor = self.config.get(CONF_LINKED_DOORBELL_SENSOR) + self.linked_doorbell_sensor = linked_doorbell_sensor + self.doorbell_is_event = False + if not linked_doorbell_sensor: + return + self.doorbell_is_event = linked_doorbell_sensor.startswith("event.") + if not (state := self.hass.states.get(linked_doorbell_sensor)): + return + serv_doorbell = self.add_preload_service(SERV_DOORBELL) + self.set_primary_service(serv_doorbell) + self._char_doorbell_detected = serv_doorbell.configure_char( + CHAR_PROGRAMMABLE_SWITCH_EVENT, + value=0, + ) + serv_stateless_switch = self.add_preload_service( + SERV_STATELESS_PROGRAMMABLE_SWITCH + ) + self._char_doorbell_detected_switch = serv_stateless_switch.configure_char( + CHAR_PROGRAMMABLE_SWITCH_EVENT, + value=0, + valid_values={"SinglePress": DOORBELL_SINGLE_PRESS}, + ) + serv_speaker = self.add_preload_service(SERV_SPEAKER) + serv_speaker.configure_char(CHAR_MUTE, value=0) + self.async_update_doorbell_state(None, state) + + @ha_callback + @pyhap_callback # type: ignore[misc] + def run(self) -> None: + """Handle doorbell event.""" + if self._char_doorbell_detected: + assert self.linked_doorbell_sensor + self._subscriptions.append( + async_track_state_change_event( + self.hass, + self.linked_doorbell_sensor, + self.async_update_doorbell_state_event, + job_type=HassJobType.Callback, + ) + ) + + super().run() + + @ha_callback + def async_update_doorbell_state_event( + self, event: Event[EventStateChangedData] + ) -> None: + """Handle state change event listener callback.""" + if not state_changed_event_is_same_state(event) and ( + new_state := event.data["new_state"] + ): + self.async_update_doorbell_state(event.data["old_state"], new_state) + + @ha_callback + def async_update_doorbell_state( + self, old_state: State | None, new_state: State + ) -> None: + """Handle link doorbell sensor state change to update HomeKit value.""" + assert self._char_doorbell_detected + assert self._char_doorbell_detected_switch + state = new_state.state + if state == STATE_ON or ( + self.doorbell_is_event + and old_state is not None + and old_state.state != STATE_UNAVAILABLE + and state not in (STATE_UNKNOWN, STATE_UNAVAILABLE) + ): + self._char_doorbell_detected.set_value(DOORBELL_SINGLE_PRESS) + self._char_doorbell_detected_switch.set_value(DOORBELL_SINGLE_PRESS) + _LOGGER.debug( + "%s: Set linked doorbell %s sensor to %d", + self.entity_id, + self.linked_doorbell_sensor, + DOORBELL_SINGLE_PRESS, + ) diff --git a/homeassistant/components/homekit/type_cameras.py b/homeassistant/components/homekit/type_cameras.py index 
9e076f7d4d7..0fb2c2e7922 100644 --- a/homeassistant/components/homekit/type_cameras.py +++ b/homeassistant/components/homekit/type_cameras.py @@ -31,15 +31,12 @@ from homeassistant.helpers.event import ( ) from homeassistant.util.async_ import create_eager_task -from .accessories import TYPES, HomeAccessory, HomeDriver +from .accessories import TYPES, HomeDriver from .const import ( CHAR_MOTION_DETECTED, - CHAR_MUTE, - CHAR_PROGRAMMABLE_SWITCH_EVENT, CONF_AUDIO_CODEC, CONF_AUDIO_MAP, CONF_AUDIO_PACKET_SIZE, - CONF_LINKED_DOORBELL_SENSOR, CONF_LINKED_MOTION_SENSOR, CONF_MAX_FPS, CONF_MAX_HEIGHT, @@ -64,18 +61,13 @@ from .const import ( DEFAULT_VIDEO_MAP, DEFAULT_VIDEO_PACKET_SIZE, DEFAULT_VIDEO_PROFILE_NAMES, - SERV_DOORBELL, SERV_MOTION_SENSOR, - SERV_SPEAKER, - SERV_STATELESS_PROGRAMMABLE_SWITCH, ) +from .doorbell import HomeDoorbellAccessory from .util import pid_is_alive, state_changed_event_is_same_state _LOGGER = logging.getLogger(__name__) -DOORBELL_SINGLE_PRESS = 0 -DOORBELL_DOUBLE_PRESS = 1 -DOORBELL_LONG_PRESS = 2 VIDEO_OUTPUT = ( "-map {v_map} -an " @@ -149,7 +141,7 @@ CONFIG_DEFAULTS = { @TYPES.register("Camera") # False-positive on pylint, not a CameraEntity # pylint: disable-next=hass-enforce-class-module -class Camera(HomeAccessory, PyhapCamera): # type: ignore[misc] +class Camera(HomeDoorbellAccessory, PyhapCamera): # type: ignore[misc] """Generate a Camera accessory.""" def __init__( @@ -237,36 +229,6 @@ class Camera(HomeAccessory, PyhapCamera): # type: ignore[misc] ) self._async_update_motion_state(None, state) - self._char_doorbell_detected = None - self._char_doorbell_detected_switch = None - linked_doorbell_sensor: str | None = self.config.get( - CONF_LINKED_DOORBELL_SENSOR - ) - self.linked_doorbell_sensor = linked_doorbell_sensor - self.doorbell_is_event = False - if not linked_doorbell_sensor: - return - self.doorbell_is_event = linked_doorbell_sensor.startswith("event.") - if not (state := self.hass.states.get(linked_doorbell_sensor)): - return - serv_doorbell = self.add_preload_service(SERV_DOORBELL) - self.set_primary_service(serv_doorbell) - self._char_doorbell_detected = serv_doorbell.configure_char( - CHAR_PROGRAMMABLE_SWITCH_EVENT, - value=0, - ) - serv_stateless_switch = self.add_preload_service( - SERV_STATELESS_PROGRAMMABLE_SWITCH - ) - self._char_doorbell_detected_switch = serv_stateless_switch.configure_char( - CHAR_PROGRAMMABLE_SWITCH_EVENT, - value=0, - valid_values={"SinglePress": DOORBELL_SINGLE_PRESS}, - ) - serv_speaker = self.add_preload_service(SERV_SPEAKER) - serv_speaker.configure_char(CHAR_MUTE, value=0) - self._async_update_doorbell_state(None, state) - @pyhap_callback # type: ignore[misc] @callback def run(self) -> None: @@ -285,17 +247,6 @@ class Camera(HomeAccessory, PyhapCamera): # type: ignore[misc] ) ) - if self._char_doorbell_detected: - assert self.linked_doorbell_sensor - self._subscriptions.append( - async_track_state_change_event( - self.hass, - self.linked_doorbell_sensor, - self._async_update_doorbell_state_event, - job_type=HassJobType.Callback, - ) - ) - super().run() @callback @@ -344,39 +295,6 @@ class Camera(HomeAccessory, PyhapCamera): # type: ignore[misc] detected, ) - @callback - def _async_update_doorbell_state_event( - self, event: Event[EventStateChangedData] - ) -> None: - """Handle state change event listener callback.""" - if not state_changed_event_is_same_state(event) and ( - new_state := event.data["new_state"] - ): - self._async_update_doorbell_state(event.data["old_state"], new_state) - - @callback - def 
_async_update_doorbell_state( - self, old_state: State | None, new_state: State - ) -> None: - """Handle link doorbell sensor state change to update HomeKit value.""" - assert self._char_doorbell_detected - assert self._char_doorbell_detected_switch - state = new_state.state - if state == STATE_ON or ( - self.doorbell_is_event - and old_state is not None - and old_state.state != STATE_UNAVAILABLE - and state not in (STATE_UNKNOWN, STATE_UNAVAILABLE) - ): - self._char_doorbell_detected.set_value(DOORBELL_SINGLE_PRESS) - self._char_doorbell_detected_switch.set_value(DOORBELL_SINGLE_PRESS) - _LOGGER.debug( - "%s: Set linked doorbell %s sensor to %d", - self.entity_id, - self.linked_doorbell_sensor, - DOORBELL_SINGLE_PRESS, - ) - @callback def async_update_state(self, new_state: State | None) -> None: """Handle state change to update HomeKit value.""" diff --git a/homeassistant/components/homekit/type_locks.py b/homeassistant/components/homekit/type_locks.py index 70570a8fca5..59da802b8b7 100644 --- a/homeassistant/components/homekit/type_locks.py +++ b/homeassistant/components/homekit/type_locks.py @@ -9,8 +9,9 @@ from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState from homeassistant.const import ATTR_CODE, ATTR_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import State, callback -from .accessories import TYPES, HomeAccessory +from .accessories import TYPES from .const import CHAR_LOCK_CURRENT_STATE, CHAR_LOCK_TARGET_STATE, SERV_LOCK +from .doorbell import HomeDoorbellAccessory _LOGGER = logging.getLogger(__name__) @@ -53,7 +54,7 @@ STATE_TO_SERVICE = { @TYPES.register("Lock") -class Lock(HomeAccessory): +class Lock(HomeDoorbellAccessory): """Generate a Lock accessory for a lock entity. The lock entity must support: unlock and lock. 
diff --git a/homeassistant/components/homekit/util.py b/homeassistant/components/homekit/util.py index ae7e35030be..b255d4c79dd 100644 --- a/homeassistant/components/homekit/util.py +++ b/homeassistant/components/homekit/util.py @@ -182,7 +182,6 @@ HUMIDIFIER_SCHEMA = BASIC_INFO_SCHEMA.extend( {vol.Optional(CONF_LINKED_HUMIDITY_SENSOR): cv.entity_domain(sensor.DOMAIN)} ) - COVER_SCHEMA = BASIC_INFO_SCHEMA.extend( { vol.Optional(CONF_LINKED_OBSTRUCTION_SENSOR): cv.entity_domain( @@ -195,6 +194,14 @@ CODE_SCHEMA = BASIC_INFO_SCHEMA.extend( {vol.Optional(ATTR_CODE, default=None): vol.Any(None, cv.string)} ) +LOCK_SCHEMA = CODE_SCHEMA.extend( + { + vol.Optional(CONF_LINKED_DOORBELL_SENSOR): cv.entity_domain( + [binary_sensor.DOMAIN, EVENT_DOMAIN] + ), + } +) + MEDIA_PLAYER_SCHEMA = vol.Schema( { vol.Required(CONF_FEATURE): vol.All( @@ -284,7 +291,7 @@ def validate_entity_config(values: dict) -> dict[str, dict]: if not isinstance(config, dict): raise vol.Invalid(f"The configuration for {entity} must be a dictionary.") - if domain in ("alarm_control_panel", "lock"): + if domain == "alarm_control_panel": config = CODE_SCHEMA(config) elif domain == media_player.const.DOMAIN: @@ -301,6 +308,9 @@ def validate_entity_config(values: dict) -> dict[str, dict]: elif domain == "camera": config = CAMERA_SCHEMA(config) + elif domain == "lock": + config = LOCK_SCHEMA(config) + elif domain == "switch": config = SWITCH_TYPE_SCHEMA(config) diff --git a/tests/components/homekit/test_type_locks.py b/tests/components/homekit/test_type_locks.py index 2961fe52170..7691e341dcc 100644 --- a/tests/components/homekit/test_type_locks.py +++ b/tests/components/homekit/test_type_locks.py @@ -1,17 +1,34 @@ """Test different accessory types: Locks.""" +from unittest.mock import MagicMock + import pytest -from homeassistant.components.homekit.const import ATTR_VALUE +from homeassistant.components import lock +from homeassistant.components.binary_sensor import BinarySensorDeviceClass +from homeassistant.components.event import EventDeviceClass +from homeassistant.components.homekit.accessories import HomeBridge +from homeassistant.components.homekit.const import ( + ATTR_VALUE, + CHAR_PROGRAMMABLE_SWITCH_EVENT, + CONF_LINKED_DOORBELL_SENSOR, + SERV_DOORBELL, + SERV_STATELESS_PROGRAMMABLE_SWITCH, +) from homeassistant.components.homekit.type_locks import Lock from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState from homeassistant.const import ( ATTR_CODE, + ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, + STATE_OFF, + STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN, ) from homeassistant.core import Event, HomeAssistant +from homeassistant.setup import async_setup_component +from homeassistant.util import dt as dt_util from tests.common import async_mock_service @@ -135,3 +152,285 @@ async def test_no_code( assert acc.char_target_state.value == 1 assert len(events) == 1 assert events[-1].data[ATTR_VALUE] is None + + +async def test_lock_with_linked_doorbell_sensor(hass: HomeAssistant, hk_driver) -> None: + """Test a lock with a linked doorbell sensor can update.""" + code = "1234" + await async_setup_component(hass, lock.DOMAIN, {lock.DOMAIN: {"platform": "demo"}}) + await hass.async_block_till_done() + doorbell_entity_id = "binary_sensor.doorbell" + + hass.states.async_set( + doorbell_entity_id, + STATE_ON, + {ATTR_DEVICE_CLASS: BinarySensorDeviceClass.OCCUPANCY}, + ) + await hass.async_block_till_done() + entity_id = "lock.demo_lock" + + hass.states.async_set(entity_id, None) + await hass.async_block_till_done() + acc = 
Lock( + hass, + hk_driver, + "Lock", + entity_id, + 2, + { + ATTR_CODE: code, + CONF_LINKED_DOORBELL_SENSOR: doorbell_entity_id, + }, + ) + bridge = HomeBridge("hass", hk_driver, "Test Bridge") + bridge.add_accessory(acc) + + acc.run() + + assert acc.aid == 2 + assert acc.category == 6 # DoorLock + + service = acc.get_service(SERV_DOORBELL) + assert service + char = service.get_characteristic(CHAR_PROGRAMMABLE_SWITCH_EVENT) + assert char + + assert char.value is None + + service2 = acc.get_service(SERV_STATELESS_PROGRAMMABLE_SWITCH) + assert service2 + char2 = service.get_characteristic(CHAR_PROGRAMMABLE_SWITCH_EVENT) + assert char2 + broker = MagicMock() + char2.broker = broker + assert char2.value is None + + hass.states.async_set( + doorbell_entity_id, + STATE_OFF, + {ATTR_DEVICE_CLASS: BinarySensorDeviceClass.OCCUPANCY}, + ) + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + assert len(broker.mock_calls) == 0 + + char.set_value(True) + char2.set_value(True) + broker.reset_mock() + + hass.states.async_set( + doorbell_entity_id, + STATE_ON, + {ATTR_DEVICE_CLASS: BinarySensorDeviceClass.OCCUPANCY}, + ) + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + assert len(broker.mock_calls) == 2 + broker.reset_mock() + + hass.states.async_set( + doorbell_entity_id, + STATE_ON, + {ATTR_DEVICE_CLASS: BinarySensorDeviceClass.OCCUPANCY}, + force_update=True, + ) + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + assert len(broker.mock_calls) == 0 + broker.reset_mock() + + hass.states.async_set( + doorbell_entity_id, + STATE_ON, + {ATTR_DEVICE_CLASS: BinarySensorDeviceClass.OCCUPANCY, "other": "attr"}, + ) + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + assert len(broker.mock_calls) == 0 + broker.reset_mock() + + # Ensure we do not throw when the linked + # doorbell sensor is removed + hass.states.async_remove(doorbell_entity_id) + await hass.async_block_till_done() + acc.run() + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + + +async def test_lock_with_linked_doorbell_event(hass: HomeAssistant, hk_driver) -> None: + """Test a lock with a linked doorbell event can update.""" + await async_setup_component(hass, lock.DOMAIN, {lock.DOMAIN: {"platform": "demo"}}) + await hass.async_block_till_done() + doorbell_entity_id = "event.doorbell" + code = "1234" + + hass.states.async_set( + doorbell_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + entity_id = "lock.demo_lock" + + hass.states.async_set(entity_id, None) + await hass.async_block_till_done() + acc = Lock( + hass, + hk_driver, + "Lock", + entity_id, + 2, + { + ATTR_CODE: code, + CONF_LINKED_DOORBELL_SENSOR: doorbell_entity_id, + }, + ) + bridge = HomeBridge("hass", hk_driver, "Test Bridge") + bridge.add_accessory(acc) + + acc.run() + + assert acc.aid == 2 + assert acc.category == 6 # DoorLock + + service = acc.get_service(SERV_DOORBELL) + assert service + char = service.get_characteristic(CHAR_PROGRAMMABLE_SWITCH_EVENT) + assert char + + assert char.value is None + + service2 = acc.get_service(SERV_STATELESS_PROGRAMMABLE_SWITCH) + assert service2 + char2 = service.get_characteristic(CHAR_PROGRAMMABLE_SWITCH_EVENT) + assert char2 + broker = MagicMock() + char2.broker = broker + assert char2.value is None + + hass.states.async_set( + 
doorbell_entity_id, + STATE_UNKNOWN, + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + assert len(broker.mock_calls) == 0 + + char.set_value(True) + char2.set_value(True) + broker.reset_mock() + + original_time = dt_util.utcnow().isoformat() + hass.states.async_set( + doorbell_entity_id, + original_time, + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + assert len(broker.mock_calls) == 2 + broker.reset_mock() + + hass.states.async_set( + doorbell_entity_id, + original_time, + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + force_update=True, + ) + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + assert len(broker.mock_calls) == 0 + broker.reset_mock() + + hass.states.async_set( + doorbell_entity_id, + original_time, + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL, "other": "attr"}, + ) + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + assert len(broker.mock_calls) == 0 + broker.reset_mock() + + # Ensure we do not throw when the linked + # doorbell sensor is removed + hass.states.async_remove(doorbell_entity_id) + await hass.async_block_till_done() + acc.run() + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + + await hass.async_block_till_done() + hass.states.async_set( + doorbell_entity_id, + STATE_UNAVAILABLE, + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + # Ensure re-adding does not fire an event + assert not broker.mock_calls + broker.reset_mock() + + # going from unavailable to a state should not fire an event + hass.states.async_set( + doorbell_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + assert not broker.mock_calls + + # But a second update does + hass.states.async_set( + doorbell_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + assert broker.mock_calls + + +async def test_lock_with_a_missing_linked_doorbell_sensor( + hass: HomeAssistant, hk_driver +) -> None: + """Test a lock with a configured linked doorbell sensor that is missing.""" + await async_setup_component(hass, lock.DOMAIN, {lock.DOMAIN: {"platform": "demo"}}) + await hass.async_block_till_done() + code = "1234" + doorbell_entity_id = "binary_sensor.doorbell" + entity_id = "lock.demo_lock" + hass.states.async_set(entity_id, None) + await hass.async_block_till_done() + acc = Lock( + hass, + hk_driver, + "Lock", + entity_id, + 2, + { + ATTR_CODE: code, + CONF_LINKED_DOORBELL_SENSOR: doorbell_entity_id, + }, + ) + bridge = HomeBridge("hass", hk_driver, "Test Bridge") + bridge.add_accessory(acc) + + acc.run() + + assert acc.aid == 2 + assert acc.category == 6 # DoorLock + + assert not acc.get_service(SERV_DOORBELL) + assert not acc.get_service(SERV_STATELESS_PROGRAMMABLE_SWITCH) diff --git a/tests/components/homekit/test_util.py b/tests/components/homekit/test_util.py index 7f7e3ee0ce0..ebd260de054 100644 --- a/tests/components/homekit/test_util.py +++ b/tests/components/homekit/test_util.py @@ -159,8 +159,20 @@ def test_validate_entity_config() -> None: assert vec({"lock.demo": {}}) == { "lock.demo": {ATTR_CODE: None, CONF_LOW_BATTERY_THRESHOLD: 20} } - assert 
vec({"lock.demo": {ATTR_CODE: "1234"}}) == { - "lock.demo": {ATTR_CODE: "1234", CONF_LOW_BATTERY_THRESHOLD: 20} + + assert vec( + { + "lock.demo": { + ATTR_CODE: "1234", + CONF_LINKED_DOORBELL_SENSOR: "event.doorbell", + } + } + ) == { + "lock.demo": { + ATTR_CODE: "1234", + CONF_LOW_BATTERY_THRESHOLD: 20, + CONF_LINKED_DOORBELL_SENSOR: "event.doorbell", + } } assert vec({"media_player.demo": {}}) == { From 2b907ee56e8eba05b8af46eab97c75ba3337b676 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 30 Nov 2024 14:47:40 -0600 Subject: [PATCH 0098/1198] Strip trailing spaces from HomeKit names (#131971) --- homeassistant/components/homekit/util.py | 2 +- tests/components/homekit/test_accessories.py | 4 ++-- tests/components/homekit/test_util.py | 1 + 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/homekit/util.py b/homeassistant/components/homekit/util.py index b255d4c79dd..d540a88d6e6 100644 --- a/homeassistant/components/homekit/util.py +++ b/homeassistant/components/homekit/util.py @@ -114,7 +114,7 @@ _LOGGER = logging.getLogger(__name__) NUMBERS_ONLY_RE = re.compile(r"[^\d.]+") VERSION_RE = re.compile(r"([0-9]+)(\.[0-9]+)?(\.[0-9]+)?") -INVALID_END_CHARS = "-_" +INVALID_END_CHARS = "-_ " MAX_VERSION_PART = 2**32 - 1 diff --git a/tests/components/homekit/test_accessories.py b/tests/components/homekit/test_accessories.py index c37cac84b8a..00cf42bb916 100644 --- a/tests/components/homekit/test_accessories.py +++ b/tests/components/homekit/test_accessories.py @@ -121,7 +121,7 @@ async def test_home_accessory(hass: HomeAssistant, hk_driver) -> None: serv = acc3.services[0] # SERV_ACCESSORY_INFO assert ( serv.get_characteristic(CHAR_NAME).value - == "Home Accessory that exceeds the maximum maximum maximum maximum " + == "Home Accessory that exceeds the maximum maximum maximum maximum" ) assert ( serv.get_characteristic(CHAR_MANUFACTURER).value @@ -154,7 +154,7 @@ async def test_home_accessory(hass: HomeAssistant, hk_driver) -> None: serv = acc4.services[0] # SERV_ACCESSORY_INFO assert ( serv.get_characteristic(CHAR_NAME).value - == "Home Accessory that exceeds the maximum maximum maximum maximum " + == "Home Accessory that exceeds the maximum maximum maximum maximum" ) assert ( serv.get_characteristic(CHAR_MANUFACTURER).value diff --git a/tests/components/homekit/test_util.py b/tests/components/homekit/test_util.py index ebd260de054..e544362acc0 100644 --- a/tests/components/homekit/test_util.py +++ b/tests/components/homekit/test_util.py @@ -268,6 +268,7 @@ def test_cleanup_name_for_homekit() -> None: """Ensure name sanitize works as expected.""" assert cleanup_name_for_homekit("abc") == "abc" + assert cleanup_name_for_homekit("abc ") == "abc" assert cleanup_name_for_homekit("a b c") == "a b c" assert cleanup_name_for_homekit("ab_c") == "ab c" assert ( From a0d5fda4b6c502d877ed8ad1c3b440835606ac5f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 30 Nov 2024 16:09:37 -0600 Subject: [PATCH 0099/1198] Reduce precision loss when converting HomeKit temperature (#131973) --- homeassistant/components/homekit/util.py | 12 ++---------- tests/components/homekit/test_type_thermostats.py | 10 +++++----- tests/components/homekit/test_util.py | 8 +++++--- 3 files changed, 12 insertions(+), 18 deletions(-) diff --git a/homeassistant/components/homekit/util.py b/homeassistant/components/homekit/util.py index d540a88d6e6..d339aa6aded 100644 --- a/homeassistant/components/homekit/util.py +++ b/homeassistant/components/homekit/util.py @@ -434,20 +434,12 @@ def cleanup_name_for_homekit(name: str | None) -> str: def temperature_to_homekit(temperature: float, unit: str) -> float: """Convert temperature to Celsius for HomeKit.""" - return round( - TemperatureConverter.convert(temperature, unit, UnitOfTemperature.CELSIUS), 1 - ) + return TemperatureConverter.convert(temperature, unit, UnitOfTemperature.CELSIUS) def temperature_to_states(temperature: float, unit: str) -> float: """Convert temperature back from Celsius to Home Assistant unit.""" - return ( - round( - TemperatureConverter.convert(temperature, UnitOfTemperature.CELSIUS, unit) - * 2 - ) - / 2 - ) + return TemperatureConverter.convert(temperature, UnitOfTemperature.CELSIUS, unit) def density_to_air_quality(density: float) -> int: diff --git a/tests/components/homekit/test_type_thermostats.py b/tests/components/homekit/test_type_thermostats.py index 8454610566b..e99db8f6234 100644 --- a/tests/components/homekit/test_type_thermostats.py +++ b/tests/components/homekit/test_type_thermostats.py @@ -921,8 +921,8 @@ async def test_thermostat_fahrenheit( await hass.async_block_till_done() assert call_set_temperature[0] assert call_set_temperature[0].data[ATTR_ENTITY_ID] == entity_id - assert call_set_temperature[0].data[ATTR_TARGET_TEMP_HIGH] == 73.5 - assert call_set_temperature[0].data[ATTR_TARGET_TEMP_LOW] == 68 + assert call_set_temperature[0].data[ATTR_TARGET_TEMP_HIGH] == 73.4 + assert call_set_temperature[0].data[ATTR_TARGET_TEMP_LOW] == 68.18 assert len(events) == 1 assert events[-1].data[ATTR_VALUE] == "CoolingThresholdTemperature to 23°C" @@ -942,8 +942,8 @@ async def test_thermostat_fahrenheit( await hass.async_block_till_done() assert call_set_temperature[1] assert call_set_temperature[1].data[ATTR_ENTITY_ID] == entity_id - assert call_set_temperature[1].data[ATTR_TARGET_TEMP_HIGH] == 73.5 - assert call_set_temperature[1].data[ATTR_TARGET_TEMP_LOW] == 71.5 + assert call_set_temperature[1].data[ATTR_TARGET_TEMP_HIGH] == 73.4 + assert call_set_temperature[1].data[ATTR_TARGET_TEMP_LOW] == 71.6 assert len(events) == 2 assert events[-1].data[ATTR_VALUE] == "HeatingThresholdTemperature to 22°C" @@ -962,7 +962,7 @@ async def test_thermostat_fahrenheit( await hass.async_block_till_done() assert call_set_temperature[2] assert call_set_temperature[2].data[ATTR_ENTITY_ID] == entity_id - assert call_set_temperature[2].data[ATTR_TEMPERATURE] == 75.0 + assert call_set_temperature[2].data[ATTR_TEMPERATURE] == 75.2 assert len(events) == 3 assert events[-1].data[ATTR_VALUE] == "TargetTemperature to 24.0°C" diff --git a/tests/components/homekit/test_util.py b/tests/components/homekit/test_util.py index e544362acc0..853db54b992 100644 --- a/tests/components/homekit/test_util.py +++ b/tests/components/homekit/test_util.py @@ -280,14 +280,16 @@ def test_cleanup_name_for_homekit() -> None: def test_temperature_to_homekit() -> None: """Test temperature conversion from HA to HomeKit.""" - 
assert temperature_to_homekit(20.46, UnitOfTemperature.CELSIUS) == 20.5 - assert temperature_to_homekit(92.1, UnitOfTemperature.FAHRENHEIT) == 33.4 + assert temperature_to_homekit(20.46, UnitOfTemperature.CELSIUS) == 20.46 + assert temperature_to_homekit(92.1, UnitOfTemperature.FAHRENHEIT) == pytest.approx( + 33.388888888888886 + ) def test_temperature_to_states() -> None: """Test temperature conversion from HomeKit to HA.""" assert temperature_to_states(20, UnitOfTemperature.CELSIUS) == 20.0 - assert temperature_to_states(20.2, UnitOfTemperature.FAHRENHEIT) == 68.5 + assert temperature_to_states(20.2, UnitOfTemperature.FAHRENHEIT) == 68.36 def test_density_to_air_quality() -> None: From 44ed83a82978e8561b3e18cdf2eff4b40673fc77 Mon Sep 17 00:00:00 2001 From: Bouwe Westerdijk <11290930+bouwew@users.noreply.github.com> Date: Sun, 1 Dec 2024 04:01:33 +0100 Subject: [PATCH 0100/1198] Bump plugwise to v1.6.1 (#131950) --- homeassistant/components/plugwise/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/plugwise/manifest.json b/homeassistant/components/plugwise/manifest.json index d4d80749a8d..df35777ac54 100644 --- a/homeassistant/components/plugwise/manifest.json +++ b/homeassistant/components/plugwise/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["plugwise"], - "requirements": ["plugwise==1.6.0"], + "requirements": ["plugwise==1.6.1"], "zeroconf": ["_plugwise._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index d25732ee81b..492554972e3 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1622,7 +1622,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==1.6.0 +plugwise==1.6.1 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 679b8fabd45..c7b35596e01 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1332,7 +1332,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==1.6.0 +plugwise==1.6.1 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 From ffeefd4856d8355b9746e94ca45c27d27168d89d Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 30 Nov 2024 21:07:51 -0600 Subject: [PATCH 0101/1198] Bump SQLAlchemy to 2.0.36 (#126683) * Bump SQLAlchemy to 2.0.35 changelog: https://docs.sqlalchemy.org/en/20/changelog/changelog_20.html#change-2.0.35 * fix mocking * adjust to .36 * remove ignored as these are now typed * fix SQLAlchemy --- .github/workflows/wheels.yml | 2 +- .../components/recorder/db_schema.py | 6 +-- .../components/recorder/manifest.json | 2 +- homeassistant/components/sql/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/sql/test_config_flow.py | 52 ++++++++----------- tests/components/sql/test_sensor.py | 47 ++++++++++------- 11 files changed, 62 insertions(+), 59 deletions(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index b9f54bba081..e0a850fa340 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -143,7 +143,7 @@ jobs: wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-dev" - skip-binary: aiohttp;multidict;yarl + skip-binary: aiohttp;multidict;yarl;SQLAlchemy constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements.txt" diff --git a/homeassistant/components/recorder/db_schema.py b/homeassistant/components/recorder/db_schema.py index 7e8343321c3..dbe2b775297 100644 --- a/homeassistant/components/recorder/db_schema.py +++ b/homeassistant/components/recorder/db_schema.py @@ -162,14 +162,14 @@ class Unused(CHAR): """An unused column type that behaves like a string.""" -@compiles(UnusedDateTime, "mysql", "mariadb", "sqlite") # type: ignore[misc,no-untyped-call] -@compiles(Unused, "mysql", "mariadb", "sqlite") # type: ignore[misc,no-untyped-call] +@compiles(UnusedDateTime, "mysql", "mariadb", "sqlite") +@compiles(Unused, "mysql", "mariadb", "sqlite") def compile_char_zero(type_: TypeDecorator, compiler: Any, **kw: Any) -> str: """Compile UnusedDateTime and Unused as CHAR(0) on mysql, mariadb, and sqlite.""" return "CHAR(0)" # Uses 1 byte on MySQL (no change on sqlite) -@compiles(Unused, "postgresql") # type: ignore[misc,no-untyped-call] +@compiles(Unused, "postgresql") def compile_char_one(type_: TypeDecorator, compiler: Any, **kw: Any) -> str: """Compile Unused as CHAR(1) on postgresql.""" return "CHAR(1)" # Uses 1 byte diff --git a/homeassistant/components/recorder/manifest.json b/homeassistant/components/recorder/manifest.json index 2be4b6862ba..93ffb12d18c 100644 --- a/homeassistant/components/recorder/manifest.json +++ b/homeassistant/components/recorder/manifest.json @@ -7,7 +7,7 @@ "iot_class": "local_push", "quality_scale": "internal", "requirements": [ - "SQLAlchemy==2.0.31", + "SQLAlchemy==2.0.36", "fnv-hash-fast==1.0.2", "psutil-home-assistant==0.0.1" ] diff --git a/homeassistant/components/sql/manifest.json b/homeassistant/components/sql/manifest.json index dcb5f47829c..01c95d6c5e4 100644 --- a/homeassistant/components/sql/manifest.json +++ b/homeassistant/components/sql/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/sql", "iot_class": "local_polling", - "requirements": ["SQLAlchemy==2.0.31", "sqlparse==0.5.0"] + "requirements": ["SQLAlchemy==2.0.36", "sqlparse==0.5.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index cb3f51476c8..cb7aa1219ab 100644 --- 
a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -59,7 +59,7 @@ pyudev==0.24.1 PyYAML==6.0.2 requests==2.32.3 securetar==2024.11.0 -SQLAlchemy==2.0.31 +SQLAlchemy==2.0.36 standard-aifc==3.13.0;python_version>='3.13' standard-telnetlib==3.13.0;python_version>='3.13' typing-extensions>=4.12.2,<5.0 diff --git a/pyproject.toml b/pyproject.toml index 4bf14a36948..3f2df027b4a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -66,7 +66,7 @@ dependencies = [ "PyYAML==6.0.2", "requests==2.32.3", "securetar==2024.11.0", - "SQLAlchemy==2.0.31", + "SQLAlchemy==2.0.36", "standard-aifc==3.13.0;python_version>='3.13'", "standard-telnetlib==3.13.0;python_version>='3.13'", "typing-extensions>=4.12.2,<5.0", diff --git a/requirements.txt b/requirements.txt index 2cbdeb14b98..1fa82f175bb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -37,7 +37,7 @@ python-slugify==8.0.4 PyYAML==6.0.2 requests==2.32.3 securetar==2024.11.0 -SQLAlchemy==2.0.31 +SQLAlchemy==2.0.36 standard-aifc==3.13.0;python_version>='3.13' standard-telnetlib==3.13.0;python_version>='3.13' typing-extensions>=4.12.2,<5.0 diff --git a/requirements_all.txt b/requirements_all.txt index 492554972e3..0fa30cee23a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -116,7 +116,7 @@ RtmAPI==0.7.2 # homeassistant.components.recorder # homeassistant.components.sql -SQLAlchemy==2.0.31 +SQLAlchemy==2.0.36 # homeassistant.components.tami4 Tami4EdgeAPI==3.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c7b35596e01..668e98a262a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -110,7 +110,7 @@ RtmAPI==0.7.2 # homeassistant.components.recorder # homeassistant.components.sql -SQLAlchemy==2.0.31 +SQLAlchemy==2.0.36 # homeassistant.components.tami4 Tami4EdgeAPI==3.0 diff --git a/tests/components/sql/test_config_flow.py b/tests/components/sql/test_config_flow.py index cb990e454b7..3f2400c0a32 100644 --- a/tests/components/sql/test_config_flow.py +++ b/tests/components/sql/test_config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +from pathlib import Path from unittest.mock import patch from sqlalchemy.exc import SQLAlchemyError @@ -597,9 +598,6 @@ async def test_options_flow_db_url_empty( "homeassistant.components.sql.async_setup_entry", return_value=True, ), - patch( - "homeassistant.components.sql.config_flow.sqlalchemy.create_engine", - ), ): result = await hass.config_entries.options.async_configure( result["flow_id"], @@ -621,7 +619,9 @@ async def test_options_flow_db_url_empty( async def test_full_flow_not_recorder_db( - recorder_mock: Recorder, hass: HomeAssistant + recorder_mock: Recorder, + hass: HomeAssistant, + tmp_path: Path, ) -> None: """Test full config flow with not using recorder db.""" result = await hass.config_entries.flow.async_init( @@ -629,20 +629,19 @@ async def test_full_flow_not_recorder_db( ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} + db_path = tmp_path / "db.db" + db_path_str = f"sqlite:///{db_path}" with ( patch( "homeassistant.components.sql.async_setup_entry", return_value=True, ), - patch( - "homeassistant.components.sql.config_flow.sqlalchemy.create_engine", - ), ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - "db_url": "sqlite://path/to/db.db", + "db_url": db_path_str, "name": "Get Value", "query": "SELECT 5 as value", "column": "value", @@ -654,7 +653,7 @@ async def test_full_flow_not_recorder_db( assert result2["title"] == "Get Value" 
assert result2["options"] == { "name": "Get Value", - "db_url": "sqlite://path/to/db.db", + "db_url": db_path_str, "query": "SELECT 5 as value", "column": "value", } @@ -671,15 +670,12 @@ async def test_full_flow_not_recorder_db( "homeassistant.components.sql.async_setup_entry", return_value=True, ), - patch( - "homeassistant.components.sql.config_flow.sqlalchemy.create_engine", - ), ): result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={ "query": "SELECT 5 as value", - "db_url": "sqlite://path/to/db.db", + "db_url": db_path_str, "column": "value", "unit_of_measurement": "MiB", }, @@ -689,7 +685,7 @@ async def test_full_flow_not_recorder_db( assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { "name": "Get Value", - "db_url": "sqlite://path/to/db.db", + "db_url": db_path_str, "query": "SELECT 5 as value", "column": "value", "unit_of_measurement": "MiB", @@ -697,24 +693,22 @@ async def test_full_flow_not_recorder_db( # Need to test same again to mitigate issue with db_url removal result = await hass.config_entries.options.async_init(entry.entry_id) - with patch( - "homeassistant.components.sql.config_flow.sqlalchemy.create_engine", - ): - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - "query": "SELECT 5 as value", - "db_url": "sqlite://path/to/db.db", - "column": "value", - "unit_of_measurement": "MB", - }, - ) - await hass.async_block_till_done() + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + "query": "SELECT 5 as value", + "db_url": db_path_str, + "column": "value", + "unit_of_measurement": "MB", + }, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { "name": "Get Value", - "db_url": "sqlite://path/to/db.db", + "db_url": db_path_str, "query": "SELECT 5 as value", "column": "value", "unit_of_measurement": "MB", @@ -722,7 +716,7 @@ async def test_full_flow_not_recorder_db( assert entry.options == { "name": "Get Value", - "db_url": "sqlite://path/to/db.db", + "db_url": db_path_str, "query": "SELECT 5 as value", "column": "value", "unit_of_measurement": "MB", diff --git a/tests/components/sql/test_sensor.py b/tests/components/sql/test_sensor.py index b219ad47f3a..6b4032323d0 100644 --- a/tests/components/sql/test_sensor.py +++ b/tests/components/sql/test_sensor.py @@ -3,12 +3,13 @@ from __future__ import annotations from datetime import timedelta +from pathlib import Path +import sqlite3 from typing import Any from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory import pytest -from sqlalchemy import text as sql_text from sqlalchemy.exc import SQLAlchemyError from homeassistant.components.recorder import Recorder @@ -143,29 +144,37 @@ async def test_query_no_value( assert text in caplog.text -async def test_query_mssql_no_result( - recorder_mock: Recorder, hass: HomeAssistant, caplog: pytest.LogCaptureFixture +async def test_query_on_disk_sqlite_no_result( + recorder_mock: Recorder, + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + tmp_path: Path, ) -> None: """Test the SQL sensor with a query that returns no value.""" - config = { - "db_url": "mssql://", - "query": "SELECT 5 as value where 1=2", - "column": "value", - "name": "count_tables", - } - with ( - patch("homeassistant.components.sql.sensor.sqlalchemy"), - patch( - "homeassistant.components.sql.sensor.sqlalchemy.text", - return_value=sql_text("SELECT TOP 1 5 
as value where 1=2"), - ), - ): - await init_integration(hass, config) + db_path = tmp_path / "test.db" + db_path_str = f"sqlite:///{db_path}" - state = hass.states.get("sensor.count_tables") + def make_test_db(): + """Create a test database.""" + conn = sqlite3.connect(db_path) + conn.execute("CREATE TABLE users (value INTEGER)") + conn.commit() + conn.close() + + await hass.async_add_executor_job(make_test_db) + + config = { + "db_url": db_path_str, + "query": "SELECT value from users", + "column": "value", + "name": "count_users", + } + await init_integration(hass, config) + + state = hass.states.get("sensor.count_users") assert state.state == STATE_UNKNOWN - text = "SELECT TOP 1 5 AS VALUE WHERE 1=2 returned no results" + text = "SELECT value from users LIMIT 1; returned no results" assert text in caplog.text From 6103cea3f5d018d683209865830aa074978ff430 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Sat, 30 Nov 2024 23:04:29 -0500 Subject: [PATCH 0102/1198] Make the full conversation input available to sentence triggers (#131982) Co-authored-by: Michael Hansen --- .../components/conversation/default_agent.py | 6 +- .../components/conversation/models.py | 11 ++ .../components/conversation/trigger.py | 8 +- .../conversation/test_default_agent.py | 2 +- tests/components/conversation/test_trigger.py | 144 +++++++++++++++--- 5 files changed, 146 insertions(+), 25 deletions(-) diff --git a/homeassistant/components/conversation/default_agent.py b/homeassistant/components/conversation/default_agent.py index c1256a1507b..59c09232b93 100644 --- a/homeassistant/components/conversation/default_agent.py +++ b/homeassistant/components/conversation/default_agent.py @@ -70,7 +70,7 @@ _ENTITY_REGISTRY_UPDATE_FIELDS = ["aliases", "name", "original_name"] REGEX_TYPE = type(re.compile("")) TRIGGER_CALLBACK_TYPE = Callable[ - [str, RecognizeResult, str | None], Awaitable[str | None] + [ConversationInput, RecognizeResult], Awaitable[str | None] ] METADATA_CUSTOM_SENTENCE = "hass_custom_sentence" METADATA_CUSTOM_FILE = "hass_custom_file" @@ -1286,9 +1286,7 @@ class DefaultAgent(ConversationEntity): # Gather callback responses in parallel trigger_callbacks = [ - self._trigger_sentences[trigger_id].callback( - user_input.text, trigger_result, user_input.device_id - ) + self._trigger_sentences[trigger_id].callback(user_input, trigger_result) for trigger_id, trigger_result in result.matched_triggers.items() ] diff --git a/homeassistant/components/conversation/models.py b/homeassistant/components/conversation/models.py index 724e520e6df..10218e76751 100644 --- a/homeassistant/components/conversation/models.py +++ b/homeassistant/components/conversation/models.py @@ -40,6 +40,17 @@ class ConversationInput: agent_id: str | None = None """Agent to use for processing.""" + def as_dict(self) -> dict[str, Any]: + """Return input as a dict.""" + return { + "text": self.text, + "context": self.context.as_dict(), + "conversation_id": self.conversation_id, + "device_id": self.device_id, + "language": self.language, + "agent_id": self.agent_id, + } + @dataclass(slots=True) class ConversationResult: diff --git a/homeassistant/components/conversation/trigger.py b/homeassistant/components/conversation/trigger.py index a4f64ffbad9..24eb54c5694 100644 --- a/homeassistant/components/conversation/trigger.py +++ b/homeassistant/components/conversation/trigger.py @@ -16,6 +16,7 @@ from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo from homeassistant.helpers.typing import UNDEFINED, ConfigType from 
.const import DATA_DEFAULT_ENTITY, DOMAIN +from .models import ConversationInput def has_no_punctuation(value: list[str]) -> list[str]: @@ -62,7 +63,7 @@ async def async_attach_trigger( job = HassJob(action) async def call_action( - sentence: str, result: RecognizeResult, device_id: str | None + user_input: ConversationInput, result: RecognizeResult ) -> str | None: """Call action with right context.""" @@ -83,12 +84,13 @@ async def async_attach_trigger( trigger_input: dict[str, Any] = { # Satisfy type checker **trigger_data, "platform": DOMAIN, - "sentence": sentence, + "sentence": user_input.text, "details": details, "slots": { # direct access to values entity_name: entity["value"] for entity_name, entity in details.items() }, - "device_id": device_id, + "device_id": user_input.device_id, + "user_input": user_input.as_dict(), } # Wait for the automation to complete diff --git a/tests/components/conversation/test_default_agent.py b/tests/components/conversation/test_default_agent.py index 6990ffe7717..20fa41944f2 100644 --- a/tests/components/conversation/test_default_agent.py +++ b/tests/components/conversation/test_default_agent.py @@ -397,7 +397,7 @@ async def test_trigger_sentences(hass: HomeAssistant) -> None: callback.reset_mock() result = await conversation.async_converse(hass, sentence, None, Context()) assert callback.call_count == 1 - assert callback.call_args[0][0] == sentence + assert callback.call_args[0][0].text == sentence assert ( result.response.response_type == intent.IntentResponseType.ACTION_DONE ), sentence diff --git a/tests/components/conversation/test_trigger.py b/tests/components/conversation/test_trigger.py index 903bc405cf0..50fac51c87a 100644 --- a/tests/components/conversation/test_trigger.py +++ b/tests/components/conversation/test_trigger.py @@ -40,18 +40,31 @@ async def test_if_fires_on_event( }, "action": { "service": "test.automation", - "data_template": {"data": "{{ trigger }}"}, + "data": { + "data": { + "alias": "{{ trigger.alias }}", + "id": "{{ trigger.id }}", + "idx": "{{ trigger.idx }}", + "platform": "{{ trigger.platform }}", + "sentence": "{{ trigger.sentence }}", + "slots": "{{ trigger.slots }}", + "details": "{{ trigger.details }}", + "device_id": "{{ trigger.device_id }}", + "user_input": "{{ trigger.user_input }}", + } + }, }, } }, ) - + context = Context() service_response = await hass.services.async_call( "conversation", "process", {"text": "Ha ha ha"}, blocking=True, return_response=True, + context=context, ) assert service_response["response"]["speech"]["plain"]["speech"] == "Done" @@ -61,13 +74,21 @@ async def test_if_fires_on_event( assert service_calls[1].service == "automation" assert service_calls[1].data["data"] == { "alias": None, - "id": "0", - "idx": "0", + "id": 0, + "idx": 0, "platform": "conversation", "sentence": "Ha ha ha", "slots": {}, "details": {}, "device_id": None, + "user_input": { + "agent_id": None, + "context": context.as_dict(), + "conversation_id": None, + "device_id": None, + "language": "en", + "text": "Ha ha ha", + }, } @@ -152,7 +173,19 @@ async def test_response_same_sentence( {"delay": "0:0:0.100"}, { "service": "test.automation", - "data_template": {"data": "{{ trigger }}"}, + "data_template": { + "data": { + "alias": "{{ trigger.alias }}", + "id": "{{ trigger.id }}", + "idx": "{{ trigger.idx }}", + "platform": "{{ trigger.platform }}", + "sentence": "{{ trigger.sentence }}", + "slots": "{{ trigger.slots }}", + "details": "{{ trigger.details }}", + "device_id": "{{ trigger.device_id }}", + "user_input": "{{ 
trigger.user_input }}", + } + }, }, {"set_conversation_response": "response 2"}, ], @@ -168,13 +201,14 @@ async def test_response_same_sentence( ] }, ) - + context = Context() service_response = await hass.services.async_call( "conversation", "process", {"text": "test sentence"}, blocking=True, return_response=True, + context=context, ) await hass.async_block_till_done() @@ -188,12 +222,20 @@ async def test_response_same_sentence( assert service_calls[1].data["data"] == { "alias": None, "id": "trigger1", - "idx": "0", + "idx": 0, "platform": "conversation", "sentence": "test sentence", "slots": {}, "details": {}, "device_id": None, + "user_input": { + "agent_id": None, + "context": context.as_dict(), + "conversation_id": None, + "device_id": None, + "language": "en", + "text": "test sentence", + }, } @@ -231,13 +273,14 @@ async def test_response_same_sentence_with_error( ] }, ) - + context = Context() service_response = await hass.services.async_call( "conversation", "process", {"text": "test sentence"}, blocking=True, return_response=True, + context=context, ) await hass.async_block_till_done() @@ -320,12 +363,24 @@ async def test_same_trigger_multiple_sentences( }, "action": { "service": "test.automation", - "data_template": {"data": "{{ trigger }}"}, + "data_template": { + "data": { + "alias": "{{ trigger.alias }}", + "id": "{{ trigger.id }}", + "idx": "{{ trigger.idx }}", + "platform": "{{ trigger.platform }}", + "sentence": "{{ trigger.sentence }}", + "slots": "{{ trigger.slots }}", + "details": "{{ trigger.details }}", + "device_id": "{{ trigger.device_id }}", + "user_input": "{{ trigger.user_input }}", + } + }, }, } }, ) - + context = Context() await hass.services.async_call( "conversation", "process", @@ -333,6 +388,7 @@ async def test_same_trigger_multiple_sentences( "text": "hello", }, blocking=True, + context=context, ) # Only triggers once @@ -342,13 +398,21 @@ async def test_same_trigger_multiple_sentences( assert service_calls[1].service == "automation" assert service_calls[1].data["data"] == { "alias": None, - "id": "0", - "idx": "0", + "id": 0, + "idx": 0, "platform": "conversation", "sentence": "hello", "slots": {}, "details": {}, "device_id": None, + "user_input": { + "agent_id": None, + "context": context.as_dict(), + "conversation_id": None, + "device_id": None, + "language": "en", + "text": "hello", + }, } @@ -371,7 +435,19 @@ async def test_same_sentence_multiple_triggers( }, "action": { "service": "test.automation", - "data_template": {"data": "{{ trigger }}"}, + "data_template": { + "data": { + "alias": "{{ trigger.alias }}", + "id": "{{ trigger.id }}", + "idx": "{{ trigger.idx }}", + "platform": "{{ trigger.platform }}", + "sentence": "{{ trigger.sentence }}", + "slots": "{{ trigger.slots }}", + "details": "{{ trigger.details }}", + "device_id": "{{ trigger.device_id }}", + "user_input": "{{ trigger.user_input }}", + } + }, }, }, { @@ -384,7 +460,19 @@ async def test_same_sentence_multiple_triggers( }, "action": { "service": "test.automation", - "data_template": {"data": "{{ trigger }}"}, + "data_template": { + "data": { + "alias": "{{ trigger.alias }}", + "id": "{{ trigger.id }}", + "idx": "{{ trigger.idx }}", + "platform": "{{ trigger.platform }}", + "sentence": "{{ trigger.sentence }}", + "slots": "{{ trigger.slots }}", + "details": "{{ trigger.details }}", + "device_id": "{{ trigger.device_id }}", + "user_input": "{{ trigger.user_input }}", + } + }, }, }, ], @@ -488,12 +576,25 @@ async def test_wildcards(hass: HomeAssistant, service_calls: list[ServiceCall]) }, 
"action": { "service": "test.automation", - "data_template": {"data": "{{ trigger }}"}, + "data_template": { + "data": { + "alias": "{{ trigger.alias }}", + "id": "{{ trigger.id }}", + "idx": "{{ trigger.idx }}", + "platform": "{{ trigger.platform }}", + "sentence": "{{ trigger.sentence }}", + "slots": "{{ trigger.slots }}", + "details": "{{ trigger.details }}", + "device_id": "{{ trigger.device_id }}", + "user_input": "{{ trigger.user_input }}", + } + }, }, } }, ) + context = Context() await hass.services.async_call( "conversation", "process", @@ -501,6 +602,7 @@ async def test_wildcards(hass: HomeAssistant, service_calls: list[ServiceCall]) "text": "play the white album by the beatles", }, blocking=True, + context=context, ) await hass.async_block_till_done() @@ -509,8 +611,8 @@ async def test_wildcards(hass: HomeAssistant, service_calls: list[ServiceCall]) assert service_calls[1].service == "automation" assert service_calls[1].data["data"] == { "alias": None, - "id": "0", - "idx": "0", + "id": 0, + "idx": 0, "platform": "conversation", "sentence": "play the white album by the beatles", "slots": { @@ -530,6 +632,14 @@ async def test_wildcards(hass: HomeAssistant, service_calls: list[ServiceCall]) }, }, "device_id": None, + "user_input": { + "agent_id": None, + "context": context.as_dict(), + "conversation_id": None, + "device_id": None, + "language": "en", + "text": "play the white album by the beatles", + }, } From 37972ec88e2a9fcb38bce6a3cd86ff9b0bfec949 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Sun, 1 Dec 2024 12:08:35 +0100 Subject: [PATCH 0103/1198] Match "delete" with "create" in the action descriptions (#131989) --- .../components/persistent_notification/strings.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/persistent_notification/strings.json b/homeassistant/components/persistent_notification/strings.json index b9a4ae4f10f..e6c3d3b7775 100644 --- a/homeassistant/components/persistent_notification/strings.json +++ b/homeassistant/components/persistent_notification/strings.json @@ -21,17 +21,17 @@ }, "dismiss": { "name": "Dismiss", - "description": "Removes a notification from the notifications panel.", + "description": "Deletes a notification from the notifications panel.", "fields": { "notification_id": { "name": "[%key:component::persistent_notification::services::create::fields::notification_id::name%]", - "description": "ID of the notification to be removed." + "description": "ID of the notification to be deleted." } } }, "dismiss_all": { "name": "Dismiss all", - "description": "Removes all notifications from the notifications panel." + "description": "Deletes all notifications from the notifications panel." 
} } } From 47aebabc513eec2f2eb833c5dc264bd295351ccf Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Sun, 1 Dec 2024 12:20:45 +0100 Subject: [PATCH 0104/1198] Add final translations to mqtt exceptions (#131933) --- homeassistant/components/mqtt/client.py | 18 +++++++++++++++--- homeassistant/components/mqtt/config_flow.py | 4 +++- .../components/mqtt/device_trigger.py | 5 ++++- homeassistant/components/mqtt/strings.json | 15 +++++++++++++++ 4 files changed, 37 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/mqtt/client.py b/homeassistant/components/mqtt/client.py index a626e0e5b28..1dcd0928434 100644 --- a/homeassistant/components/mqtt/client.py +++ b/homeassistant/components/mqtt/client.py @@ -776,7 +776,11 @@ class MQTT: else: del self._wildcard_subscriptions[subscription] except (KeyError, ValueError) as exc: - raise HomeAssistantError("Can't remove subscription twice") from exc + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="mqtt_not_setup_cannot_unsubscribe_twice", + translation_placeholders={"topic": topic}, + ) from exc @callback def _async_queue_subscriptions( @@ -822,7 +826,11 @@ class MQTT: ) -> Callable[[], None]: """Set up a subscription to a topic with the provided qos.""" if not isinstance(topic, str): - raise HomeAssistantError("Topic needs to be a string!") + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="mqtt_topic_not_a_string", + translation_placeholders={"topic": topic}, + ) if job_type is None: job_type = get_hassjob_callable_job_type(msg_callback) @@ -1213,7 +1221,11 @@ class MQTT: import paho.mqtt.client as mqtt raise HomeAssistantError( - f"Error talking to MQTT: {mqtt.error_string(result_code)}" + translation_domain=DOMAIN, + translation_key="mqtt_broker_error", + translation_placeholders={ + "error_message": mqtt.error_string(result_code) + }, ) # Create the mid event if not created, either _mqtt_handle_mid or diff --git a/homeassistant/components/mqtt/config_flow.py b/homeassistant/components/mqtt/config_flow.py index 69306a1c383..34d43ad87f3 100644 --- a/homeassistant/components/mqtt/config_flow.py +++ b/homeassistant/components/mqtt/config_flow.py @@ -331,7 +331,9 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): break else: raise AddonError( - f"Failed to correctly start {addon_manager.addon_name} add-on" + translation_domain=DOMAIN, + translation_key="addon_start_failed", + translation_placeholders={"addon": addon_manager.addon_name}, ) async def async_step_user( diff --git a/homeassistant/components/mqtt/device_trigger.py b/homeassistant/components/mqtt/device_trigger.py index 80faf879587..8665ac26961 100644 --- a/homeassistant/components/mqtt/device_trigger.py +++ b/homeassistant/components/mqtt/device_trigger.py @@ -148,7 +148,10 @@ class Trigger: def async_remove() -> None: """Remove trigger.""" if instance not in self.trigger_instances: - raise HomeAssistantError("Can't remove trigger twice") + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="mqtt_trigger_cannot_remove_twice", + ) if instance.remove: instance.remove() diff --git a/homeassistant/components/mqtt/strings.json b/homeassistant/components/mqtt/strings.json index 7cf35783569..4d23007e51b 100644 --- a/homeassistant/components/mqtt/strings.json +++ b/homeassistant/components/mqtt/strings.json @@ -289,6 +289,9 @@ } }, "exceptions": { + "addon_start_failed": { + "message": "Failed to correctly start {addon} add-on." 
+ }, "command_template_error": { "message": "Parsing template `{command_template}` for entity `{entity_id}` failed with error: {error}." }, @@ -298,11 +301,23 @@ "invalid_publish_topic": { "message": "Unable to publish: topic template `{topic_template}` produced an invalid topic `{topic}` after rendering ({error})" }, + "mqtt_broker_error": { + "message": "Error talking to MQTT: {error_message}." + }, "mqtt_not_setup_cannot_subscribe": { "message": "Cannot subscribe to topic \"{topic}\", make sure MQTT is set up correctly." }, "mqtt_not_setup_cannot_publish": { "message": "Cannot publish to topic \"{topic}\", make sure MQTT is set up correctly." + }, + "mqtt_not_setup_cannot_unsubscribe_twice": { + "message": "Cannot unsubscribe topic \"{topic}\" twice." + }, + "mqtt_topic_not_a_string": { + "message": "Topic needs to be a string! Got: {topic}." + }, + "mqtt_trigger_cannot_remove_twice": { + "message": "Can't remove trigger twice." } } } From 8878d0f0e13adfebc086bc14d38ea71a7540c7ec Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 1 Dec 2024 08:55:07 -0600 Subject: [PATCH 0105/1198] Reduce time syscalls needed to insert new statistics (#131984) --- homeassistant/components/recorder/db_schema.py | 15 +++++++++++---- homeassistant/components/recorder/statistics.py | 16 +++++++++------- tests/components/recorder/test_statistics.py | 4 ++-- 3 files changed, 22 insertions(+), 13 deletions(-) diff --git a/homeassistant/components/recorder/db_schema.py b/homeassistant/components/recorder/db_schema.py index dbe2b775297..fb57a1c73e2 100644 --- a/homeassistant/components/recorder/db_schema.py +++ b/homeassistant/components/recorder/db_schema.py @@ -691,12 +691,14 @@ class StatisticsBase: duration: timedelta @classmethod - def from_stats(cls, metadata_id: int, stats: StatisticData) -> Self: + def from_stats( + cls, metadata_id: int, stats: StatisticData, now_timestamp: float | None = None + ) -> Self: """Create object from a statistics with datetime objects.""" return cls( # type: ignore[call-arg] metadata_id=metadata_id, created=None, - created_ts=time.time(), + created_ts=now_timestamp or time.time(), start=None, start_ts=stats["start"].timestamp(), mean=stats.get("mean"), @@ -709,12 +711,17 @@ class StatisticsBase: ) @classmethod - def from_stats_ts(cls, metadata_id: int, stats: StatisticDataTimestamp) -> Self: + def from_stats_ts( + cls, + metadata_id: int, + stats: StatisticDataTimestamp, + now_timestamp: float | None = None, + ) -> Self: """Create object from a statistics with timestamps.""" return cls( # type: ignore[call-arg] metadata_id=metadata_id, created=None, - created_ts=time.time(), + created_ts=now_timestamp or time.time(), start=None, start_ts=stats["start_ts"], mean=stats.get("mean"), diff --git a/homeassistant/components/recorder/statistics.py b/homeassistant/components/recorder/statistics.py index 9f01fd0399c..3f1d5b981e3 100644 --- a/homeassistant/components/recorder/statistics.py +++ b/homeassistant/components/recorder/statistics.py @@ -11,6 +11,7 @@ from itertools import chain, groupby import logging from operator import itemgetter import re +from time import time as time_time from typing import TYPE_CHECKING, Any, Literal, TypedDict, cast from sqlalchemy import Select, and_, bindparam, func, lambda_stmt, select, text @@ -446,8 +447,9 @@ def _compile_hourly_statistics(session: Session, start: datetime) -> None: } # Insert compiled hourly statistics in the database + now_timestamp = time_time() session.add_all( - Statistics.from_stats_ts(metadata_id, summary_item) + 
Statistics.from_stats_ts(metadata_id, summary_item, now_timestamp) for metadata_id, summary_item in summary.items() ) @@ -578,6 +580,7 @@ def _compile_statistics( new_short_term_stats: list[StatisticsBase] = [] updated_metadata_ids: set[int] = set() + now_timestamp = time_time() # Insert collected statistics in the database for stats in platform_stats: modified_statistic_id, metadata_id = statistics_meta_manager.update_or_add( @@ -587,10 +590,7 @@ def _compile_statistics( modified_statistic_ids.add(modified_statistic_id) updated_metadata_ids.add(metadata_id) if new_stat := _insert_statistics( - session, - StatisticsShortTerm, - metadata_id, - stats["stat"], + session, StatisticsShortTerm, metadata_id, stats["stat"], now_timestamp ): new_short_term_stats.append(new_stat) @@ -666,10 +666,11 @@ def _insert_statistics( table: type[StatisticsBase], metadata_id: int, statistic: StatisticData, + now_timestamp: float, ) -> StatisticsBase | None: """Insert statistics in the database.""" try: - stat = table.from_stats(metadata_id, statistic) + stat = table.from_stats(metadata_id, statistic, now_timestamp) session.add(stat) except SQLAlchemyError: _LOGGER.exception( @@ -2347,11 +2348,12 @@ def _import_statistics_with_session( _, metadata_id = statistics_meta_manager.update_or_add( session, metadata, old_metadata_dict ) + now_timestamp = time_time() for stat in statistics: if stat_id := _statistics_exists(session, table, metadata_id, stat["start"]): _update_statistics(session, table, stat_id, stat) else: - _insert_statistics(session, table, metadata_id, stat) + _insert_statistics(session, table, metadata_id, stat, now_timestamp) if table != StatisticsShortTerm: return True diff --git a/tests/components/recorder/test_statistics.py b/tests/components/recorder/test_statistics.py index bdf39c5ef4a..6b1e1a655db 100644 --- a/tests/components/recorder/test_statistics.py +++ b/tests/components/recorder/test_statistics.py @@ -337,12 +337,12 @@ def mock_from_stats(): counter = 0 real_from_stats = StatisticsShortTerm.from_stats - def from_stats(metadata_id, stats): + def from_stats(metadata_id, stats, now_timestamp): nonlocal counter if counter == 0 and metadata_id == 2: counter += 1 return None - return real_from_stats(metadata_id, stats) + return real_from_stats(metadata_id, stats, now_timestamp) with patch( "homeassistant.components.recorder.statistics.StatisticsShortTerm.from_stats", From 598ce1f3b008ff323f9118df3aa46614205549b5 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Sun, 1 Dec 2024 16:17:55 +0100 Subject: [PATCH 0106/1198] Freeze integration setup timeout for recorder during non-live data migration (#131998) --- homeassistant/components/recorder/core.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py index 8c2e1c9e006..0c61f8a955e 100644 --- a/homeassistant/components/recorder/core.py +++ b/homeassistant/components/recorder/core.py @@ -740,7 +740,7 @@ class Recorder(threading.Thread): self.schema_version = schema_status.current_version # Do non-live data migration - migration.migrate_data_non_live(self, self.get_session, schema_status) + self._migrate_data_offline(schema_status) # Non-live migration is now completed, remaining steps are live self.migration_is_live = True @@ -916,6 +916,13 @@ class Recorder(threading.Thread): return False + def _migrate_data_offline( + self, schema_status: migration.SchemaValidationStatus + ) -> None: + """Migrate data.""" + with 
self.hass.timeout.freeze(DOMAIN): + migration.migrate_data_non_live(self, self.get_session, schema_status) + def _migrate_schema_offline( self, schema_status: migration.SchemaValidationStatus ) -> tuple[bool, migration.SchemaValidationStatus]: From bc7cfb6761849ceaaa4c5724c99ad00ea149fc93 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Sun, 1 Dec 2024 16:39:33 +0100 Subject: [PATCH 0107/1198] Use typed ConfigEntry in lamarzocco (#131892) --- homeassistant/components/lamarzocco/__init__.py | 11 +++++++---- homeassistant/components/lamarzocco/config_flow.py | 4 ++-- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/lamarzocco/__init__.py b/homeassistant/components/lamarzocco/__init__.py index da513bc8cff..09187848a0f 100644 --- a/homeassistant/components/lamarzocco/__init__.py +++ b/homeassistant/components/lamarzocco/__init__.py @@ -10,7 +10,6 @@ from pylamarzocco.const import BT_MODEL_PREFIXES, FirmwareType from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful from homeassistant.components.bluetooth import async_discovered_service_info -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_HOST, CONF_MAC, @@ -125,7 +124,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: + async def update_listener( + hass: HomeAssistant, entry: LaMarzoccoConfigEntry + ) -> None: await hass.config_entries.async_reload(entry.entry_id) entry.async_on_unload(entry.add_update_listener(update_listener)) @@ -133,12 +134,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) - return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) -async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_migrate_entry( + hass: HomeAssistant, entry: LaMarzoccoConfigEntry +) -> bool: """Migrate config entry.""" if entry.version > 2: # guard against downgrade from a future version diff --git a/homeassistant/components/lamarzocco/config_flow.py b/homeassistant/components/lamarzocco/config_flow.py index a727e3fe357..e4ee0682ae7 100644 --- a/homeassistant/components/lamarzocco/config_flow.py +++ b/homeassistant/components/lamarzocco/config_flow.py @@ -20,7 +20,6 @@ from homeassistant.components.dhcp import DhcpServiceInfo from homeassistant.config_entries import ( SOURCE_REAUTH, SOURCE_RECONFIGURE, - ConfigEntry, ConfigFlow, ConfigFlowResult, OptionsFlow, @@ -46,6 +45,7 @@ from homeassistant.helpers.selector import ( ) from .const import CONF_USE_BLUETOOTH, DOMAIN +from .coordinator import LaMarzoccoConfigEntry CONF_MACHINE = "machine" @@ -354,7 +354,7 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: LaMarzoccoConfigEntry, ) -> LmOptionsFlowHandler: """Create the options flow.""" return LmOptionsFlowHandler() From fd42c01a21832d64674f55dbb3e450dfefa405ed Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Sun, 1 Dec 2024 16:40:06 +0100 Subject: [PATCH 0108/1198] Use typed ConfigEntry in tedee (#131893) --- 
 homeassistant/components/tedee/__init__.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/homeassistant/components/tedee/__init__.py b/homeassistant/components/tedee/__init__.py
index 528a5052678..95348053805 100644
--- a/homeassistant/components/tedee/__init__.py
+++ b/homeassistant/components/tedee/__init__.py
@@ -16,7 +16,6 @@ from homeassistant.components.webhook import (
     async_register as webhook_register,
     async_unregister as webhook_unregister,
 )
-from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_WEBHOOK_ID, EVENT_HOMEASSISTANT_STOP, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import device_registry as dr
@@ -99,7 +98,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TedeeConfigEntry) -> boo
     return True
 
 
-async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_unload_entry(hass: HomeAssistant, entry: TedeeConfigEntry) -> bool:
     """Unload a config entry."""
     return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
 
@@ -131,7 +130,9 @@ def get_webhook_handler(
     return async_webhook_handler
 
 
-async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
+async def async_migrate_entry(
+    hass: HomeAssistant, config_entry: TedeeConfigEntry
+) -> bool:
     """Migrate old entry."""
     if config_entry.version > 1:
         # This means the user has downgraded from a future version

From 8343d7f348ff017d731b96e7679b202d161545c4 Mon Sep 17 00:00:00 2001
From: epenet <6771947+epenet@users.noreply.github.com>
Date: Sun, 1 Dec 2024 16:40:30 +0100
Subject: [PATCH 0109/1198] Use typed ConfigEntry in twentemilieu (#131894)

---
 homeassistant/components/twentemilieu/__init__.py    | 8 ++++++--
 homeassistant/components/twentemilieu/diagnostics.py | 5 +++--
 homeassistant/components/twentemilieu/entity.py      | 5 ++---
 homeassistant/components/twentemilieu/sensor.py      | 6 +++---
 4 files changed, 14 insertions(+), 10 deletions(-)

diff --git a/homeassistant/components/twentemilieu/__init__.py b/homeassistant/components/twentemilieu/__init__.py
index 0a2fb50c7c4..2796e9916f1 100644
--- a/homeassistant/components/twentemilieu/__init__.py
+++ b/homeassistant/components/twentemilieu/__init__.py
@@ -29,7 +29,9 @@ type TwenteMilieuDataUpdateCoordinator = DataUpdateCoordinator[
 type TwenteMilieuConfigEntry = ConfigEntry[TwenteMilieuDataUpdateCoordinator]
 
 
-async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_setup_entry(
+    hass: HomeAssistant, entry: TwenteMilieuConfigEntry
+) -> bool:
     """Set up Twente Milieu from a config entry."""
     session = async_get_clientsession(hass)
     twentemilieu = TwenteMilieu(
@@ -55,6 +57,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     return True
 
 
-async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_unload_entry(
+    hass: HomeAssistant, entry: TwenteMilieuConfigEntry
+) -> bool:
     """Unload Twente Milieu config entry."""
     return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
diff --git a/homeassistant/components/twentemilieu/diagnostics.py b/homeassistant/components/twentemilieu/diagnostics.py
index 9de3f9bfaff..75775303eb6 100644
--- a/homeassistant/components/twentemilieu/diagnostics.py
+++ b/homeassistant/components/twentemilieu/diagnostics.py
@@ -4,12 +4,13 @@ from __future__ import annotations
 
 from typing import Any
 
-from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 
+from . import TwenteMilieuConfigEntry
+
 
 async def async_get_config_entry_diagnostics(
-    hass: HomeAssistant, entry: ConfigEntry
+    hass: HomeAssistant, entry: TwenteMilieuConfigEntry
 ) -> dict[str, Any]:
     """Return diagnostics for a config entry."""
     return {
diff --git a/homeassistant/components/twentemilieu/entity.py b/homeassistant/components/twentemilieu/entity.py
index 896a8e32de9..0a2473f4524 100644
--- a/homeassistant/components/twentemilieu/entity.py
+++ b/homeassistant/components/twentemilieu/entity.py
@@ -2,13 +2,12 @@
 
 from __future__ import annotations
 
-from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_ID
 from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
 from homeassistant.helpers.entity import Entity
 from homeassistant.helpers.update_coordinator import CoordinatorEntity
 
-from . import TwenteMilieuDataUpdateCoordinator
+from . import TwenteMilieuConfigEntry, TwenteMilieuDataUpdateCoordinator
 from .const import DOMAIN
 
 
@@ -17,7 +16,7 @@ class TwenteMilieuEntity(CoordinatorEntity[TwenteMilieuDataUpdateCoordinator], E
 
     _attr_has_entity_name = True
 
-    def __init__(self, entry: ConfigEntry) -> None:
+    def __init__(self, entry: TwenteMilieuConfigEntry) -> None:
        """Initialize the Twente Milieu entity."""
         super().__init__(coordinator=entry.runtime_data)
         self._attr_device_info = DeviceInfo(
diff --git a/homeassistant/components/twentemilieu/sensor.py b/homeassistant/components/twentemilieu/sensor.py
index 2d2e3de0f0e..f5f91ce7080 100644
--- a/homeassistant/components/twentemilieu/sensor.py
+++ b/homeassistant/components/twentemilieu/sensor.py
@@ -12,11 +12,11 @@ from homeassistant.components.sensor import (
     SensorEntity,
     SensorEntityDescription,
 )
-from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_ID
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 
+from . import TwenteMilieuConfigEntry
 from .const import DOMAIN
 from .entity import TwenteMilieuEntity
 
@@ -64,7 +64,7 @@ SENSORS: tuple[TwenteMilieuSensorDescription, ...] = (
 
 async def async_setup_entry(
     hass: HomeAssistant,
-    entry: ConfigEntry,
+    entry: TwenteMilieuConfigEntry,
     async_add_entities: AddEntitiesCallback,
 ) -> None:
     """Set up Twente Milieu sensor based on a config entry."""
@@ -80,7 +80,7 @@ class TwenteMilieuSensor(TwenteMilieuEntity, SensorEntity):
 
     def __init__(
         self,
-        entry: ConfigEntry,
+        entry: TwenteMilieuConfigEntry,
         description: TwenteMilieuSensorDescription,
     ) -> None:
         """Initialize the Twente Milieu entity."""

From c55a4e9584a81d551e0954df9a6a47b4b7c797ec Mon Sep 17 00:00:00 2001
From: epenet <6771947+epenet@users.noreply.github.com>
Date: Sun, 1 Dec 2024 16:49:51 +0100
Subject: [PATCH 0110/1198] Cleanup pylint obsolete import checks (#131904)

---
 pylint/plugins/hass_imports.py | 266 ---------------------------------
 1 file changed, 266 deletions(-)

diff --git a/pylint/plugins/hass_imports.py b/pylint/plugins/hass_imports.py
index c6a869dd7fc..194f99ae700 100644
--- a/pylint/plugins/hass_imports.py
+++ b/pylint/plugins/hass_imports.py
@@ -37,140 +37,6 @@ _OBSOLETE_IMPORT: dict[str, list[ObsoleteImportMatch]] = {
             constant=re.compile(r"^cached_property$"),
         ),
     ],
-    "homeassistant.components.alarm_control_panel": [
-        ObsoleteImportMatch(
-            reason="replaced by AlarmControlPanelEntityFeature enum",
-            constant=re.compile(r"^SUPPORT_(\w*)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by CodeFormat enum",
-            constant=re.compile(r"^FORMAT_(\w*)$"),
-        ),
-    ],
-    "homeassistant.components.alarm_control_panel.const": [
-        ObsoleteImportMatch(
-            reason="replaced by AlarmControlPanelEntityFeature enum",
-            constant=re.compile(r"^SUPPORT_(\w*)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by CodeFormat enum",
-            constant=re.compile(r"^FORMAT_(\w*)$"),
-        ),
-    ],
-    "homeassistant.components.automation": [
-        ObsoleteImportMatch(
-            reason="replaced by TriggerActionType from helpers.trigger",
-            constant=re.compile(r"^AutomationActionType$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by TriggerData from helpers.trigger",
-            constant=re.compile(r"^AutomationTriggerData$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by TriggerInfo from helpers.trigger",
-            constant=re.compile(r"^AutomationTriggerInfo$"),
-        ),
-    ],
-    "homeassistant.components.binary_sensor": [
-        ObsoleteImportMatch(
-            reason="replaced by BinarySensorDeviceClass enum",
-            constant=re.compile(r"^DEVICE_CLASS_(\w*)$"),
-        ),
-    ],
-    "homeassistant.components.camera": [
-        ObsoleteImportMatch(
-            reason="replaced by CameraEntityFeature enum",
-            constant=re.compile(r"^SUPPORT_(\w*)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by StreamType enum",
-            constant=re.compile(r"^STREAM_TYPE_(\w*)$"),
-        ),
-    ],
-    "homeassistant.components.camera.const": [
-        ObsoleteImportMatch(
-            reason="replaced by StreamType enum",
-            constant=re.compile(r"^STREAM_TYPE_(\w*)$"),
-        ),
-    ],
-    "homeassistant.components.climate": [
-        ObsoleteImportMatch(
-            reason="replaced by HVACMode enum",
-            constant=re.compile(r"^HVAC_MODE_(\w*)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by ClimateEntityFeature enum",
-            constant=re.compile(r"^SUPPORT_(\w*)$"),
-        ),
-    ],
-    "homeassistant.components.climate.const": [
-        ObsoleteImportMatch(
-            reason="replaced by HVACAction enum",
-            constant=re.compile(r"^CURRENT_HVAC_(\w*)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by HVACMode enum",
-            constant=re.compile(r"^HVAC_MODE_(\w*)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by ClimateEntityFeature enum",
-            constant=re.compile(r"^SUPPORT_(\w*)$"),
-        ),
-    ],
-    "homeassistant.components.cover": [
-        ObsoleteImportMatch(
-            reason="replaced by CoverDeviceClass enum",
-            constant=re.compile(r"^DEVICE_CLASS_(\w*)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by CoverEntityFeature enum",
-            constant=re.compile(r"^SUPPORT_(\w*)$"),
-        ),
-    ],
-    "homeassistant.components.device_tracker": [
-        ObsoleteImportMatch(
-            reason="replaced by SourceType enum",
-            constant=re.compile(r"^SOURCE_TYPE_\w+$"),
-        ),
-    ],
-    "homeassistant.components.device_tracker.const": [
-        ObsoleteImportMatch(
-            reason="replaced by SourceType enum",
-            constant=re.compile(r"^SOURCE_TYPE_\w+$"),
-        ),
-    ],
-    "homeassistant.components.fan": [
-        ObsoleteImportMatch(
-            reason="replaced by FanEntityFeature enum",
-            constant=re.compile(r"^SUPPORT_(\w*)$"),
-        ),
-    ],
-    "homeassistant.components.humidifier": [
-        ObsoleteImportMatch(
-            reason="replaced by HumidifierDeviceClass enum",
-            constant=re.compile(r"^DEVICE_CLASS_(\w*)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by HumidifierEntityFeature enum",
-            constant=re.compile(r"^SUPPORT_(\w*)$"),
-        ),
-    ],
-    "homeassistant.components.humidifier.const": [
-        ObsoleteImportMatch(
-            reason="replaced by HumidifierDeviceClass enum",
-            constant=re.compile(r"^DEVICE_CLASS_(\w*)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by HumidifierEntityFeature enum",
-            constant=re.compile(r"^SUPPORT_(\w*)$"),
-        ),
-    ],
-    "homeassistant.components.lock": [
-        ObsoleteImportMatch(
-            reason="replaced by LockEntityFeature enum",
-            constant=re.compile(r"^SUPPORT_(\w*)$"),
-        ),
-    ],
     "homeassistant.components.light": [
         ObsoleteImportMatch(
             reason="replaced by ColorMode enum",
@@ -225,52 +91,12 @@ _OBSOLETE_IMPORT: dict[str, list[ObsoleteImportMatch]] = {
             constant=re.compile(r"^REPEAT_MODE(\w*)$"),
         ),
     ],
-    "homeassistant.components.remote": [
-        ObsoleteImportMatch(
-            reason="replaced by RemoteEntityFeature enum",
-            constant=re.compile(r"^SUPPORT_(\w*)$"),
-        ),
-    ],
-    "homeassistant.components.sensor": [
-        ObsoleteImportMatch(
-            reason="replaced by SensorDeviceClass enum",
-            constant=re.compile(r"^DEVICE_CLASS_(?!STATE_CLASSES)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by SensorStateClass enum",
-            constant=re.compile(r"^STATE_CLASS_(\w*)$"),
-        ),
-    ],
-    "homeassistant.components.siren": [
-        ObsoleteImportMatch(
-            reason="replaced by SirenEntityFeature enum",
-            constant=re.compile(r"^SUPPORT_(\w*)$"),
-        ),
-    ],
-    "homeassistant.components.siren.const": [
-        ObsoleteImportMatch(
-            reason="replaced by SirenEntityFeature enum",
-            constant=re.compile(r"^SUPPORT_(\w*)$"),
-        ),
-    ],
-    "homeassistant.components.switch": [
-        ObsoleteImportMatch(
-            reason="replaced by SwitchDeviceClass enum",
-            constant=re.compile(r"^DEVICE_CLASS_(\w*)$"),
-        ),
-    ],
     "homeassistant.components.vacuum": [
         ObsoleteImportMatch(
             reason="replaced by VacuumEntityFeature enum",
             constant=re.compile(r"^SUPPORT_(\w*)$"),
         ),
     ],
-    "homeassistant.components.water_heater": [
-        ObsoleteImportMatch(
-            reason="replaced by WaterHeaterEntityFeature enum",
-            constant=re.compile(r"^SUPPORT_(\w*)$"),
-        ),
-    ],
     "homeassistant.config_entries": [
         ObsoleteImportMatch(
             reason="replaced by ConfigEntryDisabler enum",
@@ -282,86 +108,6 @@ _OBSOLETE_IMPORT: dict[str, list[ObsoleteImportMatch]] = {
             reason="replaced by local constants",
             constant=re.compile(r"^CONF_UNIT_SYSTEM_(\w+)$"),
         ),
-        ObsoleteImportMatch(
-            reason="replaced by unit enums",
-            constant=re.compile(r"^DATA_(\w+)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by ***DeviceClass enum",
-            constant=re.compile(r"^DEVICE_CLASS_(\w+)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by unit enums",
-            constant=re.compile(r"^ELECTRIC_(\w+)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by unit enums",
-            constant=re.compile(r"^ENERGY_(\w+)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by EntityCategory enum",
-            constant=re.compile(r"^(ENTITY_CATEGORY_(\w+))|(ENTITY_CATEGORIES)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by unit enums",
-            constant=re.compile(r"^FREQUENCY_(\w+)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by unit enums",
-            constant=re.compile(r"^IRRADIATION_(\w+)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by unit enums",
-            constant=re.compile(r"^LENGTH_(\w+)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by unit enums",
-            constant=re.compile(r"^MASS_(\w+)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by unit enums",
-            constant=re.compile(r"^POWER_(?!VOLT_AMPERE_REACTIVE)(\w+)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by unit enums",
-            constant=re.compile(r"^PRECIPITATION_(\w+)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by unit enums",
-            constant=re.compile(r"^PRESSURE_(\w+)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by unit enums",
-            constant=re.compile(r"^SOUND_PRESSURE_(\w+)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by unit enums",
-            constant=re.compile(r"^SPEED_(\w+)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by unit enums",
-            constant=re.compile(r"^TEMP_(\w+)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by unit enums",
-            constant=re.compile(r"^TIME_(\w+)$"),
-        ),
-        ObsoleteImportMatch(
-            reason="replaced by unit enums",
-            constant=re.compile(r"^VOLUME_(\w+)$"),
-        ),
-    ],
-    "homeassistant.core": [
-        ObsoleteImportMatch(
-            reason="replaced by ConfigSource enum",
-            constant=re.compile(r"^SOURCE_(\w*)$"),
-        ),
-    ],
-    "homeassistant.data_entry_flow": [
-        ObsoleteImportMatch(
-            reason="replaced by FlowResultType enum",
-            constant=re.compile(r"^RESULT_TYPE_(\w*)$"),
-        ),
-    ],
     "homeassistant.helpers.config_validation": [
         ObsoleteImportMatch(
             constant=re.compile(r"^PLATFORM_SCHEMA(_BASE)?$"),
         ),
     ],
-    "homeassistant.helpers.device_registry": [
-        ObsoleteImportMatch(
-            reason="replaced by DeviceEntryDisabler enum",
-            constant=re.compile(r"^DISABLED_(\w*)$"),
-        ),
-    ],
     "homeassistant.helpers.json": [
         ObsoleteImportMatch(
             reason="moved to homeassistant.util.json",
            constant=re.compile(
            ),
        ),
     ],
-    "homeassistant.util": [
-        ObsoleteImportMatch(
-            reason="replaced by unit_conversion.***Converter",
-            constant=re.compile(r"^(distance|pressure|speed|temperature|volume)$"),
-        ),
-    ],
     "homeassistant.util.unit_system": [
         ObsoleteImportMatch(
             reason="replaced by US_CUSTOMARY_SYSTEM",

From 3aae9b629fc8ca0b8cbf8a2981f0bce20a78e16f Mon Sep 17 00:00:00 2001
From: Jan Bouwhuis
Date: Sun, 1 Dec 2024 16:53:06 +0100
Subject: [PATCH 0111/1198] Add exception translation for entity action not
 supported (#131956)

---
 .../components/homeassistant/strings.json      |  3 +++
 homeassistant/exceptions.py                    | 19 +++++++++++++++++++
 homeassistant/helpers/service.py               |  5 ++---
 tests/components/august/test_lock.py           |  6 ++++--
 tests/components/calendar/test_init.py         | 11 ++++++++---
 tests/components/google/test_init.py           | 13 +++++++++----
 tests/components/matter/test_vacuum.py         | 10 +++++++---
 .../components/samsungtv/test_media_player.py  |  6 ++++--
 tests/components/tedee/test_lock.py            |  9 ++++++---
 tests/components/tesla_fleet/test_climate.py   | 13 ++++++++++---
 tests/components/todo/test_init.py            | 13 +++++++++----
 tests/components/yale/test_lock.py             | 13 ++++++++++---
 tests/helpers/test_service.py                  |  8 +++++++-
 13 files changed, 98 insertions(+), 31 deletions(-)

diff --git a/homeassistant/components/homeassistant/strings.json b/homeassistant/components/homeassistant/strings.json
index da8a1015d79..52b330bfbc8 100644
--- a/homeassistant/components/homeassistant/strings.json
+++ b/homeassistant/components/homeassistant/strings.json
@@ -224,6 +224,9 @@
     "service_not_found": {
       "message": "Action {domain}.{service} not found."
     },
+    "service_not_supported": {
+      "message": "Entity {entity_id} does not support action {domain}.{service}."
+    },
     "service_does_not_support_response": {
       "message": "An action which does not return responses can't be called with {return_response}."
     },
diff --git a/homeassistant/exceptions.py b/homeassistant/exceptions.py
index f308cbc5cd8..85fe55277fa 100644
--- a/homeassistant/exceptions.py
+++ b/homeassistant/exceptions.py
@@ -270,6 +270,25 @@ class ServiceNotFound(ServiceValidationError):
         self.generate_message = True
 
 
+class ServiceNotSupported(ServiceValidationError):
+    """Raised when an entity action is not supported."""
+
+    def __init__(self, domain: str, service: str, entity_id: str) -> None:
+        """Initialize ServiceNotSupported exception."""
+        super().__init__(
+            translation_domain="homeassistant",
+            translation_key="service_not_supported",
+            translation_placeholders={
+                "domain": domain,
+                "service": service,
+                "entity_id": entity_id,
+            },
+        )
+        self.domain = domain
+        self.service = service
+        self.generate_message = True
+
+
 class MaxLengthExceeded(HomeAssistantError):
     """Raised when a property value has exceeded the max character length."""
 
diff --git a/homeassistant/helpers/service.py b/homeassistant/helpers/service.py
index 31b2e8e8ac8..35135010452 100644
--- a/homeassistant/helpers/service.py
+++ b/homeassistant/helpers/service.py
@@ -42,6 +42,7 @@ from homeassistant.core import (
 )
 from homeassistant.exceptions import (
     HomeAssistantError,
+    ServiceNotSupported,
     TemplateError,
     Unauthorized,
     UnknownUser,
@@ -986,9 +987,7 @@ async def entity_service_call(
         ):
             # If entity explicitly referenced, raise an error
             if referenced is not None and entity.entity_id in referenced.referenced:
-                raise HomeAssistantError(
-                    f"Entity {entity.entity_id} does not support this service."
- ) + raise ServiceNotSupported(call.domain, call.service, entity.entity_id) continue diff --git a/tests/components/august/test_lock.py b/tests/components/august/test_lock.py index 1b8c98e299c..eb177a35cfb 100644 --- a/tests/components/august/test_lock.py +++ b/tests/components/august/test_lock.py @@ -20,8 +20,9 @@ from homeassistant.const import ( STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import ServiceNotSupported from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from .mocks import ( @@ -453,8 +454,9 @@ async def test_open_throws_hass_service_not_supported_error( hass: HomeAssistant, ) -> None: """Test open throws correct error on entity does not support this service error.""" + await async_setup_component(hass, "homeassistant", {}) mocked_lock_detail = await _mock_operative_august_lock_detail(hass) await _create_august_with_devices(hass, [mocked_lock_detail]) data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} - with pytest.raises(HomeAssistantError, match="does not support this service"): + with pytest.raises(ServiceNotSupported, match="does not support action"): await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) diff --git a/tests/components/calendar/test_init.py b/tests/components/calendar/test_init.py index 4ad5e11b8e4..36b102b933a 100644 --- a/tests/components/calendar/test_init.py +++ b/tests/components/calendar/test_init.py @@ -14,7 +14,8 @@ import voluptuous as vol from homeassistant.components.calendar import DOMAIN, SERVICE_GET_EVENTS from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceNotSupported +from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from .conftest import MockCalendarEntity, MockConfigEntry @@ -214,8 +215,12 @@ async def test_unsupported_websocket( async def test_unsupported_create_event_service(hass: HomeAssistant) -> None: """Test unsupported service call.""" - - with pytest.raises(HomeAssistantError, match="does not support this service"): + await async_setup_component(hass, "homeassistant", {}) + with pytest.raises( + ServiceNotSupported, + match="Entity calendar.calendar_1 does not " + "support action calendar.create_event", + ): await hass.services.async_call( DOMAIN, "create_event", diff --git a/tests/components/google/test_init.py b/tests/components/google/test_init.py index 536a1440958..ad43e341968 100644 --- a/tests/components/google/test_init.py +++ b/tests/components/google/test_init.py @@ -20,7 +20,8 @@ from homeassistant.components.google.const import CONF_CALENDAR_ACCESS from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_FRIENDLY_NAME, STATE_OFF from homeassistant.core import HomeAssistant, State -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceNotSupported +from homeassistant.setup import async_setup_component from homeassistant.util.dt import UTC, utcnow from .conftest import ( @@ -593,7 +594,7 @@ async def test_unsupported_create_event( aioclient_mock: AiohttpClientMocker, ) -> None: """Test create event service call is unsupported for virtual calendars.""" - + await async_setup_component(hass, 
"homeassistant", {}) mock_calendars_list({"items": [test_api_calendar]}) mock_events_list({}) assert await component_setup() @@ -601,8 +602,12 @@ async def test_unsupported_create_event( start_datetime = datetime.datetime.now(tz=zoneinfo.ZoneInfo("America/Regina")) delta = datetime.timedelta(days=3, hours=3) end_datetime = start_datetime + delta + entity_id = "calendar.backyard_light" - with pytest.raises(HomeAssistantError, match="does not support this service"): + with pytest.raises( + ServiceNotSupported, + match=f"Entity {entity_id} does not support action google.create_event", + ): await hass.services.async_call( DOMAIN, "create_event", @@ -613,7 +618,7 @@ async def test_unsupported_create_event( "summary": TEST_EVENT_SUMMARY, "description": TEST_EVENT_DESCRIPTION, }, - target={"entity_id": "calendar.backyard_light"}, + target={"entity_id": entity_id}, blocking=True, ) diff --git a/tests/components/matter/test_vacuum.py b/tests/components/matter/test_vacuum.py index 86f7542395a..1b33f6a2fe2 100644 --- a/tests/components/matter/test_vacuum.py +++ b/tests/components/matter/test_vacuum.py @@ -8,8 +8,10 @@ import pytest from syrupy import SnapshotAssertion from homeassistant.const import Platform -from homeassistant.core import HomeAssistant, HomeAssistantError +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceNotSupported from homeassistant.helpers import entity_registry as er +from homeassistant.setup import async_setup_component from .common import ( set_node_attribute, @@ -35,6 +37,8 @@ async def test_vacuum_actions( matter_node: MatterNode, ) -> None: """Test vacuum entity actions.""" + # Fetch translations + await async_setup_component(hass, "homeassistant", {}) entity_id = "vacuum.mock_vacuum" state = hass.states.get(entity_id) assert state @@ -96,8 +100,8 @@ async def test_vacuum_actions( # test stop action # stop command is not supported by the vacuum fixture with pytest.raises( - HomeAssistantError, - match="Entity vacuum.mock_vacuum does not support this service.", + ServiceNotSupported, + match="Entity vacuum.mock_vacuum does not support action vacuum.stop", ): await hass.services.async_call( "vacuum", diff --git a/tests/components/samsungtv/test_media_player.py b/tests/components/samsungtv/test_media_player.py index ef7e58251e8..1a7c8713b17 100644 --- a/tests/components/samsungtv/test_media_player.py +++ b/tests/components/samsungtv/test_media_player.py @@ -76,7 +76,8 @@ from homeassistant.const import ( STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import ServiceNotSupported +from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . 
import async_wait_config_entry_reload, setup_samsungtv_entry @@ -1021,8 +1022,9 @@ async def test_turn_on_wol(hass: HomeAssistant) -> None: async def test_turn_on_without_turnon(hass: HomeAssistant, remote: Mock) -> None: """Test turn on.""" + await async_setup_component(hass, "homeassistant", {}) await setup_samsungtv_entry(hass, MOCK_CONFIG) - with pytest.raises(HomeAssistantError, match="does not support this service"): + with pytest.raises(ServiceNotSupported, match="does not support action"): await hass.services.async_call( MP_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, True ) diff --git a/tests/components/tedee/test_lock.py b/tests/components/tedee/test_lock.py index 45eae6e22d9..d84acb212ea 100644 --- a/tests/components/tedee/test_lock.py +++ b/tests/components/tedee/test_lock.py @@ -24,8 +24,9 @@ from homeassistant.components.lock import ( from homeassistant.components.webhook import async_generate_url from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceNotSupported from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.setup import async_setup_component from .conftest import WEBHOOK_ID @@ -113,6 +114,8 @@ async def test_lock_without_pullspring( snapshot: SnapshotAssertion, ) -> None: """Test the tedee lock without pullspring.""" + # Fetch translations + await async_setup_component(hass, "homeassistant", {}) mock_tedee.lock.return_value = None mock_tedee.unlock.return_value = None mock_tedee.open.return_value = None @@ -131,8 +134,8 @@ async def test_lock_without_pullspring( assert device == snapshot with pytest.raises( - HomeAssistantError, - match="Entity lock.lock_2c3d does not support this service.", + ServiceNotSupported, + match=f"Entity lock.lock_2c3d does not support action {LOCK_DOMAIN}.{SERVICE_OPEN}", ): await hass.services.async_call( LOCK_DOMAIN, diff --git a/tests/components/tesla_fleet/test_climate.py b/tests/components/tesla_fleet/test_climate.py index b8cb7f1269b..b45e5259a5c 100644 --- a/tests/components/tesla_fleet/test_climate.py +++ b/tests/components/tesla_fleet/test_climate.py @@ -24,8 +24,13 @@ from homeassistant.components.climate import ( from homeassistant.components.tesla_fleet.coordinator import VEHICLE_INTERVAL from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.exceptions import ( + HomeAssistantError, + ServiceNotSupported, + ServiceValidationError, +) from homeassistant.helpers import entity_registry as er +from homeassistant.setup import async_setup_component from . 
import assert_entities, setup_platform from .const import ( @@ -391,6 +396,7 @@ async def test_climate_noscope( snapshot: SnapshotAssertion, ) -> None: """Tests with no command scopes.""" + await async_setup_component(hass, "homeassistant", {}) await setup_platform(hass, readonly_config_entry, [Platform.CLIMATE]) entity_id = "climate.test_climate" @@ -405,8 +411,9 @@ async def test_climate_noscope( ) with pytest.raises( - HomeAssistantError, - match="Entity climate.test_climate does not support this service.", + ServiceNotSupported, + match="Entity climate.test_climate does not " + "support action climate.set_temperature", ): await hass.services.async_call( CLIMATE_DOMAIN, diff --git a/tests/components/todo/test_init.py b/tests/components/todo/test_init.py index fd052a7f8a3..8e8c010f758 100644 --- a/tests/components/todo/test_init.py +++ b/tests/components/todo/test_init.py @@ -27,7 +27,11 @@ from homeassistant.components.todo import ( from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.exceptions import ( + HomeAssistantError, + ServiceNotSupported, + ServiceValidationError, +) from homeassistant.helpers import intent from homeassistant.setup import async_setup_component @@ -941,14 +945,15 @@ async def test_unsupported_service( payload: dict[str, Any] | None, ) -> None: """Test a To-do list that does not support features.""" - + # Fetch translations + await async_setup_component(hass, "homeassistant", "") entity1 = TodoListEntity() entity1.entity_id = "todo.entity1" await create_mock_platform(hass, [entity1]) with pytest.raises( - HomeAssistantError, - match="does not support this service", + ServiceNotSupported, + match=f"Entity todo.entity1 does not support action {DOMAIN}.{service_name}", ): await hass.services.async_call( DOMAIN, diff --git a/tests/components/yale/test_lock.py b/tests/components/yale/test_lock.py index f0fe018759c..f6b96120d0d 100644 --- a/tests/components/yale/test_lock.py +++ b/tests/components/yale/test_lock.py @@ -18,7 +18,7 @@ from homeassistant.const import ( STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import ServiceNotSupported from homeassistant.helpers import device_registry as dr, entity_registry as er import homeassistant.util.dt as dt_util @@ -29,6 +29,7 @@ from .mocks import ( _mock_lock_from_fixture, _mock_lock_with_unlatch, _mock_operative_yale_lock_detail, + async_setup_component, ) from tests.common import async_fire_time_changed @@ -418,8 +419,14 @@ async def test_open_throws_hass_service_not_supported_error( hass: HomeAssistant, ) -> None: """Test open throws correct error on entity does not support this service error.""" + # Fetch translations + await async_setup_component(hass, "homeassistant", {}) mocked_lock_detail = await _mock_operative_yale_lock_detail(hass) await _create_yale_with_devices(hass, [mocked_lock_detail]) - data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} - with pytest.raises(HomeAssistantError, match="does not support this service"): + entity_id = "lock.a6697750d607098bae8d6baa11ef8063_name" + data = {ATTR_ENTITY_ID: entity_id} + with pytest.raises( + ServiceNotSupported, + match=f"Entity {entity_id} does not support action {LOCK_DOMAIN}.{SERVICE_OPEN}", + ): await 
hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) diff --git a/tests/helpers/test_service.py b/tests/helpers/test_service.py index d0e1aa34340..e63cb69909c 100644 --- a/tests/helpers/test_service.py +++ b/tests/helpers/test_service.py @@ -1274,6 +1274,8 @@ async def test_register_with_mixed_case(hass: HomeAssistant) -> None: async def test_call_with_required_features(hass: HomeAssistant, mock_entities) -> None: """Test service calls invoked only if entity has required features.""" + # Set up homeassistant component to fetch the translations + await async_setup_component(hass, "homeassistant", {}) test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, @@ -1293,7 +1295,11 @@ async def test_call_with_required_features(hass: HomeAssistant, mock_entities) - # Test we raise if we target entity ID that does not support the service test_service_mock.reset_mock() - with pytest.raises(exceptions.HomeAssistantError): + with pytest.raises( + exceptions.ServiceNotSupported, + match="Entity light.living_room does not " + "support action test_domain.test_service", + ): await service.entity_service_call( hass, mock_entities, From 2b094ee25d51bec2887ba65848eef64f6f7fb9e8 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Sun, 1 Dec 2024 16:54:05 +0100 Subject: [PATCH 0112/1198] Improve renault config-flow translation strings (#131706) --- homeassistant/components/renault/strings.json | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/renault/strings.json b/homeassistant/components/renault/strings.json index a6487772bb6..7d9cae1bcf1 100644 --- a/homeassistant/components/renault/strings.json +++ b/homeassistant/components/renault/strings.json @@ -13,14 +13,21 @@ "step": { "kamereon": { "data": { - "kamereon_account_id": "Kamereon account id" + "kamereon_account_id": "Account ID" }, - "title": "Select Kamereon account id" + "data_description": { + "kamereon_account_id": "The Kamereon account ID associated with your vehicle" + }, + "title": "Kamereon Account ID", + "description": "You have multiple Kamereon accounts associated to this email, please select one" }, "reauth_confirm": { "data": { "password": "[%key:common::config_flow::data::password%]" }, + "data_description": { + "password": "Your MyRenault phone application password" + }, "description": "Please update your password for {username}", "title": "[%key:common::config_flow::title::reauth%]" }, @@ -30,6 +37,11 @@ "username": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]" }, + "data_description": { + "locale": "Your country code", + "username": "Your MyRenault phone application email address", + "password": "Your MyRenault phone application password" + }, "title": "Set Renault credentials" } } From a0541c7fe6e82e180eda93c2033f011f905aa44d Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Sun, 1 Dec 2024 16:55:43 +0100 Subject: [PATCH 0113/1198] Improve renault config flow tests (#131698) --- homeassistant/components/renault/quality_scale.yaml | 4 +--- tests/components/renault/test_config_flow.py | 2 ++ 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/renault/quality_scale.yaml b/homeassistant/components/renault/quality_scale.yaml index aa693e8e86d..b49ff669895 100644 --- a/homeassistant/components/renault/quality_scale.yaml +++ 
b/homeassistant/components/renault/quality_scale.yaml @@ -4,9 +4,7 @@ rules: appropriate-polling: done brands: done common-modules: done - config-flow-test-coverage: - status: todo - comment: Tests are not asserting the unique id + config-flow-test-coverage: done config-flow: done dependency-transparency: done docs-actions: done diff --git a/tests/components/renault/test_config_flow.py b/tests/components/renault/test_config_flow.py index 56e0c8a99d7..781b7efe226 100644 --- a/tests/components/renault/test_config_flow.py +++ b/tests/components/renault/test_config_flow.py @@ -101,6 +101,7 @@ async def test_config_flow_single_account( assert result["data"][CONF_PASSWORD] == "test" assert result["data"][CONF_KAMEREON_ACCOUNT_ID] == "account_id_1" assert result["data"][CONF_LOCALE] == "fr_FR" + assert result["context"]["unique_id"] == "account_id_1" assert len(mock_setup_entry.mock_calls) == 1 @@ -189,6 +190,7 @@ async def test_config_flow_multiple_accounts( assert result["data"][CONF_PASSWORD] == "test" assert result["data"][CONF_KAMEREON_ACCOUNT_ID] == "account_id_2" assert result["data"][CONF_LOCALE] == "fr_FR" + assert result["context"]["unique_id"] == "account_id_2" assert len(mock_setup_entry.mock_calls) == 1 From c54eed360726a84927d7f07e7c1fb44eb607d284 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Sun, 1 Dec 2024 16:58:24 +0100 Subject: [PATCH 0114/1198] Improve recorder migration logging (#132006) --- .../components/recorder/migration.py | 24 ++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index c9e36f47218..fffecff149c 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -313,7 +313,7 @@ def _migrate_schema( for version in range(current_version, end_version): new_version = version + 1 - _LOGGER.info("Upgrading recorder db schema to version %s", new_version) + _LOGGER.warning("Upgrading recorder db schema to version %s", new_version) _apply_update(instance, hass, engine, session_maker, new_version, start_version) with session_scope(session=session_maker()) as session: session.add(SchemaChanges(schema_version=new_version)) @@ -2326,9 +2326,15 @@ class BaseMigration(ABC): """ if self.schema_version < self.required_schema_version: # Schema is too old, we must have to migrate + _LOGGER.info( + "Data migration '%s' needed, schema too old", self.migration_id + ) return True if self.migration_changes.get(self.migration_id, -1) >= self.migration_version: # The migration changes table indicates that the migration has been done + _LOGGER.debug( + "Data migration '%s' not needed, already completed", self.migration_id + ) return False # We do not know if the migration is done from the # migration changes table so we must check the index and data @@ -2338,10 +2344,19 @@ class BaseMigration(ABC): and get_index_by_name(session, self.index_to_drop[0], self.index_to_drop[1]) is not None ): + _LOGGER.info( + "Data migration '%s' needed, index to drop still exists", + self.migration_id, + ) return True needs_migrate = self.needs_migrate_impl(instance, session) if needs_migrate.migration_done: _mark_migration_done(session, self.__class__) + _LOGGER.info( + "Data migration '%s' needed: %s", + self.migration_id, + needs_migrate.needs_migrate, + ) return needs_migrate.needs_migrate @@ -2354,10 +2369,17 @@ class BaseOffLineMigration(BaseMigration): """Migrate all data.""" with session_scope(session=session_maker()) 
as session: if not self.needs_migrate(instance, session): + _LOGGER.debug("Migration not needed for '%s'", self.migration_id) self.migration_done(instance, session) return + _LOGGER.warning( + "The database is about to do data migration step '%s', %s", + self.migration_id, + MIGRATION_NOTE_OFFLINE, + ) while not self.migrate_data(instance): pass + _LOGGER.warning("Data migration step '%s' completed", self.migration_id) @database_job_retry_wrapper_method("migrate data", 10) def migrate_data(self, instance: Recorder) -> bool: From cf0ee635077114961f6e508be56ce7620c718c18 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Sun, 1 Dec 2024 18:26:29 +0100 Subject: [PATCH 0115/1198] Simplify recorder RecorderRunsManager (#131785) --- .../recorder/table_managers/recorder_runs.py | 73 +++---------------- .../table_managers/test_recorder_runs.py | 32 ++------ 2 files changed, 15 insertions(+), 90 deletions(-) diff --git a/homeassistant/components/recorder/table_managers/recorder_runs.py b/homeassistant/components/recorder/table_managers/recorder_runs.py index b0b9818118b..4ca0aa18b88 100644 --- a/homeassistant/components/recorder/table_managers/recorder_runs.py +++ b/homeassistant/components/recorder/table_managers/recorder_runs.py @@ -2,8 +2,6 @@ from __future__ import annotations -import bisect -from dataclasses import dataclass from datetime import datetime from sqlalchemy.orm.session import Session @@ -11,34 +9,6 @@ from sqlalchemy.orm.session import Session import homeassistant.util.dt as dt_util from ..db_schema import RecorderRuns -from ..models import process_timestamp - - -def _find_recorder_run_for_start_time( - run_history: _RecorderRunsHistory, start: datetime -) -> RecorderRuns | None: - """Find the recorder run for a start time in _RecorderRunsHistory.""" - run_timestamps = run_history.run_timestamps - runs_by_timestamp = run_history.runs_by_timestamp - - # bisect_left tells us were we would insert - # a value in the list of runs after the start timestamp. - # - # The run before that (idx-1) is when the run started - # - # If idx is 0, history never ran before the start timestamp - # - if idx := bisect.bisect_left(run_timestamps, start.timestamp()): - return runs_by_timestamp[run_timestamps[idx - 1]] - return None - - -@dataclass(frozen=True) -class _RecorderRunsHistory: - """Bisectable history of RecorderRuns.""" - - run_timestamps: list[int] - runs_by_timestamp: dict[int, RecorderRuns] class RecorderRunsManager: @@ -48,7 +18,7 @@ class RecorderRunsManager: """Track recorder run history.""" self._recording_start = dt_util.utcnow() self._current_run_info: RecorderRuns | None = None - self._run_history = _RecorderRunsHistory([], {}) + self._first_run: RecorderRuns | None = None @property def recording_start(self) -> datetime: @@ -58,9 +28,7 @@ class RecorderRunsManager: @property def first(self) -> RecorderRuns: """Get the first run.""" - if runs_by_timestamp := self._run_history.runs_by_timestamp: - return next(iter(runs_by_timestamp.values())) - return self.current + return self._first_run or self.current @property def current(self) -> RecorderRuns: @@ -78,15 +46,6 @@ class RecorderRunsManager: """Return if a run is active.""" return self._current_run_info is not None - def get(self, start: datetime) -> RecorderRuns | None: - """Return the recorder run that started before or at start. 
- - If the first run started after the start, return None - """ - if start >= self.recording_start: - return self.current - return _find_recorder_run_for_start_time(self._run_history, start) - def start(self, session: Session) -> None: """Start a new run. @@ -122,31 +81,17 @@ class RecorderRunsManager: Must run in the recorder thread. """ - run_timestamps: list[int] = [] - runs_by_timestamp: dict[int, RecorderRuns] = {} - - for run in session.query(RecorderRuns).order_by(RecorderRuns.start.asc()).all(): + if ( + run := session.query(RecorderRuns) + .order_by(RecorderRuns.start.asc()) + .first() + ): session.expunge(run) - if run_dt := process_timestamp(run.start): - # Not sure if this is correct or runs_by_timestamp annotation should be changed - timestamp = int(run_dt.timestamp()) - run_timestamps.append(timestamp) - runs_by_timestamp[timestamp] = run - - # - # self._run_history is accessed in get() - # which is allowed to be called from any thread - # - # We use a dataclass to ensure that when we update - # run_timestamps and runs_by_timestamp - # are never out of sync with each other. - # - self._run_history = _RecorderRunsHistory(run_timestamps, runs_by_timestamp) + self._first_run = run def clear(self) -> None: """Clear the current run after ending it. Must run in the recorder thread. """ - if self._current_run_info: - self._current_run_info = None + self._current_run_info = None diff --git a/tests/components/recorder/table_managers/test_recorder_runs.py b/tests/components/recorder/table_managers/test_recorder_runs.py index 41f3a8fef4d..e79def01bad 100644 --- a/tests/components/recorder/table_managers/test_recorder_runs.py +++ b/tests/components/recorder/table_managers/test_recorder_runs.py @@ -21,6 +21,11 @@ async def test_run_history(recorder_mock: Recorder, hass: HomeAssistant) -> None two_days_ago = now - timedelta(days=2) one_day_ago = now - timedelta(days=1) + # Test that the first run falls back to the current run + assert process_timestamp( + instance.recorder_runs_manager.first.start + ) == process_timestamp(instance.recorder_runs_manager.current.start) + with instance.get_session() as session: session.add(RecorderRuns(start=three_days_ago, created=three_days_ago)) session.add(RecorderRuns(start=two_days_ago, created=two_days_ago)) @@ -29,32 +34,7 @@ async def test_run_history(recorder_mock: Recorder, hass: HomeAssistant) -> None instance.recorder_runs_manager.load_from_db(session) assert ( - process_timestamp( - instance.recorder_runs_manager.get( - three_days_ago + timedelta(microseconds=1) - ).start - ) - == three_days_ago - ) - assert ( - process_timestamp( - instance.recorder_runs_manager.get( - two_days_ago + timedelta(microseconds=1) - ).start - ) - == two_days_ago - ) - assert ( - process_timestamp( - instance.recorder_runs_manager.get( - one_day_ago + timedelta(microseconds=1) - ).start - ) - == one_day_ago - ) - assert ( - process_timestamp(instance.recorder_runs_manager.get(now).start) - == instance.recorder_runs_manager.recording_start + process_timestamp(instance.recorder_runs_manager.first.start) == three_days_ago ) From ff1702eefab97922c1e164db09a62be232e61aae Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Sun, 1 Dec 2024 19:40:40 +0100 Subject: [PATCH 0116/1198] Remove unnecessary assignment in Recorder._process_state_changed_event_into_session (#132011) --- homeassistant/components/recorder/core.py | 1 - 1 file changed, 1 deletion(-) diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py index 
0c61f8a955e..a3163d5b396 100644 --- a/homeassistant/components/recorder/core.py +++ b/homeassistant/components/recorder/core.py @@ -1128,7 +1128,6 @@ class Recorder(threading.Thread): # Map the event data to the StateAttributes table shared_attrs = shared_attrs_bytes.decode("utf-8") - dbstate.attributes = None # Matching attributes found in the pending commit if pending_event_data := state_attributes_manager.get_pending(shared_attrs): dbstate.state_attributes = pending_event_data From bd3f4323763b415d68fd487ce9fb32686ec78e77 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Sun, 1 Dec 2024 20:55:27 +0100 Subject: [PATCH 0117/1198] Clarify description of fan actions, fix typo (#132023) --- homeassistant/components/fan/strings.json | 24 +++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/fan/strings.json b/homeassistant/components/fan/strings.json index aab714d3e07..c4951e88c91 100644 --- a/homeassistant/components/fan/strings.json +++ b/homeassistant/components/fan/strings.json @@ -56,17 +56,17 @@ "services": { "set_preset_mode": { "name": "Set preset mode", - "description": "Sets preset mode.", + "description": "Sets preset fan mode.", "fields": { "preset_mode": { "name": "Preset mode", - "description": "Preset mode." + "description": "Preset fan mode." } } }, "set_percentage": { "name": "Set speed", - "description": "Sets the fan speed.", + "description": "Sets the speed of a fan.", "fields": { "percentage": { "name": "Percentage", @@ -94,45 +94,45 @@ }, "oscillate": { "name": "Oscillate", - "description": "Controls oscillatation of the fan.", + "description": "Controls the oscillation of a fan.", "fields": { "oscillating": { "name": "Oscillating", - "description": "Turn on/off oscillation." + "description": "Turns oscillation on/off." } } }, "toggle": { "name": "[%key:common::action::toggle%]", - "description": "Toggles the fan on/off." + "description": "Toggles a fan on/off." }, "set_direction": { "name": "Set direction", - "description": "Sets the fan rotation direction.", + "description": "Sets a fan's rotation direction.", "fields": { "direction": { "name": "Direction", - "description": "Direction to rotate." + "description": "Direction of the fan rotation." } } }, "increase_speed": { "name": "Increase speed", - "description": "Increases the speed of the fan.", + "description": "Increases the speed of a fan.", "fields": { "percentage_step": { "name": "Increment", - "description": "Increases the speed by a percentage step." + "description": "Percentage step by which the speed should be increased." } } }, "decrease_speed": { "name": "Decrease speed", - "description": "Decreases the speed of the fan.", + "description": "Decreases the speed of a fan.", "fields": { "percentage_step": { "name": "Decrement", - "description": "Decreases the speed by a percentage step." + "description": "Percentage step by which the speed should be decreased." } } } From 82e190dc4b565e5f0ab54049a93aead03993d4dd Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 1 Dec 2024 14:37:03 -0600 Subject: [PATCH 0118/1198] Bump propcache to 0.2.1 (#132022) --- .github/workflows/wheels.yml | 2 +- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index e0a850fa340..749f95fa922 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -143,7 +143,7 @@ jobs: wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-dev" - skip-binary: aiohttp;multidict;yarl;SQLAlchemy + skip-binary: aiohttp;multidict;propcache;yarl;SQLAlchemy constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements.txt" diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index cb7aa1219ab..5c0db0659d6 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -45,7 +45,7 @@ orjson==3.10.12 packaging>=23.1 paho-mqtt==1.6.1 Pillow==11.0.0 -propcache==0.2.0 +propcache==0.2.1 psutil-home-assistant==0.0.1 PyJWT==2.10.0 pymicro-vad==1.0.1 diff --git a/pyproject.toml b/pyproject.toml index 3f2df027b4a..2c143db77f6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,7 +57,7 @@ dependencies = [ # PyJWT has loose dependency. We want the latest one. "cryptography==43.0.1", "Pillow==11.0.0", - "propcache==0.2.0", + "propcache==0.2.1", "pyOpenSSL==24.2.1", "orjson==3.10.12", "packaging>=23.1", diff --git a/requirements.txt b/requirements.txt index 1fa82f175bb..514ab132bc8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -28,7 +28,7 @@ lru-dict==1.3.0 PyJWT==2.10.0 cryptography==43.0.1 Pillow==11.0.0 -propcache==0.2.0 +propcache==0.2.1 pyOpenSSL==24.2.1 orjson==3.10.12 packaging>=23.1 From e706a5ef2738fc97b7381c451cfa764e03ca5863 Mon Sep 17 00:00:00 2001 From: Richard Kroegel <42204099+rikroe@users.noreply.github.com> Date: Sun, 1 Dec 2024 21:37:35 +0100 Subject: [PATCH 0119/1198] Set parallel updates for BMW entities (#132019) --- homeassistant/components/bmw_connected_drive/button.py | 2 ++ homeassistant/components/bmw_connected_drive/lock.py | 3 +++ homeassistant/components/bmw_connected_drive/notify.py | 2 ++ homeassistant/components/bmw_connected_drive/number.py | 2 ++ homeassistant/components/bmw_connected_drive/select.py | 2 ++ homeassistant/components/bmw_connected_drive/switch.py | 2 ++ 6 files changed, 13 insertions(+) diff --git a/homeassistant/components/bmw_connected_drive/button.py b/homeassistant/components/bmw_connected_drive/button.py index e6bd92b92d7..85747278cb1 100644 --- a/homeassistant/components/bmw_connected_drive/button.py +++ b/homeassistant/components/bmw_connected_drive/button.py @@ -22,6 +22,8 @@ from .entity import BMWBaseEntity if TYPE_CHECKING: from .coordinator import BMWDataUpdateCoordinator +PARALLEL_UPDATES = 1 + _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/bmw_connected_drive/lock.py b/homeassistant/components/bmw_connected_drive/lock.py index 3dfc0b1c4d4..b715a1e38cc 100644 --- a/homeassistant/components/bmw_connected_drive/lock.py +++ b/homeassistant/components/bmw_connected_drive/lock.py @@ -18,7 +18,10 @@ from . 
import BMWConfigEntry from .coordinator import BMWDataUpdateCoordinator from .entity import BMWBaseEntity +PARALLEL_UPDATES = 1 + DOOR_LOCK_STATE = "door_lock_state" + _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/bmw_connected_drive/notify.py b/homeassistant/components/bmw_connected_drive/notify.py index 56523351e66..662a73a20cd 100644 --- a/homeassistant/components/bmw_connected_drive/notify.py +++ b/homeassistant/components/bmw_connected_drive/notify.py @@ -22,6 +22,8 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . import DOMAIN, BMWConfigEntry +PARALLEL_UPDATES = 1 + ATTR_LOCATION_ATTRIBUTES = ["street", "city", "postal_code", "country"] POI_SCHEMA = vol.Schema( diff --git a/homeassistant/components/bmw_connected_drive/number.py b/homeassistant/components/bmw_connected_drive/number.py index 54519ff9e6b..cce71b3b2fd 100644 --- a/homeassistant/components/bmw_connected_drive/number.py +++ b/homeassistant/components/bmw_connected_drive/number.py @@ -22,6 +22,8 @@ from . import BMWConfigEntry from .coordinator import BMWDataUpdateCoordinator from .entity import BMWBaseEntity +PARALLEL_UPDATES = 1 + _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/bmw_connected_drive/select.py b/homeassistant/components/bmw_connected_drive/select.py index 323768ad9eb..7bc91b098ae 100644 --- a/homeassistant/components/bmw_connected_drive/select.py +++ b/homeassistant/components/bmw_connected_drive/select.py @@ -19,6 +19,8 @@ from . import BMWConfigEntry from .coordinator import BMWDataUpdateCoordinator from .entity import BMWBaseEntity +PARALLEL_UPDATES = 1 + _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/bmw_connected_drive/switch.py b/homeassistant/components/bmw_connected_drive/switch.py index e8a02efdcfc..f0214bc1262 100644 --- a/homeassistant/components/bmw_connected_drive/switch.py +++ b/homeassistant/components/bmw_connected_drive/switch.py @@ -18,6 +18,8 @@ from . import BMWConfigEntry from .coordinator import BMWDataUpdateCoordinator from .entity import BMWBaseEntity +PARALLEL_UPDATES = 1 + _LOGGER = logging.getLogger(__name__) From 36ca4e8866a57b7adaa6099121d5a81906e78212 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Sun, 1 Dec 2024 21:42:16 +0100 Subject: [PATCH 0120/1198] Fix description of 'clear_completed_items' to use "remove" (#132014) --- homeassistant/components/shopping_list/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/shopping_list/strings.json b/homeassistant/components/shopping_list/strings.json index c184a1d2227..8618d9241b4 100644 --- a/homeassistant/components/shopping_list/strings.json +++ b/homeassistant/components/shopping_list/strings.json @@ -62,7 +62,7 @@ }, "clear_completed_items": { "name": "Clear completed items", - "description": "Clears completed items from the shopping list." + "description": "Removes completed items from the shopping list." 
}, "sort": { "name": "Sort all items", From 8fdd095dabf85886dedbac7a465a0074c6be6d92 Mon Sep 17 00:00:00 2001 From: dotvav Date: Sun, 1 Dec 2024 21:43:09 +0100 Subject: [PATCH 0121/1198] Add pre-commit VSCode task (#131637) --- .vscode/tasks.json | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 2b02916a73e..1f95c5eef8f 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -56,6 +56,20 @@ }, "problemMatcher": [] }, + { + "label": "Pre-commit", + "type": "shell", + "command": "pre-commit run --show-diff-on-failure", + "group": { + "kind": "test", + "isDefault": true + }, + "presentation": { + "reveal": "always", + "panel": "new" + }, + "problemMatcher": [] + }, { "label": "Pylint", "type": "shell", From ffc3aca41f273d1f25d0dd7ef9369b9e7deb3720 Mon Sep 17 00:00:00 2001 From: David Knowles Date: Sun, 1 Dec 2024 15:44:14 -0500 Subject: [PATCH 0122/1198] Bump pydrawise to 2024.12.0 (#132015) --- homeassistant/components/hydrawise/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/hydrawise/manifest.json b/homeassistant/components/hydrawise/manifest.json index 9678dc83e5f..50f803c07dc 100644 --- a/homeassistant/components/hydrawise/manifest.json +++ b/homeassistant/components/hydrawise/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/hydrawise", "iot_class": "cloud_polling", "loggers": ["pydrawise"], - "requirements": ["pydrawise==2024.9.0"] + "requirements": ["pydrawise==2024.12.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 0fa30cee23a..450219d2ac8 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1859,7 +1859,7 @@ pydiscovergy==3.0.2 pydoods==1.0.2 # homeassistant.components.hydrawise -pydrawise==2024.9.0 +pydrawise==2024.12.0 # homeassistant.components.android_ip_webcam pydroid-ipcam==2.0.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 668e98a262a..4f94f7e6538 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1503,7 +1503,7 @@ pydexcom==0.2.3 pydiscovergy==3.0.2 # homeassistant.components.hydrawise -pydrawise==2024.9.0 +pydrawise==2024.12.0 # homeassistant.components.android_ip_webcam pydroid-ipcam==2.0.0 From 98734ebe4f234a2f7333bad005b9b64be7021010 Mon Sep 17 00:00:00 2001 From: Richard Kroegel <42204099+rikroe@users.noreply.github.com> Date: Sun, 1 Dec 2024 21:45:31 +0100 Subject: [PATCH 0123/1198] Bump bimmer_connected to 0.17.2 (#132005) --- homeassistant/components/bmw_connected_drive/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/bmw_connected_drive/manifest.json b/homeassistant/components/bmw_connected_drive/manifest.json index d1ca735ce55..81928a59a52 100644 --- a/homeassistant/components/bmw_connected_drive/manifest.json +++ b/homeassistant/components/bmw_connected_drive/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/bmw_connected_drive", "iot_class": "cloud_polling", "loggers": ["bimmer_connected"], - "requirements": ["bimmer-connected[china]==0.17.0"] + "requirements": ["bimmer-connected[china]==0.17.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 450219d2ac8..56f9f56f061 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -582,7 +582,7 @@ beautifulsoup4==4.12.3 # 
beewi-smartclim==0.0.10 # homeassistant.components.bmw_connected_drive -bimmer-connected[china]==0.17.0 +bimmer-connected[china]==0.17.2 # homeassistant.components.bizkaibus bizkaibus==0.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4f94f7e6538..a38e6629b3a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -516,7 +516,7 @@ base36==0.1.1 beautifulsoup4==4.12.3 # homeassistant.components.bmw_connected_drive -bimmer-connected[china]==0.17.0 +bimmer-connected[china]==0.17.2 # homeassistant.components.eq3btsmart # homeassistant.components.esphome From 521505f9b51fb4ae9bd2472aa2be7d552b069565 Mon Sep 17 00:00:00 2001 From: Charles Garwood Date: Sun, 1 Dec 2024 16:00:21 -0500 Subject: [PATCH 0124/1198] Add additional data_descriptions for Fully Kiosk Browser fields (#131716) --- .../components/fully_kiosk/strings.json | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/fully_kiosk/strings.json b/homeassistant/components/fully_kiosk/strings.json index ec7bd7b1c03..a4b466926f0 100644 --- a/homeassistant/components/fully_kiosk/strings.json +++ b/homeassistant/components/fully_kiosk/strings.json @@ -1,16 +1,22 @@ { "common": { - "data_description_password": "The Remote Admin Password from the Fully Kiosk Browser app settings." + "data_description_password": "The Remote Admin Password from the Fully Kiosk Browser app settings.", + "data_description_ssl": "Is the Fully Kiosk app configured to require SSL for the connection?", + "data_description_verify_ssl": "Should SSL certificartes be verified? This should be off for self-signed certificates." }, "config": { "step": { "discovery_confirm": { "description": "Do you want to set up {name} ({host})?", "data": { - "password": "[%key:common::config_flow::data::password%]" + "password": "[%key:common::config_flow::data::password%]", + "ssl": "[%key:common::config_flow::data::ssl%]", + "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" }, "data_description": { - "password": "[%key:component::fully_kiosk::common::data_description_password%]" + "password": "[%key:component::fully_kiosk::common::data_description_password%]", + "ssl": "[%key:component::fully_kiosk::common::data_description_ssl%]", + "verify_ssl": "[%key:component::fully_kiosk::common::data_description_verify_ssl%]" } }, "user": { @@ -22,7 +28,9 @@ }, "data_description": { "host": "The hostname or IP address of the device running your Fully Kiosk Browser application.", - "password": "[%key:component::fully_kiosk::common::data_description_password%]" + "password": "[%key:component::fully_kiosk::common::data_description_password%]", + "ssl": "[%key:component::fully_kiosk::common::data_description_ssl%]", + "verify_ssl": "[%key:component::fully_kiosk::common::data_description_verify_ssl%]" } } }, From bd8cd87faecd1483b07443cacb93554ff0d23776 Mon Sep 17 00:00:00 2001 From: Bouwe Westerdijk <11290930+bouwew@users.noreply.github.com> Date: Sun, 1 Dec 2024 22:01:19 +0100 Subject: [PATCH 0125/1198] Bugfix for Plugwise, small code optimization (#131990) --- homeassistant/components/plugwise/climate.py | 51 +++++++++++--------- 1 file changed, 27 insertions(+), 24 deletions(-) diff --git a/homeassistant/components/plugwise/climate.py b/homeassistant/components/plugwise/climate.py index f1f54aa6647..242b0944782 100644 --- a/homeassistant/components/plugwise/climate.py +++ b/homeassistant/components/plugwise/climate.py @@ -78,19 +78,18 @@ class PlugwiseClimateEntity(PlugwiseEntity, 
ClimateEntity): self._attr_extra_state_attributes = {} self._attr_unique_id = f"{device_id}-climate" + self._devices = coordinator.data.devices + self._gateway = coordinator.data.gateway + gateway_id: str = self._gateway["gateway_id"] + self._gateway_data = self._devices[gateway_id] + self._location = device_id if (location := self.device.get("location")) is not None: self._location = location - self.cdr_gateway = coordinator.data.gateway - gateway_id: str = coordinator.data.gateway["gateway_id"] - self.gateway_data = coordinator.data.devices[gateway_id] # Determine supported features self._attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE - if ( - self.cdr_gateway["cooling_present"] - and self.cdr_gateway["smile_name"] != "Adam" - ): + if self._gateway["cooling_present"] and self._gateway["smile_name"] != "Adam": self._attr_supported_features = ( ClimateEntityFeature.TARGET_TEMPERATURE_RANGE ) @@ -116,10 +115,10 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): """ # When no cooling available, _previous_mode is always heating if ( - "regulation_modes" in self.gateway_data - and "cooling" in self.gateway_data["regulation_modes"] + "regulation_modes" in self._gateway_data + and "cooling" in self._gateway_data["regulation_modes"] ): - mode = self.gateway_data["select_regulation_mode"] + mode = self._gateway_data["select_regulation_mode"] if mode in ("cooling", "heating"): self._previous_mode = mode @@ -166,17 +165,17 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): def hvac_modes(self) -> list[HVACMode]: """Return a list of available HVACModes.""" hvac_modes: list[HVACMode] = [] - if "regulation_modes" in self.gateway_data: + if "regulation_modes" in self._gateway_data: hvac_modes.append(HVACMode.OFF) if "available_schedules" in self.device: hvac_modes.append(HVACMode.AUTO) - if self.cdr_gateway["cooling_present"]: - if "regulation_modes" in self.gateway_data: - if self.gateway_data["select_regulation_mode"] == "cooling": + if self._gateway["cooling_present"]: + if "regulation_modes" in self._gateway_data: + if self._gateway_data["select_regulation_mode"] == "cooling": hvac_modes.append(HVACMode.COOL) - if self.gateway_data["select_regulation_mode"] == "heating": + if self._gateway_data["select_regulation_mode"] == "heating": hvac_modes.append(HVACMode.HEAT) else: hvac_modes.append(HVACMode.HEAT_COOL) @@ -192,17 +191,21 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): self._previous_action_mode(self.coordinator) # Adam provides the hvac_action for each thermostat - if (control_state := self.device.get("control_state")) == "cooling": - return HVACAction.COOLING - if control_state == "heating": - return HVACAction.HEATING - if control_state == "preheating": - return HVACAction.PREHEATING - if control_state == "off": + if self._gateway["smile_name"] == "Adam": + if (control_state := self.device.get("control_state")) == "cooling": + return HVACAction.COOLING + if control_state == "heating": + return HVACAction.HEATING + if control_state == "preheating": + return HVACAction.PREHEATING + if control_state == "off": + return HVACAction.IDLE + return HVACAction.IDLE - heater: str = self.coordinator.data.gateway["heater_id"] - heater_data = self.coordinator.data.devices[heater] + # Anna + heater: str = self._gateway["heater_id"] + heater_data = self._devices[heater] if heater_data["binary_sensors"]["heating_state"]: return HVACAction.HEATING if heater_data["binary_sensors"].get("cooling_state", False): From 
78ced997e2fe6e73acbc5d94dd02b9fce137e723 Mon Sep 17 00:00:00 2001 From: Klaas Schoute Date: Sun, 1 Dec 2024 22:02:50 +0100 Subject: [PATCH 0126/1198] Add reauthentication flow for Autarco integration (#131816) --- .../components/autarco/config_flow.py | 44 ++++++++++++ .../components/autarco/coordinator.py | 8 ++- .../components/autarco/quality_scale.yaml | 2 +- homeassistant/components/autarco/strings.json | 15 +++- tests/components/autarco/test_config_flow.py | 72 ++++++++++++++++++- tests/components/autarco/test_init.py | 19 +++++ 6 files changed, 154 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/autarco/config_flow.py b/homeassistant/components/autarco/config_flow.py index a66f14047a7..294fa685fb8 100644 --- a/homeassistant/components/autarco/config_flow.py +++ b/homeassistant/components/autarco/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Mapping from typing import Any from autarco import Autarco, AutarcoAuthenticationError, AutarcoConnectionError @@ -20,6 +21,12 @@ DATA_SCHEMA = vol.Schema( } ) +STEP_REAUTH_SCHEMA = vol.Schema( + { + vol.Required(CONF_PASSWORD): str, + } +) + class AutarcoConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Autarco.""" @@ -55,3 +62,40 @@ class AutarcoConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, data_schema=DATA_SCHEMA, ) + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle re-authentication request from Autarco.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle re-authentication confirmation.""" + errors = {} + + reauth_entry = self._get_reauth_entry() + if user_input is not None: + client = Autarco( + email=reauth_entry.data[CONF_EMAIL], + password=user_input[CONF_PASSWORD], + session=async_get_clientsession(self.hass), + ) + try: + await client.get_account() + except AutarcoAuthenticationError: + errors["base"] = "invalid_auth" + except AutarcoConnectionError: + errors["base"] = "cannot_connect" + else: + return self.async_update_reload_and_abort( + reauth_entry, + data_updates=user_input, + ) + return self.async_show_form( + step_id="reauth_confirm", + description_placeholders={"email": reauth_entry.data[CONF_EMAIL]}, + data_schema=STEP_REAUTH_SCHEMA, + errors=errors, + ) diff --git a/homeassistant/components/autarco/coordinator.py b/homeassistant/components/autarco/coordinator.py index 5dd19478ae8..dd8786bca25 100644 --- a/homeassistant/components/autarco/coordinator.py +++ b/homeassistant/components/autarco/coordinator.py @@ -7,6 +7,7 @@ from typing import NamedTuple from autarco import ( AccountSite, Autarco, + AutarcoAuthenticationError, AutarcoConnectionError, Battery, Inverter, @@ -16,6 +17,7 @@ from autarco import ( from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN, LOGGER, SCAN_INTERVAL @@ -60,8 +62,10 @@ class AutarcoDataUpdateCoordinator(DataUpdateCoordinator[AutarcoData]): inverters = await self.client.get_inverters(self.account_site.public_key) if site.has_battery: battery = await self.client.get_battery(self.account_site.public_key) - except AutarcoConnectionError as error: - raise UpdateFailed(error) from error + except 
AutarcoAuthenticationError as err: + raise ConfigEntryAuthFailed(err) from err + except AutarcoConnectionError as err: + raise UpdateFailed(err) from err return AutarcoData( solar=solar, inverters=inverters, diff --git a/homeassistant/components/autarco/quality_scale.yaml b/homeassistant/components/autarco/quality_scale.yaml index f0eb4771447..d2e1455af7e 100644 --- a/homeassistant/components/autarco/quality_scale.yaml +++ b/homeassistant/components/autarco/quality_scale.yaml @@ -51,7 +51,7 @@ rules: This integration only polls data using a coordinator. Since the integration is read-only and poll-only (only provide sensor data), there is no need to implement parallel updates. - reauthentication-flow: todo + reauthentication-flow: done test-coverage: done # Gold diff --git a/homeassistant/components/autarco/strings.json b/homeassistant/components/autarco/strings.json index 8eda5fe0411..159dbd09781 100644 --- a/homeassistant/components/autarco/strings.json +++ b/homeassistant/components/autarco/strings.json @@ -2,7 +2,7 @@ "config": { "step": { "user": { - "description": "Connect to your Autarco account to get information about your solar panels.", + "description": "Connect to your Autarco account, to get information about your sites.", "data": { "email": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]" @@ -11,6 +11,16 @@ "email": "The email address of your Autarco account.", "password": "The password of your Autarco account." } + }, + "reauth_confirm": { + "title": "[%key:common::config_flow::title::reauth%]", + "description": "The password for {email} is no longer valid.", + "data": { + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "[%key:component::autarco::config::step::user::data_description::password%]" + } } }, "error": { @@ -18,7 +28,8 @@ "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "entity": { diff --git a/tests/components/autarco/test_config_flow.py b/tests/components/autarco/test_config_flow.py index 621ad7f55c8..47c6a2fb084 100644 --- a/tests/components/autarco/test_config_flow.py +++ b/tests/components/autarco/test_config_flow.py @@ -1,6 +1,6 @@ """Test the Autarco config flow.""" -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch from autarco import AutarcoAuthenticationError, AutarcoConnectionError import pytest @@ -92,6 +92,7 @@ async def test_exceptions( assert result.get("type") is FlowResultType.FORM assert result.get("errors") == {"base": error} + # Recover from error mock_autarco_client.get_account.side_effect = None result = await hass.config_entries.flow.async_configure( @@ -99,3 +100,72 @@ async def test_exceptions( user_input={CONF_EMAIL: "test@autarco.com", CONF_PASSWORD: "test-password"}, ) assert result.get("type") is FlowResultType.CREATE_ENTRY + + +async def test_step_reauth( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, +) -> None: + """Test reauth flow.""" + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reauth_flow(hass) + + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "reauth_confirm" + + with 
patch("homeassistant.components.autarco.config_flow.Autarco", autospec=True): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_PASSWORD: "new-password"}, + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "reauth_successful" + + assert len(hass.config_entries.async_entries()) == 1 + assert mock_config_entry.data[CONF_PASSWORD] == "new-password" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (AutarcoConnectionError, "cannot_connect"), + (AutarcoAuthenticationError, "invalid_auth"), + ], +) +async def test_step_reauth_exceptions( + hass: HomeAssistant, + mock_autarco_client: AsyncMock, + mock_config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + exception: Exception, + error: str, +) -> None: + """Test exceptions in reauth flow.""" + mock_autarco_client.get_account.side_effect = exception + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reauth_flow(hass) + + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_PASSWORD: "new-password"}, + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("errors") == {"base": error} + + # Recover from error + mock_autarco_client.get_account.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_PASSWORD: "new-password"}, + ) + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "reauth_successful" + + assert len(hass.config_entries.async_entries()) == 1 + assert mock_config_entry.data[CONF_PASSWORD] == "new-password" diff --git a/tests/components/autarco/test_init.py b/tests/components/autarco/test_init.py index 81c5f947251..2707c53d35f 100644 --- a/tests/components/autarco/test_init.py +++ b/tests/components/autarco/test_init.py @@ -4,6 +4,8 @@ from __future__ import annotations from unittest.mock import AsyncMock +from autarco import AutarcoAuthenticationError + from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -26,3 +28,20 @@ async def test_load_unload_entry( await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_setup_entry_exception( + hass: HomeAssistant, + mock_autarco_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test ConfigEntryNotReady when API raises an exception during entry setup.""" + mock_config_entry.add_to_hass(hass) + mock_autarco_client.get_site.side_effect = AutarcoAuthenticationError + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + assert flows[0]["step_id"] == "reauth_confirm" From 86f8b5893f7618d034a13cb951df08f96ceb5cc7 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sun, 1 Dec 2024 22:39:26 +0100 Subject: [PATCH 0127/1198] Bump yt-dlp to 2024.11.18 (#132026) --- homeassistant/components/media_extractor/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/media_extractor/manifest.json b/homeassistant/components/media_extractor/manifest.json index ebfa79d7190..866215839bf 100644 --- 
a/homeassistant/components/media_extractor/manifest.json +++ b/homeassistant/components/media_extractor/manifest.json @@ -8,6 +8,6 @@ "iot_class": "calculated", "loggers": ["yt_dlp"], "quality_scale": "internal", - "requirements": ["yt-dlp[default]==2024.11.04"], + "requirements": ["yt-dlp[default]==2024.11.18"], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index 56f9f56f061..35359d3744d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3066,7 +3066,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2024.11.04 +yt-dlp[default]==2024.11.18 # homeassistant.components.zamg zamg==0.3.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a38e6629b3a..54d00b8585b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2455,7 +2455,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2024.11.04 +yt-dlp[default]==2024.11.18 # homeassistant.components.zamg zamg==0.3.6 From b94a47ceb24507fe84f81e526b95aa1ce6896198 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sun, 1 Dec 2024 22:41:01 +0100 Subject: [PATCH 0128/1198] Change library to livisi (#132001) Co-authored-by: J. Nick Koston --- homeassistant/components/livisi/__init__.py | 2 +- homeassistant/components/livisi/climate.py | 2 +- homeassistant/components/livisi/config_flow.py | 3 ++- homeassistant/components/livisi/coordinator.py | 5 +++-- homeassistant/components/livisi/entity.py | 2 +- homeassistant/components/livisi/manifest.json | 2 +- requirements_all.txt | 6 +++--- requirements_test_all.txt | 6 +++--- tests/components/livisi/test_config_flow.py | 2 +- 9 files changed, 16 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/livisi/__init__.py b/homeassistant/components/livisi/__init__.py index 26e36e68efa..fc9e381a1c3 100644 --- a/homeassistant/components/livisi/__init__.py +++ b/homeassistant/components/livisi/__init__.py @@ -5,7 +5,7 @@ from __future__ import annotations from typing import Final from aiohttp import ClientConnectorError -from aiolivisi import AioLivisi +from livisi.aiolivisi import AioLivisi from homeassistant import core from homeassistant.config_entries import ConfigEntry diff --git a/homeassistant/components/livisi/climate.py b/homeassistant/components/livisi/climate.py index 56fe63d351f..5d70936fc53 100644 --- a/homeassistant/components/livisi/climate.py +++ b/homeassistant/components/livisi/climate.py @@ -4,7 +4,7 @@ from __future__ import annotations from typing import Any -from aiolivisi.const import CAPABILITY_CONFIG +from livisi.const import CAPABILITY_CONFIG from homeassistant.components.climate import ( ClimateEntity, diff --git a/homeassistant/components/livisi/config_flow.py b/homeassistant/components/livisi/config_flow.py index 7317aec0abc..ce14c0e44e9 100644 --- a/homeassistant/components/livisi/config_flow.py +++ b/homeassistant/components/livisi/config_flow.py @@ -6,7 +6,8 @@ from contextlib import suppress from typing import Any from aiohttp import ClientConnectorError -from aiolivisi import AioLivisi, errors as livisi_errors +from livisi import errors as livisi_errors +from livisi.aiolivisi import AioLivisi import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult diff --git a/homeassistant/components/livisi/coordinator.py b/homeassistant/components/livisi/coordinator.py index 7cb5757310f..b8b282c2829 100644 --- 
a/homeassistant/components/livisi/coordinator.py +++ b/homeassistant/components/livisi/coordinator.py @@ -6,8 +6,9 @@ from datetime import timedelta from typing import Any from aiohttp import ClientConnectorError -from aiolivisi import AioLivisi, LivisiEvent, Websocket -from aiolivisi.errors import TokenExpiredException +from livisi import LivisiEvent, Websocket +from livisi.aiolivisi import AioLivisi +from livisi.errors import TokenExpiredException from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PASSWORD diff --git a/homeassistant/components/livisi/entity.py b/homeassistant/components/livisi/entity.py index 3160b8f288a..af588b0e360 100644 --- a/homeassistant/components/livisi/entity.py +++ b/homeassistant/components/livisi/entity.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Mapping from typing import Any -from aiolivisi.const import CAPABILITY_MAP +from livisi.const import CAPABILITY_MAP from homeassistant.config_entries import ConfigEntry from homeassistant.core import callback diff --git a/homeassistant/components/livisi/manifest.json b/homeassistant/components/livisi/manifest.json index e6f46324ed8..25cc9d2e9c2 100644 --- a/homeassistant/components/livisi/manifest.json +++ b/homeassistant/components/livisi/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/livisi", "iot_class": "local_polling", - "requirements": ["aiolivisi==0.0.19"] + "requirements": ["livisi==0.0.22"] } diff --git a/requirements_all.txt b/requirements_all.txt index 35359d3744d..7fa4cea354a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -288,9 +288,6 @@ aiolifx-themes==0.5.5 # homeassistant.components.lifx aiolifx==1.1.1 -# homeassistant.components.livisi -aiolivisi==0.0.19 - # homeassistant.components.lookin aiolookin==1.0.0 @@ -1312,6 +1309,9 @@ linear-garage-door==0.2.9 # homeassistant.components.linode linode-api==4.1.9b1 +# homeassistant.components.livisi +livisi==0.0.22 + # homeassistant.components.google_maps locationsharinglib==5.0.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 54d00b8585b..f75507dfe5b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -270,9 +270,6 @@ aiolifx-themes==0.5.5 # homeassistant.components.lifx aiolifx==1.1.1 -# homeassistant.components.livisi -aiolivisi==0.0.19 - # homeassistant.components.lookin aiolookin==1.0.0 @@ -1093,6 +1090,9 @@ libsoundtouch==0.8 # homeassistant.components.linear_garage_door linear-garage-door==0.2.9 +# homeassistant.components.livisi +livisi==0.0.22 + # homeassistant.components.london_underground london-tube-status==0.5 diff --git a/tests/components/livisi/test_config_flow.py b/tests/components/livisi/test_config_flow.py index 9f492b9a45a..cffae711d28 100644 --- a/tests/components/livisi/test_config_flow.py +++ b/tests/components/livisi/test_config_flow.py @@ -2,7 +2,7 @@ from unittest.mock import patch -from aiolivisi import errors as livisi_errors +from livisi import errors as livisi_errors import pytest from homeassistant.components.livisi.const import DOMAIN From b17b1f6db8bace2112589a1bb5c330c04e9841d7 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sun, 1 Dec 2024 23:05:34 +0100 Subject: [PATCH 0129/1198] Bump spotifyaio to 0.8.11 (#132032) --- homeassistant/components/spotify/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git 
a/homeassistant/components/spotify/manifest.json b/homeassistant/components/spotify/manifest.json index 6c5b7382bbb..27b8da7cecf 100644 --- a/homeassistant/components/spotify/manifest.json +++ b/homeassistant/components/spotify/manifest.json @@ -8,6 +8,6 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["spotifyaio"], - "requirements": ["spotifyaio==0.8.10"], + "requirements": ["spotifyaio==0.8.11"], "zeroconf": ["_spotify-connect._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index 7fa4cea354a..fb0e39176f5 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2719,7 +2719,7 @@ speak2mary==1.4.0 speedtest-cli==2.1.3 # homeassistant.components.spotify -spotifyaio==0.8.10 +spotifyaio==0.8.11 # homeassistant.components.sql sqlparse==0.5.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f75507dfe5b..e459093135e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2174,7 +2174,7 @@ speak2mary==1.4.0 speedtest-cli==2.1.3 # homeassistant.components.spotify -spotifyaio==0.8.10 +spotifyaio==0.8.11 # homeassistant.components.sql sqlparse==0.5.0 From c2e6f8e761e0555903fb4d3593be5fdc2f77bba3 Mon Sep 17 00:00:00 2001 From: Yazan AbdAl-Rahman Date: Mon, 2 Dec 2024 03:56:15 +0200 Subject: [PATCH 0130/1198] Improve service names and descriptions for 'remote_connect' and 'remote_disconnect' in Home Assistant Cloud (#131993) * Rename and reword 'remote_connect' and 'remote_disconnect' services for clarity * Trigger pipeline * Trigger pipeline * Trigger pipeline * Trigger pipeline * Apply suggestions from code review --------- Co-authored-by: Paulus Schoutsen --- homeassistant/components/cloud/strings.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/cloud/strings.json b/homeassistant/components/cloud/strings.json index 9f7e0dbadcd..1da91f67813 100644 --- a/homeassistant/components/cloud/strings.json +++ b/homeassistant/components/cloud/strings.json @@ -68,12 +68,12 @@ }, "services": { "remote_connect": { - "name": "Remote connect", - "description": "Makes the instance UI accessible from outside of the local network by using Home Assistant Cloud." + "name": "Enable remote access", + "description": "Makes the instance UI accessible from outside of the local network by enabling your Home Assistant Cloud connection." }, "remote_disconnect": { - "name": "Remote disconnect", - "description": "Disconnects the Home Assistant UI from the Home Assistant Cloud. You will no longer be able to access your Home Assistant instance from outside your local network." + "name": "Disable remote access", + "description": "Disconnects the instance UI from Home Assistant Cloud. This disables access to it from outside your local network." } } } From c6cd7e38f70f0e8106ad3e3709519cb5a88dfefe Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 1 Dec 2024 20:05:45 -0600 Subject: [PATCH 0131/1198] Bump aiohttp to 3.11.9 (#132036) changelog: https://github.com/aio-libs/aiohttp/compare/v3.11.8...v3.11.9 --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 5c0db0659d6..bf6730a9c75 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -5,7 +5,7 @@ aiodiscover==2.1.0 aiodns==3.2.0 aiohasupervisor==0.2.1 aiohttp-fast-zlib==0.2.0 -aiohttp==3.11.8 +aiohttp==3.11.9 aiohttp_cors==0.7.0 aiozoneinfo==0.2.1 astral==2.2 diff --git a/pyproject.toml b/pyproject.toml index 2c143db77f6..ab9001cff42 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,7 +29,7 @@ dependencies = [ # change behavior based on presence of supervisor. Deprecated with #127228 # Lib can be removed with 2025.11 "aiohasupervisor==0.2.1", - "aiohttp==3.11.8", + "aiohttp==3.11.9", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.2.0", "aiozoneinfo==0.2.1", diff --git a/requirements.txt b/requirements.txt index 514ab132bc8..73153bd90cb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,7 +5,7 @@ # Home Assistant Core aiodns==3.2.0 aiohasupervisor==0.2.1 -aiohttp==3.11.8 +aiohttp==3.11.9 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.2.0 aiozoneinfo==0.2.1 From b6458ff9b8d9336f3b6d43143df8b103eafdc89e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 1 Dec 2024 20:06:14 -0600 Subject: [PATCH 0132/1198] Bump cryptography to 44.0.0 and pyOpenSSL to 24.3.0 (#132035) These should be bumped together to make sure we do not have any incompatibility issues. > Note: The Python Cryptographic Authority strongly suggests the use of pyca/cryptography where possible. If you are using pyOpenSSL for anything other than making a TLS connection you should move to cryptography and drop your pyOpenSSL dependency. --- homeassistant/package_constraints.txt | 4 ++-- pyproject.toml | 4 ++-- requirements.txt | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index bf6730a9c75..f7b2ce8561d 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -25,7 +25,7 @@ bluetooth-data-tools==1.20.0 cached-ipaddress==0.8.0 certifi>=2021.5.30 ciso8601==2.3.1 -cryptography==43.0.1 +cryptography==44.0.0 dbus-fast==2.24.3 fnv-hash-fast==1.0.2 go2rtc-client==0.1.1 @@ -50,7 +50,7 @@ psutil-home-assistant==0.0.1 PyJWT==2.10.0 pymicro-vad==1.0.1 PyNaCl==1.5.0 -pyOpenSSL==24.2.1 +pyOpenSSL==24.3.0 pyserial==3.5 pyspeex-noise==1.0.2 python-slugify==8.0.4 diff --git a/pyproject.toml b/pyproject.toml index ab9001cff42..ec3fe8024d9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,10 +55,10 @@ dependencies = [ "lru-dict==1.3.0", "PyJWT==2.10.0", # PyJWT has loose dependency. We want the latest one. 
- "cryptography==43.0.1", + "cryptography==44.0.0", "Pillow==11.0.0", "propcache==0.2.1", - "pyOpenSSL==24.2.1", + "pyOpenSSL==24.3.0", "orjson==3.10.12", "packaging>=23.1", "psutil-home-assistant==0.0.1", diff --git a/requirements.txt b/requirements.txt index 73153bd90cb..0967b9b0618 100644 --- a/requirements.txt +++ b/requirements.txt @@ -26,10 +26,10 @@ ifaddr==0.2.0 Jinja2==3.1.4 lru-dict==1.3.0 PyJWT==2.10.0 -cryptography==43.0.1 +cryptography==44.0.0 Pillow==11.0.0 propcache==0.2.1 -pyOpenSSL==24.2.1 +pyOpenSSL==24.3.0 orjson==3.10.12 packaging>=23.1 psutil-home-assistant==0.0.1 From 782fff198cd1aaedbeeedb272973783693a418a5 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Mon, 2 Dec 2024 03:17:07 +0100 Subject: [PATCH 0133/1198] Handle not found playlists in Spotify (#132033) * Handle not found playlists * Handle not found playlists * Handle not found playlists * Handle not found playlists * Handle not found playlists * Update homeassistant/components/spotify/coordinator.py --------- Co-authored-by: Paulus Schoutsen --- .../components/spotify/coordinator.py | 25 ++++- tests/components/spotify/test_media_player.py | 93 +++++++++++++++++++ 2 files changed, 114 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/spotify/coordinator.py b/homeassistant/components/spotify/coordinator.py index a7c95e31245..099b1cb3ca8 100644 --- a/homeassistant/components/spotify/coordinator.py +++ b/homeassistant/components/spotify/coordinator.py @@ -11,6 +11,7 @@ from spotifyaio import ( Playlist, SpotifyClient, SpotifyConnectionError, + SpotifyNotFoundError, UserProfile, ) @@ -62,6 +63,7 @@ class SpotifyCoordinator(DataUpdateCoordinator[SpotifyCoordinatorData]): ) self.client = client self._playlist: Playlist | None = None + self._checked_playlist_id: str | None = None async def _async_setup(self) -> None: """Set up the coordinator.""" @@ -87,15 +89,29 @@ class SpotifyCoordinator(DataUpdateCoordinator[SpotifyCoordinatorData]): dj_playlist = False if (context := current.context) is not None: - if self._playlist is None or self._playlist.uri != context.uri: + dj_playlist = context.uri == SPOTIFY_DJ_PLAYLIST_URI + if not ( + context.uri + in ( + self._checked_playlist_id, + SPOTIFY_DJ_PLAYLIST_URI, + ) + or (self._playlist is None and context.uri == self._checked_playlist_id) + ): + self._checked_playlist_id = context.uri self._playlist = None - if context.uri == SPOTIFY_DJ_PLAYLIST_URI: - dj_playlist = True - elif context.context_type == ContextType.PLAYLIST: + if context.context_type == ContextType.PLAYLIST: # Make sure any playlist lookups don't break the current # playback state update try: self._playlist = await self.client.get_playlist(context.uri) + except SpotifyNotFoundError: + _LOGGER.debug( + "Spotify playlist '%s' not found. " + "Most likely a Spotify-created playlist", + context.uri, + ) + self._playlist = None except SpotifyConnectionError: _LOGGER.debug( "Unable to load spotify playlist '%s'. 
" @@ -103,6 +119,7 @@ class SpotifyCoordinator(DataUpdateCoordinator[SpotifyCoordinatorData]): context.uri, ) self._playlist = None + self._checked_playlist_id = None return SpotifyCoordinatorData( current_playback=current, position_updated_at=position_updated_at, diff --git a/tests/components/spotify/test_media_player.py b/tests/components/spotify/test_media_player.py index b03424f8459..55e0ea8f1d8 100644 --- a/tests/components/spotify/test_media_player.py +++ b/tests/components/spotify/test_media_player.py @@ -10,6 +10,7 @@ from spotifyaio import ( ProductType, RepeatMode as SpotifyRepeatMode, SpotifyConnectionError, + SpotifyNotFoundError, ) from syrupy import SnapshotAssertion @@ -142,6 +143,7 @@ async def test_spotify_dj_list( hass: HomeAssistant, mock_spotify: MagicMock, mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, ) -> None: """Test the Spotify entities with a Spotify DJ playlist.""" mock_spotify.return_value.get_playback.return_value.context.uri = ( @@ -152,12 +154,67 @@ async def test_spotify_dj_list( assert state assert state.attributes["media_playlist"] == "DJ" + mock_spotify.return_value.get_playlist.assert_not_called() + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert state.attributes["media_playlist"] == "DJ" + + mock_spotify.return_value.get_playlist.assert_not_called() + + +@pytest.mark.usefixtures("setup_credentials") +async def test_normal_playlist( + hass: HomeAssistant, + mock_spotify: MagicMock, + freezer: FrozenDateTimeFactory, + mock_config_entry: MockConfigEntry, +) -> None: + """Test normal playlist switching.""" + await setup_integration(hass, mock_config_entry) + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert state.attributes["media_playlist"] == "Spotify Web API Testing playlist" + + mock_spotify.return_value.get_playlist.assert_called_once_with( + "spotify:user:rushofficial:playlist:2r35vbe6hHl6yDSMfjKgmm" + ) + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert state.attributes["media_playlist"] == "Spotify Web API Testing playlist" + + mock_spotify.return_value.get_playlist.assert_called_once_with( + "spotify:user:rushofficial:playlist:2r35vbe6hHl6yDSMfjKgmm" + ) + + mock_spotify.return_value.get_playback.return_value.context.uri = ( + "spotify:playlist:123123123123123" + ) + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_spotify.return_value.get_playlist.assert_called_with( + "spotify:playlist:123123123123123" + ) + @pytest.mark.usefixtures("setup_credentials") async def test_fetching_playlist_does_not_fail( hass: HomeAssistant, mock_spotify: MagicMock, mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, ) -> None: """Test failing fetching playlist does not fail update.""" mock_spotify.return_value.get_playlist.side_effect = SpotifyConnectionError @@ -166,6 +223,42 @@ async def test_fetching_playlist_does_not_fail( assert state assert "media_playlist" not in state.attributes + mock_spotify.return_value.get_playlist.assert_called_once() + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert mock_spotify.return_value.get_playlist.call_count == 2 + + 
+@pytest.mark.usefixtures("setup_credentials") +async def test_fetching_playlist_once( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test that not being able to find a playlist doesn't retry.""" + mock_spotify.return_value.get_playlist.side_effect = SpotifyNotFoundError + await setup_integration(hass, mock_config_entry) + + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert "media_playlist" not in state.attributes + + mock_spotify.return_value.get_playlist.assert_called_once() + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert "media_playlist" not in state.attributes + + mock_spotify.return_value.get_playlist.assert_called_once() + @pytest.mark.usefixtures("setup_credentials") async def test_idle( From 80f28302a13f669e59a2b43e71efe0ebb16c353b Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 1 Dec 2024 20:17:36 -0600 Subject: [PATCH 0134/1198] Bump yarl to 1.18.3 (#132025) changelog: https://github.com/aio-libs/yarl/compare/v1.18.0...v1.18.3 --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index f7b2ce8561d..d85fa4293a3 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -70,7 +70,7 @@ voluptuous-openapi==0.0.5 voluptuous-serialize==2.6.0 voluptuous==0.15.2 webrtc-models==0.3.0 -yarl==1.18.0 +yarl==1.18.3 zeroconf==0.136.2 # Constrain pycryptodome to avoid vulnerability diff --git a/pyproject.toml b/pyproject.toml index ec3fe8024d9..9aa53920318 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -79,7 +79,7 @@ dependencies = [ "voluptuous==0.15.2", "voluptuous-serialize==2.6.0", "voluptuous-openapi==0.0.5", - "yarl==1.18.0", + "yarl==1.18.3", "webrtc-models==0.3.0", ] diff --git a/requirements.txt b/requirements.txt index 0967b9b0618..d0e2be91a99 100644 --- a/requirements.txt +++ b/requirements.txt @@ -47,5 +47,5 @@ uv==0.5.4 voluptuous==0.15.2 voluptuous-serialize==2.6.0 voluptuous-openapi==0.0.5 -yarl==1.18.0 +yarl==1.18.3 webrtc-models==0.3.0 From 28eb4f3dff05a882cea97747d5ff13f9edef805a Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 2 Dec 2024 07:27:47 +0100 Subject: [PATCH 0135/1198] Use typed config entry in rainbird (#132031) * Use typed config entry in rainbird * Adjust --- homeassistant/components/rainbird/__init__.py | 7 +++---- homeassistant/components/rainbird/config_flow.py | 10 +++------- homeassistant/components/rainbird/coordinator.py | 4 ++-- homeassistant/components/rainbird/types.py | 9 ++++++++- 4 files changed, 16 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/rainbird/__init__.py b/homeassistant/components/rainbird/__init__.py index db88902bc3e..4827ac3e67c 100644 --- a/homeassistant/components/rainbird/__init__.py +++ b/homeassistant/components/rainbird/__init__.py @@ -9,7 +9,6 @@ import aiohttp from pyrainbird.async_client import AsyncRainbirdClient, AsyncRainbirdController from pyrainbird.exceptions import RainbirdApiException, RainbirdAuthException -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_HOST, CONF_MAC, @@ -46,7 +45,7 @@ DOMAIN = "rainbird" def 
_async_register_clientsession_shutdown( hass: HomeAssistant, - entry: ConfigEntry, + entry: RainbirdConfigEntry, clientsession: aiohttp.ClientSession, ) -> None: """Register cleanup hooks for the clientsession.""" @@ -126,7 +125,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: RainbirdConfigEntry) -> async def _async_fix_unique_id( - hass: HomeAssistant, controller: AsyncRainbirdController, entry: ConfigEntry + hass: HomeAssistant, controller: AsyncRainbirdController, entry: RainbirdConfigEntry ) -> bool: """Update the config entry with a unique id based on the mac address.""" _LOGGER.debug("Checking for migration of config entry (%s)", entry.unique_id) @@ -255,6 +254,6 @@ def _async_fix_device_id( ) -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: RainbirdConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/rainbird/config_flow.py b/homeassistant/components/rainbird/config_flow.py index 86a3c5d5d1c..1390650ea02 100644 --- a/homeassistant/components/rainbird/config_flow.py +++ b/homeassistant/components/rainbird/config_flow.py @@ -12,17 +12,13 @@ from pyrainbird.data import WifiParams from pyrainbird.exceptions import RainbirdApiException, RainbirdAuthException import voluptuous as vol -from homeassistant.config_entries import ( - ConfigEntry, - ConfigFlow, - ConfigFlowResult, - OptionsFlow, -) +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PASSWORD from homeassistant.core import callback from homeassistant.helpers import config_validation as cv, selector from homeassistant.helpers.device_registry import format_mac +from . 
import RainbirdConfigEntry from .const import ( ATTR_DURATION, CONF_SERIAL_NUMBER, @@ -69,7 +65,7 @@ class RainbirdConfigFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: RainbirdConfigEntry, ) -> RainBirdOptionsFlowHandler: """Define the config flow to handle options.""" return RainBirdOptionsFlowHandler() diff --git a/homeassistant/components/rainbird/coordinator.py b/homeassistant/components/rainbird/coordinator.py index 437aa7ddbd4..2ccfa0af62a 100644 --- a/homeassistant/components/rainbird/coordinator.py +++ b/homeassistant/components/rainbird/coordinator.py @@ -15,13 +15,13 @@ from pyrainbird.async_client import ( ) from pyrainbird.data import ModelAndVersion, Schedule -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.debounce import Debouncer from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN, MANUFACTURER, TIMEOUT_SECONDS +from .types import RainbirdConfigEntry UPDATE_INTERVAL = datetime.timedelta(minutes=1) # The calendar data requires RPCs for each program/zone, and the data rarely @@ -140,7 +140,7 @@ class RainbirdUpdateCoordinator(DataUpdateCoordinator[RainbirdDeviceState]): class RainbirdScheduleUpdateCoordinator(DataUpdateCoordinator[Schedule]): """Coordinator for rainbird irrigation schedule calls.""" - config_entry: ConfigEntry + config_entry: RainbirdConfigEntry def __init__( self, diff --git a/homeassistant/components/rainbird/types.py b/homeassistant/components/rainbird/types.py index b452712d971..cc43353ac17 100644 --- a/homeassistant/components/rainbird/types.py +++ b/homeassistant/components/rainbird/types.py @@ -1,13 +1,20 @@ """Types for Rain Bird integration.""" +from __future__ import annotations + from dataclasses import dataclass +from typing import TYPE_CHECKING from pyrainbird.async_client import AsyncRainbirdController from pyrainbird.data import ModelAndVersion from homeassistant.config_entries import ConfigEntry -from .coordinator import RainbirdScheduleUpdateCoordinator, RainbirdUpdateCoordinator +if TYPE_CHECKING: + from .coordinator import ( + RainbirdScheduleUpdateCoordinator, + RainbirdUpdateCoordinator, + ) @dataclass From 5458ee2fa9864869b06814f57b24cf6268d19b92 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 2 Dec 2024 07:28:29 +0100 Subject: [PATCH 0136/1198] Use typed config entry in imap (#132029) * Use typed config entry in imap * Adjust --- homeassistant/components/imap/config_flow.py | 10 +++------- homeassistant/components/imap/coordinator.py | 12 +++++++----- 2 files changed, 10 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/imap/config_flow.py b/homeassistant/components/imap/config_flow.py index 994c53b5b3e..df0e63e200a 100644 --- a/homeassistant/components/imap/config_flow.py +++ b/homeassistant/components/imap/config_flow.py @@ -9,12 +9,7 @@ from typing import Any from aioimaplib import AioImapException import voluptuous as vol -from homeassistant.config_entries import ( - ConfigEntry, - ConfigFlow, - ConfigFlowResult, - OptionsFlow, -) +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow from homeassistant.const import ( CONF_NAME, CONF_PASSWORD, @@ -35,6 +30,7 @@ from homeassistant.helpers.selector import ( ) from homeassistant.util.ssl import SSLCipherList 
+from . import ImapConfigEntry from .const import ( CONF_CHARSET, CONF_CUSTOM_EVENT_DATA_TEMPLATE, @@ -212,7 +208,7 @@ class IMAPConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: ImapConfigEntry, ) -> ImapOptionsFlow: """Get the options flow for this handler.""" return ImapOptionsFlow() diff --git a/homeassistant/components/imap/coordinator.py b/homeassistant/components/imap/coordinator.py index a9d0fdfbd48..41fd703d79b 100644 --- a/homeassistant/components/imap/coordinator.py +++ b/homeassistant/components/imap/coordinator.py @@ -14,7 +14,6 @@ from typing import TYPE_CHECKING, Any from aioimaplib import AUTH, IMAP4_SSL, NONAUTH, SELECTED, AioImapException -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_PASSWORD, CONF_PORT, @@ -53,6 +52,9 @@ from .const import ( ) from .errors import InvalidAuth, InvalidFolder +if TYPE_CHECKING: + from . import ImapConfigEntry + _LOGGER = logging.getLogger(__name__) BACKOFF_TIME = 10 @@ -210,14 +212,14 @@ class ImapMessage: class ImapDataUpdateCoordinator(DataUpdateCoordinator[int | None]): """Base class for imap client.""" - config_entry: ConfigEntry + config_entry: ImapConfigEntry custom_event_template: Template | None def __init__( self, hass: HomeAssistant, imap_client: IMAP4_SSL, - entry: ConfigEntry, + entry: ImapConfigEntry, update_interval: timedelta | None, ) -> None: """Initiate imap client.""" @@ -391,7 +393,7 @@ class ImapPollingDataUpdateCoordinator(ImapDataUpdateCoordinator): """Class for imap client.""" def __init__( - self, hass: HomeAssistant, imap_client: IMAP4_SSL, entry: ConfigEntry + self, hass: HomeAssistant, imap_client: IMAP4_SSL, entry: ImapConfigEntry ) -> None: """Initiate imap client.""" _LOGGER.debug( @@ -437,7 +439,7 @@ class ImapPushDataUpdateCoordinator(ImapDataUpdateCoordinator): """Class for imap client.""" def __init__( - self, hass: HomeAssistant, imap_client: IMAP4_SSL, entry: ConfigEntry + self, hass: HomeAssistant, imap_client: IMAP4_SSL, entry: ImapConfigEntry ) -> None: """Initiate imap client.""" _LOGGER.debug("Connected to server %s using IMAP push", entry.data[CONF_SERVER]) From 4eb5734d73ee601bbd6223941ea12e8ffe0b3583 Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Mon, 2 Dec 2024 07:39:48 +0100 Subject: [PATCH 0137/1198] Remove CONF_NAME from config entry in solarlog (#131738) * Remove CONF_NAME from config entry * Remove name from strings.json --- .../components/solarlog/config_flow.py | 17 ++----- homeassistant/components/solarlog/const.py | 1 - .../components/solarlog/coordinator.py | 1 - homeassistant/components/solarlog/entity.py | 2 +- .../components/solarlog/strings.json | 1 - tests/components/solarlog/conftest.py | 5 +- tests/components/solarlog/const.py | 1 - .../solarlog/snapshots/test_diagnostics.ambr | 1 - .../solarlog/snapshots/test_sensor.ambr | 46 +++++++++---------- tests/components/solarlog/test_config_flow.py | 40 +++++++--------- tests/components/solarlog/test_init.py | 4 +- 11 files changed, 49 insertions(+), 70 deletions(-) diff --git a/homeassistant/components/solarlog/config_flow.py b/homeassistant/components/solarlog/config_flow.py index a61f825aa5e..767079ea1f8 100644 --- a/homeassistant/components/solarlog/config_flow.py +++ b/homeassistant/components/solarlog/config_flow.py @@ -1,7 +1,6 @@ """Config flow for solarlog integration.""" from collections.abc import Mapping -import logging from typing import Any 
from urllib.parse import ParseResult, urlparse @@ -14,12 +13,9 @@ from solarlog_cli.solarlog_exceptions import ( import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD -from homeassistant.util import slugify +from homeassistant.const import CONF_HOST, CONF_PASSWORD -from .const import CONF_HAS_PWD, DEFAULT_HOST, DEFAULT_NAME, DOMAIN - -_LOGGER = logging.getLogger(__name__) +from .const import CONF_HAS_PWD, DEFAULT_HOST, DOMAIN class SolarLogConfigFlow(ConfigFlow, domain=DOMAIN): @@ -84,24 +80,21 @@ class SolarLogConfigFlow(ConfigFlow, domain=DOMAIN): self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]}) - user_input[CONF_NAME] = slugify(user_input[CONF_NAME]) - if await self._test_connection(user_input[CONF_HOST]): if user_input[CONF_HAS_PWD]: self._user_input = user_input return await self.async_step_password() return self.async_create_entry( - title=user_input[CONF_NAME], data=user_input + title=user_input[CONF_HOST], data=user_input ) else: - user_input = {CONF_NAME: DEFAULT_NAME, CONF_HOST: DEFAULT_HOST} + user_input = {CONF_HOST: DEFAULT_HOST} return self.async_show_form( step_id="user", data_schema=vol.Schema( { - vol.Required(CONF_NAME, default=user_input[CONF_NAME]): str, vol.Required(CONF_HOST, default=user_input[CONF_HOST]): str, vol.Required(CONF_HAS_PWD, default=False): bool, } @@ -120,7 +113,7 @@ class SolarLogConfigFlow(ConfigFlow, domain=DOMAIN): ): self._user_input |= user_input return self.async_create_entry( - title=self._user_input[CONF_NAME], data=self._user_input + title=self._user_input[CONF_HOST], data=self._user_input ) else: user_input = {CONF_PASSWORD: ""} diff --git a/homeassistant/components/solarlog/const.py b/homeassistant/components/solarlog/const.py index f86d103f830..3e814705589 100644 --- a/homeassistant/components/solarlog/const.py +++ b/homeassistant/components/solarlog/const.py @@ -6,6 +6,5 @@ DOMAIN = "solarlog" # Default config for solarlog. 
DEFAULT_HOST = "http://solar-log" -DEFAULT_NAME = "solarlog" CONF_HAS_PWD = "has_password" diff --git a/homeassistant/components/solarlog/coordinator.py b/homeassistant/components/solarlog/coordinator.py index 6e8867c0f52..11f268db32a 100644 --- a/homeassistant/components/solarlog/coordinator.py +++ b/homeassistant/components/solarlog/coordinator.py @@ -52,7 +52,6 @@ class SolarLogCoordinator(DataUpdateCoordinator[SolarlogData]): path = url.path if url.netloc else "" url = ParseResult("http", netloc, path, *url[3:]) self.unique_id = entry.entry_id - self.name = entry.title self.host = url.geturl() self.solarlog = SolarLogConnector( diff --git a/homeassistant/components/solarlog/entity.py b/homeassistant/components/solarlog/entity.py index b0f3ddf99f9..bfdc52dccf1 100644 --- a/homeassistant/components/solarlog/entity.py +++ b/homeassistant/components/solarlog/entity.py @@ -43,7 +43,7 @@ class SolarLogCoordinatorEntity(SolarLogBaseEntity): manufacturer="Solar-Log", model="Controller", identifiers={(DOMAIN, coordinator.unique_id)}, - name=coordinator.name, + name="SolarLog", configuration_url=coordinator.host, ) diff --git a/homeassistant/components/solarlog/strings.json b/homeassistant/components/solarlog/strings.json index fb724c02adb..bbd9b509ecf 100644 --- a/homeassistant/components/solarlog/strings.json +++ b/homeassistant/components/solarlog/strings.json @@ -5,7 +5,6 @@ "title": "Define your Solar-Log connection", "data": { "host": "[%key:common::config_flow::data::host%]", - "name": "The prefix to be used for your Solar-Log sensors", "has_password": "I have the password for the Solar-Log user account." }, "data_description": { diff --git a/tests/components/solarlog/conftest.py b/tests/components/solarlog/conftest.py index 2d4b4e32522..caa3621b9bb 100644 --- a/tests/components/solarlog/conftest.py +++ b/tests/components/solarlog/conftest.py @@ -10,9 +10,9 @@ from homeassistant.components.solarlog.const import ( CONF_HAS_PWD, DOMAIN as SOLARLOG_DOMAIN, ) -from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD +from homeassistant.const import CONF_HOST, CONF_PASSWORD -from .const import HOST, NAME +from .const import HOST from tests.common import MockConfigEntry, load_json_object_fixture @@ -38,7 +38,6 @@ def mock_config_entry() -> MockConfigEntry: title="solarlog", data={ CONF_HOST: HOST, - CONF_NAME: NAME, CONF_HAS_PWD: True, CONF_PASSWORD: "pwd", }, diff --git a/tests/components/solarlog/const.py b/tests/components/solarlog/const.py index e23633c80ae..1294a376b01 100644 --- a/tests/components/solarlog/const.py +++ b/tests/components/solarlog/const.py @@ -1,4 +1,3 @@ """Common const used across tests for SolarLog.""" -NAME = "Solarlog test 1 2 3" HOST = "http://1.1.1.1" diff --git a/tests/components/solarlog/snapshots/test_diagnostics.ambr b/tests/components/solarlog/snapshots/test_diagnostics.ambr index 4b37ea63dce..e0f1bc2623c 100644 --- a/tests/components/solarlog/snapshots/test_diagnostics.ambr +++ b/tests/components/solarlog/snapshots/test_diagnostics.ambr @@ -5,7 +5,6 @@ 'data': dict({ 'has_password': True, 'host': '**REDACTED**', - 'name': 'Solarlog test 1 2 3', 'password': 'pwd', }), 'disabled_by': None, diff --git a/tests/components/solarlog/snapshots/test_sensor.ambr b/tests/components/solarlog/snapshots/test_sensor.ambr index 32be560fc62..06bc01f9d39 100644 --- a/tests/components/solarlog/snapshots/test_sensor.ambr +++ b/tests/components/solarlog/snapshots/test_sensor.ambr @@ -254,7 +254,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 
'friendly_name': 'solarlog Alternator loss', + 'friendly_name': 'SolarLog Alternator loss', 'state_class': , 'unit_of_measurement': , }), @@ -308,7 +308,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'solarlog Capacity', + 'friendly_name': 'SolarLog Capacity', 'state_class': , 'unit_of_measurement': '%', }), @@ -359,7 +359,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'solarlog Consumption AC', + 'friendly_name': 'SolarLog Consumption AC', 'state_class': , 'unit_of_measurement': , }), @@ -416,7 +416,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'solarlog Consumption day', + 'friendly_name': 'SolarLog Consumption day', 'state_class': , 'unit_of_measurement': , }), @@ -473,7 +473,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'solarlog Consumption month', + 'friendly_name': 'SolarLog Consumption month', 'state_class': , 'unit_of_measurement': , }), @@ -530,7 +530,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'solarlog Consumption total', + 'friendly_name': 'SolarLog Consumption total', 'state_class': , 'unit_of_measurement': , }), @@ -587,7 +587,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'solarlog Consumption year', + 'friendly_name': 'SolarLog Consumption year', 'state_class': , 'unit_of_measurement': , }), @@ -642,7 +642,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'solarlog Consumption yesterday', + 'friendly_name': 'SolarLog Consumption yesterday', 'unit_of_measurement': , }), 'context': , @@ -695,7 +695,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'solarlog Efficiency', + 'friendly_name': 'SolarLog Efficiency', 'state_class': , 'unit_of_measurement': '%', }), @@ -746,7 +746,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'solarlog Installed peak power', + 'friendly_name': 'SolarLog Installed peak power', 'state_class': , 'unit_of_measurement': , }), @@ -795,7 +795,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'timestamp', - 'friendly_name': 'solarlog Last update', + 'friendly_name': 'SolarLog Last update', }), 'context': , 'entity_id': 'sensor.solarlog_last_update', @@ -844,7 +844,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'solarlog Power AC', + 'friendly_name': 'SolarLog Power AC', 'state_class': , 'unit_of_measurement': , }), @@ -895,7 +895,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'solarlog Power available', + 'friendly_name': 'SolarLog Power available', 'state_class': , 'unit_of_measurement': , }), @@ -946,7 +946,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'solarlog Power DC', + 'friendly_name': 'SolarLog Power DC', 'state_class': , 'unit_of_measurement': , }), @@ -997,7 +997,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'solarlog Self-consumption year', + 'friendly_name': 'SolarLog Self-consumption year', 'state_class': , 'unit_of_measurement': , }), @@ -1051,7 +1051,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'solarlog Usage', + 'friendly_name': 'SolarLog Usage', 
'state_class': , 'unit_of_measurement': '%', }), @@ -1102,7 +1102,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', - 'friendly_name': 'solarlog Voltage AC', + 'friendly_name': 'SolarLog Voltage AC', 'state_class': , 'unit_of_measurement': , }), @@ -1153,7 +1153,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', - 'friendly_name': 'solarlog Voltage DC', + 'friendly_name': 'SolarLog Voltage DC', 'state_class': , 'unit_of_measurement': , }), @@ -1210,7 +1210,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'solarlog Yield day', + 'friendly_name': 'SolarLog Yield day', 'state_class': , 'unit_of_measurement': , }), @@ -1267,7 +1267,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'solarlog Yield month', + 'friendly_name': 'SolarLog Yield month', 'state_class': , 'unit_of_measurement': , }), @@ -1324,7 +1324,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'solarlog Yield total', + 'friendly_name': 'SolarLog Yield total', 'state_class': , 'unit_of_measurement': , }), @@ -1378,7 +1378,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'solarlog Yield year', + 'friendly_name': 'SolarLog Yield year', 'state_class': , 'unit_of_measurement': , }), @@ -1433,7 +1433,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'solarlog Yield yesterday', + 'friendly_name': 'SolarLog Yield yesterday', 'unit_of_measurement': , }), 'context': , diff --git a/tests/components/solarlog/test_config_flow.py b/tests/components/solarlog/test_config_flow.py index 8a34407ff54..3de3c08fcd0 100644 --- a/tests/components/solarlog/test_config_flow.py +++ b/tests/components/solarlog/test_config_flow.py @@ -12,11 +12,11 @@ from solarlog_cli.solarlog_exceptions import ( from homeassistant.components.solarlog import config_flow from homeassistant.components.solarlog.const import CONF_HAS_PWD, DOMAIN from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD +from homeassistant.const import CONF_HOST, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from .const import HOST, NAME +from .const import HOST from tests.common import MockConfigEntry @@ -33,12 +33,12 @@ async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - {CONF_HOST: HOST, CONF_NAME: NAME, CONF_HAS_PWD: False}, + {CONF_HOST: HOST, CONF_HAS_PWD: False}, ) await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "solarlog_test_1_2_3" + assert result2["title"] == HOST assert result2["data"][CONF_HOST] == "http://1.1.1.1" assert result2["data"][CONF_HAS_PWD] is False assert len(mock_setup_entry.mock_calls) == 1 @@ -66,12 +66,12 @@ async def test_user( # tests with all provided result = await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_HOST: HOST, CONF_NAME: NAME, CONF_HAS_PWD: False} + result["flow_id"], {CONF_HOST: HOST, CONF_HAS_PWD: False} ) await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "solarlog_test_1_2_3" + assert result["title"] == HOST assert result["data"][CONF_HOST] == HOST assert 
len(mock_setup_entry.mock_calls) == 1 @@ -106,9 +106,7 @@ async def test_form_exceptions( mock_solarlog_connector.test_connection.side_effect = exception1 # tests with connection error - result = await flow.async_step_user( - {CONF_NAME: NAME, CONF_HOST: HOST, CONF_HAS_PWD: False} - ) + result = await flow.async_step_user({CONF_HOST: HOST, CONF_HAS_PWD: False}) await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM @@ -119,9 +117,7 @@ async def test_form_exceptions( mock_solarlog_connector.test_connection.side_effect = None mock_solarlog_connector.test_extended_data_available.side_effect = exception2 - result = await flow.async_step_user( - {CONF_NAME: NAME, CONF_HOST: HOST, CONF_HAS_PWD: True} - ) + result = await flow.async_step_user({CONF_HOST: HOST, CONF_HAS_PWD: True}) await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM @@ -137,13 +133,11 @@ async def test_form_exceptions( mock_solarlog_connector.test_extended_data_available.side_effect = None # tests with all provided (no password) - result = await flow.async_step_user( - {CONF_NAME: NAME, CONF_HOST: HOST, CONF_HAS_PWD: False} - ) + result = await flow.async_step_user({CONF_HOST: HOST, CONF_HAS_PWD: False}) await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "solarlog_test_1_2_3" + assert result["title"] == HOST assert result["data"][CONF_HOST] == HOST assert result["data"][CONF_HAS_PWD] is False @@ -152,16 +146,14 @@ async def test_form_exceptions( await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "solarlog_test_1_2_3" + assert result["title"] == HOST assert result["data"][CONF_PASSWORD] == "pwd" async def test_abort_if_already_setup(hass: HomeAssistant, test_connect: None) -> None: """Test we abort if the device is already setup.""" - MockConfigEntry(domain=DOMAIN, data={CONF_NAME: NAME, CONF_HOST: HOST}).add_to_hass( - hass - ) + MockConfigEntry(domain=DOMAIN, data={CONF_HOST: HOST}).add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} @@ -173,7 +165,7 @@ async def test_abort_if_already_setup(hass: HomeAssistant, test_connect: None) - result = await hass.config_entries.flow.async_configure( result["flow_id"], - {CONF_HOST: HOST, CONF_NAME: "solarlog_test_7_8_9", CONF_HAS_PWD: False}, + {CONF_HOST: HOST, CONF_HAS_PWD: False}, ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" @@ -196,7 +188,7 @@ async def test_reconfigure_flow( """Test config flow options.""" entry = MockConfigEntry( domain=DOMAIN, - title="solarlog_test_1_2_3", + title=HOST, data={ CONF_HOST: HOST, CONF_HAS_PWD: False, @@ -221,7 +213,7 @@ async def test_reconfigure_flow( entry = hass.config_entries.async_get_entry(entry.entry_id) assert entry - assert entry.title == "solarlog_test_1_2_3" + assert entry.title == HOST assert entry.data[CONF_HAS_PWD] == has_password assert entry.data[CONF_PASSWORD] == password @@ -244,7 +236,7 @@ async def test_reauth( entry = MockConfigEntry( domain=DOMAIN, - title="solarlog_test_1_2_3", + title=HOST, data={ CONF_HOST: HOST, CONF_HAS_PWD: True, diff --git a/tests/components/solarlog/test_init.py b/tests/components/solarlog/test_init.py index b4ef270e78b..a9a595f8962 100644 --- a/tests/components/solarlog/test_init.py +++ b/tests/components/solarlog/test_init.py @@ -19,7 +19,7 @@ from homeassistant.helpers.device_registry import DeviceRegistry from 
homeassistant.helpers.entity_registry import EntityRegistry from . import setup_platform -from .const import HOST, NAME +from .const import HOST from tests.common import MockConfigEntry @@ -140,7 +140,7 @@ async def test_migrate_config_entry( """Test successful migration of entry data.""" entry = MockConfigEntry( domain=DOMAIN, - title=NAME, + title=HOST, data={ CONF_HOST: HOST, }, From 4eb75a56e60a0c3e3a7879549ef82691120e0835 Mon Sep 17 00:00:00 2001 From: Andrew Sayre <6730289+andrewsayre@users.noreply.github.com> Date: Mon, 2 Dec 2024 01:19:43 -0600 Subject: [PATCH 0138/1198] Use runtime data in HEOS (#132030) * Adopt runtime_data * Fix missing variable assignment * Address PR feedback --- homeassistant/components/heos/__init__.py | 70 ++++++++++--------- homeassistant/components/heos/const.py | 4 -- homeassistant/components/heos/media_player.py | 43 +++++------- tests/components/heos/test_init.py | 21 ++---- tests/components/heos/test_media_player.py | 12 ++-- 5 files changed, 62 insertions(+), 88 deletions(-) diff --git a/homeassistant/components/heos/__init__.py b/homeassistant/components/heos/__init__.py index 1573ff3f23e..de56e541501 100644 --- a/homeassistant/components/heos/__init__.py +++ b/homeassistant/components/heos/__init__.py @@ -3,10 +3,11 @@ from __future__ import annotations import asyncio +from dataclasses import dataclass from datetime import timedelta import logging -from pyheos import Heos, HeosError, const as heos_const +from pyheos import Heos, HeosError, HeosPlayer, const as heos_const import voluptuous as vol from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry @@ -27,10 +28,6 @@ from .config_flow import format_title from .const import ( COMMAND_RETRY_ATTEMPTS, COMMAND_RETRY_DELAY, - DATA_CONTROLLER_MANAGER, - DATA_ENTITY_ID_MAP, - DATA_GROUP_MANAGER, - DATA_SOURCE_MANAGER, DOMAIN, SIGNAL_HEOS_PLAYER_ADDED, SIGNAL_HEOS_UPDATED, @@ -51,6 +48,19 @@ MIN_UPDATE_SOURCES = timedelta(seconds=1) _LOGGER = logging.getLogger(__name__) +@dataclass +class HeosRuntimeData: + """Runtime data and coordinators for HEOS config entries.""" + + controller_manager: ControllerManager + group_manager: GroupManager + source_manager: SourceManager + players: dict[int, HeosPlayer] + + +type HeosConfigEntry = ConfigEntry[HeosRuntimeData] + + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the HEOS component.""" if DOMAIN not in config: @@ -75,7 +85,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool: """Initialize config entry which represents the HEOS controller.""" # For backwards compat if entry.unique_id is None: @@ -128,17 +138,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: source_manager = SourceManager(favorites, inputs) source_manager.connect_update(hass, controller) - group_manager = GroupManager(hass, controller) + group_manager = GroupManager(hass, controller, players) - hass.data[DOMAIN] = { - DATA_CONTROLLER_MANAGER: controller_manager, - DATA_GROUP_MANAGER: group_manager, - DATA_SOURCE_MANAGER: source_manager, - Platform.MEDIA_PLAYER: players, - # Maps player_id to entity_id. Populated by the individual - # HeosMediaPlayer entities. 
- DATA_ENTITY_ID_MAP: {}, - } + entry.runtime_data = HeosRuntimeData( + controller_manager, group_manager, source_manager, players + ) services.register(hass, controller) group_manager.connect_update() @@ -149,11 +153,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool: """Unload a config entry.""" - controller_manager = hass.data[DOMAIN][DATA_CONTROLLER_MANAGER] - await controller_manager.disconnect() - hass.data.pop(DOMAIN) + await entry.runtime_data.controller_manager.disconnect() services.remove(hass) @@ -246,21 +248,25 @@ class ControllerManager: class GroupManager: """Class that manages HEOS groups.""" - def __init__(self, hass, controller): + def __init__( + self, hass: HomeAssistant, controller: Heos, players: dict[int, HeosPlayer] + ) -> None: """Init group manager.""" self._hass = hass - self._group_membership = {} + self._group_membership: dict[str, str] = {} self._disconnect_player_added = None self._initialized = False self.controller = controller + self.players = players + self.entity_id_map: dict[int, str] = {} def _get_entity_id_to_player_id_map(self) -> dict: """Return mapping of all HeosMediaPlayer entity_ids to player_ids.""" - return {v: k for k, v in self._hass.data[DOMAIN][DATA_ENTITY_ID_MAP].items()} + return {v: k for k, v in self.entity_id_map.items()} - async def async_get_group_membership(self): + async def async_get_group_membership(self) -> dict[str, list[str]]: """Return all group members for each player as entity_ids.""" - group_info_by_entity_id = { + group_info_by_entity_id: dict[str, list[str]] = { player_entity_id: [] for player_entity_id in self._get_entity_id_to_player_id_map() } @@ -271,7 +277,7 @@ class GroupManager: _LOGGER.error("Unable to get HEOS group info: %s", err) return group_info_by_entity_id - player_id_to_entity_id_map = self._hass.data[DOMAIN][DATA_ENTITY_ID_MAP] + player_id_to_entity_id_map = self.entity_id_map for group in groups.values(): leader_entity_id = player_id_to_entity_id_map.get(group.leader.player_id) member_entity_ids = [ @@ -282,9 +288,9 @@ class GroupManager: # Make sure the group leader is always the first element group_info = [leader_entity_id, *member_entity_ids] if leader_entity_id: - group_info_by_entity_id[leader_entity_id] = group_info + group_info_by_entity_id[leader_entity_id] = group_info # type: ignore[assignment] for member_entity_id in member_entity_ids: - group_info_by_entity_id[member_entity_id] = group_info + group_info_by_entity_id[member_entity_id] = group_info # type: ignore[assignment] return group_info_by_entity_id @@ -358,13 +364,9 @@ class GroupManager: # When adding a new HEOS player we need to update the groups. async def _async_handle_player_added(): - # Avoid calling async_update_groups when `DATA_ENTITY_ID_MAP` has not been + # Avoid calling async_update_groups when the entity_id map has not been # fully populated yet. This may only happen during early startup. 
- if ( - len(self._hass.data[DOMAIN][Platform.MEDIA_PLAYER]) - <= len(self._hass.data[DOMAIN][DATA_ENTITY_ID_MAP]) - and not self._initialized - ): + if len(self.players) <= len(self.entity_id_map) and not self._initialized: self._initialized = True await self.async_update_groups(SIGNAL_HEOS_PLAYER_ADDED) diff --git a/homeassistant/components/heos/const.py b/homeassistant/components/heos/const.py index 636751d150b..827a0c53fbf 100644 --- a/homeassistant/components/heos/const.py +++ b/homeassistant/components/heos/const.py @@ -4,10 +4,6 @@ ATTR_PASSWORD = "password" ATTR_USERNAME = "username" COMMAND_RETRY_ATTEMPTS = 2 COMMAND_RETRY_DELAY = 1 -DATA_CONTROLLER_MANAGER = "controller" -DATA_ENTITY_ID_MAP = "entity_id_map" -DATA_GROUP_MANAGER = "group_manager" -DATA_SOURCE_MANAGER = "source_manager" DATA_DISCOVERED_HOSTS = "heos_discovered_hosts" DOMAIN = "heos" SERVICE_SIGN_IN = "sign_in" diff --git a/homeassistant/components/heos/media_player.py b/homeassistant/components/heos/media_player.py index 0f9f7facd33..5255d369c2f 100644 --- a/homeassistant/components/heos/media_player.py +++ b/homeassistant/components/heos/media_player.py @@ -13,7 +13,6 @@ from pyheos import HeosError, const as heos_const from homeassistant.components import media_source from homeassistant.components.media_player import ( ATTR_MEDIA_ENQUEUE, - DOMAIN as MEDIA_PLAYER_DOMAIN, BrowseMedia, MediaPlayerEnqueue, MediaPlayerEntity, @@ -22,7 +21,6 @@ from homeassistant.components.media_player import ( MediaType, async_process_play_media_url, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import ( @@ -32,14 +30,8 @@ from homeassistant.helpers.dispatcher import ( from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.dt import utcnow -from .const import ( - DATA_ENTITY_ID_MAP, - DATA_GROUP_MANAGER, - DATA_SOURCE_MANAGER, - DOMAIN as HEOS_DOMAIN, - SIGNAL_HEOS_PLAYER_ADDED, - SIGNAL_HEOS_UPDATED, -) +from . 
import GroupManager, HeosConfigEntry, SourceManager +from .const import DOMAIN as HEOS_DOMAIN, SIGNAL_HEOS_PLAYER_ADDED, SIGNAL_HEOS_UPDATED BASE_SUPPORTED_FEATURES = ( MediaPlayerEntityFeature.VOLUME_MUTE @@ -80,11 +72,16 @@ _LOGGER = logging.getLogger(__name__) async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, entry: HeosConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Add media players for a config entry.""" - players = hass.data[HEOS_DOMAIN][MEDIA_PLAYER_DOMAIN] - devices = [HeosMediaPlayer(player) for player in players.values()] + players = entry.runtime_data.players + devices = [ + HeosMediaPlayer( + player, entry.runtime_data.source_manager, entry.runtime_data.group_manager + ) + for player in players.values() + ] async_add_entities(devices, True) @@ -120,13 +117,15 @@ class HeosMediaPlayer(MediaPlayerEntity): _attr_has_entity_name = True _attr_name = None - def __init__(self, player): + def __init__( + self, player, source_manager: SourceManager, group_manager: GroupManager + ) -> None: """Initialize.""" self._media_position_updated_at = None self._player = player - self._signals = [] - self._source_manager = None - self._group_manager = None + self._signals: list = [] + self._source_manager = source_manager + self._group_manager = group_manager self._attr_unique_id = str(player.player_id) self._attr_device_info = DeviceInfo( identifiers={(HEOS_DOMAIN, player.player_id)}, @@ -161,9 +160,7 @@ class HeosMediaPlayer(MediaPlayerEntity): async_dispatcher_connect(self.hass, SIGNAL_HEOS_UPDATED, self._heos_updated) ) # Register this player's entity_id so it can be resolved by the group manager - self.hass.data[HEOS_DOMAIN][DATA_ENTITY_ID_MAP][self._player.player_id] = ( - self.entity_id - ) + self._group_manager.entity_id_map[self._player.player_id] = self.entity_id async_dispatcher_send(self.hass, SIGNAL_HEOS_PLAYER_ADDED) @log_command_error("clear playlist") @@ -294,12 +291,6 @@ class HeosMediaPlayer(MediaPlayerEntity): ior, current_support, BASE_SUPPORTED_FEATURES ) - if self._group_manager is None: - self._group_manager = self.hass.data[HEOS_DOMAIN][DATA_GROUP_MANAGER] - - if self._source_manager is None: - self._source_manager = self.hass.data[HEOS_DOMAIN][DATA_SOURCE_MANAGER] - @log_command_error("unjoin_player") async def async_unjoin_player(self) -> None: """Remove this player from any group.""" diff --git a/tests/components/heos/test_init.py b/tests/components/heos/test_init.py index 9341c8fbace..04b745135d4 100644 --- a/tests/components/heos/test_init.py +++ b/tests/components/heos/test_init.py @@ -8,15 +8,11 @@ import pytest from homeassistant.components.heos import ( ControllerManager, + HeosRuntimeData, async_setup_entry, async_unload_entry, ) -from homeassistant.components.heos.const import ( - DATA_CONTROLLER_MANAGER, - DATA_SOURCE_MANAGER, - DOMAIN, -) -from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN +from homeassistant.components.heos.const import DOMAIN from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady @@ -92,10 +88,6 @@ async def test_async_setup_entry_loads_platforms( assert controller.get_favorites.call_count == 1 assert controller.get_input_sources.call_count == 1 controller.disconnect.assert_not_called() - assert hass.data[DOMAIN][DATA_CONTROLLER_MANAGER].controller == controller - assert hass.data[DOMAIN][MEDIA_PLAYER_DOMAIN] == 
controller.players - assert hass.data[DOMAIN][DATA_SOURCE_MANAGER].favorites == favorites - assert hass.data[DOMAIN][DATA_SOURCE_MANAGER].inputs == input_sources async def test_async_setup_entry_not_signed_in_loads_platforms( @@ -121,10 +113,6 @@ async def test_async_setup_entry_not_signed_in_loads_platforms( assert controller.get_favorites.call_count == 0 assert controller.get_input_sources.call_count == 1 controller.disconnect.assert_not_called() - assert hass.data[DOMAIN][DATA_CONTROLLER_MANAGER].controller == controller - assert hass.data[DOMAIN][MEDIA_PLAYER_DOMAIN] == controller.players - assert hass.data[DOMAIN][DATA_SOURCE_MANAGER].favorites == {} - assert hass.data[DOMAIN][DATA_SOURCE_MANAGER].inputs == input_sources assert ( "127.0.0.1 is not logged in to a HEOS account and will be unable to retrieve " "HEOS favorites: Use the 'heos.sign_in' service to sign-in to a HEOS account" @@ -163,7 +151,8 @@ async def test_async_setup_entry_player_failure( async def test_unload_entry(hass: HomeAssistant, config_entry, controller) -> None: """Test entries are unloaded correctly.""" controller_manager = Mock(ControllerManager) - hass.data[DOMAIN] = {DATA_CONTROLLER_MANAGER: controller_manager} + config_entry.runtime_data = HeosRuntimeData(controller_manager, None, None, {}) + with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=True ) as unload: @@ -186,7 +175,7 @@ async def test_update_sources_retry( assert await async_setup_component(hass, DOMAIN, config) controller.get_favorites.reset_mock() controller.get_input_sources.reset_mock() - source_manager = hass.data[DOMAIN][DATA_SOURCE_MANAGER] + source_manager = config_entry.runtime_data.source_manager source_manager.retry_delay = 0 source_manager.max_retry_attempts = 1 controller.get_favorites.side_effect = CommandFailedError("Test", "test", 0) diff --git a/tests/components/heos/test_media_player.py b/tests/components/heos/test_media_player.py index 089fa1cceea..fa3f01107c1 100644 --- a/tests/components/heos/test_media_player.py +++ b/tests/components/heos/test_media_player.py @@ -8,11 +8,7 @@ from pyheos.error import HeosError import pytest from homeassistant.components.heos import media_player -from homeassistant.components.heos.const import ( - DATA_SOURCE_MANAGER, - DOMAIN, - SIGNAL_HEOS_UPDATED, -) +from homeassistant.components.heos.const import DOMAIN, SIGNAL_HEOS_UPDATED from homeassistant.components.media_player import ( ATTR_GROUP_MEMBERS, ATTR_INPUT_SOURCE, @@ -106,7 +102,7 @@ async def test_state_attributes( assert ATTR_INPUT_SOURCE not in state.attributes assert ( state.attributes[ATTR_INPUT_SOURCE_LIST] - == hass.data[DOMAIN][DATA_SOURCE_MANAGER].source_list + == config_entry.runtime_data.source_manager.source_list ) @@ -219,7 +215,7 @@ async def test_updates_from_sources_updated( const.SIGNAL_CONTROLLER_EVENT, const.EVENT_SOURCES_CHANGED, {} ) await event.wait() - source_list = hass.data[DOMAIN][DATA_SOURCE_MANAGER].source_list + source_list = config_entry.runtime_data.source_manager.source_list assert len(source_list) == 2 state = hass.states.get("media_player.test_player") assert state.attributes[ATTR_INPUT_SOURCE_LIST] == source_list @@ -318,7 +314,7 @@ async def test_updates_from_user_changed( const.SIGNAL_CONTROLLER_EVENT, const.EVENT_USER_CHANGED, None ) await event.wait() - source_list = hass.data[DOMAIN][DATA_SOURCE_MANAGER].source_list + source_list = config_entry.runtime_data.source_manager.source_list assert len(source_list) == 1 state = hass.states.get("media_player.test_player") 
assert state.attributes[ATTR_INPUT_SOURCE_LIST] == source_list From 5dc390b6b93a1f800badfb10389790c34bfc85f6 Mon Sep 17 00:00:00 2001 From: TimL Date: Mon, 2 Dec 2024 19:24:49 +1100 Subject: [PATCH 0139/1198] Bump psymlight v0.1.4 (#132045) --- homeassistant/components/smlight/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/smlight/manifest.json b/homeassistant/components/smlight/manifest.json index c1eca45871b..cb791ac111b 100644 --- a/homeassistant/components/smlight/manifest.json +++ b/homeassistant/components/smlight/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/smlight", "integration_type": "device", "iot_class": "local_push", - "requirements": ["pysmlight==0.1.3"], + "requirements": ["pysmlight==0.1.4"], "zeroconf": [ { "type": "_slzb-06._tcp.local." diff --git a/requirements_all.txt b/requirements_all.txt index fb0e39176f5..150fc195f8d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2269,7 +2269,7 @@ pysmarty2==0.10.1 pysml==0.0.12 # homeassistant.components.smlight -pysmlight==0.1.3 +pysmlight==0.1.4 # homeassistant.components.snmp pysnmp==6.2.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index e459093135e..d79571888af 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1832,7 +1832,7 @@ pysmarty2==0.10.1 pysml==0.0.12 # homeassistant.components.smlight -pysmlight==0.1.3 +pysmlight==0.1.4 # homeassistant.components.snmp pysnmp==6.2.6 From 8d1493036a405f710bbf8c75abed933f8cea57e3 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 2 Dec 2024 09:59:57 +0100 Subject: [PATCH 0140/1198] Set PARALLEL_UPDATES in renault and bump quality scale (#132047) --- homeassistant/components/renault/binary_sensor.py | 3 +++ homeassistant/components/renault/button.py | 4 ++++ homeassistant/components/renault/device_tracker.py | 3 +++ homeassistant/components/renault/manifest.json | 1 + homeassistant/components/renault/quality_scale.yaml | 2 +- homeassistant/components/renault/select.py | 4 ++++ homeassistant/components/renault/sensor.py | 3 +++ 7 files changed, 19 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/renault/binary_sensor.py b/homeassistant/components/renault/binary_sensor.py index 98c298761ce..a8fdf324f1c 100644 --- a/homeassistant/components/renault/binary_sensor.py +++ b/homeassistant/components/renault/binary_sensor.py @@ -19,6 +19,9 @@ from homeassistant.helpers.typing import StateType from . import RenaultConfigEntry from .entity import RenaultDataEntity, RenaultDataEntityDescription +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class RenaultBinarySensorEntityDescription( diff --git a/homeassistant/components/renault/button.py b/homeassistant/components/renault/button.py index d3666388fbb..6a9f5e05a38 100644 --- a/homeassistant/components/renault/button.py +++ b/homeassistant/components/renault/button.py @@ -13,6 +13,10 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import RenaultConfigEntry from .entity import RenaultEntity +# Coordinator is used to centralize the data updates +# but renault servers are unreliable and it's safer to queue action calls +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class RenaultButtonEntityDescription(ButtonEntityDescription): diff --git a/homeassistant/components/renault/device_tracker.py b/homeassistant/components/renault/device_tracker.py index 2f7aeda5c39..08a2a698802 100644 --- a/homeassistant/components/renault/device_tracker.py +++ b/homeassistant/components/renault/device_tracker.py @@ -16,6 +16,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import RenaultConfigEntry from .entity import RenaultDataEntity, RenaultDataEntityDescription +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class RenaultTrackerEntityDescription( diff --git a/homeassistant/components/renault/manifest.json b/homeassistant/components/renault/manifest.json index 396410dfc20..111f296fc85 100644 --- a/homeassistant/components/renault/manifest.json +++ b/homeassistant/components/renault/manifest.json @@ -7,5 +7,6 @@ "integration_type": "hub", "iot_class": "cloud_polling", "loggers": ["renault_api"], + "quality_scale": "silver", "requirements": ["renault-api==0.2.7"] } diff --git a/homeassistant/components/renault/quality_scale.yaml b/homeassistant/components/renault/quality_scale.yaml index b49ff669895..f2d70622192 100644 --- a/homeassistant/components/renault/quality_scale.yaml +++ b/homeassistant/components/renault/quality_scale.yaml @@ -28,7 +28,7 @@ rules: entity-unavailable: done integration-owner: done log-when-unavailable: done - parallel-updates: todo + parallel-updates: done reauthentication-flow: done test-coverage: done # Gold diff --git a/homeassistant/components/renault/select.py b/homeassistant/components/renault/select.py index b430da9396e..cab1d1f4d8a 100644 --- a/homeassistant/components/renault/select.py +++ b/homeassistant/components/renault/select.py @@ -15,6 +15,10 @@ from homeassistant.helpers.typing import StateType from . 
import RenaultConfigEntry from .entity import RenaultDataEntity, RenaultDataEntityDescription +# Coordinator is used to centralize the data updates +# but renault servers are unreliable and it's safer to queue action calls +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class RenaultSelectEntityDescription( diff --git a/homeassistant/components/renault/sensor.py b/homeassistant/components/renault/sensor.py index 78e64ae9acc..7854d70b1c4 100644 --- a/homeassistant/components/renault/sensor.py +++ b/homeassistant/components/renault/sensor.py @@ -40,6 +40,9 @@ from .coordinator import T from .entity import RenaultDataEntity, RenaultDataEntityDescription from .renault_vehicle import RenaultVehicleProxy +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class RenaultSensorEntityDescription( From 66d0d2eb6c7f76f23640c2c58cbd33d0f347b591 Mon Sep 17 00:00:00 2001 From: Andrew Jackson Date: Mon, 2 Dec 2024 09:50:49 +0000 Subject: [PATCH 0141/1198] Add translated native unit of measurement - QBitTorrent (#131918) Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- homeassistant/components/qbittorrent/sensor.py | 4 ---- homeassistant/components/qbittorrent/strings.json | 12 ++++++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/qbittorrent/sensor.py b/homeassistant/components/qbittorrent/sensor.py index abc23f39975..67eb856bb83 100644 --- a/homeassistant/components/qbittorrent/sensor.py +++ b/homeassistant/components/qbittorrent/sensor.py @@ -100,13 +100,11 @@ SENSOR_TYPES: tuple[QBittorrentSensorEntityDescription, ...] = ( QBittorrentSensorEntityDescription( key=SENSOR_TYPE_ALL_TORRENTS, translation_key="all_torrents", - native_unit_of_measurement="torrents", value_fn=lambda coordinator: count_torrents_in_states(coordinator, []), ), QBittorrentSensorEntityDescription( key=SENSOR_TYPE_ACTIVE_TORRENTS, translation_key="active_torrents", - native_unit_of_measurement="torrents", value_fn=lambda coordinator: count_torrents_in_states( coordinator, ["downloading", "uploading"] ), @@ -114,7 +112,6 @@ SENSOR_TYPES: tuple[QBittorrentSensorEntityDescription, ...] = ( QBittorrentSensorEntityDescription( key=SENSOR_TYPE_INACTIVE_TORRENTS, translation_key="inactive_torrents", - native_unit_of_measurement="torrents", value_fn=lambda coordinator: count_torrents_in_states( coordinator, ["stalledDL", "stalledUP"] ), @@ -122,7 +119,6 @@ SENSOR_TYPES: tuple[QBittorrentSensorEntityDescription, ...] 
= ( QBittorrentSensorEntityDescription( key=SENSOR_TYPE_PAUSED_TORRENTS, translation_key="paused_torrents", - native_unit_of_measurement="torrents", value_fn=lambda coordinator: count_torrents_in_states( coordinator, ["pausedDL", "pausedUP"] ), diff --git a/homeassistant/components/qbittorrent/strings.json b/homeassistant/components/qbittorrent/strings.json index 88015dad5c3..9c9ee371737 100644 --- a/homeassistant/components/qbittorrent/strings.json +++ b/homeassistant/components/qbittorrent/strings.json @@ -36,16 +36,20 @@ } }, "active_torrents": { - "name": "Active torrents" + "name": "Active torrents", + "unit_of_measurement": "torrents" }, "inactive_torrents": { - "name": "Inactive torrents" + "name": "Inactive torrents", + "unit_of_measurement": "[%key:component::qbittorrent::entity::sensor::active_torrents::unit_of_measurement%]" }, "paused_torrents": { - "name": "Paused torrents" + "name": "Paused torrents", + "unit_of_measurement": "[%key:component::qbittorrent::entity::sensor::active_torrents::unit_of_measurement%]" }, "all_torrents": { - "name": "All torrents" + "name": "All torrents", + "unit_of_measurement": "[%key:component::qbittorrent::entity::sensor::active_torrents::unit_of_measurement%]" } }, "switch": { From 79ed6d865f89dcc368368f9e485a531c1038c5d6 Mon Sep 17 00:00:00 2001 From: Andrew Jackson Date: Mon, 2 Dec 2024 09:51:32 +0000 Subject: [PATCH 0142/1198] Add translated native unit of measurement - Transmission (#131913) --- homeassistant/components/transmission/sensor.py | 5 ----- .../components/transmission/strings.json | 15 ++++++++++----- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/transmission/sensor.py b/homeassistant/components/transmission/sensor.py index 737520adb5f..652f5d51fbb 100644 --- a/homeassistant/components/transmission/sensor.py +++ b/homeassistant/components/transmission/sensor.py @@ -83,7 +83,6 @@ SENSOR_TYPES: tuple[TransmissionSensorEntityDescription, ...] = ( TransmissionSensorEntityDescription( key="active_torrents", translation_key="active_torrents", - native_unit_of_measurement="torrents", val_func=lambda coordinator: coordinator.data.active_torrent_count, extra_state_attr_func=lambda coordinator: _torrents_info_attr( coordinator=coordinator, key="active_torrents" @@ -92,7 +91,6 @@ SENSOR_TYPES: tuple[TransmissionSensorEntityDescription, ...] = ( TransmissionSensorEntityDescription( key="paused_torrents", translation_key="paused_torrents", - native_unit_of_measurement="torrents", val_func=lambda coordinator: coordinator.data.paused_torrent_count, extra_state_attr_func=lambda coordinator: _torrents_info_attr( coordinator=coordinator, key="paused_torrents" @@ -101,7 +99,6 @@ SENSOR_TYPES: tuple[TransmissionSensorEntityDescription, ...] = ( TransmissionSensorEntityDescription( key="total_torrents", translation_key="total_torrents", - native_unit_of_measurement="torrents", val_func=lambda coordinator: coordinator.data.torrent_count, extra_state_attr_func=lambda coordinator: _torrents_info_attr( coordinator=coordinator, key="total_torrents" @@ -110,7 +107,6 @@ SENSOR_TYPES: tuple[TransmissionSensorEntityDescription, ...] = ( TransmissionSensorEntityDescription( key="completed_torrents", translation_key="completed_torrents", - native_unit_of_measurement="torrents", val_func=lambda coordinator: len( _filter_torrents(coordinator.torrents, MODES["completed_torrents"]) ), @@ -121,7 +117,6 @@ SENSOR_TYPES: tuple[TransmissionSensorEntityDescription, ...] 
= ( TransmissionSensorEntityDescription( key="started_torrents", translation_key="started_torrents", - native_unit_of_measurement="torrents", val_func=lambda coordinator: len( _filter_torrents(coordinator.torrents, MODES["started_torrents"]) ), diff --git a/homeassistant/components/transmission/strings.json b/homeassistant/components/transmission/strings.json index 20ae6ca723d..578bc262589 100644 --- a/homeassistant/components/transmission/strings.json +++ b/homeassistant/components/transmission/strings.json @@ -60,19 +60,24 @@ } }, "active_torrents": { - "name": "Active torrents" + "name": "Active torrents", + "unit_of_measurement": "torrents" }, "paused_torrents": { - "name": "Paused torrents" + "name": "Paused torrents", + "unit_of_measurement": "[%key:component::transmission::entity::sensor::active_torrents::unit_of_measurement%]" }, "total_torrents": { - "name": "Total torrents" + "name": "Total torrents", + "unit_of_measurement": "[%key:component::transmission::entity::sensor::active_torrents::unit_of_measurement%]" }, "completed_torrents": { - "name": "Completed torrents" + "name": "Completed torrents", + "unit_of_measurement": "[%key:component::transmission::entity::sensor::active_torrents::unit_of_measurement%]" }, "started_torrents": { - "name": "Started torrents" + "name": "Started torrents", + "unit_of_measurement": "[%key:component::transmission::entity::sensor::active_torrents::unit_of_measurement%]" } }, "switch": { From 584bb7bca87c483c2a8e481105b2fbe4dd78c480 Mon Sep 17 00:00:00 2001 From: Andrew Jackson Date: Mon, 2 Dec 2024 09:51:50 +0000 Subject: [PATCH 0143/1198] Add translated native unit of measurement - PiHole (#131915) --- homeassistant/components/pi_hole/sensor.py | 29 ++++--------------- homeassistant/components/pi_hole/strings.json | 24 ++++++++++----- 2 files changed, 21 insertions(+), 32 deletions(-) diff --git a/homeassistant/components/pi_hole/sensor.py b/homeassistant/components/pi_hole/sensor.py index 503883e9326..4cf5133e700 100644 --- a/homeassistant/components/pi_hole/sensor.py +++ b/homeassistant/components/pi_hole/sensor.py @@ -18,7 +18,6 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = ( SensorEntityDescription( key="ads_blocked_today", translation_key="ads_blocked_today", - native_unit_of_measurement="ads", ), SensorEntityDescription( key="ads_percentage_today", @@ -28,38 +27,20 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] 
= ( SensorEntityDescription( key="clients_ever_seen", translation_key="clients_ever_seen", - native_unit_of_measurement="clients", ), SensorEntityDescription( - key="dns_queries_today", - translation_key="dns_queries_today", - native_unit_of_measurement="queries", + key="dns_queries_today", translation_key="dns_queries_today" ), SensorEntityDescription( key="domains_being_blocked", translation_key="domains_being_blocked", - native_unit_of_measurement="domains", ), + SensorEntityDescription(key="queries_cached", translation_key="queries_cached"), SensorEntityDescription( - key="queries_cached", - translation_key="queries_cached", - native_unit_of_measurement="queries", - ), - SensorEntityDescription( - key="queries_forwarded", - translation_key="queries_forwarded", - native_unit_of_measurement="queries", - ), - SensorEntityDescription( - key="unique_clients", - translation_key="unique_clients", - native_unit_of_measurement="clients", - ), - SensorEntityDescription( - key="unique_domains", - translation_key="unique_domains", - native_unit_of_measurement="domains", + key="queries_forwarded", translation_key="queries_forwarded" ), + SensorEntityDescription(key="unique_clients", translation_key="unique_clients"), + SensorEntityDescription(key="unique_domains", translation_key="unique_domains"), ) diff --git a/homeassistant/components/pi_hole/strings.json b/homeassistant/components/pi_hole/strings.json index b76b61f1903..9e1d5948a09 100644 --- a/homeassistant/components/pi_hole/strings.json +++ b/homeassistant/components/pi_hole/strings.json @@ -41,31 +41,39 @@ }, "sensor": { "ads_blocked_today": { - "name": "Ads blocked today" + "name": "Ads blocked today", + "unit_of_measurement": "ads" }, "ads_percentage_today": { "name": "Ads percentage blocked today" }, "clients_ever_seen": { - "name": "Seen clients" + "name": "Seen clients", + "unit_of_measurement": "clients" }, "dns_queries_today": { - "name": "DNS queries today" + "name": "DNS queries today", + "unit_of_measurement": "queries" }, "domains_being_blocked": { - "name": "Domains blocked" + "name": "Domains blocked", + "unit_of_measurement": "domains" }, "queries_cached": { - "name": "DNS queries cached" + "name": "DNS queries cached", + "unit_of_measurement": "[%key:component::pi_hole::entity::sensor::dns_queries_today::unit_of_measurement%]" }, "queries_forwarded": { - "name": "DNS queries forwarded" + "name": "DNS queries forwarded", + "unit_of_measurement": "[%key:component::pi_hole::entity::sensor::dns_queries_today::unit_of_measurement%]" }, "unique_clients": { - "name": "DNS unique clients" + "name": "DNS unique clients", + "unit_of_measurement": "[%key:component::pi_hole::entity::sensor::clients_ever_seen::unit_of_measurement%]" }, "unique_domains": { - "name": "DNS unique domains" + "name": "DNS unique domains", + "unit_of_measurement": "[%key:component::pi_hole::entity::sensor::domains_being_blocked::unit_of_measurement%]" } }, "update": { From 56ec70815cb48006d48e021897d68ae5a0c590a6 Mon Sep 17 00:00:00 2001 From: Andrew Jackson Date: Mon, 2 Dec 2024 09:54:37 +0000 Subject: [PATCH 0144/1198] Add translated native unit of measurement - squeezebox (#131912) --- homeassistant/components/squeezebox/sensor.py | 6 ------ .../components/squeezebox/strings.json | 18 ++++++++++++------ 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/squeezebox/sensor.py b/homeassistant/components/squeezebox/sensor.py index ff9f86ccf1f..0ca33179f9f 100644 --- a/homeassistant/components/squeezebox/sensor.py +++ 
b/homeassistant/components/squeezebox/sensor.py @@ -33,12 +33,10 @@ SENSORS: tuple[SensorEntityDescription, ...] = ( SensorEntityDescription( key=STATUS_SENSOR_INFO_TOTAL_ALBUMS, state_class=SensorStateClass.TOTAL, - native_unit_of_measurement="albums", ), SensorEntityDescription( key=STATUS_SENSOR_INFO_TOTAL_ARTISTS, state_class=SensorStateClass.TOTAL, - native_unit_of_measurement="artists", ), SensorEntityDescription( key=STATUS_SENSOR_INFO_TOTAL_DURATION, @@ -49,12 +47,10 @@ SENSORS: tuple[SensorEntityDescription, ...] = ( SensorEntityDescription( key=STATUS_SENSOR_INFO_TOTAL_GENRES, state_class=SensorStateClass.TOTAL, - native_unit_of_measurement="genres", ), SensorEntityDescription( key=STATUS_SENSOR_INFO_TOTAL_SONGS, state_class=SensorStateClass.TOTAL, - native_unit_of_measurement="songs", ), SensorEntityDescription( key=STATUS_SENSOR_LASTSCAN, @@ -63,13 +59,11 @@ SENSORS: tuple[SensorEntityDescription, ...] = ( SensorEntityDescription( key=STATUS_SENSOR_PLAYER_COUNT, state_class=SensorStateClass.TOTAL, - native_unit_of_measurement="players", ), SensorEntityDescription( key=STATUS_SENSOR_OTHER_PLAYER_COUNT, state_class=SensorStateClass.TOTAL, entity_registry_visible_default=False, - native_unit_of_measurement="players", ), ) diff --git a/homeassistant/components/squeezebox/strings.json b/homeassistant/components/squeezebox/strings.json index b1b71cd8c1d..406c7243a1a 100644 --- a/homeassistant/components/squeezebox/strings.json +++ b/homeassistant/components/squeezebox/strings.json @@ -76,25 +76,31 @@ "name": "Last scan" }, "info_total_albums": { - "name": "Total albums" + "name": "Total albums", + "unit_of_measurement": "albums" }, "info_total_artists": { - "name": "Total artists" + "name": "Total artists", + "unit_of_measurement": "artists" }, "info_total_duration": { "name": "Total duration" }, "info_total_genres": { - "name": "Total genres" + "name": "Total genres", + "unit_of_measurement": "genres" }, "info_total_songs": { - "name": "Total songs" + "name": "Total songs", + "unit_of_measurement": "songs" }, "player_count": { - "name": "Player count" + "name": "Player count", + "unit_of_measurement": "players" }, "other_player_count": { - "name": "Player count off service" + "name": "Player count off service", + "unit_of_measurement": "[%key:component::squeezebox::entity::sensor::player_count::unit_of_measurement%]" } } } From e37ae8bf8d8813cd2a2fa2ebb9d39d19475b25c3 Mon Sep 17 00:00:00 2001 From: ashionky <35916938+ashionky@users.noreply.github.com> Date: Mon, 2 Dec 2024 18:05:09 +0800 Subject: [PATCH 0145/1198] Bump refoss to v1.2.5 (#132051) --- homeassistant/components/refoss/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/refoss/manifest.json b/homeassistant/components/refoss/manifest.json index bf046e954d1..da7050433f3 100644 --- a/homeassistant/components/refoss/manifest.json +++ b/homeassistant/components/refoss/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/refoss", "iot_class": "local_polling", - "requirements": ["refoss-ha==1.2.4"] + "requirements": ["refoss-ha==1.2.5"] } diff --git a/requirements_all.txt b/requirements_all.txt index 150fc195f8d..a5a1c0b4c57 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2544,7 +2544,7 @@ rapt-ble==0.1.2 raspyrfm-client==1.2.8 # homeassistant.components.refoss -refoss-ha==1.2.4 +refoss-ha==1.2.5 # homeassistant.components.rainmachine 
regenmaschine==2024.03.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d79571888af..754840e190b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2038,7 +2038,7 @@ radiotherm==2.1.0 rapt-ble==0.1.2 # homeassistant.components.refoss -refoss-ha==1.2.4 +refoss-ha==1.2.5 # homeassistant.components.rainmachine regenmaschine==2024.03.0 From 3d26fa7864b612d7686519d1701538b35bd08ce7 Mon Sep 17 00:00:00 2001 From: nasWebio <140073814+nasWebio@users.noreply.github.com> Date: Mon, 2 Dec 2024 11:07:37 +0100 Subject: [PATCH 0146/1198] Bump webio_api to 0.1.11 (#131730) --- homeassistant/components/nasweb/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/nasweb/manifest.json b/homeassistant/components/nasweb/manifest.json index 69efdafbc82..8a4ecdbee84 100644 --- a/homeassistant/components/nasweb/manifest.json +++ b/homeassistant/components/nasweb/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/nasweb", "integration_type": "hub", "iot_class": "local_push", - "requirements": ["webio-api==0.1.8"] + "requirements": ["webio-api==0.1.11"] } diff --git a/requirements_all.txt b/requirements_all.txt index a5a1c0b4c57..e5026acf875 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2987,7 +2987,7 @@ weatherflow4py==1.0.6 webexpythonsdk==2.0.1 # homeassistant.components.nasweb -webio-api==0.1.8 +webio-api==0.1.11 # homeassistant.components.webmin webmin-xmlrpc==0.0.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 754840e190b..be797bc16a4 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2385,7 +2385,7 @@ watchdog==2.3.1 weatherflow4py==1.0.6 # homeassistant.components.nasweb -webio-api==0.1.8 +webio-api==0.1.11 # homeassistant.components.webmin webmin-xmlrpc==0.0.2 From 11a2a62144fdfa01b97e324afddeeaff892e612d Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Mon, 2 Dec 2024 12:33:54 +0100 Subject: [PATCH 0147/1198] Update livisi to 0.0.24 (#132058) --- homeassistant/components/livisi/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/livisi/manifest.json b/homeassistant/components/livisi/manifest.json index 25cc9d2e9c2..1077cacf2c4 100644 --- a/homeassistant/components/livisi/manifest.json +++ b/homeassistant/components/livisi/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/livisi", "iot_class": "local_polling", - "requirements": ["livisi==0.0.22"] + "requirements": ["livisi==0.0.24"] } diff --git a/requirements_all.txt b/requirements_all.txt index e5026acf875..7e3e19578d1 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1310,7 +1310,7 @@ linear-garage-door==0.2.9 linode-api==4.1.9b1 # homeassistant.components.livisi -livisi==0.0.22 +livisi==0.0.24 # homeassistant.components.google_maps locationsharinglib==5.0.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index be797bc16a4..eb4305709b9 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1091,7 +1091,7 @@ libsoundtouch==0.8 linear-garage-door==0.2.9 # homeassistant.components.livisi -livisi==0.0.22 +livisi==0.0.24 # homeassistant.components.london_underground london-tube-status==0.5 From 
ea7f1b2a4e245da5e1607436227ea9a7e473803d Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Mon, 2 Dec 2024 12:35:36 +0100 Subject: [PATCH 0148/1198] Add additional number entities to IronOS (#131943) Co-authored-by: Joost Lekkerkerker --- homeassistant/components/iron_os/__init__.py | 25 +- .../components/iron_os/coordinator.py | 73 +- homeassistant/components/iron_os/entity.py | 11 +- homeassistant/components/iron_os/icons.json | 57 + homeassistant/components/iron_os/number.py | 328 +++++- homeassistant/components/iron_os/sensor.py | 4 +- homeassistant/components/iron_os/strings.json | 54 + homeassistant/components/iron_os/update.py | 2 +- tests/components/iron_os/conftest.py | 29 +- .../iron_os/snapshots/test_number.ambr | 1009 +++++++++++++++++ .../iron_os/snapshots/test_sensor.ambr | 2 +- tests/components/iron_os/test_init.py | 33 +- tests/components/iron_os/test_number.py | 104 +- 13 files changed, 1692 insertions(+), 39 deletions(-) diff --git a/homeassistant/components/iron_os/__init__.py b/homeassistant/components/iron_os/__init__.py index 56a83117e68..35b426d11ab 100644 --- a/homeassistant/components/iron_os/__init__.py +++ b/homeassistant/components/iron_os/__init__.py @@ -19,15 +19,22 @@ from homeassistant.helpers.typing import ConfigType from homeassistant.util.hass_dict import HassKey from .const import DOMAIN -from .coordinator import IronOSFirmwareUpdateCoordinator, IronOSLiveDataCoordinator +from .coordinator import ( + IronOSCoordinators, + IronOSFirmwareUpdateCoordinator, + IronOSLiveDataCoordinator, + IronOSSettingsCoordinator, +) PLATFORMS: list[Platform] = [Platform.NUMBER, Platform.SENSOR, Platform.UPDATE] -type IronOSConfigEntry = ConfigEntry[IronOSLiveDataCoordinator] +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) + + +type IronOSConfigEntry = ConfigEntry[IronOSCoordinators] IRON_OS_KEY: HassKey[IronOSFirmwareUpdateCoordinator] = HassKey(DOMAIN) -CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) _LOGGER = logging.getLogger(__name__) @@ -59,10 +66,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: IronOSConfigEntry) -> bo device = Pynecil(ble_device) - coordinator = IronOSLiveDataCoordinator(hass, device) - await coordinator.async_config_entry_first_refresh() + live_data = IronOSLiveDataCoordinator(hass, device) + await live_data.async_config_entry_first_refresh() - entry.runtime_data = coordinator + settings = IronOSSettingsCoordinator(hass, device) + await settings.async_config_entry_first_refresh() + + entry.runtime_data = IronOSCoordinators( + live_data=live_data, + settings=settings, + ) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True diff --git a/homeassistant/components/iron_os/coordinator.py b/homeassistant/components/iron_os/coordinator.py index 699f5a01704..cfd40d66ac7 100644 --- a/homeassistant/components/iron_os/coordinator.py +++ b/homeassistant/components/iron_os/coordinator.py @@ -2,15 +2,23 @@ from __future__ import annotations +from dataclasses import dataclass from datetime import timedelta import logging from typing import TYPE_CHECKING from aiogithubapi import GitHubAPI, GitHubException, GitHubReleaseModel -from pynecil import CommunicationError, DeviceInfoResponse, LiveDataResponse, Pynecil +from pynecil import ( + CommunicationError, + DeviceInfoResponse, + LiveDataResponse, + Pynecil, + SettingsDataResponse, +) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant +from homeassistant.helpers.debounce 
import Debouncer from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN @@ -19,24 +27,58 @@ _LOGGER = logging.getLogger(__name__) SCAN_INTERVAL = timedelta(seconds=5) SCAN_INTERVAL_GITHUB = timedelta(hours=3) +SCAN_INTERVAL_SETTINGS = timedelta(seconds=60) -class IronOSLiveDataCoordinator(DataUpdateCoordinator[LiveDataResponse]): - """IronOS live data coordinator.""" +@dataclass +class IronOSCoordinators: + """IronOS data class holding coordinators.""" + + live_data: IronOSLiveDataCoordinator + settings: IronOSSettingsCoordinator + + +class IronOSBaseCoordinator[_DataT](DataUpdateCoordinator[_DataT]): + """IronOS base coordinator.""" device_info: DeviceInfoResponse config_entry: ConfigEntry - def __init__(self, hass: HomeAssistant, device: Pynecil) -> None: + def __init__( + self, + hass: HomeAssistant, + device: Pynecil, + update_interval: timedelta, + ) -> None: """Initialize IronOS coordinator.""" + super().__init__( hass, _LOGGER, name=DOMAIN, - update_interval=SCAN_INTERVAL, + update_interval=update_interval, + request_refresh_debouncer=Debouncer( + hass, _LOGGER, cooldown=3, immediate=False + ), ) self.device = device + async def _async_setup(self) -> None: + """Set up the coordinator.""" + try: + self.device_info = await self.device.get_device_info() + + except CommunicationError as e: + raise UpdateFailed("Cannot connect to device") from e + + +class IronOSLiveDataCoordinator(IronOSBaseCoordinator): + """IronOS coordinator.""" + + def __init__(self, hass: HomeAssistant, device: Pynecil) -> None: + """Initialize IronOS coordinator.""" + super().__init__(hass, device=device, update_interval=SCAN_INTERVAL) + async def _async_update_data(self) -> LiveDataResponse: """Fetch data from Device.""" @@ -80,3 +122,24 @@ class IronOSFirmwareUpdateCoordinator(DataUpdateCoordinator[GitHubReleaseModel]) assert release.data return release.data + + +class IronOSSettingsCoordinator(IronOSBaseCoordinator): + """IronOS coordinator.""" + + def __init__(self, hass: HomeAssistant, device: Pynecil) -> None: + """Initialize IronOS coordinator.""" + super().__init__(hass, device=device, update_interval=SCAN_INTERVAL_SETTINGS) + + async def _async_update_data(self) -> SettingsDataResponse: + """Fetch data from Device.""" + + characteristics = set(self.async_contexts()) + + if self.device.is_connected and characteristics: + try: + return await self.device.get_settings(list(characteristics)) + except CommunicationError as e: + _LOGGER.debug("Failed to fetch settings", exc_info=e) + + return self.data or SettingsDataResponse() diff --git a/homeassistant/components/iron_os/entity.py b/homeassistant/components/iron_os/entity.py index 77bebda9390..684957a2197 100644 --- a/homeassistant/components/iron_os/entity.py +++ b/homeassistant/components/iron_os/entity.py @@ -2,28 +2,29 @@ from __future__ import annotations -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH, DeviceInfo from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import MANUFACTURER, MODEL -from .coordinator import IronOSLiveDataCoordinator +from .coordinator import IronOSBaseCoordinator -class IronOSBaseEntity(CoordinatorEntity[IronOSLiveDataCoordinator]): +class IronOSBaseEntity(CoordinatorEntity[IronOSBaseCoordinator]): """Base IronOS entity.""" _attr_has_entity_name = True def __init__( self, - coordinator: 
IronOSLiveDataCoordinator, + coordinator: IronOSBaseCoordinator, entity_description: EntityDescription, + context: Any | None = None, ) -> None: """Initialize the sensor.""" - super().__init__(coordinator) + super().__init__(coordinator, context=context) self.entity_description = entity_description self._attr_unique_id = ( diff --git a/homeassistant/components/iron_os/icons.json b/homeassistant/components/iron_os/icons.json index fa14b8134d0..24d27457689 100644 --- a/homeassistant/components/iron_os/icons.json +++ b/homeassistant/components/iron_os/icons.json @@ -3,6 +3,63 @@ "number": { "setpoint_temperature": { "default": "mdi:thermometer" + }, + "sleep_temperature": { + "default": "mdi:thermometer-low" + }, + "sleep_timeout": { + "default": "mdi:timer-sand" + }, + "qc_max_voltage": { + "default": "mdi:flash-alert-outline" + }, + "pd_timeout": { + "default": "mdi:timer-alert-outline" + }, + "boost_temp": { + "default": "mdi:thermometer-high" + }, + "shutdown_timeout": { + "default": "mdi:thermometer-off" + }, + "display_brightness": { + "default": "mdi:brightness-6" + }, + "voltage_div": { + "default": "mdi:call-split" + }, + "temp_increment_short": { + "default": "mdi:gesture-tap-button" + }, + "temp_increment_long": { + "default": "mdi:gesture-tap-button" + }, + "accel_sensitivity": { + "default": "mdi:motion" + }, + "calibration_offset": { + "default": "mdi:contrast" + }, + "hall_sensitivity": { + "default": "mdi:leak" + }, + "keep_awake_pulse_delay": { + "default": "mdi:clock-end" + }, + "keep_awake_pulse_duration": { + "default": "mdi:clock-start" + }, + "keep_awake_pulse_power": { + "default": "mdi:waves-arrow-up" + }, + "min_voltage_per_cell": { + "default": "mdi:fuel-cell" + }, + "min_dc_voltage_cells": { + "default": "mdi:battery-arrow-down" + }, + "power_limit": { + "default": "mdi:flash-alert" } }, "sensor": { diff --git a/homeassistant/components/iron_os/number.py b/homeassistant/components/iron_os/number.py index 2da80aac327..a288a61b021 100644 --- a/homeassistant/components/iron_os/number.py +++ b/homeassistant/components/iron_os/number.py @@ -6,21 +6,34 @@ from collections.abc import Callable from dataclasses import dataclass from enum import StrEnum -from pynecil import CharSetting, CommunicationError, LiveDataResponse +from pynecil import ( + CharSetting, + CommunicationError, + LiveDataResponse, + SettingsDataResponse, +) from homeassistant.components.number import ( + DEFAULT_MAX_VALUE, NumberDeviceClass, NumberEntity, NumberEntityDescription, NumberMode, ) -from homeassistant.const import UnitOfTemperature +from homeassistant.const import ( + EntityCategory, + UnitOfElectricPotential, + UnitOfPower, + UnitOfTemperature, + UnitOfTime, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import IronOSConfigEntry from .const import DOMAIN, MAX_TEMP, MIN_TEMP +from .coordinator import IronOSCoordinators from .entity import IronOSBaseEntity PARALLEL_UPDATES = 0 @@ -30,15 +43,39 @@ PARALLEL_UPDATES = 0 class IronOSNumberEntityDescription(NumberEntityDescription): """Describes IronOS number entity.""" - value_fn: Callable[[LiveDataResponse], float | int | None] - max_value_fn: Callable[[LiveDataResponse], float | int] - set_key: CharSetting + value_fn: Callable[[LiveDataResponse, SettingsDataResponse], float | int | None] + max_value_fn: Callable[[LiveDataResponse], float | int] | None = None + characteristic: CharSetting + raw_value_fn: Callable[[float], float | int] | None = None class PinecilNumber(StrEnum): """Number controls for Pinecil device.""" SETPOINT_TEMP = "setpoint_temperature" + SLEEP_TEMP = "sleep_temperature" + SLEEP_TIMEOUT = "sleep_timeout" + QC_MAX_VOLTAGE = "qc_max_voltage" + PD_TIMEOUT = "pd_timeout" + BOOST_TEMP = "boost_temp" + SHUTDOWN_TIMEOUT = "shutdown_timeout" + DISPLAY_BRIGHTNESS = "display_brightness" + POWER_LIMIT = "power_limit" + CALIBRATION_OFFSET = "calibration_offset" + HALL_SENSITIVITY = "hall_sensitivity" + MIN_VOLTAGE_PER_CELL = "min_voltage_per_cell" + ACCEL_SENSITIVITY = "accel_sensitivity" + KEEP_AWAKE_PULSE_POWER = "keep_awake_pulse_power" + KEEP_AWAKE_PULSE_DELAY = "keep_awake_pulse_delay" + KEEP_AWAKE_PULSE_DURATION = "keep_awake_pulse_duration" + VOLTAGE_DIV = "voltage_div" + TEMP_INCREMENT_SHORT = "temp_increment_short" + TEMP_INCREMENT_LONG = "temp_increment_long" + + +def multiply(value: float | None, multiplier: float) -> float | None: + """Multiply if not None.""" + return value * multiplier if value is not None else None PINECIL_NUMBER_DESCRIPTIONS: tuple[IronOSNumberEntityDescription, ...] = ( @@ -47,13 +84,249 @@ PINECIL_NUMBER_DESCRIPTIONS: tuple[IronOSNumberEntityDescription, ...] 
= ( translation_key=PinecilNumber.SETPOINT_TEMP, native_unit_of_measurement=UnitOfTemperature.CELSIUS, device_class=NumberDeviceClass.TEMPERATURE, - value_fn=lambda data: data.setpoint_temp, - set_key=CharSetting.SETPOINT_TEMP, + value_fn=lambda data, _: data.setpoint_temp, + characteristic=CharSetting.SETPOINT_TEMP, mode=NumberMode.BOX, native_min_value=MIN_TEMP, native_step=5, max_value_fn=lambda data: min(data.max_tip_temp_ability or MAX_TEMP, MAX_TEMP), ), + IronOSNumberEntityDescription( + key=PinecilNumber.SLEEP_TEMP, + translation_key=PinecilNumber.SLEEP_TEMP, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=NumberDeviceClass.TEMPERATURE, + value_fn=lambda _, settings: settings.get("sleep_temp"), + characteristic=CharSetting.SLEEP_TEMP, + mode=NumberMode.BOX, + native_min_value=MIN_TEMP, + native_max_value=MAX_TEMP, + native_step=10, + entity_category=EntityCategory.CONFIG, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.BOOST_TEMP, + translation_key=PinecilNumber.BOOST_TEMP, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=NumberDeviceClass.TEMPERATURE, + value_fn=lambda _, settings: settings.get("boost_temp"), + characteristic=CharSetting.BOOST_TEMP, + mode=NumberMode.BOX, + native_min_value=0, + native_max_value=MAX_TEMP, + native_step=10, + entity_category=EntityCategory.CONFIG, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.QC_MAX_VOLTAGE, + translation_key=PinecilNumber.QC_MAX_VOLTAGE, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=NumberDeviceClass.VOLTAGE, + value_fn=lambda _, settings: settings.get("qc_ideal_voltage"), + characteristic=CharSetting.QC_IDEAL_VOLTAGE, + mode=NumberMode.BOX, + native_min_value=9.0, + native_max_value=22.0, + native_step=0.1, + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.PD_TIMEOUT, + translation_key=PinecilNumber.PD_TIMEOUT, + native_unit_of_measurement=UnitOfTime.SECONDS, + device_class=NumberDeviceClass.DURATION, + value_fn=lambda _, settings: settings.get("pd_negotiation_timeout"), + characteristic=CharSetting.PD_NEGOTIATION_TIMEOUT, + mode=NumberMode.BOX, + native_min_value=0, + native_max_value=5.0, + native_step=1, + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.SHUTDOWN_TIMEOUT, + translation_key=PinecilNumber.SHUTDOWN_TIMEOUT, + native_unit_of_measurement=UnitOfTime.MINUTES, + device_class=NumberDeviceClass.DURATION, + value_fn=lambda _, settings: settings.get("shutdown_time"), + characteristic=CharSetting.SHUTDOWN_TIME, + mode=NumberMode.BOX, + native_min_value=0, + native_max_value=60, + native_step=1, + entity_category=EntityCategory.CONFIG, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.DISPLAY_BRIGHTNESS, + translation_key=PinecilNumber.DISPLAY_BRIGHTNESS, + value_fn=lambda _, settings: settings.get("display_brightness"), + characteristic=CharSetting.DISPLAY_BRIGHTNESS, + mode=NumberMode.SLIDER, + native_min_value=1, + native_max_value=5, + native_step=1, + entity_category=EntityCategory.CONFIG, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.SLEEP_TIMEOUT, + translation_key=PinecilNumber.SLEEP_TIMEOUT, + value_fn=lambda _, settings: settings.get("sleep_timeout"), + characteristic=CharSetting.SLEEP_TIMEOUT, + mode=NumberMode.BOX, + native_min_value=0, + native_max_value=15, + native_step=1, + entity_category=EntityCategory.CONFIG, + 
native_unit_of_measurement=UnitOfTime.MINUTES, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.POWER_LIMIT, + translation_key=PinecilNumber.POWER_LIMIT, + value_fn=lambda _, settings: settings.get("power_limit"), + characteristic=CharSetting.POWER_LIMIT, + mode=NumberMode.BOX, + native_min_value=0, + native_max_value=12, + native_step=0.1, + entity_category=EntityCategory.CONFIG, + native_unit_of_measurement=UnitOfPower.WATT, + entity_registry_enabled_default=False, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.CALIBRATION_OFFSET, + translation_key=PinecilNumber.CALIBRATION_OFFSET, + value_fn=lambda _, settings: settings.get("calibration_offset"), + characteristic=CharSetting.CALIBRATION_OFFSET, + mode=NumberMode.BOX, + native_min_value=100, + native_max_value=2500, + native_step=1, + entity_category=EntityCategory.CONFIG, + native_unit_of_measurement=UnitOfElectricPotential.MICROVOLT, + entity_registry_enabled_default=False, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.HALL_SENSITIVITY, + translation_key=PinecilNumber.HALL_SENSITIVITY, + value_fn=lambda _, settings: settings.get("hall_sensitivity"), + characteristic=CharSetting.HALL_SENSITIVITY, + mode=NumberMode.SLIDER, + native_min_value=0, + native_max_value=9, + native_step=1, + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.MIN_VOLTAGE_PER_CELL, + translation_key=PinecilNumber.MIN_VOLTAGE_PER_CELL, + value_fn=lambda _, settings: settings.get("min_voltage_per_cell"), + characteristic=CharSetting.MIN_VOLTAGE_PER_CELL, + mode=NumberMode.BOX, + native_min_value=2.4, + native_max_value=3.8, + native_step=0.1, + entity_category=EntityCategory.CONFIG, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + entity_registry_enabled_default=False, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.ACCEL_SENSITIVITY, + translation_key=PinecilNumber.ACCEL_SENSITIVITY, + value_fn=lambda _, settings: settings.get("accel_sensitivity"), + characteristic=CharSetting.ACCEL_SENSITIVITY, + mode=NumberMode.SLIDER, + native_min_value=0, + native_max_value=9, + native_step=1, + entity_category=EntityCategory.CONFIG, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.KEEP_AWAKE_PULSE_POWER, + translation_key=PinecilNumber.KEEP_AWAKE_PULSE_POWER, + value_fn=lambda _, settings: settings.get("keep_awake_pulse_power"), + characteristic=CharSetting.KEEP_AWAKE_PULSE_POWER, + mode=NumberMode.BOX, + native_min_value=0, + native_max_value=9.9, + native_step=0.1, + entity_category=EntityCategory.CONFIG, + native_unit_of_measurement=UnitOfPower.WATT, + entity_registry_enabled_default=False, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.KEEP_AWAKE_PULSE_DELAY, + translation_key=PinecilNumber.KEEP_AWAKE_PULSE_DELAY, + value_fn=( + lambda _, settings: multiply(settings.get("keep_awake_pulse_delay"), 2.5) + ), + characteristic=CharSetting.KEEP_AWAKE_PULSE_DELAY, + raw_value_fn=lambda value: value / 2.5, + mode=NumberMode.BOX, + native_min_value=2.5, + native_max_value=22.5, + native_step=2.5, + entity_category=EntityCategory.CONFIG, + native_unit_of_measurement=UnitOfTime.SECONDS, + entity_registry_enabled_default=False, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.KEEP_AWAKE_PULSE_DURATION, + translation_key=PinecilNumber.KEEP_AWAKE_PULSE_DURATION, + value_fn=( + lambda _, settings: multiply(settings.get("keep_awake_pulse_duration"), 250) + ), + characteristic=CharSetting.KEEP_AWAKE_PULSE_DURATION, + 
raw_value_fn=lambda value: value / 250, + mode=NumberMode.BOX, + native_min_value=250, + native_max_value=2250, + native_step=250, + entity_category=EntityCategory.CONFIG, + native_unit_of_measurement=UnitOfTime.MILLISECONDS, + entity_registry_enabled_default=False, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.VOLTAGE_DIV, + translation_key=PinecilNumber.VOLTAGE_DIV, + value_fn=(lambda _, settings: settings.get("voltage_div")), + characteristic=CharSetting.VOLTAGE_DIV, + raw_value_fn=lambda value: value, + mode=NumberMode.BOX, + native_min_value=360, + native_max_value=900, + native_step=1, + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.TEMP_INCREMENT_SHORT, + translation_key=PinecilNumber.TEMP_INCREMENT_SHORT, + value_fn=(lambda _, settings: settings.get("temp_increment_short")), + characteristic=CharSetting.TEMP_INCREMENT_SHORT, + raw_value_fn=lambda value: value, + mode=NumberMode.BOX, + native_min_value=1, + native_max_value=50, + native_step=1, + entity_category=EntityCategory.CONFIG, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.TEMP_INCREMENT_LONG, + translation_key=PinecilNumber.TEMP_INCREMENT_LONG, + value_fn=(lambda _, settings: settings.get("temp_increment_long")), + characteristic=CharSetting.TEMP_INCREMENT_LONG, + raw_value_fn=lambda value: value, + mode=NumberMode.BOX, + native_min_value=5, + native_max_value=90, + native_step=5, + entity_category=EntityCategory.CONFIG, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), ) @@ -76,23 +349,56 @@ class IronOSNumberEntity(IronOSBaseEntity, NumberEntity): entity_description: IronOSNumberEntityDescription + def __init__( + self, + coordinator: IronOSCoordinators, + entity_description: IronOSNumberEntityDescription, + ) -> None: + """Initialize the number entity.""" + super().__init__( + coordinator.live_data, entity_description, entity_description.characteristic + ) + + self.settings = coordinator.settings + async def async_set_native_value(self, value: float) -> None: """Update the current value.""" + if raw_value_fn := self.entity_description.raw_value_fn: + value = raw_value_fn(value) try: - await self.coordinator.device.write(self.entity_description.set_key, value) + await self.coordinator.device.write( + self.entity_description.characteristic, value + ) except CommunicationError as e: raise ServiceValidationError( translation_domain=DOMAIN, translation_key="submit_setting_failed", ) from e - self.async_write_ha_state() + await self.settings.async_request_refresh() @property def native_value(self) -> float | int | None: """Return sensor state.""" - return self.entity_description.value_fn(self.coordinator.data) + return self.entity_description.value_fn( + self.coordinator.data, self.settings.data + ) @property def native_max_value(self) -> float: """Return sensor state.""" - return self.entity_description.max_value_fn(self.coordinator.data) + + if self.entity_description.max_value_fn is not None: + return self.entity_description.max_value_fn(self.coordinator.data) + + return self.entity_description.native_max_value or DEFAULT_MAX_VALUE + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + + await super().async_added_to_hass() + self.async_on_remove( + self.settings.async_add_listener( + self._handle_coordinator_update, self.entity_description.characteristic + ) + ) + await 
self.settings.async_request_refresh() diff --git a/homeassistant/components/iron_os/sensor.py b/homeassistant/components/iron_os/sensor.py index b21fa2e5591..05d56db26d3 100644 --- a/homeassistant/components/iron_os/sensor.py +++ b/homeassistant/components/iron_os/sensor.py @@ -141,7 +141,7 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = ( native_unit_of_measurement=UnitOfElectricPotential.MICROVOLT, device_class=SensorDeviceClass.VOLTAGE, state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=3, + suggested_display_precision=0, value_fn=lambda data: data.tip_voltage, entity_category=EntityCategory.DIAGNOSTIC, ), @@ -181,7 +181,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up sensors from a config entry.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.live_data async_add_entities( IronOSSensorEntity(coordinator, description) diff --git a/homeassistant/components/iron_os/strings.json b/homeassistant/components/iron_os/strings.json index 92441b39fc3..c474b704677 100644 --- a/homeassistant/components/iron_os/strings.json +++ b/homeassistant/components/iron_os/strings.json @@ -23,6 +23,60 @@ "number": { "setpoint_temperature": { "name": "Setpoint temperature" + }, + "sleep_temperature": { + "name": "Sleep temperature" + }, + "sleep_timeout": { + "name": "Sleep timeout" + }, + "qc_max_voltage": { + "name": "Quick Charge voltage" + }, + "pd_timeout": { + "name": "Power Delivery timeout" + }, + "boost_temp": { + "name": "Boost temperature" + }, + "shutdown_timeout": { + "name": "Shutdown timeout" + }, + "display_brightness": { + "name": "Display brightness" + }, + "power_limit": { + "name": "Power limit" + }, + "calibration_offset": { + "name": "Calibration offset" + }, + "hall_sensitivity": { + "name": "Hall effect sensitivity" + }, + "min_voltage_per_cell": { + "name": "Min. 
voltage per cell" + }, + "accel_sensitivity": { + "name": "Motion sensitivity" + }, + "keep_awake_pulse_power": { + "name": "Keep-awake pulse intensity" + }, + "keep_awake_pulse_delay": { + "name": "Keep-awake pulse delay" + }, + "keep_awake_pulse_duration": { + "name": "Keep-awake pulse duration" + }, + "voltage_div": { + "name": "Voltage divider" + }, + "temp_increment_short": { + "name": "Short-press temperature step" + }, + "temp_increment_long": { + "name": "Long-press temperature step" } }, "sensor": { diff --git a/homeassistant/components/iron_os/update.py b/homeassistant/components/iron_os/update.py index 0da0786821e..b431d321f24 100644 --- a/homeassistant/components/iron_os/update.py +++ b/homeassistant/components/iron_os/update.py @@ -30,7 +30,7 @@ async def async_setup_entry( ) -> None: """Set up IronOS update platform.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.live_data async_add_entities( [IronOSUpdate(coordinator, hass.data[IRON_OS_KEY], UPDATE_DESCRIPTION)] diff --git a/tests/components/iron_os/conftest.py b/tests/components/iron_os/conftest.py index a7c3592ae73..eda9c2c5d1d 100644 --- a/tests/components/iron_os/conftest.py +++ b/tests/components/iron_os/conftest.py @@ -5,7 +5,13 @@ from unittest.mock import AsyncMock, MagicMock, patch from bleak.backends.device import BLEDevice from habluetooth import BluetoothServiceInfoBleak -from pynecil import DeviceInfoResponse, LiveDataResponse, OperatingMode, PowerSource +from pynecil import ( + DeviceInfoResponse, + LiveDataResponse, + OperatingMode, + PowerSource, + SettingsDataResponse, +) import pytest from homeassistant.components.iron_os import DOMAIN @@ -145,6 +151,27 @@ def mock_pynecil() -> Generator[AsyncMock]: device_sn="0000c0ffeec0ffee", name=DEFAULT_NAME, ) + client.get_settings.return_value = SettingsDataResponse( + sleep_temp=150, + sleep_timeout=5, + min_dc_voltage_cells=0, + min_volltage_per_cell=3.3, + qc_ideal_voltage=9.0, + accel_sensitivity=7, + shutdown_time=10, + keep_awake_pulse_power=0.5, + keep_awake_pulse_delay=4, + keep_awake_pulse_duration=1, + voltage_div=600, + boost_temp=420, + calibration_offset=900, + power_limit=12.0, + temp_increment_long=10, + temp_increment_short=1, + hall_sensitivity=7, + pd_negotiation_timeout=2.0, + display_brightness=3, + ) client.get_live_data.return_value = LiveDataResponse( live_temp=298, setpoint_temp=300, diff --git a/tests/components/iron_os/snapshots/test_number.ambr b/tests/components/iron_os/snapshots/test_number.ambr index 2f5ee62e37e..24663cc4b0f 100644 --- a/tests/components/iron_os/snapshots/test_number.ambr +++ b/tests/components/iron_os/snapshots/test_number.ambr @@ -1,4 +1,732 @@ # serializer version: 1 +# name: test_state[number.pinecil_boost_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 450, + 'min': 0, + 'mode': , + 'step': 10, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_boost_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Boost temperature', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_boost_temp', + 'unit_of_measurement': , + }) +# --- +# name: 
test_state[number.pinecil_boost_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Pinecil Boost temperature', + 'max': 450, + 'min': 0, + 'mode': , + 'step': 10, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_boost_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '420', + }) +# --- +# name: test_state[number.pinecil_calibration_offset-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 2500, + 'min': 100, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_calibration_offset', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Calibration offset', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_calibration_offset', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_calibration_offset-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Calibration offset', + 'max': 2500, + 'min': 100, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_calibration_offset', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '900', + }) +# --- +# name: test_state[number.pinecil_display_brightness-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 5, + 'min': 1, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_display_brightness', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Display brightness', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_display_brightness', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[number.pinecil_display_brightness-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Display brightness', + 'max': 5, + 'min': 1, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.pinecil_display_brightness', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3', + }) +# --- +# name: test_state[number.pinecil_hall_effect_sensitivity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 9, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_hall_effect_sensitivity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Hall effect sensitivity', + 'platform': 
'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_hall_sensitivity', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[number.pinecil_hall_effect_sensitivity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Hall effect sensitivity', + 'max': 9, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.pinecil_hall_effect_sensitivity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7', + }) +# --- +# name: test_state[number.pinecil_keep_awake_pulse_delay-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 22.5, + 'min': 2.5, + 'mode': , + 'step': 2.5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_keep_awake_pulse_delay', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Keep-awake pulse delay', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_keep_awake_pulse_delay', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_keep_awake_pulse_delay-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Keep-awake pulse delay', + 'max': 22.5, + 'min': 2.5, + 'mode': , + 'step': 2.5, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_keep_awake_pulse_delay', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.0', + }) +# --- +# name: test_state[number.pinecil_keep_awake_pulse_duration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 2250, + 'min': 250, + 'mode': , + 'step': 250, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_keep_awake_pulse_duration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Keep-awake pulse duration', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_keep_awake_pulse_duration', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_keep_awake_pulse_duration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Keep-awake pulse duration', + 'max': 2250, + 'min': 250, + 'mode': , + 'step': 250, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_keep_awake_pulse_duration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '250', + }) +# --- +# name: test_state[number.pinecil_keep_awake_pulse_intensity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 9.9, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 
'number.pinecil_keep_awake_pulse_intensity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Keep-awake pulse intensity', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_keep_awake_pulse_power', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_keep_awake_pulse_intensity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Keep-awake pulse intensity', + 'max': 9.9, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_keep_awake_pulse_intensity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.5', + }) +# --- +# name: test_state[number.pinecil_long_press_temperature_step-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 90, + 'min': 5, + 'mode': , + 'step': 5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_long_press_temperature_step', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Long-press temperature step', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_temp_increment_long', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_long_press_temperature_step-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Long-press temperature step', + 'max': 90, + 'min': 5, + 'mode': , + 'step': 5, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_long_press_temperature_step', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_state[number.pinecil_min_voltage_per_cell-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 3.8, + 'min': 2.4, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_min_voltage_per_cell', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Min. voltage per cell', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_min_voltage_per_cell', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_min_voltage_per_cell-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Min. 
voltage per cell', + 'max': 3.8, + 'min': 2.4, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_min_voltage_per_cell', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_state[number.pinecil_motion_sensitivity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 9, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_motion_sensitivity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Motion sensitivity', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_accel_sensitivity', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[number.pinecil_motion_sensitivity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Motion sensitivity', + 'max': 9, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.pinecil_motion_sensitivity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7', + }) +# --- +# name: test_state[number.pinecil_power_delivery_timeout-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 5.0, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_power_delivery_timeout', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power Delivery timeout', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_pd_timeout', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_power_delivery_timeout-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Pinecil Power Delivery timeout', + 'max': 5.0, + 'min': 0, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_power_delivery_timeout', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.0', + }) +# --- +# name: test_state[number.pinecil_power_limit-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 12, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_power_limit', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power limit', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_power_limit', + 
'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_power_limit-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Power limit', + 'max': 12, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_power_limit', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12.0', + }) +# --- +# name: test_state[number.pinecil_quick_charge_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 22.0, + 'min': 9.0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_quick_charge_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Quick Charge voltage', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_qc_max_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_quick_charge_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Pinecil Quick Charge voltage', + 'max': 22.0, + 'min': 9.0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_quick_charge_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '9.0', + }) +# --- # name: test_state[number.pinecil_setpoint_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -56,3 +784,284 @@ 'state': '300', }) # --- +# name: test_state[number.pinecil_short_press_temperature_step-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 50, + 'min': 1, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_short_press_temperature_step', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Short-press temperature step', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_temp_increment_short', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_short_press_temperature_step-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Short-press temperature step', + 'max': 50, + 'min': 1, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_short_press_temperature_step', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_state[number.pinecil_shutdown_timeout-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 60, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 
'number.pinecil_shutdown_timeout', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Shutdown timeout', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_shutdown_timeout', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_shutdown_timeout-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Pinecil Shutdown timeout', + 'max': 60, + 'min': 0, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_shutdown_timeout', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_state[number.pinecil_sleep_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 450, + 'min': 10, + 'mode': , + 'step': 10, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_sleep_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Sleep temperature', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_sleep_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_sleep_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Pinecil Sleep temperature', + 'max': 450, + 'min': 10, + 'mode': , + 'step': 10, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_sleep_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '150', + }) +# --- +# name: test_state[number.pinecil_sleep_timeout-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 15, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_sleep_timeout', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Sleep timeout', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_sleep_timeout', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_sleep_timeout-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Sleep timeout', + 'max': 15, + 'min': 0, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_sleep_timeout', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5', + }) +# --- +# name: test_state[number.pinecil_voltage_divider-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 900, + 'min': 360, + 
'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_voltage_divider', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Voltage divider', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_voltage_div', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[number.pinecil_voltage_divider-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Voltage divider', + 'max': 900, + 'min': 360, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.pinecil_voltage_divider', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '600', + }) +# --- diff --git a/tests/components/iron_os/snapshots/test_sensor.ambr b/tests/components/iron_os/snapshots/test_sensor.ambr index 44a17dd6ea5..9ab5d47eec8 100644 --- a/tests/components/iron_os/snapshots/test_sensor.ambr +++ b/tests/components/iron_os/snapshots/test_sensor.ambr @@ -502,7 +502,7 @@ 'name': None, 'options': dict({ 'sensor': dict({ - 'suggested_display_precision': 3, + 'suggested_display_precision': 0, }), }), 'original_device_class': , diff --git a/tests/components/iron_os/test_init.py b/tests/components/iron_os/test_init.py index f7db2a813ec..21194a55eea 100644 --- a/tests/components/iron_os/test_init.py +++ b/tests/components/iron_os/test_init.py @@ -1,14 +1,17 @@ """Test init of IronOS integration.""" +from datetime import datetime, timedelta from unittest.mock import AsyncMock +from freezegun.api import FrozenDateTimeFactory from pynecil import CommunicationError import pytest from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_UNKNOWN from homeassistant.core import HomeAssistant -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed @pytest.mark.usefixtures("mock_pynecil", "ble_device") @@ -45,16 +48,42 @@ async def test_update_data_config_entry_not_ready( assert config_entry.state is ConfigEntryState.SETUP_RETRY -@pytest.mark.usefixtures("ble_device") +@pytest.mark.usefixtures("entity_registry_enabled_by_default", "ble_device") async def test_setup_config_entry_not_ready( hass: HomeAssistant, config_entry: MockConfigEntry, mock_pynecil: AsyncMock, + freezer: FrozenDateTimeFactory, ) -> None: """Test config entry not ready.""" + mock_pynecil.get_settings.side_effect = CommunicationError mock_pynecil.get_device_info.side_effect = CommunicationError config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default", "ble_device") +async def test_settings_exception( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pynecil: AsyncMock, +) -> None: + """Test skipping of settings on exception.""" + mock_pynecil.get_settings.side_effect = CommunicationError + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + 
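# Illustrative note (assumption, not part of the upstream diff): when get_settings raises
# CommunicationError the settings coordinator is expected to expose empty data, so the
# entity's value_fn returns None and the state is rendered as "unknown" instead of setup
# failing. A minimal stand-alone sketch of that fallback:
def _native_value(settings: dict) -> float | None:
    # Mirrors value_fn=lambda _, settings: settings.get("boost_temp")
    return settings.get("boost_temp")

assert _native_value({}) is None                  # settings fetch failed -> "unknown"
assert _native_value({"boost_temp": 420}) == 420  # settings available -> real value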
async_fire_time_changed(hass, datetime.now() + timedelta(seconds=60)) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + assert (state := hass.states.get("number.pinecil_boost_temperature")) + assert state.state == STATE_UNKNOWN diff --git a/tests/components/iron_os/test_number.py b/tests/components/iron_os/test_number.py index 781492987ee..e0617a5012f 100644 --- a/tests/components/iron_os/test_number.py +++ b/tests/components/iron_os/test_number.py @@ -1,8 +1,10 @@ """Tests for the IronOS number platform.""" from collections.abc import AsyncGenerator +from datetime import timedelta from unittest.mock import AsyncMock, patch +from freezegun.api import FrozenDateTimeFactory from pynecil import CharSetting, CommunicationError import pytest from syrupy.assertion import SnapshotAssertion @@ -18,11 +20,11 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_registry as er -from tests.common import MockConfigEntry, snapshot_platform +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform @pytest.fixture(autouse=True) -async def sensor_only() -> AsyncGenerator[None]: +async def number_only() -> AsyncGenerator[None]: """Enable only the number platform.""" with patch( "homeassistant.components.iron_os.PLATFORMS", @@ -39,6 +41,7 @@ async def test_state( config_entry: MockConfigEntry, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test the IronOS number platform states.""" config_entry.add_to_hass(hass) @@ -47,14 +50,105 @@ async def test_state( assert config_entry.state is ConfigEntryState.LOADED + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done() await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) +@pytest.mark.parametrize( + ("entity_id", "characteristic", "value", "expected_value"), + [ + ( + "number.pinecil_setpoint_temperature", + CharSetting.SETPOINT_TEMP, + 300, + 300, + ), + ( + "number.pinecil_boost_temperature", + CharSetting.BOOST_TEMP, + 420, + 420, + ), + ( + "number.pinecil_calibration_offset", + CharSetting.CALIBRATION_OFFSET, + 600, + 600, + ), + ( + "number.pinecil_display_brightness", + CharSetting.DISPLAY_BRIGHTNESS, + 3, + 3, + ), + ( + "number.pinecil_hall_effect_sensitivity", + CharSetting.HALL_SENSITIVITY, + 7, + 7, + ), + ( + "number.pinecil_keep_awake_pulse_delay", + CharSetting.KEEP_AWAKE_PULSE_DELAY, + 10.0, + 4, + ), + ( + "number.pinecil_keep_awake_pulse_duration", + CharSetting.KEEP_AWAKE_PULSE_DURATION, + 500, + 2, + ), + ( + "number.pinecil_keep_awake_pulse_intensity", + CharSetting.KEEP_AWAKE_PULSE_POWER, + 0.5, + 0.5, + ), + ( + "number.pinecil_long_press_temperature_step", + CharSetting.TEMP_INCREMENT_LONG, + 10, + 10, + ), + ( + "number.pinecil_min_voltage_per_cell", + CharSetting.MIN_VOLTAGE_PER_CELL, + 3.3, + 3.3, + ), + ("number.pinecil_motion_sensitivity", CharSetting.ACCEL_SENSITIVITY, 7, 7), + ( + "number.pinecil_power_delivery_timeout", + CharSetting.PD_NEGOTIATION_TIMEOUT, + 2.0, + 2.0, + ), + ("number.pinecil_power_limit", CharSetting.POWER_LIMIT, 12.0, 12.0), + ("number.pinecil_quick_charge_voltage", CharSetting.QC_IDEAL_VOLTAGE, 9.0, 9.0), + ( + "number.pinecil_short_press_temperature_step", + CharSetting.TEMP_INCREMENT_SHORT, + 1, + 1, + ), + ("number.pinecil_shutdown_timeout", CharSetting.SHUTDOWN_TIME, 10, 10), + 
("number.pinecil_sleep_temperature", CharSetting.SLEEP_TEMP, 150, 150), + ("number.pinecil_sleep_timeout", CharSetting.SLEEP_TIMEOUT, 5, 5), + ("number.pinecil_voltage_divider", CharSetting.VOLTAGE_DIV, 600, 600), + ], +) @pytest.mark.usefixtures("entity_registry_enabled_by_default", "ble_device") async def test_set_value( hass: HomeAssistant, config_entry: MockConfigEntry, mock_pynecil: AsyncMock, + entity_id: str, + characteristic: CharSetting, + value: float, + expected_value: float, ) -> None: """Test the IronOS number platform set value service.""" @@ -67,12 +161,12 @@ async def test_set_value( await hass.services.async_call( NUMBER_DOMAIN, SERVICE_SET_VALUE, - service_data={ATTR_VALUE: 300}, - target={ATTR_ENTITY_ID: "number.pinecil_setpoint_temperature"}, + service_data={ATTR_VALUE: value}, + target={ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert len(mock_pynecil.write.mock_calls) == 1 - mock_pynecil.write.assert_called_once_with(CharSetting.SETPOINT_TEMP, 300) + mock_pynecil.write.assert_called_once_with(characteristic, expected_value) @pytest.mark.usefixtures("entity_registry_enabled_by_default", "ble_device") From 1cf00d9bbcc5ab3220de485e48e8e0b2b58f08b1 Mon Sep 17 00:00:00 2001 From: Josef Zweck <24647999+zweckj@users.noreply.github.com> Date: Mon, 2 Dec 2024 12:38:39 +0100 Subject: [PATCH 0149/1198] Use format_mac correctly for acaia (#132062) --- homeassistant/components/acaia/config_flow.py | 10 +++++----- homeassistant/components/acaia/entity.py | 7 ++++--- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/acaia/config_flow.py b/homeassistant/components/acaia/config_flow.py index 36727059c8a..fb2639fc886 100644 --- a/homeassistant/components/acaia/config_flow.py +++ b/homeassistant/components/acaia/config_flow.py @@ -42,7 +42,7 @@ class AcaiaConfigFlow(ConfigFlow, domain=DOMAIN): errors: dict[str, str] = {} if user_input is not None: - mac = format_mac(user_input[CONF_ADDRESS]) + mac = user_input[CONF_ADDRESS] try: is_new_style_scale = await is_new_scale(mac) except AcaiaDeviceNotFound: @@ -53,12 +53,12 @@ class AcaiaConfigFlow(ConfigFlow, domain=DOMAIN): except AcaiaUnknownDevice: return self.async_abort(reason="unsupported_device") else: - await self.async_set_unique_id(mac) + await self.async_set_unique_id(format_mac(mac)) self._abort_if_unique_id_configured() if not errors: return self.async_create_entry( - title=self._discovered_devices[user_input[CONF_ADDRESS]], + title=self._discovered_devices[mac], data={ CONF_ADDRESS: mac, CONF_IS_NEW_STYLE_SCALE: is_new_style_scale, @@ -99,10 +99,10 @@ class AcaiaConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle a discovered Bluetooth device.""" - self._discovered[CONF_ADDRESS] = mac = format_mac(discovery_info.address) + self._discovered[CONF_ADDRESS] = discovery_info.address self._discovered[CONF_NAME] = discovery_info.name - await self.async_set_unique_id(mac) + await self.async_set_unique_id(format_mac(discovery_info.address)) self._abort_if_unique_id_configured() try: diff --git a/homeassistant/components/acaia/entity.py b/homeassistant/components/acaia/entity.py index 8a2108d2687..db01b414b99 100644 --- a/homeassistant/components/acaia/entity.py +++ b/homeassistant/components/acaia/entity.py @@ -2,7 +2,7 @@ from dataclasses import dataclass -from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.device_registry import DeviceInfo, format_mac from homeassistant.helpers.entity import EntityDescription from 
homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -25,10 +25,11 @@ class AcaiaEntity(CoordinatorEntity[AcaiaCoordinator]): super().__init__(coordinator) self.entity_description = entity_description self._scale = coordinator.scale - self._attr_unique_id = f"{self._scale.mac}_{entity_description.key}" + formatted_mac = format_mac(self._scale.mac) + self._attr_unique_id = f"{formatted_mac}_{entity_description.key}" self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, self._scale.mac)}, + identifiers={(DOMAIN, formatted_mac)}, manufacturer="Acaia", model=self._scale.model, suggested_area="Kitchen", From a419fde0ebe8a8b5dd0655818b18951373c97ff4 Mon Sep 17 00:00:00 2001 From: Simone Rescio Date: Mon, 2 Dec 2024 13:18:53 +0100 Subject: [PATCH 0150/1198] Bump pyezviz to 0.2.2.3 (#132060) --- homeassistant/components/ezviz/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/ezviz/manifest.json b/homeassistant/components/ezviz/manifest.json index 53976bf3002..7c796c74ef7 100644 --- a/homeassistant/components/ezviz/manifest.json +++ b/homeassistant/components/ezviz/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/ezviz", "iot_class": "cloud_polling", "loggers": ["paho_mqtt", "pyezviz"], - "requirements": ["pyezviz==0.2.1.2"] + "requirements": ["pyezviz==0.2.2.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index 7e3e19578d1..8f6737e2fcf 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1907,7 +1907,7 @@ pyeverlights==0.1.0 pyevilgenius==2.0.0 # homeassistant.components.ezviz -pyezviz==0.2.1.2 +pyezviz==0.2.2.3 # homeassistant.components.fibaro pyfibaro==0.8.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index eb4305709b9..ebecfd1f42a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1539,7 +1539,7 @@ pyeverlights==0.1.0 pyevilgenius==2.0.0 # homeassistant.components.ezviz -pyezviz==0.2.1.2 +pyezviz==0.2.2.3 # homeassistant.components.fibaro pyfibaro==0.8.0 From 29b48d02de3b274c0f1af152bdbd67cf42c8beae Mon Sep 17 00:00:00 2001 From: Petar Petrov Date: Mon, 2 Dec 2024 14:21:54 +0200 Subject: [PATCH 0151/1198] Bump zwave-js-server-python to 0.60.0 (#132059) --- .../components/zwave_js/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/zwave_js/test_api.py | 10 +++++----- tests/components/zwave_js/test_services.py | 18 ++++++++---------- 5 files changed, 16 insertions(+), 18 deletions(-) diff --git a/homeassistant/components/zwave_js/manifest.json b/homeassistant/components/zwave_js/manifest.json index ad435b97cbc..011776f4556 100644 --- a/homeassistant/components/zwave_js/manifest.json +++ b/homeassistant/components/zwave_js/manifest.json @@ -9,7 +9,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["zwave_js_server"], - "requirements": ["pyserial==3.5", "zwave-js-server-python==0.59.1"], + "requirements": ["pyserial==3.5", "zwave-js-server-python==0.60.0"], "usb": [ { "vid": "0658", diff --git a/requirements_all.txt b/requirements_all.txt index 8f6737e2fcf..4ef04b87b49 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3093,7 +3093,7 @@ ziggo-mediabox-xl==1.1.0 zm-py==0.5.4 # homeassistant.components.zwave_js -zwave-js-server-python==0.59.1 +zwave-js-server-python==0.60.0 # homeassistant.components.zwave_me zwave-me-ws==0.4.3 diff --git a/requirements_test_all.txt 
b/requirements_test_all.txt index ebecfd1f42a..73439609bd5 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2470,7 +2470,7 @@ zeversolar==0.3.2 zha==0.0.41 # homeassistant.components.zwave_js -zwave-js-server-python==0.59.1 +zwave-js-server-python==0.60.0 # homeassistant.components.zwave_me zwave-me-ws==0.4.3 diff --git a/tests/components/zwave_js/test_api.py b/tests/components/zwave_js/test_api.py index 0807e9e09a5..357ec29b810 100644 --- a/tests/components/zwave_js/test_api.py +++ b/tests/components/zwave_js/test_api.py @@ -3176,10 +3176,10 @@ async def test_set_raw_config_parameter( args = client.async_send_command_no_wait.call_args[0][0] assert args["command"] == "endpoint.set_raw_config_parameter_value" assert args["nodeId"] == multisensor_6.node_id - assert args["options"]["parameter"] == 102 - assert args["options"]["value"] == 1 - assert args["options"]["valueSize"] == 2 - assert args["options"]["valueFormat"] == 1 + assert args["parameter"] == 102 + assert args["value"] == 1 + assert args["valueSize"] == 2 + assert args["valueFormat"] == 1 # Reset the mock for async_send_command_no_wait instead client.async_send_command_no_wait.reset_mock() @@ -3250,7 +3250,7 @@ async def test_get_raw_config_parameter( args = client.async_send_command.call_args[0][0] assert args["command"] == "endpoint.get_raw_config_parameter_value" assert args["nodeId"] == multisensor_6.node_id - assert args["options"]["parameter"] == 102 + assert args["parameter"] == 102 client.async_send_command.reset_mock() diff --git a/tests/components/zwave_js/test_services.py b/tests/components/zwave_js/test_services.py index 41477f18b97..14084a6b846 100644 --- a/tests/components/zwave_js/test_services.py +++ b/tests/components/zwave_js/test_services.py @@ -255,11 +255,10 @@ async def test_set_config_parameter( assert args["command"] == "endpoint.set_raw_config_parameter_value" assert args["nodeId"] == 52 assert args["endpoint"] == 0 - options = args["options"] - assert options["parameter"] == 2 - assert options["value"] == 1 - assert options["valueSize"] == 2 - assert options["valueFormat"] == 1 + assert args["parameter"] == 2 + assert args["value"] == 1 + assert args["valueSize"] == 2 + assert args["valueFormat"] == 1 client.async_send_command_no_wait.reset_mock() @@ -284,11 +283,10 @@ async def test_set_config_parameter( assert args["command"] == "endpoint.set_raw_config_parameter_value" assert args["nodeId"] == 2 assert args["endpoint"] == 1 - options = args["options"] - assert options["parameter"] == 32 - assert options["value"] == 1 - assert options["valueSize"] == 2 - assert options["valueFormat"] == 1 + assert args["parameter"] == 32 + assert args["value"] == 1 + assert args["valueSize"] == 2 + assert args["valueFormat"] == 1 client.async_send_command_no_wait.reset_mock() client.async_send_command.reset_mock() From c610f16e903f074e097f39af774a28f08188eefa Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Dec 2024 13:25:38 +0100 Subject: [PATCH 0152/1198] Bump dawidd6/action-download-artifact from 6 to 7 (#132040) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/builder.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index c2fee9512fb..f4e4de97e78 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -94,7 +94,7 @@ jobs: - name: Download nightly wheels of 
frontend if: needs.init.outputs.channel == 'dev' - uses: dawidd6/action-download-artifact@v6 + uses: dawidd6/action-download-artifact@v7 with: github_token: ${{secrets.GITHUB_TOKEN}} repo: home-assistant/frontend @@ -105,7 +105,7 @@ jobs: - name: Download nightly wheels of intents if: needs.init.outputs.channel == 'dev' - uses: dawidd6/action-download-artifact@v6 + uses: dawidd6/action-download-artifact@v7 with: github_token: ${{secrets.GITHUB_TOKEN}} repo: home-assistant/intents-package From 0c693b6ae1dab284881a84285862e7a92a52f056 Mon Sep 17 00:00:00 2001 From: Jan Rieger <271149+jrieger@users.noreply.github.com> Date: Mon, 2 Dec 2024 13:28:54 +0100 Subject: [PATCH 0153/1198] Add translated native unit of measurement to Jellyfin (#132055) --- homeassistant/components/jellyfin/sensor.py | 1 - homeassistant/components/jellyfin/strings.json | 3 ++- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/jellyfin/sensor.py b/homeassistant/components/jellyfin/sensor.py index 24aeecab7e5..5c519f661ee 100644 --- a/homeassistant/components/jellyfin/sensor.py +++ b/homeassistant/components/jellyfin/sensor.py @@ -36,7 +36,6 @@ SENSOR_TYPES: tuple[JellyfinSensorEntityDescription, ...] = ( key="watching", translation_key="watching", value_fn=_count_now_playing, - native_unit_of_measurement="clients", ), ) diff --git a/homeassistant/components/jellyfin/strings.json b/homeassistant/components/jellyfin/strings.json index f2afa0c8ad5..a9816b1fb78 100644 --- a/homeassistant/components/jellyfin/strings.json +++ b/homeassistant/components/jellyfin/strings.json @@ -29,7 +29,8 @@ "entity": { "sensor": { "watching": { - "name": "Active clients" + "name": "Active clients", + "unit_of_measurement": "clients" } } }, From 99063ba14117d290da4772615e02b82272089b23 Mon Sep 17 00:00:00 2001 From: Mike Degatano Date: Mon, 2 Dec 2024 07:34:39 -0500 Subject: [PATCH 0154/1198] Reboot host to aiohasupervisor (#130391) * Reboot host to aiohasupervisor * Remove invalid test * Remove unnecessary init --- homeassistant/components/hassio/__init__.py | 1 - homeassistant/components/hassio/handler.py | 10 ------ .../homeassistant_yellow/config_flow.py | 20 +++++++----- tests/components/conftest.py | 1 + tests/components/hassio/test_handler.py | 14 --------- .../homeassistant_yellow/test_config_flow.py | 31 ++++++++++++------- 6 files changed, 33 insertions(+), 44 deletions(-) diff --git a/homeassistant/components/hassio/__init__.py b/homeassistant/components/hassio/__init__.py index 306c9d43d72..a2a9d8ff028 100644 --- a/homeassistant/components/hassio/__init__.py +++ b/homeassistant/components/hassio/__init__.py @@ -119,7 +119,6 @@ from .handler import ( # noqa: F401 async_create_backup, async_get_green_settings, async_get_yellow_settings, - async_reboot_host, async_set_green_settings, async_set_yellow_settings, async_update_diagnostics, diff --git a/homeassistant/components/hassio/handler.py b/homeassistant/components/hassio/handler.py index 58f2aa8c144..254c392462c 100644 --- a/homeassistant/components/hassio/handler.py +++ b/homeassistant/components/hassio/handler.py @@ -133,16 +133,6 @@ async def async_set_yellow_settings( ) -@api_data -async def async_reboot_host(hass: HomeAssistant) -> dict: - """Reboot the host. - - Returns an empty dict. 
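# Illustrative sketch (not part of the upstream diff): the legacy helper removed here
# wrapped a raw "POST /host/reboot" via HassIO.send_command; the Yellow options flow
# below now resolves a typed aiohasupervisor client and awaits its host.reboot()
# coroutine instead. The client in this sketch is a stand-in mock; only the
# host.reboot() call shape is taken from the patch itself.
import asyncio
from unittest.mock import AsyncMock


async def _reboot_host(supervisor_client) -> None:
    # Equivalent of: await self._supervisor_client.host.reboot()
    await supervisor_client.host.reboot()


_client = AsyncMock()
asyncio.run(_reboot_host(_client))
_client.host.reboot.assert_awaited_once()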
- """ - hassio: HassIO = hass.data[DOMAIN] - return await hassio.send_command("/host/reboot", method="post", timeout=60) - - class HassIO: """Small API wrapper for Hass.io.""" diff --git a/homeassistant/components/homeassistant_yellow/config_flow.py b/homeassistant/components/homeassistant_yellow/config_flow.py index 9edc5009171..2c58ecdfc1c 100644 --- a/homeassistant/components/homeassistant_yellow/config_flow.py +++ b/homeassistant/components/homeassistant_yellow/config_flow.py @@ -14,8 +14,8 @@ import voluptuous as vol from homeassistant.components.hassio import ( HassioAPIError, async_get_yellow_settings, - async_reboot_host, async_set_yellow_settings, + get_supervisor_client, ) from homeassistant.components.homeassistant_hardware.firmware_config_flow import ( BaseFirmwareConfigFlow, @@ -31,7 +31,7 @@ from homeassistant.config_entries import ( ConfigFlowResult, OptionsFlow, ) -from homeassistant.core import callback +from homeassistant.core import HomeAssistant, async_get_hass, callback from homeassistant.helpers import discovery_flow, selector from .const import DOMAIN, FIRMWARE, RADIO_DEVICE, ZHA_DOMAIN, ZHA_HW_DISCOVERY_DATA @@ -67,11 +67,12 @@ class HomeAssistantYellowConfigFlow(BaseFirmwareConfigFlow, domain=DOMAIN): ) -> OptionsFlow: """Return the options flow.""" firmware_type = ApplicationType(config_entry.data[FIRMWARE]) + hass = async_get_hass() if firmware_type is ApplicationType.CPC: - return HomeAssistantYellowMultiPanOptionsFlowHandler(config_entry) + return HomeAssistantYellowMultiPanOptionsFlowHandler(hass, config_entry) - return HomeAssistantYellowOptionsFlowHandler(config_entry) + return HomeAssistantYellowOptionsFlowHandler(hass, config_entry) async def async_step_system( self, data: dict[str, Any] | None = None @@ -107,6 +108,11 @@ class BaseHomeAssistantYellowOptionsFlow(OptionsFlow, ABC): _hw_settings: dict[str, bool] | None = None + def __init__(self, hass: HomeAssistant, *args: Any, **kwargs: Any) -> None: + """Instantiate options flow.""" + super().__init__(*args, **kwargs) + self._supervisor_client = get_supervisor_client(hass) + @abstractmethod async def async_step_main_menu(self, _: None = None) -> ConfigFlowResult: """Show the main menu.""" @@ -172,7 +178,7 @@ class BaseHomeAssistantYellowOptionsFlow(OptionsFlow, ABC): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Reboot now.""" - await async_reboot_host(self.hass) + await self._supervisor_client.host.reboot() return self.async_create_entry(data={}) async def async_step_reboot_later( @@ -251,9 +257,9 @@ class HomeAssistantYellowOptionsFlowHandler( ): """Handle a firmware options flow for Home Assistant Yellow.""" - def __init__(self, *args: Any, **kwargs: Any) -> None: + def __init__(self, hass: HomeAssistant, *args: Any, **kwargs: Any) -> None: """Instantiate options flow.""" - super().__init__(*args, **kwargs) + super().__init__(hass, *args, **kwargs) self._hardware_name = BOARD_NAME self._device = RADIO_DEVICE diff --git a/tests/components/conftest.py b/tests/components/conftest.py index 5628a2b1aaf..97b1d337e82 100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -516,6 +516,7 @@ def supervisor_client() -> Generator[AsyncMock]: supervisor_client.addons = AsyncMock() supervisor_client.discovery = AsyncMock() supervisor_client.homeassistant = AsyncMock() + supervisor_client.host = AsyncMock() supervisor_client.os = AsyncMock() supervisor_client.resolution = AsyncMock() supervisor_client.supervisor = AsyncMock() diff --git 
a/tests/components/hassio/test_handler.py b/tests/components/hassio/test_handler.py index 56f0dcb706c..e6375171dab 100644 --- a/tests/components/hassio/test_handler.py +++ b/tests/components/hassio/test_handler.py @@ -341,20 +341,6 @@ async def test_api_set_yellow_settings( assert aioclient_mock.call_count == 1 -@pytest.mark.usefixtures("hassio_stubs") -async def test_api_reboot_host( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test setup with API ping.""" - aioclient_mock.post( - "http://127.0.0.1/host/reboot", - json={"result": "ok", "data": {}}, - ) - - assert await handler.async_reboot_host(hass) == {} - assert aioclient_mock.call_count == 1 - - @pytest.mark.usefixtures("hassio_stubs") async def test_send_command_invalid_command(hass: HomeAssistant) -> None: """Test send command fails when command is invalid.""" diff --git a/tests/components/homeassistant_yellow/test_config_flow.py b/tests/components/homeassistant_yellow/test_config_flow.py index ab6f158b211..1067be7b56e 100644 --- a/tests/components/homeassistant_yellow/test_config_flow.py +++ b/tests/components/homeassistant_yellow/test_config_flow.py @@ -1,7 +1,7 @@ """Test the Home Assistant Yellow config flow.""" from collections.abc import Generator -from unittest.mock import Mock, patch +from unittest.mock import AsyncMock, Mock, patch import pytest @@ -36,6 +36,16 @@ def config_flow_handler(hass: HomeAssistant) -> Generator[None]: yield +@pytest.fixture(autouse=True) +def mock_get_supervisor_client(supervisor_client: AsyncMock) -> Generator[None]: + """Mock get_supervisor_client method.""" + with patch( + "homeassistant.components.homeassistant_yellow.config_flow.get_supervisor_client", + return_value=supervisor_client, + ): + yield + + @pytest.fixture(name="get_yellow_settings") def mock_get_yellow_settings(): """Mock getting yellow settings.""" @@ -56,12 +66,9 @@ def mock_set_yellow_settings(): @pytest.fixture(name="reboot_host") -def mock_reboot_host(): +def mock_reboot_host(supervisor_client: AsyncMock) -> AsyncMock: """Mock rebooting host.""" - with patch( - "homeassistant.components.homeassistant_yellow.config_flow.async_reboot_host", - ) as reboot_host: - yield reboot_host + return supervisor_client.host.reboot async def test_config_flow(hass: HomeAssistant) -> None: @@ -130,11 +137,11 @@ async def test_config_flow_single_entry(hass: HomeAssistant) -> None: ) async def test_option_flow_led_settings( hass: HomeAssistant, - get_yellow_settings, - set_yellow_settings, - reboot_host, - reboot_menu_choice, - reboot_calls, + get_yellow_settings: AsyncMock, + set_yellow_settings: AsyncMock, + reboot_host: AsyncMock, + reboot_menu_choice: str, + reboot_calls: int, ) -> None: """Test updating LED settings.""" mock_integration(hass, MockModule("hassio")) @@ -176,7 +183,7 @@ async def test_option_flow_led_settings( {"next_step_id": reboot_menu_choice}, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert len(reboot_host.mock_calls) == reboot_calls + assert reboot_host.call_count == reboot_calls async def test_option_flow_led_settings_unchanged( From 6db8fced60636d876a2f7ff24c97fa289a13698e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ab=C3=ADlio=20Costa?= Date: Mon, 2 Dec 2024 12:52:59 +0000 Subject: [PATCH 0155/1198] Update buienradar sensors only after being added to HA (#131830) * Update buienradar sensors only after being added to HA * Move check to util * Check for platform in sensor state property * Move check to unit translation key property * Add test for sensor check * Properly handle 
added_to_hass * Remove redundant comment --- homeassistant/components/buienradar/sensor.py | 21 ++++++++-- homeassistant/components/sensor/__init__.py | 5 +++ tests/components/sensor/test_init.py | 39 +++++++++++++++++++ 3 files changed, 61 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/buienradar/sensor.py b/homeassistant/components/buienradar/sensor.py index afce293402e..712f765237e 100644 --- a/homeassistant/components/buienradar/sensor.py +++ b/homeassistant/components/buienradar/sensor.py @@ -742,6 +742,7 @@ class BrSensor(SensorEntity): ) -> None: """Initialize the sensor.""" self.entity_description = description + self._data: BrData | None = None self._measured = None self._attr_unique_id = ( f"{coordinates[CONF_LATITUDE]:2.6f}{coordinates[CONF_LONGITUDE]:2.6f}" @@ -756,17 +757,29 @@ class BrSensor(SensorEntity): if description.key.startswith(PRECIPITATION_FORECAST): self._timeframe = None + async def async_added_to_hass(self) -> None: + """Handle entity being added to hass.""" + if self._data is None: + return + self._update() + @callback def data_updated(self, data: BrData): - """Update data.""" - if self._load_data(data.data) and self.hass: + """Handle data update.""" + self._data = data + if not self.hass: + return + self._update() + + def _update(self): + """Update sensor data.""" + _LOGGER.debug("Updating sensor %s", self.entity_id) + if self._load_data(self._data.data): self.async_write_ha_state() @callback def _load_data(self, data): # noqa: C901 """Load the sensor with relevant data.""" - # Find sensor - # Check if we have a new measurement, # otherwise we do not have to update the sensor if self._measured == data.get(MEASURED): diff --git a/homeassistant/components/sensor/__init__.py b/homeassistant/components/sensor/__init__.py index 6b264efdd46..064914a5dfa 100644 --- a/homeassistant/components/sensor/__init__.py +++ b/homeassistant/components/sensor/__init__.py @@ -472,6 +472,11 @@ class SensorEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): """Return translation key for unit of measurement.""" if self.translation_key is None: return None + if self.platform is None: + raise ValueError( + f"Sensor {type(self)} cannot have a translation key for " + "unit of measurement before being added to the entity platform" + ) platform = self.platform return ( f"component.{platform.platform_name}.entity.{platform.domain}" diff --git a/tests/components/sensor/test_init.py b/tests/components/sensor/test_init.py index 44ad076807c..0ea46a41273 100644 --- a/tests/components/sensor/test_init.py +++ b/tests/components/sensor/test_init.py @@ -545,6 +545,45 @@ async def test_translated_unit_with_native_unit_raises( assert entity0.entity_id is None +async def test_unit_translation_key_without_platform_raises( + hass: HomeAssistant, +) -> None: + """Test that unit translation key property raises if the entity has no platform yet.""" + + with patch( + "homeassistant.helpers.service.translation.async_get_translations", + return_value={ + "component.test.entity.sensor.test_translation_key.unit_of_measurement": "Tests" + }, + ): + entity0 = MockSensor( + name="Test", + native_value="123", + unique_id="very_unique", + ) + entity0.entity_description = SensorEntityDescription( + "test", + translation_key="test_translation_key", + ) + with pytest.raises( + ValueError, + match="cannot have a translation key for unit of measurement before " + "being added to the entity platform", + ): + unit = entity0.unit_of_measurement # noqa: F841 + + 
setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) + + assert await async_setup_component( + hass, "sensor", {"sensor": {"platform": "test"}} + ) + await hass.async_block_till_done() + + # Should not raise after being added to the platform + unit = entity0.unit_of_measurement # noqa: F841 + assert unit == "Tests" + + @pytest.mark.parametrize( ( "device_class", From 89ee49e50c799ebb03787b9b8ffc0fe05eaef1e8 Mon Sep 17 00:00:00 2001 From: Duco Sebel <74970928+DCSBL@users.noreply.github.com> Date: Mon, 2 Dec 2024 14:04:39 +0100 Subject: [PATCH 0156/1198] Round status light brightness number in HomeWizard (#132069) --- homeassistant/components/homewizard/number.py | 2 +- tests/components/homewizard/snapshots/test_number.ambr | 4 ++-- tests/components/homewizard/test_number.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/homewizard/number.py b/homeassistant/components/homewizard/number.py index 1b4a0643dbe..1ed4c642f6b 100644 --- a/homeassistant/components/homewizard/number.py +++ b/homeassistant/components/homewizard/number.py @@ -64,4 +64,4 @@ class HWEnergyNumberEntity(HomeWizardEntity, NumberEntity): or (brightness := self.coordinator.data.state.brightness) is None ): return None - return brightness_to_value((0, 100), brightness) + return round(brightness_to_value((0, 100), brightness)) diff --git a/tests/components/homewizard/snapshots/test_number.ambr b/tests/components/homewizard/snapshots/test_number.ambr index 49f23cf8e2f..b14028cd97c 100644 --- a/tests/components/homewizard/snapshots/test_number.ambr +++ b/tests/components/homewizard/snapshots/test_number.ambr @@ -14,7 +14,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '100.0', + 'state': '100', }) # --- # name: test_number_entities[HWE-SKT-11].1 @@ -106,7 +106,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '100.0', + 'state': '100', }) # --- # name: test_number_entities[HWE-SKT-21].1 diff --git a/tests/components/homewizard/test_number.py b/tests/components/homewizard/test_number.py index ddadf09bb6e..623ba018dee 100644 --- a/tests/components/homewizard/test_number.py +++ b/tests/components/homewizard/test_number.py @@ -42,7 +42,7 @@ async def test_number_entities( assert snapshot == device_entry # Test unknown handling - assert state.state == "100.0" + assert state.state == "100" mock_homewizardenergy.state.return_value.brightness = None From fe0f414e990d9349e4be8e530d86d2505d6a46db Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Mon, 2 Dec 2024 14:40:13 +0100 Subject: [PATCH 0157/1198] Update mypy-dev to 1.14.0a5 (#132063) --- homeassistant/components/renault/entity.py | 2 +- homeassistant/helpers/template.py | 16 ++++++++++++++-- requirements_test.txt | 2 +- 3 files changed, 16 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/renault/entity.py b/homeassistant/components/renault/entity.py index 10de028b2d0..c7f83c1e6f2 100644 --- a/homeassistant/components/renault/entity.py +++ b/homeassistant/components/renault/entity.py @@ -60,5 +60,5 @@ class RenaultDataEntity( def _get_data_attr(self, key: str) -> StateType: """Return the attribute value from the coordinator data.""" if self.coordinator.data is None: - return None # type: ignore[unreachable] + return None return cast(StateType, getattr(self.coordinator.data, key)) diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index 57587dc21d6..5b4a48bb07c 100644 --- 
a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -23,7 +23,16 @@ import statistics from struct import error as StructError, pack, unpack_from import sys from types import CodeType, TracebackType -from typing import Any, Concatenate, Literal, NoReturn, Self, cast, overload +from typing import ( + TYPE_CHECKING, + Any, + Concatenate, + Literal, + NoReturn, + Self, + cast, + overload, +) from urllib.parse import urlencode as urllib_urlencode import weakref @@ -88,6 +97,9 @@ from .singleton import singleton from .translation import async_translate_state from .typing import TemplateVarsType +if TYPE_CHECKING: + from _typeshed import OptExcInfo + # mypy: allow-untyped-defs, no-check-untyped-defs _LOGGER = logging.getLogger(__name__) @@ -532,7 +544,7 @@ class Template: self._compiled: jinja2.Template | None = None self.hass = hass self.is_static = not is_template_string(template) - self._exc_info: sys._OptExcInfo | None = None + self._exc_info: OptExcInfo | None = None self._limited: bool | None = None self._strict: bool | None = None self._log_fn: Callable[[int, str], None] | None = None diff --git a/requirements_test.txt b/requirements_test.txt index f9763630767..bac3ba2e20c 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -12,7 +12,7 @@ coverage==7.6.8 freezegun==1.5.1 license-expression==30.4.0 mock-open==1.4.0 -mypy-dev==1.14.0a3 +mypy-dev==1.14.0a5 pre-commit==4.0.0 pydantic==1.10.19 pylint==3.3.1 From 4b9d89a480c09c8813b8eb4290940b3d0bea02ab Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Mon, 2 Dec 2024 14:57:47 +0100 Subject: [PATCH 0158/1198] Change wording in config flow dialog for fyta (#132075) --- homeassistant/components/fyta/strings.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/fyta/strings.json b/homeassistant/components/fyta/strings.json index 5adde02c0cb..edd65ad228d 100644 --- a/homeassistant/components/fyta/strings.json +++ b/homeassistant/components/fyta/strings.json @@ -3,13 +3,13 @@ "step": { "user": { "title": "Credentials for FYTA API", - "description": "Provide username and password to connect to the FYTA server", + "description": "Provide email and password to connect to the FYTA server", "data": { - "username": "[%key:common::config_flow::data::username%]", + "username": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]" }, "data_description": { - "username": "The username to login to your FYTA account.", + "username": "The email address to login to your FYTA account.", "password": "The password to login to your FYTA account." 
} }, From 2f644eb61cc7aa64f407643fd94e2185920b3ddc Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Mon, 2 Dec 2024 15:01:28 +0100 Subject: [PATCH 0159/1198] Remove option to update settings using second config flow in Reolink (#131695) --- .../components/reolink/config_flow.py | 2 +- tests/components/reolink/test_config_flow.py | 45 ------------------- 2 files changed, 1 insertion(+), 46 deletions(-) diff --git a/homeassistant/components/reolink/config_flow.py b/homeassistant/components/reolink/config_flow.py index 0b1ed7b4b15..1909545714f 100644 --- a/homeassistant/components/reolink/config_flow.py +++ b/homeassistant/components/reolink/config_flow.py @@ -278,7 +278,7 @@ class ReolinkFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_update_reload_and_abort( entry=self._get_reconfigure_entry(), data=user_input ) - self._abort_if_unique_id_configured(updates=user_input) + self._abort_if_unique_id_configured() return self.async_create_entry( title=str(host.api.nvr_name), diff --git a/tests/components/reolink/test_config_flow.py b/tests/components/reolink/test_config_flow.py index bb896428b99..19ad885f638 100644 --- a/tests/components/reolink/test_config_flow.py +++ b/tests/components/reolink/test_config_flow.py @@ -286,51 +286,6 @@ async def test_options_flow(hass: HomeAssistant, mock_setup_entry: MagicMock) -> } -async def test_change_connection_settings( - hass: HomeAssistant, mock_setup_entry: MagicMock -) -> None: - """Test changing connection settings by issuing a second user config flow.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - unique_id=format_mac(TEST_MAC), - data={ - CONF_HOST: TEST_HOST, - CONF_USERNAME: TEST_USERNAME, - CONF_PASSWORD: TEST_PASSWORD, - CONF_PORT: TEST_PORT, - CONF_USE_HTTPS: TEST_USE_HTTPS, - }, - options={ - CONF_PROTOCOL: DEFAULT_PROTOCOL, - }, - title=TEST_NVR_NAME, - ) - config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: TEST_HOST2, - CONF_USERNAME: TEST_USERNAME2, - CONF_PASSWORD: TEST_PASSWORD2, - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - assert config_entry.data[CONF_HOST] == TEST_HOST2 - assert config_entry.data[CONF_USERNAME] == TEST_USERNAME2 - assert config_entry.data[CONF_PASSWORD] == TEST_PASSWORD2 - - async def test_reauth(hass: HomeAssistant, mock_setup_entry: MagicMock) -> None: """Test a reauth flow.""" config_entry = MockConfigEntry( From 92520fe365dfd8d424f01a761c206db42afba137 Mon Sep 17 00:00:00 2001 From: David Knowles Date: Mon, 2 Dec 2024 09:18:17 -0500 Subject: [PATCH 0160/1198] Ensure Schlage config entry uniqueness (#131732) Co-authored-by: Joost Lekkerkerker --- .../components/schlage/config_flow.py | 1 + tests/components/schlage/test_config_flow.py | 28 +++++++++++++++++++ 2 files changed, 29 insertions(+) diff --git a/homeassistant/components/schlage/config_flow.py b/homeassistant/components/schlage/config_flow.py index f359f7dda71..6e8f94473dd 100644 --- a/homeassistant/components/schlage/config_flow.py +++ b/homeassistant/components/schlage/config_flow.py @@ -40,6 +40,7 @@ class SchlageConfigFlow(ConfigFlow, domain=DOMAIN): return self._show_user_form(errors) await self.async_set_unique_id(user_id) + 
self._abort_if_unique_id_configured() return self.async_create_entry( title=username, data={ diff --git a/tests/components/schlage/test_config_flow.py b/tests/components/schlage/test_config_flow.py index 88b5f113863..3161ebe4097 100644 --- a/tests/components/schlage/test_config_flow.py +++ b/tests/components/schlage/test_config_flow.py @@ -12,6 +12,8 @@ from homeassistant.data_entry_flow import FlowResultType from . import MockSchlageConfigEntry +from tests.common import MockConfigEntry + pytestmark = pytest.mark.usefixtures("mock_setup_entry") @@ -54,6 +56,32 @@ async def test_form( assert len(mock_setup_entry.mock_calls) == 1 +async def test_form_requires_unique_id( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_pyschlage_auth: Mock, +) -> None: + """Test entries have unique ids.""" + init_result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert init_result["type"] is FlowResultType.FORM + assert init_result["errors"] == {} + + create_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + { + "username": "test-username", + "password": "test-password", + }, + ) + await hass.async_block_till_done() + + mock_pyschlage_auth.authenticate.assert_called_once_with() + assert create_result["type"] is FlowResultType.ABORT + assert create_result["reason"] == "already_configured" + + async def test_form_invalid_auth( hass: HomeAssistant, mock_pyschlage_auth: Mock ) -> None: From 13e9f1935dabf3203ca61f9f6ec996c3940f586a Mon Sep 17 00:00:00 2001 From: Tom Date: Mon, 2 Dec 2024 15:21:03 +0100 Subject: [PATCH 0161/1198] Record Plugwise Quality Scale (#131888) Co-authored-by: Joost Lekkerkerker --- .../components/plugwise/quality_scale.yaml | 113 ++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 113 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/plugwise/quality_scale.yaml diff --git a/homeassistant/components/plugwise/quality_scale.yaml b/homeassistant/components/plugwise/quality_scale.yaml new file mode 100644 index 00000000000..b61071a285d --- /dev/null +++ b/homeassistant/components/plugwise/quality_scale.yaml @@ -0,0 +1,113 @@ +rules: + ## Bronze + config-flow: done + test-before-configure: done + unique-config-entry: + status: todo + comment: Add tests preventing second entry for same device + config-flow-test-coverage: + status: todo + comment: Cover test_form and zeroconf + runtime-data: + status: todo + comment: Clean up test_init for testing internals + test-before-setup: done + appropriate-polling: + status: todo + comment: Clean up coordinator (L71) check for mypy happiness + entity-unique-id: done + has-entity-name: + status: todo + comment: Clean up climate (already in superclass) + entity-event-setup: done + dependency-transparency: done + action-setup: + status: todo + comment: Check if we have these, otherwise exempt + common-modules: + status: todo + comment: Verify entity for async_added_to_hass usage (discard?) 
+ docs-high-level-description: + status: todo + comment: Rewrite top section + docs-installation-instructions: + status: todo + comment: Docs PR 36087 + docs-removal-instructions: + status: todo + comment: Docs PR 36055 (done, but mark todo for benchmark) + docs-actions: done + brands: done + ## Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: + status: todo + comment: Climate exception on ValueError should be ServiceValidationError + reauthentication-flow: + status: exempt + comment: The hubs have a hardcoded `Smile ID` printed on the sticker used as password, it can not be changed + parallel-updates: + status: todo + comment: Using coordinator, but required due to mutable platform + test-coverage: + status: todo + comment: Consider using snapshots + consistency in setup calls + add numerical tests + use fixtures + integration-owner: done + docs-installation-parameters: + status: todo + comment: Docs PR 36087 (partial) + todo rewrite generically + docs-configuration-parameters: + status: exempt + comment: Plugwise has no options flow + ## Gold + entity-translations: + status: todo + comment: Clean up name where not needed, remove translation key on deviceclasses + entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: done + discovery: done + stale-devices: done + diagnostics: done + exception-translations: + status: todo + comment: Add coordinator, util and climate exceptions + icon-translations: done + reconfiguration-flow: + status: todo + comment: This integration does not have any reconfiguration steps (yet) investigate how/why + dynamic-devices: + status: todo + comment: Add missing logic to button for unloading and creation + discovery-update-info: done + repair-issues: + status: exempt + comment: This integration does not have repairs + docs-use-cases: + status: todo + comment: Check for completeness + docs-supported-devices: + status: todo + comment: The list is there but could be improved for readability + docs-supported-functions: + status: todo + comment: Check for completeness + docs-data-update: + status: todo + comment: Docs PR 36055 (done, but mark todo for benchmark) + docs-known-limitations: + status: todo + comment: Partial in 36087 but could be more elaborat + docs-troubleshooting: + status: todo + comment: Check for completeness + docs-examples: + status: todo + comment: Check for completeness + ## Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 734c6d57faf..4af580dca84 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -819,7 +819,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "plaato", "plant", "plex", - "plugwise", "plum_lightpad", "pocketcasts", "point", From 54c5d1002bb86585b4a075149822a05c5d661eab Mon Sep 17 00:00:00 2001 From: Josef Zweck <24647999+zweckj@users.noreply.github.com> Date: Mon, 2 Dec 2024 15:27:44 +0100 Subject: [PATCH 0162/1198] Set connections on device for acaia (#132064) --- homeassistant/components/acaia/entity.py | 7 ++++++- tests/components/acaia/snapshots/test_init.ambr | 4 ++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/acaia/entity.py b/homeassistant/components/acaia/entity.py index db01b414b99..bef1ac313ca 100644 --- a/homeassistant/components/acaia/entity.py +++ b/homeassistant/components/acaia/entity.py @@ -2,7 +2,11 @@ from 
dataclasses import dataclass -from homeassistant.helpers.device_registry import DeviceInfo, format_mac +from homeassistant.helpers.device_registry import ( + CONNECTION_BLUETOOTH, + DeviceInfo, + format_mac, +) from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -33,6 +37,7 @@ class AcaiaEntity(CoordinatorEntity[AcaiaCoordinator]): manufacturer="Acaia", model=self._scale.model, suggested_area="Kitchen", + connections={(CONNECTION_BLUETOOTH, self._scale.mac)}, ) @property diff --git a/tests/components/acaia/snapshots/test_init.ambr b/tests/components/acaia/snapshots/test_init.ambr index 1cc3d8dbbc0..7011b20f68c 100644 --- a/tests/components/acaia/snapshots/test_init.ambr +++ b/tests/components/acaia/snapshots/test_init.ambr @@ -5,6 +5,10 @@ 'config_entries': , 'configuration_url': None, 'connections': set({ + tuple( + 'bluetooth', + 'aa:bb:cc:dd:ee:ff', + ), }), 'disabled_by': None, 'entry_type': None, From d7cdb357dc27547b85b48febbeef5b1d8f811db6 Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Mon, 2 Dec 2024 15:39:44 +0100 Subject: [PATCH 0163/1198] Add Reolink quality scale yaml (#131123) Co-authored-by: Joost Lekkerkerker --- .../components/reolink/quality_scale.yaml | 71 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 71 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/reolink/quality_scale.yaml diff --git a/homeassistant/components/reolink/quality_scale.yaml b/homeassistant/components/reolink/quality_scale.yaml new file mode 100644 index 00000000000..540cf19e22a --- /dev/null +++ b/homeassistant/components/reolink/quality_scale.yaml @@ -0,0 +1,71 @@ +rules: + # Bronze + action-setup: + status: done + comment: | + play_chime service is setup in async_setup + ptz_move service is setup in async_setup_entry since it is a entity_service + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: done + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: done + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: + status: done + comment: | + Coordinators are used and asyncio mutex locks ensure safe operation in the upstream lib + Parallel_update=0 set on all platforms + reauthentication-flow: done + test-coverage: done + # Gold + devices: done + diagnostics: done + discovery-update-info: done + discovery: done + docs-data-update: done + docs-examples: done + docs-known-limitations: done + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: done + docs-use-cases: done + dynamic-devices: done + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: done + repair-issues: done + stale-devices: + status: done + comment: | + For standalone cameras this does not apply: the integration should be removed. 
+ For cameras connected to a NVR/Hub: the entities of a device are marked unavailable when power is unplugged. They can be removed using async_remove_config_entry_device. + Chimes can be uncoupled from the doorbell and removed from HA using async_remove_config_entry_device + Automatic removal lead to many user issues when a device was temporarily out of wifi range or disconnected from power, so not implemented anymore. + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 4af580dca84..4f3c7ea7cbc 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -876,7 +876,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "remember_the_milk", "remote_rpi_gpio", "renson", - "reolink", "repetier", "rest", "rest_command", From e52182940b148dedaeae4391d1f6d77f87d65952 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Mon, 2 Dec 2024 13:09:35 -0600 Subject: [PATCH 0164/1198] Bump hassil and intents (#132092) --- homeassistant/components/conversation/manifest.json | 2 +- homeassistant/package_constraints.txt | 4 ++-- requirements_all.txt | 4 ++-- requirements_test_all.txt | 4 ++-- script/hassfest/docker/Dockerfile | 2 +- tests/components/conversation/snapshots/test_http.ambr | 4 ++-- tests/testing_config/custom_sentences/en/beer.yaml | 4 ++-- 7 files changed, 12 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index 26265a37cce..2d2f2f58a3a 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", "quality_scale": "internal", - "requirements": ["hassil==2.0.4", "home-assistant-intents==2024.11.27"] + "requirements": ["hassil==2.0.5", "home-assistant-intents==2024.12.2"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index d85fa4293a3..197be108f2e 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -32,10 +32,10 @@ go2rtc-client==0.1.1 ha-ffmpeg==3.2.2 habluetooth==3.6.0 hass-nabucasa==0.85.0 -hassil==2.0.4 +hassil==2.0.5 home-assistant-bluetooth==1.13.0 home-assistant-frontend==20241127.1 -home-assistant-intents==2024.11.27 +home-assistant-intents==2024.12.2 httpx==0.27.2 ifaddr==0.2.0 Jinja2==3.1.4 diff --git a/requirements_all.txt b/requirements_all.txt index 4ef04b87b49..c87f022392d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1093,7 +1093,7 @@ hass-nabucasa==0.85.0 hass-splunk==0.1.1 # homeassistant.components.conversation -hassil==2.0.4 +hassil==2.0.5 # homeassistant.components.jewish_calendar hdate==0.11.1 @@ -1130,7 +1130,7 @@ holidays==0.61 home-assistant-frontend==20241127.1 # homeassistant.components.conversation -home-assistant-intents==2024.11.27 +home-assistant-intents==2024.12.2 # homeassistant.components.home_connect homeconnect==0.8.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 73439609bd5..56285925739 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -928,7 +928,7 @@ habluetooth==3.6.0 hass-nabucasa==0.85.0 # homeassistant.components.conversation -hassil==2.0.4 +hassil==2.0.5 # homeassistant.components.jewish_calendar hdate==0.11.1 @@ -956,7 +956,7 @@ holidays==0.61 
home-assistant-frontend==20241127.1 # homeassistant.components.conversation -home-assistant-intents==2024.11.27 +home-assistant-intents==2024.12.2 # homeassistant.components.home_connect homeconnect==0.8.0 diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index b6fbbdd1172..38b8ba5e8d0 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -23,7 +23,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.5.4,source=/uv,target=/bin/uv \ -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ -r /usr/src/homeassistant/requirements.txt \ stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.1 \ - PyTurboJPEG==1.7.5 go2rtc-client==0.1.1 ha-ffmpeg==3.2.2 hassil==2.0.4 home-assistant-intents==2024.11.27 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 + PyTurboJPEG==1.7.5 go2rtc-client==0.1.1 ha-ffmpeg==3.2.2 hassil==2.0.5 home-assistant-intents==2024.12.2 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 LABEL "name"="hassfest" LABEL "maintainer"="Home Assistant " diff --git a/tests/components/conversation/snapshots/test_http.ambr b/tests/components/conversation/snapshots/test_http.ambr index 966abd63d78..a3edd4fa51c 100644 --- a/tests/components/conversation/snapshots/test_http.ambr +++ b/tests/components/conversation/snapshots/test_http.ambr @@ -535,7 +535,7 @@ 'name': 'HassTurnOn', }), 'match': True, - 'sentence_template': ' on [all] in ', + 'sentence_template': ' on [] ', 'slots': dict({ 'area': 'kitchen', 'domain': 'light', @@ -606,7 +606,7 @@ 'name': 'OrderBeer', }), 'match': True, - 'sentence_template': "I'd like to order a {beer_style} [please]", + 'sentence_template': "[I'd like to ]order a {beer_style} [please]", 'slots': dict({ 'beer_style': 'lager', }), diff --git a/tests/testing_config/custom_sentences/en/beer.yaml b/tests/testing_config/custom_sentences/en/beer.yaml index f318e0221b2..7222ffcb0ca 100644 --- a/tests/testing_config/custom_sentences/en/beer.yaml +++ b/tests/testing_config/custom_sentences/en/beer.yaml @@ -3,11 +3,11 @@ intents: OrderBeer: data: - sentences: - - "I'd like to order a {beer_style} [please]" + - "[I'd like to ]order a {beer_style} [please]" OrderFood: data: - sentences: - - "I'd like to order {food_name:name} [please]" + - "[I'd like to ]order {food_name:name} [please]" lists: beer_style: values: From 32b8c8985e6e25bd83570e4ec6e5ad7c7128b33d Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Mon, 2 Dec 2024 21:41:13 +0100 Subject: [PATCH 0165/1198] Fix type hints in IronOS coordinators (#132107) Fix coordinators return type in IronOS --- homeassistant/components/iron_os/coordinator.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/iron_os/coordinator.py b/homeassistant/components/iron_os/coordinator.py index cfd40d66ac7..690dd6f1893 100644 --- a/homeassistant/components/iron_os/coordinator.py +++ b/homeassistant/components/iron_os/coordinator.py @@ -72,7 +72,7 @@ class IronOSBaseCoordinator[_DataT](DataUpdateCoordinator[_DataT]): raise UpdateFailed("Cannot connect to device") from e -class IronOSLiveDataCoordinator(IronOSBaseCoordinator): +class IronOSLiveDataCoordinator(IronOSBaseCoordinator[LiveDataResponse]): """IronOS coordinator.""" def __init__(self, hass: HomeAssistant, device: Pynecil) -> None: @@ -124,7 +124,7 @@ class IronOSFirmwareUpdateCoordinator(DataUpdateCoordinator[GitHubReleaseModel]) return release.data -class IronOSSettingsCoordinator(IronOSBaseCoordinator): +class 
IronOSSettingsCoordinator(IronOSBaseCoordinator[SettingsDataResponse]): """IronOS coordinator.""" def __init__(self, hass: HomeAssistant, device: Pynecil) -> None: From 755d36d82fc92d3f4e8f04a4fcc42de5c498382e Mon Sep 17 00:00:00 2001 From: Jan-Philipp Benecke Date: Mon, 2 Dec 2024 21:54:57 +0100 Subject: [PATCH 0166/1198] Mark trend sensor unavailable when source entity is unknown/unavailable (#132080) --- .../components/trend/binary_sensor.py | 9 +++- tests/components/trend/test_binary_sensor.py | 44 ++++++++++++++++++- 2 files changed, 50 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/trend/binary_sensor.py b/homeassistant/components/trend/binary_sensor.py index 681680f180f..9691ecf0744 100644 --- a/homeassistant/components/trend/binary_sensor.py +++ b/homeassistant/components/trend/binary_sensor.py @@ -227,10 +227,15 @@ class SensorTrend(BinarySensorEntity, RestoreEntity): state = new_state.attributes.get(self._attribute) else: state = new_state.state - if state not in (STATE_UNKNOWN, STATE_UNAVAILABLE): + + if state in (STATE_UNKNOWN, STATE_UNAVAILABLE): + self._attr_available = False + else: + self._attr_available = True sample = (new_state.last_updated.timestamp(), float(state)) # type: ignore[arg-type] self.samples.append(sample) - self.async_schedule_update_ha_state(True) + + self.async_schedule_update_ha_state(True) except (ValueError, TypeError) as ex: _LOGGER.error(ex) diff --git a/tests/components/trend/test_binary_sensor.py b/tests/components/trend/test_binary_sensor.py index ad85f65a9fc..4a829bb86d2 100644 --- a/tests/components/trend/test_binary_sensor.py +++ b/tests/components/trend/test_binary_sensor.py @@ -9,7 +9,7 @@ import pytest from homeassistant import setup from homeassistant.components.trend.const import DOMAIN -from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN +from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant, State from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component @@ -395,3 +395,45 @@ async def test_device_id( trend_entity = entity_registry.async_get("binary_sensor.trend") assert trend_entity is not None assert trend_entity.device_id == source_entity.device_id + + +@pytest.mark.parametrize( + "error_state", + [ + STATE_UNKNOWN, + STATE_UNAVAILABLE, + ], +) +async def test_unavailable_source( + hass: HomeAssistant, + config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + setup_component: ComponentSetup, + error_state: str, +) -> None: + """Test for unavailable source.""" + await setup_component( + { + "sample_duration": 10000, + "min_gradient": 1, + "max_samples": 25, + "min_samples": 5, + }, + ) + + for val in (10, 20, 30, 40, 50, 60): + freezer.tick(timedelta(seconds=2)) + hass.states.async_set("sensor.test_state", val) + await hass.async_block_till_done() + + assert hass.states.get("binary_sensor.test_trend_sensor").state == "on" + + hass.states.async_set("sensor.test_state", error_state) + await hass.async_block_till_done() + + assert hass.states.get("binary_sensor.test_trend_sensor").state == STATE_UNAVAILABLE + + hass.states.async_set("sensor.test_state", 50) + await hass.async_block_till_done() + + assert hass.states.get("binary_sensor.test_trend_sensor").state == "on" From e1772d25f27d828b56961d957dd837da459341d1 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 2 Dec 2024 21:56:13 +0100 Subject: [PATCH 
0167/1198] Cleanup dead code in renault coordinator (#132078) --- homeassistant/components/renault/coordinator.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/renault/coordinator.py b/homeassistant/components/renault/coordinator.py index d7aed6e3560..988349e76f4 100644 --- a/homeassistant/components/renault/coordinator.py +++ b/homeassistant/components/renault/coordinator.py @@ -27,6 +27,8 @@ _PARALLEL_SEMAPHORE = asyncio.Semaphore(1) class RenaultDataUpdateCoordinator(DataUpdateCoordinator[T]): """Handle vehicle communication with Renault servers.""" + update_method: Callable[[], Awaitable[T]] + def __init__( self, hass: HomeAssistant, @@ -50,8 +52,6 @@ class RenaultDataUpdateCoordinator(DataUpdateCoordinator[T]): async def _async_update_data(self) -> T: """Fetch the latest data from the source.""" - if self.update_method is None: - raise NotImplementedError("Update method not implemented") try: async with _PARALLEL_SEMAPHORE: data = await self.update_method() From 0a977d070b0d1325e0df339238beac8f33994085 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 2 Dec 2024 21:57:45 +0100 Subject: [PATCH 0168/1198] Improve Renault reauth test (#132077) --- tests/components/renault/test_init.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/components/renault/test_init.py b/tests/components/renault/test_init.py index 0f9d9cbaf5b..a71192dda47 100644 --- a/tests/components/renault/test_init.py +++ b/tests/components/renault/test_init.py @@ -9,7 +9,7 @@ import pytest from renault_api.gigya.exceptions import GigyaException, InvalidCredentialsException from homeassistant.components.renault.const import DOMAIN -from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry, ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component @@ -62,6 +62,11 @@ async def test_setup_entry_bad_password( assert len(hass.config_entries.async_entries(DOMAIN)) == 1 assert config_entry.state is ConfigEntryState.SETUP_ERROR + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + assert flows[0]["context"]["source"] == SOURCE_REAUTH + assert flows[0]["context"]["entry_id"] == config_entry.entry_id + @pytest.mark.parametrize("side_effect", [aiohttp.ClientConnectionError, GigyaException]) async def test_setup_entry_exception( From db430beb5b6d1acb1b4b5b200d8d3a32f843ac97 Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Mon, 2 Dec 2024 22:18:24 +0100 Subject: [PATCH 0169/1198] Fix Reolink dispatcher ID for onvif fallback (#131953) --- .../components/reolink/binary_sensor.py | 4 ++-- homeassistant/components/reolink/host.py | 10 ++++---- tests/components/reolink/test_host.py | 23 +++++++++++++++---- 3 files changed, 26 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/reolink/binary_sensor.py b/homeassistant/components/reolink/binary_sensor.py index c59c1e7785f..c168c97e809 100644 --- a/homeassistant/components/reolink/binary_sensor.py +++ b/homeassistant/components/reolink/binary_sensor.py @@ -176,14 +176,14 @@ class ReolinkPushBinarySensorEntity(ReolinkBinarySensorEntity): self.async_on_remove( async_dispatcher_connect( self.hass, - f"{self._host.webhook_id}_{self._channel}", + f"{self._host.unique_id}_{self._channel}", self._async_handle_event, ) ) self.async_on_remove( 
async_dispatcher_connect( self.hass, - f"{self._host.webhook_id}_all", + f"{self._host.unique_id}_all", self._async_handle_event, ) ) diff --git a/homeassistant/components/reolink/host.py b/homeassistant/components/reolink/host.py index a8e1de07642..97d888c0323 100644 --- a/homeassistant/components/reolink/host.py +++ b/homeassistant/components/reolink/host.py @@ -723,7 +723,7 @@ class ReolinkHost: self._hass, POLL_INTERVAL_NO_PUSH, self._poll_job ) - self._signal_write_ha_state(None) + self._signal_write_ha_state() async def handle_webhook( self, hass: HomeAssistant, webhook_id: str, request: Request @@ -782,7 +782,7 @@ class ReolinkHost: "Could not poll motion state after losing connection during receiving ONVIF event" ) return - async_dispatcher_send(hass, f"{webhook_id}_all", {}) + self._signal_write_ha_state() return message = data.decode("utf-8") @@ -795,14 +795,14 @@ class ReolinkHost: self._signal_write_ha_state(channels) - def _signal_write_ha_state(self, channels: list[int] | None) -> None: + def _signal_write_ha_state(self, channels: list[int] | None = None) -> None: """Update the binary sensors with async_write_ha_state.""" if channels is None: - async_dispatcher_send(self._hass, f"{self.webhook_id}_all", {}) + async_dispatcher_send(self._hass, f"{self.unique_id}_all", {}) return for channel in channels: - async_dispatcher_send(self._hass, f"{self.webhook_id}_{channel}", {}) + async_dispatcher_send(self._hass, f"{self.unique_id}_{channel}", {}) @property def event_connection(self) -> str: diff --git a/tests/components/reolink/test_host.py b/tests/components/reolink/test_host.py index 2286ca5d266..c777e4064f0 100644 --- a/tests/components/reolink/test_host.py +++ b/tests/components/reolink/test_host.py @@ -21,13 +21,15 @@ from homeassistant.components.reolink.host import ( ) from homeassistant.components.webhook import async_handle_webhook from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import Platform +from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.network import NoURLAvailableError from homeassistant.util.aiohttp import MockRequest +from .conftest import TEST_NVR_NAME + from tests.common import MockConfigEntry, async_fire_time_changed from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -92,23 +94,32 @@ async def test_webhook_callback( entity_registry: er.EntityRegistry, ) -> None: """Test webhook callback with motion sensor.""" - assert await hass.config_entries.async_setup(config_entry.entry_id) + reolink_connect.motion_detected.return_value = False + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BINARY_SENSOR]): + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.LOADED + entity_id = f"{Platform.BINARY_SENSOR}.{TEST_NVR_NAME}_motion" webhook_id = config_entry.runtime_data.host.webhook_id + unique_id = config_entry.runtime_data.host.unique_id signal_all = MagicMock() signal_ch = MagicMock() - async_dispatcher_connect(hass, f"{webhook_id}_all", signal_all) - async_dispatcher_connect(hass, f"{webhook_id}_0", signal_ch) + async_dispatcher_connect(hass, f"{unique_id}_all", signal_all) + async_dispatcher_connect(hass, f"{unique_id}_0", 
signal_ch) client = await hass_client_no_auth() + assert hass.states.get(entity_id).state == STATE_OFF + # test webhook callback success all channels + reolink_connect.motion_detected.return_value = True reolink_connect.ONVIF_event_callback.return_value = None await client.post(f"/api/webhook/{webhook_id}") signal_all.assert_called_once() + assert hass.states.get(entity_id).state == STATE_ON freezer.tick(timedelta(seconds=FIRST_ONVIF_TIMEOUT)) async_fire_time_changed(hass) @@ -120,10 +131,14 @@ async def test_webhook_callback( await client.post(f"/api/webhook/{webhook_id}") signal_all.assert_not_called() + assert hass.states.get(entity_id).state == STATE_ON + # test webhook callback success single channel + reolink_connect.motion_detected.return_value = False reolink_connect.ONVIF_event_callback.return_value = [0] await client.post(f"/api/webhook/{webhook_id}", data="test_data") signal_ch.assert_called_once() + assert hass.states.get(entity_id).state == STATE_OFF # test webhook callback single channel with error in event callback signal_ch.reset_mock() From 0e5b03b343f0e6bd5e6108ba84ad8a6bf3408099 Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Mon, 2 Dec 2024 22:39:48 +0100 Subject: [PATCH 0170/1198] Rename 'Reolink IP NVR/camera' to 'Reolink' (#132113) --- homeassistant/components/reolink/manifest.json | 2 +- homeassistant/generated/integrations.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/reolink/manifest.json b/homeassistant/components/reolink/manifest.json index 913864a92fa..72bf21ccfd9 100644 --- a/homeassistant/components/reolink/manifest.json +++ b/homeassistant/components/reolink/manifest.json @@ -1,6 +1,6 @@ { "domain": "reolink", - "name": "Reolink IP NVR/camera", + "name": "Reolink", "codeowners": ["@starkillerOG"], "config_flow": true, "dependencies": ["webhook"], diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 9fee6abb894..ae7e0dd6c59 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -5120,7 +5120,7 @@ "iot_class": "local_polling" }, "reolink": { - "name": "Reolink IP NVR/camera", + "name": "Reolink", "integration_type": "hub", "config_flow": true, "iot_class": "local_push" From 5dadabe50cfc526bfbce7cfecb1cbd099132fe1e Mon Sep 17 00:00:00 2001 From: G Johansson Date: Mon, 2 Dec 2024 23:11:44 +0100 Subject: [PATCH 0171/1198] Add data description to Nord pool config flow (#132115) --- homeassistant/components/nordpool/strings.json | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/homeassistant/components/nordpool/strings.json b/homeassistant/components/nordpool/strings.json index 1a4551fe61a..96c22633c9e 100644 --- a/homeassistant/components/nordpool/strings.json +++ b/homeassistant/components/nordpool/strings.json @@ -12,12 +12,20 @@ "data": { "currency": "Currency", "areas": "Areas" + }, + "data_description": { + "currency": "Select currency to display prices in, EUR is the base currency.", + "areas": "Areas to display prices for according to Nordpool market areas." 
} }, "reconfigure": { "data": { "currency": "[%key:component::nordpool::config::step::user::data::currency%]", "areas": "[%key:component::nordpool::config::step::user::data::areas%]" + }, + "data_description": { + "currency": "[%key:component::nordpool::config::step::user::data_description::currency%]", + "areas": "[%key:component::nordpool::config::step::user::data_description::areas%]" } } } From 03be1b9f38a399245fe56668f362e39263f0c73e Mon Sep 17 00:00:00 2001 From: G Johansson Date: Tue, 3 Dec 2024 00:12:49 +0100 Subject: [PATCH 0172/1198] Drop operating mode property in sharkiq (#132097) --- homeassistant/components/sharkiq/vacuum.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/sharkiq/vacuum.py b/homeassistant/components/sharkiq/vacuum.py index 8f0547980c3..997d229e6b9 100644 --- a/homeassistant/components/sharkiq/vacuum.py +++ b/homeassistant/components/sharkiq/vacuum.py @@ -150,12 +150,6 @@ class SharkVacuumEntity(CoordinatorEntity[SharkIqUpdateCoordinator], StateVacuum return None return self.sharkiq.error_text - @property - def operating_mode(self) -> str | None: - """Operating mode.""" - op_mode = self.sharkiq.get_property_value(Properties.OPERATING_MODE) - return OPERATING_STATE_MAP.get(op_mode) - @property def recharging_to_resume(self) -> int | None: """Return True if vacuum set to recharge and resume cleaning.""" @@ -171,7 +165,8 @@ class SharkVacuumEntity(CoordinatorEntity[SharkIqUpdateCoordinator], StateVacuum """ if self.sharkiq.get_property_value(Properties.CHARGING_STATUS): return STATE_DOCKED - return self.operating_mode + op_mode = self.sharkiq.get_property_value(Properties.OPERATING_MODE) + return OPERATING_STATE_MAP.get(op_mode) @property def available(self) -> bool: From 101bb091baa3ed44a2b7d64a4efa34d7300ad658 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Tue, 3 Dec 2024 00:08:51 -0500 Subject: [PATCH 0173/1198] Fix bad hassil tests on CI (#132132) * Fix CI * Fix whitespace --------- Co-authored-by: Michael Hansen --- .../conversation/snapshots/test_default_agent.ambr | 6 +++--- tests/components/conversation/test_default_agent.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/components/conversation/snapshots/test_default_agent.ambr b/tests/components/conversation/snapshots/test_default_agent.ambr index b1f2ea0db75..f1e220b10b2 100644 --- a/tests/components/conversation/snapshots/test_default_agent.ambr +++ b/tests/components/conversation/snapshots/test_default_agent.ambr @@ -308,7 +308,7 @@ 'speech': dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called late added light', + 'speech': 'Sorry, I am not aware of any area called late added', }), }), }), @@ -378,7 +378,7 @@ 'speech': dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called kitchen light', + 'speech': 'Sorry, I am not aware of any area called kitchen', }), }), }), @@ -428,7 +428,7 @@ 'speech': dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called renamed light', + 'speech': 'Sorry, I am not aware of any area called renamed', }), }), }), diff --git a/tests/components/conversation/test_default_agent.py b/tests/components/conversation/test_default_agent.py index 20fa41944f2..dab1e61ab81 100644 --- a/tests/components/conversation/test_default_agent.py +++ b/tests/components/conversation/test_default_agent.py @@ -2930,7 +2930,7 @@ async def test_intent_cache_fuzzy(hass: HomeAssistant) -> 
None: ) result = await agent.async_recognize_intent(user_input) assert result is not None - assert result.unmatched_entities["name"].text == "test light" + assert result.unmatched_entities["area"].text == "test " # Mark this result so we know it is from cache next time mark = "_from_cache" From bb7dc079ce6160ebdd9828864ac9f8df255083b7 Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Tue, 3 Dec 2024 09:11:44 +0100 Subject: [PATCH 0174/1198] Remove unneeded step from reauth in Reolink (#132143) --- homeassistant/components/reolink/config_flow.py | 9 ++------- homeassistant/components/reolink/strings.json | 4 ---- tests/components/reolink/test_config_flow.py | 8 -------- 3 files changed, 2 insertions(+), 19 deletions(-) diff --git a/homeassistant/components/reolink/config_flow.py b/homeassistant/components/reolink/config_flow.py index 1909545714f..c28e076aab4 100644 --- a/homeassistant/components/reolink/config_flow.py +++ b/homeassistant/components/reolink/config_flow.py @@ -128,13 +128,8 @@ class ReolinkFlowHandler(ConfigFlow, domain=DOMAIN): async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: - """Dialog that informs the user that reauth is required.""" - if user_input is not None: - return await self.async_step_user() - placeholders = {"name": self.context["title_placeholders"]["name"]} - return self.async_show_form( - step_id="reauth_confirm", description_placeholders=placeholders - ) + """Perform a reauthentication.""" + return await self.async_step_user() async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/reolink/strings.json b/homeassistant/components/reolink/strings.json index 3fe7fe14ec5..726a9fab2f3 100644 --- a/homeassistant/components/reolink/strings.json +++ b/homeassistant/components/reolink/strings.json @@ -18,10 +18,6 @@ "username": "Username to login to the Reolink device itself. Not the Reolink cloud account.", "password": "Password to login to the Reolink device itself. Not the Reolink cloud account." } - }, - "reauth_confirm": { - "title": "[%key:common::config_flow::title::reauth%]", - "description": "The Reolink integration needs to re-authenticate your connection details" } }, "error": { diff --git a/tests/components/reolink/test_config_flow.py b/tests/components/reolink/test_config_flow.py index 19ad885f638..b358670ac6b 100644 --- a/tests/components/reolink/test_config_flow.py +++ b/tests/components/reolink/test_config_flow.py @@ -310,14 +310,6 @@ async def test_reauth(hass: HomeAssistant, mock_setup_entry: MagicMock) -> None: result = await config_entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {}, - ) - assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert result["errors"] == {} From 39b2cf6ed27d32f70c1b259ed406f6e9b654709c Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 3 Dec 2024 09:37:33 +0100 Subject: [PATCH 0175/1198] Revert "bump hassil and intents" (#132138) * Revert "Fix bad hassil tests on CI (#132132)" This reverts commit 101bb091baa3ed44a2b7d64a4efa34d7300ad658. * Revert "Bump hassil and intents (#132092)" This reverts commit e52182940b148dedaeae4391d1f6d77f87d65952. 
--- homeassistant/components/conversation/manifest.json | 2 +- homeassistant/package_constraints.txt | 4 ++-- requirements_all.txt | 4 ++-- requirements_test_all.txt | 4 ++-- script/hassfest/docker/Dockerfile | 2 +- .../conversation/snapshots/test_default_agent.ambr | 6 +++--- tests/components/conversation/snapshots/test_http.ambr | 4 ++-- tests/components/conversation/test_default_agent.py | 2 +- tests/testing_config/custom_sentences/en/beer.yaml | 4 ++-- 9 files changed, 16 insertions(+), 16 deletions(-) diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index 2d2f2f58a3a..26265a37cce 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", "quality_scale": "internal", - "requirements": ["hassil==2.0.5", "home-assistant-intents==2024.12.2"] + "requirements": ["hassil==2.0.4", "home-assistant-intents==2024.11.27"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 197be108f2e..d85fa4293a3 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -32,10 +32,10 @@ go2rtc-client==0.1.1 ha-ffmpeg==3.2.2 habluetooth==3.6.0 hass-nabucasa==0.85.0 -hassil==2.0.5 +hassil==2.0.4 home-assistant-bluetooth==1.13.0 home-assistant-frontend==20241127.1 -home-assistant-intents==2024.12.2 +home-assistant-intents==2024.11.27 httpx==0.27.2 ifaddr==0.2.0 Jinja2==3.1.4 diff --git a/requirements_all.txt b/requirements_all.txt index c87f022392d..4ef04b87b49 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1093,7 +1093,7 @@ hass-nabucasa==0.85.0 hass-splunk==0.1.1 # homeassistant.components.conversation -hassil==2.0.5 +hassil==2.0.4 # homeassistant.components.jewish_calendar hdate==0.11.1 @@ -1130,7 +1130,7 @@ holidays==0.61 home-assistant-frontend==20241127.1 # homeassistant.components.conversation -home-assistant-intents==2024.12.2 +home-assistant-intents==2024.11.27 # homeassistant.components.home_connect homeconnect==0.8.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 56285925739..73439609bd5 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -928,7 +928,7 @@ habluetooth==3.6.0 hass-nabucasa==0.85.0 # homeassistant.components.conversation -hassil==2.0.5 +hassil==2.0.4 # homeassistant.components.jewish_calendar hdate==0.11.1 @@ -956,7 +956,7 @@ holidays==0.61 home-assistant-frontend==20241127.1 # homeassistant.components.conversation -home-assistant-intents==2024.12.2 +home-assistant-intents==2024.11.27 # homeassistant.components.home_connect homeconnect==0.8.0 diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index 38b8ba5e8d0..b6fbbdd1172 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -23,7 +23,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.5.4,source=/uv,target=/bin/uv \ -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ -r /usr/src/homeassistant/requirements.txt \ stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.1 \ - PyTurboJPEG==1.7.5 go2rtc-client==0.1.1 ha-ffmpeg==3.2.2 hassil==2.0.5 home-assistant-intents==2024.12.2 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 + PyTurboJPEG==1.7.5 go2rtc-client==0.1.1 ha-ffmpeg==3.2.2 hassil==2.0.4 home-assistant-intents==2024.11.27 mutagen==1.47.0 
pymicro-vad==1.0.1 pyspeex-noise==1.0.2 LABEL "name"="hassfest" LABEL "maintainer"="Home Assistant " diff --git a/tests/components/conversation/snapshots/test_default_agent.ambr b/tests/components/conversation/snapshots/test_default_agent.ambr index f1e220b10b2..b1f2ea0db75 100644 --- a/tests/components/conversation/snapshots/test_default_agent.ambr +++ b/tests/components/conversation/snapshots/test_default_agent.ambr @@ -308,7 +308,7 @@ 'speech': dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any area called late added', + 'speech': 'Sorry, I am not aware of any device called late added light', }), }), }), @@ -378,7 +378,7 @@ 'speech': dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any area called kitchen', + 'speech': 'Sorry, I am not aware of any device called kitchen light', }), }), }), @@ -428,7 +428,7 @@ 'speech': dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any area called renamed', + 'speech': 'Sorry, I am not aware of any device called renamed light', }), }), }), diff --git a/tests/components/conversation/snapshots/test_http.ambr b/tests/components/conversation/snapshots/test_http.ambr index a3edd4fa51c..966abd63d78 100644 --- a/tests/components/conversation/snapshots/test_http.ambr +++ b/tests/components/conversation/snapshots/test_http.ambr @@ -535,7 +535,7 @@ 'name': 'HassTurnOn', }), 'match': True, - 'sentence_template': ' on [] ', + 'sentence_template': ' on [all] in ', 'slots': dict({ 'area': 'kitchen', 'domain': 'light', @@ -606,7 +606,7 @@ 'name': 'OrderBeer', }), 'match': True, - 'sentence_template': "[I'd like to ]order a {beer_style} [please]", + 'sentence_template': "I'd like to order a {beer_style} [please]", 'slots': dict({ 'beer_style': 'lager', }), diff --git a/tests/components/conversation/test_default_agent.py b/tests/components/conversation/test_default_agent.py index dab1e61ab81..20fa41944f2 100644 --- a/tests/components/conversation/test_default_agent.py +++ b/tests/components/conversation/test_default_agent.py @@ -2930,7 +2930,7 @@ async def test_intent_cache_fuzzy(hass: HomeAssistant) -> None: ) result = await agent.async_recognize_intent(user_input) assert result is not None - assert result.unmatched_entities["area"].text == "test " + assert result.unmatched_entities["name"].text == "test light" # Mark this result so we know it is from cache next time mark = "_from_cache" diff --git a/tests/testing_config/custom_sentences/en/beer.yaml b/tests/testing_config/custom_sentences/en/beer.yaml index 7222ffcb0ca..f318e0221b2 100644 --- a/tests/testing_config/custom_sentences/en/beer.yaml +++ b/tests/testing_config/custom_sentences/en/beer.yaml @@ -3,11 +3,11 @@ intents: OrderBeer: data: - sentences: - - "[I'd like to ]order a {beer_style} [please]" + - "I'd like to order a {beer_style} [please]" OrderFood: data: - sentences: - - "[I'd like to ]order {food_name:name} [please]" + - "I'd like to order {food_name:name} [please]" lists: beer_style: values: From 3e2bac96e654432ada121f608605c6c49eabbd1b Mon Sep 17 00:00:00 2001 From: mvn23 Date: Tue, 3 Dec 2024 10:49:32 +0100 Subject: [PATCH 0176/1198] Move set_room_setpoint to opentherm_gw hub (#132152) --- homeassistant/components/opentherm_gw/__init__.py | 10 +++++++++- homeassistant/components/opentherm_gw/climate.py | 7 +------ 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/opentherm_gw/__init__.py b/homeassistant/components/opentherm_gw/__init__.py index 
5ce9d808b21..8c92c70ab49 100644 --- a/homeassistant/components/opentherm_gw/__init__.py +++ b/homeassistant/components/opentherm_gw/__init__.py @@ -47,6 +47,7 @@ from .const import ( CONF_CLIMATE, CONF_FLOOR_TEMP, CONF_PRECISION, + CONF_TEMPORARY_OVRD_MODE, CONNECTION_TIMEOUT, DATA_GATEWAYS, DATA_OPENTHERM_GW, @@ -105,6 +106,7 @@ PLATFORMS = [ async def options_updated(hass: HomeAssistant, entry: ConfigEntry) -> None: """Handle options update.""" gateway = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][entry.data[CONF_ID]] + gateway.options = entry.options async_dispatcher_send(hass, gateway.options_update_signal, entry) @@ -469,7 +471,7 @@ class OpenThermGatewayHub: self.device_path = config_entry.data[CONF_DEVICE] self.hub_id = config_entry.data[CONF_ID] self.name = config_entry.data[CONF_NAME] - self.climate_config = config_entry.options + self.options = config_entry.options self.config_entry_id = config_entry.entry_id self.update_signal = f"{DATA_OPENTHERM_GW}_{self.hub_id}_update" self.options_update_signal = f"{DATA_OPENTHERM_GW}_{self.hub_id}_options_update" @@ -565,3 +567,9 @@ class OpenThermGatewayHub: def connected(self): """Report whether or not we are connected to the gateway.""" return self.gateway.connection.connected + + async def set_room_setpoint(self, temp) -> float: + """Set the room temperature setpoint on the gateway. Return the new temperature.""" + return await self.gateway.set_target_temp( + temp, self.options.get(CONF_TEMPORARY_OVRD_MODE, True) + ) diff --git a/homeassistant/components/opentherm_gw/climate.py b/homeassistant/components/opentherm_gw/climate.py index 6edfeb35ec3..e93a76fe7b7 100644 --- a/homeassistant/components/opentherm_gw/climate.py +++ b/homeassistant/components/opentherm_gw/climate.py @@ -28,7 +28,6 @@ from . 
import OpenThermGatewayHub from .const import ( CONF_READ_PRECISION, CONF_SET_PRECISION, - CONF_TEMPORARY_OVRD_MODE, DATA_GATEWAYS, DATA_OPENTHERM_GW, THERMOSTAT_DEVICE_DESCRIPTION, @@ -102,14 +101,12 @@ class OpenThermClimate(OpenThermStatusEntity, ClimateEntity): if CONF_READ_PRECISION in options: self._attr_precision = options[CONF_READ_PRECISION] self._attr_target_temperature_step = options.get(CONF_SET_PRECISION) - self.temporary_ovrd_mode = options.get(CONF_TEMPORARY_OVRD_MODE, True) @callback def update_options(self, entry): """Update climate entity options.""" self._attr_precision = entry.options[CONF_READ_PRECISION] self._attr_target_temperature_step = entry.options[CONF_SET_PRECISION] - self.temporary_ovrd_mode = entry.options[CONF_TEMPORARY_OVRD_MODE] self.async_write_ha_state() async def async_added_to_hass(self) -> None: @@ -195,7 +192,5 @@ class OpenThermClimate(OpenThermStatusEntity, ClimateEntity): temp = float(kwargs[ATTR_TEMPERATURE]) if temp == self.target_temperature: return - self._new_target_temperature = await self._gateway.gateway.set_target_temp( - temp, self.temporary_ovrd_mode - ) + self._new_target_temperature = await self._gateway.set_room_setpoint(temp) self.async_write_ha_state() From 003d4d712a52d2f6fc1d1a76b7efa9b2ddf8d8b9 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 3 Dec 2024 11:31:54 +0100 Subject: [PATCH 0177/1198] Bump syrupy to 4.8.0 (#132134) --- requirements_test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_test.txt b/requirements_test.txt index bac3ba2e20c..34dcdfc1244 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -32,7 +32,7 @@ pytest-xdist==3.6.1 pytest==8.3.3 requests-mock==1.12.1 respx==0.21.1 -syrupy==4.7.2 +syrupy==4.8.0 tqdm==4.66.5 types-aiofiles==24.1.0.20240626 types-atomicwrites==1.4.5.1 From aeab8a0143b1688a0a01db485f590d1d81aff6b9 Mon Sep 17 00:00:00 2001 From: Tom Date: Tue, 3 Dec 2024 12:34:03 +0100 Subject: [PATCH 0178/1198] Plugwise fixes from quality review (#132158) --- homeassistant/components/plugwise/binary_sensor.py | 2 -- homeassistant/components/plugwise/climate.py | 2 -- homeassistant/components/plugwise/quality_scale.yaml | 8 ++------ homeassistant/components/plugwise/strings.json | 3 --- homeassistant/components/plugwise/switch.py | 1 - 5 files changed, 2 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/plugwise/binary_sensor.py b/homeassistant/components/plugwise/binary_sensor.py index fb271ea7264..f422d4facf3 100644 --- a/homeassistant/components/plugwise/binary_sensor.py +++ b/homeassistant/components/plugwise/binary_sensor.py @@ -34,7 +34,6 @@ class PlugwiseBinarySensorEntityDescription(BinarySensorEntityDescription): BINARY_SENSORS: tuple[PlugwiseBinarySensorEntityDescription, ...] = ( PlugwiseBinarySensorEntityDescription( key="low_battery", - translation_key="low_battery", device_class=BinarySensorDeviceClass.BATTERY, entity_category=EntityCategory.DIAGNOSTIC, ), @@ -56,7 +55,6 @@ BINARY_SENSORS: tuple[PlugwiseBinarySensorEntityDescription, ...] 
= ( PlugwiseBinarySensorEntityDescription( key="flame_state", translation_key="flame_state", - name="Flame state", entity_category=EntityCategory.DIAGNOSTIC, ), PlugwiseBinarySensorEntityDescription( diff --git a/homeassistant/components/plugwise/climate.py b/homeassistant/components/plugwise/climate.py index 242b0944782..06b8171a528 100644 --- a/homeassistant/components/plugwise/climate.py +++ b/homeassistant/components/plugwise/climate.py @@ -60,7 +60,6 @@ async def async_setup_entry( class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): """Representation of a Plugwise thermostat.""" - _attr_has_entity_name = True _attr_name = None _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key = DOMAIN @@ -75,7 +74,6 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): ) -> None: """Set up the Plugwise API.""" super().__init__(coordinator, device_id) - self._attr_extra_state_attributes = {} self._attr_unique_id = f"{device_id}-climate" self._devices = coordinator.data.devices diff --git a/homeassistant/components/plugwise/quality_scale.yaml b/homeassistant/components/plugwise/quality_scale.yaml index b61071a285d..0881e79c1c0 100644 --- a/homeassistant/components/plugwise/quality_scale.yaml +++ b/homeassistant/components/plugwise/quality_scale.yaml @@ -16,9 +16,7 @@ rules: status: todo comment: Clean up coordinator (L71) check for mypy happiness entity-unique-id: done - has-entity-name: - status: todo - comment: Clean up climate (already in superclass) + has-entity-name: done entity-event-setup: done dependency-transparency: done action-setup: @@ -62,9 +60,7 @@ rules: status: exempt comment: Plugwise has no options flow ## Gold - entity-translations: - status: todo - comment: Clean up name where not needed, remove translation key on deviceclasses + entity-translations: done entity-device-class: done devices: done entity-category: done diff --git a/homeassistant/components/plugwise/strings.json b/homeassistant/components/plugwise/strings.json index c09323f458b..f74fc036e2a 100644 --- a/homeassistant/components/plugwise/strings.json +++ b/homeassistant/components/plugwise/strings.json @@ -30,9 +30,6 @@ }, "entity": { "binary_sensor": { - "low_battery": { - "name": "Battery state" - }, "compressor_state": { "name": "Compressor state" }, diff --git a/homeassistant/components/plugwise/switch.py b/homeassistant/components/plugwise/switch.py index 744fc0a2b72..305518f4bef 100644 --- a/homeassistant/components/plugwise/switch.py +++ b/homeassistant/components/plugwise/switch.py @@ -48,7 +48,6 @@ SWITCHES: tuple[PlugwiseSwitchEntityDescription, ...] 
= ( PlugwiseSwitchEntityDescription( key="cooling_ena_switch", translation_key="cooling_ena_switch", - name="Cooling", entity_category=EntityCategory.CONFIG, ), ) From 50936b4e280a9e5d3f2dc4f66346c955e1c4139c Mon Sep 17 00:00:00 2001 From: Marcel van der Veldt Date: Tue, 3 Dec 2024 13:06:18 +0100 Subject: [PATCH 0179/1198] Add support for features changing at runtime in Matter integration (#129426) --- homeassistant/components/matter/adapter.py | 18 ++++--- .../components/matter/binary_sensor.py | 1 + homeassistant/components/matter/button.py | 1 + homeassistant/components/matter/const.py | 2 + homeassistant/components/matter/discovery.py | 24 ++++++++-- homeassistant/components/matter/entity.py | 39 ++++++++++++++- homeassistant/components/matter/lock.py | 1 - homeassistant/components/matter/models.py | 7 +++ .../matter/fixtures/nodes/door_lock.json | 2 +- .../matter/snapshots/test_binary_sensor.ambr | 47 ------------------- tests/components/matter/test_binary_sensor.py | 32 +++++++++++++ 11 files changed, 113 insertions(+), 61 deletions(-) diff --git a/homeassistant/components/matter/adapter.py b/homeassistant/components/matter/adapter.py index 475e4a44538..0ccd3e065ff 100644 --- a/homeassistant/components/matter/adapter.py +++ b/homeassistant/components/matter/adapter.py @@ -45,6 +45,7 @@ class MatterAdapter: self.hass = hass self.config_entry = config_entry self.platform_handlers: dict[Platform, AddEntitiesCallback] = {} + self.discovered_entities: set[str] = set() def register_platform_handler( self, platform: Platform, add_entities: AddEntitiesCallback @@ -54,23 +55,19 @@ class MatterAdapter: async def setup_nodes(self) -> None: """Set up all existing nodes and subscribe to new nodes.""" - initialized_nodes: set[int] = set() for node in self.matter_client.get_nodes(): - initialized_nodes.add(node.node_id) self._setup_node(node) def node_added_callback(event: EventType, node: MatterNode) -> None: """Handle node added event.""" - initialized_nodes.add(node.node_id) self._setup_node(node) def node_updated_callback(event: EventType, node: MatterNode) -> None: """Handle node updated event.""" - if node.node_id in initialized_nodes: - return if not node.available: return - initialized_nodes.add(node.node_id) + # We always run the discovery logic again, + # because the firmware version could have been changed or features added. 
self._setup_node(node) def endpoint_added_callback(event: EventType, data: dict[str, int]) -> None: @@ -237,11 +234,20 @@ class MatterAdapter: self._create_device_registry(endpoint) # run platform discovery from device type instances for entity_info in async_discover_entities(endpoint): + discovery_key = ( + f"{entity_info.platform}_{endpoint.node.node_id}_{endpoint.endpoint_id}_" + f"{entity_info.primary_attribute.cluster_id}_" + f"{entity_info.primary_attribute.attribute_id}_" + f"{entity_info.entity_description.key}" + ) + if discovery_key in self.discovered_entities: + continue LOGGER.debug( "Creating %s entity for %s", entity_info.platform, entity_info.primary_attribute, ) + self.discovered_entities.add(discovery_key) new_entity = entity_info.entity_class( self.matter_client, endpoint, entity_info ) diff --git a/homeassistant/components/matter/binary_sensor.py b/homeassistant/components/matter/binary_sensor.py index 875b063dc88..6882078a712 100644 --- a/homeassistant/components/matter/binary_sensor.py +++ b/homeassistant/components/matter/binary_sensor.py @@ -159,6 +159,7 @@ DISCOVERY_SCHEMAS = [ ), entity_class=MatterBinarySensor, required_attributes=(clusters.DoorLock.Attributes.DoorState,), + featuremap_contains=clusters.DoorLock.Bitmaps.Feature.kDoorPositionSensor, ), MatterDiscoverySchema( platform=Platform.BINARY_SENSOR, diff --git a/homeassistant/components/matter/button.py b/homeassistant/components/matter/button.py index 918b334061b..153124a4f7e 100644 --- a/homeassistant/components/matter/button.py +++ b/homeassistant/components/matter/button.py @@ -69,6 +69,7 @@ DISCOVERY_SCHEMAS = [ entity_class=MatterCommandButton, required_attributes=(clusters.Identify.Attributes.AcceptedCommandList,), value_contains=clusters.Identify.Commands.Identify.command_id, + allow_multi=True, ), MatterDiscoverySchema( platform=Platform.BUTTON, diff --git a/homeassistant/components/matter/const.py b/homeassistant/components/matter/const.py index a0e160a6c01..8018d5e09ed 100644 --- a/homeassistant/components/matter/const.py +++ b/homeassistant/components/matter/const.py @@ -13,3 +13,5 @@ LOGGER = logging.getLogger(__package__) # prefixes to identify device identifier id types ID_TYPE_DEVICE_ID = "deviceid" ID_TYPE_SERIAL = "serial" + +FEATUREMAP_ATTRIBUTE_ID = 65532 diff --git a/homeassistant/components/matter/discovery.py b/homeassistant/components/matter/discovery.py index 5b07f9a069f..3b9fb0b8a94 100644 --- a/homeassistant/components/matter/discovery.py +++ b/homeassistant/components/matter/discovery.py @@ -13,6 +13,7 @@ from homeassistant.core import callback from .binary_sensor import DISCOVERY_SCHEMAS as BINARY_SENSOR_SCHEMAS from .button import DISCOVERY_SCHEMAS as BUTTON_SCHEMAS from .climate import DISCOVERY_SCHEMAS as CLIMATE_SENSOR_SCHEMAS +from .const import FEATUREMAP_ATTRIBUTE_ID from .cover import DISCOVERY_SCHEMAS as COVER_SCHEMAS from .event import DISCOVERY_SCHEMAS as EVENT_SCHEMAS from .fan import DISCOVERY_SCHEMAS as FAN_SCHEMAS @@ -121,12 +122,24 @@ def async_discover_entities( continue # check for required value in (primary) attribute + primary_attribute = schema.required_attributes[0] + primary_value = endpoint.get_attribute_value(None, primary_attribute) if schema.value_contains is not None and ( - (primary_attribute := next((x for x in schema.required_attributes), None)) - is None - or (value := endpoint.get_attribute_value(None, primary_attribute)) is None - or not isinstance(value, list) - or schema.value_contains not in value + isinstance(primary_value, list) + and 
schema.value_contains not in primary_value + ): + continue + + # check for required value in cluster featuremap + if schema.featuremap_contains is not None and ( + not bool( + int( + endpoint.get_attribute_value( + primary_attribute.cluster_id, FEATUREMAP_ATTRIBUTE_ID + ) + ) + & schema.featuremap_contains + ) ): continue @@ -147,6 +160,7 @@ def async_discover_entities( attributes_to_watch=attributes_to_watch, entity_description=schema.entity_description, entity_class=schema.entity_class, + discovery_schema=schema, ) # prevent re-discovery of the primary attribute if not allowed diff --git a/homeassistant/components/matter/entity.py b/homeassistant/components/matter/entity.py index 7c378fe465e..50a0f2b1fee 100644 --- a/homeassistant/components/matter/entity.py +++ b/homeassistant/components/matter/entity.py @@ -16,9 +16,10 @@ from propcache import cached_property from homeassistant.core import callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import Entity, EntityDescription +import homeassistant.helpers.entity_registry as er from homeassistant.helpers.typing import UndefinedType -from .const import DOMAIN, ID_TYPE_DEVICE_ID +from .const import DOMAIN, FEATUREMAP_ATTRIBUTE_ID, ID_TYPE_DEVICE_ID from .helpers import get_device_id if TYPE_CHECKING: @@ -140,6 +141,19 @@ class MatterEntity(Entity): node_filter=self._endpoint.node.node_id, ) ) + # subscribe to FeatureMap attribute (as that can dynamically change) + self._unsubscribes.append( + self.matter_client.subscribe_events( + callback=self._on_featuremap_update, + event_filter=EventType.ATTRIBUTE_UPDATED, + node_filter=self._endpoint.node.node_id, + attr_path_filter=create_attribute_path( + endpoint=self._endpoint.endpoint_id, + cluster_id=self._entity_info.primary_attribute.cluster_id, + attribute_id=FEATUREMAP_ATTRIBUTE_ID, + ), + ) + ) @cached_property def name(self) -> str | UndefinedType | None: @@ -159,6 +173,29 @@ class MatterEntity(Entity): self._update_from_device() self.async_write_ha_state() + @callback + def _on_featuremap_update( + self, event: EventType, data: tuple[int, str, int] | None + ) -> None: + """Handle FeatureMap attribute updates.""" + if data is None: + return + new_value = data[2] + # handle edge case where a Feature is removed from a cluster + if ( + self._entity_info.discovery_schema.featuremap_contains is not None + and not bool( + new_value & self._entity_info.discovery_schema.featuremap_contains + ) + ): + # this entity is no longer supported by the device + ent_reg = er.async_get(self.hass) + ent_reg.async_remove(self.entity_id) + + return + # all other cases, just update the entity + self._on_matter_event(event, data) + @callback def _update_from_device(self) -> None: """Update data from Matter device.""" diff --git a/homeassistant/components/matter/lock.py b/homeassistant/components/matter/lock.py index c5e10554fe7..d69d0fd3dab 100644 --- a/homeassistant/components/matter/lock.py +++ b/homeassistant/components/matter/lock.py @@ -206,6 +206,5 @@ DISCOVERY_SCHEMAS = [ ), entity_class=MatterLock, required_attributes=(clusters.DoorLock.Attributes.LockState,), - optional_attributes=(clusters.DoorLock.Attributes.DoorState,), ), ] diff --git a/homeassistant/components/matter/models.py b/homeassistant/components/matter/models.py index f04c0f7e107..a00963c825a 100644 --- a/homeassistant/components/matter/models.py +++ b/homeassistant/components/matter/models.py @@ -51,6 +51,9 @@ class MatterEntityInfo: # entity class to use to instantiate the entity 
entity_class: type + # the original discovery schema used to create this entity + discovery_schema: MatterDiscoverySchema + @property def primary_attribute(self) -> type[ClusterAttributeDescriptor]: """Return Primary Attribute belonging to the entity.""" @@ -113,6 +116,10 @@ class MatterDiscoverySchema: # NOTE: only works for list values value_contains: Any | None = None + # [optional] the primary attribute's cluster featuremap must contain this value + # for example for the DoorSensor on a DoorLock Cluster + featuremap_contains: int | None = None + # [optional] bool to specify if this primary value may be discovered # by multiple platforms allow_multi: bool = False diff --git a/tests/components/matter/fixtures/nodes/door_lock.json b/tests/components/matter/fixtures/nodes/door_lock.json index b6231e04af4..acd327ac56c 100644 --- a/tests/components/matter/fixtures/nodes/door_lock.json +++ b/tests/components/matter/fixtures/nodes/door_lock.json @@ -495,7 +495,7 @@ "1/257/48": 3, "1/257/49": 10, "1/257/51": false, - "1/257/65532": 3507, + "1/257/65532": 0, "1/257/65533": 6, "1/257/65528": [12, 15, 18, 28, 35, 37], "1/257/65529": [ diff --git a/tests/components/matter/snapshots/test_binary_sensor.ambr b/tests/components/matter/snapshots/test_binary_sensor.ambr index 2e3367121e9..82dcc166f13 100644 --- a/tests/components/matter/snapshots/test_binary_sensor.ambr +++ b/tests/components/matter/snapshots/test_binary_sensor.ambr @@ -46,53 +46,6 @@ 'state': 'off', }) # --- -# name: test_binary_sensors[door_lock][binary_sensor.mock_door_lock_door-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.mock_door_lock_door', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Door', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-LockDoorStateSensor-257-3', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[door_lock][binary_sensor.mock_door_lock_door-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Mock Door Lock Door', - }), - 'context': , - 'entity_id': 'binary_sensor.mock_door_lock_door', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_binary_sensors[door_lock_with_unbolt][binary_sensor.mock_door_lock_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/matter/test_binary_sensor.py b/tests/components/matter/test_binary_sensor.py index 7ae483162bf..cddee975ac8 100644 --- a/tests/components/matter/test_binary_sensor.py +++ b/tests/components/matter/test_binary_sensor.py @@ -4,6 +4,7 @@ from collections.abc import Generator from unittest.mock import MagicMock, patch from matter_server.client.models.node import MatterNode +from matter_server.common.models import EventType import pytest from syrupy import SnapshotAssertion @@ -115,3 +116,34 @@ async def test_battery_sensor( state = hass.states.get(entity_id) assert state assert state.state == "on" + + +@pytest.mark.parametrize("node_fixture", ["door_lock"]) +async def 
test_optional_sensor_from_featuremap( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + matter_client: MagicMock, + matter_node: MatterNode, +) -> None: + """Test discovery of optional doorsensor in doorlock featuremap.""" + entity_id = "binary_sensor.mock_door_lock_door" + state = hass.states.get(entity_id) + assert state is None + + # update the feature map to include the optional door sensor feature + # and fire a node updated event + set_node_attribute(matter_node, 1, 257, 65532, 32) + await trigger_subscription_callback( + hass, matter_client, event=EventType.NODE_UPDATED, data=matter_node + ) + # this should result in a new binary sensor entity being discovered + state = hass.states.get(entity_id) + assert state + assert state.state == "off" + # now test the reverse, by removing the feature from the feature map + set_node_attribute(matter_node, 1, 257, 65532, 0) + await trigger_subscription_callback( + hass, matter_client, data=(matter_node.node_id, "1/257/65532", 0) + ) + state = hass.states.get(entity_id) + assert state is None From d66a6d95964355c3219791711849d552d7999c71 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Tue, 3 Dec 2024 13:06:54 +0100 Subject: [PATCH 0180/1198] Fix imap sensor in case of alternative empty search response (#132081) --- homeassistant/components/imap/coordinator.py | 12 +++++++++++- tests/components/imap/const.py | 2 ++ tests/components/imap/test_init.py | 13 +++++++++++-- 3 files changed, 24 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/imap/coordinator.py b/homeassistant/components/imap/coordinator.py index 41fd703d79b..1df107196ff 100644 --- a/homeassistant/components/imap/coordinator.py +++ b/homeassistant/components/imap/coordinator.py @@ -334,7 +334,17 @@ class ImapDataUpdateCoordinator(DataUpdateCoordinator[int | None]): raise UpdateFailed( f"Invalid response for search '{self.config_entry.data[CONF_SEARCH]}': {result} / {lines[0]}" ) - if not (count := len(message_ids := lines[0].split())): + # Check we do have returned items. + # + # In rare cases, when no UID's are returned, + # only the status line is returned, and not an empty line. + # See: https://github.com/home-assistant/core/issues/132042 + # + # Strictly the RfC notes that 0 or more numbers should be returned + # delimited by a space. 
+ # + # See: https://datatracker.ietf.org/doc/html/rfc3501#section-7.2.5 + if len(lines) == 1 or not (count := len(message_ids := lines[0].split())): self._last_message_uid = None return 0 last_message_uid = ( diff --git a/tests/components/imap/const.py b/tests/components/imap/const.py index 037960c9e5d..8f6761bd795 100644 --- a/tests/components/imap/const.py +++ b/tests/components/imap/const.py @@ -141,6 +141,8 @@ TEST_CONTENT_MULTIPART_BASE64_INVALID = ( ) EMPTY_SEARCH_RESPONSE = ("OK", [b"", b"Search completed (0.0001 + 0.000 secs)."]) +EMPTY_SEARCH_RESPONSE_ALT = ("OK", [b"Search completed (0.0001 + 0.000 secs)."]) + BAD_RESPONSE = ("BAD", [b"", b"Unexpected error"]) TEST_SEARCH_RESPONSE = ("OK", [b"1", b"Search completed (0.0001 + 0.000 secs)."]) diff --git a/tests/components/imap/test_init.py b/tests/components/imap/test_init.py index 7bdfc44571a..d4281b9e513 100644 --- a/tests/components/imap/test_init.py +++ b/tests/components/imap/test_init.py @@ -20,6 +20,7 @@ from homeassistant.util.dt import utcnow from .const import ( BAD_RESPONSE, EMPTY_SEARCH_RESPONSE, + EMPTY_SEARCH_RESPONSE_ALT, TEST_BADLY_ENCODED_CONTENT, TEST_FETCH_RESPONSE_BINARY, TEST_FETCH_RESPONSE_HTML, @@ -517,6 +518,11 @@ async def test_fetch_number_of_messages( assert state.state == STATE_UNAVAILABLE +@pytest.mark.parametrize( + "empty_search_reponse", + [EMPTY_SEARCH_RESPONSE, EMPTY_SEARCH_RESPONSE_ALT], + ids=["regular_empty_search_response", "alt_empty_search_response"], +) @pytest.mark.parametrize("imap_search", [TEST_SEARCH_RESPONSE]) @pytest.mark.parametrize( ("imap_fetch", "valid_date"), @@ -525,7 +531,10 @@ async def test_fetch_number_of_messages( ) @pytest.mark.parametrize("imap_has_capability", [True, False], ids=["push", "poll"]) async def test_reset_last_message( - hass: HomeAssistant, mock_imap_protocol: MagicMock, valid_date: bool + hass: HomeAssistant, + mock_imap_protocol: MagicMock, + valid_date: bool, + empty_search_reponse: tuple[str, list[bytes]], ) -> None: """Test receiving a message successfully.""" event = asyncio.Event() # needed for pushed coordinator to make a new loop @@ -580,7 +589,7 @@ async def test_reset_last_message( ) # Simulate an update where no messages are found (needed for pushed coordinator) - mock_imap_protocol.search.return_value = Response(*EMPTY_SEARCH_RESPONSE) + mock_imap_protocol.search.return_value = Response(*empty_search_reponse) # Make sure we have an update async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) From ffccdbbcecdc961304f16aea4b64dcf81d57f5a8 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 3 Dec 2024 13:10:55 +0100 Subject: [PATCH 0181/1198] Bump renault-api to 0.2.8 (#132135) --- homeassistant/components/renault/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/renault/manifest.json b/homeassistant/components/renault/manifest.json index 111f296fc85..a4817fc84e6 100644 --- a/homeassistant/components/renault/manifest.json +++ b/homeassistant/components/renault/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["renault_api"], "quality_scale": "silver", - "requirements": ["renault-api==0.2.7"] + "requirements": ["renault-api==0.2.8"] } diff --git a/requirements_all.txt b/requirements_all.txt index 4ef04b87b49..997d4cd3799 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2550,7 +2550,7 @@ refoss-ha==1.2.5 regenmaschine==2024.03.0 # 
homeassistant.components.renault -renault-api==0.2.7 +renault-api==0.2.8 # homeassistant.components.renson renson-endura-delta==1.7.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 73439609bd5..87042c04ce5 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2044,7 +2044,7 @@ refoss-ha==1.2.5 regenmaschine==2024.03.0 # homeassistant.components.renault -renault-api==0.2.7 +renault-api==0.2.8 # homeassistant.components.renson renson-endura-delta==1.7.1 From 3a19c2f47f7c57f8f660378b690771e64bfff3a9 Mon Sep 17 00:00:00 2001 From: Petar Petrov Date: Tue, 3 Dec 2024 14:29:44 +0200 Subject: [PATCH 0182/1198] Support Z-Wave JS abort S2 bootstrapping (#132140) ZWaveJS: abort S2 bootstrapping when inclusion is canceled --- homeassistant/components/zwave_js/api.py | 24 +++++++++ tests/components/zwave_js/test_api.py | 66 ++++++++++++++++++++++++ 2 files changed, 90 insertions(+) diff --git a/homeassistant/components/zwave_js/api.py b/homeassistant/components/zwave_js/api.py index ff0459ddbdd..88f8f25c8e2 100644 --- a/homeassistant/components/zwave_js/api.py +++ b/homeassistant/components/zwave_js/api.py @@ -395,6 +395,7 @@ def async_register_api(hass: HomeAssistant) -> None: websocket_api.async_register_command(hass, websocket_node_metadata) websocket_api.async_register_command(hass, websocket_node_alerts) websocket_api.async_register_command(hass, websocket_add_node) + websocket_api.async_register_command(hass, websocket_cancel_secure_bootstrap_s2) websocket_api.async_register_command(hass, websocket_grant_security_classes) websocket_api.async_register_command(hass, websocket_validate_dsk_and_enter_pin) websocket_api.async_register_command(hass, websocket_provision_smart_start_node) @@ -839,6 +840,29 @@ async def websocket_add_node( ) +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required(TYPE): "zwave_js/cancel_secure_bootstrap_s2", + vol.Required(ENTRY_ID): str, + } +) +@websocket_api.async_response +@async_handle_failed_command +@async_get_entry +async def websocket_cancel_secure_bootstrap_s2( + hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], + entry: ConfigEntry, + client: Client, + driver: Driver, +) -> None: + """Cancel secure bootstrap S2.""" + await driver.controller.async_cancel_secure_bootstrap_s2() + connection.send_result(msg[ID]) + + @websocket_api.require_admin @websocket_api.websocket_command( { diff --git a/tests/components/zwave_js/test_api.py b/tests/components/zwave_js/test_api.py index 357ec29b810..3761ba6eaa6 100644 --- a/tests/components/zwave_js/test_api.py +++ b/tests/components/zwave_js/test_api.py @@ -5195,3 +5195,69 @@ async def test_get_integration_settings( assert msg["result"] == { CONF_INSTALLER_MODE: installer_mode, } + + +async def test_cancel_secure_bootstrap_s2( + hass: HomeAssistant, client, integration, hass_ws_client: WebSocketGenerator +) -> None: + """Test that the cancel_secure_bootstrap_s2 WS API call works.""" + entry = integration + ws_client = await hass_ws_client(hass) + + # Test successful cancellation + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/cancel_secure_bootstrap_s2", + ENTRY_ID: entry.entry_id, + } + ) + msg = await ws_client.receive_json() + assert msg["success"] + + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args[0][0] + assert args["command"] == "controller.cancel_secure_bootstrap_s2" + + # Test FailedZWaveCommand is caught + with patch( + 
f"{CONTROLLER_PATCH_PREFIX}.async_cancel_secure_bootstrap_s2", + side_effect=FailedZWaveCommand("failed_command", 1, "error message"), + ): + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/cancel_secure_bootstrap_s2", + ENTRY_ID: entry.entry_id, + } + ) + msg = await ws_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == "zwave_error" + assert msg["error"]["message"] == "zwave_error: Z-Wave error 1 - error message" + + # Test sending command with not loaded entry fails + await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/cancel_secure_bootstrap_s2", + ENTRY_ID: entry.entry_id, + } + ) + msg = await ws_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_LOADED + + # Test sending command with invalid entry ID fails + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/cancel_secure_bootstrap_s2", + ENTRY_ID: "invalid_entry_id", + } + ) + msg = await ws_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_FOUND From bebbb87aa2fbbf789e8cd70f5c6ba4ed10ce74c8 Mon Sep 17 00:00:00 2001 From: Tobias Perschon Date: Tue, 3 Dec 2024 13:33:47 +0100 Subject: [PATCH 0183/1198] Bump unifi_ap to 0.0.2 (#132125) --- homeassistant/components/unifi_direct/manifest.json | 2 +- requirements_all.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/unifi_direct/manifest.json b/homeassistant/components/unifi_direct/manifest.json index 775279c64e2..aa696985dbe 100644 --- a/homeassistant/components/unifi_direct/manifest.json +++ b/homeassistant/components/unifi_direct/manifest.json @@ -6,5 +6,5 @@ "iot_class": "local_polling", "loggers": ["unifi_ap"], "quality_scale": "legacy", - "requirements": ["unifi_ap==0.0.1"] + "requirements": ["unifi_ap==0.0.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 997d4cd3799..a5093206446 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2906,7 +2906,7 @@ ultraheat-api==0.5.7 unifi-discovery==1.2.0 # homeassistant.components.unifi_direct -unifi_ap==0.0.1 +unifi_ap==0.0.2 # homeassistant.components.unifiled unifiled==0.11 From c4ba15bb8c2bd8b4ca7c6e0199e2ac2ac363ee87 Mon Sep 17 00:00:00 2001 From: Tobias Perschon Date: Tue, 3 Dec 2024 13:34:13 +0100 Subject: [PATCH 0184/1198] Improve error logging for unifi-ap (#132141) --- homeassistant/components/unifi_direct/device_tracker.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/unifi_direct/device_tracker.py b/homeassistant/components/unifi_direct/device_tracker.py index 144cbd4dec7..d5e2e926114 100644 --- a/homeassistant/components/unifi_direct/device_tracker.py +++ b/homeassistant/components/unifi_direct/device_tracker.py @@ -67,11 +67,11 @@ class UnifiDeviceScanner(DeviceScanner): """Update the client info from AP.""" try: self.clients = self.ap.get_clients() - except UniFiAPConnectionException: - _LOGGER.error("Failed to connect to accesspoint") + except UniFiAPConnectionException as e: + _LOGGER.error("Failed to connect to accesspoint: %s", str(e)) return False - except UniFiAPDataException: - _LOGGER.error("Failed to get proper response from accesspoint") + except UniFiAPDataException as e: + _LOGGER.error("Failed to get proper response from accesspoint: %s", str(e)) return False return True From 3e64d148cc4644d55c6e0d85ff43084b6e580284 Mon Sep 17 00:00:00 2001 From: Jon 
Seager Date: Tue, 3 Dec 2024 12:34:50 +0000 Subject: [PATCH 0185/1198] Bump pytouchlinesl to 0.3.0 (#132157) --- homeassistant/components/touchline_sl/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/touchline_sl/manifest.json b/homeassistant/components/touchline_sl/manifest.json index ca3136f55c0..ab07ae770fd 100644 --- a/homeassistant/components/touchline_sl/manifest.json +++ b/homeassistant/components/touchline_sl/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/touchline_sl", "integration_type": "hub", "iot_class": "cloud_polling", - "requirements": ["pytouchlinesl==0.2.0"] + "requirements": ["pytouchlinesl==0.3.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index a5093206446..b6c50fb83ac 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2435,7 +2435,7 @@ pytomorrowio==0.3.6 pytouchline==0.7 # homeassistant.components.touchline_sl -pytouchlinesl==0.2.0 +pytouchlinesl==0.3.0 # homeassistant.components.traccar # homeassistant.components.traccar_server diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 87042c04ce5..7b25ab3734a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1950,7 +1950,7 @@ pytile==2023.12.0 pytomorrowio==0.3.6 # homeassistant.components.touchline_sl -pytouchlinesl==0.2.0 +pytouchlinesl==0.3.0 # homeassistant.components.traccar # homeassistant.components.traccar_server From f59cf8fa54eab0c0e9049039c72ec4d829b2d055 Mon Sep 17 00:00:00 2001 From: Richard Kroegel <42204099+rikroe@users.noreply.github.com> Date: Tue, 3 Dec 2024 13:36:41 +0100 Subject: [PATCH 0186/1198] Set PARALLEL_UPDATES for all BMW platforms (#132088) --- homeassistant/components/bmw_connected_drive/binary_sensor.py | 2 ++ homeassistant/components/bmw_connected_drive/device_tracker.py | 2 ++ homeassistant/components/bmw_connected_drive/diagnostics.py | 2 ++ homeassistant/components/bmw_connected_drive/sensor.py | 2 ++ 4 files changed, 8 insertions(+) diff --git a/homeassistant/components/bmw_connected_drive/binary_sensor.py b/homeassistant/components/bmw_connected_drive/binary_sensor.py index 65bdfca997b..285ac98fc8f 100644 --- a/homeassistant/components/bmw_connected_drive/binary_sensor.py +++ b/homeassistant/components/bmw_connected_drive/binary_sensor.py @@ -26,6 +26,8 @@ from .const import UNIT_MAP from .coordinator import BMWDataUpdateCoordinator from .entity import BMWBaseEntity +PARALLEL_UPDATES = 0 + _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/bmw_connected_drive/device_tracker.py b/homeassistant/components/bmw_connected_drive/device_tracker.py index 977fd531e2c..b65c2c1b088 100644 --- a/homeassistant/components/bmw_connected_drive/device_tracker.py +++ b/homeassistant/components/bmw_connected_drive/device_tracker.py @@ -16,6 +16,8 @@ from .const import ATTR_DIRECTION from .coordinator import BMWDataUpdateCoordinator from .entity import BMWBaseEntity +PARALLEL_UPDATES = 0 + _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/bmw_connected_drive/diagnostics.py b/homeassistant/components/bmw_connected_drive/diagnostics.py index ff3c6f29559..3950ea3dec2 100644 --- a/homeassistant/components/bmw_connected_drive/diagnostics.py +++ b/homeassistant/components/bmw_connected_drive/diagnostics.py @@ -16,6 +16,8 @@ from homeassistant.helpers.device_registry import DeviceEntry from . 
import BMWConfigEntry from .const import CONF_REFRESH_TOKEN +PARALLEL_UPDATES = 1 + if TYPE_CHECKING: from bimmer_connected.vehicle import MyBMWVehicle diff --git a/homeassistant/components/bmw_connected_drive/sensor.py b/homeassistant/components/bmw_connected_drive/sensor.py index e24e2dd75f6..555655511e8 100644 --- a/homeassistant/components/bmw_connected_drive/sensor.py +++ b/homeassistant/components/bmw_connected_drive/sensor.py @@ -34,6 +34,8 @@ from . import BMWConfigEntry from .coordinator import BMWDataUpdateCoordinator from .entity import BMWBaseEntity +PARALLEL_UPDATES = 0 + _LOGGER = logging.getLogger(__name__) From f6beefced366af62dfe6a8c0a6ed1baabe700433 Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Tue, 3 Dec 2024 13:50:50 +0100 Subject: [PATCH 0187/1198] Improve Reolink config flow tests (#131693) Co-authored-by: Joost Lekkerkerker --- homeassistant/components/reolink/strings.json | 3 +- tests/components/reolink/test_config_flow.py | 271 ++++++++++++++++-- 2 files changed, 249 insertions(+), 25 deletions(-) diff --git a/homeassistant/components/reolink/strings.json b/homeassistant/components/reolink/strings.json index 726a9fab2f3..ac73581ce22 100644 --- a/homeassistant/components/reolink/strings.json +++ b/homeassistant/components/reolink/strings.json @@ -33,7 +33,8 @@ "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", - "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "unique_id_mismatch": "The mac address of the device does not match the previous mac address" } }, "options": { diff --git a/tests/components/reolink/test_config_flow.py b/tests/components/reolink/test_config_flow.py index b358670ac6b..59342934c1c 100644 --- a/tests/components/reolink/test_config_flow.py +++ b/tests/components/reolink/test_config_flow.py @@ -85,6 +85,7 @@ async def test_config_flow_manual_success( assert result["options"] == { CONF_PROTOCOL: DEFAULT_PROTOCOL, } + assert result["result"].unique_id == TEST_MAC async def test_config_flow_errors( @@ -329,6 +330,55 @@ async def test_reauth(hass: HomeAssistant, mock_setup_entry: MagicMock) -> None: assert config_entry.data[CONF_PASSWORD] == TEST_PASSWORD2 +async def test_reauth_abort_unique_id_mismatch( + hass: HomeAssistant, mock_setup_entry: MagicMock, reolink_connect: MagicMock +) -> None: + """Test a reauth flow.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=format_mac(TEST_MAC), + data={ + CONF_HOST: TEST_HOST, + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + CONF_PORT: TEST_PORT, + CONF_USE_HTTPS: TEST_USE_HTTPS, + }, + options={ + CONF_PROTOCOL: DEFAULT_PROTOCOL, + }, + title=TEST_NVR_NAME, + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + reolink_connect.mac_address = "aa:aa:aa:aa:aa:aa" + + result = await config_entry.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: TEST_USERNAME2, + CONF_PASSWORD: TEST_PASSWORD2, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unique_id_mismatch" + assert config_entry.data[CONF_HOST] 
== TEST_HOST + assert config_entry.data[CONF_USERNAME] == TEST_USERNAME + assert config_entry.data[CONF_PASSWORD] == TEST_PASSWORD + + reolink_connect.mac_address = TEST_MAC + + async def test_dhcp_flow(hass: HomeAssistant, mock_setup_entry: MagicMock) -> None: """Successful flow from DHCP discovery.""" dhcp_data = dhcp.DhcpServiceInfo( @@ -367,37 +417,97 @@ async def test_dhcp_flow(hass: HomeAssistant, mock_setup_entry: MagicMock) -> No } +async def test_dhcp_ip_update_aborted_if_wrong_mac( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + reolink_connect_class: MagicMock, + reolink_connect: MagicMock, +) -> None: + """Test dhcp discovery does not update the IP if the mac address does not match.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=format_mac(TEST_MAC), + data={ + CONF_HOST: TEST_HOST, + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + CONF_PORT: TEST_PORT, + CONF_USE_HTTPS: TEST_USE_HTTPS, + }, + options={ + CONF_PROTOCOL: DEFAULT_PROTOCOL, + }, + title=TEST_NVR_NAME, + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + # ensure the last_update_succes is False for the device_coordinator. + reolink_connect.get_states.side_effect = ReolinkError("Test error") + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + dhcp_data = dhcp.DhcpServiceInfo( + ip=TEST_HOST2, + hostname="Reolink", + macaddress=DHCP_FORMATTED_MAC, + ) + + reolink_connect.mac_address = "aa:aa:aa:aa:aa:aa" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=dhcp_data + ) + + for host in (TEST_HOST, TEST_HOST2): + expected_call = call( + host, + TEST_USERNAME, + TEST_PASSWORD, + port=TEST_PORT, + use_https=TEST_USE_HTTPS, + protocol=DEFAULT_PROTOCOL, + timeout=DEFAULT_TIMEOUT, + aiohttp_get_session_callback=ANY, + ) + assert expected_call in reolink_connect_class.call_args_list + + for exc_call in reolink_connect_class.call_args_list: + assert exc_call[0][0] in [TEST_HOST, TEST_HOST2] + get_session = exc_call[1]["aiohttp_get_session_callback"] + assert isinstance(get_session(), ClientSession) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + await hass.async_block_till_done() + # Check that IP was not updated + assert config_entry.data[CONF_HOST] == TEST_HOST + + reolink_connect.get_states.side_effect = None + reolink_connect_class.reset_mock() + reolink_connect.mac_address = TEST_MAC + + @pytest.mark.parametrize( - ("last_update_success", "attr", "value", "expected", "host_call_list"), + ("attr", "value", "expected", "host_call_list"), [ ( - False, None, None, TEST_HOST2, [TEST_HOST, TEST_HOST2], ), ( - True, - None, - None, - TEST_HOST, - [TEST_HOST], - ), - ( - False, "get_state", AsyncMock(side_effect=ReolinkError("Test error")), TEST_HOST, [TEST_HOST, TEST_HOST2], ), - ( - False, - "mac_address", - "aa:aa:aa:aa:aa:aa", - TEST_HOST, - [TEST_HOST, TEST_HOST2], - ), ], ) async def test_dhcp_ip_update( @@ -405,7 +515,6 @@ async def test_dhcp_ip_update( freezer: FrozenDateTimeFactory, reolink_connect_class: MagicMock, reolink_connect: MagicMock, - last_update_success: bool, attr: str, value: Any, expected: str, @@ -433,12 +542,11 @@ async def test_dhcp_ip_update( await hass.async_block_till_done() assert config_entry.state is 
ConfigEntryState.LOADED - if not last_update_success: - # ensure the last_update_succes is False for the device_coordinator. - reolink_connect.get_states.side_effect = ReolinkError("Test error") - freezer.tick(DEVICE_UPDATE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() + # ensure the last_update_succes is False for the device_coordinator. + reolink_connect.get_states.side_effect = ReolinkError("Test error") + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() dhcp_data = dhcp.DhcpServiceInfo( ip=TEST_HOST2, @@ -484,6 +592,71 @@ async def test_dhcp_ip_update( setattr(reolink_connect, attr, original) +async def test_dhcp_ip_update_ingnored_if_still_connected( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + reolink_connect_class: MagicMock, + reolink_connect: MagicMock, +) -> None: + """Test dhcp discovery is ignored when the camera is still properly connected to HA.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=format_mac(TEST_MAC), + data={ + CONF_HOST: TEST_HOST, + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + CONF_PORT: TEST_PORT, + CONF_USE_HTTPS: TEST_USE_HTTPS, + }, + options={ + CONF_PROTOCOL: DEFAULT_PROTOCOL, + }, + title=TEST_NVR_NAME, + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + dhcp_data = dhcp.DhcpServiceInfo( + ip=TEST_HOST2, + hostname="Reolink", + macaddress=DHCP_FORMATTED_MAC, + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=dhcp_data + ) + + expected_call = call( + TEST_HOST, + TEST_USERNAME, + TEST_PASSWORD, + port=TEST_PORT, + use_https=TEST_USE_HTTPS, + protocol=DEFAULT_PROTOCOL, + timeout=DEFAULT_TIMEOUT, + aiohttp_get_session_callback=ANY, + ) + assert expected_call in reolink_connect_class.call_args_list + + for exc_call in reolink_connect_class.call_args_list: + assert exc_call[0][0] == TEST_HOST + get_session = exc_call[1]["aiohttp_get_session_callback"] + assert isinstance(get_session(), ClientSession) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + await hass.async_block_till_done() + assert config_entry.data[CONF_HOST] == TEST_HOST + + reolink_connect.get_states.side_effect = None + reolink_connect_class.reset_mock() + + async def test_reconfig(hass: HomeAssistant, mock_setup_entry: MagicMock) -> None: """Test a reconfiguration flow.""" config_entry = MockConfigEntry( @@ -526,3 +699,53 @@ async def test_reconfig(hass: HomeAssistant, mock_setup_entry: MagicMock) -> Non assert config_entry.data[CONF_HOST] == TEST_HOST2 assert config_entry.data[CONF_USERNAME] == TEST_USERNAME assert config_entry.data[CONF_PASSWORD] == TEST_PASSWORD + + +async def test_reconfig_abort_unique_id_mismatch( + hass: HomeAssistant, mock_setup_entry: MagicMock, reolink_connect: MagicMock +) -> None: + """Test a reconfiguration flow aborts if the unique id does not match.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=format_mac(TEST_MAC), + data={ + CONF_HOST: TEST_HOST, + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + CONF_PORT: TEST_PORT, + CONF_USE_HTTPS: TEST_USE_HTTPS, + }, + options={ + CONF_PROTOCOL: DEFAULT_PROTOCOL, + }, + title=TEST_NVR_NAME, + ) + config_entry.add_to_hass(hass) + + assert await 
hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + reolink_connect.mac_address = "aa:aa:aa:aa:aa:aa" + + result = await config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: TEST_HOST2, + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unique_id_mismatch" + assert config_entry.data[CONF_HOST] == TEST_HOST + assert config_entry.data[CONF_USERNAME] == TEST_USERNAME + assert config_entry.data[CONF_PASSWORD] == TEST_PASSWORD + + reolink_connect.mac_address = TEST_MAC From ff77ecd2ce2632355c41f69442b6b2e3766e9d77 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Tue, 3 Dec 2024 14:08:31 +0100 Subject: [PATCH 0188/1198] Update frontend to 20241127.2 (#132109) Co-authored-by: Franck Nijhof --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 7bd500f17ea..f59ca05ba55 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20241127.1"] + "requirements": ["home-assistant-frontend==20241127.2"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index d85fa4293a3..25aaed1a14a 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -34,7 +34,7 @@ habluetooth==3.6.0 hass-nabucasa==0.85.0 hassil==2.0.4 home-assistant-bluetooth==1.13.0 -home-assistant-frontend==20241127.1 +home-assistant-frontend==20241127.2 home-assistant-intents==2024.11.27 httpx==0.27.2 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index b6c50fb83ac..02df54ee33d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1127,7 +1127,7 @@ hole==0.8.0 holidays==0.61 # homeassistant.components.frontend -home-assistant-frontend==20241127.1 +home-assistant-frontend==20241127.2 # homeassistant.components.conversation home-assistant-intents==2024.11.27 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 7b25ab3734a..0f814a25c95 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -953,7 +953,7 @@ hole==0.8.0 holidays==0.61 # homeassistant.components.frontend -home-assistant-frontend==20241127.1 +home-assistant-frontend==20241127.2 # homeassistant.components.conversation home-assistant-intents==2024.11.27 From af5574f71c51150f6011e20162a230e35c1c7fda Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Tue, 3 Dec 2024 07:08:55 -0600 Subject: [PATCH 0189/1198] Bump voip-utils (#132110) Co-authored-by: Paulus Schoutsen --- homeassistant/components/voip/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/voip/conftest.py | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/voip/manifest.json b/homeassistant/components/voip/manifest.json 
index 964193fca53..7dd2e797058 100644 --- a/homeassistant/components/voip/manifest.json +++ b/homeassistant/components/voip/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/voip", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["voip-utils==0.1.0"] + "requirements": ["voip-utils==0.2.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 02df54ee33d..8d6919cc6fb 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2947,7 +2947,7 @@ venstarcolortouch==0.19 vilfo-api-client==0.5.0 # homeassistant.components.voip -voip-utils==0.1.0 +voip-utils==0.2.1 # homeassistant.components.volkszaehler volkszaehler==0.4.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 0f814a25c95..c86ef76bfd7 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2357,7 +2357,7 @@ venstarcolortouch==0.19 vilfo-api-client==0.5.0 # homeassistant.components.voip -voip-utils==0.1.0 +voip-utils==0.2.1 # homeassistant.components.volvooncall volvooncall==0.10.3 diff --git a/tests/components/voip/conftest.py b/tests/components/voip/conftest.py index cbca8997797..99707297230 100644 --- a/tests/components/voip/conftest.py +++ b/tests/components/voip/conftest.py @@ -6,6 +6,7 @@ from unittest.mock import AsyncMock, Mock, patch import pytest from voip_utils import CallInfo +from voip_utils.sip import get_sip_endpoint from homeassistant.components.voip import DOMAIN from homeassistant.components.voip.devices import VoIPDevice, VoIPDevices @@ -55,8 +56,7 @@ async def voip_devices(hass: HomeAssistant, setup_voip: None) -> VoIPDevices: def call_info() -> CallInfo: """Fake call info.""" return CallInfo( - caller_ip="192.168.1.210", - caller_sip_port=5060, + caller_endpoint=get_sip_endpoint("192.168.1.210", 5060), caller_rtp_port=5004, server_ip="192.168.1.10", headers={ From 76ba3afeae07a11da26d74d453f8bdfd0078b659 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 3 Dec 2024 14:33:40 +0100 Subject: [PATCH 0190/1198] Cleanup dead code in renault (#132172) --- homeassistant/components/renault/coordinator.py | 2 +- homeassistant/components/renault/entity.py | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/homeassistant/components/renault/coordinator.py b/homeassistant/components/renault/coordinator.py index 988349e76f4..89e62867130 100644 --- a/homeassistant/components/renault/coordinator.py +++ b/homeassistant/components/renault/coordinator.py @@ -18,7 +18,7 @@ from renault_api.kamereon.models import KamereonVehicleDataAttributes from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -T = TypeVar("T", bound=KamereonVehicleDataAttributes | None) +T = TypeVar("T", bound=KamereonVehicleDataAttributes) # We have potentially 7 coordinators per vehicle _PARALLEL_SEMAPHORE = asyncio.Semaphore(1) diff --git a/homeassistant/components/renault/entity.py b/homeassistant/components/renault/entity.py index c7f83c1e6f2..7beb91e9603 100644 --- a/homeassistant/components/renault/entity.py +++ b/homeassistant/components/renault/entity.py @@ -59,6 +59,4 @@ class RenaultDataEntity( def _get_data_attr(self, key: str) -> StateType: """Return the attribute value from the coordinator data.""" - if self.coordinator.data is None: - return None return cast(StateType, getattr(self.coordinator.data, key)) From 7e079303429335200da325c7830cc8a2232d323e Mon Sep 17 00:00:00 2001 From: epenet 
<6771947+epenet@users.noreply.github.com> Date: Tue, 3 Dec 2024 15:01:35 +0100 Subject: [PATCH 0191/1198] Pin rpds-py to 0.21.0 to fix CI (#132170) * Pin rpds-py==0.21.0 to fix CI * Add carriage return --- homeassistant/package_constraints.txt | 5 +++++ script/gen_requirements_all.py | 5 +++++ 2 files changed, 10 insertions(+) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 25aaed1a14a..cec61d18fbc 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -205,3 +205,8 @@ async-timeout==4.0.3 # https://github.com/home-assistant/core/issues/122508 # https://github.com/home-assistant/core/issues/118004 aiofiles>=24.1.0 + +# 0.22.0 causes CI failures on Python 3.13 +# python3 -X dev -m pytest tests/components/matrix +# python3 -X dev -m pytest tests/components/zha +rpds-py==0.21.0 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index 97ffcac79a4..450469096ea 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -238,6 +238,11 @@ async-timeout==4.0.3 # https://github.com/home-assistant/core/issues/122508 # https://github.com/home-assistant/core/issues/118004 aiofiles>=24.1.0 + +# 0.22.0 causes CI failures on Python 3.13 +# python3 -X dev -m pytest tests/components/matrix +# python3 -X dev -m pytest tests/components/zha +rpds-py==0.21.0 """ GENERATED_MESSAGE = ( From 9e723752f887b0e24b753aaaa92ab7ecc9b23f52 Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Tue, 3 Dec 2024 15:08:36 +0100 Subject: [PATCH 0192/1198] Bump nettigo-air-monitor to version 4.0.0 (#132106) --- homeassistant/components/nam/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/nam/manifest.json b/homeassistant/components/nam/manifest.json index d837aa69b9d..c3a559de50b 100644 --- a/homeassistant/components/nam/manifest.json +++ b/homeassistant/components/nam/manifest.json @@ -7,7 +7,7 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["nettigo_air_monitor"], - "requirements": ["nettigo-air-monitor==3.3.0"], + "requirements": ["nettigo-air-monitor==4.0.0"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/requirements_all.txt b/requirements_all.txt index 8d6919cc6fb..70d5dc6c555 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1439,7 +1439,7 @@ netdata==1.1.0 netmap==0.7.0.2 # homeassistant.components.nam -nettigo-air-monitor==3.3.0 +nettigo-air-monitor==4.0.0 # homeassistant.components.neurio_energy neurio==0.3.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c86ef76bfd7..2e3544dc7c8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1202,7 +1202,7 @@ nessclient==1.1.2 netmap==0.7.0.2 # homeassistant.components.nam -nettigo-air-monitor==3.3.0 +nettigo-air-monitor==4.0.0 # homeassistant.components.nexia nexia==2.0.8 From e3885b8117e3713feb0b37c1432f4a28119922f2 Mon Sep 17 00:00:00 2001 From: Josef Zweck <24647999+zweckj@users.noreply.github.com> Date: Tue, 3 Dec 2024 15:10:58 +0100 Subject: [PATCH 0193/1198] Instantiate new httpx client for lamarzocco (#132016) --- homeassistant/components/lamarzocco/__init__.py | 8 ++++---- homeassistant/components/lamarzocco/config_flow.py | 9 +++++++-- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/lamarzocco/__init__.py b/homeassistant/components/lamarzocco/__init__.py index 
09187848a0f..a69b97242f3 100644 --- a/homeassistant/components/lamarzocco/__init__.py +++ b/homeassistant/components/lamarzocco/__init__.py @@ -22,7 +22,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers import issue_registry as ir -from homeassistant.helpers.httpx_client import get_async_client +from homeassistant.helpers.httpx_client import create_async_httpx_client from .const import CONF_USE_BLUETOOTH, DOMAIN from .coordinator import LaMarzoccoConfigEntry, LaMarzoccoUpdateCoordinator @@ -46,11 +46,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) - assert entry.unique_id serial = entry.unique_id - + client = create_async_httpx_client(hass) cloud_client = LaMarzoccoCloudClient( username=entry.data[CONF_USERNAME], password=entry.data[CONF_PASSWORD], - client=get_async_client(hass), + client=client, ) # initialize local API @@ -60,7 +60,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) - local_client = LaMarzoccoLocalClient( host=host, local_bearer=entry.data[CONF_TOKEN], - client=get_async_client(hass), + client=client, ) # initialize Bluetooth diff --git a/homeassistant/components/lamarzocco/config_flow.py b/homeassistant/components/lamarzocco/config_flow.py index e4ee0682ae7..05dfcbc5196 100644 --- a/homeassistant/components/lamarzocco/config_flow.py +++ b/homeassistant/components/lamarzocco/config_flow.py @@ -6,6 +6,7 @@ from collections.abc import Mapping import logging from typing import Any +from httpx import AsyncClient from pylamarzocco.client_cloud import LaMarzoccoCloudClient from pylamarzocco.client_local import LaMarzoccoLocalClient from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful @@ -36,7 +37,7 @@ from homeassistant.const import ( ) from homeassistant.core import callback from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.httpx_client import get_async_client +from homeassistant.helpers.httpx_client import create_async_httpx_client from homeassistant.helpers.selector import ( SelectOptionDict, SelectSelector, @@ -57,6 +58,8 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 2 + _client: AsyncClient + def __init__(self) -> None: """Initialize the config flow.""" self._config: dict[str, Any] = {} @@ -79,10 +82,12 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): **user_input, **self._discovered, } + self._client = create_async_httpx_client(self.hass) cloud_client = LaMarzoccoCloudClient( username=data[CONF_USERNAME], password=data[CONF_PASSWORD], + client=self._client, ) try: self._fleet = await cloud_client.get_customer_fleet() @@ -163,7 +168,7 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): # validate local connection if host is provided if user_input.get(CONF_HOST): if not await LaMarzoccoLocalClient.validate_connection( - client=get_async_client(self.hass), + client=self._client, host=user_input[CONF_HOST], token=selected_device.communication_key, ): From 6a094746231d20b86978a57691a9bd732ae59e0c Mon Sep 17 00:00:00 2001 From: Thomas55555 <59625598+Thomas55555@users.noreply.github.com> Date: Tue, 3 Dec 2024 15:11:15 +0100 Subject: [PATCH 0194/1198] Catch InverterReturnedError in APSystems (#131930) Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- .../components/apsystems/coordinator.py | 18 ++++++++++--- .../components/apsystems/strings.json | 5 ++++ tests/components/apsystems/test_init.py | 25 +++++++++++++++++++ 3 files changed, 44 insertions(+), 4 deletions(-) 
create mode 100644 tests/components/apsystems/test_init.py diff --git a/homeassistant/components/apsystems/coordinator.py b/homeassistant/components/apsystems/coordinator.py index b6e951343f7..e56cb826840 100644 --- a/homeassistant/components/apsystems/coordinator.py +++ b/homeassistant/components/apsystems/coordinator.py @@ -5,12 +5,17 @@ from __future__ import annotations from dataclasses import dataclass from datetime import timedelta -from APsystemsEZ1 import APsystemsEZ1M, ReturnAlarmInfo, ReturnOutputData +from APsystemsEZ1 import ( + APsystemsEZ1M, + InverterReturnedError, + ReturnAlarmInfo, + ReturnOutputData, +) from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import LOGGER +from .const import DOMAIN, LOGGER @dataclass @@ -43,6 +48,11 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]): self.api.min_power = device_info.minPower async def _async_update_data(self) -> ApSystemsSensorData: - output_data = await self.api.get_output_data() - alarm_info = await self.api.get_alarm_info() + try: + output_data = await self.api.get_output_data() + alarm_info = await self.api.get_alarm_info() + except InverterReturnedError: + raise UpdateFailed( + translation_domain=DOMAIN, translation_key="inverter_error" + ) from None return ApSystemsSensorData(output_data=output_data, alarm_info=alarm_info) diff --git a/homeassistant/components/apsystems/strings.json b/homeassistant/components/apsystems/strings.json index e02f86c2730..b3a10ca49a7 100644 --- a/homeassistant/components/apsystems/strings.json +++ b/homeassistant/components/apsystems/strings.json @@ -72,5 +72,10 @@ "name": "Inverter status" } } + }, + "exceptions": { + "inverter_error": { + "message": "Inverter returned an error" + } } } diff --git a/tests/components/apsystems/test_init.py b/tests/components/apsystems/test_init.py new file mode 100644 index 00000000000..c85c4094e30 --- /dev/null +++ b/tests/components/apsystems/test_init.py @@ -0,0 +1,25 @@ +"""Test the APSystem setup.""" + +from unittest.mock import AsyncMock + +from APsystemsEZ1 import InverterReturnedError + +from homeassistant.components.apsystems.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry + + +async def test_update_failed( + hass: HomeAssistant, + mock_apsystems: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test update failed.""" + mock_apsystems.get_output_data.side_effect = InverterReturnedError + await setup_integration(hass, mock_config_entry) + entry = hass.config_entries.async_entries(DOMAIN)[0] + assert entry.state is ConfigEntryState.SETUP_RETRY From 7c9b8552cb932bf53235b9a6a5e40803885a78b8 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 3 Dec 2024 15:21:41 +0100 Subject: [PATCH 0195/1198] Reapply "bump hassil and intents" (#132138) (#132151) This reverts commit 39b2cf6ed27d32f70c1b259ed406f6e9b654709c. 
--- homeassistant/components/conversation/manifest.json | 2 +- homeassistant/package_constraints.txt | 4 ++-- requirements_all.txt | 4 ++-- requirements_test_all.txt | 4 ++-- script/hassfest/docker/Dockerfile | 2 +- .../conversation/snapshots/test_default_agent.ambr | 6 +++--- tests/components/conversation/snapshots/test_http.ambr | 4 ++-- tests/components/conversation/test_default_agent.py | 2 +- tests/testing_config/custom_sentences/en/beer.yaml | 4 ++-- 9 files changed, 16 insertions(+), 16 deletions(-) diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index 26265a37cce..2d2f2f58a3a 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", "quality_scale": "internal", - "requirements": ["hassil==2.0.4", "home-assistant-intents==2024.11.27"] + "requirements": ["hassil==2.0.5", "home-assistant-intents==2024.12.2"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index cec61d18fbc..cdb4ed38a41 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -32,10 +32,10 @@ go2rtc-client==0.1.1 ha-ffmpeg==3.2.2 habluetooth==3.6.0 hass-nabucasa==0.85.0 -hassil==2.0.4 +hassil==2.0.5 home-assistant-bluetooth==1.13.0 home-assistant-frontend==20241127.2 -home-assistant-intents==2024.11.27 +home-assistant-intents==2024.12.2 httpx==0.27.2 ifaddr==0.2.0 Jinja2==3.1.4 diff --git a/requirements_all.txt b/requirements_all.txt index 70d5dc6c555..725577a728a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1093,7 +1093,7 @@ hass-nabucasa==0.85.0 hass-splunk==0.1.1 # homeassistant.components.conversation -hassil==2.0.4 +hassil==2.0.5 # homeassistant.components.jewish_calendar hdate==0.11.1 @@ -1130,7 +1130,7 @@ holidays==0.61 home-assistant-frontend==20241127.2 # homeassistant.components.conversation -home-assistant-intents==2024.11.27 +home-assistant-intents==2024.12.2 # homeassistant.components.home_connect homeconnect==0.8.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 2e3544dc7c8..cf4623f61c1 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -928,7 +928,7 @@ habluetooth==3.6.0 hass-nabucasa==0.85.0 # homeassistant.components.conversation -hassil==2.0.4 +hassil==2.0.5 # homeassistant.components.jewish_calendar hdate==0.11.1 @@ -956,7 +956,7 @@ holidays==0.61 home-assistant-frontend==20241127.2 # homeassistant.components.conversation -home-assistant-intents==2024.11.27 +home-assistant-intents==2024.12.2 # homeassistant.components.home_connect homeconnect==0.8.0 diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index b6fbbdd1172..38b8ba5e8d0 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -23,7 +23,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.5.4,source=/uv,target=/bin/uv \ -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ -r /usr/src/homeassistant/requirements.txt \ stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.1 \ - PyTurboJPEG==1.7.5 go2rtc-client==0.1.1 ha-ffmpeg==3.2.2 hassil==2.0.4 home-assistant-intents==2024.11.27 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 + PyTurboJPEG==1.7.5 go2rtc-client==0.1.1 ha-ffmpeg==3.2.2 hassil==2.0.5 home-assistant-intents==2024.12.2 mutagen==1.47.0 
pymicro-vad==1.0.1 pyspeex-noise==1.0.2 LABEL "name"="hassfest" LABEL "maintainer"="Home Assistant " diff --git a/tests/components/conversation/snapshots/test_default_agent.ambr b/tests/components/conversation/snapshots/test_default_agent.ambr index b1f2ea0db75..f1e220b10b2 100644 --- a/tests/components/conversation/snapshots/test_default_agent.ambr +++ b/tests/components/conversation/snapshots/test_default_agent.ambr @@ -308,7 +308,7 @@ 'speech': dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called late added light', + 'speech': 'Sorry, I am not aware of any area called late added', }), }), }), @@ -378,7 +378,7 @@ 'speech': dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called kitchen light', + 'speech': 'Sorry, I am not aware of any area called kitchen', }), }), }), @@ -428,7 +428,7 @@ 'speech': dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called renamed light', + 'speech': 'Sorry, I am not aware of any area called renamed', }), }), }), diff --git a/tests/components/conversation/snapshots/test_http.ambr b/tests/components/conversation/snapshots/test_http.ambr index 966abd63d78..a3edd4fa51c 100644 --- a/tests/components/conversation/snapshots/test_http.ambr +++ b/tests/components/conversation/snapshots/test_http.ambr @@ -535,7 +535,7 @@ 'name': 'HassTurnOn', }), 'match': True, - 'sentence_template': ' on [all] in ', + 'sentence_template': ' on [] ', 'slots': dict({ 'area': 'kitchen', 'domain': 'light', @@ -606,7 +606,7 @@ 'name': 'OrderBeer', }), 'match': True, - 'sentence_template': "I'd like to order a {beer_style} [please]", + 'sentence_template': "[I'd like to ]order a {beer_style} [please]", 'slots': dict({ 'beer_style': 'lager', }), diff --git a/tests/components/conversation/test_default_agent.py b/tests/components/conversation/test_default_agent.py index 20fa41944f2..dab1e61ab81 100644 --- a/tests/components/conversation/test_default_agent.py +++ b/tests/components/conversation/test_default_agent.py @@ -2930,7 +2930,7 @@ async def test_intent_cache_fuzzy(hass: HomeAssistant) -> None: ) result = await agent.async_recognize_intent(user_input) assert result is not None - assert result.unmatched_entities["name"].text == "test light" + assert result.unmatched_entities["area"].text == "test " # Mark this result so we know it is from cache next time mark = "_from_cache" diff --git a/tests/testing_config/custom_sentences/en/beer.yaml b/tests/testing_config/custom_sentences/en/beer.yaml index f318e0221b2..7222ffcb0ca 100644 --- a/tests/testing_config/custom_sentences/en/beer.yaml +++ b/tests/testing_config/custom_sentences/en/beer.yaml @@ -3,11 +3,11 @@ intents: OrderBeer: data: - sentences: - - "I'd like to order a {beer_style} [please]" + - "[I'd like to ]order a {beer_style} [please]" OrderFood: data: - sentences: - - "I'd like to order {food_name:name} [please]" + - "[I'd like to ]order {food_name:name} [please]" lists: beer_style: values: From 6fc4f45def84ea19b18b1dbcaa4b2eff4638f623 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 3 Dec 2024 15:24:05 +0100 Subject: [PATCH 0196/1198] Dump pip freeze in CI (#132173) * Dump pip freeze in CI * adjust * adjust * adjust * Include python version --- .github/workflows/ci.yaml | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index a0ac973e960..aa393e59063 100644 --- 
a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -485,7 +485,6 @@ jobs: uses: actions/cache@v4.1.2 with: path: venv - lookup-only: true key: >- ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{ needs.info.outputs.python_cache_key }} @@ -531,6 +530,23 @@ jobs: python -m script.gen_requirements_all ci uv pip install -r requirements_all_pytest.txt -r requirements_test.txt uv pip install -e . --config-settings editable_mode=compat + - name: Dump pip freeze + run: | + python -m venv venv + . venv/bin/activate + python --version + uv pip freeze >> pip_freeze.txt + - name: Upload pip_freeze artifact + uses: actions/upload-artifact@v4.4.3 + with: + name: pip-freeze-${{ matrix.python-version }} + path: pip_freeze.txt + overwrite: true + - name: Remove pip_freeze + run: rm pip_freeze.txt + - name: Check dirty + run: | + ./script/check_dirty hassfest: name: Check hassfest From 7ae80b542a9cc1899e229b6e5b486b1f00f83f2b Mon Sep 17 00:00:00 2001 From: Jan-Philipp Benecke Date: Tue, 3 Dec 2024 15:48:56 +0100 Subject: [PATCH 0197/1198] Use typed config entry in SABnzbd coordinator (#132098) --- homeassistant/components/sabnzbd/__init__.py | 6 ++---- homeassistant/components/sabnzbd/binary_sensor.py | 2 +- homeassistant/components/sabnzbd/button.py | 3 +-- homeassistant/components/sabnzbd/coordinator.py | 6 ++++-- homeassistant/components/sabnzbd/number.py | 3 +-- homeassistant/components/sabnzbd/sensor.py | 2 +- 6 files changed, 10 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/sabnzbd/__init__.py b/homeassistant/components/sabnzbd/__init__.py index cf2eb5d0a7d..e6a99c858c3 100644 --- a/homeassistant/components/sabnzbd/__init__.py +++ b/homeassistant/components/sabnzbd/__init__.py @@ -8,7 +8,7 @@ from typing import Any import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.config_entries import ConfigEntryState from homeassistant.const import Platform from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError @@ -24,7 +24,7 @@ from .const import ( SERVICE_RESUME, SERVICE_SET_SPEED, ) -from .coordinator import SabnzbdUpdateCoordinator +from .coordinator import SabnzbdConfigEntry, SabnzbdUpdateCoordinator from .helpers import get_client PLATFORMS = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.NUMBER, Platform.SENSOR] @@ -48,8 +48,6 @@ SERVICE_SPEED_SCHEMA = SERVICE_BASE_SCHEMA.extend( } ) -type SabnzbdConfigEntry = ConfigEntry[SabnzbdUpdateCoordinator] - @callback def async_get_entry_for_service_call( diff --git a/homeassistant/components/sabnzbd/binary_sensor.py b/homeassistant/components/sabnzbd/binary_sensor.py index 8b1b1c37c89..1d65bf01211 100644 --- a/homeassistant/components/sabnzbd/binary_sensor.py +++ b/homeassistant/components/sabnzbd/binary_sensor.py @@ -15,7 +15,7 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import SabnzbdConfigEntry +from .coordinator import SabnzbdConfigEntry from .entity import SabnzbdEntity diff --git a/homeassistant/components/sabnzbd/button.py b/homeassistant/components/sabnzbd/button.py index 79038e84775..1ff26b41655 100644 --- a/homeassistant/components/sabnzbd/button.py +++ b/homeassistant/components/sabnzbd/button.py @@ -11,9 +11,8 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import SabnzbdConfigEntry from .const import DOMAIN -from .coordinator import SabnzbdUpdateCoordinator +from .coordinator import SabnzbdConfigEntry, SabnzbdUpdateCoordinator from .entity import SabnzbdEntity diff --git a/homeassistant/components/sabnzbd/coordinator.py b/homeassistant/components/sabnzbd/coordinator.py index 14f7c18e38c..dac8d8a8e95 100644 --- a/homeassistant/components/sabnzbd/coordinator.py +++ b/homeassistant/components/sabnzbd/coordinator.py @@ -12,16 +12,18 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda _LOGGER = logging.getLogger(__name__) +type SabnzbdConfigEntry = ConfigEntry[SabnzbdUpdateCoordinator] + class SabnzbdUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): """The SABnzbd update coordinator.""" - config_entry: ConfigEntry + config_entry: SabnzbdConfigEntry def __init__( self, hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: SabnzbdConfigEntry, sab_api: SabnzbdApi, ) -> None: """Initialize the SABnzbd update coordinator.""" diff --git a/homeassistant/components/sabnzbd/number.py b/homeassistant/components/sabnzbd/number.py index d8536cb6b37..53c8d462f11 100644 --- a/homeassistant/components/sabnzbd/number.py +++ b/homeassistant/components/sabnzbd/number.py @@ -17,9 +17,8 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import SabnzbdConfigEntry from .const import DOMAIN -from .coordinator import SabnzbdUpdateCoordinator +from .coordinator import SabnzbdConfigEntry, SabnzbdUpdateCoordinator from .entity import SabnzbdEntity diff --git a/homeassistant/components/sabnzbd/sensor.py b/homeassistant/components/sabnzbd/sensor.py index 115b9de3793..662ae739d15 100644 --- a/homeassistant/components/sabnzbd/sensor.py +++ b/homeassistant/components/sabnzbd/sensor.py @@ -15,7 +15,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from . import SabnzbdConfigEntry +from .coordinator import SabnzbdConfigEntry from .entity import SabnzbdEntity From 33db95f6be9d4f08cbea823e3a52efecb27e14a3 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 3 Dec 2024 09:03:43 -0600 Subject: [PATCH 0198/1198] Bump PyJWT to 2.10.1 (#132100) --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index cdb4ed38a41..16047fe8648 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -47,7 +47,7 @@ paho-mqtt==1.6.1 Pillow==11.0.0 propcache==0.2.1 psutil-home-assistant==0.0.1 -PyJWT==2.10.0 +PyJWT==2.10.1 pymicro-vad==1.0.1 PyNaCl==1.5.0 pyOpenSSL==24.3.0 diff --git a/pyproject.toml b/pyproject.toml index 9aa53920318..1cd7cb878d6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,7 +53,7 @@ dependencies = [ "ifaddr==0.2.0", "Jinja2==3.1.4", "lru-dict==1.3.0", - "PyJWT==2.10.0", + "PyJWT==2.10.1", # PyJWT has loose dependency. We want the latest one. "cryptography==44.0.0", "Pillow==11.0.0", diff --git a/requirements.txt b/requirements.txt index d0e2be91a99..e4aa6dc121a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -25,7 +25,7 @@ home-assistant-bluetooth==1.13.0 ifaddr==0.2.0 Jinja2==3.1.4 lru-dict==1.3.0 -PyJWT==2.10.0 +PyJWT==2.10.1 cryptography==44.0.0 Pillow==11.0.0 propcache==0.2.1 From 92f38ef1a1d4efbdcbb68199968cfdd245fc0da8 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Tue, 3 Dec 2024 16:13:15 +0100 Subject: [PATCH 0199/1198] Update frontend to 20241127.3 (#132176) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index f59ca05ba55..264f0756b82 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20241127.2"] + "requirements": ["home-assistant-frontend==20241127.3"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 16047fe8648..503937a44cb 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -34,7 +34,7 @@ habluetooth==3.6.0 hass-nabucasa==0.85.0 hassil==2.0.5 home-assistant-bluetooth==1.13.0 -home-assistant-frontend==20241127.2 +home-assistant-frontend==20241127.3 home-assistant-intents==2024.12.2 httpx==0.27.2 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 725577a728a..60b1c7aef3d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1127,7 +1127,7 @@ hole==0.8.0 holidays==0.61 # homeassistant.components.frontend -home-assistant-frontend==20241127.2 +home-assistant-frontend==20241127.3 # homeassistant.components.conversation home-assistant-intents==2024.12.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index cf4623f61c1..4494cf8d790 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -953,7 +953,7 @@ hole==0.8.0 holidays==0.61 # homeassistant.components.frontend -home-assistant-frontend==20241127.2 +home-assistant-frontend==20241127.3 # homeassistant.components.conversation home-assistant-intents==2024.12.2 From 56fc8a1f922354c18553a5b3029ccf4816821a30 Mon Sep 17 00:00:00 2001 From: 
G Johansson Date: Tue, 3 Dec 2024 16:20:48 +0100 Subject: [PATCH 0200/1198] Pass config entry directly to update coordinator in Sensibo (#132114) --- homeassistant/components/sensibo/__init__.py | 2 +- homeassistant/components/sensibo/coordinator.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/sensibo/__init__.py b/homeassistant/components/sensibo/__init__.py index b2b6ac15958..15ef3def1f5 100644 --- a/homeassistant/components/sensibo/__init__.py +++ b/homeassistant/components/sensibo/__init__.py @@ -21,7 +21,7 @@ type SensiboConfigEntry = ConfigEntry[SensiboDataUpdateCoordinator] async def async_setup_entry(hass: HomeAssistant, entry: SensiboConfigEntry) -> bool: """Set up Sensibo from a config entry.""" - coordinator = SensiboDataUpdateCoordinator(hass) + coordinator = SensiboDataUpdateCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() entry.runtime_data = coordinator diff --git a/homeassistant/components/sensibo/coordinator.py b/homeassistant/components/sensibo/coordinator.py index d654a7cb072..cfd40195de3 100644 --- a/homeassistant/components/sensibo/coordinator.py +++ b/homeassistant/components/sensibo/coordinator.py @@ -29,11 +29,12 @@ class SensiboDataUpdateCoordinator(DataUpdateCoordinator[SensiboData]): config_entry: SensiboConfigEntry - def __init__(self, hass: HomeAssistant) -> None: + def __init__(self, hass: HomeAssistant, config_entry: SensiboConfigEntry) -> None: """Initialize the Sensibo coordinator.""" super().__init__( hass, LOGGER, + config_entry=config_entry, name=DOMAIN, update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL), # We don't want an immediate refresh since the device From 13e4c51ce5ebbfa35010c0d1beb825de98d8e4e6 Mon Sep 17 00:00:00 2001 From: Raphael Hehl <7577984+RaHehl@users.noreply.github.com> Date: Tue, 3 Dec 2024 17:03:43 +0100 Subject: [PATCH 0201/1198] Bump uiprotect to 6.6.5 (#132147) --- homeassistant/components/unifiprotect/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/unifiprotect/manifest.json b/homeassistant/components/unifiprotect/manifest.json index 9730c1e3741..e8a8c062800 100644 --- a/homeassistant/components/unifiprotect/manifest.json +++ b/homeassistant/components/unifiprotect/manifest.json @@ -40,7 +40,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["uiprotect", "unifi_discovery"], - "requirements": ["uiprotect==6.6.4", "unifi-discovery==1.2.0"], + "requirements": ["uiprotect==6.6.5", "unifi-discovery==1.2.0"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/requirements_all.txt b/requirements_all.txt index 60b1c7aef3d..06e184246b2 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2897,7 +2897,7 @@ typedmonarchmoney==0.3.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.6.4 +uiprotect==6.6.5 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4494cf8d790..52dcb44e47d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2313,7 +2313,7 @@ typedmonarchmoney==0.3.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.6.4 +uiprotect==6.6.5 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 From 208b14dd2bd96e83cd7090a37459cbe0d09bd04f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ab=C3=ADlio=20Costa?= Date: Tue, 3 Dec 2024 16:08:09 
+0000 Subject: [PATCH 0202/1198] Use translations on NumberEntity unit_of_measurement property (#132095) Co-authored-by: Martin Hjelmare --- homeassistant/components/number/__init__.py | 12 ++++ homeassistant/components/sensor/__init__.py | 16 ----- homeassistant/helpers/entity.py | 16 +++++ tests/components/number/test_init.py | 65 ++++++++++++++++++++- 4 files changed, 92 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/number/__init__.py b/homeassistant/components/number/__init__.py index dc169fcb348..9f4aef08aa9 100644 --- a/homeassistant/components/number/__init__.py +++ b/homeassistant/components/number/__init__.py @@ -384,6 +384,18 @@ class NumberEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): ): return self.hass.config.units.temperature_unit + if (translation_key := self._unit_of_measurement_translation_key) and ( + unit_of_measurement + := self.platform.default_language_platform_translations.get(translation_key) + ): + if native_unit_of_measurement is not None: + raise ValueError( + f"Number entity {type(self)} from integration '{self.platform.platform_name}' " + f"has a translation key for unit_of_measurement '{unit_of_measurement}', " + f"but also has a native_unit_of_measurement '{native_unit_of_measurement}'" + ) + return unit_of_measurement + return native_unit_of_measurement @cached_property diff --git a/homeassistant/components/sensor/__init__.py b/homeassistant/components/sensor/__init__.py index 064914a5dfa..2933d779b4b 100644 --- a/homeassistant/components/sensor/__init__.py +++ b/homeassistant/components/sensor/__init__.py @@ -467,22 +467,6 @@ class SensorEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): return self.entity_description.suggested_unit_of_measurement return None - @cached_property - def _unit_of_measurement_translation_key(self) -> str | None: - """Return translation key for unit of measurement.""" - if self.translation_key is None: - return None - if self.platform is None: - raise ValueError( - f"Sensor {type(self)} cannot have a translation key for " - "unit of measurement before being added to the entity platform" - ) - platform = self.platform - return ( - f"component.{platform.platform_name}.entity.{platform.domain}" - f".{self.translation_key}.unit_of_measurement" - ) - @final @property @override diff --git a/homeassistant/helpers/entity.py b/homeassistant/helpers/entity.py index 1f77dd3f95c..19076c4edc0 100644 --- a/homeassistant/helpers/entity.py +++ b/homeassistant/helpers/entity.py @@ -647,6 +647,22 @@ class Entity( f".{self.translation_key}.name" ) + @cached_property + def _unit_of_measurement_translation_key(self) -> str | None: + """Return translation key for unit of measurement.""" + if self.translation_key is None: + return None + if self.platform is None: + raise ValueError( + f"Entity {type(self)} cannot have a translation key for " + "unit of measurement before being added to the entity platform" + ) + platform = self.platform + return ( + f"component.{platform.platform_name}.entity.{platform.domain}" + f".{self.translation_key}.unit_of_measurement" + ) + def _substitute_name_placeholders(self, name: str) -> str: """Substitute placeholders in entity name.""" try: diff --git a/tests/components/number/test_init.py b/tests/components/number/test_init.py index 721b531e8cd..31d99dc55d7 100644 --- a/tests/components/number/test_init.py +++ b/tests/components/number/test_init.py @@ -2,7 +2,7 @@ from collections.abc import Generator from typing import Any -from unittest.mock import MagicMock +from 
unittest.mock import MagicMock, patch import pytest @@ -836,6 +836,69 @@ async def test_custom_unit_change( assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == default_unit +async def test_translated_unit( + hass: HomeAssistant, +) -> None: + """Test translated unit.""" + + with patch( + "homeassistant.helpers.service.translation.async_get_translations", + return_value={ + "component.test.entity.number.test_translation_key.unit_of_measurement": "Tests" + }, + ): + entity0 = common.MockNumberEntity( + name="Test", + native_value=123, + unique_id="very_unique", + ) + entity0.entity_description = NumberEntityDescription( + "test", + translation_key="test_translation_key", + ) + setup_test_component_platform(hass, DOMAIN, [entity0]) + + assert await async_setup_component( + hass, "number", {"number": {"platform": "test"}} + ) + await hass.async_block_till_done() + + entity_id = entity0.entity_id + state = hass.states.get(entity_id) + assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == "Tests" + + +async def test_translated_unit_with_native_unit_raises( + hass: HomeAssistant, +) -> None: + """Test that translated unit.""" + + with patch( + "homeassistant.helpers.service.translation.async_get_translations", + return_value={ + "component.test.entity.number.test_translation_key.unit_of_measurement": "Tests" + }, + ): + entity0 = common.MockNumberEntity( + name="Test", + native_value=123, + unique_id="very_unique", + ) + entity0.entity_description = NumberEntityDescription( + "test", + translation_key="test_translation_key", + native_unit_of_measurement="bad_unit", + ) + setup_test_component_platform(hass, DOMAIN, [entity0]) + + assert await async_setup_component( + hass, "number", {"number": {"platform": "test"}} + ) + await hass.async_block_till_done() + # Setup fails so entity_id is None + assert entity0.entity_id is None + + def test_device_classes_aligned() -> None: """Make sure all sensor device classes are also available in NumberDeviceClass.""" From 6c98cd49ea59150f368db6df57513156359b038b Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 3 Dec 2024 18:03:13 +0100 Subject: [PATCH 0203/1198] Fix check dirty in Prepare dependencies CI (#132180) --- .github/workflows/ci.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index aa393e59063..34c2fa838a6 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -544,6 +544,9 @@ jobs: overwrite: true - name: Remove pip_freeze run: rm pip_freeze.txt + - name: Remove generated requirements_all + if: steps.cache-venv.outputs.cache-hit != 'true' + run: rm requirements_all_pytest.txt requirements_all_wheels_*.txt - name: Check dirty run: | ./script/check_dirty From e401fee3da81324695cf68b64fb74e73bf2241c1 Mon Sep 17 00:00:00 2001 From: Austin Mroczek Date: Tue, 3 Dec 2024 09:43:49 -0800 Subject: [PATCH 0204/1198] Add initial quality scale for TotalConnect (#132012) --- .../totalconnect/quality_scale.yaml | 62 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 62 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/totalconnect/quality_scale.yaml diff --git a/homeassistant/components/totalconnect/quality_scale.yaml b/homeassistant/components/totalconnect/quality_scale.yaml new file mode 100644 index 00000000000..e52011d7d48 --- /dev/null +++ b/homeassistant/components/totalconnect/quality_scale.yaml @@ -0,0 +1,62 @@ +rules: + # Bronze + config-flow: todo + test-before-configure: done + 
unique-config-entry: done + config-flow-test-coverage: todo + runtime-data: todo + test-before-setup: todo + appropriate-polling: done + entity-unique-id: done + has-entity-name: done + entity-event-setup: todo + dependency-transparency: todo + action-setup: todo + common-modules: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + docs-actions: done + brands: done + + # Silver + config-entry-unloading: done + log-when-unavailable: todo + entity-unavailable: todo + action-exceptions: todo + reauthentication-flow: done + parallel-updates: todo + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: done + + # Gold + entity-translations: done + entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: done + discovery: todo + stale-devices: todo + diagnostics: done + exception-translations: todo + icon-translations: todo + reconfiguration-flow: todo + dynamic-devices: todo + discovery-update-info: todo + repair-issues: todo + docs-use-cases: done + + # stopped here.... + docs-supported-devices: todo + docs-supported-functions: todo + docs-data-update: todo + docs-known-limitations: todo + docs-troubleshooting: todo + docs-examples: done + + # Platinum + async-dependency: todo + inject-websession: todo + strict-typing: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 4f3c7ea7cbc..95b35f63e50 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -1073,7 +1073,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "tomorrowio", "toon", "torque", - "totalconnect", "touchline", "touchline_sl", "tplink", From 74b713fa97623b2a03fbae3a2a42eb2b15e8b9af Mon Sep 17 00:00:00 2001 From: lunmay <28674102+lunmay@users.noreply.github.com> Date: Tue, 3 Dec 2024 19:31:28 +0100 Subject: [PATCH 0205/1198] Fix typo in exception message in google_photos integration (#132194) --- homeassistant/components/google_photos/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/google_photos/strings.json b/homeassistant/components/google_photos/strings.json index bd565a6122d..fa3f4669dac 100644 --- a/homeassistant/components/google_photos/strings.json +++ b/homeassistant/components/google_photos/strings.json @@ -48,7 +48,7 @@ "message": "`{filename}` is not an image" }, "missing_upload_permission": { - "message": "Home Assistnt was not granted permission to upload to Google Photos" + "message": "Home Assistant was not granted permission to upload to Google Photos" }, "upload_error": { "message": "Failed to upload content: {message}" From ab83ec61e0f28dec725547a5af198be3e64d1cc6 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Tue, 3 Dec 2024 12:37:05 -0600 Subject: [PATCH 0206/1198] Ensure entity names are not hassil templates (#132184) --- .../components/conversation/default_agent.py | 2 +- .../conversation/test_default_agent.py | 36 +++++++++++++++++++ 2 files changed, 37 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/conversation/default_agent.py b/homeassistant/components/conversation/default_agent.py index 59c09232b93..624fa3c3555 100644 --- a/homeassistant/components/conversation/default_agent.py +++ b/homeassistant/components/conversation/default_agent.py @@ -711,7 +711,7 @@ class DefaultAgent(ConversationEntity): for name_tuple in self._get_entity_name_tuples(exposed=False): self._unexposed_names_trie.insert( 
name_tuple[0].lower(), - TextSlotValue.from_tuple(name_tuple), + TextSlotValue.from_tuple(name_tuple, allow_template=False), ) # Build filtered slot list diff --git a/tests/components/conversation/test_default_agent.py b/tests/components/conversation/test_default_agent.py index dab1e61ab81..58d2b0d48bf 100644 --- a/tests/components/conversation/test_default_agent.py +++ b/tests/components/conversation/test_default_agent.py @@ -3013,3 +3013,39 @@ async def test_entities_filtered_by_input(hass: HomeAssistant) -> None: assert len(name_list.values) == 2 assert name_list.values[0].text_in.text == "test light" assert name_list.values[1].text_in.text == "test light" + + +@pytest.mark.usefixtures("init_components") +async def test_entities_names_are_not_templates(hass: HomeAssistant) -> None: + """Test that entities names are not treated as hassil templates.""" + # Contains hassil template characters + hass.states.async_set( + "light.test_light", "off", attributes={ATTR_FRIENDLY_NAME: " Date: Tue, 3 Dec 2024 21:18:54 +0100 Subject: [PATCH 0207/1198] Refactor roomba to set vacuums in vacuum file (#132102) --- homeassistant/components/roomba/braava.py | 128 ------- homeassistant/components/roomba/entity.py | 187 +---------- homeassistant/components/roomba/roomba.py | 89 ----- homeassistant/components/roomba/vacuum.py | 388 +++++++++++++++++++++- 4 files changed, 387 insertions(+), 405 deletions(-) delete mode 100644 homeassistant/components/roomba/braava.py delete mode 100644 homeassistant/components/roomba/roomba.py diff --git a/homeassistant/components/roomba/braava.py b/homeassistant/components/roomba/braava.py deleted file mode 100644 index 8744561b2c5..00000000000 --- a/homeassistant/components/roomba/braava.py +++ /dev/null @@ -1,128 +0,0 @@ -"""Class for Braava devices.""" - -import logging - -from homeassistant.components.vacuum import VacuumEntityFeature - -from .entity import SUPPORT_IROBOT, IRobotVacuum - -_LOGGER = logging.getLogger(__name__) - -ATTR_DETECTED_PAD = "detected_pad" -ATTR_LID_CLOSED = "lid_closed" -ATTR_TANK_PRESENT = "tank_present" -ATTR_TANK_LEVEL = "tank_level" -ATTR_PAD_WETNESS = "spray_amount" - -OVERLAP_STANDARD = 67 -OVERLAP_DEEP = 85 -OVERLAP_EXTENDED = 25 -MOP_STANDARD = "Standard" -MOP_DEEP = "Deep" -MOP_EXTENDED = "Extended" -BRAAVA_MOP_BEHAVIORS = [MOP_STANDARD, MOP_DEEP, MOP_EXTENDED] -BRAAVA_SPRAY_AMOUNT = [1, 2, 3] - -# Braava Jets can set mopping behavior through fanspeed -SUPPORT_BRAAVA = SUPPORT_IROBOT | VacuumEntityFeature.FAN_SPEED - - -class BraavaJet(IRobotVacuum): # pylint: disable=hass-enforce-class-module - """Braava Jet.""" - - _attr_supported_features = SUPPORT_BRAAVA - - def __init__(self, roomba, blid): - """Initialize the Roomba handler.""" - super().__init__(roomba, blid) - - # Initialize fan speed list - self._attr_fan_speed_list = [ - f"{behavior}-{spray}" - for behavior in BRAAVA_MOP_BEHAVIORS - for spray in BRAAVA_SPRAY_AMOUNT - ] - - @property - def fan_speed(self): - """Return the fan speed of the vacuum cleaner.""" - # Mopping behavior and spray amount as fan speed - rank_overlap = self.vacuum_state.get("rankOverlap", {}) - behavior = None - if rank_overlap == OVERLAP_STANDARD: - behavior = MOP_STANDARD - elif rank_overlap == OVERLAP_DEEP: - behavior = MOP_DEEP - elif rank_overlap == OVERLAP_EXTENDED: - behavior = MOP_EXTENDED - pad_wetness = self.vacuum_state.get("padWetness", {}) - # "disposable" and "reusable" values are always the same - pad_wetness_value = pad_wetness.get("disposable") - return f"{behavior}-{pad_wetness_value}" - - 
async def async_set_fan_speed(self, fan_speed, **kwargs): - """Set fan speed.""" - try: - split = fan_speed.split("-", 1) - behavior = split[0] - spray = int(split[1]) - if behavior.capitalize() in BRAAVA_MOP_BEHAVIORS: - behavior = behavior.capitalize() - except IndexError: - _LOGGER.error( - "Fan speed error: expected {behavior}-{spray_amount}, got '%s'", - fan_speed, - ) - return - except ValueError: - _LOGGER.error("Spray amount error: expected integer, got '%s'", split[1]) - return - if behavior not in BRAAVA_MOP_BEHAVIORS: - _LOGGER.error( - "Mop behavior error: expected one of %s, got '%s'", - str(BRAAVA_MOP_BEHAVIORS), - behavior, - ) - return - if spray not in BRAAVA_SPRAY_AMOUNT: - _LOGGER.error( - "Spray amount error: expected one of %s, got '%d'", - str(BRAAVA_SPRAY_AMOUNT), - spray, - ) - return - - overlap = 0 - if behavior == MOP_STANDARD: - overlap = OVERLAP_STANDARD - elif behavior == MOP_DEEP: - overlap = OVERLAP_DEEP - else: - overlap = OVERLAP_EXTENDED - await self.hass.async_add_executor_job( - self.vacuum.set_preference, "rankOverlap", overlap - ) - await self.hass.async_add_executor_job( - self.vacuum.set_preference, - "padWetness", - {"disposable": spray, "reusable": spray}, - ) - - @property - def extra_state_attributes(self): - """Return the state attributes of the device.""" - state_attrs = super().extra_state_attributes - - # Get Braava state - state = self.vacuum_state - detected_pad = state.get("detectedPad") - mop_ready = state.get("mopReady", {}) - lid_closed = mop_ready.get("lidClosed") - tank_present = mop_ready.get("tankPresent") - tank_level = state.get("tankLvl") - state_attrs[ATTR_DETECTED_PAD] = detected_pad - state_attrs[ATTR_LID_CLOSED] = lid_closed - state_attrs[ATTR_TANK_PRESENT] = tank_present - state_attrs[ATTR_TANK_LEVEL] = tank_level - - return state_attrs diff --git a/homeassistant/components/roomba/entity.py b/homeassistant/components/roomba/entity.py index 10c3d36de12..d55a260e53a 100644 --- a/homeassistant/components/roomba/entity.py +++ b/homeassistant/components/roomba/entity.py @@ -2,62 +2,15 @@ from __future__ import annotations -import asyncio -import logging - -from homeassistant.components.vacuum import ( - ATTR_STATUS, - STATE_CLEANING, - STATE_DOCKED, - STATE_ERROR, - STATE_RETURNING, - StateVacuumEntity, - VacuumEntityFeature, -) -from homeassistant.const import ATTR_CONNECTIONS, STATE_IDLE, STATE_PAUSED +from homeassistant.const import ATTR_CONNECTIONS import homeassistant.helpers.device_registry as dr from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import Entity import homeassistant.util.dt as dt_util -from homeassistant.util.unit_system import METRIC_SYSTEM from . 
import roomba_reported_state from .const import DOMAIN -_LOGGER = logging.getLogger(__name__) - -ATTR_CLEANING_TIME = "cleaning_time" -ATTR_CLEANED_AREA = "cleaned_area" -ATTR_ERROR = "error" -ATTR_ERROR_CODE = "error_code" -ATTR_POSITION = "position" -ATTR_SOFTWARE_VERSION = "software_version" - -# Commonly supported features -SUPPORT_IROBOT = ( - VacuumEntityFeature.BATTERY - | VacuumEntityFeature.PAUSE - | VacuumEntityFeature.RETURN_HOME - | VacuumEntityFeature.SEND_COMMAND - | VacuumEntityFeature.START - | VacuumEntityFeature.STATE - | VacuumEntityFeature.STOP - | VacuumEntityFeature.LOCATE -) - -STATE_MAP = { - "": STATE_IDLE, - "charge": STATE_DOCKED, - "evac": STATE_RETURNING, # Emptying at cleanbase - "hmMidMsn": STATE_CLEANING, # Recharging at the middle of a cycle - "hmPostMsn": STATE_RETURNING, # Cycle finished - "hmUsrDock": STATE_RETURNING, - "pause": STATE_PAUSED, - "run": STATE_CLEANING, - "stop": STATE_IDLE, - "stuck": STATE_ERROR, -} - class IRobotEntity(Entity): """Base class for iRobot Entities.""" @@ -65,7 +18,7 @@ class IRobotEntity(Entity): _attr_should_poll = False _attr_has_entity_name = True - def __init__(self, roomba, blid): + def __init__(self, roomba, blid) -> None: """Initialize the iRobot handler.""" self.vacuum = roomba self._blid = blid @@ -127,20 +80,6 @@ class IRobotEntity(Entity): return None return dt_util.utc_from_timestamp(ts) - @property - def _robot_state(self): - """Return the state of the vacuum cleaner.""" - clean_mission_status = self.vacuum_state.get("cleanMissionStatus", {}) - cycle = clean_mission_status.get("cycle") - phase = clean_mission_status.get("phase") - try: - state = STATE_MAP[phase] - except KeyError: - return STATE_ERROR - if cycle != "none" and state in (STATE_IDLE, STATE_DOCKED): - state = STATE_PAUSED - return state - async def async_added_to_hass(self): """Register callback function.""" self.vacuum.register_on_message_callback(self.on_message) @@ -154,125 +93,3 @@ class IRobotEntity(Entity): state = json_data.get("state", {}).get("reported", {}) if self.new_state_filter(state): self.schedule_update_ha_state() - - -class IRobotVacuum(IRobotEntity, StateVacuumEntity): # pylint: disable=hass-enforce-class-module - """Base class for iRobot robots.""" - - _attr_name = None - _attr_supported_features = SUPPORT_IROBOT - _attr_available = True # Always available, otherwise setup will fail - - def __init__(self, roomba, blid): - """Initialize the iRobot handler.""" - super().__init__(roomba, blid) - self._cap_position = self.vacuum_state.get("cap", {}).get("pose") == 1 - - @property - def state(self): - """Return the state of the vacuum cleaner.""" - return self._robot_state - - @property - def extra_state_attributes(self): - """Return the state attributes of the device.""" - state = self.vacuum_state - - # Roomba software version - software_version = state.get("softwareVer") - - # Set properties that are to appear in the GUI - state_attrs = {ATTR_SOFTWARE_VERSION: software_version} - - # Set legacy status to avoid break changes - state_attrs[ATTR_STATUS] = self.vacuum.current_state - - # Only add cleaning time and cleaned area attrs when the vacuum is - # currently on - if self.state == STATE_CLEANING: - # Get clean mission status - ( - state_attrs[ATTR_CLEANING_TIME], - state_attrs[ATTR_CLEANED_AREA], - ) = self.get_cleaning_status(state) - - # Error - if self.vacuum.error_code != 0: - state_attrs[ATTR_ERROR] = self.vacuum.error_message - state_attrs[ATTR_ERROR_CODE] = self.vacuum.error_code - - # Not all Roombas expose position data 
- # https://github.com/koalazak/dorita980/issues/48 - if self._cap_position: - pos_state = state.get("pose", {}) - position = None - pos_x = pos_state.get("point", {}).get("x") - pos_y = pos_state.get("point", {}).get("y") - theta = pos_state.get("theta") - if all(item is not None for item in (pos_x, pos_y, theta)): - position = f"({pos_x}, {pos_y}, {theta})" - state_attrs[ATTR_POSITION] = position - - return state_attrs - - def get_cleaning_status(self, state) -> tuple[int, int]: - """Return the cleaning time and cleaned area from the device.""" - if not (mission_state := state.get("cleanMissionStatus")): - return (0, 0) - - if cleaning_time := mission_state.get("mssnM", 0): - pass - elif start_time := mission_state.get("mssnStrtTm"): - now = dt_util.as_timestamp(dt_util.utcnow()) - if now > start_time: - cleaning_time = (now - start_time) // 60 - - if cleaned_area := mission_state.get("sqft", 0): # Imperial - # Convert to m2 if the unit_system is set to metric - if self.hass.config.units is METRIC_SYSTEM: - cleaned_area = round(cleaned_area * 0.0929) - - return (cleaning_time, cleaned_area) - - def on_message(self, json_data): - """Update state on message change.""" - state = json_data.get("state", {}).get("reported", {}) - if self.new_state_filter(state): - _LOGGER.debug("Got new state from the vacuum: %s", json_data) - self.schedule_update_ha_state() - - async def async_start(self): - """Start or resume the cleaning task.""" - if self.state == STATE_PAUSED: - await self.hass.async_add_executor_job(self.vacuum.send_command, "resume") - else: - await self.hass.async_add_executor_job(self.vacuum.send_command, "start") - - async def async_stop(self, **kwargs): - """Stop the vacuum cleaner.""" - await self.hass.async_add_executor_job(self.vacuum.send_command, "stop") - - async def async_pause(self): - """Pause the cleaning cycle.""" - await self.hass.async_add_executor_job(self.vacuum.send_command, "pause") - - async def async_return_to_base(self, **kwargs): - """Set the vacuum cleaner to return to the dock.""" - if self.state == STATE_CLEANING: - await self.async_pause() - for _ in range(10): - if self.state == STATE_PAUSED: - break - await asyncio.sleep(1) - await self.hass.async_add_executor_job(self.vacuum.send_command, "dock") - - async def async_locate(self, **kwargs): - """Located vacuum.""" - await self.hass.async_add_executor_job(self.vacuum.send_command, "find") - - async def async_send_command(self, command, params=None, **kwargs): - """Send raw command.""" - _LOGGER.debug("async_send_command %s (%s), %s", command, params, kwargs) - await self.hass.async_add_executor_job( - self.vacuum.send_command, command, params - ) diff --git a/homeassistant/components/roomba/roomba.py b/homeassistant/components/roomba/roomba.py deleted file mode 100644 index 917fbb2bfff..00000000000 --- a/homeassistant/components/roomba/roomba.py +++ /dev/null @@ -1,89 +0,0 @@ -"""Class for Roomba devices.""" - -import logging - -from homeassistant.components.vacuum import VacuumEntityFeature - -from .entity import SUPPORT_IROBOT, IRobotVacuum - -_LOGGER = logging.getLogger(__name__) - -ATTR_BIN_FULL = "bin_full" -ATTR_BIN_PRESENT = "bin_present" - -FAN_SPEED_AUTOMATIC = "Automatic" -FAN_SPEED_ECO = "Eco" -FAN_SPEED_PERFORMANCE = "Performance" -FAN_SPEEDS = [FAN_SPEED_AUTOMATIC, FAN_SPEED_ECO, FAN_SPEED_PERFORMANCE] - -# Only Roombas with CarpetBost can set their fanspeed -SUPPORT_ROOMBA_CARPET_BOOST = SUPPORT_IROBOT | VacuumEntityFeature.FAN_SPEED - - -class RoombaVacuum(IRobotVacuum): # pylint: 
disable=hass-enforce-class-module - """Basic Roomba robot (without carpet boost).""" - - @property - def extra_state_attributes(self): - """Return the state attributes of the device.""" - state_attrs = super().extra_state_attributes - - # Get bin state - bin_raw_state = self.vacuum_state.get("bin", {}) - bin_state = {} - if bin_raw_state.get("present") is not None: - bin_state[ATTR_BIN_PRESENT] = bin_raw_state.get("present") - if bin_raw_state.get("full") is not None: - bin_state[ATTR_BIN_FULL] = bin_raw_state.get("full") - state_attrs.update(bin_state) - - return state_attrs - - -class RoombaVacuumCarpetBoost(RoombaVacuum): # pylint: disable=hass-enforce-class-module - """Roomba robot with carpet boost.""" - - _attr_fan_speed_list = FAN_SPEEDS - _attr_supported_features = SUPPORT_ROOMBA_CARPET_BOOST - - @property - def fan_speed(self): - """Return the fan speed of the vacuum cleaner.""" - fan_speed = None - carpet_boost = self.vacuum_state.get("carpetBoost") - high_perf = self.vacuum_state.get("vacHigh") - if carpet_boost is not None and high_perf is not None: - if carpet_boost: - fan_speed = FAN_SPEED_AUTOMATIC - elif high_perf: - fan_speed = FAN_SPEED_PERFORMANCE - else: # carpet_boost and high_perf are False - fan_speed = FAN_SPEED_ECO - return fan_speed - - async def async_set_fan_speed(self, fan_speed, **kwargs): - """Set fan speed.""" - if fan_speed.capitalize() in FAN_SPEEDS: - fan_speed = fan_speed.capitalize() - _LOGGER.debug("Set fan speed to: %s", fan_speed) - high_perf = None - carpet_boost = None - if fan_speed == FAN_SPEED_AUTOMATIC: - high_perf = False - carpet_boost = True - elif fan_speed == FAN_SPEED_ECO: - high_perf = False - carpet_boost = False - elif fan_speed == FAN_SPEED_PERFORMANCE: - high_perf = True - carpet_boost = False - else: - _LOGGER.error("No such fan speed available: %s", fan_speed) - return - # The set_preference method does only accept string values - await self.hass.async_add_executor_job( - self.vacuum.set_preference, "carpetBoost", str(carpet_boost) - ) - await self.hass.async_add_executor_job( - self.vacuum.set_preference, "vacHigh", str(high_perf) - ) diff --git a/homeassistant/components/roomba/vacuum.py b/homeassistant/components/roomba/vacuum.py index a45b8eea632..9024e54087d 100644 --- a/homeassistant/components/roomba/vacuum.py +++ b/homeassistant/components/roomba/vacuum.py @@ -2,16 +2,92 @@ from __future__ import annotations +import asyncio +import logging +from typing import Any + +from homeassistant.components.vacuum import ( + ATTR_STATUS, + STATE_CLEANING, + STATE_DOCKED, + STATE_ERROR, + STATE_RETURNING, + StateVacuumEntity, + VacuumEntityFeature, +) from homeassistant.config_entries import ConfigEntry +from homeassistant.const import STATE_IDLE, STATE_PAUSED from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import dt as dt_util +from homeassistant.util.unit_system import METRIC_SYSTEM from . 
import roomba_reported_state -from .braava import BraavaJet from .const import DOMAIN -from .entity import IRobotVacuum +from .entity import IRobotEntity from .models import RoombaData -from .roomba import RoombaVacuum, RoombaVacuumCarpetBoost + +SUPPORT_IROBOT = ( + VacuumEntityFeature.BATTERY + | VacuumEntityFeature.PAUSE + | VacuumEntityFeature.RETURN_HOME + | VacuumEntityFeature.SEND_COMMAND + | VacuumEntityFeature.START + | VacuumEntityFeature.STATE + | VacuumEntityFeature.STOP + | VacuumEntityFeature.LOCATE +) + +STATE_MAP = { + "": STATE_IDLE, + "charge": STATE_DOCKED, + "evac": STATE_RETURNING, # Emptying at cleanbase + "hmMidMsn": STATE_CLEANING, # Recharging at the middle of a cycle + "hmPostMsn": STATE_RETURNING, # Cycle finished + "hmUsrDock": STATE_RETURNING, + "pause": STATE_PAUSED, + "run": STATE_CLEANING, + "stop": STATE_IDLE, + "stuck": STATE_ERROR, +} + +_LOGGER = logging.getLogger(__name__) +ATTR_SOFTWARE_VERSION = "software_version" +ATTR_CLEANING_TIME = "cleaning_time" +ATTR_CLEANED_AREA = "cleaned_area" +ATTR_ERROR = "error" +ATTR_ERROR_CODE = "error_code" +ATTR_POSITION = "position" +ATTR_SOFTWARE_VERSION = "software_version" + +ATTR_BIN_FULL = "bin_full" +ATTR_BIN_PRESENT = "bin_present" + +FAN_SPEED_AUTOMATIC = "Automatic" +FAN_SPEED_ECO = "Eco" +FAN_SPEED_PERFORMANCE = "Performance" +FAN_SPEEDS = [FAN_SPEED_AUTOMATIC, FAN_SPEED_ECO, FAN_SPEED_PERFORMANCE] + +# Only Roombas with CarpetBost can set their fanspeed +SUPPORT_ROOMBA_CARPET_BOOST = SUPPORT_IROBOT | VacuumEntityFeature.FAN_SPEED + +ATTR_DETECTED_PAD = "detected_pad" +ATTR_LID_CLOSED = "lid_closed" +ATTR_TANK_PRESENT = "tank_present" +ATTR_TANK_LEVEL = "tank_level" +ATTR_PAD_WETNESS = "spray_amount" + +OVERLAP_STANDARD = 67 +OVERLAP_DEEP = 85 +OVERLAP_EXTENDED = 25 +MOP_STANDARD = "Standard" +MOP_DEEP = "Deep" +MOP_EXTENDED = "Extended" +BRAAVA_MOP_BEHAVIORS = [MOP_STANDARD, MOP_DEEP, MOP_EXTENDED] +BRAAVA_SPRAY_AMOUNT = [1, 2, 3] + +# Braava Jets can set mopping behavior through fanspeed +SUPPORT_BRAAVA = SUPPORT_IROBOT | VacuumEntityFeature.FAN_SPEED async def async_setup_entry( @@ -39,3 +115,309 @@ async def async_setup_entry( roomba_vac = constructor(roomba, blid) async_add_entities([roomba_vac]) + + +class IRobotVacuum(IRobotEntity, StateVacuumEntity): + """Base class for iRobot robots.""" + + _attr_name = None + _attr_supported_features = SUPPORT_IROBOT + _attr_available = True # Always available, otherwise setup will fail + + def __init__(self, roomba, blid) -> None: + """Initialize the iRobot handler.""" + super().__init__(roomba, blid) + self._cap_position = self.vacuum_state.get("cap", {}).get("pose") == 1 + + @property + def _robot_state(self): + """Return the state of the vacuum cleaner.""" + clean_mission_status = self.vacuum_state.get("cleanMissionStatus", {}) + cycle = clean_mission_status.get("cycle") + phase = clean_mission_status.get("phase") + try: + state = STATE_MAP[phase] + except KeyError: + return STATE_ERROR + if cycle != "none" and state in (STATE_IDLE, STATE_DOCKED): + state = STATE_PAUSED + return state + + @property + def state(self) -> str: + """Return the state of the vacuum cleaner.""" + return self._robot_state + + @property + def extra_state_attributes(self) -> dict[str, Any]: + """Return the state attributes of the device.""" + state = self.vacuum_state + + # Roomba software version + software_version = state.get("softwareVer") + + # Set properties that are to appear in the GUI + state_attrs = {ATTR_SOFTWARE_VERSION: software_version} + + # Set legacy status to avoid 
break changes + state_attrs[ATTR_STATUS] = self.vacuum.current_state + + # Only add cleaning time and cleaned area attrs when the vacuum is + # currently on + if self.state == STATE_CLEANING: + # Get clean mission status + ( + state_attrs[ATTR_CLEANING_TIME], + state_attrs[ATTR_CLEANED_AREA], + ) = self.get_cleaning_status(state) + + # Error + if self.vacuum.error_code != 0: + state_attrs[ATTR_ERROR] = self.vacuum.error_message + state_attrs[ATTR_ERROR_CODE] = self.vacuum.error_code + + # Not all Roombas expose position data + # https://github.com/koalazak/dorita980/issues/48 + if self._cap_position: + pos_state = state.get("pose", {}) + position = None + pos_x = pos_state.get("point", {}).get("x") + pos_y = pos_state.get("point", {}).get("y") + theta = pos_state.get("theta") + if all(item is not None for item in (pos_x, pos_y, theta)): + position = f"({pos_x}, {pos_y}, {theta})" + state_attrs[ATTR_POSITION] = position + + return state_attrs + + def get_cleaning_status(self, state) -> tuple[int, int]: + """Return the cleaning time and cleaned area from the device.""" + if not (mission_state := state.get("cleanMissionStatus")): + return (0, 0) + + if cleaning_time := mission_state.get("mssnM", 0): + pass + elif start_time := mission_state.get("mssnStrtTm"): + now = dt_util.as_timestamp(dt_util.utcnow()) + if now > start_time: + cleaning_time = (now - start_time) // 60 + + if cleaned_area := mission_state.get("sqft", 0): # Imperial + # Convert to m2 if the unit_system is set to metric + if self.hass.config.units is METRIC_SYSTEM: + cleaned_area = round(cleaned_area * 0.0929) + + return (cleaning_time, cleaned_area) + + def on_message(self, json_data): + """Update state on message change.""" + state = json_data.get("state", {}).get("reported", {}) + if self.new_state_filter(state): + _LOGGER.debug("Got new state from the vacuum: %s", json_data) + self.schedule_update_ha_state() + + async def async_start(self) -> None: + """Start or resume the cleaning task.""" + if self.state == STATE_PAUSED: + await self.hass.async_add_executor_job(self.vacuum.send_command, "resume") + else: + await self.hass.async_add_executor_job(self.vacuum.send_command, "start") + + async def async_stop(self, **kwargs): + """Stop the vacuum cleaner.""" + await self.hass.async_add_executor_job(self.vacuum.send_command, "stop") + + async def async_pause(self) -> None: + """Pause the cleaning cycle.""" + await self.hass.async_add_executor_job(self.vacuum.send_command, "pause") + + async def async_return_to_base(self, **kwargs): + """Set the vacuum cleaner to return to the dock.""" + if self.state == STATE_CLEANING: + await self.async_pause() + for _ in range(10): + if self.state == STATE_PAUSED: + break + await asyncio.sleep(1) + await self.hass.async_add_executor_job(self.vacuum.send_command, "dock") + + async def async_locate(self, **kwargs): + """Located vacuum.""" + await self.hass.async_add_executor_job(self.vacuum.send_command, "find") + + async def async_send_command(self, command, params=None, **kwargs): + """Send raw command.""" + _LOGGER.debug("async_send_command %s (%s), %s", command, params, kwargs) + await self.hass.async_add_executor_job( + self.vacuum.send_command, command, params + ) + + +class RoombaVacuum(IRobotVacuum): + """Basic Roomba robot (without carpet boost).""" + + @property + def extra_state_attributes(self) -> dict[str, Any]: + """Return the state attributes of the device.""" + state_attrs = super().extra_state_attributes + + # Get bin state + bin_raw_state = self.vacuum_state.get("bin", {}) + 
bin_state = {} + if bin_raw_state.get("present") is not None: + bin_state[ATTR_BIN_PRESENT] = bin_raw_state.get("present") + if bin_raw_state.get("full") is not None: + bin_state[ATTR_BIN_FULL] = bin_raw_state.get("full") + state_attrs.update(bin_state) + + return state_attrs + + +class RoombaVacuumCarpetBoost(RoombaVacuum): + """Roomba robot with carpet boost.""" + + _attr_fan_speed_list = FAN_SPEEDS + _attr_supported_features = SUPPORT_ROOMBA_CARPET_BOOST + + @property + def fan_speed(self): + """Return the fan speed of the vacuum cleaner.""" + fan_speed = None + carpet_boost = self.vacuum_state.get("carpetBoost") + high_perf = self.vacuum_state.get("vacHigh") + if carpet_boost is not None and high_perf is not None: + if carpet_boost: + fan_speed = FAN_SPEED_AUTOMATIC + elif high_perf: + fan_speed = FAN_SPEED_PERFORMANCE + else: # carpet_boost and high_perf are False + fan_speed = FAN_SPEED_ECO + return fan_speed + + async def async_set_fan_speed(self, fan_speed, **kwargs): + """Set fan speed.""" + if fan_speed.capitalize() in FAN_SPEEDS: + fan_speed = fan_speed.capitalize() + _LOGGER.debug("Set fan speed to: %s", fan_speed) + high_perf = None + carpet_boost = None + if fan_speed == FAN_SPEED_AUTOMATIC: + high_perf = False + carpet_boost = True + elif fan_speed == FAN_SPEED_ECO: + high_perf = False + carpet_boost = False + elif fan_speed == FAN_SPEED_PERFORMANCE: + high_perf = True + carpet_boost = False + else: + _LOGGER.error("No such fan speed available: %s", fan_speed) + return + # The set_preference method does only accept string values + await self.hass.async_add_executor_job( + self.vacuum.set_preference, "carpetBoost", str(carpet_boost) + ) + await self.hass.async_add_executor_job( + self.vacuum.set_preference, "vacHigh", str(high_perf) + ) + + +class BraavaJet(IRobotVacuum): + """Braava Jet.""" + + _attr_supported_features = SUPPORT_BRAAVA + + def __init__(self, roomba, blid) -> None: + """Initialize the Roomba handler.""" + super().__init__(roomba, blid) + + # Initialize fan speed list + self._attr_fan_speed_list = [ + f"{behavior}-{spray}" + for behavior in BRAAVA_MOP_BEHAVIORS + for spray in BRAAVA_SPRAY_AMOUNT + ] + + @property + def fan_speed(self): + """Return the fan speed of the vacuum cleaner.""" + # Mopping behavior and spray amount as fan speed + rank_overlap = self.vacuum_state.get("rankOverlap", {}) + behavior = None + if rank_overlap == OVERLAP_STANDARD: + behavior = MOP_STANDARD + elif rank_overlap == OVERLAP_DEEP: + behavior = MOP_DEEP + elif rank_overlap == OVERLAP_EXTENDED: + behavior = MOP_EXTENDED + pad_wetness = self.vacuum_state.get("padWetness", {}) + # "disposable" and "reusable" values are always the same + pad_wetness_value = pad_wetness.get("disposable") + return f"{behavior}-{pad_wetness_value}" + + async def async_set_fan_speed(self, fan_speed, **kwargs): + """Set fan speed.""" + try: + split = fan_speed.split("-", 1) + behavior = split[0] + spray = int(split[1]) + if behavior.capitalize() in BRAAVA_MOP_BEHAVIORS: + behavior = behavior.capitalize() + except IndexError: + _LOGGER.error( + "Fan speed error: expected {behavior}-{spray_amount}, got '%s'", + fan_speed, + ) + return + except ValueError: + _LOGGER.error("Spray amount error: expected integer, got '%s'", split[1]) + return + if behavior not in BRAAVA_MOP_BEHAVIORS: + _LOGGER.error( + "Mop behavior error: expected one of %s, got '%s'", + str(BRAAVA_MOP_BEHAVIORS), + behavior, + ) + return + if spray not in BRAAVA_SPRAY_AMOUNT: + _LOGGER.error( + "Spray amount error: expected one of %s, got 
'%d'", + str(BRAAVA_SPRAY_AMOUNT), + spray, + ) + return + + overlap = 0 + if behavior == MOP_STANDARD: + overlap = OVERLAP_STANDARD + elif behavior == MOP_DEEP: + overlap = OVERLAP_DEEP + else: + overlap = OVERLAP_EXTENDED + await self.hass.async_add_executor_job( + self.vacuum.set_preference, "rankOverlap", overlap + ) + await self.hass.async_add_executor_job( + self.vacuum.set_preference, + "padWetness", + {"disposable": spray, "reusable": spray}, + ) + + @property + def extra_state_attributes(self) -> dict[str, Any]: + """Return the state attributes of the device.""" + state_attrs = super().extra_state_attributes + + # Get Braava state + state = self.vacuum_state + detected_pad = state.get("detectedPad") + mop_ready = state.get("mopReady", {}) + lid_closed = mop_ready.get("lidClosed") + tank_present = mop_ready.get("tankPresent") + tank_level = state.get("tankLvl") + state_attrs[ATTR_DETECTED_PAD] = detected_pad + state_attrs[ATTR_LID_CLOSED] = lid_closed + state_attrs[ATTR_TANK_PRESENT] = tank_present + state_attrs[ATTR_TANK_LEVEL] = tank_level + + return state_attrs From 09d7fed6cdabd029bbaa8c9b70f858e0f57a3a79 Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Tue, 3 Dec 2024 21:23:52 +0100 Subject: [PATCH 0208/1198] Add dhcp discovery for fyta (#132185) Co-authored-by: Joost Lekkerkerker --- homeassistant/components/fyta/manifest.json | 1 + homeassistant/components/fyta/strings.json | 3 +- homeassistant/generated/dhcp.py | 4 ++ tests/components/fyta/test_config_flow.py | 60 ++++++++++++++++----- 4 files changed, 53 insertions(+), 15 deletions(-) diff --git a/homeassistant/components/fyta/manifest.json b/homeassistant/components/fyta/manifest.json index 0df9eca2e38..15f007e5f4d 100644 --- a/homeassistant/components/fyta/manifest.json +++ b/homeassistant/components/fyta/manifest.json @@ -3,6 +3,7 @@ "name": "FYTA", "codeowners": ["@dontinelli"], "config_flow": true, + "dhcp": [{ "hostname": "fyta*" }], "documentation": "https://www.home-assistant.io/integrations/fyta", "integration_type": "hub", "iot_class": "cloud_polling", diff --git a/homeassistant/components/fyta/strings.json b/homeassistant/components/fyta/strings.json index edd65ad228d..fc9f424d5aa 100644 --- a/homeassistant/components/fyta/strings.json +++ b/homeassistant/components/fyta/strings.json @@ -26,7 +26,8 @@ } }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, "error": { diff --git a/homeassistant/generated/dhcp.py b/homeassistant/generated/dhcp.py index e37fb2332b1..22a09945a80 100644 --- a/homeassistant/generated/dhcp.py +++ b/homeassistant/generated/dhcp.py @@ -209,6 +209,10 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "domain": "fully_kiosk", "registered_devices": True, }, + { + "domain": "fyta", + "hostname": "fyta*", + }, { "domain": "goalzero", "registered_devices": True, diff --git a/tests/components/fyta/test_config_flow.py b/tests/components/fyta/test_config_flow.py index e47b78aa893..21101db8534 100644 --- a/tests/components/fyta/test_config_flow.py +++ b/tests/components/fyta/test_config_flow.py @@ -10,6 +10,7 @@ from fyta_cli.fyta_exceptions import ( import pytest from homeassistant import config_entries +from homeassistant.components.dhcp import 
DhcpServiceInfo from homeassistant.components.fyta.const import CONF_EXPIRATION, DOMAIN from homeassistant.const import CONF_ACCESS_TOKEN, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant @@ -20,6 +21,26 @@ from .const import ACCESS_TOKEN, EXPIRATION, PASSWORD, USERNAME from tests.common import MockConfigEntry +async def user_step( + hass: HomeAssistant, flow_id: str, mock_setup_entry: AsyncMock +) -> None: + """Test user step (helper function).""" + + result = await hass.config_entries.flow.async_configure( + flow_id, {CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD} + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == USERNAME + assert result["data"] == { + CONF_USERNAME: USERNAME, + CONF_PASSWORD: PASSWORD, + CONF_ACCESS_TOKEN: ACCESS_TOKEN, + CONF_EXPIRATION: EXPIRATION, + } + assert len(mock_setup_entry.mock_calls) == 1 + + async def test_user_flow( hass: HomeAssistant, mock_fyta_connector: AsyncMock, mock_setup_entry: AsyncMock ) -> None: @@ -31,20 +52,7 @@ async def test_user_flow( assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD} - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == USERNAME - assert result2["data"] == { - CONF_USERNAME: USERNAME, - CONF_PASSWORD: PASSWORD, - CONF_ACCESS_TOKEN: ACCESS_TOKEN, - CONF_EXPIRATION: EXPIRATION, - } - assert len(mock_setup_entry.mock_calls) == 1 + await user_step(hass, result["flow_id"], mock_setup_entry) @pytest.mark.parametrize( @@ -190,3 +198,27 @@ async def test_reauth( assert entry.data[CONF_PASSWORD] == "other_password" assert entry.data[CONF_ACCESS_TOKEN] == ACCESS_TOKEN assert entry.data[CONF_EXPIRATION] == EXPIRATION + + +async def test_dhcp_discovery( + hass: HomeAssistant, mock_fyta_connector: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test DHCP discovery flow.""" + + service_info = DhcpServiceInfo( + hostname="FYTA HUB", + ip="1.2.3.4", + macaddress="aabbccddeeff", + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_DHCP}, + data=service_info, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + await user_step(hass, result["flow_id"], mock_setup_entry) From 1a714276cc04a306d9862c9243b801d012c6d29d Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 3 Dec 2024 21:43:33 +0100 Subject: [PATCH 0209/1198] Remove support for live recorder data migration of entity IDs (#131952) --- .../components/recorder/migration.py | 21 +-- .../recorder/table_managers/states_meta.py | 2 +- tests/components/recorder/common.py | 9 - .../recorder/test_history_db_schema_32.py | 15 +- .../recorder/test_migration_from_schema_32.py | 168 +++++++++++++----- ..._migration_run_time_migrations_remember.py | 7 +- 6 files changed, 141 insertions(+), 81 deletions(-) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index fffecff149c..750b4adc563 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -2586,15 +2586,11 @@ class EventTypeIDMigration(BaseMigrationWithQuery, BaseRunTimeMigration): return has_event_type_to_migrate() -class EntityIDMigration(BaseMigrationWithQuery, BaseRunTimeMigration): +class 
EntityIDMigration(BaseMigrationWithQuery, BaseOffLineMigration): """Migration to migrate entity_ids to states_meta.""" required_schema_version = STATES_META_SCHEMA_VERSION migration_id = "entity_id_migration" - task = CommitBeforeMigrationTask - # We have to commit before to make sure there are - # no new pending states_meta about to be added to - # the db since this happens live def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: """Migrate entity_ids to states_meta, return True if completed. @@ -2664,18 +2660,6 @@ class EntityIDMigration(BaseMigrationWithQuery, BaseRunTimeMigration): _LOGGER.debug("Migrating entity_ids done=%s", is_done) return DataMigrationStatus(needs_migrate=not is_done, migration_done=is_done) - def migration_done(self, instance: Recorder, session: Session) -> None: - """Will be called after migrate returns True.""" - # The migration has finished, now we start the post migration - # to remove the old entity_id data from the states table - # at this point we can also start using the StatesMeta table - # so we set active to True - _LOGGER.debug("Activating states_meta manager as all data is migrated") - instance.states_meta_manager.active = True - with contextlib.suppress(SQLAlchemyError): - migrate = EntityIDPostMigration(self.schema_version, self.migration_changes) - migrate.queue_migration(instance, session) - def needs_migrate_query(self) -> StatementLambdaElement: """Check if the data is migrated.""" return has_entity_ids_to_migrate() @@ -2786,12 +2770,13 @@ class EntityIDPostMigration(BaseMigrationWithQuery, BaseRunTimeMigration): NON_LIVE_DATA_MIGRATORS = ( StatesContextIDMigration, # Introduced in HA Core 2023.4 EventsContextIDMigration, # Introduced in HA Core 2023.4 + EntityIDMigration, # Introduced in HA Core 2023.4 by PR #89557 ) LIVE_DATA_MIGRATORS = ( EventTypeIDMigration, # Introduced in HA Core 2023.4 by PR #89465 - EntityIDMigration, # Introduced in HA Core 2023.4 by PR #89557 EventIDPostMigration, # Introduced in HA Core 2023.4 by PR #89901 + EntityIDPostMigration, # Introduced in HA Core 2023.4 by PR #89557 ) diff --git a/homeassistant/components/recorder/table_managers/states_meta.py b/homeassistant/components/recorder/table_managers/states_meta.py index 80d20dbec94..75afb6589a1 100644 --- a/homeassistant/components/recorder/table_managers/states_meta.py +++ b/homeassistant/components/recorder/table_managers/states_meta.py @@ -24,7 +24,7 @@ CACHE_SIZE = 8192 class StatesMetaManager(BaseLRUTableManager[StatesMeta]): """Manage the StatesMeta table.""" - active = False + active = True def __init__(self, recorder: Recorder) -> None: """Initialize the states meta manager.""" diff --git a/tests/components/recorder/common.py b/tests/components/recorder/common.py index 60168f5e6ef..fbb0991c960 100644 --- a/tests/components/recorder/common.py +++ b/tests/components/recorder/common.py @@ -428,14 +428,6 @@ def get_schema_module_path(schema_version_postfix: str) -> str: return f"tests.components.recorder.db_schema_{schema_version_postfix}" -@dataclass(slots=True) -class MockMigrationTask(migration.MigrationTask): - """Mock migration task which does nothing.""" - - def run(self, instance: Recorder) -> None: - """Run migration task.""" - - @contextmanager def old_db_schema(schema_version_postfix: str) -> Iterator[None]: """Fixture to initialize the db with the old schema.""" @@ -453,7 +445,6 @@ def old_db_schema(schema_version_postfix: str) -> Iterator[None]: patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", 
old_db_schema.Events), patch.object(core, "StateAttributes", old_db_schema.StateAttributes), - patch.object(migration.EntityIDMigration, "task", MockMigrationTask), patch( CREATE_ENGINE_TARGET, new=partial( diff --git a/tests/components/recorder/test_history_db_schema_32.py b/tests/components/recorder/test_history_db_schema_32.py index 3ee6edd8e1e..666626ff688 100644 --- a/tests/components/recorder/test_history_db_schema_32.py +++ b/tests/components/recorder/test_history_db_schema_32.py @@ -38,6 +38,17 @@ async def mock_recorder_before_hass( """Set up recorder.""" +@pytest.fixture +def disable_states_meta_manager(): + """Disable the states meta manager.""" + with patch.object( + recorder.table_managers.states_meta.StatesMetaManager, + "active", + False, + ): + yield + + @pytest.fixture(autouse=True) def db_schema_32(): """Fixture to initialize the db with the old schema 32.""" @@ -46,7 +57,9 @@ def db_schema_32(): @pytest.fixture(autouse=True) -def setup_recorder(db_schema_32, recorder_mock: Recorder) -> recorder.Recorder: +def setup_recorder( + db_schema_32, disable_states_meta_manager, recorder_mock: Recorder +) -> recorder.Recorder: """Set up recorder.""" diff --git a/tests/components/recorder/test_migration_from_schema_32.py b/tests/components/recorder/test_migration_from_schema_32.py index 6ef97f3bbd1..e77fae7ffad 100644 --- a/tests/components/recorder/test_migration_from_schema_32.py +++ b/tests/components/recorder/test_migration_from_schema_32.py @@ -44,7 +44,6 @@ import homeassistant.util.dt as dt_util from homeassistant.util.ulid import bytes_to_ulid, ulid_at_time, ulid_to_bytes from .common import ( - MockMigrationTask, async_attach_db_engine, async_recorder_block_till_done, async_wait_recording_done, @@ -114,7 +113,6 @@ def db_schema_32(): patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), patch.object(core, "StateAttributes", old_db_schema.StateAttributes), - patch.object(migration.EntityIDMigration, "task", MockMigrationTask), patch(CREATE_ENGINE_TARGET, new=_create_engine_test), ): yield @@ -919,11 +917,13 @@ async def test_migrate_event_type_ids( ) +@pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.parametrize("enable_migrate_entity_ids", [True]) -@pytest.mark.usefixtures("db_schema_32") -async def test_migrate_entity_ids(hass: HomeAssistant, recorder_mock: Recorder) -> None: +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage +async def test_migrate_entity_ids( + async_test_recorder: RecorderInstanceGenerator, +) -> None: """Test we can migrate entity_ids to the StatesMeta table.""" - await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE_32) old_db_schema = sys.modules[SCHEMA_MODULE_32] @@ -949,14 +949,24 @@ async def test_migrate_entity_ids(hass: HomeAssistant, recorder_mock: Recorder) ) ) - await recorder_mock.async_add_executor_job(_insert_states) + # Create database with old schema + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.EntityIDMigration, "migrate_data"), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await instance.async_add_executor_job(_insert_states) - await _async_wait_migration_done(hass) - # This is a threadsafe way to add a task to the recorder - migrator = 
migration.EntityIDMigration(old_db_schema.SCHEMA_VERSION, {}) - recorder_mock.queue_task(migration.CommitBeforeMigrationTask(migrator)) - await _async_wait_migration_done(hass) - await _async_wait_migration_done(hass) + await async_wait_recording_done(hass) + await _async_wait_migration_done(hass) + + await hass.async_stop() + await hass.async_block_till_done() def _fetch_migrated_states(): with session_scope(hass=hass, read_only=True) as session: @@ -982,28 +992,43 @@ async def test_migrate_entity_ids(hass: HomeAssistant, recorder_mock: Recorder) ) return result - states_by_entity_id = await recorder_mock.async_add_executor_job( - _fetch_migrated_states - ) + # Run again with new schema, let migration run + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + states_by_entity_id = await instance.async_add_executor_job( + _fetch_migrated_states + ) + migration_changes = await instance.async_add_executor_job( + _get_migration_id, hass + ) + + await hass.async_stop() + await hass.async_block_till_done() + assert len(states_by_entity_id["sensor.two"]) == 2 assert len(states_by_entity_id["sensor.one"]) == 1 - migration_changes = await recorder_mock.async_add_executor_job( - _get_migration_id, hass - ) assert ( migration_changes[migration.EntityIDMigration.migration_id] == migration.EntityIDMigration.migration_version ) +@pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.parametrize("enable_migrate_entity_ids", [True]) -@pytest.mark.usefixtures("db_schema_32") +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_post_migrate_entity_ids( - hass: HomeAssistant, recorder_mock: Recorder + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Test we can migrate entity_ids to the StatesMeta table.""" - await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE_32) old_db_schema = sys.modules[SCHEMA_MODULE_32] @@ -1029,14 +1054,25 @@ async def test_post_migrate_entity_ids( ) ) - await recorder_mock.async_add_executor_job(_insert_events) + # Create database with old schema + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.EntityIDMigration, "migrate_data"), + patch.object(migration.EntityIDPostMigration, "migrate_data"), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await instance.async_add_executor_job(_insert_events) - await _async_wait_migration_done(hass) - # This is a threadsafe way to add a task to the recorder - migrator = migration.EntityIDPostMigration(None, None) - recorder_mock.queue_task(migrator.task(migrator)) - await _async_wait_migration_done(hass) - await _async_wait_migration_done(hass) + await async_wait_recording_done(hass) + await _async_wait_migration_done(hass) + + await hass.async_stop() + await hass.async_block_till_done() def _fetch_migrated_states(): with session_scope(hass=hass, read_only=True) as session: @@ -1047,19 +1083,34 @@ async def test_post_migrate_entity_ids( assert len(states) == 3 return {state.state: state.entity_id for state in states} - states_by_state = await 
recorder_mock.async_add_executor_job(_fetch_migrated_states) + # Run again with new schema, let migration run + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + states_by_state = await instance.async_add_executor_job(_fetch_migrated_states) + + await hass.async_stop() + await hass.async_block_till_done() + assert states_by_state["one_1"] is None assert states_by_state["two_2"] is None assert states_by_state["two_1"] is None +@pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.parametrize("enable_migrate_entity_ids", [True]) -@pytest.mark.usefixtures("db_schema_32") +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_null_entity_ids( - hass: HomeAssistant, recorder_mock: Recorder + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Test we can migrate entity_ids to the StatesMeta table.""" - await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE_32) old_db_schema = sys.modules[SCHEMA_MODULE_32] @@ -1088,14 +1139,24 @@ async def test_migrate_null_entity_ids( ), ) - await recorder_mock.async_add_executor_job(_insert_states) + # Create database with old schema + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.EntityIDMigration, "migrate_data"), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await instance.async_add_executor_job(_insert_states) - await _async_wait_migration_done(hass) - # This is a threadsafe way to add a task to the recorder - migrator = migration.EntityIDMigration(old_db_schema.SCHEMA_VERSION, {}) - recorder_mock.queue_task(migration.CommitBeforeMigrationTask(migrator)) - await _async_wait_migration_done(hass) - await _async_wait_migration_done(hass) + await async_wait_recording_done(hass) + await _async_wait_migration_done(hass) + + await hass.async_stop() + await hass.async_block_till_done() def _fetch_migrated_states(): with session_scope(hass=hass, read_only=True) as session: @@ -1121,17 +1182,32 @@ async def test_migrate_null_entity_ids( ) return result - states_by_entity_id = await recorder_mock.async_add_executor_job( - _fetch_migrated_states - ) - assert len(states_by_entity_id[migration._EMPTY_ENTITY_ID]) == 1000 - assert len(states_by_entity_id["sensor.one"]) == 2 - def _get_migration_id(): with session_scope(hass=hass, read_only=True) as session: return dict(execute_stmt_lambda_element(session, get_migration_changes())) - migration_changes = await recorder_mock.async_add_executor_job(_get_migration_id) + # Run again with new schema, let migration run + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + states_by_entity_id = await instance.async_add_executor_job( + _fetch_migrated_states + ) + migration_changes = await instance.async_add_executor_job(_get_migration_id) + + await hass.async_stop() + await hass.async_block_till_done() + + assert 
len(states_by_entity_id[migration._EMPTY_ENTITY_ID]) == 1000
+    assert len(states_by_entity_id["sensor.one"]) == 2
+
     assert (
         migration_changes[migration.EntityIDMigration.migration_id]
         == migration.EntityIDMigration.migration_version
diff --git a/tests/components/recorder/test_migration_run_time_migrations_remember.py b/tests/components/recorder/test_migration_run_time_migrations_remember.py
index 93fa16b8364..7a333b0a2f5 100644
--- a/tests/components/recorder/test_migration_run_time_migrations_remember.py
+++ b/tests/components/recorder/test_migration_run_time_migrations_remember.py
@@ -19,11 +19,7 @@ from homeassistant.components.recorder.util import (
 from homeassistant.const import EVENT_HOMEASSISTANT_STOP
 from homeassistant.core import HomeAssistant
 
-from .common import (
-    MockMigrationTask,
-    async_recorder_block_till_done,
-    async_wait_recording_done,
-)
+from .common import async_recorder_block_till_done, async_wait_recording_done
 
 from tests.common import async_test_home_assistant
 from tests.typing import RecorderInstanceGenerator
@@ -102,7 +98,6 @@ async def test_migration_changes_prevent_trying_to_migrate_again(
         patch.object(core, "States", old_db_schema.States),
         patch.object(core, "Events", old_db_schema.Events),
         patch.object(core, "StateAttributes", old_db_schema.StateAttributes),
-        patch.object(migration.EntityIDMigration, "task", MockMigrationTask),
         patch(CREATE_ENGINE_TARGET, new=_create_engine_test),
     ):
         async with (

From a405d2b7243571536a2fd5edc5bacbb219e041a9 Mon Sep 17 00:00:00 2001
From: epenet <6771947+epenet@users.noreply.github.com>
Date: Tue, 3 Dec 2024 21:49:24 +0100
Subject: [PATCH 0210/1198] Bump pytest to 8.3.4 (#132179)

---
 requirements_test.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements_test.txt b/requirements_test.txt
index 34dcdfc1244..2370bed8986 100644
--- a/requirements_test.txt
+++ b/requirements_test.txt
@@ -29,7 +29,7 @@ pytest-timeout==2.3.1
 pytest-unordered==0.6.1
 pytest-picked==0.5.0
 pytest-xdist==3.6.1
-pytest==8.3.3
+pytest==8.3.4
 requests-mock==1.12.1
 respx==0.21.1
 syrupy==4.8.0

From bb518373463afb1763b4a95292a7af657b978763 Mon Sep 17 00:00:00 2001
From: Hugh Saunders
Date: Tue, 3 Dec 2024 21:23:04 +0000
Subject: [PATCH 0211/1198] Generic Thermostat Add Target Min Max to UI config
 (#131168)

Currently you can configure the minimum and maximum target temperatures if you
create a generic thermostat in YAML. If you create it via the UI, there is no
option to configure them; you just get the climate domain defaults. This commit
adds minimum and maximum fields to the first stage of the generic thermostat
config flow, so that UI users can also set min and max. Min and max are
important because users usually want to select target temperatures within a
relatively narrow band, while the defaults create a wide band. The wide band
makes it hard to be accurate enough with the arc-style temperature selector on
the thermostat card.
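For illustration, a minimal sketch of the equivalent YAML configuration that
already allowed setting these limits (the entity IDs and temperature values
below are assumed placeholders, not part of this change):

  climate:
    - platform: generic_thermostat
      name: Study
      heater: switch.study_heater              # assumed example heater switch
      target_sensor: sensor.study_temperature  # assumed example temperature sensor
      min_temp: 15
      max_temp: 25

The new UI fields write the same min_temp/max_temp options, so thermostats
created through the config flow end up with the same narrow target band as
YAML-created ones.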
--- .../components/generic_thermostat/climate.py | 4 ++-- .../components/generic_thermostat/config_flow.py | 12 ++++++++++++ homeassistant/components/generic_thermostat/const.py | 2 ++ .../components/generic_thermostat/strings.json | 8 ++++++-- 4 files changed, 22 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/generic_thermostat/climate.py b/homeassistant/components/generic_thermostat/climate.py index d68eaccbb0c..f82da4483eb 100644 --- a/homeassistant/components/generic_thermostat/climate.py +++ b/homeassistant/components/generic_thermostat/climate.py @@ -63,7 +63,9 @@ from .const import ( CONF_COLD_TOLERANCE, CONF_HEATER, CONF_HOT_TOLERANCE, + CONF_MAX_TEMP, CONF_MIN_DUR, + CONF_MIN_TEMP, CONF_PRESETS, CONF_SENSOR, DEFAULT_TOLERANCE, @@ -77,8 +79,6 @@ DEFAULT_NAME = "Generic Thermostat" CONF_INITIAL_HVAC_MODE = "initial_hvac_mode" CONF_KEEP_ALIVE = "keep_alive" -CONF_MIN_TEMP = "min_temp" -CONF_MAX_TEMP = "max_temp" CONF_PRECISION = "precision" CONF_TARGET_TEMP = "target_temp" CONF_TEMP_STEP = "target_temp_step" diff --git a/homeassistant/components/generic_thermostat/config_flow.py b/homeassistant/components/generic_thermostat/config_flow.py index 5b0eae8ff66..1fbeaefde6b 100644 --- a/homeassistant/components/generic_thermostat/config_flow.py +++ b/homeassistant/components/generic_thermostat/config_flow.py @@ -21,7 +21,9 @@ from .const import ( CONF_COLD_TOLERANCE, CONF_HEATER, CONF_HOT_TOLERANCE, + CONF_MAX_TEMP, CONF_MIN_DUR, + CONF_MIN_TEMP, CONF_PRESETS, CONF_SENSOR, DEFAULT_TOLERANCE, @@ -57,6 +59,16 @@ OPTIONS_SCHEMA = { vol.Optional(CONF_MIN_DUR): selector.DurationSelector( selector.DurationSelectorConfig(allow_negative=False) ), + vol.Optional(CONF_MIN_TEMP): selector.NumberSelector( + selector.NumberSelectorConfig( + mode=selector.NumberSelectorMode.BOX, unit_of_measurement=DEGREE, step=0.1 + ) + ), + vol.Optional(CONF_MAX_TEMP): selector.NumberSelector( + selector.NumberSelectorConfig( + mode=selector.NumberSelectorMode.BOX, unit_of_measurement=DEGREE, step=0.1 + ) + ), } PRESETS_SCHEMA = { diff --git a/homeassistant/components/generic_thermostat/const.py b/homeassistant/components/generic_thermostat/const.py index 51927297b63..f0e6f1a7d73 100644 --- a/homeassistant/components/generic_thermostat/const.py +++ b/homeassistant/components/generic_thermostat/const.py @@ -18,7 +18,9 @@ CONF_AC_MODE = "ac_mode" CONF_COLD_TOLERANCE = "cold_tolerance" CONF_HEATER = "heater" CONF_HOT_TOLERANCE = "hot_tolerance" +CONF_MAX_TEMP = "max_temp" CONF_MIN_DUR = "min_cycle_duration" +CONF_MIN_TEMP = "min_temp" CONF_PRESETS = { p: f"{p}_temp" for p in ( diff --git a/homeassistant/components/generic_thermostat/strings.json b/homeassistant/components/generic_thermostat/strings.json index fd89bec6349..58280e99543 100644 --- a/homeassistant/components/generic_thermostat/strings.json +++ b/homeassistant/components/generic_thermostat/strings.json @@ -12,7 +12,9 @@ "min_cycle_duration": "Minimum cycle duration", "name": "[%key:common::config_flow::data::name%]", "cold_tolerance": "Cold tolerance", - "hot_tolerance": "Hot tolerance" + "hot_tolerance": "Hot tolerance", + "min_temp": "Minimum target temperature", + "max_temp": "Maximum target temperature" }, "data_description": { "ac_mode": "Set the actuator specified to be treated as a cooling device instead of a heating device.", @@ -45,7 +47,9 @@ "target_sensor": "[%key:component::generic_thermostat::config::step::user::data::target_sensor%]", "min_cycle_duration": 
"[%key:component::generic_thermostat::config::step::user::data::min_cycle_duration%]", "cold_tolerance": "[%key:component::generic_thermostat::config::step::user::data::cold_tolerance%]", - "hot_tolerance": "[%key:component::generic_thermostat::config::step::user::data::hot_tolerance%]" + "hot_tolerance": "[%key:component::generic_thermostat::config::step::user::data::hot_tolerance%]", + "min_temp": "[%key:component::generic_thermostat::config::step::user::data::min_temp%]", + "max_temp": "[%key:component::generic_thermostat::config::step::user::data::max_temp%]" }, "data_description": { "heater": "[%key:component::generic_thermostat::config::step::user::data_description::heater%]", From f31ff3ca1461cbcea3dbb9c1d31f95fffd26139e Mon Sep 17 00:00:00 2001 From: G Johansson Date: Tue, 3 Dec 2024 22:24:39 +0100 Subject: [PATCH 0212/1198] Bump holidays to 0.62 (#132108) --- homeassistant/components/holiday/manifest.json | 2 +- homeassistant/components/workday/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/holiday/manifest.json b/homeassistant/components/holiday/manifest.json index a3c0a4514d3..7edc140da11 100644 --- a/homeassistant/components/holiday/manifest.json +++ b/homeassistant/components/holiday/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/holiday", "iot_class": "local_polling", - "requirements": ["holidays==0.61", "babel==2.15.0"] + "requirements": ["holidays==0.62", "babel==2.15.0"] } diff --git a/homeassistant/components/workday/manifest.json b/homeassistant/components/workday/manifest.json index ea08bfe1717..842c6f1f1ad 100644 --- a/homeassistant/components/workday/manifest.json +++ b/homeassistant/components/workday/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "loggers": ["holidays"], "quality_scale": "internal", - "requirements": ["holidays==0.61"] + "requirements": ["holidays==0.62"] } diff --git a/requirements_all.txt b/requirements_all.txt index 06e184246b2..18f7bc2fb20 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1124,7 +1124,7 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.61 +holidays==0.62 # homeassistant.components.frontend home-assistant-frontend==20241127.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 52dcb44e47d..ebfc47c764d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -950,7 +950,7 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.61 +holidays==0.62 # homeassistant.components.frontend home-assistant-frontend==20241127.3 From 14897f921cd6846d7bde1f85c9b3c9f3b8a1a285 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Tue, 3 Dec 2024 22:25:29 +0100 Subject: [PATCH 0213/1198] Fix mypy issue in airzone cloud (#132208) --- homeassistant/components/airzone_cloud/climate.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/airzone_cloud/climate.py b/homeassistant/components/airzone_cloud/climate.py index d32b070ad8c..cba41867d61 100644 --- a/homeassistant/components/airzone_cloud/climate.py +++ b/homeassistant/components/airzone_cloud/climate.py @@ -194,12 +194,6 @@ class AirzoneClimate(AirzoneEntity, ClimateEntity): ClimateEntityFeature.TARGET_TEMPERATURE_RANGE ) - if ( - self.get_airzone_value(AZD_SPEED) is not None - and 
self.get_airzone_value(AZD_SPEEDS) is not None - ): - self._initialize_fan_speeds() - @callback def _handle_coordinator_update(self) -> None: """Update attributes when the coordinator updates.""" @@ -252,6 +246,15 @@ class AirzoneDeviceClimate(AirzoneClimate): _speeds: dict[int, str] _speeds_reverse: dict[str, int] + def _init_attributes(self) -> None: + """Init common climate device attributes.""" + super()._init_attributes() + if ( + self.get_airzone_value(AZD_SPEED) is not None + and self.get_airzone_value(AZD_SPEEDS) is not None + ): + self._initialize_fan_speeds() + def _initialize_fan_speeds(self) -> None: """Initialize fan speeds.""" azd_speeds: dict[int, int] = self.get_airzone_value(AZD_SPEEDS) From 5ae875be777b2bad0532f69c582e7f19d123e89a Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Tue, 3 Dec 2024 22:29:58 +0100 Subject: [PATCH 0214/1198] Update test_config_flow for solarlog (#132104) Co-authored-by: Joost Lekkerkerker --- tests/components/solarlog/test_config_flow.py | 51 +++++++------------ 1 file changed, 17 insertions(+), 34 deletions(-) diff --git a/tests/components/solarlog/test_config_flow.py b/tests/components/solarlog/test_config_flow.py index 3de3c08fcd0..58a5faa0772 100644 --- a/tests/components/solarlog/test_config_flow.py +++ b/tests/components/solarlog/test_config_flow.py @@ -9,7 +9,6 @@ from solarlog_cli.solarlog_exceptions import ( SolarLogError, ) -from homeassistant.components.solarlog import config_flow from homeassistant.components.solarlog.const import CONF_HAS_PWD, DOMAIN from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PASSWORD @@ -35,7 +34,6 @@ async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: result["flow_id"], {CONF_HOST: HOST, CONF_HAS_PWD: False}, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == HOST @@ -44,13 +42,6 @@ async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: assert len(mock_setup_entry.mock_calls) == 1 -def init_config_flow(hass: HomeAssistant) -> config_flow.SolarLogConfigFlow: - """Init a configuration flow.""" - flow = config_flow.SolarLogConfigFlow() - flow.hass = hass - return flow - - @pytest.mark.usefixtures("test_connect") async def test_user( hass: HomeAssistant, @@ -68,7 +59,6 @@ async def test_user( result = await hass.config_entries.flow.async_configure( result["flow_id"], {CONF_HOST: HOST, CONF_HAS_PWD: False} ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == HOST @@ -97,17 +87,19 @@ async def test_form_exceptions( mock_solarlog_connector: AsyncMock, ) -> None: """Test we can handle Form exceptions.""" - flow = init_config_flow(hass) - result = await flow.async_step_user() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" + assert result["errors"] == {} mock_solarlog_connector.test_connection.side_effect = exception1 # tests with connection error - result = await flow.async_step_user({CONF_HOST: HOST, CONF_HAS_PWD: False}) - await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_HOST: HOST, CONF_HAS_PWD: False} + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -117,14 +109,16 @@ async def 
test_form_exceptions( mock_solarlog_connector.test_connection.side_effect = None mock_solarlog_connector.test_extended_data_available.side_effect = exception2 - result = await flow.async_step_user({CONF_HOST: HOST, CONF_HAS_PWD: True}) - await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_HOST: HOST, CONF_HAS_PWD: True} + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "password" - result = await flow.async_step_password({CONF_PASSWORD: "pwd"}) - await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_PASSWORD: "pwd"} + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "password" @@ -132,18 +126,10 @@ async def test_form_exceptions( mock_solarlog_connector.test_extended_data_available.side_effect = None - # tests with all provided (no password) - result = await flow.async_step_user({CONF_HOST: HOST, CONF_HAS_PWD: False}) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == HOST - assert result["data"][CONF_HOST] == HOST - assert result["data"][CONF_HAS_PWD] is False - - # tests with all provided (password) - result = await flow.async_step_password({CONF_PASSWORD: "pwd"}) - await hass.async_block_till_done() + # tests with all provided + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_PASSWORD: "pwd"} + ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == HOST @@ -205,7 +191,6 @@ async def test_reconfigure_flow( result = await hass.config_entries.flow.async_configure( result["flow_id"], {CONF_HAS_PWD: True, CONF_PASSWORD: password} ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reconfigure_successful" @@ -257,7 +242,6 @@ async def test_reauth( result["flow_id"], {CONF_PASSWORD: "other_pwd"}, ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -270,7 +254,6 @@ async def test_reauth( result["flow_id"], {CONF_PASSWORD: "other_pwd"}, ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" From 4deaeaeda048112c9ebd5e3883bccfcadb8dded6 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Tue, 3 Dec 2024 23:08:08 +0100 Subject: [PATCH 0215/1198] Fix next mypy issue in airzone_cloud (#132217) --- homeassistant/components/airzone_cloud/climate.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/airzone_cloud/climate.py b/homeassistant/components/airzone_cloud/climate.py index cba41867d61..5ee15ff6819 100644 --- a/homeassistant/components/airzone_cloud/climate.py +++ b/homeassistant/components/airzone_cloud/climate.py @@ -208,8 +208,6 @@ class AirzoneClimate(AirzoneEntity, ClimateEntity): self._attr_hvac_action = HVAC_ACTION_LIB_TO_HASS[ self.get_airzone_value(AZD_ACTION) ] - if self.supported_features & ClimateEntityFeature.FAN_MODE: - self._attr_fan_mode = self._speeds.get(self.get_airzone_value(AZD_SPEED)) if self.get_airzone_value(AZD_POWER): self._attr_hvac_mode = HVAC_MODE_LIB_TO_HASS[ self.get_airzone_value(AZD_MODE) @@ -255,6 +253,13 @@ class AirzoneDeviceClimate(AirzoneClimate): ): self._initialize_fan_speeds() + @callback + def _async_update_attrs(self) -> None: + """Update climate 
attributes.""" + super()._async_update_attrs() + if self.supported_features & ClimateEntityFeature.FAN_MODE: + self._attr_fan_mode = self._speeds.get(self.get_airzone_value(AZD_SPEED)) + def _initialize_fan_speeds(self) -> None: """Initialize fan speeds.""" azd_speeds: dict[int, int] = self.get_airzone_value(AZD_SPEEDS) From 535b47789fc523ec03101b386ed800e7552dd240 Mon Sep 17 00:00:00 2001 From: Richard Kroegel <42204099+rikroe@users.noreply.github.com> Date: Wed, 4 Dec 2024 00:33:45 +0100 Subject: [PATCH 0216/1198] Improve BMWDataUpdateCoordinator typing (#132087) Co-authored-by: rikroe Co-authored-by: Joost Lekkerkerker --- .../bmw_connected_drive/__init__.py | 17 ++-------- .../bmw_connected_drive/binary_sensor.py | 2 +- .../components/bmw_connected_drive/button.py | 2 +- .../bmw_connected_drive/coordinator.py | 32 +++++++++++-------- .../bmw_connected_drive/device_tracker.py | 2 +- .../bmw_connected_drive/diagnostics.py | 4 +-- .../components/bmw_connected_drive/lock.py | 2 +- .../components/bmw_connected_drive/notify.py | 2 +- .../components/bmw_connected_drive/number.py | 2 +- .../components/bmw_connected_drive/select.py | 2 +- .../components/bmw_connected_drive/sensor.py | 2 +- .../components/bmw_connected_drive/switch.py | 2 +- .../bmw_connected_drive/test_coordinator.py | 8 ++--- 13 files changed, 37 insertions(+), 42 deletions(-) diff --git a/homeassistant/components/bmw_connected_drive/__init__.py b/homeassistant/components/bmw_connected_drive/__init__.py index 9e43cfc4187..5ec678b9c95 100644 --- a/homeassistant/components/bmw_connected_drive/__init__.py +++ b/homeassistant/components/bmw_connected_drive/__init__.py @@ -2,7 +2,6 @@ from __future__ import annotations -from dataclasses import dataclass import logging import voluptuous as vol @@ -18,7 +17,7 @@ from homeassistant.helpers import ( import homeassistant.helpers.config_validation as cv from .const import ATTR_VIN, CONF_READ_ONLY, DOMAIN -from .coordinator import BMWDataUpdateCoordinator +from .coordinator import BMWConfigEntry, BMWDataUpdateCoordinator _LOGGER = logging.getLogger(__name__) @@ -49,16 +48,6 @@ PLATFORMS = [ SERVICE_UPDATE_STATE = "update_state" -type BMWConfigEntry = ConfigEntry[BMWData] - - -@dataclass -class BMWData: - """Class to store BMW runtime data.""" - - coordinator: BMWDataUpdateCoordinator - - @callback def _async_migrate_options_from_data_if_missing( hass: HomeAssistant, entry: ConfigEntry @@ -137,11 +126,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: # Set up one data coordinator per account/config entry coordinator = BMWDataUpdateCoordinator( hass, - entry=entry, + config_entry=entry, ) await coordinator.async_config_entry_first_refresh() - entry.runtime_data = BMWData(coordinator) + entry.runtime_data = coordinator # Set up all platforms except notify await hass.config_entries.async_forward_entry_setups( diff --git a/homeassistant/components/bmw_connected_drive/binary_sensor.py b/homeassistant/components/bmw_connected_drive/binary_sensor.py index 285ac98fc8f..5a58c707d6a 100644 --- a/homeassistant/components/bmw_connected_drive/binary_sensor.py +++ b/homeassistant/components/bmw_connected_drive/binary_sensor.py @@ -203,7 +203,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the BMW binary sensors from config entry.""" - coordinator = config_entry.runtime_data.coordinator + coordinator = config_entry.runtime_data entities = [ BMWBinarySensor(coordinator, vehicle, description, hass.config.units) diff --git 
a/homeassistant/components/bmw_connected_drive/button.py b/homeassistant/components/bmw_connected_drive/button.py index 85747278cb1..1b3043a2dcb 100644 --- a/homeassistant/components/bmw_connected_drive/button.py +++ b/homeassistant/components/bmw_connected_drive/button.py @@ -73,7 +73,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the BMW buttons from config entry.""" - coordinator = config_entry.runtime_data.coordinator + coordinator = config_entry.runtime_data entities: list[BMWButton] = [] diff --git a/homeassistant/components/bmw_connected_drive/coordinator.py b/homeassistant/components/bmw_connected_drive/coordinator.py index 4f560d16f9c..3828a827e68 100644 --- a/homeassistant/components/bmw_connected_drive/coordinator.py +++ b/homeassistant/components/bmw_connected_drive/coordinator.py @@ -27,34 +27,40 @@ from .const import CONF_GCID, CONF_READ_ONLY, CONF_REFRESH_TOKEN, DOMAIN, SCAN_I _LOGGER = logging.getLogger(__name__) +type BMWConfigEntry = ConfigEntry[BMWDataUpdateCoordinator] + + class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]): """Class to manage fetching BMW data.""" account: MyBMWAccount + config_entry: BMWConfigEntry - def __init__(self, hass: HomeAssistant, *, entry: ConfigEntry) -> None: + def __init__(self, hass: HomeAssistant, *, config_entry: ConfigEntry) -> None: """Initialize account-wide BMW data updater.""" self.account = MyBMWAccount( - entry.data[CONF_USERNAME], - entry.data[CONF_PASSWORD], - get_region_from_name(entry.data[CONF_REGION]), + config_entry.data[CONF_USERNAME], + config_entry.data[CONF_PASSWORD], + get_region_from_name(config_entry.data[CONF_REGION]), observer_position=GPSPosition(hass.config.latitude, hass.config.longitude), verify=get_default_context(), ) - self.read_only = entry.options[CONF_READ_ONLY] - self._entry = entry + self.read_only: bool = config_entry.options[CONF_READ_ONLY] - if CONF_REFRESH_TOKEN in entry.data: + if CONF_REFRESH_TOKEN in config_entry.data: self.account.set_refresh_token( - refresh_token=entry.data[CONF_REFRESH_TOKEN], - gcid=entry.data.get(CONF_GCID), + refresh_token=config_entry.data[CONF_REFRESH_TOKEN], + gcid=config_entry.data.get(CONF_GCID), ) super().__init__( hass, _LOGGER, - name=f"{DOMAIN}-{entry.data['username']}", - update_interval=timedelta(seconds=SCAN_INTERVALS[entry.data[CONF_REGION]]), + config_entry=config_entry, + name=f"{DOMAIN}-{config_entry.data[CONF_USERNAME]}", + update_interval=timedelta( + seconds=SCAN_INTERVALS[config_entry.data[CONF_REGION]] + ), ) # Default to false on init so _async_update_data logic works @@ -88,9 +94,9 @@ class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]): def _update_config_entry_refresh_token(self, refresh_token: str | None) -> None: """Update or delete the refresh_token in the Config Entry.""" data = { - **self._entry.data, + **self.config_entry.data, CONF_REFRESH_TOKEN: refresh_token, } if not refresh_token: data.pop(CONF_REFRESH_TOKEN) - self.hass.config_entries.async_update_entry(self._entry, data=data) + self.hass.config_entries.async_update_entry(self.config_entry, data=data) diff --git a/homeassistant/components/bmw_connected_drive/device_tracker.py b/homeassistant/components/bmw_connected_drive/device_tracker.py index b65c2c1b088..f53cd72d5de 100644 --- a/homeassistant/components/bmw_connected_drive/device_tracker.py +++ b/homeassistant/components/bmw_connected_drive/device_tracker.py @@ -27,7 +27,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up 
the MyBMW tracker from config entry.""" - coordinator = config_entry.runtime_data.coordinator + coordinator = config_entry.runtime_data entities: list[BMWDeviceTracker] = [] for vehicle in coordinator.account.vehicles: diff --git a/homeassistant/components/bmw_connected_drive/diagnostics.py b/homeassistant/components/bmw_connected_drive/diagnostics.py index 3950ea3dec2..3f357c3ae79 100644 --- a/homeassistant/components/bmw_connected_drive/diagnostics.py +++ b/homeassistant/components/bmw_connected_drive/diagnostics.py @@ -51,7 +51,7 @@ async def async_get_config_entry_diagnostics( hass: HomeAssistant, config_entry: BMWConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator = config_entry.runtime_data.coordinator + coordinator = config_entry.runtime_data coordinator.account.config.log_responses = True await coordinator.account.get_vehicles(force_init=True) @@ -77,7 +77,7 @@ async def async_get_device_diagnostics( hass: HomeAssistant, config_entry: BMWConfigEntry, device: DeviceEntry ) -> dict[str, Any]: """Return diagnostics for a device.""" - coordinator = config_entry.runtime_data.coordinator + coordinator = config_entry.runtime_data coordinator.account.config.log_responses = True await coordinator.account.get_vehicles(force_init=True) diff --git a/homeassistant/components/bmw_connected_drive/lock.py b/homeassistant/components/bmw_connected_drive/lock.py index b715a1e38cc..4aa0b411895 100644 --- a/homeassistant/components/bmw_connected_drive/lock.py +++ b/homeassistant/components/bmw_connected_drive/lock.py @@ -31,7 +31,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the MyBMW lock from config entry.""" - coordinator = config_entry.runtime_data.coordinator + coordinator = config_entry.runtime_data if not coordinator.read_only: async_add_entities( diff --git a/homeassistant/components/bmw_connected_drive/notify.py b/homeassistant/components/bmw_connected_drive/notify.py index 662a73a20cd..04b9fa594e4 100644 --- a/homeassistant/components/bmw_connected_drive/notify.py +++ b/homeassistant/components/bmw_connected_drive/notify.py @@ -53,7 +53,7 @@ def get_service( targets = {} if ( config_entry - and (coordinator := config_entry.runtime_data.coordinator) + and (coordinator := config_entry.runtime_data) and not coordinator.read_only ): targets.update({v.name: v for v in coordinator.account.vehicles}) diff --git a/homeassistant/components/bmw_connected_drive/number.py b/homeassistant/components/bmw_connected_drive/number.py index cce71b3b2fd..7181bad76e0 100644 --- a/homeassistant/components/bmw_connected_drive/number.py +++ b/homeassistant/components/bmw_connected_drive/number.py @@ -61,7 +61,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the MyBMW number from config entry.""" - coordinator = config_entry.runtime_data.coordinator + coordinator = config_entry.runtime_data entities: list[BMWNumber] = [] diff --git a/homeassistant/components/bmw_connected_drive/select.py b/homeassistant/components/bmw_connected_drive/select.py index 7bc91b098ae..7091cbc6817 100644 --- a/homeassistant/components/bmw_connected_drive/select.py +++ b/homeassistant/components/bmw_connected_drive/select.py @@ -68,7 +68,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the MyBMW lock from config entry.""" - coordinator = config_entry.runtime_data.coordinator + coordinator = config_entry.runtime_data entities: list[BMWSelect] = [] diff --git 
a/homeassistant/components/bmw_connected_drive/sensor.py b/homeassistant/components/bmw_connected_drive/sensor.py index 555655511e8..b7be367d57d 100644 --- a/homeassistant/components/bmw_connected_drive/sensor.py +++ b/homeassistant/components/bmw_connected_drive/sensor.py @@ -193,7 +193,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the MyBMW sensors from config entry.""" - coordinator = config_entry.runtime_data.coordinator + coordinator = config_entry.runtime_data entities = [ BMWSensor(coordinator, vehicle, description) diff --git a/homeassistant/components/bmw_connected_drive/switch.py b/homeassistant/components/bmw_connected_drive/switch.py index f0214bc1262..826f6b840b2 100644 --- a/homeassistant/components/bmw_connected_drive/switch.py +++ b/homeassistant/components/bmw_connected_drive/switch.py @@ -69,7 +69,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the MyBMW switch from config entry.""" - coordinator = config_entry.runtime_data.coordinator + coordinator = config_entry.runtime_data entities: list[BMWSwitch] = [] diff --git a/tests/components/bmw_connected_drive/test_coordinator.py b/tests/components/bmw_connected_drive/test_coordinator.py index 774a85eb6da..beb3d74d572 100644 --- a/tests/components/bmw_connected_drive/test_coordinator.py +++ b/tests/components/bmw_connected_drive/test_coordinator.py @@ -33,7 +33,7 @@ async def test_update_success(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert config_entry.runtime_data.coordinator.last_update_success is True + assert config_entry.runtime_data.last_update_success is True @pytest.mark.usefixtures("bmw_fixture") @@ -48,7 +48,7 @@ async def test_update_failed( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - coordinator = config_entry.runtime_data.coordinator + coordinator = config_entry.runtime_data assert coordinator.last_update_success is True @@ -77,7 +77,7 @@ async def test_update_reauth( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - coordinator = config_entry.runtime_data.coordinator + coordinator = config_entry.runtime_data assert coordinator.last_update_success is True @@ -146,7 +146,7 @@ async def test_captcha_reauth( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - coordinator = config_entry.runtime_data.coordinator + coordinator = config_entry.runtime_data assert coordinator.last_update_success is True From abd3466d197a860120679665315c9b8367230883 Mon Sep 17 00:00:00 2001 From: Klaas Schoute Date: Wed, 4 Dec 2024 00:35:50 +0100 Subject: [PATCH 0217/1198] Add powerfox integration (#131640) --- .strict-typing | 1 + CODEOWNERS | 2 + homeassistant/components/powerfox/__init__.py | 55 +++ .../components/powerfox/config_flow.py | 57 +++ homeassistant/components/powerfox/const.py | 11 + .../components/powerfox/coordinator.py | 40 ++ homeassistant/components/powerfox/entity.py | 32 ++ .../components/powerfox/manifest.json | 16 + .../components/powerfox/quality_scale.yaml | 92 +++++ homeassistant/components/powerfox/sensor.py | 147 +++++++ .../components/powerfox/strings.json | 46 +++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 6 + homeassistant/generated/zeroconf.py | 4 + mypy.ini | 10 + requirements_all.txt | 3 + requirements_test_all.txt | 3 + 
tests/components/powerfox/__init__.py | 14 + tests/components/powerfox/conftest.py | 87 +++++ .../powerfox/snapshots/test_sensor.ambr | 358 ++++++++++++++++++ tests/components/powerfox/test_config_flow.py | 145 +++++++ tests/components/powerfox/test_init.py | 45 +++ tests/components/powerfox/test_sensor.py | 53 +++ 23 files changed, 1228 insertions(+) create mode 100644 homeassistant/components/powerfox/__init__.py create mode 100644 homeassistant/components/powerfox/config_flow.py create mode 100644 homeassistant/components/powerfox/const.py create mode 100644 homeassistant/components/powerfox/coordinator.py create mode 100644 homeassistant/components/powerfox/entity.py create mode 100644 homeassistant/components/powerfox/manifest.json create mode 100644 homeassistant/components/powerfox/quality_scale.yaml create mode 100644 homeassistant/components/powerfox/sensor.py create mode 100644 homeassistant/components/powerfox/strings.json create mode 100644 tests/components/powerfox/__init__.py create mode 100644 tests/components/powerfox/conftest.py create mode 100644 tests/components/powerfox/snapshots/test_sensor.ambr create mode 100644 tests/components/powerfox/test_config_flow.py create mode 100644 tests/components/powerfox/test_init.py create mode 100644 tests/components/powerfox/test_sensor.py diff --git a/.strict-typing b/.strict-typing index ed698c26ea0..42f35b52153 100644 --- a/.strict-typing +++ b/.strict-typing @@ -365,6 +365,7 @@ homeassistant.components.persistent_notification.* homeassistant.components.pi_hole.* homeassistant.components.ping.* homeassistant.components.plugwise.* +homeassistant.components.powerfox.* homeassistant.components.powerwall.* homeassistant.components.private_ble_device.* homeassistant.components.prometheus.* diff --git a/CODEOWNERS b/CODEOWNERS index 7755c3eb4ae..916ff63e696 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1133,6 +1133,8 @@ build.json @home-assistant/supervisor /tests/components/point/ @fredrike /homeassistant/components/poolsense/ @haemishkyd /tests/components/poolsense/ @haemishkyd +/homeassistant/components/powerfox/ @klaasnicolaas +/tests/components/powerfox/ @klaasnicolaas /homeassistant/components/powerwall/ @bdraco @jrester @daniel-simpson /tests/components/powerwall/ @bdraco @jrester @daniel-simpson /homeassistant/components/private_ble_device/ @Jc2k diff --git a/homeassistant/components/powerfox/__init__.py b/homeassistant/components/powerfox/__init__.py new file mode 100644 index 00000000000..243f3aacc4f --- /dev/null +++ b/homeassistant/components/powerfox/__init__.py @@ -0,0 +1,55 @@ +"""The Powerfox integration.""" + +from __future__ import annotations + +import asyncio + +from powerfox import Powerfox, PowerfoxConnectionError + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .coordinator import PowerfoxDataUpdateCoordinator + +PLATFORMS: list[Platform] = [Platform.SENSOR] + +type PowerfoxConfigEntry = ConfigEntry[list[PowerfoxDataUpdateCoordinator]] + + +async def async_setup_entry(hass: HomeAssistant, entry: PowerfoxConfigEntry) -> bool: + """Set up Powerfox from a config entry.""" + client = Powerfox( + username=entry.data[CONF_EMAIL], + password=entry.data[CONF_PASSWORD], + session=async_get_clientsession(hass), + ) + + try: + devices = await client.all_devices() + 
except PowerfoxConnectionError as err: + await client.close() + raise ConfigEntryNotReady from err + + coordinators: list[PowerfoxDataUpdateCoordinator] = [ + PowerfoxDataUpdateCoordinator(hass, client, device) for device in devices + ] + + await asyncio.gather( + *[ + coordinator.async_config_entry_first_refresh() + for coordinator in coordinators + ] + ) + + entry.runtime_data = coordinators + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: PowerfoxConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/powerfox/config_flow.py b/homeassistant/components/powerfox/config_flow.py new file mode 100644 index 00000000000..b4eddeb6fce --- /dev/null +++ b/homeassistant/components/powerfox/config_flow.py @@ -0,0 +1,57 @@ +"""Config flow for Powerfox integration.""" + +from __future__ import annotations + +from typing import Any + +from powerfox import Powerfox, PowerfoxAuthenticationError, PowerfoxConnectionError +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .const import DOMAIN + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_EMAIL): str, + vol.Required(CONF_PASSWORD): str, + } +) + + +class PowerfoxConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Powerfox.""" + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + if user_input is not None: + self._async_abort_entries_match({CONF_EMAIL: user_input[CONF_EMAIL]}) + client = Powerfox( + username=user_input[CONF_EMAIL], + password=user_input[CONF_PASSWORD], + session=async_get_clientsession(self.hass), + ) + try: + await client.all_devices() + except PowerfoxAuthenticationError: + errors["base"] = "invalid_auth" + except PowerfoxConnectionError: + errors["base"] = "cannot_connect" + else: + return self.async_create_entry( + title=user_input[CONF_EMAIL], + data={ + CONF_EMAIL: user_input[CONF_EMAIL], + CONF_PASSWORD: user_input[CONF_PASSWORD], + }, + ) + return self.async_show_form( + step_id="user", + errors=errors, + data_schema=STEP_USER_DATA_SCHEMA, + ) diff --git a/homeassistant/components/powerfox/const.py b/homeassistant/components/powerfox/const.py new file mode 100644 index 00000000000..24f1310f970 --- /dev/null +++ b/homeassistant/components/powerfox/const.py @@ -0,0 +1,11 @@ +"""Constants for the Powerfox integration.""" + +from __future__ import annotations + +from datetime import timedelta +import logging +from typing import Final + +DOMAIN: Final = "powerfox" +LOGGER = logging.getLogger(__package__) +SCAN_INTERVAL = timedelta(minutes=5) diff --git a/homeassistant/components/powerfox/coordinator.py b/homeassistant/components/powerfox/coordinator.py new file mode 100644 index 00000000000..6fd9b2af189 --- /dev/null +++ b/homeassistant/components/powerfox/coordinator.py @@ -0,0 +1,40 @@ +"""Coordinator for Powerfox integration.""" + +from __future__ import annotations + +from powerfox import Device, Powerfox, PowerfoxConnectionError, Poweropti + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import 
DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN, LOGGER, SCAN_INTERVAL + + +class PowerfoxDataUpdateCoordinator(DataUpdateCoordinator[Poweropti]): + """Class to manage fetching Powerfox data from the API.""" + + config_entry: ConfigEntry + + def __init__( + self, + hass: HomeAssistant, + client: Powerfox, + device: Device, + ) -> None: + """Initialize global Powerfox data updater.""" + super().__init__( + hass, + LOGGER, + name=DOMAIN, + update_interval=SCAN_INTERVAL, + ) + self.client = client + self.device = device + + async def _async_update_data(self) -> Poweropti: + """Fetch data from Powerfox API.""" + try: + return await self.client.device(device_id=self.device.id) + except PowerfoxConnectionError as error: + raise UpdateFailed(error) from error diff --git a/homeassistant/components/powerfox/entity.py b/homeassistant/components/powerfox/entity.py new file mode 100644 index 00000000000..0ab7200ffe8 --- /dev/null +++ b/homeassistant/components/powerfox/entity.py @@ -0,0 +1,32 @@ +"""Generic entity for Powerfox.""" + +from __future__ import annotations + +from powerfox import Device + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import PowerfoxDataUpdateCoordinator + + +class PowerfoxEntity(CoordinatorEntity[PowerfoxDataUpdateCoordinator]): + """Base entity for Powerfox.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: PowerfoxDataUpdateCoordinator, + device: Device, + ) -> None: + """Initialize Powerfox entity.""" + super().__init__(coordinator) + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, device.id)}, + manufacturer="Powerfox", + model=device.type.human_readable, + name=device.name, + serial_number=device.id, + ) diff --git a/homeassistant/components/powerfox/manifest.json b/homeassistant/components/powerfox/manifest.json new file mode 100644 index 00000000000..a7285bb213f --- /dev/null +++ b/homeassistant/components/powerfox/manifest.json @@ -0,0 +1,16 @@ +{ + "domain": "powerfox", + "name": "Powerfox", + "codeowners": ["@klaasnicolaas"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/powerfox", + "iot_class": "cloud_polling", + "quality_scale": "bronze", + "requirements": ["powerfox==1.0.0"], + "zeroconf": [ + { + "type": "_http._tcp.local.", + "name": "powerfox*" + } + ] +} diff --git a/homeassistant/components/powerfox/quality_scale.yaml b/homeassistant/components/powerfox/quality_scale.yaml new file mode 100644 index 00000000000..5b1fa9e6398 --- /dev/null +++ b/homeassistant/components/powerfox/quality_scale.yaml @@ -0,0 +1,92 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + Entities of this integration does not explicitly subscribe to events. 
+ entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: | + This integration does not provide additional actions. + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + This integration does not have an options flow. + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: + status: exempt + comment: | + This integration uses a coordinator to handle updates. + reauthentication-flow: todo + test-coverage: done + + # Gold + devices: done + diagnostics: todo + discovery-update-info: + status: exempt + comment: | + This integration is connecting to a cloud service. + discovery: + status: exempt + comment: | + It can find poweropti devices via zeroconf, but will start a normal user flow. + docs-data-update: done + docs-examples: todo + docs-known-limitations: done + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: todo + docs-use-cases: done + dynamic-devices: todo + entity-category: done + entity-device-class: done + entity-disabled-by-default: + status: exempt + comment: | + This integration does not have any entities that should disabled by default. + entity-translations: done + exception-translations: done + icon-translations: + status: exempt + comment: | + There is no need for icon translations. + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. + stale-devices: todo + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/powerfox/sensor.py b/homeassistant/components/powerfox/sensor.py new file mode 100644 index 00000000000..af6f0301b0c --- /dev/null +++ b/homeassistant/components/powerfox/sensor.py @@ -0,0 +1,147 @@ +"""Sensors for Powerfox integration.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from typing import Generic, TypeVar + +from powerfox import Device, PowerMeter, WaterMeter + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import UnitOfEnergy, UnitOfPower, UnitOfVolume +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import PowerfoxConfigEntry +from .coordinator import PowerfoxDataUpdateCoordinator +from .entity import PowerfoxEntity + +T = TypeVar("T", PowerMeter, WaterMeter) + + +@dataclass(frozen=True, kw_only=True) +class PowerfoxSensorEntityDescription(Generic[T], SensorEntityDescription): + """Describes Poweropti sensor entity.""" + + value_fn: Callable[[T], float | int | None] + + +SENSORS_POWER: tuple[PowerfoxSensorEntityDescription[PowerMeter], ...] 
= ( + PowerfoxSensorEntityDescription[PowerMeter]( + key="power", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda meter: meter.power, + ), + PowerfoxSensorEntityDescription[PowerMeter]( + key="energy_usage", + translation_key="energy_usage", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda meter: meter.energy_usage, + ), + PowerfoxSensorEntityDescription[PowerMeter]( + key="energy_usage_low_tariff", + translation_key="energy_usage_low_tariff", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda meter: meter.energy_usage_low_tariff, + ), + PowerfoxSensorEntityDescription[PowerMeter]( + key="energy_usage_high_tariff", + translation_key="energy_usage_high_tariff", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda meter: meter.energy_usage_high_tariff, + ), + PowerfoxSensorEntityDescription[PowerMeter]( + key="energy_return", + translation_key="energy_return", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda meter: meter.energy_return, + ), +) + + +SENSORS_WATER: tuple[PowerfoxSensorEntityDescription[WaterMeter], ...] = ( + PowerfoxSensorEntityDescription[WaterMeter]( + key="cold_water", + translation_key="cold_water", + native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, + device_class=SensorDeviceClass.WATER, + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda meter: meter.cold_water, + ), + PowerfoxSensorEntityDescription[WaterMeter]( + key="warm_water", + translation_key="warm_water", + native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, + device_class=SensorDeviceClass.WATER, + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda meter: meter.warm_water, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: PowerfoxConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Powerfox sensors based on a config entry.""" + entities: list[SensorEntity] = [] + for coordinator in entry.runtime_data: + if isinstance(coordinator.data, PowerMeter): + entities.extend( + PowerfoxSensorEntity( + coordinator=coordinator, + description=description, + device=coordinator.device, + ) + for description in SENSORS_POWER + if description.value_fn(coordinator.data) is not None + ) + if isinstance(coordinator.data, WaterMeter): + entities.extend( + PowerfoxSensorEntity( + coordinator=coordinator, + description=description, + device=coordinator.device, + ) + for description in SENSORS_WATER + ) + async_add_entities(entities) + + +class PowerfoxSensorEntity(PowerfoxEntity, SensorEntity): + """Defines a powerfox power meter sensor.""" + + entity_description: PowerfoxSensorEntityDescription + + def __init__( + self, + coordinator: PowerfoxDataUpdateCoordinator, + device: Device, + description: PowerfoxSensorEntityDescription, + ) -> None: + """Initialize Powerfox power meter sensor.""" + super().__init__(coordinator, device) + self.entity_description = description + self._attr_unique_id = f"{device.id}_{description.key}" + + @property + def native_value(self) -> float | int | None: + 
"""Return the state of the entity.""" + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/powerfox/strings.json b/homeassistant/components/powerfox/strings.json new file mode 100644 index 00000000000..451100f3b42 --- /dev/null +++ b/homeassistant/components/powerfox/strings.json @@ -0,0 +1,46 @@ +{ + "config": { + "step": { + "user": { + "description": "Connect to your Powerfox account to get information about your energy, heat or water consumption.", + "data": { + "email": "[%key:common::config_flow::data::email%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "email": "The email address of your Powerfox account.", + "password": "The password of your Powerfox account." + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" + } + }, + "entity": { + "sensor": { + "energy_usage": { + "name": "Energy usage" + }, + "energy_usage_low_tariff": { + "name": "Energy usage low tariff" + }, + "energy_usage_high_tariff": { + "name": "Energy usage high tariff" + }, + "energy_return": { + "name": "Energy return" + }, + "cold_water": { + "name": "Cold water" + }, + "warm_water": { + "name": "Warm water" + } + } + } +} diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 9a75ac32ea1..5cd9dd786fe 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -461,6 +461,7 @@ FLOWS = { "plum_lightpad", "point", "poolsense", + "powerfox", "powerwall", "private_ble_device", "profiler", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index ae7e0dd6c59..d2f0a90065a 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -4763,6 +4763,12 @@ "integration_type": "virtual", "supported_by": "opower" }, + "powerfox": { + "name": "Powerfox", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling" + }, "private_ble_device": { "name": "Private BLE Device", "integration_type": "hub", diff --git a/homeassistant/generated/zeroconf.py b/homeassistant/generated/zeroconf.py index 5f7161a8245..9bfff93cc2f 100644 --- a/homeassistant/generated/zeroconf.py +++ b/homeassistant/generated/zeroconf.py @@ -542,6 +542,10 @@ ZEROCONF = { "manufacturer": "nettigo", }, }, + { + "domain": "powerfox", + "name": "powerfox*", + }, { "domain": "pure_energie", "name": "smartbridge*", diff --git a/mypy.ini b/mypy.ini index 22e85244843..8e675ff6481 100644 --- a/mypy.ini +++ b/mypy.ini @@ -3406,6 +3406,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.powerfox.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.powerwall.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/requirements_all.txt b/requirements_all.txt index 18f7bc2fb20..0df4ba65c86 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1633,6 +1633,9 @@ pmsensor==0.4 # homeassistant.components.poolsense poolsense==0.0.8 +# 
homeassistant.components.powerfox +powerfox==1.0.0 + # homeassistant.components.reddit praw==7.5.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index ebfc47c764d..ab8d4663a86 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1340,6 +1340,9 @@ plumlightpad==0.0.11 # homeassistant.components.poolsense poolsense==0.0.8 +# homeassistant.components.powerfox +powerfox==1.0.0 + # homeassistant.components.reddit praw==7.5.0 diff --git a/tests/components/powerfox/__init__.py b/tests/components/powerfox/__init__.py new file mode 100644 index 00000000000..d24e52eba9b --- /dev/null +++ b/tests/components/powerfox/__init__.py @@ -0,0 +1,14 @@ +"""Tests for the Powerfox integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + +MOCK_DIRECT_HOST = "1.1.1.1" + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the integration.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/powerfox/conftest.py b/tests/components/powerfox/conftest.py new file mode 100644 index 00000000000..14ccc5996e5 --- /dev/null +++ b/tests/components/powerfox/conftest.py @@ -0,0 +1,87 @@ +"""Common fixtures for the Powerfox tests.""" + +from collections.abc import Generator +from datetime import UTC, datetime +from unittest.mock import AsyncMock, patch + +from powerfox import Device, DeviceType, PowerMeter, WaterMeter +import pytest + +from homeassistant.components.powerfox.const import DOMAIN +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.powerfox.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_powerfox_client() -> Generator[AsyncMock]: + """Mock a Powerfox client.""" + with ( + patch( + "homeassistant.components.powerfox.Powerfox", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.powerfox.config_flow.Powerfox", + new=mock_client, + ), + ): + client = mock_client.return_value + client.all_devices.return_value = [ + Device( + id="9x9x1f12xx3x", + date_added=datetime(2024, 11, 26, 9, 22, 35, tzinfo=UTC), + main_device=True, + bidirectional=True, + type=DeviceType.POWER_METER, + name="Poweropti", + ), + Device( + id="9x9x1f12xx4x", + date_added=datetime(2024, 11, 26, 9, 22, 35, tzinfo=UTC), + main_device=False, + bidirectional=False, + type=DeviceType.COLD_WATER_METER, + name="Wateropti", + ), + ] + client.device.side_effect = [ + PowerMeter( + outdated=False, + timestamp=datetime(2024, 11, 26, 10, 48, 51, tzinfo=UTC), + power=111, + energy_usage=1111.111, + energy_return=111.111, + energy_usage_high_tariff=111.111, + energy_usage_low_tariff=111.111, + ), + WaterMeter( + outdated=False, + timestamp=datetime(2024, 11, 26, 10, 48, 51, tzinfo=UTC), + cold_water=1111.111, + warm_water=0.0, + ), + ] + yield client + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a Powerfox config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="Powerfox", + data={ + CONF_EMAIL: "test@powerfox.test", + CONF_PASSWORD: "test-password", + }, + ) diff --git a/tests/components/powerfox/snapshots/test_sensor.ambr b/tests/components/powerfox/snapshots/test_sensor.ambr new file mode 
100644 index 00000000000..dda162d4eeb --- /dev/null +++ b/tests/components/powerfox/snapshots/test_sensor.ambr @@ -0,0 +1,358 @@ +# serializer version: 1 +# name: test_all_sensors[sensor.poweropti_energy_return-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.poweropti_energy_return', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy return', + 'platform': 'powerfox', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_return', + 'unique_id': '9x9x1f12xx3x_energy_return', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.poweropti_energy_return-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Poweropti Energy return', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.poweropti_energy_return', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111.111', + }) +# --- +# name: test_all_sensors[sensor.poweropti_energy_usage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.poweropti_energy_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy usage', + 'platform': 'powerfox', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_usage', + 'unique_id': '9x9x1f12xx3x_energy_usage', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.poweropti_energy_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Poweropti Energy usage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.poweropti_energy_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1111.111', + }) +# --- +# name: test_all_sensors[sensor.poweropti_energy_usage_high_tariff-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.poweropti_energy_usage_high_tariff', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy usage high tariff', + 'platform': 'powerfox', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_usage_high_tariff', + 'unique_id': '9x9x1f12xx3x_energy_usage_high_tariff', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.poweropti_energy_usage_high_tariff-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'device_class': 'energy', + 'friendly_name': 'Poweropti Energy usage high tariff', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.poweropti_energy_usage_high_tariff', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111.111', + }) +# --- +# name: test_all_sensors[sensor.poweropti_energy_usage_low_tariff-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.poweropti_energy_usage_low_tariff', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy usage low tariff', + 'platform': 'powerfox', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_usage_low_tariff', + 'unique_id': '9x9x1f12xx3x_energy_usage_low_tariff', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.poweropti_energy_usage_low_tariff-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Poweropti Energy usage low tariff', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.poweropti_energy_usage_low_tariff', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111.111', + }) +# --- +# name: test_all_sensors[sensor.poweropti_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.poweropti_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'powerfox', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '9x9x1f12xx3x_power', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.poweropti_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Poweropti Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.poweropti_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- +# name: test_all_sensors[sensor.wateropti_cold_water-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.wateropti_cold_water', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cold water', + 'platform': 'powerfox', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cold_water', + 'unique_id': '9x9x1f12xx4x_cold_water', + 'unit_of_measurement': , + }) +# --- +# name: 
test_all_sensors[sensor.wateropti_cold_water-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'water', + 'friendly_name': 'Wateropti Cold water', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wateropti_cold_water', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1111.111', + }) +# --- +# name: test_all_sensors[sensor.wateropti_warm_water-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.wateropti_warm_water', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Warm water', + 'platform': 'powerfox', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'warm_water', + 'unique_id': '9x9x1f12xx4x_warm_water', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.wateropti_warm_water-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'water', + 'friendly_name': 'Wateropti Warm water', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wateropti_warm_water', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- diff --git a/tests/components/powerfox/test_config_flow.py b/tests/components/powerfox/test_config_flow.py new file mode 100644 index 00000000000..b99470880a0 --- /dev/null +++ b/tests/components/powerfox/test_config_flow.py @@ -0,0 +1,145 @@ +"""Test the Powerfox config flow.""" + +from unittest.mock import AsyncMock + +from powerfox import PowerfoxAuthenticationError, PowerfoxConnectionError +import pytest + +from homeassistant.components import zeroconf +from homeassistant.components.powerfox.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from . 
import MOCK_DIRECT_HOST + +from tests.common import MockConfigEntry + +MOCK_ZEROCONF_DISCOVERY_INFO = zeroconf.ZeroconfServiceInfo( + ip_address=MOCK_DIRECT_HOST, + ip_addresses=[MOCK_DIRECT_HOST], + hostname="powerfox.local", + name="Powerfox", + port=443, + type="_http._tcp", + properties={}, +) + + +async def test_full_user_flow( + hass: HomeAssistant, + mock_powerfox_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test the full user configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "user" + assert not result.get("errors") + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_EMAIL: "test@powerfox.test", CONF_PASSWORD: "test-password"}, + ) + + assert result.get("type") is FlowResultType.CREATE_ENTRY + assert result.get("title") == "test@powerfox.test" + assert result.get("data") == { + CONF_EMAIL: "test@powerfox.test", + CONF_PASSWORD: "test-password", + } + assert len(mock_powerfox_client.all_devices.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_zeroconf_discovery( + hass: HomeAssistant, + mock_powerfox_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test zeroconf discovery.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=MOCK_ZEROCONF_DISCOVERY_INFO, + ) + + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "user" + assert not result.get("errors") + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_EMAIL: "test@powerfox.test", CONF_PASSWORD: "test-password"}, + ) + + assert result.get("type") is FlowResultType.CREATE_ENTRY + assert result.get("title") == "test@powerfox.test" + assert result.get("data") == { + CONF_EMAIL: "test@powerfox.test", + CONF_PASSWORD: "test-password", + } + assert len(mock_powerfox_client.all_devices.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_duplicate_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_powerfox_client: AsyncMock, +) -> None: + """Test abort when setting up duplicate entry.""" + mock_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result.get("type") is FlowResultType.FORM + assert not result.get("errors") + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_EMAIL: "test@powerfox.test", CONF_PASSWORD: "test-password"}, + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "already_configured" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (PowerfoxConnectionError, "cannot_connect"), + (PowerfoxAuthenticationError, "invalid_auth"), + ], +) +async def test_exceptions( + hass: HomeAssistant, + mock_powerfox_client: AsyncMock, + mock_setup_entry: AsyncMock, + exception: Exception, + error: str, +) -> None: + """Test exceptions during config flow.""" + mock_powerfox_client.all_devices.side_effect = exception + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_EMAIL: "test@powerfox.test", CONF_PASSWORD: "test-password"}, + ) + 
assert result.get("type") is FlowResultType.FORM + assert result.get("errors") == {"base": error} + + mock_powerfox_client.all_devices.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_EMAIL: "test@powerfox.test", CONF_PASSWORD: "test-password"}, + ) + assert result.get("type") is FlowResultType.CREATE_ENTRY diff --git a/tests/components/powerfox/test_init.py b/tests/components/powerfox/test_init.py new file mode 100644 index 00000000000..900c7b60ae0 --- /dev/null +++ b/tests/components/powerfox/test_init.py @@ -0,0 +1,45 @@ +"""Test the Powerfox init module.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +from powerfox import PowerfoxConnectionError + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry + + +async def test_load_unload_entry( + hass: HomeAssistant, + mock_powerfox_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test load and unload entry.""" + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_remove(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_config_entry_not_ready( + hass: HomeAssistant, + mock_powerfox_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Powerfox configuration entry not ready.""" + mock_powerfox_client.all_devices.side_effect = PowerfoxConnectionError + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/powerfox/test_sensor.py b/tests/components/powerfox/test_sensor.py new file mode 100644 index 00000000000..547d8de202c --- /dev/null +++ b/tests/components/powerfox/test_sensor.py @@ -0,0 +1,53 @@ +"""Test the sensors provided by the Powerfox integration.""" + +from __future__ import annotations + +from datetime import timedelta +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +from powerfox import PowerfoxConnectionError +from syrupy import SnapshotAssertion + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +async def test_all_sensors( + hass: HomeAssistant, + mock_powerfox_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Powerfox sensors.""" + with patch("homeassistant.components.powerfox.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_update_failed( + hass: HomeAssistant, + mock_powerfox_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test entities become unavailable after failed update.""" + await setup_integration(hass, mock_config_entry) + assert mock_config_entry.state is ConfigEntryState.LOADED + + assert hass.states.get("sensor.poweropti_energy_usage").state is not None + + mock_powerfox_client.device.side_effect = PowerfoxConnectionError + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("sensor.poweropti_energy_usage").state == STATE_UNAVAILABLE From 5b365fc0bd89df048ee1c8488c487eb04a66057f Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Wed, 4 Dec 2024 00:39:53 +0100 Subject: [PATCH 0218/1198] Add missing data description for solarlog (#131712) --- homeassistant/components/solarlog/strings.json | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/homeassistant/components/solarlog/strings.json b/homeassistant/components/solarlog/strings.json index bbd9b509ecf..bf87b0b0938 100644 --- a/homeassistant/components/solarlog/strings.json +++ b/homeassistant/components/solarlog/strings.json @@ -26,6 +26,10 @@ "data": { "has_password": "[%key:component::solarlog::config::step::user::data::has_password%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "has_password": "[%key:component::solarlog::config::step::user::data_description::has_password%]", + "password": "[%key:component::solarlog::config::step::password::data_description::password%]" } }, "reconfigure": { @@ -33,6 +37,10 @@ "data": { "has_password": "[%key:component::solarlog::config::step::user::data::has_password%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "has_password": "[%key:component::solarlog::config::step::user::data_description::has_password%]", + "password": "[%key:component::solarlog::config::step::password::data_description::password%]" } } }, From c484568e75952d549d713795e6f46231f0e9e1a1 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Wed, 4 Dec 2024 00:40:41 +0100 Subject: [PATCH 0219/1198] Bump pynecil to v2.0.2 (#132221) --- homeassistant/components/iron_os/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/iron_os/manifest.json b/homeassistant/components/iron_os/manifest.json index 3141273e3f0..d85b8bf4707 100644 --- a/homeassistant/components/iron_os/manifest.json +++ b/homeassistant/components/iron_os/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/iron_os", "iot_class": "local_polling", "loggers": ["pynecil", "aiogithubapi"], - "requirements": ["pynecil==1.0.1", 
"aiogithubapi==24.6.0"] + "requirements": ["pynecil==2.0.2", "aiogithubapi==24.6.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 0df4ba65c86..2b441c85d57 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2093,7 +2093,7 @@ pymsteams==0.1.12 pymysensors==0.24.0 # homeassistant.components.iron_os -pynecil==1.0.1 +pynecil==2.0.2 # homeassistant.components.netgear pynetgear==0.10.10 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index ab8d4663a86..9ff648d250f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1692,7 +1692,7 @@ pymonoprice==0.4 pymysensors==0.24.0 # homeassistant.components.iron_os -pynecil==1.0.1 +pynecil==2.0.2 # homeassistant.components.netgear pynetgear==0.10.10 From 9a17389cd005fa21ee8ec364d20a51e5fa7c9954 Mon Sep 17 00:00:00 2001 From: Tom Date: Wed, 4 Dec 2024 00:42:53 +0100 Subject: [PATCH 0220/1198] Plugwise quality docs benchmark data update and removal (#132082) --- homeassistant/components/plugwise/quality_scale.yaml | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/plugwise/quality_scale.yaml b/homeassistant/components/plugwise/quality_scale.yaml index 0881e79c1c0..58a20046c5b 100644 --- a/homeassistant/components/plugwise/quality_scale.yaml +++ b/homeassistant/components/plugwise/quality_scale.yaml @@ -31,9 +31,7 @@ rules: docs-installation-instructions: status: todo comment: Docs PR 36087 - docs-removal-instructions: - status: todo - comment: Docs PR 36055 (done, but mark todo for benchmark) + docs-removal-instructions: done docs-actions: done brands: done ## Silver @@ -91,9 +89,7 @@ rules: docs-supported-functions: status: todo comment: Check for completeness - docs-data-update: - status: todo - comment: Docs PR 36055 (done, but mark todo for benchmark) + docs-data-update: done docs-known-limitations: status: todo comment: Partial in 36087 but could be more elaborat From 2696405c63c39f4029423cc34b05c507413ec01a Mon Sep 17 00:00:00 2001 From: jb101010-2 <168106462+jb101010-2@users.noreply.github.com> Date: Wed, 4 Dec 2024 00:59:36 +0100 Subject: [PATCH 0221/1198] Suez water add quality_scale.yaml (#131360) Co-authored-by: Joost Lekkerkerker --- .../components/suez_water/quality_scale.yaml | 88 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 88 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/suez_water/quality_scale.yaml diff --git a/homeassistant/components/suez_water/quality_scale.yaml b/homeassistant/components/suez_water/quality_scale.yaml new file mode 100644 index 00000000000..0ca4c2e0f27 --- /dev/null +++ b/homeassistant/components/suez_water/quality_scale.yaml @@ -0,0 +1,88 @@ +rules: + # Bronze + config-flow: todo + test-before-configure: done + unique-config-entry: done + config-flow-test-coverage: todo + runtime-data: + status: todo + comment: coordinator is created during setup, should be stored in runtime_data + test-before-setup: done + appropriate-polling: done + entity-unique-id: done + has-entity-name: done + entity-event-setup: + status: exempt + comment: no subscription to api + dependency-transparency: done + action-setup: + status: exempt + comment: no service action + common-modules: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + docs-actions: + status: exempt + comment: no service action + brands: done + + # Silver + config-entry-unloading: done + log-when-unavailable: todo + entity-unavailable: 
done + action-exceptions: + status: exempt + comment: no service action + reauthentication-flow: todo + parallel-updates: + status: exempt + comment: no service action and coordinator updates + test-coverage: done + integration-owner: done + docs-installation-parameters: + status: todo + comment: missing user/password + docs-configuration-parameters: + status: exempt + comment: no configuration option + + # Gold + entity-translations: done + entity-device-class: done + devices: done + entity-category: todo + entity-disabled-by-default: todo + discovery: + status: exempt + comment: api only, nothing on local network to discover services + stale-devices: + status: exempt + comment: one device only + diagnostics: todo + exception-translations: todo + icon-translations: todo + reconfiguration-flow: todo + dynamic-devices: + status: exempt + comment: one device only + discovery-update-info: + status: exempt + comment: fixed api + repair-issues: + status: exempt + comment: No repair issues to be raised + docs-use-cases: done + docs-supported-devices: todo + docs-supported-functions: done + docs-data-update: + status: todo + comment: make it clearer + docs-known-limitations: todo + docs-troubleshooting: todo + docs-examples: done + + # Platinum + async-dependency: done + inject-websession: todo + strict-typing: done diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 95b35f63e50..7ba2bbc3d25 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -1003,7 +1003,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "stream", "streamlabswater", "subaru", - "suez_water", "sun", "sunweg", "supervisord", From c0303bc6520c1fb47d28a3b974fbda2ca9d8c183 Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Wed, 4 Dec 2024 00:59:57 +0100 Subject: [PATCH 0222/1198] Add quality scale for fyta (#131508) Co-authored-by: Josef Zweck <24647999+zweckj@users.noreply.github.com> Co-authored-by: Joost Lekkerkerker --- homeassistant/components/fyta/manifest.json | 1 + .../components/fyta/quality_scale.yaml | 90 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 3 files changed, 91 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/fyta/quality_scale.yaml diff --git a/homeassistant/components/fyta/manifest.json b/homeassistant/components/fyta/manifest.json index 15f007e5f4d..ea628f55c6c 100644 --- a/homeassistant/components/fyta/manifest.json +++ b/homeassistant/components/fyta/manifest.json @@ -8,5 +8,6 @@ "integration_type": "hub", "iot_class": "cloud_polling", "loggers": ["fyta_cli"], + "quality_scale": "platinum", "requirements": ["fyta_cli==0.7.0"] } diff --git a/homeassistant/components/fyta/quality_scale.yaml b/homeassistant/components/fyta/quality_scale.yaml new file mode 100644 index 00000000000..97f62f884e7 --- /dev/null +++ b/homeassistant/components/fyta/quality_scale.yaml @@ -0,0 +1,90 @@ +rules: + # Bronze + config-flow: done + test-before-configure: done + unique-config-entry: done + config-flow-test-coverage: done + runtime-data: done + test-before-setup: done + appropriate-polling: done + entity-unique-id: done + has-entity-name: done + entity-event-setup: done + dependency-transparency: done + action-setup: + status: exempt + comment: No custom action. + common-modules: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + docs-actions: + status: exempt + comment: No custom action. 
+ brands: done + + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: + status: exempt + comment: No custom action. + reauthentication-flow: done + parallel-updates: + status: exempt + comment: | + Coordinator and only sensor platform. + + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: + status: exempt + comment: | + No options flow. + + # Gold + entity-translations: done + entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: + status: exempt + comment: No noisy entities. + discovery: + status: exempt + comment: bug in hassfest + stale-devices: done + diagnostics: done + exception-translations: done + icon-translations: done + reconfiguration-flow: + status: exempt + comment: No configuration besides credentials. + dynamic-devices: done + discovery-update-info: + status: exempt + comment: Fyta can be discovered but does not have a local connection. + repair-issues: + status: exempt + comment: | + No issues/repairs. + docs-use-cases: done + docs-supported-devices: done + docs-supported-functions: done + docs-data-update: done + docs-known-limitations: done + docs-troubleshooting: + status: exempt + comment: | + No known issues that could be resolved by the user. + docs-examples: + status: exempt + comment: | + As only sensors are provided, no examples deemed necessary/appropriate. + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 7ba2bbc3d25..d76fd6a0bc2 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -425,7 +425,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "fujitsu_fglair", "fujitsu_hvac", "futurenow", - "fyta", "garadget", "garages_amsterdam", "gardena_bluetooth", From 3b39c534793ca0e9b81c95c1c5cc296ec9e0da96 Mon Sep 17 00:00:00 2001 From: Andrew Jackson Date: Wed, 4 Dec 2024 00:08:58 +0000 Subject: [PATCH 0223/1198] Add quality scale for Mastodon (#131357) --- .../components/mastodon/quality_scale.yaml | 93 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 93 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/mastodon/quality_scale.yaml diff --git a/homeassistant/components/mastodon/quality_scale.yaml b/homeassistant/components/mastodon/quality_scale.yaml new file mode 100644 index 00000000000..f287b9a0c1f --- /dev/null +++ b/homeassistant/components/mastodon/quality_scale.yaml @@ -0,0 +1,93 @@ +rules: + # Bronze + action-setup: done + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: + status: todo + comment: | + Mastodon.py does not have CI build/publish. + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + Entities of this integration do not explicitly subscribe to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: todo + comment: | + Legacy Notify needs rewriting once Notify architecture stabilizes. 
+ config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + There are no configuration options. + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: done + reauthentication-flow: + status: todo + comment: | + Waiting to move to oAuth. + test-coverage: + status: todo + comment: | + Legacy Notify needs rewriting once Notify architecture stabilizes. + + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: exempt + comment: | + Web service does not support discovery. + discovery: + status: exempt + comment: | + Web service does not support discovery. + docs-data-update: done + docs-examples: done + docs-known-limitations: todo + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + This integration connects to a single web service. + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: + status: todo + comment: | + Waiting to move to OAuth. + repair-issues: done + stale-devices: + status: exempt + comment: | + Web service does not go stale. + + # Platinum + async-dependency: todo + inject-websession: todo + strict-typing: done diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index d76fd6a0bc2..2de90dda964 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -642,7 +642,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "manual_mqtt", "map", "marytts", - "mastodon", "matrix", "matter", "maxcube", From 3ef9b718073479939ca74f502d35b07449ad826c Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Tue, 3 Dec 2024 16:18:34 -0800 Subject: [PATCH 0224/1198] Add quality_scale.yaml for Google Photos integration (#131329) Co-authored-by: Joost Lekkerkerker --- .../google_photos/quality_scale.yaml | 68 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 68 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/google_photos/quality_scale.yaml diff --git a/homeassistant/components/google_photos/quality_scale.yaml b/homeassistant/components/google_photos/quality_scale.yaml new file mode 100644 index 00000000000..ed313e13d6a --- /dev/null +++ b/homeassistant/components/google_photos/quality_scale.yaml @@ -0,0 +1,68 @@ +rules: + # Bronze + config-flow: done + brands: done + dependency-transparency: done + common-modules: done + has-entity-name: + status: exempt + comment: Integration does not have entities + action-setup: + status: todo + comment: | + The integration does action setup in `async_setup_entry` which needs to be + moved to `async_setup`. + appropriate-polling: done + test-before-configure: done + entity-event-setup: + status: exempt + comment: Integration does not subscribe to events. 
+ unique-config-entry: done + entity-unique-id: done + docs-installation-instructions: done + docs-removal-instructions: todo + test-before-setup: done + docs-high-level-description: done + config-flow-test-coverage: done + docs-actions: done + runtime-data: done + + # Silver + log-when-unavailable: todo + config-entry-unloading: todo + reauthentication-flow: done + action-exceptions: todo + docs-installation-parameters: todo + integration-owner: todo + parallel-updates: todo + test-coverage: todo + docs-configuration-parameters: todo + entity-unavailable: todo + + # Gold + docs-examples: todo + discovery-update-info: todo + entity-device-class: todo + entity-translations: todo + docs-data-update: todo + entity-disabled-by-default: todo + discovery: todo + exception-translations: todo + devices: todo + docs-supported-devices: todo + icon-translations: todo + docs-known-limitations: todo + stale-devices: todo + docs-supported-functions: todo + repair-issues: todo + reconfiguration-flow: todo + entity-category: todo + dynamic-devices: todo + docs-troubleshooting: todo + diagnostics: todo + docs-use-cases: todo + + # Platinum + async-dependency: todo + strict-typing: todo + inject-websession: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 2de90dda964..63ca8b0d213 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -457,7 +457,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "google_generative_ai_conversation", "google_mail", "google_maps", - "google_photos", "google_pubsub", "google_sheets", "google_tasks", From 7a9849771098e66a6ed33771f92adcb10d7a3b29 Mon Sep 17 00:00:00 2001 From: LG-ThinQ-Integration Date: Wed, 4 Dec 2024 09:46:36 +0900 Subject: [PATCH 0225/1198] Bump thinqconnect to 1.0.2 (#132131) Co-authored-by: yunseon.park --- homeassistant/components/lg_thinq/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lg_thinq/manifest.json b/homeassistant/components/lg_thinq/manifest.json index daab1353098..6dd60909c66 100644 --- a/homeassistant/components/lg_thinq/manifest.json +++ b/homeassistant/components/lg_thinq/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/lg_thinq", "iot_class": "cloud_push", "loggers": ["thinqconnect"], - "requirements": ["thinqconnect==1.0.1"] + "requirements": ["thinqconnect==1.0.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 2b441c85d57..e57670d25ce 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2840,7 +2840,7 @@ thermopro-ble==0.10.0 thingspeak==1.0.0 # homeassistant.components.lg_thinq -thinqconnect==1.0.1 +thinqconnect==1.0.2 # homeassistant.components.tikteck tikteck==0.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 9ff648d250f..deb421d35cb 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2265,7 +2265,7 @@ thermobeacon-ble==0.7.0 thermopro-ble==0.10.0 # homeassistant.components.lg_thinq -thinqconnect==1.0.1 +thinqconnect==1.0.2 # homeassistant.components.tilt_ble tilt-ble==0.2.3 From 1fe2a928a2dada29194d7ccad8f12b0caff14e0d Mon Sep 17 00:00:00 2001 From: Klaas Schoute Date: Wed, 4 Dec 2024 01:48:35 +0100 Subject: [PATCH 0226/1198] Add reauthentication flow for Powerfox integration (#132225) * Add reauthentication flow for Powerfox integration * Update quality scale --- .../components/powerfox/config_flow.py | 47 +++++++++++- 
.../components/powerfox/coordinator.py | 15 +++- .../components/powerfox/quality_scale.yaml | 2 +- .../components/powerfox/strings.json | 13 +++- tests/components/powerfox/test_config_flow.py | 75 ++++++++++++++++++- tests/components/powerfox/test_init.py | 19 ++++- 6 files changed, 163 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/powerfox/config_flow.py b/homeassistant/components/powerfox/config_flow.py index b4eddeb6fce..ca78b8eb874 100644 --- a/homeassistant/components/powerfox/config_flow.py +++ b/homeassistant/components/powerfox/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Mapping from typing import Any from powerfox import Powerfox, PowerfoxAuthenticationError, PowerfoxConnectionError @@ -20,6 +21,12 @@ STEP_USER_DATA_SCHEMA = vol.Schema( } ) +STEP_REAUTH_SCHEMA = vol.Schema( + { + vol.Required(CONF_PASSWORD): str, + } +) + class PowerfoxConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Powerfox.""" @@ -28,7 +35,8 @@ class PowerfoxConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - errors: dict[str, str] = {} + errors = {} + if user_input is not None: self._async_abort_entries_match({CONF_EMAIL: user_input[CONF_EMAIL]}) client = Powerfox( @@ -55,3 +63,40 @@ class PowerfoxConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, data_schema=STEP_USER_DATA_SCHEMA, ) + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle re-authentication flow for Powerfox.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle re-authentication flow for Powerfox.""" + errors = {} + + reauth_entry = self._get_reauth_entry() + if user_input is not None: + client = Powerfox( + username=reauth_entry.data[CONF_EMAIL], + password=user_input[CONF_PASSWORD], + session=async_get_clientsession(self.hass), + ) + try: + await client.all_devices() + except PowerfoxAuthenticationError: + errors["base"] = "invalid_auth" + except PowerfoxConnectionError: + errors["base"] = "cannot_connect" + else: + return self.async_update_reload_and_abort( + reauth_entry, + data_updates=user_input, + ) + return self.async_show_form( + step_id="reauth_confirm", + description_placeholders={"email": reauth_entry.data[CONF_EMAIL]}, + data_schema=STEP_REAUTH_SCHEMA, + errors=errors, + ) diff --git a/homeassistant/components/powerfox/coordinator.py b/homeassistant/components/powerfox/coordinator.py index 6fd9b2af189..f7ec5ab6716 100644 --- a/homeassistant/components/powerfox/coordinator.py +++ b/homeassistant/components/powerfox/coordinator.py @@ -2,10 +2,17 @@ from __future__ import annotations -from powerfox import Device, Powerfox, PowerfoxConnectionError, Poweropti +from powerfox import ( + Device, + Powerfox, + PowerfoxAuthenticationError, + PowerfoxConnectionError, + Poweropti, +) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN, LOGGER, SCAN_INTERVAL @@ -36,5 +43,7 @@ class PowerfoxDataUpdateCoordinator(DataUpdateCoordinator[Poweropti]): """Fetch data from Powerfox API.""" try: return await self.client.device(device_id=self.device.id) - except 
PowerfoxConnectionError as error: - raise UpdateFailed(error) from error + except PowerfoxAuthenticationError as err: + raise ConfigEntryAuthFailed(err) from err + except PowerfoxConnectionError as err: + raise UpdateFailed(err) from err diff --git a/homeassistant/components/powerfox/quality_scale.yaml b/homeassistant/components/powerfox/quality_scale.yaml index 5b1fa9e6398..43172a2e84a 100644 --- a/homeassistant/components/powerfox/quality_scale.yaml +++ b/homeassistant/components/powerfox/quality_scale.yaml @@ -46,7 +46,7 @@ rules: status: exempt comment: | This integration uses a coordinator to handle updates. - reauthentication-flow: todo + reauthentication-flow: done test-coverage: done # Gold diff --git a/homeassistant/components/powerfox/strings.json b/homeassistant/components/powerfox/strings.json index 451100f3b42..3eab77494d3 100644 --- a/homeassistant/components/powerfox/strings.json +++ b/homeassistant/components/powerfox/strings.json @@ -11,6 +11,16 @@ "email": "The email address of your Powerfox account.", "password": "The password of your Powerfox account." } + }, + "reauth_confirm": { + "title": "[%key:common::config_flow::title::reauth%]", + "description": "The password for {email} is no longer valid.", + "data": { + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "[%key:component::powerfox::config::step::user::data_description::password%]" + } } }, "error": { @@ -18,7 +28,8 @@ "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "entity": { diff --git a/tests/components/powerfox/test_config_flow.py b/tests/components/powerfox/test_config_flow.py index b99470880a0..759092aee6e 100644 --- a/tests/components/powerfox/test_config_flow.py +++ b/tests/components/powerfox/test_config_flow.py @@ -1,6 +1,6 @@ """Test the Powerfox config flow.""" -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch from powerfox import PowerfoxAuthenticationError, PowerfoxConnectionError import pytest @@ -136,6 +136,7 @@ async def test_exceptions( assert result.get("type") is FlowResultType.FORM assert result.get("errors") == {"base": error} + # Recover from error mock_powerfox_client.all_devices.side_effect = None result = await hass.config_entries.flow.async_configure( @@ -143,3 +144,75 @@ async def test_exceptions( user_input={CONF_EMAIL: "test@powerfox.test", CONF_PASSWORD: "test-password"}, ) assert result.get("type") is FlowResultType.CREATE_ENTRY + + +async def test_step_reauth( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, +) -> None: + """Test re-authentication flow.""" + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reauth_flow(hass) + + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "reauth_confirm" + + with patch( + "homeassistant.components.powerfox.config_flow.Powerfox", + autospec=True, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_PASSWORD: "new-password"}, + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "reauth_successful" + + assert len(hass.config_entries.async_entries()) == 1 + assert 
mock_config_entry.data[CONF_PASSWORD] == "new-password" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (PowerfoxConnectionError, "cannot_connect"), + (PowerfoxAuthenticationError, "invalid_auth"), + ], +) +async def test_step_reauth_exceptions( + hass: HomeAssistant, + mock_powerfox_client: AsyncMock, + mock_config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + exception: Exception, + error: str, +) -> None: + """Test exceptions during re-authentication flow.""" + mock_powerfox_client.all_devices.side_effect = exception + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reauth_flow(hass) + + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_PASSWORD: "new-password"}, + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("errors") == {"base": error} + + # Recover from error + mock_powerfox_client.all_devices.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_PASSWORD: "new-password"}, + ) + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "reauth_successful" + + assert len(hass.config_entries.async_entries()) == 1 + assert mock_config_entry.data[CONF_PASSWORD] == "new-password" diff --git a/tests/components/powerfox/test_init.py b/tests/components/powerfox/test_init.py index 900c7b60ae0..1ad60babc04 100644 --- a/tests/components/powerfox/test_init.py +++ b/tests/components/powerfox/test_init.py @@ -4,7 +4,7 @@ from __future__ import annotations from unittest.mock import AsyncMock -from powerfox import PowerfoxConnectionError +from powerfox import PowerfoxAuthenticationError, PowerfoxConnectionError from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -43,3 +43,20 @@ async def test_config_entry_not_ready( await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_setup_entry_exception( + hass: HomeAssistant, + mock_powerfox_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test ConfigEntryNotReady when API raises an exception during entry setup.""" + mock_config_entry.add_to_hass(hass) + mock_powerfox_client.device.side_effect = PowerfoxAuthenticationError + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + assert flows[0]["step_id"] == "reauth_confirm" From cb361845111a6fa5947cf52c5174d5d09c67fa5a Mon Sep 17 00:00:00 2001 From: Raphael Hehl <7577984+RaHehl@users.noreply.github.com> Date: Wed, 4 Dec 2024 06:03:31 +0100 Subject: [PATCH 0227/1198] fix: unifiprotect prevent RTSP repair for third-party cameras (#132212) Co-authored-by: J. 
Nick Koston --- .../components/unifiprotect/camera.py | 2 +- tests/components/unifiprotect/test_repairs.py | 27 +++++++++++++++++++ 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/unifiprotect/camera.py b/homeassistant/components/unifiprotect/camera.py index a40939be917..0b1c03b8dd6 100644 --- a/homeassistant/components/unifiprotect/camera.py +++ b/homeassistant/components/unifiprotect/camera.py @@ -90,7 +90,7 @@ def _get_camera_channels( is_default = False # no RTSP enabled use first channel with no stream - if is_default: + if is_default and not camera.is_third_party_camera: _create_rtsp_repair(hass, entry, data, camera) yield camera, camera.channels[0], True else: diff --git a/tests/components/unifiprotect/test_repairs.py b/tests/components/unifiprotect/test_repairs.py index adb9555e6ea..1117038bbd0 100644 --- a/tests/components/unifiprotect/test_repairs.py +++ b/tests/components/unifiprotect/test_repairs.py @@ -363,3 +363,30 @@ async def test_rtsp_writable_fix_when_not_setup( ufp.api.update_device.assert_called_with( ModelType.CAMERA, doorbell.id, {"channels": channels} ) + + +async def test_rtsp_no_fix_if_third_party( + hass: HomeAssistant, + ufp: MockUFPFixture, + doorbell: Camera, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test no RTSP disabled warning if camera is third-party.""" + + for channel in doorbell.channels: + channel.is_rtsp_enabled = False + for user in ufp.api.bootstrap.users.values(): + user.all_permissions = [] + + ufp.api.get_camera = AsyncMock(return_value=doorbell) + doorbell.is_third_party_camera = True + + await init_entry(hass, ufp, [doorbell]) + await async_process_repairs_platforms(hass) + ws_client = await hass_ws_client(hass) + + await ws_client.send_json({"id": 1, "type": "repairs/list_issues"}) + msg = await ws_client.receive_json() + + assert msg["success"] + assert not msg["result"]["issues"] From ce11ac5ecd17144737322056b9e8a5382411c127 Mon Sep 17 00:00:00 2001 From: Jeff Terrace Date: Wed, 4 Dec 2024 01:34:00 -0500 Subject: [PATCH 0228/1198] Bump onvif-zeep-async to 3.1.13 (#132229) Bump onvif-zeep-async to 3.1.13. 
--- homeassistant/components/onvif/manifest.json | 2 +- pyproject.toml | 2 -- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 3 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/onvif/manifest.json b/homeassistant/components/onvif/manifest.json index d03073dcfd3..02ef16b6787 100644 --- a/homeassistant/components/onvif/manifest.json +++ b/homeassistant/components/onvif/manifest.json @@ -8,5 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/onvif", "iot_class": "local_push", "loggers": ["onvif", "wsdiscovery", "zeep"], - "requirements": ["onvif-zeep-async==3.1.12", "WSDiscovery==2.0.0"] + "requirements": ["onvif-zeep-async==3.1.13", "WSDiscovery==2.0.0"] } diff --git a/pyproject.toml b/pyproject.toml index 1cd7cb878d6..af910075b32 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -527,8 +527,6 @@ filterwarnings = [ # https://github.com/rytilahti/python-miio/pull/1809 - >=0.6.0.dev0 "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:miio.protocol", "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:miio.miioprotocol", - # https://github.com/hunterjm/python-onvif-zeep-async/pull/51 - >3.1.12 - "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:onvif.client", # https://github.com/okunishinishi/python-stringcase/commit/6a5c5bbd3fe5337862abc7fd0853a0f36e18b2e1 - >1.2.0 "ignore:invalid escape sequence:SyntaxWarning:.*stringcase", diff --git a/requirements_all.txt b/requirements_all.txt index e57670d25ce..d3db7d1ecf0 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1524,7 +1524,7 @@ omnilogic==0.4.5 ondilo==0.5.0 # homeassistant.components.onvif -onvif-zeep-async==3.1.12 +onvif-zeep-async==3.1.13 # homeassistant.components.opengarage open-garage==0.2.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index deb421d35cb..07fb2483eaa 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1269,7 +1269,7 @@ omnilogic==0.4.5 ondilo==0.5.0 # homeassistant.components.onvif -onvif-zeep-async==3.1.12 +onvif-zeep-async==3.1.13 # homeassistant.components.opengarage open-garage==0.2.0 From 58d06ebc39d948d1ed235181ae6b8b758d04c88d Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Wed, 4 Dec 2024 09:35:53 +0100 Subject: [PATCH 0229/1198] Bump yt-dlp to 2024.12.03 (#132220) --- homeassistant/components/media_extractor/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/media_extractor/manifest.json b/homeassistant/components/media_extractor/manifest.json index 866215839bf..f85f1561bb9 100644 --- a/homeassistant/components/media_extractor/manifest.json +++ b/homeassistant/components/media_extractor/manifest.json @@ -8,6 +8,6 @@ "iot_class": "calculated", "loggers": ["yt_dlp"], "quality_scale": "internal", - "requirements": ["yt-dlp[default]==2024.11.18"], + "requirements": ["yt-dlp[default]==2024.12.03"], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index d3db7d1ecf0..23fab1e7574 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3069,7 +3069,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2024.11.18 +yt-dlp[default]==2024.12.03 # homeassistant.components.zamg zamg==0.3.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 
07fb2483eaa..b0b75aa988f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2458,7 +2458,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2024.11.18 +yt-dlp[default]==2024.12.03 # homeassistant.components.zamg zamg==0.3.6 From ab1f03f392b0570aaae577d518fa6c41831e3ae6 Mon Sep 17 00:00:00 2001 From: Klaas Schoute Date: Wed, 4 Dec 2024 09:37:17 +0100 Subject: [PATCH 0230/1198] Add diagnostics to Powerfox integration (#132226) * Add diagnostics to Powerfox integration * Update quality scale list --- .../components/powerfox/diagnostics.py | 58 +++++++++++++++++++ .../components/powerfox/quality_scale.yaml | 2 +- .../powerfox/snapshots/test_diagnostics.ambr | 26 +++++++++ tests/components/powerfox/test_diagnostics.py | 30 ++++++++++ 4 files changed, 115 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/powerfox/diagnostics.py create mode 100644 tests/components/powerfox/snapshots/test_diagnostics.ambr create mode 100644 tests/components/powerfox/test_diagnostics.py diff --git a/homeassistant/components/powerfox/diagnostics.py b/homeassistant/components/powerfox/diagnostics.py new file mode 100644 index 00000000000..8f6b847fca0 --- /dev/null +++ b/homeassistant/components/powerfox/diagnostics.py @@ -0,0 +1,58 @@ +"""Support for Powerfox diagnostics.""" + +from __future__ import annotations + +from datetime import datetime +from typing import Any + +from powerfox import PowerMeter, WaterMeter + +from homeassistant.core import HomeAssistant + +from . import PowerfoxConfigEntry, PowerfoxDataUpdateCoordinator + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: PowerfoxConfigEntry +) -> dict[str, Any]: + """Return diagnostics for Powerfox config entry.""" + powerfox_data: list[PowerfoxDataUpdateCoordinator] = entry.runtime_data + + return { + "devices": [ + { + **( + { + "power_meter": { + "outdated": coordinator.data.outdated, + "timestamp": datetime.strftime( + coordinator.data.timestamp, "%Y-%m-%d %H:%M:%S" + ), + "power": coordinator.data.power, + "energy_usage": coordinator.data.energy_usage, + "energy_return": coordinator.data.energy_return, + "energy_usage_high_tariff": coordinator.data.energy_usage_high_tariff, + "energy_usage_low_tariff": coordinator.data.energy_usage_low_tariff, + } + } + if isinstance(coordinator.data, PowerMeter) + else {} + ), + **( + { + "water_meter": { + "outdated": coordinator.data.outdated, + "timestamp": datetime.strftime( + coordinator.data.timestamp, "%Y-%m-%d %H:%M:%S" + ), + "cold_water": coordinator.data.cold_water, + "warm_water": coordinator.data.warm_water, + } + } + if isinstance(coordinator.data, WaterMeter) + else {} + ), + } + for coordinator in powerfox_data + ], + } diff --git a/homeassistant/components/powerfox/quality_scale.yaml b/homeassistant/components/powerfox/quality_scale.yaml index 43172a2e84a..5a14264940f 100644 --- a/homeassistant/components/powerfox/quality_scale.yaml +++ b/homeassistant/components/powerfox/quality_scale.yaml @@ -51,7 +51,7 @@ rules: # Gold devices: done - diagnostics: todo + diagnostics: done discovery-update-info: status: exempt comment: | diff --git a/tests/components/powerfox/snapshots/test_diagnostics.ambr b/tests/components/powerfox/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..781e7b8c0d5 --- /dev/null +++ b/tests/components/powerfox/snapshots/test_diagnostics.ambr @@ -0,0 +1,26 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'devices': 
list([ + dict({ + 'power_meter': dict({ + 'energy_return': 111.111, + 'energy_usage': 1111.111, + 'energy_usage_high_tariff': 111.111, + 'energy_usage_low_tariff': 111.111, + 'outdated': False, + 'power': 111, + 'timestamp': '2024-11-26 10:48:51', + }), + }), + dict({ + 'water_meter': dict({ + 'cold_water': 1111.111, + 'outdated': False, + 'timestamp': '2024-11-26 10:48:51', + 'warm_water': 0.0, + }), + }), + ]), + }) +# --- diff --git a/tests/components/powerfox/test_diagnostics.py b/tests/components/powerfox/test_diagnostics.py new file mode 100644 index 00000000000..7dc2c3c7263 --- /dev/null +++ b/tests/components/powerfox/test_diagnostics.py @@ -0,0 +1,30 @@ +"""Test for PowerFox diagnostics.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_powerfox_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test the PowerFox entry diagnostics.""" + await setup_integration(hass, mock_config_entry) + + result = await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) + + assert result == snapshot From 6b7724c55634f067a9232bbbd1b476929d942015 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Benecke Date: Wed, 4 Dec 2024 09:52:15 +0100 Subject: [PATCH 0231/1198] Track if intent was processed locally (#132166) --- .../components/assist_pipeline/pipeline.py | 8 +++++++- .../assist_pipeline/snapshots/test_init.ambr | 8 ++++++++ .../snapshots/test_websocket.ambr | 16 ++++++++++++++++ 3 files changed, 31 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/assist_pipeline/pipeline.py b/homeassistant/components/assist_pipeline/pipeline.py index 5bbc81adb86..9e9e84fb5d6 100644 --- a/homeassistant/components/assist_pipeline/pipeline.py +++ b/homeassistant/components/assist_pipeline/pipeline.py @@ -1018,6 +1018,7 @@ class PipelineRun: "intent_input": intent_input, "conversation_id": conversation_id, "device_id": device_id, + "prefer_local_intents": self.pipeline.prefer_local_intents, }, ) ) @@ -1031,6 +1032,7 @@ class PipelineRun: language=self.pipeline.language, agent_id=self.intent_agent, ) + processed_locally = self.intent_agent == conversation.HOME_ASSISTANT_AGENT conversation_result: conversation.ConversationResult | None = None if user_input.agent_id != conversation.HOME_ASSISTANT_AGENT: @@ -1061,6 +1063,7 @@ class PipelineRun: response=intent_response, conversation_id=user_input.conversation_id, ) + processed_locally = True if conversation_result is None: # Fall back to pipeline conversation agent @@ -1085,7 +1088,10 @@ class PipelineRun: self.process_event( PipelineEvent( PipelineEventType.INTENT_END, - {"intent_output": conversation_result.as_dict()}, + { + "processed_locally": processed_locally, + "intent_output": conversation_result.as_dict(), + }, ) ) diff --git a/tests/components/assist_pipeline/snapshots/test_init.ambr b/tests/components/assist_pipeline/snapshots/test_init.ambr index c70d3944f88..3b829e0e14a 100644 --- a/tests/components/assist_pipeline/snapshots/test_init.ambr +++ b/tests/components/assist_pipeline/snapshots/test_init.ambr @@ -37,6 +37,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'test 
transcript', 'language': 'en', + 'prefer_local_intents': False, }), 'type': , }), @@ -60,6 +61,7 @@ }), }), }), + 'processed_locally': True, }), 'type': , }), @@ -126,6 +128,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'test transcript', 'language': 'en-US', + 'prefer_local_intents': False, }), 'type': , }), @@ -149,6 +152,7 @@ }), }), }), + 'processed_locally': True, }), 'type': , }), @@ -215,6 +219,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'test transcript', 'language': 'en-US', + 'prefer_local_intents': False, }), 'type': , }), @@ -238,6 +243,7 @@ }), }), }), + 'processed_locally': True, }), 'type': , }), @@ -328,6 +334,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'test transcript', 'language': 'en', + 'prefer_local_intents': False, }), 'type': , }), @@ -351,6 +358,7 @@ }), }), }), + 'processed_locally': True, }), 'type': , }), diff --git a/tests/components/assist_pipeline/snapshots/test_websocket.ambr b/tests/components/assist_pipeline/snapshots/test_websocket.ambr index 566fb129959..41747a50eb6 100644 --- a/tests/components/assist_pipeline/snapshots/test_websocket.ambr +++ b/tests/components/assist_pipeline/snapshots/test_websocket.ambr @@ -36,6 +36,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'test transcript', 'language': 'en', + 'prefer_local_intents': False, }) # --- # name: test_audio_pipeline.4 @@ -58,6 +59,7 @@ }), }), }), + 'processed_locally': True, }) # --- # name: test_audio_pipeline.5 @@ -117,6 +119,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'test transcript', 'language': 'en', + 'prefer_local_intents': False, }) # --- # name: test_audio_pipeline_debug.4 @@ -139,6 +142,7 @@ }), }), }), + 'processed_locally': True, }) # --- # name: test_audio_pipeline_debug.5 @@ -210,6 +214,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'test transcript', 'language': 'en', + 'prefer_local_intents': False, }) # --- # name: test_audio_pipeline_with_enhancements.4 @@ -232,6 +237,7 @@ }), }), }), + 'processed_locally': True, }) # --- # name: test_audio_pipeline_with_enhancements.5 @@ -313,6 +319,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'test transcript', 'language': 'en', + 'prefer_local_intents': False, }) # --- # name: test_audio_pipeline_with_wake_word_no_timeout.6 @@ -335,6 +342,7 @@ }), }), }), + 'processed_locally': True, }) # --- # name: test_audio_pipeline_with_wake_word_no_timeout.7 @@ -519,6 +527,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'Are the lights on?', 'language': 'en', + 'prefer_local_intents': False, }) # --- # name: test_intent_failed.2 @@ -541,6 +550,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'Are the lights on?', 'language': 'en', + 'prefer_local_intents': False, }) # --- # name: test_intent_timeout.2 @@ -569,6 +579,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'never mind', 'language': 'en', + 'prefer_local_intents': False, }) # --- # name: test_pipeline_empty_tts_output.2 @@ -592,6 +603,7 @@ }), }), }), + 'processed_locally': True, }) # --- # name: test_pipeline_empty_tts_output.3 @@ -680,6 +692,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'Are the lights on?', 'language': 'en', + 'prefer_local_intents': False, }) # --- # name: test_text_only_pipeline[extra_msg0].2 @@ -702,6 +715,7 @@ }), }), }), + 'processed_locally': True, }) # --- # name: test_text_only_pipeline[extra_msg0].3 @@ -724,6 +738,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'Are the lights on?', 
'language': 'en', + 'prefer_local_intents': False, }) # --- # name: test_text_only_pipeline[extra_msg1].2 @@ -746,6 +761,7 @@ }), }), }), + 'processed_locally': True, }) # --- # name: test_text_only_pipeline[extra_msg1].3 From cafd2092d4025261c2f7b7009d5d9431956264c6 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 4 Dec 2024 09:52:31 +0100 Subject: [PATCH 0232/1198] Use typed config entry in fyta (#132248) --- homeassistant/components/fyta/__init__.py | 6 ++++-- homeassistant/components/fyta/entity.py | 4 ++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/fyta/__init__.py b/homeassistant/components/fyta/__init__.py index b29789be87e..1969ebfffe9 100644 --- a/homeassistant/components/fyta/__init__.py +++ b/homeassistant/components/fyta/__init__.py @@ -55,13 +55,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: FytaConfigEntry) -> bool return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: FytaConfigEntry) -> bool: """Unload Fyta entity.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) -async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_migrate_entry( + hass: HomeAssistant, config_entry: FytaConfigEntry +) -> bool: """Migrate old entry.""" _LOGGER.debug("Migrating from version %s", config_entry.version) diff --git a/homeassistant/components/fyta/entity.py b/homeassistant/components/fyta/entity.py index 18c52d74e25..4c078098ec1 100644 --- a/homeassistant/components/fyta/entity.py +++ b/homeassistant/components/fyta/entity.py @@ -3,10 +3,10 @@ from fyta_cli.fyta_models import Plant from homeassistant.components.sensor import SensorEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity +from . 
import FytaConfigEntry from .const import DOMAIN from .coordinator import FytaCoordinator @@ -19,7 +19,7 @@ class FytaPlantEntity(CoordinatorEntity[FytaCoordinator]): def __init__( self, coordinator: FytaCoordinator, - entry: ConfigEntry, + entry: FytaConfigEntry, description: SensorEntityDescription, plant_id: int, ) -> None: From 5600ad0d82f0b16db8c424579710994a42763826 Mon Sep 17 00:00:00 2001 From: cnico Date: Wed, 4 Dec 2024 09:53:29 +0100 Subject: [PATCH 0233/1198] Fix blocking call in netdata (#132209) Co-authored-by: G Johansson --- homeassistant/components/netdata/manifest.json | 2 +- homeassistant/components/netdata/sensor.py | 5 ++++- requirements_all.txt | 2 +- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/netdata/manifest.json b/homeassistant/components/netdata/manifest.json index 199073298ab..8901a271de2 100644 --- a/homeassistant/components/netdata/manifest.json +++ b/homeassistant/components/netdata/manifest.json @@ -6,5 +6,5 @@ "iot_class": "local_polling", "loggers": ["netdata"], "quality_scale": "legacy", - "requirements": ["netdata==1.1.0"] + "requirements": ["netdata==1.3.0"] } diff --git a/homeassistant/components/netdata/sensor.py b/homeassistant/components/netdata/sensor.py index b77a4392ef4..f33349c56ce 100644 --- a/homeassistant/components/netdata/sensor.py +++ b/homeassistant/components/netdata/sensor.py @@ -24,6 +24,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import PlatformNotReady import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.httpx_client import get_async_client from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType _LOGGER = logging.getLogger(__name__) @@ -70,7 +71,9 @@ async def async_setup_platform( port = config[CONF_PORT] resources = config[CONF_RESOURCES] - netdata = NetdataData(Netdata(host, port=port, timeout=20.0)) + netdata = NetdataData( + Netdata(host, port=port, timeout=20.0, httpx_client=get_async_client(hass)) + ) await netdata.async_update() if netdata.api.metrics is None: diff --git a/requirements_all.txt b/requirements_all.txt index 23fab1e7574..23b2d91fbfa 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1433,7 +1433,7 @@ ndms2-client==0.1.2 nessclient==1.1.2 # homeassistant.components.netdata -netdata==1.1.0 +netdata==1.3.0 # homeassistant.components.nmap_tracker netmap==0.7.0.2 From 2ebc229d8d0058af67d1e1fd455e12f0f50c0af1 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 4 Dec 2024 09:54:29 +0100 Subject: [PATCH 0234/1198] Use typed config entry in mastodon (#132249) --- homeassistant/components/mastodon/__init__.py | 4 ++-- homeassistant/components/mastodon/config_flow.py | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/mastodon/__init__.py b/homeassistant/components/mastodon/__init__.py index e8d23434248..f7f974ffbb0 100644 --- a/homeassistant/components/mastodon/__init__.py +++ b/homeassistant/components/mastodon/__init__.py @@ -81,7 +81,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: MastodonConfigEntry) -> ) -async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_migrate_entry(hass: HomeAssistant, entry: MastodonConfigEntry) -> bool: """Migrate old config.""" if entry.version == 1 and entry.minor_version == 1: @@ -113,7 +113,7 @@ async def async_migrate_entry(hass: HomeAssistant, 
entry: ConfigEntry) -> bool: return True -def setup_mastodon(entry: ConfigEntry) -> tuple[Mastodon, dict, dict]: +def setup_mastodon(entry: MastodonConfigEntry) -> tuple[Mastodon, dict, dict]: """Get mastodon details.""" client = create_mastodon_client( entry.data[CONF_BASE_URL], diff --git a/homeassistant/components/mastodon/config_flow.py b/homeassistant/components/mastodon/config_flow.py index 7c0985570f7..a36ba2e917f 100644 --- a/homeassistant/components/mastodon/config_flow.py +++ b/homeassistant/components/mastodon/config_flow.py @@ -8,7 +8,7 @@ from mastodon.Mastodon import MastodonNetworkError, MastodonUnauthorizedError import voluptuous as vol from yarl import URL -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import ( CONF_ACCESS_TOKEN, CONF_CLIENT_ID, @@ -53,7 +53,6 @@ class MastodonConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 MINOR_VERSION = 2 - config_entry: ConfigEntry def check_connection( self, From ea9301aa9ee66f074e045ea52a3a53e8094c8eb6 Mon Sep 17 00:00:00 2001 From: Christopher Masto Date: Wed, 4 Dec 2024 04:39:54 -0500 Subject: [PATCH 0235/1198] Fix Visual Studio Code tasks to use selected Python interpreter (#132219) --- .vscode/tasks.json | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 1f95c5eef8f..7425e7a2533 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -16,7 +16,7 @@ { "label": "Pytest", "type": "shell", - "command": "python3 -m pytest --timeout=10 tests", + "command": "${command:python.interpreterPath} -m pytest --timeout=10 tests", "dependsOn": ["Install all Test Requirements"], "group": { "kind": "test", @@ -31,7 +31,7 @@ { "label": "Pytest (changed tests only)", "type": "shell", - "command": "python3 -m pytest --timeout=10 --picked", + "command": "${command:python.interpreterPath} -m pytest --timeout=10 --picked", "group": { "kind": "test", "isDefault": true @@ -89,7 +89,7 @@ "label": "Code Coverage", "detail": "Generate code coverage report for a given integration.", "type": "shell", - "command": "python3 -m pytest ./tests/components/${input:integrationName}/ --cov=homeassistant.components.${input:integrationName} --cov-report term-missing --durations-min=1 --durations=0 --numprocesses=auto", + "command": "${command:python.interpreterPath} -m pytest ./tests/components/${input:integrationName}/ --cov=homeassistant.components.${input:integrationName} --cov-report term-missing --durations-min=1 --durations=0 --numprocesses=auto", "dependsOn": ["Compile English translations"], "group": { "kind": "test", @@ -105,7 +105,7 @@ "label": "Update syrupy snapshots", "detail": "Update syrupy snapshots for a given integration.", "type": "shell", - "command": "python3 -m pytest ./tests/components/${input:integrationName} --snapshot-update", + "command": "${command:python.interpreterPath} -m pytest ./tests/components/${input:integrationName} --snapshot-update", "dependsOn": ["Compile English translations"], "group": { "kind": "test", @@ -163,7 +163,7 @@ "label": "Compile English translations", "detail": "In order to test changes to translation files, the translation strings must be compiled into Home Assistant's translation directories.", "type": "shell", - "command": "python3 -m script.translations develop --all", + "command": "${command:python.interpreterPath} -m script.translations develop --all", "group": { "kind": "build", "isDefault": true 
@@ -173,7 +173,7 @@ "label": "Run scaffold", "detail": "Add new functionality to a integration using a scaffold.", "type": "shell", - "command": "python3 -m script.scaffold ${input:scaffoldName} --integration ${input:integrationName}", + "command": "${command:python.interpreterPath} -m script.scaffold ${input:scaffoldName} --integration ${input:integrationName}", "group": { "kind": "build", "isDefault": true @@ -183,7 +183,7 @@ "label": "Create new integration", "detail": "Use the scaffold to create a new integration.", "type": "shell", - "command": "python3 -m script.scaffold integration", + "command": "${command:python.interpreterPath} -m script.scaffold integration", "group": { "kind": "build", "isDefault": true From 8c6d638354706653e2a1325a14ba12b27ba2440c Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 4 Dec 2024 10:43:44 +0100 Subject: [PATCH 0236/1198] Improve discovery rule in IQS validation (#132251) * Improve discovery rule in IQS validation * Adjust fyta/powerfox --- .../components/fyta/quality_scale.yaml | 4 +-- .../components/powerfox/quality_scale.yaml | 4 +-- .../quality_scale_validation/discovery.py | 31 +++++++++++++------ 3 files changed, 26 insertions(+), 13 deletions(-) diff --git a/homeassistant/components/fyta/quality_scale.yaml b/homeassistant/components/fyta/quality_scale.yaml index 97f62f884e7..0fbacd0e12e 100644 --- a/homeassistant/components/fyta/quality_scale.yaml +++ b/homeassistant/components/fyta/quality_scale.yaml @@ -53,8 +53,8 @@ rules: status: exempt comment: No noisy entities. discovery: - status: exempt - comment: bug in hassfest + status: done + comment: DHCP stale-devices: done diagnostics: done exception-translations: done diff --git a/homeassistant/components/powerfox/quality_scale.yaml b/homeassistant/components/powerfox/quality_scale.yaml index 5a14264940f..7e104b894ca 100644 --- a/homeassistant/components/powerfox/quality_scale.yaml +++ b/homeassistant/components/powerfox/quality_scale.yaml @@ -57,9 +57,9 @@ rules: comment: | This integration is connecting to a cloud service. discovery: - status: exempt + status: done comment: | - It can find poweropti devices via zeroconf, but will start a normal user flow. + It can find poweropti devices via zeroconf, and will start a normal user flow. 
docs-data-update: done docs-examples: todo docs-known-limitations: done diff --git a/script/hassfest/quality_scale_validation/discovery.py b/script/hassfest/quality_scale_validation/discovery.py index a4f01ce0269..d24005b6373 100644 --- a/script/hassfest/quality_scale_validation/discovery.py +++ b/script/hassfest/quality_scale_validation/discovery.py @@ -7,23 +7,32 @@ import ast from script.hassfest.model import Integration -DISCOVERY_FUNCTIONS = [ - "async_step_discovery", +MANIFEST_KEYS = [ + "bluetooth", + "dhcp", + "homekit", + "mqtt", + "ssdp", + "usb", + "zeroconf", +] +CONFIG_FLOW_STEPS = { "async_step_bluetooth", + "async_step_discovery", + "async_step_dhcp", "async_step_hassio", "async_step_homekit", "async_step_mqtt", "async_step_ssdp", - "async_step_zeroconf", - "async_step_dhcp", "async_step_usb", -] + "async_step_zeroconf", +} def _has_discovery_function(module: ast.Module) -> bool: """Test if the module defines at least one of the discovery functions.""" return any( - type(item) is ast.AsyncFunctionDef and item.name in DISCOVERY_FUNCTIONS + type(item) is ast.AsyncFunctionDef and item.name in CONFIG_FLOW_STEPS for item in ast.walk(module) ) @@ -35,11 +44,15 @@ def validate(integration: Integration) -> list[str] | None: if not config_flow_file.exists(): return ["Integration is missing config_flow.py"] - config_flow = ast.parse(config_flow_file.read_text()) + # Check manifest + if any(key in integration.manifest for key in MANIFEST_KEYS): + return None - if not _has_discovery_function(config_flow): + # Fallback => check config_flow step + config_flow = ast.parse(config_flow_file.read_text()) + if not (_has_discovery_function(config_flow)): return [ - f"Integration is missing one of {DISCOVERY_FUNCTIONS} " + f"Integration is missing one of {CONFIG_FLOW_STEPS} " f"in {config_flow_file}" ] From db266d80ec62ed5c6184a4e13f362a7b44fbccdc Mon Sep 17 00:00:00 2001 From: G Johansson Date: Wed, 4 Dec 2024 10:45:47 +0100 Subject: [PATCH 0237/1198] Pass config entry to UpdateCoordinator in yale_smart_alarm (#132205) --- .../components/yale_smart_alarm/__init__.py | 6 +++--- .../yale_smart_alarm/alarm_control_panel.py | 4 ++-- .../components/yale_smart_alarm/binary_sensor.py | 4 +++- .../components/yale_smart_alarm/coordinator.py | 13 ++++++++----- homeassistant/components/yale_smart_alarm/entity.py | 8 ++++---- 5 files changed, 20 insertions(+), 15 deletions(-) diff --git a/homeassistant/components/yale_smart_alarm/__init__.py b/homeassistant/components/yale_smart_alarm/__init__.py index c543de89b84..b3fcc28ad49 100644 --- a/homeassistant/components/yale_smart_alarm/__init__.py +++ b/homeassistant/components/yale_smart_alarm/__init__.py @@ -27,17 +27,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: YaleConfigEntry) -> bool return True -async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def update_listener(hass: HomeAssistant, entry: YaleConfigEntry) -> None: """Handle options update.""" await hass.config_entries.async_reload(entry.entry_id) -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: YaleConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) -async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_migrate_entry(hass: HomeAssistant, entry: YaleConfigEntry) -> bool: """Migrate old entry.""" LOGGER.debug("Migrating from version %s", entry.version) 
diff --git a/homeassistant/components/yale_smart_alarm/alarm_control_panel.py b/homeassistant/components/yale_smart_alarm/alarm_control_panel.py index 0f5b7d0b8e5..868b186be9d 100644 --- a/homeassistant/components/yale_smart_alarm/alarm_control_panel.py +++ b/homeassistant/components/yale_smart_alarm/alarm_control_panel.py @@ -47,7 +47,7 @@ class YaleAlarmDevice(YaleAlarmEntity, AlarmControlPanelEntity): def __init__(self, coordinator: YaleDataUpdateCoordinator) -> None: """Initialize the Yale Alarm Device.""" super().__init__(coordinator) - self._attr_unique_id = coordinator.entry.entry_id + self._attr_unique_id = coordinator.config_entry.entry_id async def async_alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" @@ -84,7 +84,7 @@ class YaleAlarmDevice(YaleAlarmEntity, AlarmControlPanelEntity): translation_domain=DOMAIN, translation_key="set_alarm", translation_placeholders={ - "name": self.coordinator.entry.data[CONF_NAME], + "name": self.coordinator.config_entry.data[CONF_NAME], "error": str(error), }, ) from error diff --git a/homeassistant/components/yale_smart_alarm/binary_sensor.py b/homeassistant/components/yale_smart_alarm/binary_sensor.py index 8e68b1f0cb4..17b6035321a 100644 --- a/homeassistant/components/yale_smart_alarm/binary_sensor.py +++ b/homeassistant/components/yale_smart_alarm/binary_sensor.py @@ -108,7 +108,9 @@ class YaleProblemSensor(YaleAlarmEntity, BinarySensorEntity): """Initiate Yale Problem Sensor.""" super().__init__(coordinator) self.entity_description = entity_description - self._attr_unique_id = f"{coordinator.entry.entry_id}-{entity_description.key}" + self._attr_unique_id = ( + f"{coordinator.config_entry.entry_id}-{entity_description.key}" + ) @property def is_on(self) -> bool: diff --git a/homeassistant/components/yale_smart_alarm/coordinator.py b/homeassistant/components/yale_smart_alarm/coordinator.py index 66bd71c9f1e..7ece2a3448b 100644 --- a/homeassistant/components/yale_smart_alarm/coordinator.py +++ b/homeassistant/components/yale_smart_alarm/coordinator.py @@ -9,12 +9,14 @@ from yalesmartalarmclient import YaleLock from yalesmartalarmclient.client import YaleSmartAlarmClient from yalesmartalarmclient.exceptions import AuthenticationError -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +if TYPE_CHECKING: + from . 
import YaleConfigEntry + from .const import DEFAULT_SCAN_INTERVAL, DOMAIN, LOGGER, YALE_BASE_ERRORS @@ -22,13 +24,14 @@ class YaleDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): """A Yale Data Update Coordinator.""" yale: YaleSmartAlarmClient + config_entry: YaleConfigEntry - def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: + def __init__(self, hass: HomeAssistant, config_entry: YaleConfigEntry) -> None: """Initialize the Yale hub.""" - self.entry = entry super().__init__( hass, LOGGER, + config_entry=config_entry, name=DOMAIN, update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL), always_update=False, @@ -40,8 +43,8 @@ class YaleDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): try: self.yale = await self.hass.async_add_executor_job( YaleSmartAlarmClient, - self.entry.data[CONF_USERNAME], - self.entry.data[CONF_PASSWORD], + self.config_entry.data[CONF_USERNAME], + self.config_entry.data[CONF_PASSWORD], ) self.locks = await self.hass.async_add_executor_job(self.yale.get_locks) except AuthenticationError as error: diff --git a/homeassistant/components/yale_smart_alarm/entity.py b/homeassistant/components/yale_smart_alarm/entity.py index e37dc3562f5..4020c93de4e 100644 --- a/homeassistant/components/yale_smart_alarm/entity.py +++ b/homeassistant/components/yale_smart_alarm/entity.py @@ -25,7 +25,7 @@ class YaleEntity(CoordinatorEntity[YaleDataUpdateCoordinator]): manufacturer=MANUFACTURER, model=MODEL, identifiers={(DOMAIN, data["address"])}, - via_device=(DOMAIN, coordinator.entry.data[CONF_USERNAME]), + via_device=(DOMAIN, coordinator.config_entry.data[CONF_USERNAME]), ) @@ -43,7 +43,7 @@ class YaleLockEntity(CoordinatorEntity[YaleDataUpdateCoordinator]): manufacturer=MANUFACTURER, model=MODEL, identifiers={(DOMAIN, lock.sid())}, - via_device=(DOMAIN, coordinator.entry.data[CONF_USERNAME]), + via_device=(DOMAIN, coordinator.config_entry.data[CONF_USERNAME]), ) self.lock_data = lock @@ -58,10 +58,10 @@ class YaleAlarmEntity(CoordinatorEntity[YaleDataUpdateCoordinator], Entity): super().__init__(coordinator) panel_info = coordinator.data["panel_info"] self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, coordinator.entry.data[CONF_USERNAME])}, + identifiers={(DOMAIN, coordinator.config_entry.data[CONF_USERNAME])}, manufacturer=MANUFACTURER, model=MODEL, - name=coordinator.entry.data[CONF_NAME], + name=coordinator.config_entry.data[CONF_NAME], connections={(CONNECTION_NETWORK_MAC, panel_info["mac"])}, sw_version=panel_info["version"], ) From f0c07d68c5ec5ced8f4e4f98ee696cebf8498047 Mon Sep 17 00:00:00 2001 From: Klaas Schoute Date: Wed, 4 Dec 2024 11:17:39 +0100 Subject: [PATCH 0238/1198] Catch exceptions on entry setup for Autarco integration (#132227) --- homeassistant/components/autarco/__init__.py | 10 ++++++++-- homeassistant/components/autarco/strings.json | 2 +- tests/components/autarco/test_init.py | 17 ++++++++++++++++- 3 files changed, 25 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/autarco/__init__.py b/homeassistant/components/autarco/__init__.py index 0e29b25ad80..f42bfdf4a0e 100644 --- a/homeassistant/components/autarco/__init__.py +++ b/homeassistant/components/autarco/__init__.py @@ -4,11 +4,12 @@ from __future__ import annotations import asyncio -from autarco import Autarco +from autarco import Autarco, AutarcoConnectionError from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform from homeassistant.core import HomeAssistant +from 
homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession from .coordinator import AutarcoDataUpdateCoordinator @@ -25,7 +26,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: AutarcoConfigEntry) -> b password=entry.data[CONF_PASSWORD], session=async_get_clientsession(hass), ) - account_sites = await client.get_account() + + try: + account_sites = await client.get_account() + except AutarcoConnectionError as err: + await client.close() + raise ConfigEntryNotReady from err coordinators: list[AutarcoDataUpdateCoordinator] = [ AutarcoDataUpdateCoordinator(hass, client, site) for site in account_sites diff --git a/homeassistant/components/autarco/strings.json b/homeassistant/components/autarco/strings.json index 159dbd09781..a053cd36e09 100644 --- a/homeassistant/components/autarco/strings.json +++ b/homeassistant/components/autarco/strings.json @@ -28,7 +28,7 @@ "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, diff --git a/tests/components/autarco/test_init.py b/tests/components/autarco/test_init.py index 2707c53d35f..6c71eca5ef1 100644 --- a/tests/components/autarco/test_init.py +++ b/tests/components/autarco/test_init.py @@ -4,7 +4,7 @@ from __future__ import annotations from unittest.mock import AsyncMock -from autarco import AutarcoAuthenticationError +from autarco import AutarcoAuthenticationError, AutarcoConnectionError from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -30,6 +30,21 @@ async def test_load_unload_entry( assert mock_config_entry.state is ConfigEntryState.NOT_LOADED +async def test_config_entry_not_ready( + hass: HomeAssistant, + mock_autarco_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Autarco configuration entry not ready.""" + mock_autarco_client.get_account.side_effect = AutarcoConnectionError + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + async def test_setup_entry_exception( hass: HomeAssistant, mock_autarco_client: AsyncMock, From 5a1d5802c42b83f4b3ae2695a3a234a7f43a140c Mon Sep 17 00:00:00 2001 From: G Johansson Date: Wed, 4 Dec 2024 11:19:11 +0100 Subject: [PATCH 0239/1198] Fix sensibo test coverage to 100% (#132202) --- homeassistant/components/sensibo/climate.py | 11 +++-------- homeassistant/components/sensibo/strings.json | 3 --- tests/components/sensibo/test_climate.py | 11 +++++++++++ 3 files changed, 14 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/sensibo/climate.py b/homeassistant/components/sensibo/climate.py index 390ebc080b8..c2f03c2d568 100644 --- a/homeassistant/components/sensibo/climate.py +++ b/homeassistant/components/sensibo/climate.py @@ -22,7 +22,7 @@ from homeassistant.const import ( UnitOfTemperature, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv, entity_platform from 
homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.unit_conversion import TemperatureConverter @@ -108,7 +108,7 @@ AC_STATE_TO_DATA = { } -def _find_valid_target_temp(target: int, valid_targets: list[int]) -> int: +def _find_valid_target_temp(target: float, valid_targets: list[int]) -> int: if target <= valid_targets[0]: return valid_targets[0] if target >= valid_targets[-1]: @@ -320,12 +320,7 @@ class SensiboClimate(SensiboDeviceBaseEntity, ClimateEntity): translation_key="no_target_temperature_in_features", ) - if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="no_target_temperature", - ) - + temperature: float = kwargs[ATTR_TEMPERATURE] if temperature == self.target_temperature: return diff --git a/homeassistant/components/sensibo/strings.json b/homeassistant/components/sensibo/strings.json index bec402bee18..302e34bb5aa 100644 --- a/homeassistant/components/sensibo/strings.json +++ b/homeassistant/components/sensibo/strings.json @@ -500,9 +500,6 @@ "no_target_temperature_in_features": { "message": "Current mode doesn't support setting target temperature" }, - "no_target_temperature": { - "message": "No target temperature provided" - }, "no_fan_level_in_features": { "message": "Current mode doesn't support setting fan level" }, diff --git a/tests/components/sensibo/test_climate.py b/tests/components/sensibo/test_climate.py index 8be9f4a60e4..7916727e57a 100644 --- a/tests/components/sensibo/test_climate.py +++ b/tests/components/sensibo/test_climate.py @@ -347,6 +347,17 @@ async def test_climate_temperatures( state2 = hass.states.get("climate.hallway") assert state2.attributes["temperature"] == 20 + with patch( + "homeassistant.components.sensibo.coordinator.SensiboClient.async_set_ac_state_property", + ) as mock_call: + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: state1.entity_id, ATTR_TEMPERATURE: 20}, + blocking=True, + ) + assert not mock_call.called + with ( patch( "homeassistant.components.sensibo.coordinator.SensiboClient.async_get_devices_data", From 545a780fcb8e2c0bfc594cd314fdd2d775a56f12 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Wed, 4 Dec 2024 11:50:55 +0100 Subject: [PATCH 0240/1198] Bump deebot-client to 9.1.0 (#132253) --- homeassistant/components/ecovacs/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index 4a43489ff24..546aba01d90 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.10", "deebot-client==9.0.0"] + "requirements": ["py-sucks==0.9.10", "deebot-client==9.1.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 23b2d91fbfa..9b33617588d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -735,7 +735,7 @@ debugpy==1.8.6 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==9.0.0 +deebot-client==9.1.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b0b75aa988f..64e9ea27d75 100644 --- 
a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -625,7 +625,7 @@ dbus-fast==2.24.3 debugpy==1.8.6 # homeassistant.components.ecovacs -deebot-client==9.0.0 +deebot-client==9.1.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns From a417d3dcf8983b6de42d0ec74743a441c8c670eb Mon Sep 17 00:00:00 2001 From: Pete Date: Wed, 4 Dec 2024 13:21:10 +0100 Subject: [PATCH 0241/1198] Fix recorder "year" period in leap year (#132167) * FIX: make "year" period work in leap year * Add test * Set second and microsecond to non-zero in test start times * FIX: better fix for leap year problem * Revert "FIX: better fix for leap year problem" This reverts commit 06aba46ec6a0a1e944c88fe99d9bc6181a73cc1c. --------- Co-authored-by: Erik --- homeassistant/components/recorder/util.py | 2 +- tests/components/recorder/test_util.py | 92 ++++++++++++++++------- 2 files changed, 67 insertions(+), 27 deletions(-) diff --git a/homeassistant/components/recorder/util.py b/homeassistant/components/recorder/util.py index a59519ef38d..125b354211e 100644 --- a/homeassistant/components/recorder/util.py +++ b/homeassistant/components/recorder/util.py @@ -902,7 +902,7 @@ def resolve_period( start_time = (start_time + timedelta(days=cal_offset * 366)).replace( month=1, day=1 ) - end_time = (start_time + timedelta(days=365)).replace(day=1) + end_time = (start_time + timedelta(days=366)).replace(day=1) start_time = dt_util.as_utc(start_time) end_time = dt_util.as_utc(end_time) diff --git a/tests/components/recorder/test_util.py b/tests/components/recorder/test_util.py index 4904bdecc4d..7b8eef6b16f 100644 --- a/tests/components/recorder/test_util.py +++ b/tests/components/recorder/test_util.py @@ -9,6 +9,7 @@ import threading from typing import Any from unittest.mock import MagicMock, Mock, patch +from freezegun.api import FrozenDateTimeFactory import pytest from sqlalchemy import lambda_stmt, text from sqlalchemy.engine.result import ChunkedIteratorResult @@ -1052,55 +1053,94 @@ async def test_execute_stmt_lambda_element( assert rows == ["mock_row"] -@pytest.mark.freeze_time(datetime(2022, 10, 21, 7, 25, tzinfo=UTC)) -async def test_resolve_period(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("start_time", "periods"), + [ + ( + # Test 00:25 local time, during DST + datetime(2022, 10, 21, 7, 25, 50, 123, tzinfo=UTC), + { + "hour": ["2022-10-21T07:00:00+00:00", "2022-10-21T08:00:00+00:00"], + "hour-1": ["2022-10-21T06:00:00+00:00", "2022-10-21T07:00:00+00:00"], + "day": ["2022-10-21T07:00:00+00:00", "2022-10-22T07:00:00+00:00"], + "day-1": ["2022-10-20T07:00:00+00:00", "2022-10-21T07:00:00+00:00"], + "week": ["2022-10-17T07:00:00+00:00", "2022-10-24T07:00:00+00:00"], + "week-1": ["2022-10-10T07:00:00+00:00", "2022-10-17T07:00:00+00:00"], + "month": ["2022-10-01T07:00:00+00:00", "2022-11-01T07:00:00+00:00"], + "month-1": ["2022-09-01T07:00:00+00:00", "2022-10-01T07:00:00+00:00"], + "year": ["2022-01-01T08:00:00+00:00", "2023-01-01T08:00:00+00:00"], + "year-1": ["2021-01-01T08:00:00+00:00", "2022-01-01T08:00:00+00:00"], + }, + ), + ( + # Test 00:25 local time, standard time, February 28th a leap year + datetime(2024, 2, 28, 8, 25, 50, 123, tzinfo=UTC), + { + "hour": ["2024-02-28T08:00:00+00:00", "2024-02-28T09:00:00+00:00"], + "hour-1": ["2024-02-28T07:00:00+00:00", "2024-02-28T08:00:00+00:00"], + "day": ["2024-02-28T08:00:00+00:00", "2024-02-29T08:00:00+00:00"], + "day-1": ["2024-02-27T08:00:00+00:00", "2024-02-28T08:00:00+00:00"], + "week": ["2024-02-26T08:00:00+00:00", 
"2024-03-04T08:00:00+00:00"], + "week-1": ["2024-02-19T08:00:00+00:00", "2024-02-26T08:00:00+00:00"], + "month": ["2024-02-01T08:00:00+00:00", "2024-03-01T08:00:00+00:00"], + "month-1": ["2024-01-01T08:00:00+00:00", "2024-02-01T08:00:00+00:00"], + "year": ["2024-01-01T08:00:00+00:00", "2025-01-01T08:00:00+00:00"], + "year-1": ["2023-01-01T08:00:00+00:00", "2024-01-01T08:00:00+00:00"], + }, + ), + ], +) +async def test_resolve_period( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + start_time: datetime, + periods: dict[str, tuple[str, str]], +) -> None: """Test statistic_during_period.""" + assert hass.config.time_zone == "US/Pacific" + freezer.move_to(start_time) now = dt_util.utcnow() start_t, end_t = resolve_period({"calendar": {"period": "hour"}}) - assert start_t.isoformat() == "2022-10-21T07:00:00+00:00" - assert end_t.isoformat() == "2022-10-21T08:00:00+00:00" - - start_t, end_t = resolve_period({"calendar": {"period": "hour"}}) - assert start_t.isoformat() == "2022-10-21T07:00:00+00:00" - assert end_t.isoformat() == "2022-10-21T08:00:00+00:00" + assert start_t.isoformat() == periods["hour"][0] + assert end_t.isoformat() == periods["hour"][1] start_t, end_t = resolve_period({"calendar": {"period": "hour", "offset": -1}}) - assert start_t.isoformat() == "2022-10-21T06:00:00+00:00" - assert end_t.isoformat() == "2022-10-21T07:00:00+00:00" + assert start_t.isoformat() == periods["hour-1"][0] + assert end_t.isoformat() == periods["hour-1"][1] start_t, end_t = resolve_period({"calendar": {"period": "day"}}) - assert start_t.isoformat() == "2022-10-21T07:00:00+00:00" - assert end_t.isoformat() == "2022-10-22T07:00:00+00:00" + assert start_t.isoformat() == periods["day"][0] + assert end_t.isoformat() == periods["day"][1] start_t, end_t = resolve_period({"calendar": {"period": "day", "offset": -1}}) - assert start_t.isoformat() == "2022-10-20T07:00:00+00:00" - assert end_t.isoformat() == "2022-10-21T07:00:00+00:00" + assert start_t.isoformat() == periods["day-1"][0] + assert end_t.isoformat() == periods["day-1"][1] start_t, end_t = resolve_period({"calendar": {"period": "week"}}) - assert start_t.isoformat() == "2022-10-17T07:00:00+00:00" - assert end_t.isoformat() == "2022-10-24T07:00:00+00:00" + assert start_t.isoformat() == periods["week"][0] + assert end_t.isoformat() == periods["week"][1] start_t, end_t = resolve_period({"calendar": {"period": "week", "offset": -1}}) - assert start_t.isoformat() == "2022-10-10T07:00:00+00:00" - assert end_t.isoformat() == "2022-10-17T07:00:00+00:00" + assert start_t.isoformat() == periods["week-1"][0] + assert end_t.isoformat() == periods["week-1"][1] start_t, end_t = resolve_period({"calendar": {"period": "month"}}) - assert start_t.isoformat() == "2022-10-01T07:00:00+00:00" - assert end_t.isoformat() == "2022-11-01T07:00:00+00:00" + assert start_t.isoformat() == periods["month"][0] + assert end_t.isoformat() == periods["month"][1] start_t, end_t = resolve_period({"calendar": {"period": "month", "offset": -1}}) - assert start_t.isoformat() == "2022-09-01T07:00:00+00:00" - assert end_t.isoformat() == "2022-10-01T07:00:00+00:00" + assert start_t.isoformat() == periods["month-1"][0] + assert end_t.isoformat() == periods["month-1"][1] start_t, end_t = resolve_period({"calendar": {"period": "year"}}) - assert start_t.isoformat() == "2022-01-01T08:00:00+00:00" - assert end_t.isoformat() == "2023-01-01T08:00:00+00:00" + assert start_t.isoformat() == periods["year"][0] + assert end_t.isoformat() == periods["year"][1] start_t, end_t = 
resolve_period({"calendar": {"period": "year", "offset": -1}}) - assert start_t.isoformat() == "2021-01-01T08:00:00+00:00" - assert end_t.isoformat() == "2022-01-01T08:00:00+00:00" + assert start_t.isoformat() == periods["year-1"][0] + assert end_t.isoformat() == periods["year-1"][1] # Fixed period assert resolve_period({}) == (None, None) From deab285db8070b83394ae48324257375e3ce0527 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 4 Dec 2024 14:01:49 +0100 Subject: [PATCH 0242/1198] Improve tests of recorder util resolve_period (#132259) --- tests/components/recorder/test_util.py | 89 ++++++++------------------ 1 file changed, 28 insertions(+), 61 deletions(-) diff --git a/tests/components/recorder/test_util.py b/tests/components/recorder/test_util.py index 7b8eef6b16f..2514c38e105 100644 --- a/tests/components/recorder/test_util.py +++ b/tests/components/recorder/test_util.py @@ -1060,32 +1060,32 @@ async def test_execute_stmt_lambda_element( # Test 00:25 local time, during DST datetime(2022, 10, 21, 7, 25, 50, 123, tzinfo=UTC), { - "hour": ["2022-10-21T07:00:00+00:00", "2022-10-21T08:00:00+00:00"], - "hour-1": ["2022-10-21T06:00:00+00:00", "2022-10-21T07:00:00+00:00"], - "day": ["2022-10-21T07:00:00+00:00", "2022-10-22T07:00:00+00:00"], - "day-1": ["2022-10-20T07:00:00+00:00", "2022-10-21T07:00:00+00:00"], - "week": ["2022-10-17T07:00:00+00:00", "2022-10-24T07:00:00+00:00"], - "week-1": ["2022-10-10T07:00:00+00:00", "2022-10-17T07:00:00+00:00"], - "month": ["2022-10-01T07:00:00+00:00", "2022-11-01T07:00:00+00:00"], - "month-1": ["2022-09-01T07:00:00+00:00", "2022-10-01T07:00:00+00:00"], - "year": ["2022-01-01T08:00:00+00:00", "2023-01-01T08:00:00+00:00"], - "year-1": ["2021-01-01T08:00:00+00:00", "2022-01-01T08:00:00+00:00"], + ("hour", 0): ("2022-10-21T07:00:00", "2022-10-21T08:00:00"), + ("hour", -1): ("2022-10-21T06:00:00", "2022-10-21T07:00:00"), + ("day", 0): ("2022-10-21T07:00:00", "2022-10-22T07:00:00"), + ("day", -1): ("2022-10-20T07:00:00", "2022-10-21T07:00:00"), + ("week", 0): ("2022-10-17T07:00:00", "2022-10-24T07:00:00"), + ("week", -1): ("2022-10-10T07:00:00", "2022-10-17T07:00:00"), + ("month", 0): ("2022-10-01T07:00:00", "2022-11-01T07:00:00"), + ("month", -1): ("2022-09-01T07:00:00", "2022-10-01T07:00:00"), + ("year", 0): ("2022-01-01T08:00:00", "2023-01-01T08:00:00"), + ("year", -1): ("2021-01-01T08:00:00", "2022-01-01T08:00:00"), }, ), ( # Test 00:25 local time, standard time, February 28th a leap year datetime(2024, 2, 28, 8, 25, 50, 123, tzinfo=UTC), { - "hour": ["2024-02-28T08:00:00+00:00", "2024-02-28T09:00:00+00:00"], - "hour-1": ["2024-02-28T07:00:00+00:00", "2024-02-28T08:00:00+00:00"], - "day": ["2024-02-28T08:00:00+00:00", "2024-02-29T08:00:00+00:00"], - "day-1": ["2024-02-27T08:00:00+00:00", "2024-02-28T08:00:00+00:00"], - "week": ["2024-02-26T08:00:00+00:00", "2024-03-04T08:00:00+00:00"], - "week-1": ["2024-02-19T08:00:00+00:00", "2024-02-26T08:00:00+00:00"], - "month": ["2024-02-01T08:00:00+00:00", "2024-03-01T08:00:00+00:00"], - "month-1": ["2024-01-01T08:00:00+00:00", "2024-02-01T08:00:00+00:00"], - "year": ["2024-01-01T08:00:00+00:00", "2025-01-01T08:00:00+00:00"], - "year-1": ["2023-01-01T08:00:00+00:00", "2024-01-01T08:00:00+00:00"], + ("hour", 0): ("2024-02-28T08:00:00", "2024-02-28T09:00:00"), + ("hour", -1): ("2024-02-28T07:00:00", "2024-02-28T08:00:00"), + ("day", 0): ("2024-02-28T08:00:00", "2024-02-29T08:00:00"), + ("day", -1): ("2024-02-27T08:00:00", "2024-02-28T08:00:00"), + ("week", 0): ("2024-02-26T08:00:00", 
"2024-03-04T08:00:00"), + ("week", -1): ("2024-02-19T08:00:00", "2024-02-26T08:00:00"), + ("month", 0): ("2024-02-01T08:00:00", "2024-03-01T08:00:00"), + ("month", -1): ("2024-01-01T08:00:00", "2024-02-01T08:00:00"), + ("year", 0): ("2024-01-01T08:00:00", "2025-01-01T08:00:00"), + ("year", -1): ("2023-01-01T08:00:00", "2024-01-01T08:00:00"), }, ), ], @@ -1094,53 +1094,20 @@ async def test_resolve_period( hass: HomeAssistant, freezer: FrozenDateTimeFactory, start_time: datetime, - periods: dict[str, tuple[str, str]], + periods: dict[tuple[str, int], tuple[str, str]], ) -> None: - """Test statistic_during_period.""" + """Test resolve_period.""" assert hass.config.time_zone == "US/Pacific" freezer.move_to(start_time) now = dt_util.utcnow() - start_t, end_t = resolve_period({"calendar": {"period": "hour"}}) - assert start_t.isoformat() == periods["hour"][0] - assert end_t.isoformat() == periods["hour"][1] - - start_t, end_t = resolve_period({"calendar": {"period": "hour", "offset": -1}}) - assert start_t.isoformat() == periods["hour-1"][0] - assert end_t.isoformat() == periods["hour-1"][1] - - start_t, end_t = resolve_period({"calendar": {"period": "day"}}) - assert start_t.isoformat() == periods["day"][0] - assert end_t.isoformat() == periods["day"][1] - - start_t, end_t = resolve_period({"calendar": {"period": "day", "offset": -1}}) - assert start_t.isoformat() == periods["day-1"][0] - assert end_t.isoformat() == periods["day-1"][1] - - start_t, end_t = resolve_period({"calendar": {"period": "week"}}) - assert start_t.isoformat() == periods["week"][0] - assert end_t.isoformat() == periods["week"][1] - - start_t, end_t = resolve_period({"calendar": {"period": "week", "offset": -1}}) - assert start_t.isoformat() == periods["week-1"][0] - assert end_t.isoformat() == periods["week-1"][1] - - start_t, end_t = resolve_period({"calendar": {"period": "month"}}) - assert start_t.isoformat() == periods["month"][0] - assert end_t.isoformat() == periods["month"][1] - - start_t, end_t = resolve_period({"calendar": {"period": "month", "offset": -1}}) - assert start_t.isoformat() == periods["month-1"][0] - assert end_t.isoformat() == periods["month-1"][1] - - start_t, end_t = resolve_period({"calendar": {"period": "year"}}) - assert start_t.isoformat() == periods["year"][0] - assert end_t.isoformat() == periods["year"][1] - - start_t, end_t = resolve_period({"calendar": {"period": "year", "offset": -1}}) - assert start_t.isoformat() == periods["year-1"][0] - assert end_t.isoformat() == periods["year-1"][1] + for period_def, expected_period in periods.items(): + start_t, end_t = resolve_period( + {"calendar": {"period": period_def[0], "offset": period_def[1]}} + ) + assert start_t.isoformat() == f"{expected_period[0]}+00:00" + assert end_t.isoformat() == f"{expected_period[1]}+00:00" # Fixed period assert resolve_period({}) == (None, None) From 977d8fd1c8a5f54ab970e59086189063fa3fc997 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 4 Dec 2024 14:51:10 +0100 Subject: [PATCH 0243/1198] Bump github/codeql-action from 3.27.5 to 3.27.6 (#132237) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/codeql.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 4977139f5dc..5b8ac94e570 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -24,11 +24,11 @@ jobs: uses: actions/checkout@v4.2.2 
- name: Initialize CodeQL - uses: github/codeql-action/init@v3.27.5 + uses: github/codeql-action/init@v3.27.6 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3.27.5 + uses: github/codeql-action/analyze@v3.27.6 with: category: "/language:python" From d88f6dc6b96a8624acbacccf58b5832412c16439 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Wed, 4 Dec 2024 14:56:42 +0100 Subject: [PATCH 0244/1198] Bump knocki to 0.4.2 (#129261) --- homeassistant/components/knocki/__init__.py | 5 ++--- homeassistant/components/knocki/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 5 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/knocki/__init__.py b/homeassistant/components/knocki/__init__.py index 42c3956bd68..dfdf060e3b5 100644 --- a/homeassistant/components/knocki/__init__.py +++ b/homeassistant/components/knocki/__init__.py @@ -41,13 +41,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: KnockiConfigEntry) -> bo await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - entry.async_create_background_task( - hass, client.start_websocket(), "knocki-websocket" - ) + await client.start_websocket() return True async def async_unload_entry(hass: HomeAssistant, entry: KnockiConfigEntry) -> bool: """Unload a config entry.""" + await entry.runtime_data.client.close() return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/knocki/manifest.json b/homeassistant/components/knocki/manifest.json index d9a45b18f0e..a91119ca831 100644 --- a/homeassistant/components/knocki/manifest.json +++ b/homeassistant/components/knocki/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "cloud_push", "loggers": ["knocki"], - "requirements": ["knocki==0.3.5"] + "requirements": ["knocki==0.4.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 9b33617588d..5d9bf8809e7 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1247,7 +1247,7 @@ kegtron-ble==0.4.0 kiwiki-client==0.1.1 # homeassistant.components.knocki -knocki==0.3.5 +knocki==0.4.2 # homeassistant.components.knx knx-frontend==2024.11.16.205004 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 64e9ea27d75..070a5d4512b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1046,7 +1046,7 @@ justnimbus==0.7.4 kegtron-ble==0.4.0 # homeassistant.components.knocki -knocki==0.3.5 +knocki==0.4.2 # homeassistant.components.knx knx-frontend==2024.11.16.205004 From 02db5ec88f9d1f0f37a8a397d09eb2c304eb46fb Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Wed, 4 Dec 2024 14:57:25 +0100 Subject: [PATCH 0245/1198] Update frontend to 20241127.4 (#132268) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 264f0756b82..97a67cbc082 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20241127.3"] + "requirements": ["home-assistant-frontend==20241127.4"] } diff --git 
a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 503937a44cb..dcd7a6be926 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -34,7 +34,7 @@ habluetooth==3.6.0 hass-nabucasa==0.85.0 hassil==2.0.5 home-assistant-bluetooth==1.13.0 -home-assistant-frontend==20241127.3 +home-assistant-frontend==20241127.4 home-assistant-intents==2024.12.2 httpx==0.27.2 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 5d9bf8809e7..889a9eb80a2 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1127,7 +1127,7 @@ hole==0.8.0 holidays==0.62 # homeassistant.components.frontend -home-assistant-frontend==20241127.3 +home-assistant-frontend==20241127.4 # homeassistant.components.conversation home-assistant-intents==2024.12.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 070a5d4512b..9ffe7fa21db 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -953,7 +953,7 @@ hole==0.8.0 holidays==0.62 # homeassistant.components.frontend -home-assistant-frontend==20241127.3 +home-assistant-frontend==20241127.4 # homeassistant.components.conversation home-assistant-intents==2024.12.2 From 8f43a71ff6e05fcb088bfbab1392b6c6679aea1e Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Wed, 4 Dec 2024 15:18:04 +0100 Subject: [PATCH 0246/1198] Ensure MQTT subscriptions can be made when the broker is disconnected (#132270) --- homeassistant/components/mqtt/client.py | 2 +- tests/components/mqtt/test_client.py | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/mqtt/client.py b/homeassistant/components/mqtt/client.py index 1dcd0928434..d8bc0862d29 100644 --- a/homeassistant/components/mqtt/client.py +++ b/homeassistant/components/mqtt/client.py @@ -227,7 +227,7 @@ def async_subscribe_internal( translation_placeholders={"topic": topic}, ) from exc client = mqtt_data.client - if not client.connected and not mqtt_config_entry_enabled(hass): + if not mqtt_config_entry_enabled(hass): raise HomeAssistantError( f"Cannot subscribe to topic '{topic}', MQTT is not enabled", translation_key="mqtt_not_setup_cannot_subscribe", diff --git a/tests/components/mqtt/test_client.py b/tests/components/mqtt/test_client.py index 164c164cdfc..4bfcde752ae 100644 --- a/tests/components/mqtt/test_client.py +++ b/tests/components/mqtt/test_client.py @@ -1045,10 +1045,17 @@ async def test_restore_subscriptions_on_reconnect( mqtt_client_mock.reset_mock() mqtt_client_mock.on_disconnect(None, None, 0) + # Test to subscribe orther topic while the client is not connected + await mqtt.async_subscribe(hass, "test/other", record_calls) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) # cooldown + assert ("test/other", 0) not in help_all_subscribe_calls(mqtt_client_mock) + mock_debouncer.clear() mqtt_client_mock.on_connect(None, None, None, 0) await mock_debouncer.wait() + # Assert all subscriptions are performed at the broker assert ("test/state", 0) in help_all_subscribe_calls(mqtt_client_mock) + assert ("test/other", 0) in help_all_subscribe_calls(mqtt_client_mock) @pytest.mark.parametrize( From 5c60cffd4d4d0b1ef9b97c8a03e95aaff77bbeaa Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Wed, 4 Dec 2024 10:02:00 -0600 Subject: [PATCH 0247/1198] Bump intents to 2024.12.4 (#132274) --- homeassistant/components/conversation/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 
script/hassfest/docker/Dockerfile | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index 2d2f2f58a3a..72e1cebf462 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", "quality_scale": "internal", - "requirements": ["hassil==2.0.5", "home-assistant-intents==2024.12.2"] + "requirements": ["hassil==2.0.5", "home-assistant-intents==2024.12.4"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index dcd7a6be926..138b8bedcce 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -35,7 +35,7 @@ hass-nabucasa==0.85.0 hassil==2.0.5 home-assistant-bluetooth==1.13.0 home-assistant-frontend==20241127.4 -home-assistant-intents==2024.12.2 +home-assistant-intents==2024.12.4 httpx==0.27.2 ifaddr==0.2.0 Jinja2==3.1.4 diff --git a/requirements_all.txt b/requirements_all.txt index 889a9eb80a2..fbe4b92d267 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1130,7 +1130,7 @@ holidays==0.62 home-assistant-frontend==20241127.4 # homeassistant.components.conversation -home-assistant-intents==2024.12.2 +home-assistant-intents==2024.12.4 # homeassistant.components.home_connect homeconnect==0.8.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 9ffe7fa21db..413c96df545 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -956,7 +956,7 @@ holidays==0.62 home-assistant-frontend==20241127.4 # homeassistant.components.conversation -home-assistant-intents==2024.12.2 +home-assistant-intents==2024.12.4 # homeassistant.components.home_connect homeconnect==0.8.0 diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index 38b8ba5e8d0..9c3b14ad4df 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -23,7 +23,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.5.4,source=/uv,target=/bin/uv \ -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ -r /usr/src/homeassistant/requirements.txt \ stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.1 \ - PyTurboJPEG==1.7.5 go2rtc-client==0.1.1 ha-ffmpeg==3.2.2 hassil==2.0.5 home-assistant-intents==2024.12.2 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 + PyTurboJPEG==1.7.5 go2rtc-client==0.1.1 ha-ffmpeg==3.2.2 hassil==2.0.5 home-assistant-intents==2024.12.4 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 LABEL "name"="hassfest" LABEL "maintainer"="Home Assistant " From b6b340ae636a335f07470678912dadc47565141f Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Wed, 4 Dec 2024 17:18:21 +0100 Subject: [PATCH 0248/1198] Add IronOS quality scale record (#131598) --- .../components/iron_os/quality_scale.yaml | 84 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 84 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/iron_os/quality_scale.yaml diff --git a/homeassistant/components/iron_os/quality_scale.yaml b/homeassistant/components/iron_os/quality_scale.yaml new file mode 100644 index 00000000000..b793af1815f --- /dev/null +++ b/homeassistant/components/iron_os/quality_scale.yaml @@ -0,0 +1,84 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: 
Integration does not have actions + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: todo + config-flow: done + dependency-transparency: done + docs-actions: + status: done + comment: Integration does register actions aside from entity actions + docs-high-level-description: todo + docs-installation-instructions: todo + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: Integration does not register events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: todo + test-before-setup: todo + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: Integration does not have actions + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: Integration has no options flow + docs-installation-parameters: + status: todo + comment: Needs bluetooth address as parameter + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: done + reauthentication-flow: + status: exempt + comment: Devices don't require authentication + test-coverage: done + + # Gold + devices: done + diagnostics: todo + discovery-update-info: + status: exempt + comment: Device is not connected to an ip network. Other information from discovery is immutable and does not require updating. + discovery: done + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: Only one device per config entry. New devices are set up as new entries. + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: done + icon-translations: done + reconfiguration-flow: + status: exempt + comment: Reconfiguration would force a new config entry + repair-issues: + status: exempt + comment: no repairs/issues + stale-devices: todo + + # Platinum + async-dependency: done + inject-websession: + status: exempt + comment: Device doesn't make http requests. 
+ strict-typing: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 63ca8b0d213..386f0af3e39 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -544,7 +544,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "ipp", "iqvia", "irish_rail_transport", - "iron_os", "isal", "iskra", "islamic_prayer_times", From b3ff8f56b9654235c58d3c07def87fb95ba09072 Mon Sep 17 00:00:00 2001 From: Tucker Kern Date: Wed, 4 Dec 2024 09:22:31 -0700 Subject: [PATCH 0249/1198] Refactor Snapcast client and group classes to use a common base clase (#124499) Co-authored-by: Joost Lekkerkerker --- homeassistant/components/snapcast/__init__.py | 19 +- .../components/snapcast/coordinator.py | 72 +++ homeassistant/components/snapcast/entity.py | 11 + .../components/snapcast/media_player.py | 489 ++++++++++-------- homeassistant/components/snapcast/server.py | 143 ----- 5 files changed, 359 insertions(+), 375 deletions(-) create mode 100644 homeassistant/components/snapcast/coordinator.py create mode 100644 homeassistant/components/snapcast/entity.py delete mode 100644 homeassistant/components/snapcast/server.py diff --git a/homeassistant/components/snapcast/__init__.py b/homeassistant/components/snapcast/__init__.py index a4163355944..b853535b525 100644 --- a/homeassistant/components/snapcast/__init__.py +++ b/homeassistant/components/snapcast/__init__.py @@ -1,37 +1,28 @@ """Snapcast Integration.""" -import logging - -import snapcast.control - from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from .const import DOMAIN, PLATFORMS -from .server import HomeAssistantSnapcast - -_LOGGER = logging.getLogger(__name__) +from .coordinator import SnapcastUpdateCoordinator async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Snapcast from a config entry.""" host = entry.data[CONF_HOST] port = entry.data[CONF_PORT] + coordinator = SnapcastUpdateCoordinator(hass, host, port) + try: - server = await snapcast.control.create_server( - hass.loop, host, port, reconnect=True - ) + await coordinator.async_config_entry_first_refresh() except OSError as ex: raise ConfigEntryNotReady( f"Could not connect to Snapcast server at {host}:{port}" ) from ex - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = HomeAssistantSnapcast( - hass, server, f"{host}:{port}", entry.entry_id - ) - + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True diff --git a/homeassistant/components/snapcast/coordinator.py b/homeassistant/components/snapcast/coordinator.py new file mode 100644 index 00000000000..5bb9ae4e51f --- /dev/null +++ b/homeassistant/components/snapcast/coordinator.py @@ -0,0 +1,72 @@ +"""Data update coordinator for Snapcast server.""" + +from __future__ import annotations + +import logging + +from snapcast.control.server import Snapserver + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +_LOGGER = logging.getLogger(__name__) + + +class SnapcastUpdateCoordinator(DataUpdateCoordinator[None]): + """Data update coordinator for pushed data from Snapcast server.""" + + def __init__(self, hass: HomeAssistant, host: str, port: int) -> None: + """Initialize coordinator.""" + super().__init__( + hass, + 
logger=_LOGGER, + name=f"{host}:{port}", + update_interval=None, # Disable update interval as server pushes + ) + + self._server = Snapserver(hass.loop, host, port, True) + self.last_update_success = False + + self._server.set_on_update_callback(self._on_update) + self._server.set_new_client_callback(self._on_update) + self._server.set_on_connect_callback(self._on_connect) + self._server.set_on_disconnect_callback(self._on_disconnect) + + def _on_update(self) -> None: + """Snapserver on_update callback.""" + # Assume availability if an update is received. + self.last_update_success = True + self.async_update_listeners() + + def _on_connect(self) -> None: + """Snapserver on_connect callback.""" + self.last_update_success = True + self.async_update_listeners() + + def _on_disconnect(self, ex): + """Snapsever on_disconnect callback.""" + self.async_set_update_error(ex) + + async def _async_setup(self) -> None: + """Perform async setup for the coordinator.""" + # Start the server + try: + await self._server.start() + except OSError as ex: + raise UpdateFailed from ex + + async def _async_update_data(self) -> None: + """Empty update method since data is pushed.""" + + async def disconnect(self) -> None: + """Disconnect from the server.""" + self._server.set_on_update_callback(None) + self._server.set_on_connect_callback(None) + self._server.set_on_disconnect_callback(None) + self._server.set_new_client_callback(None) + self._server.stop() + + @property + def server(self) -> Snapserver: + """Get the Snapserver object.""" + return self._server diff --git a/homeassistant/components/snapcast/entity.py b/homeassistant/components/snapcast/entity.py new file mode 100644 index 00000000000..cceeb6227fd --- /dev/null +++ b/homeassistant/components/snapcast/entity.py @@ -0,0 +1,11 @@ +"""Coordinator entity for Snapcast server.""" + +from __future__ import annotations + +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .coordinator import SnapcastUpdateCoordinator + + +class SnapcastCoordinatorEntity(CoordinatorEntity[SnapcastUpdateCoordinator]): + """Coordinator entity for Snapcast.""" diff --git a/homeassistant/components/snapcast/media_player.py b/homeassistant/components/snapcast/media_player.py index bda411acde3..0ec27c1ad9c 100644 --- a/homeassistant/components/snapcast/media_player.py +++ b/homeassistant/components/snapcast/media_player.py @@ -2,18 +2,29 @@ from __future__ import annotations -from snapcast.control.server import Snapserver +from collections.abc import Mapping +import logging +from typing import Any + +from snapcast.control.client import Snapclient +from snapcast.control.group import Snapgroup import voluptuous as vol from homeassistant.components.media_player import ( + DOMAIN as MEDIA_PLAYER_DOMAIN, MediaPlayerEntity, MediaPlayerEntityFeature, MediaPlayerState, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT -from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_validation as cv, entity_platform +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import ( + config_validation as cv, + entity_platform, + entity_registry as er, +) from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import ( @@ -30,6 +41,8 @@ from .const import ( SERVICE_SNAPSHOT, SERVICE_UNJOIN, ) +from .coordinator import SnapcastUpdateCoordinator +from .entity import 
SnapcastCoordinatorEntity STREAM_STATUS = { "idle": MediaPlayerState.IDLE, @@ -37,21 +50,23 @@ STREAM_STATUS = { "unknown": None, } +_LOGGER = logging.getLogger(__name__) -def register_services(): + +def register_services() -> None: """Register snapcast services.""" platform = entity_platform.async_get_current_platform() platform.async_register_entity_service(SERVICE_SNAPSHOT, None, "snapshot") platform.async_register_entity_service(SERVICE_RESTORE, None, "async_restore") platform.async_register_entity_service( - SERVICE_JOIN, {vol.Required(ATTR_MASTER): cv.entity_id}, handle_async_join + SERVICE_JOIN, {vol.Required(ATTR_MASTER): cv.entity_id}, "async_join" ) - platform.async_register_entity_service(SERVICE_UNJOIN, None, handle_async_unjoin) + platform.async_register_entity_service(SERVICE_UNJOIN, None, "async_unjoin") platform.async_register_entity_service( SERVICE_SET_LATENCY, {vol.Required(ATTR_LATENCY): cv.positive_int}, - handle_set_latency, + "async_set_latency", ) @@ -61,51 +76,103 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the snapcast config entry.""" - snapcast_server: Snapserver = hass.data[DOMAIN][config_entry.entry_id].server + + # Fetch coordinator from global data + coordinator: SnapcastUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id] + + # Create an ID for the Snapserver + host = config_entry.data[CONF_HOST] + port = config_entry.data[CONF_PORT] + host_id = f"{host}:{port}" register_services() - host = config_entry.data[CONF_HOST] - port = config_entry.data[CONF_PORT] - hpid = f"{host}:{port}" + _known_group_ids: set[str] = set() + _known_client_ids: set[str] = set() - groups: list[MediaPlayerEntity] = [ - SnapcastGroupDevice(group, hpid, config_entry.entry_id) - for group in snapcast_server.groups - ] - clients: list[MediaPlayerEntity] = [ - SnapcastClientDevice(client, hpid, config_entry.entry_id) - for client in snapcast_server.clients - ] - async_add_entities(clients + groups) - hass.data[DOMAIN][ - config_entry.entry_id - ].hass_async_add_entities = async_add_entities + @callback + def _check_entities() -> None: + nonlocal _known_group_ids, _known_client_ids + + def _update_known_ids(known_ids, ids) -> tuple[set[str], set[str]]: + ids_to_add = ids - known_ids + ids_to_remove = known_ids - ids + + # Update known IDs + known_ids.difference_update(ids_to_remove) + known_ids.update(ids_to_add) + + return ids_to_add, ids_to_remove + + group_ids = {g.identifier for g in coordinator.server.groups} + groups_to_add, groups_to_remove = _update_known_ids(_known_group_ids, group_ids) + + client_ids = {c.identifier for c in coordinator.server.clients} + clients_to_add, clients_to_remove = _update_known_ids( + _known_client_ids, client_ids + ) + + # Exit early if no changes + if not (groups_to_add | groups_to_remove | clients_to_add | clients_to_remove): + return + + _LOGGER.debug( + "New clients: %s", + str([coordinator.server.client(c).friendly_name for c in clients_to_add]), + ) + _LOGGER.debug( + "New groups: %s", + str([coordinator.server.group(g).friendly_name for g in groups_to_add]), + ) + _LOGGER.debug( + "Remove client IDs: %s", + str([list(clients_to_remove)]), + ) + _LOGGER.debug( + "Remove group IDs: %s", + str(list(groups_to_remove)), + ) + + # Add new entities + async_add_entities( + [ + SnapcastGroupDevice( + coordinator, coordinator.server.group(group_id), host_id + ) + for group_id in groups_to_add + ] + + [ + SnapcastClientDevice( + coordinator, coordinator.server.client(client_id), host_id + ) + for 
client_id in clients_to_add + ] + ) + + # Remove stale entities + entity_registry = er.async_get(hass) + for group_id in groups_to_remove: + if entity_id := entity_registry.async_get_entity_id( + MEDIA_PLAYER_DOMAIN, + DOMAIN, + SnapcastGroupDevice.get_unique_id(host_id, group_id), + ): + entity_registry.async_remove(entity_id) + + for client_id in clients_to_remove: + if entity_id := entity_registry.async_get_entity_id( + MEDIA_PLAYER_DOMAIN, + DOMAIN, + SnapcastClientDevice.get_unique_id(host_id, client_id), + ): + entity_registry.async_remove(entity_id) + + coordinator.async_add_listener(_check_entities) + _check_entities() -async def handle_async_join(entity, service_call): - """Handle the entity service join.""" - if not isinstance(entity, SnapcastClientDevice): - raise TypeError("Entity is not a client. Can only join clients.") - await entity.async_join(service_call.data[ATTR_MASTER]) - - -async def handle_async_unjoin(entity, service_call): - """Handle the entity service unjoin.""" - if not isinstance(entity, SnapcastClientDevice): - raise TypeError("Entity is not a client. Can only unjoin clients.") - await entity.async_unjoin() - - -async def handle_set_latency(entity, service_call): - """Handle the entity service set_latency.""" - if not isinstance(entity, SnapcastClientDevice): - raise TypeError("Latency can only be set for a Snapcast client.") - await entity.async_set_latency(service_call.data[ATTR_LATENCY]) - - -class SnapcastGroupDevice(MediaPlayerEntity): - """Representation of a Snapcast group device.""" +class SnapcastBaseDevice(SnapcastCoordinatorEntity, MediaPlayerEntity): + """Base class representing a Snapcast device.""" _attr_should_poll = False _attr_supported_features = ( @@ -114,166 +181,172 @@ class SnapcastGroupDevice(MediaPlayerEntity): | MediaPlayerEntityFeature.SELECT_SOURCE ) - def __init__(self, group, uid_part, entry_id): - """Initialize the Snapcast group device.""" - self._attr_available = True - self._group = group - self._entry_id = entry_id - self._attr_unique_id = f"{GROUP_PREFIX}{uid_part}_{self._group.identifier}" + def __init__( + self, + coordinator: SnapcastUpdateCoordinator, + device: Snapgroup | Snapclient, + host_id: str, + ) -> None: + """Initialize the base device.""" + super().__init__(coordinator) + + self._device = device + self._attr_unique_id = self.get_unique_id(host_id, device.identifier) + + @classmethod + def get_unique_id(cls, host, id) -> str: + """Build a unique ID.""" + raise NotImplementedError + + @property + def _current_group(self) -> Snapgroup: + """Return the group.""" + raise NotImplementedError async def async_added_to_hass(self) -> None: - """Subscribe to group events.""" - self._group.set_callback(self.schedule_update_ha_state) - self.hass.data[DOMAIN][self._entry_id].groups.append(self) + """Subscribe to events.""" + await super().async_added_to_hass() + self._device.set_callback(self.schedule_update_ha_state) async def async_will_remove_from_hass(self) -> None: - """Disconnect group object when removed.""" - self._group.set_callback(None) - self.hass.data[DOMAIN][self._entry_id].groups.remove(self) + """Disconnect object when removed.""" + self._device.set_callback(None) - def set_availability(self, available: bool) -> None: - """Set availability of group.""" - self._attr_available = available - self.schedule_update_ha_state() + @property + def identifier(self) -> str: + """Return the snapcast identifier.""" + return self._device.identifier + + @property + def source(self) -> str | None: + """Return the current input 
source.""" + return self._current_group.stream + + @property + def source_list(self) -> list[str]: + """List of available input sources.""" + return list(self._current_group.streams_by_name().keys()) + + async def async_select_source(self, source: str) -> None: + """Set input source.""" + streams = self._current_group.streams_by_name() + if source in streams: + await self._current_group.set_stream(streams[source].identifier) + self.async_write_ha_state() + + @property + def is_volume_muted(self) -> bool: + """Volume muted.""" + return self._device.muted + + async def async_mute_volume(self, mute: bool) -> None: + """Send the mute command.""" + await self._device.set_muted(mute) + self.async_write_ha_state() + + @property + def volume_level(self) -> float: + """Return the volume level.""" + return self._device.volume / 100 + + async def async_set_volume_level(self, volume: float) -> None: + """Set the volume level.""" + await self._device.set_volume(round(volume * 100)) + self.async_write_ha_state() + + def snapshot(self) -> None: + """Snapshot the group state.""" + self._device.snapshot() + + async def async_restore(self) -> None: + """Restore the group state.""" + await self._device.restore() + self.async_write_ha_state() + + async def async_set_latency(self, latency) -> None: + """Handle the set_latency service.""" + raise NotImplementedError + + async def async_join(self, master) -> None: + """Handle the join service.""" + raise NotImplementedError + + async def async_unjoin(self) -> None: + """Handle the unjoin service.""" + raise NotImplementedError + + +class SnapcastGroupDevice(SnapcastBaseDevice): + """Representation of a Snapcast group device.""" + + _device: Snapgroup + + @classmethod + def get_unique_id(cls, host, id) -> str: + """Get a unique ID for a group.""" + return f"{GROUP_PREFIX}{host}_{id}" + + @property + def _current_group(self) -> Snapgroup: + """Return the group.""" + return self._device + + @property + def name(self) -> str: + """Return the name of the device.""" + return f"{self._device.friendly_name} {GROUP_SUFFIX}" @property def state(self) -> MediaPlayerState | None: """Return the state of the player.""" if self.is_volume_muted: return MediaPlayerState.IDLE - return STREAM_STATUS.get(self._group.stream_status) + return STREAM_STATUS.get(self._device.stream_status) - @property - def identifier(self): - """Return the snapcast identifier.""" - return self._group.identifier + async def async_set_latency(self, latency) -> None: + """Handle the set_latency service.""" + raise ServiceValidationError("Latency can only be set for a Snapcast client.") - @property - def name(self): - """Return the name of the device.""" - return f"{self._group.friendly_name} {GROUP_SUFFIX}" + async def async_join(self, master) -> None: + """Handle the join service.""" + raise ServiceValidationError("Entity is not a client. 
Can only join clients.") - @property - def source(self): - """Return the current input source.""" - return self._group.stream - - @property - def volume_level(self): - """Return the volume level.""" - return self._group.volume / 100 - - @property - def is_volume_muted(self): - """Volume muted.""" - return self._group.muted - - @property - def source_list(self): - """List of available input sources.""" - return list(self._group.streams_by_name().keys()) - - async def async_select_source(self, source: str) -> None: - """Set input source.""" - streams = self._group.streams_by_name() - if source in streams: - await self._group.set_stream(streams[source].identifier) - self.async_write_ha_state() - - async def async_mute_volume(self, mute: bool) -> None: - """Send the mute command.""" - await self._group.set_muted(mute) - self.async_write_ha_state() - - async def async_set_volume_level(self, volume: float) -> None: - """Set the volume level.""" - await self._group.set_volume(round(volume * 100)) - self.async_write_ha_state() - - def snapshot(self): - """Snapshot the group state.""" - self._group.snapshot() - - async def async_restore(self): - """Restore the group state.""" - await self._group.restore() - self.async_write_ha_state() + async def async_unjoin(self) -> None: + """Handle the unjoin service.""" + raise ServiceValidationError("Entity is not a client. Can only unjoin clients.") -class SnapcastClientDevice(MediaPlayerEntity): +class SnapcastClientDevice(SnapcastBaseDevice): """Representation of a Snapcast client device.""" - _attr_should_poll = False - _attr_supported_features = ( - MediaPlayerEntityFeature.VOLUME_MUTE - | MediaPlayerEntityFeature.VOLUME_SET - | MediaPlayerEntityFeature.SELECT_SOURCE - ) + _device: Snapclient - def __init__(self, client, uid_part, entry_id): - """Initialize the Snapcast client device.""" - self._attr_available = True - self._client = client - # Note: Host part is needed, when using multiple snapservers - self._attr_unique_id = f"{CLIENT_PREFIX}{uid_part}_{self._client.identifier}" - self._entry_id = entry_id - - async def async_added_to_hass(self) -> None: - """Subscribe to client events.""" - self._client.set_callback(self.schedule_update_ha_state) - self.hass.data[DOMAIN][self._entry_id].clients.append(self) - - async def async_will_remove_from_hass(self) -> None: - """Disconnect client object when removed.""" - self._client.set_callback(None) - self.hass.data[DOMAIN][self._entry_id].clients.remove(self) - - def set_availability(self, available: bool) -> None: - """Set availability of group.""" - self._attr_available = available - self.schedule_update_ha_state() + @classmethod + def get_unique_id(cls, host, id) -> str: + """Get a unique ID for a client.""" + return f"{CLIENT_PREFIX}{host}_{id}" @property - def identifier(self): - """Return the snapcast identifier.""" - return self._client.identifier + def _current_group(self) -> Snapgroup: + """Return the group the client is associated with.""" + return self._device.group @property - def name(self): + def name(self) -> str: """Return the name of the device.""" - return f"{self._client.friendly_name} {CLIENT_SUFFIX}" - - @property - def source(self): - """Return the current input source.""" - return self._client.group.stream - - @property - def volume_level(self): - """Return the volume level.""" - return self._client.volume / 100 - - @property - def is_volume_muted(self): - """Volume muted.""" - return self._client.muted - - @property - def source_list(self): - """List of available input sources.""" - 
return list(self._client.group.streams_by_name().keys()) + return f"{self._device.friendly_name} {CLIENT_SUFFIX}" @property def state(self) -> MediaPlayerState | None: """Return the state of the player.""" - if self._client.connected: - if self.is_volume_muted or self._client.group.muted: + if self._device.connected: + if self.is_volume_muted or self._current_group.muted: return MediaPlayerState.IDLE - return STREAM_STATUS.get(self._client.group.stream_status) + return STREAM_STATUS.get(self._current_group.stream_status) return MediaPlayerState.STANDBY @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> Mapping[str, Any]: """Return the state attributes.""" state_attrs = {} if self.latency is not None: @@ -281,60 +354,40 @@ class SnapcastClientDevice(MediaPlayerEntity): return state_attrs @property - def latency(self): + def latency(self) -> float | None: """Latency for Client.""" - return self._client.latency + return self._device.latency - async def async_select_source(self, source: str) -> None: - """Set input source.""" - streams = self._client.group.streams_by_name() - if source in streams: - await self._client.group.set_stream(streams[source].identifier) - self.async_write_ha_state() - - async def async_mute_volume(self, mute: bool) -> None: - """Send the mute command.""" - await self._client.set_muted(mute) + async def async_set_latency(self, latency) -> None: + """Set the latency of the client.""" + await self._device.set_latency(latency) self.async_write_ha_state() - async def async_set_volume_level(self, volume: float) -> None: - """Set the volume level.""" - await self._client.set_volume(round(volume * 100)) - self.async_write_ha_state() - - async def async_join(self, master): + async def async_join(self, master) -> None: """Join the group of the master player.""" - master_entity = next( - entity - for entity in self.hass.data[DOMAIN][self._entry_id].clients - if entity.entity_id == master - ) - if not isinstance(master_entity, SnapcastClientDevice): - raise TypeError("Master is not a client device. Can only join clients.") + entity_registry = er.async_get(self.hass) + master_entity = entity_registry.async_get(master) + if master_entity is None: + raise ServiceValidationError(f"Master entity '{master}' not found.") + # Validate master entity is a client + unique_id = master_entity.unique_id + if not unique_id.startswith(CLIENT_PREFIX): + raise ServiceValidationError( + "Master is not a client device. Can only join clients." 
+ ) + + # Extract the client ID and locate it's group + identifier = unique_id.split("_")[-1] master_group = next( group - for group in self._client.groups_available() - if master_entity.identifier in group.clients + for group in self._device.groups_available() + if identifier in group.clients ) - await master_group.add_client(self._client.identifier) + await master_group.add_client(self._device.identifier) self.async_write_ha_state() - async def async_unjoin(self): + async def async_unjoin(self) -> None: """Unjoin the group the player is currently in.""" - await self._client.group.remove_client(self._client.identifier) - self.async_write_ha_state() - - def snapshot(self): - """Snapshot the client state.""" - self._client.snapshot() - - async def async_restore(self): - """Restore the client state.""" - await self._client.restore() - self.async_write_ha_state() - - async def async_set_latency(self, latency): - """Set the latency of the client.""" - await self._client.set_latency(latency) + await self._current_group.remove_client(self._device.identifier) self.async_write_ha_state() diff --git a/homeassistant/components/snapcast/server.py b/homeassistant/components/snapcast/server.py deleted file mode 100644 index ab4091e30af..00000000000 --- a/homeassistant/components/snapcast/server.py +++ /dev/null @@ -1,143 +0,0 @@ -"""Snapcast Integration.""" - -from __future__ import annotations - -import logging - -import snapcast.control -from snapcast.control.client import Snapclient - -from homeassistant.components.media_player import MediaPlayerEntity -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from .media_player import SnapcastClientDevice, SnapcastGroupDevice - -_LOGGER = logging.getLogger(__name__) - - -class HomeAssistantSnapcast: - """Snapcast server and data stored in the Home Assistant data object.""" - - hass: HomeAssistant - - def __init__( - self, - hass: HomeAssistant, - server: snapcast.control.Snapserver, - hpid: str, - entry_id: str, - ) -> None: - """Initialize the HomeAssistantSnapcast object. - - Parameters - ---------- - hass: HomeAssistant - hass object - server : snapcast.control.Snapserver - Snapcast server - hpid : str - host and port - entry_id: str - ConfigEntry entry_id - - Returns - ------- - None - - """ - self.hass: HomeAssistant = hass - self.server: snapcast.control.Snapserver = server - self.hpid: str = hpid - self._entry_id = entry_id - self.clients: list[SnapcastClientDevice] = [] - self.groups: list[SnapcastGroupDevice] = [] - self.hass_async_add_entities: AddEntitiesCallback - # connect callbacks - self.server.set_on_update_callback(self.on_update) - self.server.set_on_connect_callback(self.on_connect) - self.server.set_on_disconnect_callback(self.on_disconnect) - self.server.set_new_client_callback(self.on_add_client) - - async def disconnect(self) -> None: - """Disconnect from server.""" - self.server.set_on_update_callback(None) - self.server.set_on_connect_callback(None) - self.server.set_on_disconnect_callback(None) - self.server.set_new_client_callback(None) - self.server.stop() - - def on_update(self) -> None: - """Update all entities. - - Retrieve all groups/clients from server and add/update/delete entities. 
- """ - if not self.hass_async_add_entities: - return - new_groups: list[MediaPlayerEntity] = [] - groups: list[MediaPlayerEntity] = [] - hass_groups = {g.identifier: g for g in self.groups} - for group in self.server.groups: - if group.identifier in hass_groups: - groups.append(hass_groups[group.identifier]) - hass_groups[group.identifier].async_schedule_update_ha_state() - else: - new_groups.append(SnapcastGroupDevice(group, self.hpid, self._entry_id)) - new_clients: list[MediaPlayerEntity] = [] - clients: list[MediaPlayerEntity] = [] - hass_clients = {c.identifier: c for c in self.clients} - for client in self.server.clients: - if client.identifier in hass_clients: - clients.append(hass_clients[client.identifier]) - hass_clients[client.identifier].async_schedule_update_ha_state() - else: - new_clients.append( - SnapcastClientDevice(client, self.hpid, self._entry_id) - ) - del_entities: list[MediaPlayerEntity] = [ - x for x in self.groups if x not in groups - ] - del_entities.extend([x for x in self.clients if x not in clients]) - - _LOGGER.debug("New clients: %s", str([c.name for c in new_clients])) - _LOGGER.debug("New groups: %s", str([g.name for g in new_groups])) - _LOGGER.debug("Delete: %s", str(del_entities)) - - ent_reg = er.async_get(self.hass) - for entity in del_entities: - ent_reg.async_remove(entity.entity_id) - self.hass_async_add_entities(new_clients + new_groups) - - def on_connect(self) -> None: - """Activate all entities and update.""" - for client in self.clients: - client.set_availability(True) - for group in self.groups: - group.set_availability(True) - _LOGGER.debug("Server connected: %s", self.hpid) - self.on_update() - - def on_disconnect(self, ex: Exception | None) -> None: - """Deactivate all entities.""" - for client in self.clients: - client.set_availability(False) - for group in self.groups: - group.set_availability(False) - _LOGGER.warning( - "Server disconnected: %s. Trying to reconnect. %s", self.hpid, str(ex or "") - ) - - def on_add_client(self, client: Snapclient) -> None: - """Add a Snapcast client. - - Parameters - ---------- - client : Snapclient - Snapcast client to be added to HA. 
- - """ - if not self.hass_async_add_entities: - return - clients = [SnapcastClientDevice(client, self.hpid, self._entry_id)] - self.hass_async_add_entities(clients) From d92dbbf58b50b5bf17fcb7e09fe4db1b95f9f10b Mon Sep 17 00:00:00 2001 From: Klaas Schoute Date: Wed, 4 Dec 2024 17:26:04 +0100 Subject: [PATCH 0250/1198] Set new polling interval for Powerfox integration (#132263) --- homeassistant/components/powerfox/const.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/powerfox/const.py b/homeassistant/components/powerfox/const.py index 24f1310f970..0970e8a1b66 100644 --- a/homeassistant/components/powerfox/const.py +++ b/homeassistant/components/powerfox/const.py @@ -8,4 +8,4 @@ from typing import Final DOMAIN: Final = "powerfox" LOGGER = logging.getLogger(__package__) -SCAN_INTERVAL = timedelta(minutes=5) +SCAN_INTERVAL = timedelta(minutes=1) From bd1ad04dab5614b5cb92f1903fd45f0d15bd49ca Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Wed, 4 Dec 2024 18:20:59 +0100 Subject: [PATCH 0251/1198] Add ista EcoTrend quality scale record (#131580) --- .../ista_ecotrend/quality_scale.yaml | 80 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 80 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/ista_ecotrend/quality_scale.yaml diff --git a/homeassistant/components/ista_ecotrend/quality_scale.yaml b/homeassistant/components/ista_ecotrend/quality_scale.yaml new file mode 100644 index 00000000000..b942ecba487 --- /dev/null +++ b/homeassistant/components/ista_ecotrend/quality_scale.yaml @@ -0,0 +1,80 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: The integration registers no actions. + appropriate-polling: done + brands: done + common-modules: + status: todo + comment: Group the 3 different executor jobs as one executor job + config-flow-test-coverage: + status: todo + comment: test_form/docstrings outdated, test already_configuret, test abort conditions in reauth, + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: The integration registers no actions. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: The integration registers no events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: The integration registers no actions. + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: Integration has no configuration parameters + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: done + reauthentication-flow: done + test-coverage: done + + # Gold + devices: done + diagnostics: todo + discovery-update-info: + status: exempt + comment: The integration is a web service, there are no discoverable devices. + discovery: + status: exempt + comment: The integration is a web service, there are no discoverable devices. + docs-data-update: todo + docs-examples: todo + docs-known-limitations: done + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: todo + docs-use-cases: done + dynamic-devices: todo + entity-category: + status: done + comment: The default category is appropriate. 
+ entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: done + icon-translations: done + reconfiguration-flow: todo + repair-issues: todo + stale-devices: todo + + # Platinum + async-dependency: todo + inject-websession: todo + strict-typing: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 386f0af3e39..cb00d74564e 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -549,7 +549,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "islamic_prayer_times", "israel_rail", "iss", - "ista_ecotrend", "isy994", "itach", "itunes", From 8910dbbcd19b8821fe6addb16214272ea730f892 Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Wed, 4 Dec 2024 12:22:34 -0500 Subject: [PATCH 0252/1198] Record current IQS state for Cambridge Audio (#131080) --- .../cambridge_audio/quality_scale.yaml | 80 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 80 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/cambridge_audio/quality_scale.yaml diff --git a/homeassistant/components/cambridge_audio/quality_scale.yaml b/homeassistant/components/cambridge_audio/quality_scale.yaml new file mode 100644 index 00000000000..3d4963c3f29 --- /dev/null +++ b/homeassistant/components/cambridge_audio/quality_scale.yaml @@ -0,0 +1,80 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions beyond play media which is setup by the media player entity. + appropriate-polling: + status: exempt + comment: | + This integration uses a push API. No polling required. + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: done + entity-unique-id: done + has-entity-name: done + runtime-data: todo + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: done + reauthentication-flow: + status: exempt + comment: | + This integration does not require authentication. + parallel-updates: done + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: + status: exempt + comment: | + This integration does not have an options flow. + # Gold + entity-translations: done + entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: done + discovery: done + stale-devices: + status: exempt + comment: | + This integration is not a hub and as such only represents a single device. + diagnostics: done + exception-translations: done + icon-translations: done + reconfiguration-flow: todo + dynamic-devices: + status: exempt + comment: | + This integration is not a hub and only represents a single device. + discovery-update-info: done + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. 
+ docs-use-cases: done + docs-supported-devices: done + docs-supported-functions: done + docs-data-update: done + docs-known-limitations: done + docs-troubleshooting: done + docs-examples: done + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index cb00d74564e..ef64e55e0d5 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -234,7 +234,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "bthome", "buienradar", "caldav", - "cambridge_audio", "canary", "cast", "ccm15", From bd40e1e7df4e4948ee42ba1b93ec75a0e534f78c Mon Sep 17 00:00:00 2001 From: Thomas55555 <59625598+Thomas55555@users.noreply.github.com> Date: Wed, 4 Dec 2024 19:12:26 +0100 Subject: [PATCH 0253/1198] Add quality scale for Husqvarna Automower (#131560) --- .../husqvarna_automower/quality_scale.yaml | 87 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 87 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/husqvarna_automower/quality_scale.yaml diff --git a/homeassistant/components/husqvarna_automower/quality_scale.yaml b/homeassistant/components/husqvarna_automower/quality_scale.yaml new file mode 100644 index 00000000000..384d58b7ece --- /dev/null +++ b/homeassistant/components/husqvarna_automower/quality_scale.yaml @@ -0,0 +1,87 @@ +rules: + # Bronze + config-flow: done + test-before-configure: done + unique-config-entry: done + config-flow-test-coverage: done + runtime-data: done + test-before-setup: + status: todo + comment: | + Raise ConfigEntryAuthFailed earlier, when "amc:api" is missing in the token scope. + appropriate-polling: done + entity-unique-id: done + has-entity-name: done + entity-event-setup: + status: exempt + comment: | + Entities of this integration does not explicitly subscribe to events. + dependency-transparency: done + action-setup: + status: done + comment: | + The integration only has an entity service, registered in the platform. + common-modules: + status: todo + comment: | + Remove unused config_entry in coordinator. + Fix typos in entity.py + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + docs-actions: done + brands: done + + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: done + reauthentication-flow: done + parallel-updates: done + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: + status: exempt + comment: no configuration options + + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: todo + comment: Discovery not implemented, yet. + discovery: + status: todo + comment: | + Most of the mowers are connected with a SIM card, some of the also have a + Wifi connection. 
Check, if discovery with Wifi is possible + docs-data-update: todo + docs-examples: todo + docs-known-limitations: done + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: done + docs-use-cases: todo + dynamic-devices: + status: todo + comment: Add devices dynamically + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: done + icon-translations: done + reconfiguration-flow: + status: exempt + comment: no configuration possible + repair-issues: done + stale-devices: + status: todo + comment: We only remove devices on reload + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index ef64e55e0d5..4ef7ab0dc11 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -509,7 +509,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "hue", "huisbaasje", "hunterdouglas_powerview", - "husqvarna_automower", "husqvarna_automower_ble", "huum", "hvv_departures", From 719cbd307011ccbe6c8d4000a39627f38b972a40 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 4 Dec 2024 12:30:48 -0600 Subject: [PATCH 0254/1198] Fix test_dump_log_object timeouts in the CI (#132234) --- tests/components/profiler/test_init.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/components/profiler/test_init.py b/tests/components/profiler/test_init.py index 37940df437b..84314b7b22c 100644 --- a/tests/components/profiler/test_init.py +++ b/tests/components/profiler/test_init.py @@ -211,9 +211,10 @@ async def test_dump_log_object( assert hass.services.has_service(DOMAIN, SERVICE_DUMP_LOG_OBJECTS) - await hass.services.async_call( - DOMAIN, SERVICE_DUMP_LOG_OBJECTS, {CONF_TYPE: "DumpLogDummy"}, blocking=True - ) + with patch("objgraph.by_type", return_value=[obj1, obj2]): + await hass.services.async_call( + DOMAIN, SERVICE_DUMP_LOG_OBJECTS, {CONF_TYPE: "DumpLogDummy"}, blocking=True + ) assert "" in caplog.text assert "Failed to serialize" in caplog.text From 2977cf227e8107930a6ba2a32a9c06bd3cfd7329 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Wed, 4 Dec 2024 19:49:58 +0100 Subject: [PATCH 0255/1198] Add Bring! 
quality scale record (#131584) --- .../components/bring/quality_scale.yaml | 74 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 74 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/bring/quality_scale.yaml diff --git a/homeassistant/components/bring/quality_scale.yaml b/homeassistant/components/bring/quality_scale.yaml new file mode 100644 index 00000000000..b99c1ed24a9 --- /dev/null +++ b/homeassistant/components/bring/quality_scale.yaml @@ -0,0 +1,74 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: Only entity services + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: + status: todo + comment: Check uuid match in reauth + dependency-transparency: done + docs-actions: done + docs-high-level-description: todo + docs-installation-instructions: todo + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: The integration registers no events + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: done + integration-owner: done + log-when-unavailable: + status: done + comment: handled by coordinator + parallel-updates: todo + reauthentication-flow: done + test-coverage: done + + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: exempt + comment: Integration is a service and has no devices. + discovery: + status: exempt + comment: Integration is a service and has no devices. + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: todo + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + no repairs + stale-devices: todo + # Platinum + async-dependency: done + inject-websession: done + strict-typing: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 4ef7ab0dc11..e16d7d095b9 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -221,7 +221,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "bond", "bosch_shc", "braviatv", - "bring", "broadlink", "brother", "brottsplatskartan", From e55d8b2d2b5db3b72899fd98b82f342bc1e11c4c Mon Sep 17 00:00:00 2001 From: Thomas55555 <59625598+Thomas55555@users.noreply.github.com> Date: Wed, 4 Dec 2024 19:50:15 +0100 Subject: [PATCH 0256/1198] Check token scope earlier in Husqvarna Automower (#132289) --- .../components/husqvarna_automower/__init__.py | 10 +++++----- .../components/husqvarna_automower/quality_scale.yaml | 5 +---- 2 files changed, 6 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/husqvarna_automower/__init__.py b/homeassistant/components/husqvarna_automower/__init__.py index 822f81f5f75..3b08a766f1c 100644 --- a/homeassistant/components/husqvarna_automower/__init__.py +++ b/homeassistant/components/husqvarna_automower/__init__.py @@ -62,6 +62,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: AutomowerConfigEntry) -> raise ConfigEntryAuthFailed from err 
raise ConfigEntryNotReady from err + if "amc:api" not in entry.data["token"]["scope"]: + # We raise ConfigEntryAuthFailed here because the websocket can't be used + # without the scope. So only polling would be possible. + raise ConfigEntryAuthFailed + coordinator = AutomowerDataUpdateCoordinator(hass, automower_api, entry) await coordinator.async_config_entry_first_refresh() available_devices = list(coordinator.data) @@ -74,11 +79,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: AutomowerConfigEntry) -> "websocket_task", ) - if "amc:api" not in entry.data["token"]["scope"]: - # We raise ConfigEntryAuthFailed here because the websocket can't be used - # without the scope. So only polling would be possible. - raise ConfigEntryAuthFailed - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True diff --git a/homeassistant/components/husqvarna_automower/quality_scale.yaml b/homeassistant/components/husqvarna_automower/quality_scale.yaml index 384d58b7ece..1b5accafe17 100644 --- a/homeassistant/components/husqvarna_automower/quality_scale.yaml +++ b/homeassistant/components/husqvarna_automower/quality_scale.yaml @@ -5,10 +5,7 @@ rules: unique-config-entry: done config-flow-test-coverage: done runtime-data: done - test-before-setup: - status: todo - comment: | - Raise ConfigEntryAuthFailed earlier, when "amc:api" is missing in the token scope. + test-before-setup: done appropriate-polling: done entity-unique-id: done has-entity-name: done From 80ad154dcd790dc418fe97dd8125756d8ccad2d0 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Wed, 4 Dec 2024 20:04:50 +0100 Subject: [PATCH 0257/1198] Refactor template lock to only return LockState or None (#132093) * Refactor template lock to only return LockState or None * Test for false states * Use strings --- homeassistant/components/template/lock.py | 29 +++++++++++++++++------ 1 file changed, 22 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/template/lock.py b/homeassistant/components/template/lock.py index d7bb30dbba0..f194154a50c 100644 --- a/homeassistant/components/template/lock.py +++ b/homeassistant/components/template/lock.py @@ -18,7 +18,6 @@ from homeassistant.const import ( CONF_OPTIMISTIC, CONF_UNIQUE_ID, CONF_VALUE_TEMPLATE, - STATE_ON, ) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ServiceValidationError, TemplateError @@ -89,7 +88,7 @@ class TemplateLock(TemplateEntity, LockEntity): super().__init__( hass, config=config, fallback_name=DEFAULT_NAME, unique_id=unique_id ) - self._state: str | bool | LockState | None = None + self._state: LockState | None = None name = self._attr_name assert name self._state_template = config.get(CONF_VALUE_TEMPLATE) @@ -107,7 +106,7 @@ class TemplateLock(TemplateEntity, LockEntity): @property def is_locked(self) -> bool: """Return true if lock is locked.""" - return self._state in ("true", STATE_ON, LockState.LOCKED) + return self._state == LockState.LOCKED @property def is_jammed(self) -> bool: @@ -130,7 +129,7 @@ class TemplateLock(TemplateEntity, LockEntity): return self._state == LockState.OPEN @callback - def _update_state(self, result): + def _update_state(self, result: str | TemplateError) -> None: """Update the state from the template.""" super()._update_state(result) if isinstance(result, TemplateError): @@ -142,7 +141,23 @@ class TemplateLock(TemplateEntity, LockEntity): return if isinstance(result, str): - self._state = result.lower() + if result.lower() in ( + "true", + "on", + "locked", + 
): + self._state = LockState.LOCKED + elif result.lower() in ( + "false", + "off", + "unlocked", + ): + self._state = LockState.UNLOCKED + else: + try: + self._state = LockState(result.lower()) + except ValueError: + self._state = None return self._state = None @@ -189,7 +204,7 @@ class TemplateLock(TemplateEntity, LockEntity): self._raise_template_error_if_available() if self._optimistic: - self._state = True + self._state = LockState.LOCKED self.async_write_ha_state() tpl_vars = {ATTR_CODE: kwargs.get(ATTR_CODE) if kwargs else None} @@ -205,7 +220,7 @@ class TemplateLock(TemplateEntity, LockEntity): self._raise_template_error_if_available() if self._optimistic: - self._state = False + self._state = LockState.UNLOCKED self.async_write_ha_state() tpl_vars = {ATTR_CODE: kwargs.get(ATTR_CODE) if kwargs else None} From de0ffea52de218b6a50cde68027f5649b6fdb8cb Mon Sep 17 00:00:00 2001 From: Thomas55555 <59625598+Thomas55555@users.noreply.github.com> Date: Wed, 4 Dec 2024 20:28:43 +0100 Subject: [PATCH 0258/1198] Clean up common modules in Husqvarna Automower (#132290) --- homeassistant/components/husqvarna_automower/__init__.py | 2 +- homeassistant/components/husqvarna_automower/coordinator.py | 4 +--- homeassistant/components/husqvarna_automower/entity.py | 4 ++-- .../components/husqvarna_automower/quality_scale.yaml | 6 +----- 4 files changed, 5 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/husqvarna_automower/__init__.py b/homeassistant/components/husqvarna_automower/__init__.py index 3b08a766f1c..2cb2ebc1bd3 100644 --- a/homeassistant/components/husqvarna_automower/__init__.py +++ b/homeassistant/components/husqvarna_automower/__init__.py @@ -67,7 +67,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AutomowerConfigEntry) -> # without the scope. So only polling would be possible. 
raise ConfigEntryAuthFailed - coordinator = AutomowerDataUpdateCoordinator(hass, automower_api, entry) + coordinator = AutomowerDataUpdateCoordinator(hass, automower_api) await coordinator.async_config_entry_first_refresh() available_devices = list(coordinator.data) cleanup_removed_devices(hass, coordinator.config_entry, available_devices) diff --git a/homeassistant/components/husqvarna_automower/coordinator.py b/homeassistant/components/husqvarna_automower/coordinator.py index c19f37a040d..5f1fa022718 100644 --- a/homeassistant/components/husqvarna_automower/coordinator.py +++ b/homeassistant/components/husqvarna_automower/coordinator.py @@ -31,9 +31,7 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttrib config_entry: ConfigEntry - def __init__( - self, hass: HomeAssistant, api: AutomowerSession, entry: ConfigEntry - ) -> None: + def __init__(self, hass: HomeAssistant, api: AutomowerSession) -> None: """Initialize data updater.""" super().__init__( hass, diff --git a/homeassistant/components/husqvarna_automower/entity.py b/homeassistant/components/husqvarna_automower/entity.py index da6c0ae59ce..fef0ba03b62 100644 --- a/homeassistant/components/husqvarna_automower/entity.py +++ b/homeassistant/components/husqvarna_automower/entity.py @@ -133,7 +133,7 @@ class AutomowerControlEntity(AutomowerAvailableEntity): class WorkAreaAvailableEntity(AutomowerAvailableEntity): - """Base entity for work work areas.""" + """Base entity for work areas.""" def __init__( self, @@ -164,4 +164,4 @@ class WorkAreaAvailableEntity(AutomowerAvailableEntity): class WorkAreaControlEntity(WorkAreaAvailableEntity, AutomowerControlEntity): - """Base entity work work areas with control function.""" + """Base entity for work areas with control function.""" diff --git a/homeassistant/components/husqvarna_automower/quality_scale.yaml b/homeassistant/components/husqvarna_automower/quality_scale.yaml index 1b5accafe17..2287ccb4d4f 100644 --- a/homeassistant/components/husqvarna_automower/quality_scale.yaml +++ b/homeassistant/components/husqvarna_automower/quality_scale.yaml @@ -18,11 +18,7 @@ rules: status: done comment: | The integration only has an entity service, registered in the platform. - common-modules: - status: todo - comment: | - Remove unused config_entry in coordinator. - Fix typos in entity.py + common-modules: done docs-high-level-description: done docs-installation-instructions: done docs-removal-instructions: done From 106c5d4248b3e0cbd1bdc68486e84134b508fd79 Mon Sep 17 00:00:00 2001 From: Jeff Terrace Date: Wed, 4 Dec 2024 15:15:30 -0500 Subject: [PATCH 0259/1198] Add support for onvif tplink person and vehicle events (#130769) Co-authored-by: J. Nick Koston --- homeassistant/components/onvif/parsers.py | 57 ++++ tests/components/onvif/test_parsers.py | 335 ++++++++++++++++++++++ 2 files changed, 392 insertions(+) create mode 100644 tests/components/onvif/test_parsers.py diff --git a/homeassistant/components/onvif/parsers.py b/homeassistant/components/onvif/parsers.py index 57bd8a974db..d7bbaa4fb3f 100644 --- a/homeassistant/components/onvif/parsers.py +++ b/homeassistant/components/onvif/parsers.py @@ -370,6 +370,63 @@ async def async_parse_vehicle_detector(uid: str, msg) -> Event | None: return None +@PARSERS.register("tns1:RuleEngine/TPSmartEventDetector/TPSmartEvent") +@PARSERS.register("tns1:RuleEngine/PeopleDetector/People") +async def async_parse_tplink_detector(uid: str, msg) -> Event | None: + """Handle parsing tplink smart event messages. 
+ + Topic: tns1:RuleEngine/TPSmartEventDetector/TPSmartEvent + Topic: tns1:RuleEngine/PeopleDetector/People + """ + video_source = "" + video_analytics = "" + rule = "" + topic = "" + vehicle = False + person = False + enabled = False + try: + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "VideoSourceConfigurationToken": + video_source = _normalize_video_source(source.Value) + if source.Name == "VideoAnalyticsConfigurationToken": + video_analytics = source.Value + if source.Name == "Rule": + rule = source.Value + + for item in payload.Data.SimpleItem: + if item.Name == "IsVehicle": + vehicle = True + enabled = item.Value == "true" + if item.Name == "IsPeople": + person = True + enabled = item.Value == "true" + except (AttributeError, KeyError): + return None + + if vehicle: + return Event( + f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", + "Vehicle Detection", + "binary_sensor", + "motion", + None, + enabled, + ) + if person: + return Event( + f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", + "Person Detection", + "binary_sensor", + "motion", + None, + enabled, + ) + + return None + + @PARSERS.register("tns1:RuleEngine/MyRuleDetector/PeopleDetect") async def async_parse_person_detector(uid: str, msg) -> Event | None: """Handle parsing event message. diff --git a/tests/components/onvif/test_parsers.py b/tests/components/onvif/test_parsers.py new file mode 100644 index 00000000000..209e7cbccef --- /dev/null +++ b/tests/components/onvif/test_parsers.py @@ -0,0 +1,335 @@ +"""Test ONVIF parsers.""" + +import datetime +import os + +import onvif +import onvif.settings +from zeep import Client +from zeep.transports import Transport + +from homeassistant.components.onvif import models, parsers +from homeassistant.core import HomeAssistant + +TEST_UID = "test-unique-id" + + +async def get_event(notification_data: dict) -> models.Event: + """Take in a zeep dict, run it through the parser, and return an Event. + + When the parser encounters an unknown topic that it doesn't know how to parse, + it outputs a message 'No registered handler for event from ...' along with a + print out of the serialized xml message from zeep. If it tries to parse and + can't, it prints out 'Unable to parse event from ...' along with the same + serialized message. This method can take the output directly from these log + messages and run them through the parser, which makes it easy to add new unit + tests that verify the message can now be parsed. + """ + zeep_client = Client( + f"{os.path.dirname(onvif.__file__)}/wsdl/events.wsdl", + wsse=None, + transport=Transport(), + ) + + notif_msg_type = zeep_client.get_type("ns5:NotificationMessageHolderType") + assert notif_msg_type is not None + notif_msg = notif_msg_type(**notification_data) + assert notif_msg is not None + + # The xsd:any type embedded inside the message doesn't parse, so parse it manually. 
+ msg_elem = zeep_client.get_element("ns8:Message") + assert msg_elem is not None + msg_data = msg_elem(**notification_data["Message"]["_value_1"]) + assert msg_data is not None + notif_msg.Message._value_1 = msg_data + + parser = parsers.PARSERS.get(notif_msg.Topic._value_1) + assert parser is not None + + return await parser(TEST_UID, notif_msg) + + +async def test_line_detector_crossed(hass: HomeAssistant) -> None: + """Tests tns1:RuleEngine/LineDetector/Crossed.""" + event = await get_event( + { + "SubscriptionReference": { + "Address": {"_value_1": None, "_attr_1": None}, + "ReferenceParameters": None, + "Metadata": None, + "_value_1": None, + "_attr_1": None, + }, + "Topic": { + "_value_1": "tns1:RuleEngine/LineDetector/Crossed", + "Dialect": "http://www.onvif.org/ver10/tev/topicExpression/ConcreteSet", + "_attr_1": {}, + }, + "ProducerReference": { + "Address": { + "_value_1": "xx.xx.xx.xx/onvif/event/alarm", + "_attr_1": None, + }, + "ReferenceParameters": None, + "Metadata": None, + "_value_1": None, + "_attr_1": None, + }, + "Message": { + "_value_1": { + "Source": { + "SimpleItem": [ + { + "Name": "VideoSourceConfigurationToken", + "Value": "video_source_config1", + }, + { + "Name": "VideoAnalyticsConfigurationToken", + "Value": "analytics_video_source", + }, + {"Name": "Rule", "Value": "MyLineDetectorRule"}, + ], + "ElementItem": [], + "Extension": None, + "_attr_1": None, + }, + "Key": None, + "Data": { + "SimpleItem": [{"Name": "ObjectId", "Value": "0"}], + "ElementItem": [], + "Extension": None, + "_attr_1": None, + }, + "Extension": None, + "UtcTime": datetime.datetime(2020, 5, 24, 7, 24, 47), + "PropertyOperation": "Initialized", + "_attr_1": {}, + } + }, + } + ) + + assert event is not None + assert event.name == "Line Detector Crossed" + assert event.platform == "sensor" + assert event.value == "0" + assert event.uid == ( + f"{TEST_UID}_tns1:RuleEngine/LineDetector/" + "Crossed_video_source_config1_analytics_video_source_MyLineDetectorRule" + ) + + +async def test_tapo_vehicle(hass: HomeAssistant) -> None: + """Tests tns1:RuleEngine/TPSmartEventDetector/TPSmartEvent - vehicle.""" + event = await get_event( + { + "Message": { + "_value_1": { + "Data": { + "ElementItem": [], + "Extension": None, + "SimpleItem": [{"Name": "IsVehicle", "Value": "true"}], + "_attr_1": None, + }, + "Extension": None, + "Key": None, + "PropertyOperation": "Changed", + "Source": { + "ElementItem": [], + "Extension": None, + "SimpleItem": [ + { + "Name": "VideoSourceConfigurationToken", + "Value": "vsconf", + }, + { + "Name": "VideoAnalyticsConfigurationToken", + "Value": "VideoAnalyticsToken", + }, + { + "Name": "Rule", + "Value": "MyTPSmartEventDetectorRule", + }, + ], + "_attr_1": None, + }, + "UtcTime": datetime.datetime( + 2024, 11, 2, 0, 33, 11, tzinfo=datetime.UTC + ), + "_attr_1": {}, + } + }, + "ProducerReference": { + "Address": { + "_attr_1": None, + "_value_1": "http://192.168.56.127:5656/event", + }, + "Metadata": None, + "ReferenceParameters": None, + "_attr_1": None, + "_value_1": None, + }, + "SubscriptionReference": { + "Address": { + "_attr_1": None, + "_value_1": "http://192.168.56.127:2020/event-0_2020", + }, + "Metadata": None, + "ReferenceParameters": None, + "_attr_1": None, + "_value_1": None, + }, + "Topic": { + "Dialect": "http://www.onvif.org/ver10/tev/topicExpression/ConcreteSet", + "_attr_1": {}, + "_value_1": "tns1:RuleEngine/TPSmartEventDetector/TPSmartEvent", + }, + } + ) + + assert event is not None + assert event.name == "Vehicle Detection" + assert 
event.platform == "binary_sensor" + assert event.device_class == "motion" + assert event.value + assert event.uid == ( + f"{TEST_UID}_tns1:RuleEngine/TPSmartEventDetector/" + "TPSmartEvent_VideoSourceToken_VideoAnalyticsToken_MyTPSmartEventDetectorRule" + ) + + +async def test_tapo_person(hass: HomeAssistant) -> None: + """Tests tns1:RuleEngine/TPSmartEventDetector/TPSmartEvent - person.""" + event = await get_event( + { + "Message": { + "_value_1": { + "Data": { + "ElementItem": [], + "Extension": None, + "SimpleItem": [{"Name": "IsPeople", "Value": "true"}], + "_attr_1": None, + }, + "Extension": None, + "Key": None, + "PropertyOperation": "Changed", + "Source": { + "ElementItem": [], + "Extension": None, + "SimpleItem": [ + { + "Name": "VideoSourceConfigurationToken", + "Value": "vsconf", + }, + { + "Name": "VideoAnalyticsConfigurationToken", + "Value": "VideoAnalyticsToken", + }, + {"Name": "Rule", "Value": "MyPeopleDetectorRule"}, + ], + "_attr_1": None, + }, + "UtcTime": datetime.datetime( + 2024, 11, 3, 18, 40, 43, tzinfo=datetime.UTC + ), + "_attr_1": {}, + } + }, + "ProducerReference": { + "Address": { + "_attr_1": None, + "_value_1": "http://192.168.56.127:5656/event", + }, + "Metadata": None, + "ReferenceParameters": None, + "_attr_1": None, + "_value_1": None, + }, + "SubscriptionReference": { + "Address": { + "_attr_1": None, + "_value_1": "http://192.168.56.127:2020/event-0_2020", + }, + "Metadata": None, + "ReferenceParameters": None, + "_attr_1": None, + "_value_1": None, + }, + "Topic": { + "Dialect": "http://www.onvif.org/ver10/tev/topicExpression/ConcreteSet", + "_attr_1": {}, + "_value_1": "tns1:RuleEngine/PeopleDetector/People", + }, + } + ) + + assert event is not None + assert event.name == "Person Detection" + assert event.platform == "binary_sensor" + assert event.device_class == "motion" + assert event.value + assert event.uid == ( + f"{TEST_UID}_tns1:RuleEngine/PeopleDetector/" + "People_VideoSourceToken_VideoAnalyticsToken_MyPeopleDetectorRule" + ) + + +async def test_tapo_missing_attributes(hass: HomeAssistant) -> None: + """Tests async_parse_tplink_detector with missing fields.""" + event = await get_event( + { + "Message": { + "_value_1": { + "Data": { + "ElementItem": [], + "Extension": None, + "SimpleItem": [{"Name": "IsPeople", "Value": "true"}], + "_attr_1": None, + }, + } + }, + "Topic": { + "_value_1": "tns1:RuleEngine/PeopleDetector/People", + }, + } + ) + + assert event is None + + +async def test_tapo_unknown_type(hass: HomeAssistant) -> None: + """Tests async_parse_tplink_detector with unknown event type.""" + event = await get_event( + { + "Message": { + "_value_1": { + "Data": { + "ElementItem": [], + "Extension": None, + "SimpleItem": [{"Name": "IsNotPerson", "Value": "true"}], + "_attr_1": None, + }, + "Source": { + "ElementItem": [], + "Extension": None, + "SimpleItem": [ + { + "Name": "VideoSourceConfigurationToken", + "Value": "vsconf", + }, + { + "Name": "VideoAnalyticsConfigurationToken", + "Value": "VideoAnalyticsToken", + }, + {"Name": "Rule", "Value": "MyPeopleDetectorRule"}, + ], + }, + } + }, + "Topic": { + "_value_1": "tns1:RuleEngine/PeopleDetector/People", + }, + } + ) + + assert event is None From 437111453b461751ab56c10bc2e3b294e584ed81 Mon Sep 17 00:00:00 2001 From: mkmer Date: Wed, 4 Dec 2024 15:49:23 -0500 Subject: [PATCH 0260/1198] Bump aiosomecomfort to 0.0.28 in Honeywell (#132294) Bump aiosomecomfort --- homeassistant/components/honeywell/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 
files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/honeywell/manifest.json b/homeassistant/components/honeywell/manifest.json index d0f0c8281f7..4a50e326965 100644 --- a/homeassistant/components/honeywell/manifest.json +++ b/homeassistant/components/honeywell/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/honeywell", "iot_class": "cloud_polling", "loggers": ["somecomfort"], - "requirements": ["AIOSomecomfort==0.0.25"] + "requirements": ["AIOSomecomfort==0.0.28"] } diff --git a/requirements_all.txt b/requirements_all.txt index fbe4b92d267..4d114841761 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -7,7 +7,7 @@ AEMET-OpenData==0.6.3 # homeassistant.components.honeywell -AIOSomecomfort==0.0.25 +AIOSomecomfort==0.0.28 # homeassistant.components.adax Adax-local==0.1.5 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 413c96df545..9c0d22b51ec 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -7,7 +7,7 @@ AEMET-OpenData==0.6.3 # homeassistant.components.honeywell -AIOSomecomfort==0.0.25 +AIOSomecomfort==0.0.28 # homeassistant.components.adax Adax-local==0.1.5 From 950563cf32e0bd594cdc37a8c7196fa6f9e18deb Mon Sep 17 00:00:00 2001 From: mkmer Date: Wed, 4 Dec 2024 15:54:12 -0500 Subject: [PATCH 0261/1198] Use config_entry.runtime_data in Honeywell (#132297) * Use entry.runtime_data * switch * create new type * Extend ConfigEntry * simplify runtime_data, clean up data types * More config_entry types * Yet more missing type changes --- .../components/honeywell/__init__.py | 26 ++++++++++--------- homeassistant/components/honeywell/climate.py | 11 ++++---- .../components/honeywell/diagnostics.py | 8 +++--- homeassistant/components/honeywell/sensor.py | 7 +++-- homeassistant/components/honeywell/switch.py | 7 +++-- 5 files changed, 29 insertions(+), 30 deletions(-) diff --git a/homeassistant/components/honeywell/__init__.py b/homeassistant/components/honeywell/__init__.py index 5a4d6374304..a8ee5975914 100644 --- a/homeassistant/components/honeywell/__init__.py +++ b/homeassistant/components/honeywell/__init__.py @@ -26,10 +26,12 @@ PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.SWITCH] MIGRATE_OPTIONS_KEYS = {CONF_COOL_AWAY_TEMPERATURE, CONF_HEAT_AWAY_TEMPERATURE} +type HoneywellConfigEntry = ConfigEntry[HoneywellData] + @callback def _async_migrate_data_to_options( - hass: HomeAssistant, config_entry: ConfigEntry + hass: HomeAssistant, config_entry: HoneywellConfigEntry ) -> None: if not MIGRATE_OPTIONS_KEYS.intersection(config_entry.data): return @@ -45,7 +47,9 @@ def _async_migrate_data_to_options( ) -async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_setup_entry( + hass: HomeAssistant, config_entry: HoneywellConfigEntry +) -> bool: """Set up the Honeywell thermostat.""" _async_migrate_data_to_options(hass, config_entry) @@ -84,8 +88,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b if len(devices) == 0: _LOGGER.debug("No devices found") return False - data = HoneywellData(config_entry.entry_id, client, devices) - hass.data.setdefault(DOMAIN, {})[config_entry.entry_id] = data + config_entry.runtime_data = HoneywellData(config_entry.entry_id, client, devices) await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) config_entry.async_on_unload(config_entry.add_update_listener(update_listener)) @@ -93,19 +96,18 @@ async def 
async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b return True -async def update_listener(hass: HomeAssistant, config_entry: ConfigEntry) -> None: +async def update_listener( + hass: HomeAssistant, config_entry: HoneywellConfigEntry +) -> None: """Update listener.""" await hass.config_entries.async_reload(config_entry.entry_id) -async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, config_entry: HoneywellConfigEntry +) -> bool: """Unload the config and platforms.""" - unload_ok = await hass.config_entries.async_unload_platforms( - config_entry, PLATFORMS - ) - if unload_ok: - hass.data[DOMAIN].pop(config_entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS) @dataclass diff --git a/homeassistant/components/honeywell/climate.py b/homeassistant/components/honeywell/climate.py index d4e5ee10a6b..9f6b7682470 100644 --- a/homeassistant/components/honeywell/climate.py +++ b/homeassistant/components/honeywell/climate.py @@ -31,7 +31,6 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError @@ -40,7 +39,7 @@ from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.unit_conversion import TemperatureConverter -from . import HoneywellData +from . import HoneywellConfigEntry, HoneywellData from .const import ( _LOGGER, CONF_COOL_AWAY_TEMPERATURE, @@ -97,13 +96,15 @@ SCAN_INTERVAL = datetime.timedelta(seconds=30) async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: HoneywellConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Honeywell thermostat.""" cool_away_temp = entry.options.get(CONF_COOL_AWAY_TEMPERATURE) heat_away_temp = entry.options.get(CONF_HEAT_AWAY_TEMPERATURE) - data: HoneywellData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data _async_migrate_unique_id(hass, data.devices) async_add_entities( [ @@ -131,7 +132,7 @@ def _async_migrate_unique_id( def remove_stale_devices( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: HoneywellConfigEntry, devices: dict[str, SomeComfortDevice], ) -> None: """Remove stale devices from device registry.""" diff --git a/homeassistant/components/honeywell/diagnostics.py b/homeassistant/components/honeywell/diagnostics.py index 35624c8fc39..b266e06d110 100644 --- a/homeassistant/components/honeywell/diagnostics.py +++ b/homeassistant/components/honeywell/diagnostics.py @@ -4,19 +4,17 @@ from __future__ import annotations from typing import Any -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from . import HoneywellData -from .const import DOMAIN +from . 
import HoneywellConfigEntry async def async_get_config_entry_diagnostics( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: HoneywellConfigEntry, ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - honeywell: HoneywellData = hass.data[DOMAIN][config_entry.entry_id] + honeywell = config_entry.runtime_data return { f"Device {device}": { diff --git a/homeassistant/components/honeywell/sensor.py b/homeassistant/components/honeywell/sensor.py index 31ed8d646c5..a9109d5d557 100644 --- a/homeassistant/components/honeywell/sensor.py +++ b/homeassistant/components/honeywell/sensor.py @@ -14,14 +14,13 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from . import HoneywellData +from . import HoneywellConfigEntry from .const import DOMAIN OUTDOOR_TEMPERATURE_STATUS_KEY = "outdoor_temperature" @@ -81,11 +80,11 @@ SENSOR_TYPES: tuple[HoneywellSensorEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: HoneywellConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Honeywell thermostat.""" - data: HoneywellData = hass.data[DOMAIN][config_entry.entry_id] + data = config_entry.runtime_data async_add_entities( HoneywellSensor(device, description) diff --git a/homeassistant/components/honeywell/switch.py b/homeassistant/components/honeywell/switch.py index b90dd339593..3602dd1ba10 100644 --- a/homeassistant/components/honeywell/switch.py +++ b/homeassistant/components/honeywell/switch.py @@ -12,13 +12,12 @@ from homeassistant.components.switch import ( SwitchEntity, SwitchEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HoneywellData +from . import HoneywellConfigEntry, HoneywellData from .const import DOMAIN EMERGENCY_HEAT_KEY = "emergency_heat" @@ -34,11 +33,11 @@ SWITCH_TYPES: tuple[SwitchEntityDescription, ...] 
= ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: HoneywellConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Honeywell switches.""" - data: HoneywellData = hass.data[DOMAIN][config_entry.entry_id] + data = config_entry.runtime_data async_add_entities( HoneywellSwitch(data, device, description) for device in data.devices.values() From 94b16da90f0b6863255fb5fdbfee554e28c82045 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Wed, 4 Dec 2024 22:58:45 +0100 Subject: [PATCH 0262/1198] Set command_line quality scale to legacy (#132306) --- homeassistant/components/command_line/manifest.json | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/command_line/manifest.json b/homeassistant/components/command_line/manifest.json index 3e76cf4a6a6..2a54f500504 100644 --- a/homeassistant/components/command_line/manifest.json +++ b/homeassistant/components/command_line/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@gjohansson-ST"], "documentation": "https://www.home-assistant.io/integrations/command_line", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["jsonpath==0.82.2"] } From 84e6c0b9ac428812e36cf6f7cdfd651fe2797308 Mon Sep 17 00:00:00 2001 From: Alberto Geniola Date: Wed, 4 Dec 2024 23:59:40 +0100 Subject: [PATCH 0263/1198] Bump elmax-api to 0.0.6.3 (#131876) --- homeassistant/components/elmax/common.py | 2 +- homeassistant/components/elmax/config_flow.py | 2 +- homeassistant/components/elmax/cover.py | 4 ++-- homeassistant/components/elmax/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/elmax/conftest.py | 17 +++++++++++++++-- 7 files changed, 22 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/elmax/common.py b/homeassistant/components/elmax/common.py index 88e61e36a68..18350e45efe 100644 --- a/homeassistant/components/elmax/common.py +++ b/homeassistant/components/elmax/common.py @@ -35,7 +35,7 @@ def check_local_version_supported(api_version: str | None) -> bool: class DirectPanel(PanelEntry): """Helper class for wrapping a directly accessed Elmax Panel.""" - def __init__(self, panel_uri): + def __init__(self, panel_uri) -> None: """Construct the object.""" super().__init__(panel_uri, True, {}) diff --git a/homeassistant/components/elmax/config_flow.py b/homeassistant/components/elmax/config_flow.py index bf479e997ef..3bb01efd3d5 100644 --- a/homeassistant/components/elmax/config_flow.py +++ b/homeassistant/components/elmax/config_flow.py @@ -203,7 +203,7 @@ class ElmaxConfigFlow(ConfigFlow, domain=DOMAIN): async def async_step_direct(self, user_input: dict[str, Any]) -> ConfigFlowResult: """Handle the direct setup step.""" - self._selected_mode = CONF_ELMAX_MODE_CLOUD + self._selected_mode = CONF_ELMAX_MODE_DIRECT if user_input is None: return self.async_show_form( step_id=CONF_ELMAX_MODE_DIRECT, diff --git a/homeassistant/components/elmax/cover.py b/homeassistant/components/elmax/cover.py index a53c28c5f33..403bc51dbff 100644 --- a/homeassistant/components/elmax/cover.py +++ b/homeassistant/components/elmax/cover.py @@ -121,13 +121,13 @@ class ElmaxCover(ElmaxEntity, CoverEntity): else: _LOGGER.debug("Ignoring stop request as the cover is IDLE") - async def async_open_cover(self, **kwargs): + async def async_open_cover(self, **kwargs: Any) -> None: """Open the cover.""" await self.coordinator.http_client.execute_command( endpoint_id=self._device.endpoint_id, command=CoverCommand.UP ) - async def 
async_close_cover(self, **kwargs): + async def async_close_cover(self, **kwargs: Any) -> None: """Close the cover.""" await self.coordinator.http_client.execute_command( endpoint_id=self._device.endpoint_id, command=CoverCommand.DOWN diff --git a/homeassistant/components/elmax/manifest.json b/homeassistant/components/elmax/manifest.json index efa97a9f6b9..dfa20326d0c 100644 --- a/homeassistant/components/elmax/manifest.json +++ b/homeassistant/components/elmax/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/elmax", "iot_class": "cloud_polling", "loggers": ["elmax_api"], - "requirements": ["elmax-api==0.0.6.1"], + "requirements": ["elmax-api==0.0.6.3"], "zeroconf": [ { "type": "_elmax-ssl._tcp.local." diff --git a/requirements_all.txt b/requirements_all.txt index 4d114841761..55a5d3a9e5a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -821,7 +821,7 @@ eliqonline==1.2.2 elkm1-lib==2.2.10 # homeassistant.components.elmax -elmax-api==0.0.6.1 +elmax-api==0.0.6.3 # homeassistant.components.elvia elvia==0.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 9c0d22b51ec..6c4349415ab 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -696,7 +696,7 @@ elgato==5.1.2 elkm1-lib==2.2.10 # homeassistant.components.elmax -elmax-api==0.0.6.1 +elmax-api==0.0.6.3 # homeassistant.components.elvia elvia==0.1.0 diff --git a/tests/components/elmax/conftest.py b/tests/components/elmax/conftest.py index f92fc2f1827..f8cf33ffe1a 100644 --- a/tests/components/elmax/conftest.py +++ b/tests/components/elmax/conftest.py @@ -1,6 +1,7 @@ """Configuration for Elmax tests.""" from collections.abc import Generator +from datetime import datetime, timedelta import json from unittest.mock import AsyncMock, patch @@ -11,6 +12,7 @@ from elmax_api.constants import ( ENDPOINT_LOGIN, ) from httpx import Response +import jwt import pytest import respx @@ -64,9 +66,20 @@ def httpx_mock_direct_fixture() -> Generator[respx.MockRouter]: ) as respx_mock: # Mock Login POST. login_route = respx_mock.post(f"/api/v2/{ENDPOINT_LOGIN}", name="login") - login_route.return_value = Response( - 200, json=json.loads(load_fixture("direct/login.json", "elmax")) + + login_json = json.loads(load_fixture("direct/login.json", "elmax")) + decoded_jwt = jwt.decode_complete( + login_json["token"].split(" ")[1], + algorithms="HS256", + options={"verify_signature": False}, ) + expiration = datetime.now() + timedelta(hours=1) + decoded_jwt["payload"]["exp"] = int(expiration.timestamp()) + jws_string = jwt.encode( + payload=decoded_jwt["payload"], algorithm="HS256", key="" + ) + login_json["token"] = f"JWT {jws_string}" + login_route.return_value = Response(200, json=login_json) # Mock Device list GET. 
list_devices_route = respx_mock.get( From 1456d5802d43ce88682622296763ae6af5471384 Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Wed, 4 Dec 2024 18:20:27 -0500 Subject: [PATCH 0264/1198] Fix runtime data in Cambridge Audio (#132285) * Fix runtime data in Cambridge Audio * Update --- homeassistant/components/cambridge_audio/media_player.py | 4 ++-- homeassistant/components/cambridge_audio/quality_scale.yaml | 2 +- homeassistant/components/cambridge_audio/select.py | 4 ++-- homeassistant/components/cambridge_audio/switch.py | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/cambridge_audio/media_player.py b/homeassistant/components/cambridge_audio/media_player.py index 805cf8ec7f6..9896effb07d 100644 --- a/homeassistant/components/cambridge_audio/media_player.py +++ b/homeassistant/components/cambridge_audio/media_player.py @@ -20,11 +20,11 @@ from homeassistant.components.media_player import ( MediaType, RepeatMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import CambridgeAudioConfigEntry from .const import ( CAMBRIDGE_MEDIA_TYPE_AIRABLE, CAMBRIDGE_MEDIA_TYPE_INTERNET_RADIO, @@ -62,7 +62,7 @@ PARALLEL_UPDATES = 0 async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: CambridgeAudioConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Cambridge Audio device based on a config entry.""" diff --git a/homeassistant/components/cambridge_audio/quality_scale.yaml b/homeassistant/components/cambridge_audio/quality_scale.yaml index 3d4963c3f29..65b921268f4 100644 --- a/homeassistant/components/cambridge_audio/quality_scale.yaml +++ b/homeassistant/components/cambridge_audio/quality_scale.yaml @@ -20,7 +20,7 @@ rules: entity-event-setup: done entity-unique-id: done has-entity-name: done - runtime-data: todo + runtime-data: done test-before-configure: done test-before-setup: done unique-config-entry: done diff --git a/homeassistant/components/cambridge_audio/select.py b/homeassistant/components/cambridge_audio/select.py index b1bc0f9e4df..6bfe83c2539 100644 --- a/homeassistant/components/cambridge_audio/select.py +++ b/homeassistant/components/cambridge_audio/select.py @@ -7,11 +7,11 @@ from aiostreammagic import StreamMagicClient from aiostreammagic.models import DisplayBrightness from homeassistant.components.select import SelectEntity, SelectEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import CambridgeAudioConfigEntry from .entity import CambridgeAudioEntity, command PARALLEL_UPDATES = 0 @@ -81,7 +81,7 @@ CONTROL_ENTITIES: tuple[CambridgeAudioSelectEntityDescription, ...] 
= ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: CambridgeAudioConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Cambridge Audio select entities based on a config entry.""" diff --git a/homeassistant/components/cambridge_audio/switch.py b/homeassistant/components/cambridge_audio/switch.py index 72aa0d3cbea..065a1da4f94 100644 --- a/homeassistant/components/cambridge_audio/switch.py +++ b/homeassistant/components/cambridge_audio/switch.py @@ -7,11 +7,11 @@ from typing import Any from aiostreammagic import StreamMagicClient from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import CambridgeAudioConfigEntry from .entity import CambridgeAudioEntity, command PARALLEL_UPDATES = 0 @@ -45,7 +45,7 @@ CONTROL_ENTITIES: tuple[CambridgeAudioSwitchEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: CambridgeAudioConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Cambridge Audio switch entities based on a config entry.""" From f68b78d00ebe5cba3a34a62510778a9348274d76 Mon Sep 17 00:00:00 2001 From: Artur Pragacz <49985303+arturpragacz@users.noreply.github.com> Date: Thu, 5 Dec 2024 02:34:07 +0100 Subject: [PATCH 0265/1198] Add quality scale to Onkyo (#131322) * Add quality scale to Onkyo * Update homeassistant/components/onkyo/quality_scale.yaml Co-authored-by: Joost Lekkerkerker * docs limitations todo Co-authored-by: Franck Nijhof * entity event setup --------- Co-authored-by: Joost Lekkerkerker Co-authored-by: Franck Nijhof --- .../components/onkyo/quality_scale.yaml | 86 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 86 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/onkyo/quality_scale.yaml diff --git a/homeassistant/components/onkyo/quality_scale.yaml b/homeassistant/components/onkyo/quality_scale.yaml new file mode 100644 index 00000000000..46f0f6d3b0d --- /dev/null +++ b/homeassistant/components/onkyo/quality_scale.yaml @@ -0,0 +1,86 @@ +rules: + # Bronze + action-setup: done + appropriate-polling: + status: exempt + comment: | + This integration uses a push API. No polling required. + brands: done + common-modules: done + config-flow: + status: todo + comment: | + The data_descriptions are missing. + config-flow-test-coverage: + status: todo + comment: | + Coverage is 100%, but the tests need to be improved. + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: done + comment: | + Currently we store created entities in hass.data. That should be removed in the future. + entity-unique-id: done + has-entity-name: todo + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: todo + config-entry-unloading: done + docs-configuration-parameters: done + docs-installation-parameters: done + entity-unavailable: todo + integration-owner: done + log-when-unavailable: todo + parallel-updates: todo + reauthentication-flow: + status: exempt + comment: | + This integration does not require authentication. 
+ test-coverage: todo + # Gold + devices: todo + diagnostics: todo + discovery: todo + discovery-update-info: todo + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: done + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + This integration has a fixed single device. + entity-category: done + entity-device-class: todo + entity-disabled-by-default: done + entity-translations: todo + exception-translations: todo + icon-translations: todo + reconfiguration-flow: done + repair-issues: done + stale-devices: + status: exempt + comment: | + This integration has a fixed single device. + + # Platinum + async-dependency: done + inject-websession: + status: exempt + comment: | + This integration is not making any HTTP requests. + strict-typing: + status: todo + comment: | + The library is not fully typed yet. diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index e16d7d095b9..137fa3084a9 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -759,7 +759,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "oncue", "ondilo_ico", "onewire", - "onkyo", "onvif", "open_meteo", "openai_conversation", From 5137b06ee7b5e797ef0ac6170466cfdd622d4ea6 Mon Sep 17 00:00:00 2001 From: Tobias Perschon Date: Thu, 5 Dec 2024 02:53:33 +0100 Subject: [PATCH 0266/1198] Remove stale requirement for androidtv (#132319) * removed stale pure-python-adb reference Signed-off-by: Tobias Perschon * reverted wrong changes Signed-off-by: Tobias Perschon * removed wrong file Signed-off-by: Tobias Perschon * cosmetic update Signed-off-by: Tobias Perschon --------- Signed-off-by: Tobias Perschon --- homeassistant/components/androidtv/__init__.py | 2 +- homeassistant/components/androidtv/entity.py | 2 +- homeassistant/components/androidtv/manifest.json | 8 ++------ requirements_all.txt | 3 --- requirements_test_all.txt | 3 --- 5 files changed, 4 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/androidtv/__init__.py b/homeassistant/components/androidtv/__init__.py index 34c4212c913..44e4c54b560 100644 --- a/homeassistant/components/androidtv/__init__.py +++ b/homeassistant/components/androidtv/__init__.py @@ -110,7 +110,7 @@ def _setup_androidtv( adb_log = f"using Python ADB implementation with adbkey='{adbkey}'" else: - # Use "pure-python-adb" (communicate with ADB server) + # Communicate via ADB server signer = None adb_log = ( "using ADB server at" diff --git a/homeassistant/components/androidtv/entity.py b/homeassistant/components/androidtv/entity.py index 626dd0f7794..fa583bb2777 100644 --- a/homeassistant/components/androidtv/entity.py +++ b/homeassistant/components/androidtv/entity.py @@ -151,5 +151,5 @@ class AndroidTVEntity(Entity): # Using "adb_shell" (Python ADB implementation) self.exceptions = ADB_PYTHON_EXCEPTIONS else: - # Using "pure-python-adb" (communicate with ADB server) + # Communicate via ADB server self.exceptions = ADB_TCP_EXCEPTIONS diff --git a/homeassistant/components/androidtv/manifest.json b/homeassistant/components/androidtv/manifest.json index fe8e36f0c2f..e30d03fc2d5 100644 --- a/homeassistant/components/androidtv/manifest.json +++ b/homeassistant/components/androidtv/manifest.json @@ -6,10 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/androidtv", "integration_type": "device", "iot_class": "local_polling", - "loggers": ["adb_shell", "androidtv", "pure_python_adb"], - 
"requirements": [ - "adb-shell[async]==0.4.4", - "androidtv[async]==0.0.75", - "pure-python-adb[async]==0.3.0.dev0" - ] + "loggers": ["adb_shell", "androidtv"], + "requirements": ["adb-shell[async]==0.4.4", "androidtv[async]==0.0.75"] } diff --git a/requirements_all.txt b/requirements_all.txt index 55a5d3a9e5a..5fe9f3950da 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1662,9 +1662,6 @@ psutil==6.1.0 # homeassistant.components.pulseaudio_loopback pulsectl==23.5.2 -# homeassistant.components.androidtv -pure-python-adb[async]==0.3.0.dev0 - # homeassistant.components.pushbullet pushbullet.py==0.11.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 6c4349415ab..3c3ca8625c4 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1360,9 +1360,6 @@ psutil-home-assistant==0.0.1 # homeassistant.components.systemmonitor psutil==6.1.0 -# homeassistant.components.androidtv -pure-python-adb[async]==0.3.0.dev0 - # homeassistant.components.pushbullet pushbullet.py==0.11.0 From 9fd23a6d30b4d40e052a6e605d037fbca59dd467 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 5 Dec 2024 08:41:53 +0100 Subject: [PATCH 0267/1198] Revert "Pin rpds-py to 0.21.0 to fix CI" (#132331) Revert "Pin rpds-py to 0.21.0 to fix CI (#132170)" This reverts commit 7e079303429335200da325c7830cc8a2232d323e. --- homeassistant/package_constraints.txt | 5 ----- script/gen_requirements_all.py | 5 ----- 2 files changed, 10 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 138b8bedcce..8617ed58ed5 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -205,8 +205,3 @@ async-timeout==4.0.3 # https://github.com/home-assistant/core/issues/122508 # https://github.com/home-assistant/core/issues/118004 aiofiles>=24.1.0 - -# 0.22.0 causes CI failures on Python 3.13 -# python3 -X dev -m pytest tests/components/matrix -# python3 -X dev -m pytest tests/components/zha -rpds-py==0.21.0 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index 450469096ea..97ffcac79a4 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -238,11 +238,6 @@ async-timeout==4.0.3 # https://github.com/home-assistant/core/issues/122508 # https://github.com/home-assistant/core/issues/118004 aiofiles>=24.1.0 - -# 0.22.0 causes CI failures on Python 3.13 -# python3 -X dev -m pytest tests/components/matrix -# python3 -X dev -m pytest tests/components/zha -rpds-py==0.21.0 """ GENERATED_MESSAGE = ( From 33ad27d569d022dd1b75eaa5a2d3c9f1321df8e3 Mon Sep 17 00:00:00 2001 From: Josef Zweck <24647999+zweckj@users.noreply.github.com> Date: Thu, 5 Dec 2024 10:28:57 +0100 Subject: [PATCH 0268/1198] Bump pylamarzocco to 1.3.2 (#132344) --- homeassistant/components/lamarzocco/__init__.py | 11 ++++++----- homeassistant/components/lamarzocco/config_flow.py | 12 ++++++------ homeassistant/components/lamarzocco/coordinator.py | 12 +++++------- homeassistant/components/lamarzocco/entity.py | 2 +- homeassistant/components/lamarzocco/manifest.json | 2 +- homeassistant/components/lamarzocco/number.py | 2 +- homeassistant/components/lamarzocco/select.py | 2 +- homeassistant/components/lamarzocco/sensor.py | 2 +- homeassistant/components/lamarzocco/switch.py | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/lamarzocco/conftest.py | 2 +- tests/components/lamarzocco/test_init.py | 3 +-- 13 files changed, 27 
insertions(+), 29 deletions(-) diff --git a/homeassistant/components/lamarzocco/__init__.py b/homeassistant/components/lamarzocco/__init__.py index a69b97242f3..b3021ef1543 100644 --- a/homeassistant/components/lamarzocco/__init__.py +++ b/homeassistant/components/lamarzocco/__init__.py @@ -3,9 +3,9 @@ import logging from packaging import version -from pylamarzocco.client_bluetooth import LaMarzoccoBluetoothClient -from pylamarzocco.client_cloud import LaMarzoccoCloudClient -from pylamarzocco.client_local import LaMarzoccoLocalClient +from pylamarzocco.clients.bluetooth import LaMarzoccoBluetoothClient +from pylamarzocco.clients.cloud import LaMarzoccoCloudClient +from pylamarzocco.clients.local import LaMarzoccoLocalClient from pylamarzocco.const import BT_MODEL_PREFIXES, FirmwareType from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful @@ -22,7 +22,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers import issue_registry as ir -from homeassistant.helpers.httpx_client import create_async_httpx_client +from homeassistant.helpers.aiohttp_client import async_create_clientsession from .const import CONF_USE_BLUETOOTH, DOMAIN from .coordinator import LaMarzoccoConfigEntry, LaMarzoccoUpdateCoordinator @@ -46,7 +46,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) - assert entry.unique_id serial = entry.unique_id - client = create_async_httpx_client(hass) + + client = async_create_clientsession(hass) cloud_client = LaMarzoccoCloudClient( username=entry.data[CONF_USERNAME], password=entry.data[CONF_PASSWORD], diff --git a/homeassistant/components/lamarzocco/config_flow.py b/homeassistant/components/lamarzocco/config_flow.py index 05dfcbc5196..5d927c6cc79 100644 --- a/homeassistant/components/lamarzocco/config_flow.py +++ b/homeassistant/components/lamarzocco/config_flow.py @@ -6,9 +6,9 @@ from collections.abc import Mapping import logging from typing import Any -from httpx import AsyncClient -from pylamarzocco.client_cloud import LaMarzoccoCloudClient -from pylamarzocco.client_local import LaMarzoccoLocalClient +from aiohttp import ClientSession +from pylamarzocco.clients.cloud import LaMarzoccoCloudClient +from pylamarzocco.clients.local import LaMarzoccoLocalClient from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful from pylamarzocco.models import LaMarzoccoDeviceInfo import voluptuous as vol @@ -37,7 +37,7 @@ from homeassistant.const import ( ) from homeassistant.core import callback from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.httpx_client import create_async_httpx_client +from homeassistant.helpers.aiohttp_client import async_create_clientsession from homeassistant.helpers.selector import ( SelectOptionDict, SelectSelector, @@ -58,7 +58,7 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 2 - _client: AsyncClient + _client: ClientSession def __init__(self) -> None: """Initialize the config flow.""" @@ -82,8 +82,8 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): **user_input, **self._discovered, } - self._client = create_async_httpx_client(self.hass) + self._client = async_create_clientsession(self.hass) cloud_client = LaMarzoccoCloudClient( username=data[CONF_USERNAME], password=data[CONF_PASSWORD], diff --git a/homeassistant/components/lamarzocco/coordinator.py b/homeassistant/components/lamarzocco/coordinator.py index 46a8e05745e..1281b11db02 100644 --- a/homeassistant/components/lamarzocco/coordinator.py +++ 
b/homeassistant/components/lamarzocco/coordinator.py @@ -8,12 +8,11 @@ import logging from time import time from typing import Any -from pylamarzocco.client_bluetooth import LaMarzoccoBluetoothClient -from pylamarzocco.client_cloud import LaMarzoccoCloudClient -from pylamarzocco.client_local import LaMarzoccoLocalClient +from pylamarzocco.clients.bluetooth import LaMarzoccoBluetoothClient +from pylamarzocco.clients.cloud import LaMarzoccoCloudClient +from pylamarzocco.clients.local import LaMarzoccoLocalClient +from pylamarzocco.devices.machine import LaMarzoccoMachine from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful -from pylamarzocco.lm_machine import LaMarzoccoMachine -from websockets.protocol import State from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_MODEL, CONF_NAME, EVENT_HOMEASSISTANT_STOP @@ -86,9 +85,8 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]): if ( self._local_client is not None and self._local_client.websocket is not None - and self._local_client.websocket.state is State.OPEN + and not self._local_client.websocket.closed ): - self._local_client.terminating = True await self._local_client.websocket.close() self.config_entry.async_on_unload( diff --git a/homeassistant/components/lamarzocco/entity.py b/homeassistant/components/lamarzocco/entity.py index 5542906d887..c3385eebd52 100644 --- a/homeassistant/components/lamarzocco/entity.py +++ b/homeassistant/components/lamarzocco/entity.py @@ -4,7 +4,7 @@ from collections.abc import Callable from dataclasses import dataclass from pylamarzocco.const import FirmwareType -from pylamarzocco.lm_machine import LaMarzoccoMachine +from pylamarzocco.devices.machine import LaMarzoccoMachine from homeassistant.const import CONF_ADDRESS, CONF_MAC from homeassistant.helpers.device_registry import ( diff --git a/homeassistant/components/lamarzocco/manifest.json b/homeassistant/components/lamarzocco/manifest.json index 43b1c7deb47..54413ccf28f 100644 --- a/homeassistant/components/lamarzocco/manifest.json +++ b/homeassistant/components/lamarzocco/manifest.json @@ -36,5 +36,5 @@ "integration_type": "device", "iot_class": "cloud_polling", "loggers": ["pylamarzocco"], - "requirements": ["pylamarzocco==1.2.12"] + "requirements": ["pylamarzocco==1.3.2"] } diff --git a/homeassistant/components/lamarzocco/number.py b/homeassistant/components/lamarzocco/number.py index f32607fd73b..feeb7e4a282 100644 --- a/homeassistant/components/lamarzocco/number.py +++ b/homeassistant/components/lamarzocco/number.py @@ -11,8 +11,8 @@ from pylamarzocco.const import ( PhysicalKey, PrebrewMode, ) +from pylamarzocco.devices.machine import LaMarzoccoMachine from pylamarzocco.exceptions import RequestNotSuccessful -from pylamarzocco.lm_machine import LaMarzoccoMachine from pylamarzocco.models import LaMarzoccoMachineConfig from homeassistant.components.number import ( diff --git a/homeassistant/components/lamarzocco/select.py b/homeassistant/components/lamarzocco/select.py index 637ef935979..e6b5f9a3d94 100644 --- a/homeassistant/components/lamarzocco/select.py +++ b/homeassistant/components/lamarzocco/select.py @@ -5,8 +5,8 @@ from dataclasses import dataclass from typing import Any from pylamarzocco.const import MachineModel, PrebrewMode, SmartStandbyMode, SteamLevel +from pylamarzocco.devices.machine import LaMarzoccoMachine from pylamarzocco.exceptions import RequestNotSuccessful -from pylamarzocco.lm_machine import LaMarzoccoMachine from pylamarzocco.models import LaMarzoccoMachineConfig 
from homeassistant.components.select import SelectEntity, SelectEntityDescription diff --git a/homeassistant/components/lamarzocco/sensor.py b/homeassistant/components/lamarzocco/sensor.py index 04b095e798c..d9e858b8191 100644 --- a/homeassistant/components/lamarzocco/sensor.py +++ b/homeassistant/components/lamarzocco/sensor.py @@ -4,7 +4,7 @@ from collections.abc import Callable from dataclasses import dataclass from pylamarzocco.const import BoilerType, MachineModel, PhysicalKey -from pylamarzocco.lm_machine import LaMarzoccoMachine +from pylamarzocco.devices.machine import LaMarzoccoMachine from homeassistant.components.sensor import ( SensorDeviceClass, diff --git a/homeassistant/components/lamarzocco/switch.py b/homeassistant/components/lamarzocco/switch.py index 4dc701c4c29..263bb5dc6ec 100644 --- a/homeassistant/components/lamarzocco/switch.py +++ b/homeassistant/components/lamarzocco/switch.py @@ -5,8 +5,8 @@ from dataclasses import dataclass from typing import Any from pylamarzocco.const import BoilerType +from pylamarzocco.devices.machine import LaMarzoccoMachine from pylamarzocco.exceptions import RequestNotSuccessful -from pylamarzocco.lm_machine import LaMarzoccoMachine from pylamarzocco.models import LaMarzoccoMachineConfig from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription diff --git a/requirements_all.txt b/requirements_all.txt index 5fe9f3950da..0e9b7e6d60b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2027,7 +2027,7 @@ pykwb==0.0.8 pylacrosse==0.4 # homeassistant.components.lamarzocco -pylamarzocco==1.2.12 +pylamarzocco==1.3.2 # homeassistant.components.lastfm pylast==5.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3c3ca8625c4..9b1787e40ba 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1635,7 +1635,7 @@ pykrakenapi==0.1.8 pykulersky==0.5.2 # homeassistant.components.lamarzocco -pylamarzocco==1.2.12 +pylamarzocco==1.3.2 # homeassistant.components.lastfm pylast==5.1.0 diff --git a/tests/components/lamarzocco/conftest.py b/tests/components/lamarzocco/conftest.py index d6d59cf9ebc..0bd3fb2a737 100644 --- a/tests/components/lamarzocco/conftest.py +++ b/tests/components/lamarzocco/conftest.py @@ -6,7 +6,7 @@ from unittest.mock import AsyncMock, MagicMock, patch from bleak.backends.device import BLEDevice from pylamarzocco.const import FirmwareType, MachineModel, SteamLevel -from pylamarzocco.lm_machine import LaMarzoccoMachine +from pylamarzocco.devices.machine import LaMarzoccoMachine from pylamarzocco.models import LaMarzoccoDeviceInfo import pytest diff --git a/tests/components/lamarzocco/test_init.py b/tests/components/lamarzocco/test_init.py index cb6b028bda0..80c038c4948 100644 --- a/tests/components/lamarzocco/test_init.py +++ b/tests/components/lamarzocco/test_init.py @@ -6,7 +6,6 @@ from pylamarzocco.const import FirmwareType from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful import pytest from syrupy import SnapshotAssertion -from websockets.protocol import State from homeassistant.components.lamarzocco.config_flow import CONF_MACHINE from homeassistant.components.lamarzocco.const import DOMAIN @@ -200,7 +199,7 @@ async def test_websocket_closed_on_unload( ) as local_client: client = local_client.return_value client.websocket = AsyncMock() - client.websocket.state = State.OPEN + client.websocket.closed = False await async_init_integration(hass, mock_config_entry) hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) await hass.async_block_till_done() 
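Note on the change above: the pylamarzocco 1.3.2 bump swaps the websockets-based connection check (`websocket.state is State.OPEN`) for aiohttp's `closed` flag, and the coordinator/test hunks rely on the client exposing an aiohttp websocket on a `websocket` attribute. The sketch below only illustrates that generic shutdown pattern; `DemoLocalClient`, the `ws_connect` URL, and the function names are placeholders for illustration and are not pylamarzocco API.

from __future__ import annotations

import asyncio

from aiohttp import ClientSession, ClientWebSocketResponse


class DemoLocalClient:
    """Placeholder client that keeps an aiohttp websocket open."""

    def __init__(self) -> None:
        self.websocket: ClientWebSocketResponse | None = None

    async def connect(self, session: ClientSession, url: str) -> None:
        # ws_connect returns a ClientWebSocketResponse, which exposes `closed`.
        self.websocket = await session.ws_connect(url)


async def shutdown(client: DemoLocalClient) -> None:
    """Close the websocket if it is still open, mirroring the unload hook above."""
    if client.websocket is not None and not client.websocket.closed:
        await client.websocket.close()


async def main() -> None:
    client = DemoLocalClient()
    async with ClientSession() as session:
        # Placeholder URL; any reachable websocket endpoint would do.
        await client.connect(session, "wss://echo.websocket.org")
        await shutdown(client)


if __name__ == "__main__":
    asyncio.run(main())
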
From 13a59dee5a58e0a709019756ae164c98a0a3c8e5 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 5 Dec 2024 11:26:11 +0100 Subject: [PATCH 0269/1198] Remove dead code in fritzbox_callmonitor (#132353) --- .../fritzbox_callmonitor/config_flow.py | 20 ++++--------------- 1 file changed, 4 insertions(+), 16 deletions(-) diff --git a/homeassistant/components/fritzbox_callmonitor/config_flow.py b/homeassistant/components/fritzbox_callmonitor/config_flow.py index 7bd0eacb66a..8435eff3e18 100644 --- a/homeassistant/components/fritzbox_callmonitor/config_flow.py +++ b/homeassistant/components/fritzbox_callmonitor/config_flow.py @@ -12,19 +12,12 @@ from requests.exceptions import ConnectionError as RequestsConnectionError import voluptuous as vol from homeassistant.config_entries import ( - SOURCE_IMPORT, ConfigEntry, ConfigFlow, ConfigFlowResult, OptionsFlow, ) -from homeassistant.const import ( - CONF_HOST, - CONF_NAME, - CONF_PASSWORD, - CONF_PORT, - CONF_USERNAME, -) +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME from homeassistant.core import callback from .base import FritzBoxPhonebook @@ -170,16 +163,11 @@ class FritzBoxCallMonitorConfigFlow(ConfigFlow, domain=DOMAIN): if result != ConnectResult.SUCCESS: return self.async_abort(reason=result) - if self.context["source"] == SOURCE_IMPORT: - self._phonebook_id = user_input[CONF_PHONEBOOK] - self._phonebook_name = user_input[CONF_NAME] - - elif len(self._phonebook_ids) > 1: + if len(self._phonebook_ids) > 1: return await self.async_step_phonebook() - else: - self._phonebook_id = DEFAULT_PHONEBOOK - self._phonebook_name = await self._get_name_of_phonebook(self._phonebook_id) + self._phonebook_id = DEFAULT_PHONEBOOK + self._phonebook_name = await self._get_name_of_phonebook(self._phonebook_id) await self.async_set_unique_id(f"{self._serial_number}-{self._phonebook_id}") self._abort_if_unique_id_configured() From 7de9e9d37a8d67956c9cfc1cba5e4f09259c397a Mon Sep 17 00:00:00 2001 From: Diogo Gomes Date: Thu, 5 Dec 2024 17:45:04 +0000 Subject: [PATCH 0270/1198] Removes references to croniter from utility_meter (#132364) remove croniter --- homeassistant/components/utility_meter/__init__.py | 13 ++++++++----- .../components/utility_meter/manifest.json | 1 - 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/utility_meter/__init__.py b/homeassistant/components/utility_meter/__init__.py index c6a8635f831..aac31e085a0 100644 --- a/homeassistant/components/utility_meter/__init__.py +++ b/homeassistant/components/utility_meter/__init__.py @@ -1,9 +1,9 @@ """Support for tracking consumption over given periods of time.""" -from datetime import timedelta +from datetime import datetime, timedelta import logging -from croniter import croniter +from cronsim import CronSim, CronSimError import voluptuous as vol from homeassistant.components.select import DOMAIN as SELECT_DOMAIN @@ -47,9 +47,12 @@ DEFAULT_OFFSET = timedelta(hours=0) def validate_cron_pattern(pattern): """Check that the pattern is well-formed.""" - if croniter.is_valid(pattern): - return pattern - raise vol.Invalid("Invalid pattern") + try: + CronSim(pattern, datetime(2020, 1, 1)) # any date will do + except CronSimError as err: + _LOGGER.error("Invalid cron pattern %s: %s", pattern, err) + raise vol.Invalid("Invalid pattern") from err + return pattern def period_or_cron(config): diff --git a/homeassistant/components/utility_meter/manifest.json 
b/homeassistant/components/utility_meter/manifest.json index 31a2d4e9584..5167c51469d 100644 --- a/homeassistant/components/utility_meter/manifest.json +++ b/homeassistant/components/utility_meter/manifest.json @@ -6,7 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/utility_meter", "integration_type": "helper", "iot_class": "local_push", - "loggers": ["croniter"], "quality_scale": "internal", "requirements": ["cronsim==2.6"] } From c38a33d3304cd219932ca22e6483723de3389bd0 Mon Sep 17 00:00:00 2001 From: Artur Pragacz <49985303+arturpragacz@users.noreply.github.com> Date: Thu, 5 Dec 2024 18:48:15 +0100 Subject: [PATCH 0271/1198] Fix missing AV info in Onkyo (#132328) Add additional AV info to Onkyo --- homeassistant/components/onkyo/media_player.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/homeassistant/components/onkyo/media_player.py b/homeassistant/components/onkyo/media_player.py index 41e36a7f237..24d63c0d9e4 100644 --- a/homeassistant/components/onkyo/media_player.py +++ b/homeassistant/components/onkyo/media_player.py @@ -111,6 +111,7 @@ AUDIO_INFORMATION_MAPPING = [ "precision_quartz_lock_system", "auto_phase_control_delay", "auto_phase_control_phase", + "upmix_mode", ] VIDEO_INFORMATION_MAPPING = [ @@ -123,6 +124,7 @@ VIDEO_INFORMATION_MAPPING = [ "output_color_schema", "output_color_depth", "picture_mode", + "input_hdr", ] ISSUE_URL_PLACEHOLDER = "/config/integrations/dashboard/add?domain=onkyo" From 39abeb4600fee1ec15e3d2125a371ec35e4bd2df Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 5 Dec 2024 20:24:21 +0100 Subject: [PATCH 0272/1198] Use typed config entry in husqvarna_automower (#132346) --- .../components/husqvarna_automower/__init__.py | 6 ++++-- .../components/husqvarna_automower/coordinator.py | 11 ++++++++--- .../components/husqvarna_automower/diagnostics.py | 3 +-- 3 files changed, 13 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/husqvarna_automower/__init__.py b/homeassistant/components/husqvarna_automower/__init__.py index 2cb2ebc1bd3..da7965250cd 100644 --- a/homeassistant/components/husqvarna_automower/__init__.py +++ b/homeassistant/components/husqvarna_automower/__init__.py @@ -89,7 +89,9 @@ async def async_unload_entry(hass: HomeAssistant, entry: AutomowerConfigEntry) - def cleanup_removed_devices( - hass: HomeAssistant, config_entry: ConfigEntry, available_devices: list[str] + hass: HomeAssistant, + config_entry: AutomowerConfigEntry, + available_devices: list[str], ) -> None: """Cleanup entity and device registry from removed devices.""" device_reg = dr.async_get(hass) @@ -104,7 +106,7 @@ def cleanup_removed_devices( def remove_work_area_entities( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: AutomowerConfigEntry, removed_work_areas: set[int], mower_id: str, ) -> None: diff --git a/homeassistant/components/husqvarna_automower/coordinator.py b/homeassistant/components/husqvarna_automower/coordinator.py index 5f1fa022718..57be02e7066 100644 --- a/homeassistant/components/husqvarna_automower/coordinator.py +++ b/homeassistant/components/husqvarna_automower/coordinator.py @@ -1,8 +1,11 @@ """Data UpdateCoordinator for the Husqvarna Automower integration.""" +from __future__ import annotations + import asyncio from datetime import timedelta import logging +from typing import TYPE_CHECKING from aioautomower.exceptions import ( ApiException, @@ -13,13 +16,15 @@ from aioautomower.exceptions import ( from aioautomower.model import MowerAttributes from 
aioautomower.session import AutomowerSession -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN +if TYPE_CHECKING: + from . import AutomowerConfigEntry + _LOGGER = logging.getLogger(__name__) MAX_WS_RECONNECT_TIME = 600 SCAN_INTERVAL = timedelta(minutes=8) @@ -29,7 +34,7 @@ DEFAULT_RECONNECT_TIME = 2 # Define a default reconnect time class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttributes]]): """Class to manage fetching Husqvarna data.""" - config_entry: ConfigEntry + config_entry: AutomowerConfigEntry def __init__(self, hass: HomeAssistant, api: AutomowerSession) -> None: """Initialize data updater.""" @@ -64,7 +69,7 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttrib async def client_listen( self, hass: HomeAssistant, - entry: ConfigEntry, + entry: AutomowerConfigEntry, automower_client: AutomowerSession, ) -> None: """Listen with the client.""" diff --git a/homeassistant/components/husqvarna_automower/diagnostics.py b/homeassistant/components/husqvarna_automower/diagnostics.py index 658f6f94445..ceeec0f3e0d 100644 --- a/homeassistant/components/husqvarna_automower/diagnostics.py +++ b/homeassistant/components/husqvarna_automower/diagnostics.py @@ -6,7 +6,6 @@ import logging from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntry @@ -26,7 +25,7 @@ _LOGGER = logging.getLogger(__name__) async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: AutomowerConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" return async_redact_data(entry.as_dict(), TO_REDACT) From 17afe1ae519c2bd296aa423fe361be0a9364c5da Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 5 Dec 2024 20:32:59 +0100 Subject: [PATCH 0273/1198] Remove deprecated supported features warning in FanEntity (#132369) --- homeassistant/components/baf/fan.py | 2 +- homeassistant/components/balboa/fan.py | 2 +- homeassistant/components/comfoconnect/fan.py | 2 +- homeassistant/components/deconz/fan.py | 1 - homeassistant/components/demo/fan.py | 1 - homeassistant/components/esphome/fan.py | 1 - homeassistant/components/fan/__init__.py | 95 ------ homeassistant/components/fjaraskupan/fan.py | 2 +- homeassistant/components/freedompro/fan.py | 1 - homeassistant/components/group/fan.py | 1 - .../components/homekit_controller/fan.py | 1 - homeassistant/components/insteon/fan.py | 1 - homeassistant/components/intellifire/fan.py | 1 - homeassistant/components/isy994/fan.py | 1 - homeassistant/components/knx/fan.py | 1 - homeassistant/components/lutron/fan.py | 1 - homeassistant/components/lutron_caseta/fan.py | 1 - homeassistant/components/matter/fan.py | 2 +- homeassistant/components/modbus/fan.py | 2 - homeassistant/components/modern_forms/fan.py | 1 - homeassistant/components/mqtt/fan.py | 1 - homeassistant/components/netatmo/fan.py | 1 - homeassistant/components/rabbitair/fan.py | 1 - homeassistant/components/renson/fan.py | 1 - homeassistant/components/smartthings/fan.py | 1 - 
homeassistant/components/smarty/fan.py | 1 - homeassistant/components/snooz/fan.py | 1 - homeassistant/components/switch_as_x/fan.py | 1 - homeassistant/components/tasmota/fan.py | 1 - homeassistant/components/template/fan.py | 1 - homeassistant/components/tolo/fan.py | 1 - homeassistant/components/tplink/fan.py | 1 - homeassistant/components/tradfri/fan.py | 1 - homeassistant/components/tuya/fan.py | 1 - homeassistant/components/vallox/fan.py | 1 - homeassistant/components/vesync/fan.py | 1 - homeassistant/components/vicare/fan.py | 1 - homeassistant/components/wemo/fan.py | 1 - homeassistant/components/wilight/fan.py | 1 - homeassistant/components/xiaomi_miio/fan.py | 1 - homeassistant/components/zha/fan.py | 1 - homeassistant/components/zwave_js/fan.py | 1 - homeassistant/components/zwave_me/fan.py | 1 - tests/components/fan/test_init.py | 311 +----------------- 44 files changed, 6 insertions(+), 448 deletions(-) diff --git a/homeassistant/components/baf/fan.py b/homeassistant/components/baf/fan.py index d0ba668373a..8f7aab40b79 100644 --- a/homeassistant/components/baf/fan.py +++ b/homeassistant/components/baf/fan.py @@ -46,7 +46,7 @@ class BAFFan(BAFEntity, FanEntity): | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False + _attr_preset_modes = [PRESET_MODE_AUTO] _attr_speed_count = SPEED_COUNT _attr_name = None diff --git a/homeassistant/components/balboa/fan.py b/homeassistant/components/balboa/fan.py index 67c1d9a9a62..3ecfec53a1e 100644 --- a/homeassistant/components/balboa/fan.py +++ b/homeassistant/components/balboa/fan.py @@ -38,7 +38,7 @@ class BalboaPumpFanEntity(BalboaEntity, FanEntity): | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False + _attr_translation_key = "pump" def __init__(self, control: SpaControl) -> None: diff --git a/homeassistant/components/comfoconnect/fan.py b/homeassistant/components/comfoconnect/fan.py index 4e30b3ee3dc..2295fdb4e8e 100644 --- a/homeassistant/components/comfoconnect/fan.py +++ b/homeassistant/components/comfoconnect/fan.py @@ -68,7 +68,7 @@ class ComfoConnectFan(FanEntity): | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False + _attr_preset_modes = PRESET_MODES current_speed: float | None = None diff --git a/homeassistant/components/deconz/fan.py b/homeassistant/components/deconz/fan.py index 48f29cf9b72..26e4d3328b8 100644 --- a/homeassistant/components/deconz/fan.py +++ b/homeassistant/components/deconz/fan.py @@ -65,7 +65,6 @@ class DeconzFan(DeconzDevice[Light], FanEntity): | FanEntityFeature.TURN_ON | FanEntityFeature.TURN_OFF ) - _enable_turn_on_off_backwards_compatibility = False def __init__(self, device: Light, hub: DeconzHub) -> None: """Set up fan.""" diff --git a/homeassistant/components/demo/fan.py b/homeassistant/components/demo/fan.py index 064ee3bb4f7..42e7f9e2434 100644 --- a/homeassistant/components/demo/fan.py +++ b/homeassistant/components/demo/fan.py @@ -100,7 +100,6 @@ class BaseDemoFan(FanEntity): _attr_should_poll = False _attr_translation_key = "demo" - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/esphome/fan.py b/homeassistant/components/esphome/fan.py index 454c5edf030..c09145c17b5 100644 --- a/homeassistant/components/esphome/fan.py +++ b/homeassistant/components/esphome/fan.py @@ -45,7 +45,6 @@ class EsphomeFan(EsphomeEntity[FanInfo, FanState], FanEntity): """A fan 
implementation for ESPHome.""" _supports_speed_levels: bool = True - _enable_turn_on_off_backwards_compatibility = False async def async_set_percentage(self, percentage: int) -> None: """Set the speed percentage of the fan.""" diff --git a/homeassistant/components/fan/__init__.py b/homeassistant/components/fan/__init__.py index 71fb9c53353..863ae705603 100644 --- a/homeassistant/components/fan/__init__.py +++ b/homeassistant/components/fan/__init__.py @@ -2,7 +2,6 @@ from __future__ import annotations -import asyncio from datetime import timedelta from enum import IntFlag import functools as ft @@ -25,7 +24,6 @@ from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity import ToggleEntity, ToggleEntityDescription from homeassistant.helpers.entity_component import EntityComponent -from homeassistant.helpers.entity_platform import EntityPlatform from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass from homeassistant.util.hass_dict import HassKey @@ -219,99 +217,6 @@ class FanEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): _attr_speed_count: int = 100 _attr_supported_features: FanEntityFeature = FanEntityFeature(0) - __mod_supported_features: FanEntityFeature = FanEntityFeature(0) - # Integrations should set `_enable_turn_on_off_backwards_compatibility` to False - # once migrated and set the feature flags TURN_ON/TURN_OFF as needed. - _enable_turn_on_off_backwards_compatibility: bool = True - - def __getattribute__(self, name: str, /) -> Any: - """Get attribute. - - Modify return of `supported_features` to - include `_mod_supported_features` if attribute is set. - """ - if name != "supported_features": - return super().__getattribute__(name) - - # Convert the supported features to ClimateEntityFeature. - # Remove this compatibility shim in 2025.1 or later. - _supported_features: FanEntityFeature = super().__getattribute__( - "supported_features" - ) - _mod_supported_features: FanEntityFeature = super().__getattribute__( - "_FanEntity__mod_supported_features" - ) - if type(_supported_features) is int: # noqa: E721 - _features = FanEntityFeature(_supported_features) - self._report_deprecated_supported_features_values(_features) - else: - _features = _supported_features - - if not _mod_supported_features: - return _features - - # Add automatically calculated FanEntityFeature.TURN_OFF/TURN_ON to - # supported features and return it - return _features | _mod_supported_features - - @callback - def add_to_platform_start( - self, - hass: HomeAssistant, - platform: EntityPlatform, - parallel_updates: asyncio.Semaphore | None, - ) -> None: - """Start adding an entity to a platform.""" - super().add_to_platform_start(hass, platform, parallel_updates) - - def _report_turn_on_off(feature: str, method: str) -> None: - """Log warning not implemented turn on/off feature.""" - report_issue = self._suggest_report_issue() - message = ( - "Entity %s (%s) does not set FanEntityFeature.%s" - " but implements the %s method. Please %s" - ) - _LOGGER.warning( - message, - self.entity_id, - type(self), - feature, - method, - report_issue, - ) - - # Adds FanEntityFeature.TURN_OFF/TURN_ON depending on service calls implemented - # This should be removed in 2025.2. 
- if self._enable_turn_on_off_backwards_compatibility is False: - # Return if integration has migrated already - return - - supported_features = self.supported_features - if supported_features & (FanEntityFeature.TURN_ON | FanEntityFeature.TURN_OFF): - # The entity supports both turn_on and turn_off, the backwards compatibility - # checks are not needed - return - - if not supported_features & FanEntityFeature.TURN_OFF and ( - type(self).async_turn_off is not ToggleEntity.async_turn_off - or type(self).turn_off is not ToggleEntity.turn_off - ): - # turn_off implicitly supported by implementing turn_off method - _report_turn_on_off("TURN_OFF", "turn_off") - self.__mod_supported_features |= ( # pylint: disable=unused-private-member - FanEntityFeature.TURN_OFF - ) - - if not supported_features & FanEntityFeature.TURN_ON and ( - type(self).async_turn_on is not FanEntity.async_turn_on - or type(self).turn_on is not FanEntity.turn_on - ): - # turn_on implicitly supported by implementing turn_on method - _report_turn_on_off("TURN_ON", "turn_on") - self.__mod_supported_features |= ( # pylint: disable=unused-private-member - FanEntityFeature.TURN_ON - ) - def set_percentage(self, percentage: int) -> None: """Set the speed of the fan, as a percentage.""" raise NotImplementedError diff --git a/homeassistant/components/fjaraskupan/fan.py b/homeassistant/components/fjaraskupan/fan.py index 864160cb464..540a7dd410d 100644 --- a/homeassistant/components/fjaraskupan/fan.py +++ b/homeassistant/components/fjaraskupan/fan.py @@ -71,7 +71,7 @@ class Fan(CoordinatorEntity[FjaraskupanCoordinator], FanEntity): | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False + _attr_has_entity_name = True _attr_name = None diff --git a/homeassistant/components/freedompro/fan.py b/homeassistant/components/freedompro/fan.py index 698d57d1001..d21ede9bad3 100644 --- a/homeassistant/components/freedompro/fan.py +++ b/homeassistant/components/freedompro/fan.py @@ -40,7 +40,6 @@ class FreedomproFan(CoordinatorEntity[FreedomproDataUpdateCoordinator], FanEntit _attr_name = None _attr_is_on = False _attr_percentage = 0 - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/group/fan.py b/homeassistant/components/group/fan.py index 03341b0f46b..87d9cb281f4 100644 --- a/homeassistant/components/group/fan.py +++ b/homeassistant/components/group/fan.py @@ -109,7 +109,6 @@ class FanGroup(GroupEntity, FanEntity): """Representation of a FanGroup.""" _attr_available: bool = False - _enable_turn_on_off_backwards_compatibility = False def __init__(self, unique_id: str | None, name: str, entities: list[str]) -> None: """Initialize a FanGroup entity.""" diff --git a/homeassistant/components/homekit_controller/fan.py b/homeassistant/components/homekit_controller/fan.py index 63de146a024..2ae534099ae 100644 --- a/homeassistant/components/homekit_controller/fan.py +++ b/homeassistant/components/homekit_controller/fan.py @@ -42,7 +42,6 @@ class BaseHomeKitFan(HomeKitEntity, FanEntity): # This must be set in subclasses to the name of a boolean characteristic # that controls whether the fan is on or off. 
on_characteristic: str - _enable_turn_on_off_backwards_compatibility = False @callback def _async_reconfigure(self) -> None: diff --git a/homeassistant/components/insteon/fan.py b/homeassistant/components/insteon/fan.py index c13e22bf8c5..0f1c70b9ea8 100644 --- a/homeassistant/components/insteon/fan.py +++ b/homeassistant/components/insteon/fan.py @@ -56,7 +56,6 @@ class InsteonFanEntity(InsteonEntity, FanEntity): | FanEntityFeature.TURN_ON ) _attr_speed_count = 3 - _enable_turn_on_off_backwards_compatibility = False @property def percentage(self) -> int | None: diff --git a/homeassistant/components/intellifire/fan.py b/homeassistant/components/intellifire/fan.py index dc2fc279a5d..c5bec07faaa 100644 --- a/homeassistant/components/intellifire/fan.py +++ b/homeassistant/components/intellifire/fan.py @@ -81,7 +81,6 @@ class IntellifireFan(IntellifireEntity, FanEntity): | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False @property def is_on(self) -> bool: diff --git a/homeassistant/components/isy994/fan.py b/homeassistant/components/isy994/fan.py index 1d8af78f83c..fc0406e2d5f 100644 --- a/homeassistant/components/isy994/fan.py +++ b/homeassistant/components/isy994/fan.py @@ -53,7 +53,6 @@ class ISYFanEntity(ISYNodeEntity, FanEntity): | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False @property def percentage(self) -> int | None: diff --git a/homeassistant/components/knx/fan.py b/homeassistant/components/knx/fan.py index ce17517b970..75d91e48048 100644 --- a/homeassistant/components/knx/fan.py +++ b/homeassistant/components/knx/fan.py @@ -43,7 +43,6 @@ class KNXFan(KnxYamlEntity, FanEntity): """Representation of a KNX fan.""" _device: XknxFan - _enable_turn_on_off_backwards_compatibility = False def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize of KNX fan.""" diff --git a/homeassistant/components/lutron/fan.py b/homeassistant/components/lutron/fan.py index dc881b393de..7db8b12c8d0 100644 --- a/homeassistant/components/lutron/fan.py +++ b/homeassistant/components/lutron/fan.py @@ -51,7 +51,6 @@ class LutronFan(LutronDevice, FanEntity): ) _lutron_device: Output _prev_percentage: int | None = None - _enable_turn_on_off_backwards_compatibility = False def set_percentage(self, percentage: int) -> None: """Set the speed of the fan, as a percentage.""" diff --git a/homeassistant/components/lutron_caseta/fan.py b/homeassistant/components/lutron_caseta/fan.py index e2bf7f15098..69167929e14 100644 --- a/homeassistant/components/lutron_caseta/fan.py +++ b/homeassistant/components/lutron_caseta/fan.py @@ -50,7 +50,6 @@ class LutronCasetaFan(LutronCasetaUpdatableEntity, FanEntity): | FanEntityFeature.TURN_ON ) _attr_speed_count = len(ORDERED_NAMED_FAN_SPEEDS) - _enable_turn_on_off_backwards_compatibility = False @property def percentage(self) -> int | None: diff --git a/homeassistant/components/matter/fan.py b/homeassistant/components/matter/fan.py index 51c2fb0c882..593693dbbf9 100644 --- a/homeassistant/components/matter/fan.py +++ b/homeassistant/components/matter/fan.py @@ -58,7 +58,7 @@ class MatterFan(MatterEntity, FanEntity): _last_known_preset_mode: str | None = None _last_known_percentage: int = 0 - _enable_turn_on_off_backwards_compatibility = False + _feature_map: int | None = None _platform_translation_key = "fan" diff --git a/homeassistant/components/modbus/fan.py b/homeassistant/components/modbus/fan.py index 5d12fe37fd1..bed8ff102bb 100644 
--- a/homeassistant/components/modbus/fan.py +++ b/homeassistant/components/modbus/fan.py @@ -38,8 +38,6 @@ async def async_setup_platform( class ModbusFan(BaseSwitch, FanEntity): """Class representing a Modbus fan.""" - _enable_turn_on_off_backwards_compatibility = False - def __init__( self, hass: HomeAssistant, hub: ModbusHub, config: dict[str, Any] ) -> None: diff --git a/homeassistant/components/modern_forms/fan.py b/homeassistant/components/modern_forms/fan.py index a599c5b6dd6..988edcb60e5 100644 --- a/homeassistant/components/modern_forms/fan.py +++ b/homeassistant/components/modern_forms/fan.py @@ -78,7 +78,6 @@ class ModernFormsFanEntity(FanEntity, ModernFormsDeviceEntity): | FanEntityFeature.TURN_ON ) _attr_translation_key = "fan" - _enable_turn_on_off_backwards_compatibility = False def __init__( self, entry_id: str, coordinator: ModernFormsDataUpdateCoordinator diff --git a/homeassistant/components/mqtt/fan.py b/homeassistant/components/mqtt/fan.py index b3c0f22789c..4d2e764a0d5 100644 --- a/homeassistant/components/mqtt/fan.py +++ b/homeassistant/components/mqtt/fan.py @@ -226,7 +226,6 @@ class MqttFan(MqttEntity, FanEntity): _optimistic_preset_mode: bool _payload: dict[str, Any] _speed_range: tuple[int, int] - _enable_turn_on_off_backwards_compatibility = False @staticmethod def config_schema() -> VolSchemaType: diff --git a/homeassistant/components/netatmo/fan.py b/homeassistant/components/netatmo/fan.py index 8610882a453..71a8c548622 100644 --- a/homeassistant/components/netatmo/fan.py +++ b/homeassistant/components/netatmo/fan.py @@ -51,7 +51,6 @@ class NetatmoFan(NetatmoModuleEntity, FanEntity): _attr_configuration_url = CONF_URL_CONTROL _attr_name = None device: NaModules.Fan - _enable_turn_on_off_backwards_compatibility = False def __init__(self, netatmo_device: NetatmoDevice) -> None: """Initialize of Netatmo fan.""" diff --git a/homeassistant/components/rabbitair/fan.py b/homeassistant/components/rabbitair/fan.py index ba1896cba2f..cfbee0be67c 100644 --- a/homeassistant/components/rabbitair/fan.py +++ b/homeassistant/components/rabbitair/fan.py @@ -55,7 +55,6 @@ class RabbitAirFanEntity(RabbitAirBaseEntity, FanEntity): | FanEntityFeature.TURN_ON | FanEntityFeature.TURN_OFF ) - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/renson/fan.py b/homeassistant/components/renson/fan.py index 44bea28ce3c..56b3655ef94 100644 --- a/homeassistant/components/renson/fan.py +++ b/homeassistant/components/renson/fan.py @@ -127,7 +127,6 @@ class RensonFan(RensonEntity, FanEntity): | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__(self, api: RensonVentilation, coordinator: RensonCoordinator) -> None: """Initialize the Renson fan.""" diff --git a/homeassistant/components/smartthings/fan.py b/homeassistant/components/smartthings/fan.py index 131cccdd869..61e30589273 100644 --- a/homeassistant/components/smartthings/fan.py +++ b/homeassistant/components/smartthings/fan.py @@ -70,7 +70,6 @@ class SmartThingsFan(SmartThingsEntity, FanEntity): """Define a SmartThings Fan.""" _attr_speed_count = int_states_in_range(SPEED_RANGE) - _enable_turn_on_off_backwards_compatibility = False def __init__(self, device): """Init the class.""" diff --git a/homeassistant/components/smarty/fan.py b/homeassistant/components/smarty/fan.py index 378585a33e1..2804f14ee15 100644 --- a/homeassistant/components/smarty/fan.py +++ b/homeassistant/components/smarty/fan.py @@ -48,7 
+48,6 @@ class SmartyFan(SmartyEntity, FanEntity): | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator: SmartyCoordinator) -> None: """Initialize the entity.""" diff --git a/homeassistant/components/snooz/fan.py b/homeassistant/components/snooz/fan.py index 8c721432709..bfe773b4780 100644 --- a/homeassistant/components/snooz/fan.py +++ b/homeassistant/components/snooz/fan.py @@ -83,7 +83,6 @@ class SnoozFan(FanEntity, RestoreEntity): _attr_should_poll = False _is_on: bool | None = None _percentage: int | None = None - _enable_turn_on_off_backwards_compatibility = False def __init__(self, data: SnoozConfigurationData) -> None: """Initialize a Snooz fan entity.""" diff --git a/homeassistant/components/switch_as_x/fan.py b/homeassistant/components/switch_as_x/fan.py index 91d3a4d119a..858379e71df 100644 --- a/homeassistant/components/switch_as_x/fan.py +++ b/homeassistant/components/switch_as_x/fan.py @@ -46,7 +46,6 @@ class FanSwitch(BaseToggleEntity, FanEntity): """Represents a Switch as a Fan.""" _attr_supported_features = FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON - _enable_turn_on_off_backwards_compatibility = False @property def is_on(self) -> bool | None: diff --git a/homeassistant/components/tasmota/fan.py b/homeassistant/components/tasmota/fan.py index 15664201d99..e927bd6ad72 100644 --- a/homeassistant/components/tasmota/fan.py +++ b/homeassistant/components/tasmota/fan.py @@ -72,7 +72,6 @@ class TasmotaFan( ) _fan_speed = tasmota_const.FAN_SPEED_MEDIUM _tasmota_entity: tasmota_fan.TasmotaFan - _enable_turn_on_off_backwards_compatibility = False def __init__(self, **kwds: Any) -> None: """Initialize the Tasmota fan.""" diff --git a/homeassistant/components/template/fan.py b/homeassistant/components/template/fan.py index cedd7d0d725..7720ef7e1b3 100644 --- a/homeassistant/components/template/fan.py +++ b/homeassistant/components/template/fan.py @@ -124,7 +124,6 @@ class TemplateFan(TemplateEntity, FanEntity): """A template fan component.""" _attr_should_poll = False - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/tolo/fan.py b/homeassistant/components/tolo/fan.py index 9b62346a83b..9e48778b507 100644 --- a/homeassistant/components/tolo/fan.py +++ b/homeassistant/components/tolo/fan.py @@ -29,7 +29,6 @@ class ToloFan(ToloSaunaCoordinatorEntity, FanEntity): _attr_translation_key = "fan" _attr_supported_features = FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON - _enable_turn_on_off_backwards_compatibility = False def __init__( self, coordinator: ToloSaunaUpdateCoordinator, entry: ConfigEntry diff --git a/homeassistant/components/tplink/fan.py b/homeassistant/components/tplink/fan.py index f90eadbc531..64ad01eb671 100644 --- a/homeassistant/components/tplink/fan.py +++ b/homeassistant/components/tplink/fan.py @@ -64,7 +64,6 @@ class TPLinkFanEntity(CoordinatedTPLinkEntity, FanEntity): | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/tradfri/fan.py b/homeassistant/components/tradfri/fan.py index 75616607ee8..3f45ee3e1eb 100644 --- a/homeassistant/components/tradfri/fan.py +++ b/homeassistant/components/tradfri/fan.py @@ -69,7 +69,6 @@ class TradfriAirPurifierFan(TradfriBaseEntity, FanEntity): # ... 
with step size 1 # 50 = Max _attr_speed_count = ATTR_MAX_FAN_STEPS - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/tuya/fan.py b/homeassistant/components/tuya/fan.py index 4a6de1cae09..ffab9efdde8 100644 --- a/homeassistant/components/tuya/fan.py +++ b/homeassistant/components/tuya/fan.py @@ -66,7 +66,6 @@ class TuyaFanEntity(TuyaEntity, FanEntity): _speeds: EnumTypeData | None = None _switch: DPCode | None = None _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/vallox/fan.py b/homeassistant/components/vallox/fan.py index 5fac46177cb..3a21ef060a7 100644 --- a/homeassistant/components/vallox/fan.py +++ b/homeassistant/components/vallox/fan.py @@ -83,7 +83,6 @@ class ValloxFanEntity(ValloxEntity, FanEntity): | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/vesync/fan.py b/homeassistant/components/vesync/fan.py index 098a17e90f0..5be6a06e1d0 100644 --- a/homeassistant/components/vesync/fan.py +++ b/homeassistant/components/vesync/fan.py @@ -95,7 +95,6 @@ class VeSyncFanHA(VeSyncDevice, FanEntity): ) _attr_name = None _attr_translation_key = "vesync" - _enable_turn_on_off_backwards_compatibility = False def __init__(self, fan) -> None: """Initialize the VeSync fan device.""" diff --git a/homeassistant/components/vicare/fan.py b/homeassistant/components/vicare/fan.py index 1800704a16f..6e8513a1f7e 100644 --- a/homeassistant/components/vicare/fan.py +++ b/homeassistant/components/vicare/fan.py @@ -125,7 +125,6 @@ class ViCareFan(ViCareEntity, FanEntity): _attr_speed_count = len(ORDERED_NAMED_FAN_SPEEDS) _attr_supported_features = FanEntityFeature.SET_SPEED _attr_translation_key = "ventilation" - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/wemo/fan.py b/homeassistant/components/wemo/fan.py index f9d3270aaa0..42dae679aa5 100644 --- a/homeassistant/components/wemo/fan.py +++ b/homeassistant/components/wemo/fan.py @@ -81,7 +81,6 @@ class WemoHumidifier(WemoBinaryStateEntity, FanEntity): ) wemo: Humidifier _last_fan_on_mode: FanMode - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator: DeviceCoordinator) -> None: """Initialize the WeMo switch.""" diff --git a/homeassistant/components/wilight/fan.py b/homeassistant/components/wilight/fan.py index 71f1098603b..a14198e3b5d 100644 --- a/homeassistant/components/wilight/fan.py +++ b/homeassistant/components/wilight/fan.py @@ -64,7 +64,6 @@ class WiLightFan(WiLightDevice, FanEntity): | FanEntityFeature.TURN_ON | FanEntityFeature.TURN_OFF ) - _enable_turn_on_off_backwards_compatibility = False def __init__(self, api_device: PyWiLightDevice, index: str, item_name: str) -> None: """Initialize the device.""" diff --git a/homeassistant/components/xiaomi_miio/fan.py b/homeassistant/components/xiaomi_miio/fan.py index 81ca38eb053..e1de3f56252 100644 --- a/homeassistant/components/xiaomi_miio/fan.py +++ b/homeassistant/components/xiaomi_miio/fan.py @@ -300,7 +300,6 @@ class XiaomiGenericDevice(XiaomiCoordinatedMiioEntity, FanEntity): """Representation of a generic Xiaomi device.""" _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__(self, device, entry, unique_id, coordinator): """Initialize the generic Xiaomi device.""" diff --git a/homeassistant/components/zha/fan.py 
b/homeassistant/components/zha/fan.py index 767c0d4cfb7..73b23e97387 100644 --- a/homeassistant/components/zha/fan.py +++ b/homeassistant/components/zha/fan.py @@ -47,7 +47,6 @@ class ZhaFan(FanEntity, ZHAEntity): """Representation of a ZHA fan.""" _attr_translation_key: str = "fan" - _enable_turn_on_off_backwards_compatibility = False def __init__(self, entity_data: EntityData) -> None: """Initialize the ZHA fan.""" diff --git a/homeassistant/components/zwave_js/fan.py b/homeassistant/components/zwave_js/fan.py index 37d3fc57886..d83132e4b95 100644 --- a/homeassistant/components/zwave_js/fan.py +++ b/homeassistant/components/zwave_js/fan.py @@ -83,7 +83,6 @@ class ZwaveFan(ZWaveBaseEntity, FanEntity): | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__( self, config_entry: ConfigEntry, driver: Driver, info: ZwaveDiscoveryInfo diff --git a/homeassistant/components/zwave_me/fan.py b/homeassistant/components/zwave_me/fan.py index 1016586ab55..bd0feba0dfb 100644 --- a/homeassistant/components/zwave_me/fan.py +++ b/homeassistant/components/zwave_me/fan.py @@ -49,7 +49,6 @@ class ZWaveMeFan(ZWaveMeEntity, FanEntity): | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False @property def percentage(self) -> int: diff --git a/tests/components/fan/test_init.py b/tests/components/fan/test_init.py index fbb09ab879c..90061ec60a1 100644 --- a/tests/components/fan/test_init.py +++ b/tests/components/fan/test_init.py @@ -1,7 +1,5 @@ """Tests for fan platforms.""" -from unittest.mock import patch - import pytest from homeassistant.components.fan import ( @@ -13,23 +11,13 @@ from homeassistant.components.fan import ( FanEntityFeature, NotValidPresetModeError, ) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import SERVICE_TURN_OFF, SERVICE_TURN_ON from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.helpers.entity_registry as er from homeassistant.setup import async_setup_component from .common import MockFan -from tests.common import ( - MockConfigEntry, - MockModule, - MockPlatform, - mock_integration, - mock_platform, - setup_test_component_platform, -) +from tests.common import setup_test_component_platform class BaseFan(FanEntity): @@ -161,300 +149,3 @@ async def test_preset_mode_validation( with pytest.raises(NotValidPresetModeError) as exc: await test_fan._valid_preset_mode_or_raise("invalid") assert exc.value.translation_key == "not_valid_preset_mode" - - -def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: - """Test deprecated supported features ints.""" - - class MockFan(FanEntity): - @property - def supported_features(self) -> int: - """Return supported features.""" - return 1 - - entity = MockFan() - assert entity.supported_features is FanEntityFeature(1) - assert "MockFan" in caplog.text - assert "is using deprecated supported features values" in caplog.text - assert "Instead it should use" in caplog.text - assert "FanEntityFeature.SET_SPEED" in caplog.text - caplog.clear() - assert entity.supported_features is FanEntityFeature(1) - assert "is using deprecated supported features values" not in caplog.text - - -async def test_warning_not_implemented_turn_on_off_feature( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None -) -> None: - """Test adding feature flag and warn if missing when 
methods are set.""" - - called = [] - - class MockFanEntityTest(MockFan): - """Mock Fan device.""" - - def turn_on( - self, - percentage: int | None = None, - preset_mode: str | None = None, - ) -> None: - """Turn on.""" - called.append("turn_on") - - def turn_off(self) -> None: - """Turn off.""" - called.append("turn_off") - - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_fan_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test fan platform via config entry.""" - async_add_entities([MockFanEntityTest(name="test", entity_id="fan.test")]) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, - ) - mock_platform( - hass, - "test.fan", - MockPlatform(async_setup_entry=async_setup_entry_fan_platform), - ) - - with patch.object( - MockFanEntityTest, "__module__", "tests.custom_components.fan.test_init" - ): - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("fan.test") - assert state is not None - - assert ( - "Entity fan.test (.MockFanEntityTest'>) " - "does not set FanEntityFeature.TURN_OFF but implements the turn_off method. Please report it to the author of the 'test' custom integration" - in caplog.text - ) - assert ( - "Entity fan.test (.MockFanEntityTest'>) " - "does not set FanEntityFeature.TURN_ON but implements the turn_on method. 
Please report it to the author of the 'test' custom integration" - in caplog.text - ) - - await hass.services.async_call( - DOMAIN, - SERVICE_TURN_ON, - { - "entity_id": "fan.test", - }, - blocking=True, - ) - await hass.services.async_call( - DOMAIN, - SERVICE_TURN_OFF, - { - "entity_id": "fan.test", - }, - blocking=True, - ) - - assert len(called) == 2 - assert "turn_on" in called - assert "turn_off" in called - - -async def test_no_warning_implemented_turn_on_off_feature( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None -) -> None: - """Test no warning when feature flags are set.""" - - class MockFanEntityTest(MockFan): - """Mock Fan device.""" - - _attr_supported_features = ( - FanEntityFeature.DIRECTION - | FanEntityFeature.OSCILLATE - | FanEntityFeature.SET_SPEED - | FanEntityFeature.PRESET_MODE - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON - ) - - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_fan_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test fan platform via config entry.""" - async_add_entities([MockFanEntityTest(name="test", entity_id="fan.test")]) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, - ) - mock_platform( - hass, - "test.fan", - MockPlatform(async_setup_entry=async_setup_entry_fan_platform), - ) - - with patch.object( - MockFanEntityTest, "__module__", "tests.custom_components.fan.test_init" - ): - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("fan.test") - assert state is not None - - assert "does not set FanEntityFeature.TURN_OFF" not in caplog.text - assert "does not set FanEntityFeature.TURN_ON" not in caplog.text - - -async def test_no_warning_integration_has_migrated( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None -) -> None: - """Test no warning when integration migrated using `_enable_turn_on_off_backwards_compatibility`.""" - - class MockFanEntityTest(MockFan): - """Mock Fan device.""" - - _enable_turn_on_off_backwards_compatibility = False - _attr_supported_features = ( - FanEntityFeature.DIRECTION - | FanEntityFeature.OSCILLATE - | FanEntityFeature.SET_SPEED - | FanEntityFeature.PRESET_MODE - ) - - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_fan_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test fan platform via config entry.""" - async_add_entities([MockFanEntityTest(name="test", entity_id="fan.test")]) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, - ) - mock_platform( - hass, - "test.fan", - MockPlatform(async_setup_entry=async_setup_entry_fan_platform), - ) - - with patch.object( - MockFanEntityTest, "__module__", "tests.custom_components.fan.test_init" - ): - config_entry = 
MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("fan.test") - assert state is not None - - assert "does not set FanEntityFeature.TURN_OFF" not in caplog.text - assert "does not set FanEntityFeature.TURN_ON" not in caplog.text - - -async def test_no_warning_integration_implement_feature_flags( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None -) -> None: - """Test no warning when integration uses the correct feature flags.""" - - class MockFanEntityTest(MockFan): - """Mock Fan device.""" - - _attr_supported_features = ( - FanEntityFeature.DIRECTION - | FanEntityFeature.OSCILLATE - | FanEntityFeature.SET_SPEED - | FanEntityFeature.PRESET_MODE - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON - ) - - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_fan_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test fan platform via config entry.""" - async_add_entities([MockFanEntityTest(name="test", entity_id="fan.test")]) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, - ) - mock_platform( - hass, - "test.fan", - MockPlatform(async_setup_entry=async_setup_entry_fan_platform), - ) - - with patch.object( - MockFanEntityTest, "__module__", "tests.custom_components.fan.test_init" - ): - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("fan.test") - assert state is not None - - assert "does not set FanEntityFeature.TURN_OFF" not in caplog.text - assert "does not set FanEntityFeature.TURN_ON" not in caplog.text From c41cf570d3f312ec3e1f5d0701b789d3e45771ff Mon Sep 17 00:00:00 2001 From: G Johansson Date: Thu, 5 Dec 2024 20:37:17 +0100 Subject: [PATCH 0274/1198] Remove deprecated supported features warning in `ClimateEntity` (#132206) * Remove deprecated features from ClimateEntity * Remove not needed tests * Remove add_to_platform_start --- homeassistant/components/climate/__init__.py | 111 ------- tests/components/climate/test_init.py | 293 +------------------ 2 files changed, 2 insertions(+), 402 deletions(-) diff --git a/homeassistant/components/climate/__init__.py b/homeassistant/components/climate/__init__.py index 045003dcd0f..ca85979f19a 100644 --- a/homeassistant/components/climate/__init__.py +++ b/homeassistant/components/climate/__init__.py @@ -2,7 +2,6 @@ from __future__ import annotations -import asyncio from datetime import timedelta import functools as ft import logging @@ -28,7 +27,6 @@ from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import config_validation as cv, issue_registry as ir from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent -from homeassistant.helpers.entity_platform import EntityPlatform from homeassistant.helpers.temperature import display_temp as show_temp from homeassistant.helpers.typing import ConfigType from homeassistant.loader import 
async_get_issue_tracker, async_suggest_report_issue @@ -303,115 +301,6 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): __climate_reported_legacy_aux = False - __mod_supported_features: ClimateEntityFeature = ClimateEntityFeature(0) - # Integrations should set `_enable_turn_on_off_backwards_compatibility` to False - # once migrated and set the feature flags TURN_ON/TURN_OFF as needed. - _enable_turn_on_off_backwards_compatibility: bool = True - - def __getattribute__(self, name: str, /) -> Any: - """Get attribute. - - Modify return of `supported_features` to - include `_mod_supported_features` if attribute is set. - """ - if name != "supported_features": - return super().__getattribute__(name) - - # Convert the supported features to ClimateEntityFeature. - # Remove this compatibility shim in 2025.1 or later. - _supported_features: ClimateEntityFeature = super().__getattribute__( - "supported_features" - ) - _mod_supported_features: ClimateEntityFeature = super().__getattribute__( - "_ClimateEntity__mod_supported_features" - ) - if type(_supported_features) is int: # noqa: E721 - _features = ClimateEntityFeature(_supported_features) - self._report_deprecated_supported_features_values(_features) - else: - _features = _supported_features - - if not _mod_supported_features: - return _features - - # Add automatically calculated ClimateEntityFeature.TURN_OFF/TURN_ON to - # supported features and return it - return _features | _mod_supported_features - - @callback - def add_to_platform_start( - self, - hass: HomeAssistant, - platform: EntityPlatform, - parallel_updates: asyncio.Semaphore | None, - ) -> None: - """Start adding an entity to a platform.""" - super().add_to_platform_start(hass, platform, parallel_updates) - - def _report_turn_on_off(feature: str, method: str) -> None: - """Log warning not implemented turn on/off feature.""" - report_issue = self._suggest_report_issue() - if feature.startswith("TURN"): - message = ( - "Entity %s (%s) does not set ClimateEntityFeature.%s" - " but implements the %s method. Please %s" - ) - else: - message = ( - "Entity %s (%s) implements HVACMode(s): %s and therefore implicitly" - " supports the %s methods without setting the proper" - " ClimateEntityFeature. Please %s" - ) - _LOGGER.warning( - message, - self.entity_id, - type(self), - feature, - method, - report_issue, - ) - - # Adds ClimateEntityFeature.TURN_OFF/TURN_ON depending on service calls implemented - # This should be removed in 2025.1. 
- if self._enable_turn_on_off_backwards_compatibility is False: - # Return if integration has migrated already - return - - supported_features = self.supported_features - if supported_features & CHECK_TURN_ON_OFF_FEATURE_FLAG: - # The entity supports both turn_on and turn_off, the backwards compatibility - # checks are not needed - return - - if not supported_features & ClimateEntityFeature.TURN_OFF and ( - type(self).async_turn_off is not ClimateEntity.async_turn_off - or type(self).turn_off is not ClimateEntity.turn_off - ): - # turn_off implicitly supported by implementing turn_off method - _report_turn_on_off("TURN_OFF", "turn_off") - self.__mod_supported_features |= ( # pylint: disable=unused-private-member - ClimateEntityFeature.TURN_OFF - ) - - if not supported_features & ClimateEntityFeature.TURN_ON and ( - type(self).async_turn_on is not ClimateEntity.async_turn_on - or type(self).turn_on is not ClimateEntity.turn_on - ): - # turn_on implicitly supported by implementing turn_on method - _report_turn_on_off("TURN_ON", "turn_on") - self.__mod_supported_features |= ( # pylint: disable=unused-private-member - ClimateEntityFeature.TURN_ON - ) - - if (modes := self.hvac_modes) and len(modes) >= 2 and HVACMode.OFF in modes: - # turn_on/off implicitly supported by including more modes than 1 and one of these - # are HVACMode.OFF - _modes = [_mode for _mode in modes if _mode is not None] - _report_turn_on_off(", ".join(_modes or []), "turn_on/turn_off") - self.__mod_supported_features |= ( # pylint: disable=unused-private-member - ClimateEntityFeature.TURN_ON | ClimateEntityFeature.TURN_OFF - ) - def _report_legacy_aux(self) -> None: """Log warning and create an issue if the entity implements legacy auxiliary heater.""" diff --git a/tests/components/climate/test_init.py b/tests/components/climate/test_init.py index a7f47668612..8851b2d60c5 100644 --- a/tests/components/climate/test_init.py +++ b/tests/components/climate/test_init.py @@ -4,7 +4,7 @@ from __future__ import annotations from enum import Enum from typing import Any -from unittest.mock import MagicMock, Mock, patch +from unittest.mock import MagicMock, Mock import pytest import voluptuous as vol @@ -38,13 +38,7 @@ from homeassistant.components.climate.const import ( ClimateEntityFeature, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_TEMPERATURE, - PRECISION_WHOLE, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - UnitOfTemperature, -) +from homeassistant.const import ATTR_TEMPERATURE, PRECISION_WHOLE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import issue_registry as ir @@ -430,289 +424,6 @@ async def test_mode_validation( assert exc.value.translation_key == "not_valid_fan_mode" -@pytest.mark.parametrize( - "supported_features_at_int", - [ - ClimateEntityFeature.TARGET_TEMPERATURE.value, - ClimateEntityFeature.TARGET_TEMPERATURE.value - | ClimateEntityFeature.TURN_ON.value - | ClimateEntityFeature.TURN_OFF.value, - ], -) -def test_deprecated_supported_features_ints( - caplog: pytest.LogCaptureFixture, supported_features_at_int: int -) -> None: - """Test deprecated supported features ints.""" - - class MockClimateEntity(ClimateEntity): - @property - def supported_features(self) -> int: - """Return supported features.""" - return supported_features_at_int - - entity = MockClimateEntity() - assert entity.supported_features is ClimateEntityFeature(supported_features_at_int) - assert 
"MockClimateEntity" in caplog.text - assert "is using deprecated supported features values" in caplog.text - assert "Instead it should use" in caplog.text - assert "ClimateEntityFeature.TARGET_TEMPERATURE" in caplog.text - caplog.clear() - assert entity.supported_features is ClimateEntityFeature(supported_features_at_int) - assert "is using deprecated supported features values" not in caplog.text - - -async def test_warning_not_implemented_turn_on_off_feature( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - register_test_integration: MockConfigEntry, -) -> None: - """Test adding feature flag and warn if missing when methods are set.""" - - called = [] - - class MockClimateEntityTest(MockClimateEntity): - """Mock Climate device.""" - - def turn_on(self) -> None: - """Turn on.""" - called.append("turn_on") - - def turn_off(self) -> None: - """Turn off.""" - called.append("turn_off") - - climate_entity = MockClimateEntityTest(name="test", entity_id="climate.test") - - with patch.object( - MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" - ): - setup_test_component_platform( - hass, DOMAIN, entities=[climate_entity], from_config_entry=True - ) - await hass.config_entries.async_setup(register_test_integration.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("climate.test") - assert state is not None - - assert ( - "Entity climate.test (.MockClimateEntityTest'>)" - " does not set ClimateEntityFeature.TURN_OFF but implements the turn_off method." - " Please report it to the author of the 'test' custom integration" - in caplog.text - ) - assert ( - "Entity climate.test (.MockClimateEntityTest'>)" - " does not set ClimateEntityFeature.TURN_ON but implements the turn_on method." - " Please report it to the author of the 'test' custom integration" - in caplog.text - ) - - await hass.services.async_call( - DOMAIN, - SERVICE_TURN_ON, - { - "entity_id": "climate.test", - }, - blocking=True, - ) - await hass.services.async_call( - DOMAIN, - SERVICE_TURN_OFF, - { - "entity_id": "climate.test", - }, - blocking=True, - ) - - assert len(called) == 2 - assert "turn_on" in called - assert "turn_off" in called - - -async def test_implicit_warning_not_implemented_turn_on_off_feature( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - register_test_integration: MockConfigEntry, -) -> None: - """Test adding feature flag and warn if missing when methods are not set. - - (implicit by hvac mode) - """ - - class MockClimateEntityTest(MockEntity, ClimateEntity): - """Mock Climate device.""" - - _attr_temperature_unit = UnitOfTemperature.CELSIUS - - @property - def hvac_mode(self) -> HVACMode: - """Return hvac operation ie. heat, cool mode. - - Need to be one of HVACMode.*. - """ - return HVACMode.HEAT - - @property - def hvac_modes(self) -> list[HVACMode]: - """Return the list of available hvac operation modes. - - Need to be a subset of HVAC_MODES. 
- """ - return [HVACMode.OFF, HVACMode.HEAT] - - climate_entity = MockClimateEntityTest(name="test", entity_id="climate.test") - - with patch.object( - MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" - ): - setup_test_component_platform( - hass, DOMAIN, entities=[climate_entity], from_config_entry=True - ) - await hass.config_entries.async_setup(register_test_integration.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("climate.test") - assert state is not None - - assert ( - "Entity climate.test (.MockClimateEntityTest'>)" - " implements HVACMode(s): off, heat and therefore implicitly supports the turn_on/turn_off" - " methods without setting the proper ClimateEntityFeature. Please report it to the author" - " of the 'test' custom integration" in caplog.text - ) - - -async def test_no_warning_implemented_turn_on_off_feature( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - register_test_integration: MockConfigEntry, -) -> None: - """Test no warning when feature flags are set.""" - - class MockClimateEntityTest(MockClimateEntity): - """Mock Climate device.""" - - _attr_supported_features = ( - ClimateEntityFeature.FAN_MODE - | ClimateEntityFeature.PRESET_MODE - | ClimateEntityFeature.SWING_MODE - | ClimateEntityFeature.TURN_OFF - | ClimateEntityFeature.TURN_ON - ) - - climate_entity = MockClimateEntityTest(name="test", entity_id="climate.test") - - with patch.object( - MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" - ): - setup_test_component_platform( - hass, DOMAIN, entities=[climate_entity], from_config_entry=True - ) - await hass.config_entries.async_setup(register_test_integration.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("climate.test") - assert state is not None - - assert ( - "does not set ClimateEntityFeature.TURN_OFF but implements the turn_off method." - not in caplog.text - ) - assert ( - "does not set ClimateEntityFeature.TURN_ON but implements the turn_on method." - not in caplog.text - ) - assert ( - " implements HVACMode(s): off, heat and therefore implicitly supports the off, heat methods" - not in caplog.text - ) - - -async def test_no_warning_integration_has_migrated( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - register_test_integration: MockConfigEntry, -) -> None: - """Test no warning when integration migrated using `_enable_turn_on_off_backwards_compatibility`.""" - - class MockClimateEntityTest(MockClimateEntity): - """Mock Climate device.""" - - _enable_turn_on_off_backwards_compatibility = False - _attr_supported_features = ( - ClimateEntityFeature.FAN_MODE - | ClimateEntityFeature.PRESET_MODE - | ClimateEntityFeature.SWING_MODE - ) - - climate_entity = MockClimateEntityTest(name="test", entity_id="climate.test") - - with patch.object( - MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" - ): - setup_test_component_platform( - hass, DOMAIN, entities=[climate_entity], from_config_entry=True - ) - await hass.config_entries.async_setup(register_test_integration.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("climate.test") - assert state is not None - - assert ( - "does not set ClimateEntityFeature.TURN_OFF but implements the turn_off method." - not in caplog.text - ) - assert ( - "does not set ClimateEntityFeature.TURN_ON but implements the turn_on method." 
- not in caplog.text - ) - assert ( - " implements HVACMode(s): off, heat and therefore implicitly supports the off, heat methods" - not in caplog.text - ) - - -async def test_no_warning_integration_implement_feature_flags( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - register_test_integration: MockConfigEntry, -) -> None: - """Test no warning when integration uses the correct feature flags.""" - - class MockClimateEntityTest(MockClimateEntity): - """Mock Climate device.""" - - _attr_supported_features = ( - ClimateEntityFeature.FAN_MODE - | ClimateEntityFeature.PRESET_MODE - | ClimateEntityFeature.SWING_MODE - | ClimateEntityFeature.TURN_OFF - | ClimateEntityFeature.TURN_ON - ) - - climate_entity = MockClimateEntityTest(name="test", entity_id="climate.test") - - with patch.object( - MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" - ): - setup_test_component_platform( - hass, DOMAIN, entities=[climate_entity], from_config_entry=True - ) - await hass.config_entries.async_setup(register_test_integration.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("climate.test") - assert state is not None - - assert "does not set ClimateEntityFeature" not in caplog.text - assert "implements HVACMode(s):" not in caplog.text - - async def test_turn_on_off_toggle(hass: HomeAssistant) -> None: """Test turn_on/turn_off/toggle methods.""" From e5851c20e91b300d0d9ed553826b95527715df4f Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 5 Dec 2024 20:55:54 +0100 Subject: [PATCH 0275/1198] Mark test-before-setup as exempt in mqtt (#132334) --- homeassistant/components/mqtt/quality_scale.yaml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/mqtt/quality_scale.yaml b/homeassistant/components/mqtt/quality_scale.yaml index d459f0420f1..d1730d8d2fe 100644 --- a/homeassistant/components/mqtt/quality_scale.yaml +++ b/homeassistant/components/mqtt/quality_scale.yaml @@ -29,9 +29,12 @@ rules: MQTT broker, this happens during integration setup, and only one config entry is allowed. test-before-configure: done - test-before-setup: done + test-before-setup: + status: exempt + comment: > + We choose to early exit the entry as it can take some time for the client + to connect. Waiting for the client would increase the overall setup time. 
unique-config-entry: done - # Silver config-entry-unloading: done log-when-unavailable: done From 3a2460f9f961f7a23e23136cc4eb23634146e694 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Thu, 5 Dec 2024 20:57:43 +0100 Subject: [PATCH 0276/1198] Remove yaml import from feedreader integration (#132278) * Remove yaml import from feedreader integration * Update homeassistant/components/feedreader/config_flow.py Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> * Drop _max_entries class attribute --------- Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- .../components/feedreader/__init__.py | 67 ++----------------- .../components/feedreader/config_flow.py | 28 +------- .../components/feedreader/test_config_flow.py | 64 +----------------- 3 files changed, 6 insertions(+), 153 deletions(-) diff --git a/homeassistant/components/feedreader/__init__.py b/homeassistant/components/feedreader/__init__.py index b9f0b006e2a..9faed54c041 100644 --- a/homeassistant/components/feedreader/__init__.py +++ b/homeassistant/components/feedreader/__init__.py @@ -2,17 +2,12 @@ from __future__ import annotations -import voluptuous as vol - -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import CONF_SCAN_INTERVAL, CONF_URL, Platform -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from homeassistant.helpers.typing import ConfigType +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_URL, Platform +from homeassistant.core import HomeAssistant from homeassistant.util.hass_dict import HassKey -from .const import CONF_MAX_ENTRIES, DEFAULT_MAX_ENTRIES, DEFAULT_SCAN_INTERVAL, DOMAIN +from .const import CONF_MAX_ENTRIES, DOMAIN from .coordinator import FeedReaderCoordinator, StoredData type FeedReaderConfigEntry = ConfigEntry[FeedReaderCoordinator] @@ -21,60 +16,6 @@ CONF_URLS = "urls" MY_KEY: HassKey[StoredData] = HassKey(DOMAIN) -CONFIG_SCHEMA = vol.Schema( - vol.All( - cv.deprecated(DOMAIN), - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_URLS): vol.All(cv.ensure_list, [cv.url]), - vol.Optional( - CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL - ): cv.time_period, - vol.Optional( - CONF_MAX_ENTRIES, default=DEFAULT_MAX_ENTRIES - ): cv.positive_int, - } - ) - }, - ), - extra=vol.ALLOW_EXTRA, -) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the Feedreader component.""" - if DOMAIN in config: - for url in config[DOMAIN][CONF_URLS]: - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={ - CONF_URL: url, - CONF_MAX_ENTRIES: config[DOMAIN][CONF_MAX_ENTRIES], - }, - ) - ) - - async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2025.1.0", - is_fixable=False, - is_persistent=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Feedreader", - }, - ) - - return True - async def async_setup_entry(hass: HomeAssistant, entry: FeedReaderConfigEntry) -> bool: """Set up Feedreader from a config entry.""" diff --git a/homeassistant/components/feedreader/config_flow.py b/homeassistant/components/feedreader/config_flow.py index 72042de25ed..f3e56ad1778 100644 --- 
a/homeassistant/components/feedreader/config_flow.py +++ b/homeassistant/components/feedreader/config_flow.py @@ -11,7 +11,6 @@ import feedparser import voluptuous as vol from homeassistant.config_entries import ( - SOURCE_IMPORT, ConfigEntry, ConfigFlow, ConfigFlowResult, @@ -20,13 +19,11 @@ from homeassistant.config_entries import ( from homeassistant.const import CONF_URL from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.selector import ( TextSelector, TextSelectorConfig, TextSelectorType, ) -from homeassistant.util import slugify from .const import CONF_MAX_ENTRIES, DEFAULT_MAX_ENTRIES, DOMAIN @@ -42,7 +39,6 @@ class FeedReaderConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow.""" VERSION = 1 - _max_entries: int | None = None @staticmethod @callback @@ -75,21 +71,6 @@ class FeedReaderConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - def abort_on_import_error(self, url: str, error: str) -> ConfigFlowResult: - """Abort import flow on error.""" - async_create_issue( - self.hass, - DOMAIN, - f"import_yaml_error_{DOMAIN}_{error}_{slugify(url)}", - breaks_in_ha_version="2025.1.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key=f"import_yaml_error_{error}", - translation_placeholders={"url": url}, - ) - return self.async_abort(reason=error) - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -104,8 +85,6 @@ class FeedReaderConfigFlow(ConfigFlow, domain=DOMAIN): if feed.bozo: LOGGER.debug("feed bozo_exception: %s", feed.bozo_exception) if isinstance(feed.bozo_exception, urllib.error.URLError): - if self.context["source"] == SOURCE_IMPORT: - return self.abort_on_import_error(user_input[CONF_URL], "url_error") return self.show_user_form(user_input, {"base": "url_error"}) feed_title = html.unescape(feed["feed"]["title"]) @@ -113,14 +92,9 @@ class FeedReaderConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_create_entry( title=feed_title, data=user_input, - options={CONF_MAX_ENTRIES: self._max_entries or DEFAULT_MAX_ENTRIES}, + options={CONF_MAX_ENTRIES: DEFAULT_MAX_ENTRIES}, ) - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Handle an import flow.""" - self._max_entries = import_data[CONF_MAX_ENTRIES] - return await self.async_step_user({CONF_URL: import_data[CONF_URL]}) - async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/tests/components/feedreader/test_config_flow.py b/tests/components/feedreader/test_config_flow.py index e801227293c..c9fc89179db 100644 --- a/tests/components/feedreader/test_config_flow.py +++ b/tests/components/feedreader/test_config_flow.py @@ -5,7 +5,6 @@ import urllib import pytest -from homeassistant.components.feedreader import CONF_URLS from homeassistant.components.feedreader.const import ( CONF_MAX_ENTRIES, DEFAULT_MAX_ENTRIES, @@ -13,10 +12,8 @@ from homeassistant.components.feedreader.const import ( ) from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_URL -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import issue_registry as ir -from homeassistant.setup import 
async_setup_component from . import create_mock_entry from .const import FEED_TITLE, URL, VALID_CONFIG_DEFAULT @@ -95,65 +92,6 @@ async def test_user_errors( assert result["options"][CONF_MAX_ENTRIES] == DEFAULT_MAX_ENTRIES -@pytest.mark.parametrize( - ("data", "expected_data", "expected_options"), - [ - ({CONF_URLS: [URL]}, {CONF_URL: URL}, {CONF_MAX_ENTRIES: DEFAULT_MAX_ENTRIES}), - ( - {CONF_URLS: [URL], CONF_MAX_ENTRIES: 5}, - {CONF_URL: URL}, - {CONF_MAX_ENTRIES: 5}, - ), - ], -) -async def test_import( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - data, - expected_data, - expected_options, - feedparser, - setup_entry, -) -> None: - """Test starting an import flow.""" - config_entries = hass.config_entries.async_entries(DOMAIN) - assert not config_entries - - assert await async_setup_component(hass, DOMAIN, {DOMAIN: data}) - - config_entries = hass.config_entries.async_entries(DOMAIN) - assert config_entries - assert len(config_entries) == 1 - assert config_entries[0].title == FEED_TITLE - assert config_entries[0].data == expected_data - assert config_entries[0].options == expected_options - - assert issue_registry.async_get_issue( - HOMEASSISTANT_DOMAIN, "deprecated_yaml_feedreader" - ) - - -async def test_import_errors( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - feedparser, - setup_entry, - feed_one_event, -) -> None: - """Test starting an import flow which results in an URL error.""" - config_entries = hass.config_entries.async_entries(DOMAIN) - assert not config_entries - - # raise URLError - feedparser.side_effect = urllib.error.URLError("Test") - feedparser.return_value = None - assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_URLS: [URL]}}) - assert issue_registry.async_get_issue( - DOMAIN, - "import_yaml_error_feedreader_url_error_http_some_rss_local_rss_feed_xml", - ) - - async def test_reconfigure(hass: HomeAssistant, feedparser) -> None: """Test starting a reconfigure flow.""" entry = create_mock_entry(VALID_CONFIG_DEFAULT) From f4896f7b09e0368df82e51e0af2ca984abf20aef Mon Sep 17 00:00:00 2001 From: robinostlund Date: Thu, 5 Dec 2024 21:14:04 +0100 Subject: [PATCH 0277/1198] Add missing UnitOfPower to sensor (#132352) * Add missing UnitOfPower to sensor * Update homeassistant/components/sensor/const.py Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> * adding to number --------- Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- homeassistant/components/number/const.py | 8 +++++++- homeassistant/components/sensor/const.py | 8 +++++++- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/number/const.py b/homeassistant/components/number/const.py index 5a2f4c8675c..47158826e75 100644 --- a/homeassistant/components/number/const.py +++ b/homeassistant/components/number/const.py @@ -467,7 +467,13 @@ DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = { NumberDeviceClass.PM10: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER}, NumberDeviceClass.PM25: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER}, NumberDeviceClass.POWER_FACTOR: {PERCENTAGE, None}, - NumberDeviceClass.POWER: {UnitOfPower.WATT, UnitOfPower.KILO_WATT}, + NumberDeviceClass.POWER: { + UnitOfPower.WATT, + UnitOfPower.KILO_WATT, + UnitOfPower.MEGA_WATT, + UnitOfPower.GIGA_WATT, + UnitOfPower.TERA_WATT, + }, NumberDeviceClass.PRECIPITATION: set(UnitOfPrecipitationDepth), NumberDeviceClass.PRECIPITATION_INTENSITY: set(UnitOfVolumetricFlux), NumberDeviceClass.PRESSURE: set(UnitOfPressure), 
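For illustration only, a minimal sketch (not part of this patch) of an entity that uses one of the newly permitted power units. The GridFeedInSensor class, its name, and its fixed return value are hypothetical; the attributes it sets are the standard SensorEntity ones that the DEVICE_CLASS_UNITS mappings extended above are checked against.

from homeassistant.components.sensor import SensorDeviceClass, SensorEntity
from homeassistant.const import UnitOfPower


class GridFeedInSensor(SensorEntity):
    """Hypothetical sensor reporting power in megawatts."""

    _attr_device_class = SensorDeviceClass.POWER
    _attr_native_unit_of_measurement = UnitOfPower.MEGA_WATT

    @property
    def native_value(self) -> float:
        """Return the measured feed-in power in MW (placeholder value)."""
        return 1.21

Before this change the POWER entry listed only WATT and KILO_WATT, so a unit such as MEGA_WATT did not match; with the additions above MEGA_WATT, GIGA_WATT and TERA_WATT are accepted as well.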
diff --git a/homeassistant/components/sensor/const.py b/homeassistant/components/sensor/const.py index 4d0454cbff3..a2e3cb52173 100644 --- a/homeassistant/components/sensor/const.py +++ b/homeassistant/components/sensor/const.py @@ -561,7 +561,13 @@ DEVICE_CLASS_UNITS: dict[SensorDeviceClass, set[type[StrEnum] | str | None]] = { SensorDeviceClass.PM10: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER}, SensorDeviceClass.PM25: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER}, SensorDeviceClass.POWER_FACTOR: {PERCENTAGE, None}, - SensorDeviceClass.POWER: {UnitOfPower.WATT, UnitOfPower.KILO_WATT}, + SensorDeviceClass.POWER: { + UnitOfPower.WATT, + UnitOfPower.KILO_WATT, + UnitOfPower.MEGA_WATT, + UnitOfPower.GIGA_WATT, + UnitOfPower.TERA_WATT, + }, SensorDeviceClass.PRECIPITATION: set(UnitOfPrecipitationDepth), SensorDeviceClass.PRECIPITATION_INTENSITY: set(UnitOfVolumetricFlux), SensorDeviceClass.PRESSURE: set(UnitOfPressure), From 5fdd705edf2821ac29a52cbd8273c46e5d40939f Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Thu, 5 Dec 2024 21:15:26 +0100 Subject: [PATCH 0278/1198] Remove yaml import from incomfort integration after deprecation time (#132275) * Remove yaml import from incomfort integration after deprecation time * Cleanup CONFIG_SCHEMA * restore missing DOMAIN import * Import DOMAIN from const --- .../components/incomfort/__init__.py | 71 +------------------ .../components/incomfort/config_flow.py | 8 --- tests/components/incomfort/conftest.py | 2 +- .../components/incomfort/test_config_flow.py | 48 +------------ 4 files changed, 6 insertions(+), 123 deletions(-) diff --git a/homeassistant/components/incomfort/__init__.py b/homeassistant/components/incomfort/__init__.py index 39e471b7614..4b6a6a5fcc3 100644 --- a/homeassistant/components/incomfort/__init__.py +++ b/homeassistant/components/incomfort/__init__.py @@ -4,33 +4,15 @@ from __future__ import annotations from aiohttp import ClientResponseError from incomfortclient import IncomfortError, InvalidHeaterList -import voluptuous as vol -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.data_entry_flow import FlowResultType +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed -from homeassistant.helpers import config_validation as cv, issue_registry as ir -from homeassistant.helpers.typing import ConfigType -from .const import DOMAIN from .coordinator import InComfortDataCoordinator, async_connect_gateway from .errors import InConfortTimeout, InConfortUnknownError, NoHeaters, NotFound -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_HOST): cv.string, - vol.Inclusive(CONF_USERNAME, "credentials"): cv.string, - vol.Inclusive(CONF_PASSWORD, "credentials"): cv.string, - } - ) - }, - extra=vol.ALLOW_EXTRA, -) - PLATFORMS = ( Platform.WATER_HEATER, Platform.BINARY_SENSOR, @@ -43,53 +25,6 @@ INTEGRATION_TITLE = "Intergas InComfort/Intouch Lan2RF gateway" type InComfortConfigEntry = ConfigEntry[InComfortDataCoordinator] -async def _async_import(hass: HomeAssistant, config: ConfigType) -> None: - """Import config entry from configuration.yaml.""" - if not hass.config_entries.async_entries(DOMAIN): - # Start import flow - result = await hass.config_entries.flow.async_init( - 
DOMAIN, context={"source": SOURCE_IMPORT}, data=config - ) - if result["type"] == FlowResultType.ABORT: - ir.async_create_issue( - hass, - DOMAIN, - f"deprecated_yaml_import_issue_{result['reason']}", - breaks_in_ha_version="2025.1.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=ir.IssueSeverity.WARNING, - translation_key=f"deprecated_yaml_import_issue_{result['reason']}", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": INTEGRATION_TITLE, - }, - ) - return - - ir.async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2025.1.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=ir.IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": INTEGRATION_TITLE, - }, - ) - - -async def async_setup(hass: HomeAssistant, hass_config: ConfigType) -> bool: - """Create an Intergas InComfort/Intouch system.""" - if config := hass_config.get(DOMAIN): - hass.async_create_task(_async_import(hass, config)) - return True - - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" try: diff --git a/homeassistant/components/incomfort/config_flow.py b/homeassistant/components/incomfort/config_flow.py index e905f0d743d..f4838a9771d 100644 --- a/homeassistant/components/incomfort/config_flow.py +++ b/homeassistant/components/incomfort/config_flow.py @@ -81,11 +81,3 @@ class InComfortConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="user", data_schema=CONFIG_SCHEMA, errors=errors ) - - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Import `incomfort` config entry from configuration.yaml.""" - errors: dict[str, str] | None = None - if (errors := await async_try_connect_gateway(self.hass, import_data)) is None: - return self.async_create_entry(title=TITLE, data=import_data) - reason = next(iter(errors.items()))[1] - return self.async_abort(reason=reason) diff --git a/tests/components/incomfort/conftest.py b/tests/components/incomfort/conftest.py index f17547a1445..b00e3a638c8 100644 --- a/tests/components/incomfort/conftest.py +++ b/tests/components/incomfort/conftest.py @@ -7,7 +7,7 @@ from unittest.mock import AsyncMock, MagicMock, patch from incomfortclient import DisplayCode import pytest -from homeassistant.components.incomfort import DOMAIN +from homeassistant.components.incomfort.const import DOMAIN from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant diff --git a/tests/components/incomfort/test_config_flow.py b/tests/components/incomfort/test_config_flow.py index 7a942dab817..287fd85715f 100644 --- a/tests/components/incomfort/test_config_flow.py +++ b/tests/components/incomfort/test_config_flow.py @@ -6,8 +6,8 @@ from aiohttp import ClientResponseError from incomfortclient import IncomfortError, InvalidHeaterList import pytest -from homeassistant.components.incomfort import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.components.incomfort.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -38,50 +38,6 @@ async def test_form( assert len(mock_setup_entry.mock_calls) == 1 -async def test_import( - hass: HomeAssistant, mock_setup_entry: AsyncMock, 
mock_incomfort: MagicMock -) -> None: - """Test we van import from YAML.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=MOCK_CONFIG - ) - await hass.async_block_till_done() - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Intergas InComfort/Intouch Lan2RF gateway" - assert result["data"] == MOCK_CONFIG - assert len(mock_setup_entry.mock_calls) == 1 - - -@pytest.mark.parametrize( - ("exc", "abort_reason"), - [ - (IncomfortError(ClientResponseError(None, None, status=401)), "auth_error"), - (IncomfortError(ClientResponseError(None, None, status=404)), "not_found"), - (IncomfortError(ClientResponseError(None, None, status=500)), "unknown"), - (IncomfortError, "unknown"), - (InvalidHeaterList, "no_heaters"), - (ValueError, "unknown"), - (TimeoutError, "timeout_error"), - ], -) -async def test_import_fails( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_incomfort: MagicMock, - exc: Exception, - abort_reason: str, -) -> None: - """Test YAML import fails.""" - mock_incomfort().heaters.side_effect = exc - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=MOCK_CONFIG - ) - await hass.async_block_till_done() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == abort_reason - assert len(mock_setup_entry.mock_calls) == 0 - - async def test_entry_already_configured(hass: HomeAssistant) -> None: """Test aborting if the entry is already configured.""" entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG) From 1ca2f3393cd2ad735abe53a07cb3b4b95eb1bda2 Mon Sep 17 00:00:00 2001 From: Artur Pragacz <49985303+arturpragacz@users.noreply.github.com> Date: Thu, 5 Dec 2024 21:15:40 +0100 Subject: [PATCH 0279/1198] Add data description for Onkyo config flow (#132349) --- .../components/onkyo/quality_scale.yaml | 5 +---- homeassistant/components/onkyo/strings.json | 17 +++++++++++++++-- 2 files changed, 16 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/onkyo/quality_scale.yaml b/homeassistant/components/onkyo/quality_scale.yaml index 46f0f6d3b0d..cdcf88e72d7 100644 --- a/homeassistant/components/onkyo/quality_scale.yaml +++ b/homeassistant/components/onkyo/quality_scale.yaml @@ -7,10 +7,7 @@ rules: This integration uses a push API. No polling required. brands: done common-modules: done - config-flow: - status: todo - comment: | - The data_descriptions are missing. + config-flow: done config-flow-test-coverage: status: todo comment: | diff --git a/homeassistant/components/onkyo/strings.json b/homeassistant/components/onkyo/strings.json index 1b0eadcc45e..95ca1199a36 100644 --- a/homeassistant/components/onkyo/strings.json +++ b/homeassistant/components/onkyo/strings.json @@ -10,18 +10,28 @@ "manual": { "data": { "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "Hostname or IP address of the receiver." } }, "eiscp_discovery": { "data": { "device": "[%key:common::config_flow::data::device%]" + }, + "data_description": { + "device": "Select the receiver to configure." 
} }, "configure_receiver": { "description": "Configure {name}", "data": { - "volume_resolution": "Number of steps it takes for the receiver to go from the lowest to the highest possible volume", - "input_sources": "List of input sources supported by the receiver" + "volume_resolution": "Volume resolution", + "input_sources": "Input sources" + }, + "data_description": { + "volume_resolution": "Number of steps it takes for the receiver to go from the lowest to the highest possible volume.", + "input_sources": "List of input sources supported by the receiver." } } }, @@ -43,6 +53,9 @@ "init": { "data": { "max_volume": "Maximum volume limit (%)" + }, + "data_description": { + "max_volume": "Maximum volume limit as a percentage. This will associate Home Assistant's maximum volume to this value on the receiver, i.e., if you set this to 50%, then setting the volume to 100% in Home Assistant will cause the volume on the receiver to be set to 50% of its maximum value." } } } From b2ac16e95f2909f9c2294bdd7b35c4c773c93495 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 5 Dec 2024 21:18:45 +0100 Subject: [PATCH 0280/1198] Remove deprecated supported features warning in CoverEntity (#132367) Cleanup magic numbers for cover supported features --- homeassistant/components/cover/__init__.py | 4 ---- tests/components/cover/test_init.py | 19 ------------------- 2 files changed, 23 deletions(-) diff --git a/homeassistant/components/cover/__init__.py b/homeassistant/components/cover/__init__.py index 001bff51991..9ce526712f0 100644 --- a/homeassistant/components/cover/__init__.py +++ b/homeassistant/components/cover/__init__.py @@ -300,10 +300,6 @@ class CoverEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): def supported_features(self) -> CoverEntityFeature: """Flag supported features.""" if (features := self._attr_supported_features) is not None: - if type(features) is int: # noqa: E721 - new_features = CoverEntityFeature(features) - self._report_deprecated_supported_features_values(new_features) - return new_features return features supported_features = ( diff --git a/tests/components/cover/test_init.py b/tests/components/cover/test_init.py index 646c44e4ac2..e43b64b16a7 100644 --- a/tests/components/cover/test_init.py +++ b/tests/components/cover/test_init.py @@ -2,8 +2,6 @@ from enum import Enum -import pytest - from homeassistant.components import cover from homeassistant.components.cover import CoverState from homeassistant.const import ATTR_ENTITY_ID, CONF_PLATFORM, SERVICE_TOGGLE @@ -155,20 +153,3 @@ def _create_tuples(enum: type[Enum], constant_prefix: str) -> list[tuple[Enum, s def test_all() -> None: """Test module.__all__ is correctly set.""" help_test_all(cover) - - -def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: - """Test deprecated supported features ints.""" - - class MockCoverEntity(cover.CoverEntity): - _attr_supported_features = 1 - - entity = MockCoverEntity() - assert entity.supported_features is cover.CoverEntityFeature(1) - assert "MockCoverEntity" in caplog.text - assert "is using deprecated supported features values" in caplog.text - assert "Instead it should use" in caplog.text - assert "CoverEntityFeature.OPEN" in caplog.text - caplog.clear() - assert entity.supported_features is cover.CoverEntityFeature(1) - assert "is using deprecated supported features values" not in caplog.text From b1379f6a8979b6b628101f40e38eb7ae0becddd9 Mon Sep 17 00:00:00 2001 From: epenet 
<6771947+epenet@users.noreply.github.com> Date: Thu, 5 Dec 2024 21:20:02 +0100 Subject: [PATCH 0281/1198] Avoid access to `self.context["source"]` in integration config flows (#132355) * Avoid access to `self.context["source"]` in integration config flows * One more * One more --- homeassistant/components/cert_expiry/config_flow.py | 2 +- homeassistant/components/hive/config_flow.py | 4 ++-- homeassistant/components/vizio/config_flow.py | 8 ++++---- homeassistant/components/zwave_js/config_flow.py | 2 +- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/cert_expiry/config_flow.py b/homeassistant/components/cert_expiry/config_flow.py index 22d443c700d..3fbb1c08c9b 100644 --- a/homeassistant/components/cert_expiry/config_flow.py +++ b/homeassistant/components/cert_expiry/config_flow.py @@ -74,7 +74,7 @@ class CertexpiryConfigFlow(ConfigFlow, domain=DOMAIN): title=title, data={CONF_HOST: host, CONF_PORT: port}, ) - if self.context["source"] == SOURCE_IMPORT: + if self.source == SOURCE_IMPORT: _LOGGER.error("Config import failed for %s", user_input[CONF_HOST]) return self.async_abort(reason="import_failed") else: diff --git a/homeassistant/components/hive/config_flow.py b/homeassistant/components/hive/config_flow.py index a997954f4cc..8df9a635302 100644 --- a/homeassistant/components/hive/config_flow.py +++ b/homeassistant/components/hive/config_flow.py @@ -104,7 +104,7 @@ class HiveFlowHandler(ConfigFlow, domain=DOMAIN): errors["base"] = "no_internet_available" if not errors: - if self.context["source"] == SOURCE_REAUTH: + if self.source == SOURCE_REAUTH: return await self.async_setup_hive_entry() self.device_registration = True return await self.async_step_configuration() @@ -144,7 +144,7 @@ class HiveFlowHandler(ConfigFlow, domain=DOMAIN): # Setup the config entry self.data["tokens"] = self.tokens - if self.context["source"] == SOURCE_REAUTH: + if self.source == SOURCE_REAUTH: assert self.entry self.hass.config_entries.async_update_entry( self.entry, title=self.data["username"], data=self.data diff --git a/homeassistant/components/vizio/config_flow.py b/homeassistant/components/vizio/config_flow.py index 49f6a709565..54031930503 100644 --- a/homeassistant/components/vizio/config_flow.py +++ b/homeassistant/components/vizio/config_flow.py @@ -231,7 +231,7 @@ class VizioConfigFlow(ConfigFlow, domain=DOMAIN): errors[CONF_HOST] = "existing_config_entry_found" if not errors: - if self._must_show_form and self.context["source"] == SOURCE_ZEROCONF: + if self._must_show_form and self.source == SOURCE_ZEROCONF: # Discovery should always display the config form before trying to # create entry so that user can update default config options self._must_show_form = False @@ -251,7 +251,7 @@ class VizioConfigFlow(ConfigFlow, domain=DOMAIN): if not errors: return await self._create_entry(user_input) - elif self._must_show_form and self.context["source"] == SOURCE_IMPORT: + elif self._must_show_form and self.source == SOURCE_IMPORT: # Import should always display the config form if CONF_ACCESS_TOKEN # wasn't included but is needed so that the user can choose to update # their configuration.yaml or to proceed with config flow pairing. 
We @@ -272,7 +272,7 @@ class VizioConfigFlow(ConfigFlow, domain=DOMAIN): schema = self._user_schema or _get_config_schema() - if errors and self.context["source"] == SOURCE_IMPORT: + if errors and self.source == SOURCE_IMPORT: # Log an error message if import config flow fails since otherwise failure is silent _LOGGER.error( "Importing from configuration.yaml failed: %s", @@ -434,7 +434,7 @@ class VizioConfigFlow(ConfigFlow, domain=DOMAIN): self._data[CONF_ACCESS_TOKEN] = pair_data.auth_token self._must_show_form = True - if self.context["source"] == SOURCE_IMPORT: + if self.source == SOURCE_IMPORT: # If user is pairing via config import, show different message return await self.async_step_pairing_complete_import() diff --git a/homeassistant/components/zwave_js/config_flow.py b/homeassistant/components/zwave_js/config_flow.py index 36f208e18d5..711eb14070d 100644 --- a/homeassistant/components/zwave_js/config_flow.py +++ b/homeassistant/components/zwave_js/config_flow.py @@ -671,7 +671,7 @@ class ZWaveJSConfigFlow(BaseZwaveJSFlow, ConfigFlow, domain=DOMAIN): discovery_info = await self._async_get_addon_discovery_info() self.ws_address = f"ws://{discovery_info['host']}:{discovery_info['port']}" - if not self.unique_id or self.context["source"] == SOURCE_USB: + if not self.unique_id or self.source == SOURCE_USB: if not self.version_info: try: self.version_info = await async_get_version_info( From 768c2b0f3dc3f33a040839c85db57985e19ab657 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Thu, 5 Dec 2024 21:46:59 +0100 Subject: [PATCH 0282/1198] Remove _enable_turn_on_off_backwards_compatibility A-F (#132417) Remove _enable_turn_on_off_backwards_compatibility A-G --- homeassistant/components/adax/climate.py | 1 - homeassistant/components/advantage_air/climate.py | 2 -- homeassistant/components/airtouch4/climate.py | 2 -- homeassistant/components/airtouch5/climate.py | 1 - homeassistant/components/airzone/climate.py | 1 - homeassistant/components/airzone_cloud/climate.py | 1 - homeassistant/components/atag/climate.py | 1 - homeassistant/components/baf/climate.py | 1 - homeassistant/components/balboa/climate.py | 1 - homeassistant/components/blebox/climate.py | 1 - homeassistant/components/broadlink/climate.py | 1 - homeassistant/components/bryant_evolution/climate.py | 1 - homeassistant/components/bsblan/climate.py | 1 - homeassistant/components/ccm15/climate.py | 1 - homeassistant/components/comelit/climate.py | 1 - homeassistant/components/coolmaster/climate.py | 1 - homeassistant/components/daikin/climate.py | 1 - homeassistant/components/deconz/climate.py | 1 - homeassistant/components/demo/climate.py | 1 - homeassistant/components/devolo_home_control/climate.py | 1 - homeassistant/components/duotecno/climate.py | 1 - homeassistant/components/ecobee/climate.py | 1 - homeassistant/components/econet/climate.py | 1 - homeassistant/components/electrasmart/climate.py | 1 - homeassistant/components/elkm1/climate.py | 1 - homeassistant/components/ephember/climate.py | 1 - homeassistant/components/escea/climate.py | 1 - homeassistant/components/esphome/climate.py | 1 - homeassistant/components/evohome/climate.py | 1 - homeassistant/components/fibaro/climate.py | 2 -- homeassistant/components/flexit/climate.py | 1 - homeassistant/components/flexit_bacnet/climate.py | 1 - homeassistant/components/freedompro/climate.py | 1 - homeassistant/components/fritzbox/climate.py | 1 - homeassistant/components/fujitsu_fglair/climate.py | 2 -- 35 files changed, 39 deletions(-) diff --git 
a/homeassistant/components/adax/climate.py b/homeassistant/components/adax/climate.py index ac381ff46d5..15022ba3c9f 100644 --- a/homeassistant/components/adax/climate.py +++ b/homeassistant/components/adax/climate.py @@ -75,7 +75,6 @@ class AdaxDevice(ClimateEntity): ) _attr_target_temperature_step = PRECISION_WHOLE _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, heater_data: dict[str, Any], adax_data_handler: Adax) -> None: """Initialize the heater.""" diff --git a/homeassistant/components/advantage_air/climate.py b/homeassistant/components/advantage_air/climate.py index 8da46cc7463..d07a3182ed7 100644 --- a/homeassistant/components/advantage_air/climate.py +++ b/homeassistant/components/advantage_air/climate.py @@ -102,7 +102,6 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity): _attr_max_temp = 32 _attr_min_temp = 16 _attr_name = None - _enable_turn_on_off_backwards_compatibility = False _support_preset = ClimateEntityFeature(0) def __init__(self, instance: AdvantageAirData, ac_key: str) -> None: @@ -261,7 +260,6 @@ class AdvantageAirZone(AdvantageAirZoneEntity, ClimateEntity): _attr_target_temperature_step = PRECISION_WHOLE _attr_max_temp = 32 _attr_min_temp = 16 - _enable_turn_on_off_backwards_compatibility = False def __init__(self, instance: AdvantageAirData, ac_key: str, zone_key: str) -> None: """Initialize an AdvantageAir Zone control.""" diff --git a/homeassistant/components/airtouch4/climate.py b/homeassistant/components/airtouch4/climate.py index dbb6f02859b..0af920bd7a9 100644 --- a/homeassistant/components/airtouch4/climate.py +++ b/homeassistant/components/airtouch4/climate.py @@ -95,7 +95,6 @@ class AirtouchAC(CoordinatorEntity, ClimateEntity): | ClimateEntityFeature.TURN_ON ) _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator, ac_number, info): """Initialize the climate device.""" @@ -205,7 +204,6 @@ class AirtouchGroup(CoordinatorEntity, ClimateEntity): ) _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_hvac_modes = AT_GROUP_MODES - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator, group_number, info): """Initialize the climate device.""" diff --git a/homeassistant/components/airtouch5/climate.py b/homeassistant/components/airtouch5/climate.py index dfc34c1beaf..16566f5d664 100644 --- a/homeassistant/components/airtouch5/climate.py +++ b/homeassistant/components/airtouch5/climate.py @@ -124,7 +124,6 @@ class Airtouch5ClimateEntity(ClimateEntity, Airtouch5Entity): _attr_translation_key = DOMAIN _attr_target_temperature_step = 1 _attr_name = None - _enable_turn_on_off_backwards_compatibility = False class Airtouch5AC(Airtouch5ClimateEntity): diff --git a/homeassistant/components/airzone/climate.py b/homeassistant/components/airzone/climate.py index 6be7416bbb0..4ed54286cff 100644 --- a/homeassistant/components/airzone/climate.py +++ b/homeassistant/components/airzone/climate.py @@ -136,7 +136,6 @@ class AirzoneClimate(AirzoneZoneEntity, ClimateEntity): _attr_name = None _speeds: dict[int, str] = {} _speeds_reverse: dict[str, int] = {} - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/airzone_cloud/climate.py b/homeassistant/components/airzone_cloud/climate.py index 5ee15ff6819..b98473072e4 100644 --- a/homeassistant/components/airzone_cloud/climate.py +++ 
b/homeassistant/components/airzone_cloud/climate.py @@ -177,7 +177,6 @@ class AirzoneClimate(AirzoneEntity, ClimateEntity): _attr_name = None _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def _init_attributes(self) -> None: """Init common climate device attributes.""" diff --git a/homeassistant/components/atag/climate.py b/homeassistant/components/atag/climate.py index daeb64f7f0a..a362b71fbc8 100644 --- a/homeassistant/components/atag/climate.py +++ b/homeassistant/components/atag/climate.py @@ -46,7 +46,6 @@ class AtagThermostat(AtagEntity, ClimateEntity): _attr_supported_features = ( ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE ) - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator: AtagDataUpdateCoordinator, atag_id: str) -> None: """Initialize an Atag climate device.""" diff --git a/homeassistant/components/baf/climate.py b/homeassistant/components/baf/climate.py index 38407813d37..c30d49e8c9d 100644 --- a/homeassistant/components/baf/climate.py +++ b/homeassistant/components/baf/climate.py @@ -40,7 +40,6 @@ class BAFAutoComfort(BAFEntity, ClimateEntity): _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_hvac_modes = [HVACMode.OFF, HVACMode.FAN_ONLY] _attr_translation_key = "auto_comfort" - _enable_turn_on_off_backwards_compatibility = False @callback def _async_update_attrs(self) -> None: diff --git a/homeassistant/components/balboa/climate.py b/homeassistant/components/balboa/climate.py index d27fd459676..76b02f0e165 100644 --- a/homeassistant/components/balboa/climate.py +++ b/homeassistant/components/balboa/climate.py @@ -65,7 +65,6 @@ class BalboaClimateEntity(BalboaEntity, ClimateEntity): ) _attr_translation_key = DOMAIN _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__(self, client: SpaClient) -> None: """Initialize the climate entity.""" diff --git a/homeassistant/components/blebox/climate.py b/homeassistant/components/blebox/climate.py index e04503974b7..2c528d50e3e 100644 --- a/homeassistant/components/blebox/climate.py +++ b/homeassistant/components/blebox/climate.py @@ -57,7 +57,6 @@ class BleBoxClimateEntity(BleBoxEntity[blebox_uniapi.climate.Climate], ClimateEn | ClimateEntityFeature.TURN_ON ) _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False @property def hvac_modes(self): diff --git a/homeassistant/components/broadlink/climate.py b/homeassistant/components/broadlink/climate.py index dbfd982795c..25a6bbd60a5 100644 --- a/homeassistant/components/broadlink/climate.py +++ b/homeassistant/components/broadlink/climate.py @@ -52,7 +52,6 @@ class BroadlinkThermostat(BroadlinkEntity, ClimateEntity): ) _attr_target_temperature_step = PRECISION_HALVES _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, device: BroadlinkDevice) -> None: """Initialize the climate entity.""" diff --git a/homeassistant/components/bryant_evolution/climate.py b/homeassistant/components/bryant_evolution/climate.py index dd31097a1ee..2d54ced8217 100644 --- a/homeassistant/components/bryant_evolution/climate.py +++ b/homeassistant/components/bryant_evolution/climate.py @@ -77,7 +77,6 @@ class BryantEvolutionClimate(ClimateEntity): HVACMode.OFF, ] _attr_fan_modes = ["auto", "low", "med", "high"] - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git 
a/homeassistant/components/bsblan/climate.py b/homeassistant/components/bsblan/climate.py index 6d992da395a..2833d6549b4 100644 --- a/homeassistant/components/bsblan/climate.py +++ b/homeassistant/components/bsblan/climate.py @@ -65,7 +65,6 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity): _attr_preset_modes = PRESET_MODES _attr_hvac_modes = HVAC_MODES - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/ccm15/climate.py b/homeassistant/components/ccm15/climate.py index a6e5d2cab61..3db8c3e1016 100644 --- a/homeassistant/components/ccm15/climate.py +++ b/homeassistant/components/ccm15/climate.py @@ -70,7 +70,6 @@ class CCM15Climate(CoordinatorEntity[CCM15Coordinator], ClimateEntity): | ClimateEntityFeature.TURN_ON ) _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__( self, ac_host: str, ac_index: int, coordinator: CCM15Coordinator diff --git a/homeassistant/components/comelit/climate.py b/homeassistant/components/comelit/climate.py index 0b88367c0fa..6dc7c7e26d9 100644 --- a/homeassistant/components/comelit/climate.py +++ b/homeassistant/components/comelit/climate.py @@ -100,7 +100,6 @@ class ComelitClimateEntity(CoordinatorEntity[ComelitSerialBridge], ClimateEntity _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_has_entity_name = True _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/coolmaster/climate.py b/homeassistant/components/coolmaster/climate.py index d3cb7122109..29be416d57e 100644 --- a/homeassistant/components/coolmaster/climate.py +++ b/homeassistant/components/coolmaster/climate.py @@ -55,7 +55,6 @@ class CoolmasterClimate(CoolmasterEntity, ClimateEntity): """Representation of a coolmaster climate device.""" _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator, unit_id, info, supported_modes): """Initialize the climate device.""" diff --git a/homeassistant/components/daikin/climate.py b/homeassistant/components/daikin/climate.py index 39e92ab1921..751683656f2 100644 --- a/homeassistant/components/daikin/climate.py +++ b/homeassistant/components/daikin/climate.py @@ -104,7 +104,6 @@ class DaikinClimate(DaikinEntity, ClimateEntity): _attr_target_temperature_step = 1 _attr_fan_modes: list[str] _attr_swing_modes: list[str] - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator: DaikinCoordinator) -> None: """Initialize the climate device.""" diff --git a/homeassistant/components/deconz/climate.py b/homeassistant/components/deconz/climate.py index 1e228dc6c48..690f943379d 100644 --- a/homeassistant/components/deconz/climate.py +++ b/homeassistant/components/deconz/climate.py @@ -101,7 +101,6 @@ class DeconzThermostat(DeconzDevice[Thermostat], ClimateEntity): TYPE = CLIMATE_DOMAIN _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, device: Thermostat, hub: DeconzHub) -> None: """Set up thermostat device.""" diff --git a/homeassistant/components/demo/climate.py b/homeassistant/components/demo/climate.py index 5424591f021..d5b763caa5a 100644 --- a/homeassistant/components/demo/climate.py +++ b/homeassistant/components/demo/climate.py @@ -98,7 +98,6 @@ class DemoClimate(ClimateEntity): _attr_name = None _attr_should_poll = False _attr_translation_key = "ubercool" - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git 
a/homeassistant/components/devolo_home_control/climate.py b/homeassistant/components/devolo_home_control/climate.py index 29177ae2437..1f407eb6804 100644 --- a/homeassistant/components/devolo_home_control/climate.py +++ b/homeassistant/components/devolo_home_control/climate.py @@ -56,7 +56,6 @@ class DevoloClimateDeviceEntity(DevoloMultiLevelSwitchDeviceEntity, ClimateEntit _attr_precision = PRECISION_TENTHS _attr_hvac_mode = HVACMode.HEAT _attr_hvac_modes = [HVACMode.HEAT] - _enable_turn_on_off_backwards_compatibility = False def __init__( self, homecontrol: HomeControl, device_instance: Zwave, element_uid: str diff --git a/homeassistant/components/duotecno/climate.py b/homeassistant/components/duotecno/climate.py index 77b602c8716..0355d2855d3 100644 --- a/homeassistant/components/duotecno/climate.py +++ b/homeassistant/components/duotecno/climate.py @@ -57,7 +57,6 @@ class DuotecnoClimate(DuotecnoEntity, ClimateEntity): _attr_hvac_modes = list(HVACMODE_REVERSE) _attr_preset_modes = list(PRESETMODES) _attr_translation_key = "duotecno" - _enable_turn_on_off_backwards_compatibility = False @property def current_temperature(self) -> float | None: diff --git a/homeassistant/components/ecobee/climate.py b/homeassistant/components/ecobee/climate.py index 6a9ec0d5db9..709926d8496 100644 --- a/homeassistant/components/ecobee/climate.py +++ b/homeassistant/components/ecobee/climate.py @@ -353,7 +353,6 @@ class Thermostat(ClimateEntity): _attr_fan_modes = [FAN_AUTO, FAN_ON] _attr_name = None _attr_has_entity_name = True - _enable_turn_on_off_backwards_compatibility = False _attr_translation_key = "ecobee" def __init__( diff --git a/homeassistant/components/econet/climate.py b/homeassistant/components/econet/climate.py index bac123bf206..cdf82f6817f 100644 --- a/homeassistant/components/econet/climate.py +++ b/homeassistant/components/econet/climate.py @@ -68,7 +68,6 @@ class EcoNetThermostat(EcoNetEntity, ClimateEntity): _attr_should_poll = True _attr_temperature_unit = UnitOfTemperature.FAHRENHEIT - _enable_turn_on_off_backwards_compatibility = False def __init__(self, thermostat): """Initialize.""" diff --git a/homeassistant/components/electrasmart/climate.py b/homeassistant/components/electrasmart/climate.py index 81a07545a30..04e4742554b 100644 --- a/homeassistant/components/electrasmart/climate.py +++ b/homeassistant/components/electrasmart/climate.py @@ -111,7 +111,6 @@ class ElectraClimateEntity(ClimateEntity): _attr_hvac_modes = ELECTRA_MODES _attr_has_entity_name = True _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__(self, device: ElectraAirConditioner, api: ElectraAPI) -> None: """Initialize Electra climate entity.""" diff --git a/homeassistant/components/elkm1/climate.py b/homeassistant/components/elkm1/climate.py index bf5650f237b..1448acc6079 100644 --- a/homeassistant/components/elkm1/climate.py +++ b/homeassistant/components/elkm1/climate.py @@ -90,7 +90,6 @@ class ElkThermostat(ElkEntity, ClimateEntity): _attr_target_temperature_step = 1 _attr_fan_modes = [FAN_AUTO, FAN_ON] _element: Thermostat - _enable_turn_on_off_backwards_compatibility = False @property def temperature_unit(self) -> str: diff --git a/homeassistant/components/ephember/climate.py b/homeassistant/components/ephember/climate.py index 44e5986970d..cedad8b76e2 100644 --- a/homeassistant/components/ephember/climate.py +++ b/homeassistant/components/ephember/climate.py @@ -84,7 +84,6 @@ class EphEmberThermostat(ClimateEntity): _attr_hvac_modes = OPERATION_LIST _attr_temperature_unit 
= UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, ember, zone): """Initialize the thermostat.""" diff --git a/homeassistant/components/escea/climate.py b/homeassistant/components/escea/climate.py index 555da1494d7..c3fb0015e68 100644 --- a/homeassistant/components/escea/climate.py +++ b/homeassistant/components/escea/climate.py @@ -89,7 +89,6 @@ class ControllerEntity(ClimateEntity): ) _attr_target_temperature_step = PRECISION_WHOLE _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, controller: Controller) -> None: """Initialise ControllerDevice.""" diff --git a/homeassistant/components/esphome/climate.py b/homeassistant/components/esphome/climate.py index 1b9b53f24cd..8089fc4712a 100644 --- a/homeassistant/components/esphome/climate.py +++ b/homeassistant/components/esphome/climate.py @@ -129,7 +129,6 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key = "climate" - _enable_turn_on_off_backwards_compatibility = False @callback def _on_static_info_update(self, static_info: EntityInfo) -> None: diff --git a/homeassistant/components/evohome/climate.py b/homeassistant/components/evohome/climate.py index 1388585bc17..c71831fa4bc 100644 --- a/homeassistant/components/evohome/climate.py +++ b/homeassistant/components/evohome/climate.py @@ -150,7 +150,6 @@ class EvoClimateEntity(EvoDevice, ClimateEntity): _attr_hvac_modes = [HVACMode.OFF, HVACMode.HEAT] _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False class EvoZone(EvoChild, EvoClimateEntity): diff --git a/homeassistant/components/fibaro/climate.py b/homeassistant/components/fibaro/climate.py index 0bfc2223317..2541781773c 100644 --- a/homeassistant/components/fibaro/climate.py +++ b/homeassistant/components/fibaro/climate.py @@ -128,8 +128,6 @@ async def async_setup_entry( class FibaroThermostat(FibaroEntity, ClimateEntity): """Representation of a Fibaro Thermostat.""" - _enable_turn_on_off_backwards_compatibility = False - def __init__(self, fibaro_device: DeviceModel) -> None: """Initialize the Fibaro device.""" super().__init__(fibaro_device) diff --git a/homeassistant/components/flexit/climate.py b/homeassistant/components/flexit/climate.py index d456fbef6fc..8be5df4eca7 100644 --- a/homeassistant/components/flexit/climate.py +++ b/homeassistant/components/flexit/climate.py @@ -70,7 +70,6 @@ class Flexit(ClimateEntity): ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.FAN_MODE ) _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__( self, hub: ModbusHub, modbus_slave: int | None, name: str | None diff --git a/homeassistant/components/flexit_bacnet/climate.py b/homeassistant/components/flexit_bacnet/climate.py index 0526a0d6bd3..a2291dea9d6 100644 --- a/homeassistant/components/flexit_bacnet/climate.py +++ b/homeassistant/components/flexit_bacnet/climate.py @@ -74,7 +74,6 @@ class FlexitClimateEntity(FlexitEntity, ClimateEntity): _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_max_temp = MAX_TEMP _attr_min_temp = MIN_TEMP - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator: FlexitCoordinator) -> None: """Initialize the Flexit unit.""" diff --git a/homeassistant/components/freedompro/climate.py b/homeassistant/components/freedompro/climate.py 
index d534db7e858..a5b0144ce0c 100644 --- a/homeassistant/components/freedompro/climate.py +++ b/homeassistant/components/freedompro/climate.py @@ -73,7 +73,6 @@ class Device(CoordinatorEntity[FreedomproDataUpdateCoordinator], ClimateEntity): _attr_current_temperature = 0 _attr_target_temperature = 0 _attr_hvac_mode = HVACMode.OFF - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/fritzbox/climate.py b/homeassistant/components/fritzbox/climate.py index 924d92d6c5b..d5a81fdef1a 100644 --- a/homeassistant/components/fritzbox/climate.py +++ b/homeassistant/components/fritzbox/climate.py @@ -88,7 +88,6 @@ class FritzboxThermostat(FritzBoxDeviceEntity, ClimateEntity): _attr_precision = PRECISION_HALVES _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key = "thermostat" - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/fujitsu_fglair/climate.py b/homeassistant/components/fujitsu_fglair/climate.py index 726096eab1a..5359075c728 100644 --- a/homeassistant/components/fujitsu_fglair/climate.py +++ b/homeassistant/components/fujitsu_fglair/climate.py @@ -81,8 +81,6 @@ class FGLairDevice(CoordinatorEntity[FGLairCoordinator], ClimateEntity): _attr_has_entity_name = True _attr_name = None - _enable_turn_on_off_backwards_compatibility: bool = False - def __init__(self, coordinator: FGLairCoordinator, device: FujitsuHVAC) -> None: """Store the representation of the device and set the static attributes.""" super().__init__(coordinator, context=device.device_serial_number) From ee6be6bfd600dd3bc289c026100ebb9e6d99a430 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Thu, 5 Dec 2024 21:47:13 +0100 Subject: [PATCH 0283/1198] Remove _enable_turn_on_off_backwards_compatibility G-M (#132418) --- homeassistant/components/generic_thermostat/climate.py | 1 - homeassistant/components/geniushub/climate.py | 1 - homeassistant/components/gree/climate.py | 1 - homeassistant/components/heatmiser/climate.py | 1 - homeassistant/components/hisense_aehw4a1/climate.py | 1 - homeassistant/components/hive/climate.py | 1 - homeassistant/components/homekit_controller/climate.py | 1 - homeassistant/components/homematic/climate.py | 1 - homeassistant/components/homematicip_cloud/climate.py | 1 - homeassistant/components/honeywell/climate.py | 1 - homeassistant/components/huum/climate.py | 1 - homeassistant/components/iaqualink/climate.py | 1 - homeassistant/components/incomfort/climate.py | 1 - homeassistant/components/insteon/climate.py | 1 - homeassistant/components/intellifire/climate.py | 1 - homeassistant/components/intesishome/climate.py | 1 - homeassistant/components/isy994/climate.py | 1 - homeassistant/components/izone/climate.py | 1 - homeassistant/components/knx/climate.py | 1 - homeassistant/components/lcn/climate.py | 2 -- homeassistant/components/lightwave/climate.py | 1 - homeassistant/components/livisi/climate.py | 1 - homeassistant/components/lookin/climate.py | 1 - homeassistant/components/lyric/climate.py | 1 - homeassistant/components/matter/climate.py | 2 +- homeassistant/components/maxcube/climate.py | 1 - homeassistant/components/melcloud/climate.py | 1 - homeassistant/components/melissa/climate.py | 1 - homeassistant/components/mill/climate.py | 2 -- homeassistant/components/modbus/climate.py | 1 - homeassistant/components/moehlenhoff_alpha2/climate.py | 1 - homeassistant/components/mqtt/climate.py | 1 - homeassistant/components/mysensors/climate.py | 1 - 33 files 
changed, 1 insertion(+), 35 deletions(-) diff --git a/homeassistant/components/generic_thermostat/climate.py b/homeassistant/components/generic_thermostat/climate.py index f82da4483eb..dd6829eacce 100644 --- a/homeassistant/components/generic_thermostat/climate.py +++ b/homeassistant/components/generic_thermostat/climate.py @@ -205,7 +205,6 @@ class GenericThermostat(ClimateEntity, RestoreEntity): """Representation of a Generic Thermostat device.""" _attr_should_poll = False - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/geniushub/climate.py b/homeassistant/components/geniushub/climate.py index 99d1bde8099..e20d649541e 100644 --- a/homeassistant/components/geniushub/climate.py +++ b/homeassistant/components/geniushub/climate.py @@ -51,7 +51,6 @@ class GeniusClimateZone(GeniusHeatingZone, ClimateEntity): | ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__(self, broker, zone) -> None: """Initialize the climate device.""" diff --git a/homeassistant/components/gree/climate.py b/homeassistant/components/gree/climate.py index 6a8f48780c8..f197f21a4e1 100644 --- a/homeassistant/components/gree/climate.py +++ b/homeassistant/components/gree/climate.py @@ -126,7 +126,6 @@ class GreeClimateEntity(GreeEntity, ClimateEntity): _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_min_temp = TEMP_MIN _attr_max_temp = TEMP_MAX - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator: DeviceDataUpdateCoordinator) -> None: """Initialize the Gree device.""" diff --git a/homeassistant/components/heatmiser/climate.py b/homeassistant/components/heatmiser/climate.py index 1102dbc0c74..de66315a467 100644 --- a/homeassistant/components/heatmiser/climate.py +++ b/homeassistant/components/heatmiser/climate.py @@ -82,7 +82,6 @@ class HeatmiserV3Thermostat(ClimateEntity): | ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__(self, therm, device, uh1): """Initialize the thermostat.""" diff --git a/homeassistant/components/hisense_aehw4a1/climate.py b/homeassistant/components/hisense_aehw4a1/climate.py index 656ba6c68c0..68f79439162 100644 --- a/homeassistant/components/hisense_aehw4a1/climate.py +++ b/homeassistant/components/hisense_aehw4a1/climate.py @@ -155,7 +155,6 @@ class ClimateAehW4a1(ClimateEntity): _attr_target_temperature_step = 1 _previous_state: HVACMode | str | None = None _on: str | None = None - _enable_turn_on_off_backwards_compatibility = False def __init__(self, device): """Initialize the climate device.""" diff --git a/homeassistant/components/hive/climate.py b/homeassistant/components/hive/climate.py index 4e5ea95f2fa..c76379cf940 100644 --- a/homeassistant/components/hive/climate.py +++ b/homeassistant/components/hive/climate.py @@ -100,7 +100,6 @@ class HiveClimateEntity(HiveEntity, ClimateEntity): | ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__(self, hive: Hive, hive_device: dict[str, Any]) -> None: """Initialize the Climate device.""" diff --git a/homeassistant/components/homekit_controller/climate.py b/homeassistant/components/homekit_controller/climate.py index 4e55c8212be..ba5237e6e2d 100644 --- a/homeassistant/components/homekit_controller/climate.py +++ b/homeassistant/components/homekit_controller/climate.py @@ -136,7 +136,6 @@ class 
HomeKitBaseClimateEntity(HomeKitEntity, ClimateEntity): """The base HomeKit Controller climate entity.""" _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False @callback def _async_reconfigure(self) -> None: diff --git a/homeassistant/components/homematic/climate.py b/homeassistant/components/homematic/climate.py index 2be28487cbb..6e16e16ba99 100644 --- a/homeassistant/components/homematic/climate.py +++ b/homeassistant/components/homematic/climate.py @@ -63,7 +63,6 @@ class HMThermostat(HMDevice, ClimateEntity): | ClimateEntityFeature.TURN_ON ) _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False @property def hvac_mode(self) -> HVACMode: diff --git a/homeassistant/components/homematicip_cloud/climate.py b/homeassistant/components/homematicip_cloud/climate.py index f6a69f50770..e7132fac83c 100644 --- a/homeassistant/components/homematicip_cloud/climate.py +++ b/homeassistant/components/homematicip_cloud/climate.py @@ -81,7 +81,6 @@ class HomematicipHeatingGroup(HomematicipGenericEntity, ClimateEntity): ClimateEntityFeature.PRESET_MODE | ClimateEntityFeature.TARGET_TEMPERATURE ) _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, hap: HomematicipHAP, device: AsyncHeatingGroup) -> None: """Initialize heating group.""" diff --git a/homeassistant/components/honeywell/climate.py b/homeassistant/components/honeywell/climate.py index 9f6b7682470..7398ada23be 100644 --- a/homeassistant/components/honeywell/climate.py +++ b/homeassistant/components/honeywell/climate.py @@ -165,7 +165,6 @@ class HoneywellUSThermostat(ClimateEntity): _attr_has_entity_name = True _attr_name = None _attr_translation_key = "honeywell" - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/huum/climate.py b/homeassistant/components/huum/climate.py index df740aea3d1..7e0e4ce5ef1 100644 --- a/homeassistant/components/huum/climate.py +++ b/homeassistant/components/huum/climate.py @@ -56,7 +56,6 @@ class HuumDevice(ClimateEntity): _target_temperature: int | None = None _status: HuumStatusResponse | None = None - _enable_turn_on_off_backwards_compatibility = False def __init__(self, huum_handler: Huum, unique_id: str) -> None: """Initialize the heater.""" diff --git a/homeassistant/components/iaqualink/climate.py b/homeassistant/components/iaqualink/climate.py index 78da1eff071..53d1bce80de 100644 --- a/homeassistant/components/iaqualink/climate.py +++ b/homeassistant/components/iaqualink/climate.py @@ -54,7 +54,6 @@ class HassAqualinkThermostat(AqualinkEntity, ClimateEntity): | ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__(self, dev: AqualinkThermostat) -> None: """Initialize AquaLink thermostat.""" diff --git a/homeassistant/components/incomfort/climate.py b/homeassistant/components/incomfort/climate.py index eccf03588dc..41470180051 100644 --- a/homeassistant/components/incomfort/climate.py +++ b/homeassistant/components/incomfort/climate.py @@ -46,7 +46,6 @@ class InComfortClimate(IncomfortEntity, ClimateEntity): _attr_hvac_modes = [HVACMode.HEAT] _attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/insteon/climate.py 
b/homeassistant/components/insteon/climate.py index 3db8edbf1c9..506841e7efb 100644 --- a/homeassistant/components/insteon/climate.py +++ b/homeassistant/components/insteon/climate.py @@ -94,7 +94,6 @@ class InsteonClimateEntity(InsteonEntity, ClimateEntity): _attr_hvac_modes = list(HVAC_MODES.values()) _attr_fan_modes = list(FAN_MODES.values()) _attr_min_humidity = 1 - _enable_turn_on_off_backwards_compatibility = False @property def temperature_unit(self) -> str: diff --git a/homeassistant/components/intellifire/climate.py b/homeassistant/components/intellifire/climate.py index 4eddde5ff10..f72df254424 100644 --- a/homeassistant/components/intellifire/climate.py +++ b/homeassistant/components/intellifire/climate.py @@ -58,7 +58,6 @@ class IntellifireClimate(IntellifireEntity, ClimateEntity): _attr_target_temperature_step = 1.0 _attr_temperature_unit = UnitOfTemperature.CELSIUS last_temp = DEFAULT_THERMOSTAT_TEMP - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/intesishome/climate.py b/homeassistant/components/intesishome/climate.py index 82b653a34c7..1a1f58a6b80 100644 --- a/homeassistant/components/intesishome/climate.py +++ b/homeassistant/components/intesishome/climate.py @@ -147,7 +147,6 @@ class IntesisAC(ClimateEntity): _attr_should_poll = False _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, ih_device_id, ih_device, controller): """Initialize the thermostat.""" diff --git a/homeassistant/components/isy994/climate.py b/homeassistant/components/isy994/climate.py index d4376b5a3b4..d5deba56284 100644 --- a/homeassistant/components/isy994/climate.py +++ b/homeassistant/components/isy994/climate.py @@ -88,7 +88,6 @@ class ISYThermostatEntity(ISYNodeEntity, ClimateEntity): ) _attr_target_temperature_step = 1.0 _attr_fan_modes = [FAN_AUTO, FAN_ON] - _enable_turn_on_off_backwards_compatibility = False def __init__(self, node: Node, device_info: DeviceInfo | None = None) -> None: """Initialize the ISY Thermostat entity.""" diff --git a/homeassistant/components/izone/climate.py b/homeassistant/components/izone/climate.py index 2a602939250..e61917c825b 100644 --- a/homeassistant/components/izone/climate.py +++ b/homeassistant/components/izone/climate.py @@ -141,7 +141,6 @@ class ControllerDevice(ClimateEntity): _attr_has_entity_name = True _attr_name = None _attr_target_temperature_step = 0.5 - _enable_turn_on_off_backwards_compatibility = False def __init__(self, controller: Controller) -> None: """Initialise ControllerDevice.""" diff --git a/homeassistant/components/knx/climate.py b/homeassistant/components/knx/climate.py index 0e0da4d5c0c..af58dd6ef4d 100644 --- a/homeassistant/components/knx/climate.py +++ b/homeassistant/components/knx/climate.py @@ -148,7 +148,6 @@ class KNXClimate(KnxYamlEntity, ClimateEntity): _device: XknxClimate _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key = "knx_climate" - _enable_turn_on_off_backwards_compatibility = False def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize of a KNX climate device.""" diff --git a/homeassistant/components/lcn/climate.py b/homeassistant/components/lcn/climate.py index 1c7472bc4e3..360b732c02e 100644 --- a/homeassistant/components/lcn/climate.py +++ b/homeassistant/components/lcn/climate.py @@ -81,8 +81,6 @@ async def async_setup_entry( class LcnClimate(LcnEntity, ClimateEntity): """Representation of a LCN climate device.""" - 
_enable_turn_on_off_backwards_compatibility = False - def __init__(self, config: ConfigType, config_entry: ConfigEntry) -> None: """Initialize of a LCN climate device.""" super().__init__(config, config_entry) diff --git a/homeassistant/components/lightwave/climate.py b/homeassistant/components/lightwave/climate.py index 1016e8ce80d..942fb4a1fbc 100644 --- a/homeassistant/components/lightwave/climate.py +++ b/homeassistant/components/lightwave/climate.py @@ -55,7 +55,6 @@ class LightwaveTrv(ClimateEntity): ) _attr_target_temperature_step = 0.5 _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, name, device_id, lwlink, serial): """Initialize LightwaveTrv entity.""" diff --git a/homeassistant/components/livisi/climate.py b/homeassistant/components/livisi/climate.py index 5d70936fc53..3ecdcb486c0 100644 --- a/homeassistant/components/livisi/climate.py +++ b/homeassistant/components/livisi/climate.py @@ -68,7 +68,6 @@ class LivisiClimate(LivisiEntity, ClimateEntity): _attr_hvac_mode = HVACMode.HEAT _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/lookin/climate.py b/homeassistant/components/lookin/climate.py index fadeb6d16fa..051a18c9a32 100644 --- a/homeassistant/components/lookin/climate.py +++ b/homeassistant/components/lookin/climate.py @@ -107,7 +107,6 @@ class ConditionerEntity(LookinCoordinatorEntity, ClimateEntity): _attr_min_temp = MIN_TEMP _attr_max_temp = MAX_TEMP _attr_target_temperature_step = PRECISION_WHOLE - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/lyric/climate.py b/homeassistant/components/lyric/climate.py index bf8e17527e8..87b5d566bb8 100644 --- a/homeassistant/components/lyric/climate.py +++ b/homeassistant/components/lyric/climate.py @@ -174,7 +174,6 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): PRESET_TEMPORARY_HOLD, PRESET_VACATION_HOLD, ] - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/matter/climate.py b/homeassistant/components/matter/climate.py index cdbe1e36245..0378d0ea226 100644 --- a/homeassistant/components/matter/climate.py +++ b/homeassistant/components/matter/climate.py @@ -187,7 +187,7 @@ class MatterClimate(MatterEntity, ClimateEntity): _attr_temperature_unit: str = UnitOfTemperature.CELSIUS _attr_hvac_mode: HVACMode = HVACMode.OFF _feature_map: int | None = None - _enable_turn_on_off_backwards_compatibility = False + _platform_translation_key = "thermostat" async def async_set_temperature(self, **kwargs: Any) -> None: diff --git a/homeassistant/components/maxcube/climate.py b/homeassistant/components/maxcube/climate.py index b14efbbe073..da5a9f34dda 100644 --- a/homeassistant/components/maxcube/climate.py +++ b/homeassistant/components/maxcube/climate.py @@ -73,7 +73,6 @@ class MaxCubeClimate(ClimateEntity): | ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__(self, handler, device): """Initialize MAX! 
Cube ClimateEntity.""" diff --git a/homeassistant/components/melcloud/climate.py b/homeassistant/components/melcloud/climate.py index 08b3658c270..4defd47bc39 100644 --- a/homeassistant/components/melcloud/climate.py +++ b/homeassistant/components/melcloud/climate.py @@ -115,7 +115,6 @@ class MelCloudClimate(ClimateEntity): _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_has_entity_name = True _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__(self, device: MelCloudDevice) -> None: """Initialize the climate.""" diff --git a/homeassistant/components/melissa/climate.py b/homeassistant/components/melissa/climate.py index 0ad663faa2a..ff68820d70f 100644 --- a/homeassistant/components/melissa/climate.py +++ b/homeassistant/components/melissa/climate.py @@ -65,7 +65,6 @@ class MelissaClimate(ClimateEntity): | ClimateEntityFeature.TURN_ON ) _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, api, serial_number, init_data): """Initialize the climate device.""" diff --git a/homeassistant/components/mill/climate.py b/homeassistant/components/mill/climate.py index 5c5c7882634..4f700d24e1b 100644 --- a/homeassistant/components/mill/climate.py +++ b/homeassistant/components/mill/climate.py @@ -100,7 +100,6 @@ class MillHeater(CoordinatorEntity[MillDataUpdateCoordinator], ClimateEntity): ) _attr_target_temperature_step = PRECISION_TENTHS _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__( self, coordinator: MillDataUpdateCoordinator, heater: mill.Heater @@ -194,7 +193,6 @@ class LocalMillHeater(CoordinatorEntity[MillDataUpdateCoordinator], ClimateEntit ) _attr_target_temperature_step = PRECISION_TENTHS _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator: MillDataUpdateCoordinator) -> None: """Initialize the thermostat.""" diff --git a/homeassistant/components/modbus/climate.py b/homeassistant/components/modbus/climate.py index bcbaa0f32af..111c0458ef4 100644 --- a/homeassistant/components/modbus/climate.py +++ b/homeassistant/components/modbus/climate.py @@ -130,7 +130,6 @@ class ModbusThermostat(BaseStructPlatform, RestoreEntity, ClimateEntity): | ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/moehlenhoff_alpha2/climate.py b/homeassistant/components/moehlenhoff_alpha2/climate.py index 33f17271800..7c24dad4469 100644 --- a/homeassistant/components/moehlenhoff_alpha2/climate.py +++ b/homeassistant/components/moehlenhoff_alpha2/climate.py @@ -47,7 +47,6 @@ class Alpha2Climate(CoordinatorEntity[Alpha2BaseCoordinator], ClimateEntity): _attr_hvac_modes = [HVACMode.HEAT, HVACMode.COOL] _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_preset_modes = [PRESET_AUTO, PRESET_DAY, PRESET_NIGHT] - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator: Alpha2BaseCoordinator, heat_area_id: str) -> None: """Initialize Alpha2 ClimateEntity.""" diff --git a/homeassistant/components/mqtt/climate.py b/homeassistant/components/mqtt/climate.py index 2419e3f32ac..e62303472ed 100644 --- a/homeassistant/components/mqtt/climate.py +++ b/homeassistant/components/mqtt/climate.py @@ -521,7 +521,6 @@ class MqttClimate(MqttTemperatureControlEntity, ClimateEntity): _attributes_extra_blocked = 
MQTT_CLIMATE_ATTRIBUTES_BLOCKED _attr_target_temperature_low: float | None = None _attr_target_temperature_high: float | None = None - _enable_turn_on_off_backwards_compatibility = False @staticmethod def config_schema() -> VolSchemaType: diff --git a/homeassistant/components/mysensors/climate.py b/homeassistant/components/mysensors/climate.py index ce15faa589c..23b7c47ebf3 100644 --- a/homeassistant/components/mysensors/climate.py +++ b/homeassistant/components/mysensors/climate.py @@ -72,7 +72,6 @@ class MySensorsHVAC(MySensorsChildEntity, ClimateEntity): """Representation of a MySensors HVAC.""" _attr_hvac_modes = OPERATION_LIST - _enable_turn_on_off_backwards_compatibility = False @property def supported_features(self) -> ClimateEntityFeature: From 60563ae88a0b0b397a68dfda1a66ddf9a27f761b Mon Sep 17 00:00:00 2001 From: G Johansson Date: Thu, 5 Dec 2024 21:47:31 +0100 Subject: [PATCH 0284/1198] Remove _enable_turn_on_off_backwards_compatibility N-S (#132422) --- homeassistant/components/nest/climate.py | 1 - homeassistant/components/netatmo/climate.py | 1 - homeassistant/components/nexia/climate.py | 1 - homeassistant/components/nibe_heatpump/climate.py | 1 - homeassistant/components/nobo_hub/climate.py | 1 - homeassistant/components/nuheat/climate.py | 1 - homeassistant/components/oem/climate.py | 1 - homeassistant/components/opentherm_gw/climate.py | 2 +- .../components/overkiz/climate/atlantic_electrical_heater.py | 1 - ...c_electrical_heater_with_adjustable_temperature_setpoint.py | 1 - .../overkiz/climate/atlantic_electrical_towel_dryer.py | 1 - .../overkiz/climate/atlantic_heat_recovery_ventilation.py | 1 - .../climate/atlantic_pass_apc_heat_pump_main_component.py | 1 - .../overkiz/climate/atlantic_pass_apc_heating_zone.py | 1 - .../overkiz/climate/atlantic_pass_apc_zone_control.py | 1 - .../overkiz/climate/hitachi_air_to_air_heat_pump_hlrrwifi.py | 1 - .../overkiz/climate/hitachi_air_to_air_heat_pump_ovp.py | 1 - .../overkiz/climate/somfy_heating_temperature_interface.py | 1 - homeassistant/components/overkiz/climate/somfy_thermostat.py | 1 - .../overkiz/climate/valve_heating_temperature_interface.py | 1 - homeassistant/components/plugwise/climate.py | 1 - homeassistant/components/proliphix/climate.py | 1 - homeassistant/components/radiotherm/climate.py | 1 - homeassistant/components/schluter/climate.py | 1 - homeassistant/components/screenlogic/climate.py | 1 - homeassistant/components/sensibo/climate.py | 1 - homeassistant/components/senz/climate.py | 1 - homeassistant/components/shelly/climate.py | 2 -- homeassistant/components/smartthings/climate.py | 3 --- homeassistant/components/smarttub/climate.py | 1 - homeassistant/components/stiebel_eltron/climate.py | 1 - homeassistant/components/switchbee/climate.py | 1 - homeassistant/components/switchbot_cloud/climate.py | 1 - homeassistant/components/switcher_kis/climate.py | 1 - 34 files changed, 1 insertion(+), 37 deletions(-) diff --git a/homeassistant/components/nest/climate.py b/homeassistant/components/nest/climate.py index 1e2727bfab7..d5ad28c2dfd 100644 --- a/homeassistant/components/nest/climate.py +++ b/homeassistant/components/nest/climate.py @@ -95,7 +95,6 @@ class ThermostatEntity(ClimateEntity): _attr_has_entity_name = True _attr_should_poll = False _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__(self, device: Device) -> None: """Initialize ThermostatEntity.""" diff --git a/homeassistant/components/netatmo/climate.py b/homeassistant/components/netatmo/climate.py index 
752dee5a952..02c955beac3 100644 --- a/homeassistant/components/netatmo/climate.py +++ b/homeassistant/components/netatmo/climate.py @@ -192,7 +192,6 @@ class NetatmoThermostat(NetatmoRoomEntity, ClimateEntity): _attr_name = None _away: bool | None = None _connected: bool | None = None - _enable_turn_on_off_backwards_compatibility = False _away_temperature: float | None = None _hg_temperature: float | None = None diff --git a/homeassistant/components/nexia/climate.py b/homeassistant/components/nexia/climate.py index 9b22607d5a8..becd664756b 100644 --- a/homeassistant/components/nexia/climate.py +++ b/homeassistant/components/nexia/climate.py @@ -155,7 +155,6 @@ class NexiaZone(NexiaThermostatZoneEntity, ClimateEntity): """Provides Nexia Climate support.""" _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__( self, coordinator: NexiaDataUpdateCoordinator, zone: NexiaThermostatZone diff --git a/homeassistant/components/nibe_heatpump/climate.py b/homeassistant/components/nibe_heatpump/climate.py index f89d6ec29a9..94db90e7f58 100644 --- a/homeassistant/components/nibe_heatpump/climate.py +++ b/homeassistant/components/nibe_heatpump/climate.py @@ -74,7 +74,6 @@ class NibeClimateEntity(CoordinatorEntity[CoilCoordinator], ClimateEntity): _attr_target_temperature_step = 0.5 _attr_max_temp = 35.0 _attr_min_temp = 5.0 - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/nobo_hub/climate.py b/homeassistant/components/nobo_hub/climate.py index f1e2f4a78f0..a089209cde5 100644 --- a/homeassistant/components/nobo_hub/climate.py +++ b/homeassistant/components/nobo_hub/climate.py @@ -82,7 +82,6 @@ class NoboZone(ClimateEntity): _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_target_temperature_step = 1 # Need to poll to get preset change when in HVACMode.AUTO, so can't set _attr_should_poll = False - _enable_turn_on_off_backwards_compatibility = False def __init__(self, zone_id, hub: nobo, override_type) -> None: """Initialize the climate device.""" diff --git a/homeassistant/components/nuheat/climate.py b/homeassistant/components/nuheat/climate.py index db85827fc9b..8248c1b9b82 100644 --- a/homeassistant/components/nuheat/climate.py +++ b/homeassistant/components/nuheat/climate.py @@ -79,7 +79,6 @@ class NuHeatThermostat(CoordinatorEntity, ClimateEntity): _attr_has_entity_name = True _attr_name = None _attr_preset_modes = PRESET_MODES - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator, thermostat, temperature_unit): """Initialize the thermostat.""" diff --git a/homeassistant/components/oem/climate.py b/homeassistant/components/oem/climate.py index cf16f1ba87e..4cecb9ff195 100644 --- a/homeassistant/components/oem/climate.py +++ b/homeassistant/components/oem/climate.py @@ -73,7 +73,6 @@ class ThermostatDevice(ClimateEntity): | ClimateEntityFeature.TURN_ON ) _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, thermostat, name): """Initialize the device.""" diff --git a/homeassistant/components/opentherm_gw/climate.py b/homeassistant/components/opentherm_gw/climate.py index e93a76fe7b7..e8aa99f7325 100644 --- a/homeassistant/components/opentherm_gw/climate.py +++ b/homeassistant/components/opentherm_gw/climate.py @@ -85,7 +85,7 @@ class OpenThermClimate(OpenThermStatusEntity, ClimateEntity): _away_mode_b: int | None = None _away_state_a = False _away_state_b = False - 
_enable_turn_on_off_backwards_compatibility = False + _target_temperature: float | None = None _new_target_temperature: float | None = None entity_description: OpenThermClimateEntityDescription diff --git a/homeassistant/components/overkiz/climate/atlantic_electrical_heater.py b/homeassistant/components/overkiz/climate/atlantic_electrical_heater.py index ce9857f9d8c..059e64ef55d 100644 --- a/homeassistant/components/overkiz/climate/atlantic_electrical_heater.py +++ b/homeassistant/components/overkiz/climate/atlantic_electrical_heater.py @@ -54,7 +54,6 @@ class AtlanticElectricalHeater(OverkizEntity, ClimateEntity): ) _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key = DOMAIN - _enable_turn_on_off_backwards_compatibility = False @property def hvac_mode(self) -> HVACMode: diff --git a/homeassistant/components/overkiz/climate/atlantic_electrical_heater_with_adjustable_temperature_setpoint.py b/homeassistant/components/overkiz/climate/atlantic_electrical_heater_with_adjustable_temperature_setpoint.py index 64a7dc1e645..93c7d03293b 100644 --- a/homeassistant/components/overkiz/climate/atlantic_electrical_heater_with_adjustable_temperature_setpoint.py +++ b/homeassistant/components/overkiz/climate/atlantic_electrical_heater_with_adjustable_temperature_setpoint.py @@ -76,7 +76,6 @@ class AtlanticElectricalHeaterWithAdjustableTemperatureSetpoint( | ClimateEntityFeature.TURN_ON ) _attr_translation_key = DOMAIN - _enable_turn_on_off_backwards_compatibility = False def __init__( self, device_url: str, coordinator: OverkizDataUpdateCoordinator diff --git a/homeassistant/components/overkiz/climate/atlantic_electrical_towel_dryer.py b/homeassistant/components/overkiz/climate/atlantic_electrical_towel_dryer.py index e49fc4358e9..92bd6ceae82 100644 --- a/homeassistant/components/overkiz/climate/atlantic_electrical_towel_dryer.py +++ b/homeassistant/components/overkiz/climate/atlantic_electrical_towel_dryer.py @@ -46,7 +46,6 @@ class AtlanticElectricalTowelDryer(OverkizEntity, ClimateEntity): _attr_preset_modes = [*PRESET_MODE_TO_OVERKIZ] _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key = DOMAIN - _enable_turn_on_off_backwards_compatibility = False def __init__( self, device_url: str, coordinator: OverkizDataUpdateCoordinator diff --git a/homeassistant/components/overkiz/climate/atlantic_heat_recovery_ventilation.py b/homeassistant/components/overkiz/climate/atlantic_heat_recovery_ventilation.py index f1d96b5687b..bb84fa76f22 100644 --- a/homeassistant/components/overkiz/climate/atlantic_heat_recovery_ventilation.py +++ b/homeassistant/components/overkiz/climate/atlantic_heat_recovery_ventilation.py @@ -55,7 +55,6 @@ class AtlanticHeatRecoveryVentilation(OverkizEntity, ClimateEntity): | ClimateEntityFeature.TURN_ON ) _attr_translation_key = DOMAIN - _enable_turn_on_off_backwards_compatibility = False def __init__( self, device_url: str, coordinator: OverkizDataUpdateCoordinator diff --git a/homeassistant/components/overkiz/climate/atlantic_pass_apc_heat_pump_main_component.py b/homeassistant/components/overkiz/climate/atlantic_pass_apc_heat_pump_main_component.py index 1cd13205b13..800516e4bda 100644 --- a/homeassistant/components/overkiz/climate/atlantic_pass_apc_heat_pump_main_component.py +++ b/homeassistant/components/overkiz/climate/atlantic_pass_apc_heat_pump_main_component.py @@ -41,7 +41,6 @@ class AtlanticPassAPCHeatPumpMainComponent(OverkizEntity, ClimateEntity): ) _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key = DOMAIN - 
_enable_turn_on_off_backwards_compatibility = False @property def hvac_mode(self) -> HVACMode: diff --git a/homeassistant/components/overkiz/climate/atlantic_pass_apc_heating_zone.py b/homeassistant/components/overkiz/climate/atlantic_pass_apc_heating_zone.py index 3da2ccc922b..3df31fb44fc 100644 --- a/homeassistant/components/overkiz/climate/atlantic_pass_apc_heating_zone.py +++ b/homeassistant/components/overkiz/climate/atlantic_pass_apc_heating_zone.py @@ -92,7 +92,6 @@ class AtlanticPassAPCHeatingZone(OverkizEntity, ClimateEntity): ) _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key = DOMAIN - _enable_turn_on_off_backwards_compatibility = False def __init__( self, device_url: str, coordinator: OverkizDataUpdateCoordinator diff --git a/homeassistant/components/overkiz/climate/atlantic_pass_apc_zone_control.py b/homeassistant/components/overkiz/climate/atlantic_pass_apc_zone_control.py index 7fbab821b8d..7846b058619 100644 --- a/homeassistant/components/overkiz/climate/atlantic_pass_apc_zone_control.py +++ b/homeassistant/components/overkiz/climate/atlantic_pass_apc_zone_control.py @@ -31,7 +31,6 @@ class AtlanticPassAPCZoneControl(OverkizEntity, ClimateEntity): _attr_supported_features = ( ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__( self, device_url: str, coordinator: OverkizDataUpdateCoordinator diff --git a/homeassistant/components/overkiz/climate/hitachi_air_to_air_heat_pump_hlrrwifi.py b/homeassistant/components/overkiz/climate/hitachi_air_to_air_heat_pump_hlrrwifi.py index efdae2165a9..41da90f1ce8 100644 --- a/homeassistant/components/overkiz/climate/hitachi_air_to_air_heat_pump_hlrrwifi.py +++ b/homeassistant/components/overkiz/climate/hitachi_air_to_air_heat_pump_hlrrwifi.py @@ -91,7 +91,6 @@ class HitachiAirToAirHeatPumpHLRRWIFI(OverkizEntity, ClimateEntity): _attr_target_temperature_step = 1.0 _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key = DOMAIN - _enable_turn_on_off_backwards_compatibility = False def __init__( self, device_url: str, coordinator: OverkizDataUpdateCoordinator diff --git a/homeassistant/components/overkiz/climate/hitachi_air_to_air_heat_pump_ovp.py b/homeassistant/components/overkiz/climate/hitachi_air_to_air_heat_pump_ovp.py index b31ecf91ec0..f60cbbeca2b 100644 --- a/homeassistant/components/overkiz/climate/hitachi_air_to_air_heat_pump_ovp.py +++ b/homeassistant/components/overkiz/climate/hitachi_air_to_air_heat_pump_ovp.py @@ -95,7 +95,6 @@ class HitachiAirToAirHeatPumpOVP(OverkizEntity, ClimateEntity): _attr_target_temperature_step = 1.0 _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key = DOMAIN - _enable_turn_on_off_backwards_compatibility = False def __init__( self, device_url: str, coordinator: OverkizDataUpdateCoordinator diff --git a/homeassistant/components/overkiz/climate/somfy_heating_temperature_interface.py b/homeassistant/components/overkiz/climate/somfy_heating_temperature_interface.py index acc761664ec..5ca17f9b6b1 100644 --- a/homeassistant/components/overkiz/climate/somfy_heating_temperature_interface.py +++ b/homeassistant/components/overkiz/climate/somfy_heating_temperature_interface.py @@ -82,7 +82,6 @@ class SomfyHeatingTemperatureInterface(OverkizEntity, ClimateEntity): # Both min and max temp values have been retrieved from the Somfy Application. 
_attr_min_temp = 15.0 _attr_max_temp = 26.0 - _enable_turn_on_off_backwards_compatibility = False def __init__( self, device_url: str, coordinator: OverkizDataUpdateCoordinator diff --git a/homeassistant/components/overkiz/climate/somfy_thermostat.py b/homeassistant/components/overkiz/climate/somfy_thermostat.py index 829a3bad03b..66a04af4e7a 100644 --- a/homeassistant/components/overkiz/climate/somfy_thermostat.py +++ b/homeassistant/components/overkiz/climate/somfy_thermostat.py @@ -65,7 +65,6 @@ class SomfyThermostat(OverkizEntity, ClimateEntity): _attr_hvac_modes = [*HVAC_MODES_TO_OVERKIZ] _attr_preset_modes = [*PRESET_MODES_TO_OVERKIZ] _attr_translation_key = DOMAIN - _enable_turn_on_off_backwards_compatibility = False # Both min and max temp values have been retrieved from the Somfy Application. _attr_min_temp = 15.0 diff --git a/homeassistant/components/overkiz/climate/valve_heating_temperature_interface.py b/homeassistant/components/overkiz/climate/valve_heating_temperature_interface.py index e2165e8b6c6..54c00b33167 100644 --- a/homeassistant/components/overkiz/climate/valve_heating_temperature_interface.py +++ b/homeassistant/components/overkiz/climate/valve_heating_temperature_interface.py @@ -56,7 +56,6 @@ class ValveHeatingTemperatureInterface(OverkizEntity, ClimateEntity): ) _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key = DOMAIN - _enable_turn_on_off_backwards_compatibility = False def __init__( self, device_url: str, coordinator: OverkizDataUpdateCoordinator diff --git a/homeassistant/components/plugwise/climate.py b/homeassistant/components/plugwise/climate.py index 06b8171a528..b27fd1d4f0e 100644 --- a/homeassistant/components/plugwise/climate.py +++ b/homeassistant/components/plugwise/climate.py @@ -63,7 +63,6 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): _attr_name = None _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key = DOMAIN - _enable_turn_on_off_backwards_compatibility = False _previous_mode: str = "heating" diff --git a/homeassistant/components/proliphix/climate.py b/homeassistant/components/proliphix/climate.py index 18b974800a3..be7d394993a 100644 --- a/homeassistant/components/proliphix/climate.py +++ b/homeassistant/components/proliphix/climate.py @@ -61,7 +61,6 @@ class ProliphixThermostat(ClimateEntity): _attr_precision = PRECISION_TENTHS _attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE _attr_temperature_unit = UnitOfTemperature.FAHRENHEIT - _enable_turn_on_off_backwards_compatibility = False def __init__(self, pdp): """Initialize the thermostat.""" diff --git a/homeassistant/components/radiotherm/climate.py b/homeassistant/components/radiotherm/climate.py index 73ab3644a0b..af52c5fcea3 100644 --- a/homeassistant/components/radiotherm/climate.py +++ b/homeassistant/components/radiotherm/climate.py @@ -107,7 +107,6 @@ class RadioThermostat(RadioThermostatEntity, ClimateEntity): _attr_temperature_unit = UnitOfTemperature.FAHRENHEIT _attr_precision = PRECISION_HALVES _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator: RadioThermUpdateCoordinator) -> None: """Initialize the thermostat.""" diff --git a/homeassistant/components/schluter/climate.py b/homeassistant/components/schluter/climate.py index 6f0a49e6eb9..7db15d3923c 100644 --- a/homeassistant/components/schluter/climate.py +++ b/homeassistant/components/schluter/climate.py @@ -82,7 +82,6 @@ class SchluterThermostat(CoordinatorEntity, ClimateEntity): _attr_hvac_modes = 
[HVACMode.HEAT] _attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator, serial_number, api, session_id): """Initialize the thermostat.""" diff --git a/homeassistant/components/screenlogic/climate.py b/homeassistant/components/screenlogic/climate.py index 4d93dcf81d3..08300900f5d 100644 --- a/homeassistant/components/screenlogic/climate.py +++ b/homeassistant/components/screenlogic/climate.py @@ -80,7 +80,6 @@ class ScreenLogicClimate(ScreenLogicPushEntity, ClimateEntity, RestoreEntity): | ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator, entity_description) -> None: """Initialize a ScreenLogic climate entity.""" diff --git a/homeassistant/components/sensibo/climate.py b/homeassistant/components/sensibo/climate.py index c2f03c2d568..181b02e84ad 100644 --- a/homeassistant/components/sensibo/climate.py +++ b/homeassistant/components/sensibo/climate.py @@ -194,7 +194,6 @@ class SensiboClimate(SensiboDeviceBaseEntity, ClimateEntity): _attr_name = None _attr_precision = PRECISION_TENTHS _attr_translation_key = "climate_device" - _enable_turn_on_off_backwards_compatibility = False def __init__( self, coordinator: SensiboDataUpdateCoordinator, device_id: str diff --git a/homeassistant/components/senz/climate.py b/homeassistant/components/senz/climate.py index 3b834654ca6..d5749a3f040 100644 --- a/homeassistant/components/senz/climate.py +++ b/homeassistant/components/senz/climate.py @@ -46,7 +46,6 @@ class SENZClimate(CoordinatorEntity, ClimateEntity): _attr_min_temp = 5 _attr_has_entity_name = True _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/shelly/climate.py b/homeassistant/components/shelly/climate.py index b77f45afb3f..842abc5ecc4 100644 --- a/homeassistant/components/shelly/climate.py +++ b/homeassistant/components/shelly/climate.py @@ -172,7 +172,6 @@ class BlockSleepingClimate( ) _attr_target_temperature_step = SHTRV_01_TEMPERATURE_SETTINGS["step"] _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__( self, @@ -456,7 +455,6 @@ class RpcClimate(ShellyRpcEntity, ClimateEntity): ) _attr_target_temperature_step = RPC_THERMOSTAT_SETTINGS["step"] _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator: ShellyRpcCoordinator, id_: int) -> None: """Initialize.""" diff --git a/homeassistant/components/smartthings/climate.py b/homeassistant/components/smartthings/climate.py index 073a1470c21..d9535272295 100644 --- a/homeassistant/components/smartthings/climate.py +++ b/homeassistant/components/smartthings/climate.py @@ -164,8 +164,6 @@ def get_capabilities(capabilities: Sequence[str]) -> Sequence[str] | None: class SmartThingsThermostat(SmartThingsEntity, ClimateEntity): """Define a SmartThings climate entities.""" - _enable_turn_on_off_backwards_compatibility = False - def __init__(self, device): """Init the class.""" super().__init__(device) @@ -347,7 +345,6 @@ class SmartThingsAirConditioner(SmartThingsEntity, ClimateEntity): """Define a SmartThings Air Conditioner.""" _hvac_modes: list[HVACMode] - _enable_turn_on_off_backwards_compatibility = False def __init__(self, device) -> None: """Init the class.""" diff --git 
a/homeassistant/components/smarttub/climate.py b/homeassistant/components/smarttub/climate.py index f0bb84b3390..7f3163834e0 100644 --- a/homeassistant/components/smarttub/climate.py +++ b/homeassistant/components/smarttub/climate.py @@ -68,7 +68,6 @@ class SmartTubThermostat(SmartTubEntity, ClimateEntity): ) _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_preset_modes = list(PRESET_MODES.values()) - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator, spa): """Initialize the entity.""" diff --git a/homeassistant/components/stiebel_eltron/climate.py b/homeassistant/components/stiebel_eltron/climate.py index 41015ac16a4..676f613f382 100644 --- a/homeassistant/components/stiebel_eltron/climate.py +++ b/homeassistant/components/stiebel_eltron/climate.py @@ -80,7 +80,6 @@ class StiebelEltron(ClimateEntity): | ClimateEntityFeature.TURN_ON ) _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, name, ste_data): """Initialize the unit.""" diff --git a/homeassistant/components/switchbee/climate.py b/homeassistant/components/switchbee/climate.py index 7ec0ad4d88b..d946ed1761b 100644 --- a/homeassistant/components/switchbee/climate.py +++ b/homeassistant/components/switchbee/climate.py @@ -90,7 +90,6 @@ class SwitchBeeClimateEntity(SwitchBeeDeviceEntity[SwitchBeeThermostat], Climate _attr_fan_modes = SUPPORTED_FAN_MODES _attr_target_temperature_step = 1 - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/switchbot_cloud/climate.py b/homeassistant/components/switchbot_cloud/climate.py index cd60313f37a..90d8258d0a3 100644 --- a/homeassistant/components/switchbot_cloud/climate.py +++ b/homeassistant/components/switchbot_cloud/climate.py @@ -80,7 +80,6 @@ class SwitchBotCloudAirConditioner(SwitchBotCloudEntity, ClimateEntity): _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_target_temperature = 21 _attr_name = None - _enable_turn_on_off_backwards_compatibility = False async def _do_send_command( self, diff --git a/homeassistant/components/switcher_kis/climate.py b/homeassistant/components/switcher_kis/climate.py index f2d4fb60252..5285e7549ef 100644 --- a/homeassistant/components/switcher_kis/climate.py +++ b/homeassistant/components/switcher_kis/climate.py @@ -83,7 +83,6 @@ class SwitcherClimateEntity(SwitcherEntity, ClimateEntity): """Representation of a Switcher climate entity.""" _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__( self, coordinator: SwitcherDataUpdateCoordinator, remote: SwitcherBreezeRemote From e7f44048e942f75db8e0af7b8ccf955853df01e8 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Thu, 5 Dec 2024 21:48:02 +0100 Subject: [PATCH 0285/1198] Remove _enable_turn_on_off_backwards_compatibility T-Z (#132423) --- homeassistant/components/tado/climate.py | 1 - homeassistant/components/tesla_fleet/climate.py | 3 +-- homeassistant/components/teslemetry/climate.py | 3 +-- homeassistant/components/tessie/climate.py | 1 - homeassistant/components/tfiac/climate.py | 1 - homeassistant/components/tolo/climate.py | 1 - homeassistant/components/toon/climate.py | 1 - homeassistant/components/touchline/climate.py | 1 - homeassistant/components/tplink/climate.py | 1 - homeassistant/components/tuya/climate.py | 1 - homeassistant/components/velbus/climate.py | 1 - homeassistant/components/venstar/climate.py | 1 - homeassistant/components/vera/climate.py | 1 - 
homeassistant/components/vicare/climate.py | 1 - homeassistant/components/whirlpool/climate.py | 1 - homeassistant/components/xs1/climate.py | 1 - homeassistant/components/yolink/climate.py | 1 - homeassistant/components/zha/climate.py | 1 - homeassistant/components/zhong_hong/climate.py | 1 - homeassistant/components/zwave_js/climate.py | 1 - homeassistant/components/zwave_me/climate.py | 1 - tests/components/climate/test_init.py | 1 - 22 files changed, 2 insertions(+), 24 deletions(-) diff --git a/homeassistant/components/tado/climate.py b/homeassistant/components/tado/climate.py index 21a09086d46..5a81e951293 100644 --- a/homeassistant/components/tado/climate.py +++ b/homeassistant/components/tado/climate.py @@ -269,7 +269,6 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): _attr_name = None _attr_translation_key = DOMAIN _available = False - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/tesla_fleet/climate.py b/homeassistant/components/tesla_fleet/climate.py index 9a1533a688f..06e9c9d7c64 100644 --- a/homeassistant/components/tesla_fleet/climate.py +++ b/homeassistant/components/tesla_fleet/climate.py @@ -74,7 +74,6 @@ class TeslaFleetClimateEntity(TeslaFleetVehicleEntity, ClimateEntity): | ClimateEntityFeature.PRESET_MODE ) _attr_preset_modes = ["off", "keep", "dog", "camp"] - _enable_turn_on_off_backwards_compatibility = False def __init__( self, @@ -220,7 +219,7 @@ class TeslaFleetCabinOverheatProtectionEntity(TeslaFleetVehicleEntity, ClimateEn _attr_max_temp = COP_LEVELS["High"] _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_hvac_modes = list(COP_MODES.values()) - _enable_turn_on_off_backwards_compatibility = False + _attr_entity_registry_enabled_default = False def __init__( diff --git a/homeassistant/components/teslemetry/climate.py b/homeassistant/components/teslemetry/climate.py index 5e933d1dbce..020085140cc 100644 --- a/homeassistant/components/teslemetry/climate.py +++ b/homeassistant/components/teslemetry/climate.py @@ -74,7 +74,6 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): | ClimateEntityFeature.PRESET_MODE ) _attr_preset_modes = ["off", "keep", "dog", "camp"] - _enable_turn_on_off_backwards_compatibility = False def __init__( self, @@ -209,7 +208,7 @@ class TeslemetryCabinOverheatProtectionEntity(TeslemetryVehicleEntity, ClimateEn _attr_max_temp = COP_LEVELS["High"] _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_hvac_modes = list(COP_MODES.values()) - _enable_turn_on_off_backwards_compatibility = False + _attr_entity_registry_enabled_default = False def __init__( diff --git a/homeassistant/components/tessie/climate.py b/homeassistant/components/tessie/climate.py index e0649432e05..1d26926aeaa 100644 --- a/homeassistant/components/tessie/climate.py +++ b/homeassistant/components/tessie/climate.py @@ -60,7 +60,6 @@ class TessieClimateEntity(TessieEntity, ClimateEntity): TessieClimateKeeper.DOG, TessieClimateKeeper.CAMP, ] - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/tfiac/climate.py b/homeassistant/components/tfiac/climate.py index 81517a6f1f5..e3aa9060787 100644 --- a/homeassistant/components/tfiac/climate.py +++ b/homeassistant/components/tfiac/climate.py @@ -88,7 +88,6 @@ class TfiacClimate(ClimateEntity): | ClimateEntityFeature.TURN_ON ) _attr_temperature_unit = UnitOfTemperature.FAHRENHEIT - _enable_turn_on_off_backwards_compatibility = False def __init__(self, hass, client): """Init 
class.""" diff --git a/homeassistant/components/tolo/climate.py b/homeassistant/components/tolo/climate.py index 8c5176b3e4e..5e6428525c1 100644 --- a/homeassistant/components/tolo/climate.py +++ b/homeassistant/components/tolo/climate.py @@ -60,7 +60,6 @@ class SaunaClimate(ToloSaunaCoordinatorEntity, ClimateEntity): ) _attr_target_temperature_step = 1 _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__( self, coordinator: ToloSaunaUpdateCoordinator, entry: ConfigEntry diff --git a/homeassistant/components/toon/climate.py b/homeassistant/components/toon/climate.py index 365706ba4fd..0c2e5b9b232 100644 --- a/homeassistant/components/toon/climate.py +++ b/homeassistant/components/toon/climate.py @@ -52,7 +52,6 @@ class ToonThermostatDevice(ToonDisplayDeviceEntity, ClimateEntity): ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE ) _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/touchline/climate.py b/homeassistant/components/touchline/climate.py index 7b14404ee34..e9d27341cb7 100644 --- a/homeassistant/components/touchline/climate.py +++ b/homeassistant/components/touchline/climate.py @@ -70,7 +70,6 @@ class Touchline(ClimateEntity): ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE ) _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, touchline_thermostat): """Initialize the Touchline device.""" diff --git a/homeassistant/components/tplink/climate.py b/homeassistant/components/tplink/climate.py index 0bd25d9f80c..75a6599959d 100644 --- a/homeassistant/components/tplink/climate.py +++ b/homeassistant/components/tplink/climate.py @@ -67,7 +67,6 @@ class TPLinkClimateEntity(CoordinatedTPLinkEntity, ClimateEntity): _attr_precision = PRECISION_TENTHS # This disables the warning for async_turn_{on,off}, can be removed later. 
- _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/tuya/climate.py b/homeassistant/components/tuya/climate.py index 93aaaa40c26..62aa29494e9 100644 --- a/homeassistant/components/tuya/climate.py +++ b/homeassistant/components/tuya/climate.py @@ -120,7 +120,6 @@ class TuyaClimateEntity(TuyaEntity, ClimateEntity): _set_temperature: IntegerTypeData | None = None entity_description: TuyaClimateEntityDescription _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/velbus/climate.py b/homeassistant/components/velbus/climate.py index ed47d8b0a91..18142482539 100644 --- a/homeassistant/components/velbus/climate.py +++ b/homeassistant/components/velbus/climate.py @@ -42,7 +42,6 @@ class VelbusClimate(VelbusEntity, ClimateEntity): _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_hvac_modes = [HVACMode.HEAT, HVACMode.COOL] _attr_preset_modes = list(PRESET_MODES) - _enable_turn_on_off_backwards_compatibility = False @property def target_temperature(self) -> float | None: diff --git a/homeassistant/components/venstar/climate.py b/homeassistant/components/venstar/climate.py index 2865d64201e..c5323e1e9a8 100644 --- a/homeassistant/components/venstar/climate.py +++ b/homeassistant/components/venstar/climate.py @@ -110,7 +110,6 @@ class VenstarThermostat(VenstarEntity, ClimateEntity): _attr_hvac_modes = [HVACMode.HEAT, HVACMode.COOL, HVACMode.OFF, HVACMode.AUTO] _attr_precision = PRECISION_HALVES _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/vera/climate.py b/homeassistant/components/vera/climate.py index 01fe26be6bc..eb2a5206f30 100644 --- a/homeassistant/components/vera/climate.py +++ b/homeassistant/components/vera/climate.py @@ -54,7 +54,6 @@ class VeraThermostat(VeraEntity[veraApi.VeraThermostat], ClimateEntity): | ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__( self, vera_device: veraApi.VeraThermostat, controller_data: ControllerData diff --git a/homeassistant/components/vicare/climate.py b/homeassistant/components/vicare/climate.py index 8a116038533..67330bf201d 100644 --- a/homeassistant/components/vicare/climate.py +++ b/homeassistant/components/vicare/climate.py @@ -140,7 +140,6 @@ class ViCareClimate(ViCareEntity, ClimateEntity): _current_action: bool | None = None _current_mode: str | None = None _current_program: str | None = None - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/whirlpool/climate.py b/homeassistant/components/whirlpool/climate.py index aa399746006..e1cedd38c04 100644 --- a/homeassistant/components/whirlpool/climate.py +++ b/homeassistant/components/whirlpool/climate.py @@ -110,7 +110,6 @@ class AirConEntity(ClimateEntity): _attr_swing_modes = SUPPORTED_SWING_MODES _attr_target_temperature_step = SUPPORTED_TARGET_TEMPERATURE_STEP _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/xs1/climate.py b/homeassistant/components/xs1/climate.py index c7d580631d3..3bb80df25b2 100644 --- a/homeassistant/components/xs1/climate.py +++ b/homeassistant/components/xs1/climate.py @@ -56,7 +56,6 @@ class XS1ThermostatEntity(XS1DeviceEntity, ClimateEntity): _attr_hvac_mode = HVACMode.HEAT _attr_hvac_modes = 
[HVACMode.HEAT] _attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE - _enable_turn_on_off_backwards_compatibility = False def __init__(self, device, sensor): """Initialize the actuator.""" diff --git a/homeassistant/components/yolink/climate.py b/homeassistant/components/yolink/climate.py index 98f1b764498..ff3bbf0d93b 100644 --- a/homeassistant/components/yolink/climate.py +++ b/homeassistant/components/yolink/climate.py @@ -63,7 +63,6 @@ class YoLinkClimateEntity(YoLinkEntity, ClimateEntity): """YoLink Climate Entity.""" _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/zha/climate.py b/homeassistant/components/zha/climate.py index fcf5afb5ac5..af9f56cd7dc 100644 --- a/homeassistant/components/zha/climate.py +++ b/homeassistant/components/zha/climate.py @@ -88,7 +88,6 @@ class Thermostat(ZHAEntity, ClimateEntity): _attr_precision = PRECISION_TENTHS _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key: str = "thermostat" - _enable_turn_on_off_backwards_compatibility = False def __init__(self, entity_data: EntityData, **kwargs: Any) -> None: """Initialize the ZHA thermostat entity.""" diff --git a/homeassistant/components/zhong_hong/climate.py b/homeassistant/components/zhong_hong/climate.py index eaf00b5432f..b5acc230472 100644 --- a/homeassistant/components/zhong_hong/climate.py +++ b/homeassistant/components/zhong_hong/climate.py @@ -135,7 +135,6 @@ class ZhongHongClimate(ClimateEntity): | ClimateEntityFeature.TURN_ON ) _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, hub, addr_out, addr_in): """Set up the ZhongHong climate devices.""" diff --git a/homeassistant/components/zwave_js/climate.py b/homeassistant/components/zwave_js/climate.py index c7ab579c2cb..580694cae11 100644 --- a/homeassistant/components/zwave_js/climate.py +++ b/homeassistant/components/zwave_js/climate.py @@ -128,7 +128,6 @@ class ZWaveClimate(ZWaveBaseEntity, ClimateEntity): """Representation of a Z-Wave climate.""" _attr_precision = PRECISION_TENTHS - _enable_turn_on_off_backwards_compatibility = False def __init__( self, config_entry: ConfigEntry, driver: Driver, info: ZwaveDiscoveryInfo diff --git a/homeassistant/components/zwave_me/climate.py b/homeassistant/components/zwave_me/climate.py index de6f606745f..b8eed88b505 100644 --- a/homeassistant/components/zwave_me/climate.py +++ b/homeassistant/components/zwave_me/climate.py @@ -57,7 +57,6 @@ class ZWaveMeClimate(ZWaveMeEntity, ClimateEntity): _attr_hvac_mode = HVACMode.HEAT _attr_hvac_modes = [HVACMode.HEAT] _attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE - _enable_turn_on_off_backwards_compatibility = False def set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" diff --git a/tests/components/climate/test_init.py b/tests/components/climate/test_init.py index 8851b2d60c5..45570c63008 100644 --- a/tests/components/climate/test_init.py +++ b/tests/components/climate/test_init.py @@ -462,7 +462,6 @@ async def test_sync_toggle(hass: HomeAssistant) -> None: class MockClimateEntityTest(MockClimateEntity): """Mock Climate device.""" - _enable_turn_on_off_backwards_compatibility = False _attr_supported_features = ( ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) From 841773bb6897e991f0744afd77380ae11263a38b Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 5 Dec 2024 22:16:18 
+0100 Subject: [PATCH 0286/1198] Remove yaml import from hive (#132354) * Raise issue on hive deprecated YAML configuration * Remove YAML import --- homeassistant/components/hive/__init__.py | 47 +--------------- homeassistant/components/hive/config_flow.py | 4 -- tests/components/hive/test_config_flow.py | 58 -------------------- 3 files changed, 3 insertions(+), 106 deletions(-) diff --git a/homeassistant/components/hive/__init__.py b/homeassistant/components/hive/__init__.py index 1c11ccad595..ac008b857af 100644 --- a/homeassistant/components/hive/__init__.py +++ b/homeassistant/components/hive/__init__.py @@ -10,65 +10,24 @@ from typing import Any, Concatenate from aiohttp.web_exceptions import HTTPException from apyhiveapi import Auth, Hive from apyhiveapi.helper.hive_exceptions import HiveReauthRequired -import voluptuous as vol -from homeassistant import config_entries from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_PASSWORD, CONF_SCAN_INTERVAL, CONF_USERNAME +from homeassistant.const import CONF_SCAN_INTERVAL from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import aiohttp_client, config_validation as cv +from homeassistant.helpers import aiohttp_client from homeassistant.helpers.device_registry import DeviceEntry from homeassistant.helpers.dispatcher import async_dispatcher_send -from homeassistant.helpers.typing import ConfigType from .const import DOMAIN, PLATFORM_LOOKUP, PLATFORMS from .entity import HiveEntity _LOGGER = logging.getLogger(__name__) -CONFIG_SCHEMA = vol.Schema( - vol.All( - cv.deprecated(DOMAIN), - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_PASSWORD): cv.string, - vol.Required(CONF_USERNAME): cv.string, - vol.Optional(CONF_SCAN_INTERVAL, default=2): cv.positive_int, - }, - ) - }, - ), - extra=vol.ALLOW_EXTRA, -) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Hive configuration setup.""" - hass.data[DOMAIN] = {} - - if DOMAIN not in config: - return True - - conf = config[DOMAIN] - - if not hass.config_entries.async_entries(DOMAIN): - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={ - CONF_USERNAME: conf[CONF_USERNAME], - CONF_PASSWORD: conf[CONF_PASSWORD], - }, - ) - ) - return True - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Hive from a config entry.""" + hass.data.setdefault(DOMAIN, {}) web_session = aiohttp_client.async_get_clientsession(hass) hive_config = dict(entry.data) diff --git a/homeassistant/components/hive/config_flow.py b/homeassistant/components/hive/config_flow.py index 8df9a635302..e3180dc9734 100644 --- a/homeassistant/components/hive/config_flow.py +++ b/homeassistant/components/hive/config_flow.py @@ -163,10 +163,6 @@ class HiveFlowHandler(ConfigFlow, domain=DOMAIN): } return await self.async_step_user(data) - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Import user.""" - return await self.async_step_user(import_data) - @staticmethod @callback def async_get_options_flow( diff --git a/tests/components/hive/test_config_flow.py b/tests/components/hive/test_config_flow.py index e5dba49dcc1..8749954c364 100644 --- a/tests/components/hive/test_config_flow.py +++ b/tests/components/hive/test_config_flow.py @@ -25,52 +25,6 @@ MFA_RESEND_CODE = "0000" MFA_INVALID_CODE = "HIVE" -async def 
test_import_flow(hass: HomeAssistant) -> None: - """Check import flow.""" - - with ( - patch( - "homeassistant.components.hive.config_flow.Auth.login", - return_value={ - "ChallengeName": "SUCCESS", - "AuthenticationResult": { - "RefreshToken": "mock-refresh-token", - "AccessToken": "mock-access-token", - }, - }, - ), - patch( - "homeassistant.components.hive.async_setup", return_value=True - ) as mock_setup, - patch( - "homeassistant.components.hive.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == USERNAME - assert result["data"] == { - CONF_USERNAME: USERNAME, - CONF_PASSWORD: PASSWORD, - "tokens": { - "AuthenticationResult": { - "AccessToken": "mock-access-token", - "RefreshToken": "mock-refresh-token", - }, - "ChallengeName": "SUCCESS", - }, - } - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - assert len(mock_setup.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 - - async def test_user_flow(hass: HomeAssistant) -> None: """Test the user flow.""" result = await hass.config_entries.flow.async_init( @@ -91,9 +45,6 @@ async def test_user_flow(hass: HomeAssistant) -> None: }, }, ), - patch( - "homeassistant.components.hive.async_setup", return_value=True - ) as mock_setup, patch( "homeassistant.components.hive.async_setup_entry", return_value=True, @@ -119,7 +70,6 @@ async def test_user_flow(hass: HomeAssistant) -> None: }, } - assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 assert len(hass.config_entries.async_entries(DOMAIN)) == 1 @@ -185,9 +135,6 @@ async def test_user_flow_2fa(hass: HomeAssistant) -> None: "mock-device-password", ], ), - patch( - "homeassistant.components.hive.async_setup", return_value=True - ) as mock_setup, patch( "homeassistant.components.hive.async_setup_entry", return_value=True, @@ -220,7 +167,6 @@ async def test_user_flow_2fa(hass: HomeAssistant) -> None: ], } - assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 assert len(hass.config_entries.async_entries(DOMAIN)) == 1 @@ -462,9 +408,6 @@ async def test_user_flow_2fa_send_new_code(hass: HomeAssistant) -> None: "mock-device-password", ], ), - patch( - "homeassistant.components.hive.async_setup", return_value=True - ) as mock_setup, patch( "homeassistant.components.hive.async_setup_entry", return_value=True, @@ -493,7 +436,6 @@ async def test_user_flow_2fa_send_new_code(hass: HomeAssistant) -> None: "mock-device-password", ], } - assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 assert len(hass.config_entries.async_entries(DOMAIN)) == 1 From 3e98df707daee99c3b272a44c89ea4752b720dcc Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Thu, 5 Dec 2024 22:23:31 +0100 Subject: [PATCH 0287/1198] Remove deprecated integration dte_energy_bridge (#132276) * Remove deprecated integration dte_energy_bridge * Update quality scale script and ran hassfest --- .../components/dte_energy_bridge/__init__.py | 1 - .../dte_energy_bridge/manifest.json | 8 -- .../components/dte_energy_bridge/sensor.py | 127 ------------------ .../components/dte_energy_bridge/strings.json | 8 -- homeassistant/generated/integrations.json | 6 - script/hassfest/quality_scale.py | 1 - .../components/dte_energy_bridge/__init__.py | 1 - 
.../dte_energy_bridge/test_sensor.py | 58 -------- 8 files changed, 210 deletions(-) delete mode 100644 homeassistant/components/dte_energy_bridge/__init__.py delete mode 100644 homeassistant/components/dte_energy_bridge/manifest.json delete mode 100644 homeassistant/components/dte_energy_bridge/sensor.py delete mode 100644 homeassistant/components/dte_energy_bridge/strings.json delete mode 100644 tests/components/dte_energy_bridge/__init__.py delete mode 100644 tests/components/dte_energy_bridge/test_sensor.py diff --git a/homeassistant/components/dte_energy_bridge/__init__.py b/homeassistant/components/dte_energy_bridge/__init__.py deleted file mode 100644 index 2525d047bce..00000000000 --- a/homeassistant/components/dte_energy_bridge/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""The dte_energy_bridge component.""" diff --git a/homeassistant/components/dte_energy_bridge/manifest.json b/homeassistant/components/dte_energy_bridge/manifest.json deleted file mode 100644 index 8285469a745..00000000000 --- a/homeassistant/components/dte_energy_bridge/manifest.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "domain": "dte_energy_bridge", - "name": "DTE Energy Bridge", - "codeowners": [], - "documentation": "https://www.home-assistant.io/integrations/dte_energy_bridge", - "iot_class": "local_polling", - "quality_scale": "legacy" -} diff --git a/homeassistant/components/dte_energy_bridge/sensor.py b/homeassistant/components/dte_energy_bridge/sensor.py deleted file mode 100644 index a0b9253034e..00000000000 --- a/homeassistant/components/dte_energy_bridge/sensor.py +++ /dev/null @@ -1,127 +0,0 @@ -"""Support for monitoring energy usage using the DTE energy bridge.""" - -from __future__ import annotations - -from http import HTTPStatus -import logging - -import requests -import voluptuous as vol - -from homeassistant.components.sensor import ( - PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, - SensorDeviceClass, - SensorEntity, - SensorStateClass, -) -from homeassistant.const import CONF_NAME, UnitOfPower -from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.issue_registry import IssueSeverity, create_issue -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType - -_LOGGER = logging.getLogger(__name__) - -CONF_IP_ADDRESS = "ip" -CONF_VERSION = "version" - -DEFAULT_NAME = "Current Energy Usage" -DEFAULT_VERSION = 1 -DOMAIN = "dte_energy_bridge" - -PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_IP_ADDRESS): cv.string, - vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - vol.Optional(CONF_VERSION, default=DEFAULT_VERSION): vol.All( - vol.Coerce(int), vol.Any(1, 2) - ), - } -) - - -def setup_platform( - hass: HomeAssistant, - config: ConfigType, - add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up the DTE energy bridge sensor.""" - create_issue( - hass, - DOMAIN, - "deprecated_integration", - breaks_in_ha_version="2025.1.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_integration", - translation_placeholders={"domain": DOMAIN}, - ) - - name = config[CONF_NAME] - ip_address = config[CONF_IP_ADDRESS] - version = config[CONF_VERSION] - - add_entities([DteEnergyBridgeSensor(ip_address, name, version)], True) - - -class DteEnergyBridgeSensor(SensorEntity): - """Implementation of the DTE Energy Bridge 
sensors.""" - - _attr_device_class = SensorDeviceClass.POWER - _attr_native_unit_of_measurement = UnitOfPower.KILO_WATT - _attr_state_class = SensorStateClass.MEASUREMENT - - def __init__(self, ip_address, name, version): - """Initialize the sensor.""" - self._version = version - - if self._version == 1: - self._url = f"http://{ip_address}/instantaneousdemand" - elif self._version == 2: - self._url = f"http://{ip_address}:8888/zigbee/se/instantaneousdemand" - - self._attr_name = name - - def update(self) -> None: - """Get the energy usage data from the DTE energy bridge.""" - try: - response = requests.get(self._url, timeout=5) - except (requests.exceptions.RequestException, ValueError): - _LOGGER.warning( - "Could not update status for DTE Energy Bridge (%s)", self._attr_name - ) - return - - if response.status_code != HTTPStatus.OK: - _LOGGER.warning( - "Invalid status_code from DTE Energy Bridge: %s (%s)", - response.status_code, - self._attr_name, - ) - return - - response_split = response.text.split() - - if len(response_split) != 2: - _LOGGER.warning( - 'Invalid response from DTE Energy Bridge: "%s" (%s)', - response.text, - self._attr_name, - ) - return - - val = float(response_split[0]) - - # A workaround for a bug in the DTE energy bridge. - # The returned value can randomly be in W or kW. Checking for a - # a decimal seems to be a reliable way to determine the units. - # Limiting to version 1 because version 2 apparently always returns - # values in the format 000000.000 kW, but the scaling is Watts - # NOT kWatts - if self._version == 1 and "." in response_split[0]: - self._attr_native_value = val - else: - self._attr_native_value = val / 1000 diff --git a/homeassistant/components/dte_energy_bridge/strings.json b/homeassistant/components/dte_energy_bridge/strings.json deleted file mode 100644 index f75867b8faa..00000000000 --- a/homeassistant/components/dte_energy_bridge/strings.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "issues": { - "deprecated_integration": { - "title": "The DTE Energy Bridge integration will be removed", - "description": "The DTE Energy Bridge integration will be removed as new users can't get any supported devices, and the integration will fail as soon as a current device gets internet access.\n\n Please remove all `{domain}`platform sensors from your configuration and restart Home Assistant." 
- } - } -} diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index d2f0a90065a..c87218cb1b1 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -1374,12 +1374,6 @@ "config_flow": true, "iot_class": "local_push" }, - "dte_energy_bridge": { - "name": "DTE Energy Bridge", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling" - }, "dublin_bus_transport": { "name": "Dublin Bus", "integration_type": "hub", diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 137fa3084a9..c55915c19c1 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -310,7 +310,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "drop_connect", "dsmr", "dsmr_reader", - "dte_energy_bridge", "dublin_bus_transport", "duckdns", "duke_energy", diff --git a/tests/components/dte_energy_bridge/__init__.py b/tests/components/dte_energy_bridge/__init__.py deleted file mode 100644 index 615944bda88..00000000000 --- a/tests/components/dte_energy_bridge/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the dte_energy_bridge component.""" diff --git a/tests/components/dte_energy_bridge/test_sensor.py b/tests/components/dte_energy_bridge/test_sensor.py deleted file mode 100644 index 41d340fae48..00000000000 --- a/tests/components/dte_energy_bridge/test_sensor.py +++ /dev/null @@ -1,58 +0,0 @@ -"""The tests for the DTE Energy Bridge.""" - -import requests_mock - -from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component - -DTE_ENERGY_BRIDGE_CONFIG = {"platform": "dte_energy_bridge", "ip": "192.168.1.1"} - - -async def test_setup_with_config(hass: HomeAssistant) -> None: - """Test the platform setup with configuration.""" - assert await async_setup_component( - hass, "sensor", {"dte_energy_bridge": DTE_ENERGY_BRIDGE_CONFIG} - ) - await hass.async_block_till_done() - - -async def test_setup_correct_reading(hass: HomeAssistant) -> None: - """Test DTE Energy bridge returns a correct value.""" - with requests_mock.Mocker() as mock_req: - mock_req.get( - f"http://{DTE_ENERGY_BRIDGE_CONFIG['ip']}/instantaneousdemand", - text=".411 kW", - ) - assert await async_setup_component( - hass, "sensor", {"sensor": DTE_ENERGY_BRIDGE_CONFIG} - ) - await hass.async_block_till_done() - assert hass.states.get("sensor.current_energy_usage").state == "0.411" - - -async def test_setup_incorrect_units_reading(hass: HomeAssistant) -> None: - """Test DTE Energy bridge handles a value with incorrect units.""" - with requests_mock.Mocker() as mock_req: - mock_req.get( - f"http://{DTE_ENERGY_BRIDGE_CONFIG['ip']}/instantaneousdemand", - text="411 kW", - ) - assert await async_setup_component( - hass, "sensor", {"sensor": DTE_ENERGY_BRIDGE_CONFIG} - ) - await hass.async_block_till_done() - assert hass.states.get("sensor.current_energy_usage").state == "0.411" - - -async def test_setup_bad_format_reading(hass: HomeAssistant) -> None: - """Test DTE Energy bridge handles an invalid value.""" - with requests_mock.Mocker() as mock_req: - mock_req.get( - f"http://{DTE_ENERGY_BRIDGE_CONFIG['ip']}/instantaneousdemand", - text="411", - ) - assert await async_setup_component( - hass, "sensor", {"sensor": DTE_ENERGY_BRIDGE_CONFIG} - ) - await hass.async_block_till_done() - assert hass.states.get("sensor.current_energy_usage").state == "unknown" From 0aeb8f44f4122328776d8ccd61ed50e70b79aa15 Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Fri, 6 
Dec 2024 08:04:02 +1000 Subject: [PATCH 0288/1198] Bump tesla-fleet-api to 0.8.5 (#132339) --- homeassistant/components/tesla_fleet/const.py | 1 + homeassistant/components/tesla_fleet/manifest.json | 2 +- homeassistant/components/teslemetry/manifest.json | 2 +- homeassistant/components/tessie/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/tesla_fleet/snapshots/test_diagnostics.ambr | 1 + 7 files changed, 7 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/tesla_fleet/const.py b/homeassistant/components/tesla_fleet/const.py index 53e34092326..c70cc3291f7 100644 --- a/homeassistant/components/tesla_fleet/const.py +++ b/homeassistant/components/tesla_fleet/const.py @@ -21,6 +21,7 @@ SCOPES = [ Scope.OPENID, Scope.OFFLINE_ACCESS, Scope.VEHICLE_DEVICE_DATA, + Scope.VEHICLE_LOCATION, Scope.VEHICLE_CMDS, Scope.VEHICLE_CHARGING_CMDS, Scope.ENERGY_DEVICE_DATA, diff --git a/homeassistant/components/tesla_fleet/manifest.json b/homeassistant/components/tesla_fleet/manifest.json index f27929032d7..95062a8f856 100644 --- a/homeassistant/components/tesla_fleet/manifest.json +++ b/homeassistant/components/tesla_fleet/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/tesla_fleet", "iot_class": "cloud_polling", "loggers": ["tesla-fleet-api"], - "requirements": ["tesla-fleet-api==0.8.4"] + "requirements": ["tesla-fleet-api==0.8.5"] } diff --git a/homeassistant/components/teslemetry/manifest.json b/homeassistant/components/teslemetry/manifest.json index fc82dea6445..3736d76bf36 100644 --- a/homeassistant/components/teslemetry/manifest.json +++ b/homeassistant/components/teslemetry/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/teslemetry", "iot_class": "cloud_polling", "loggers": ["tesla-fleet-api"], - "requirements": ["tesla-fleet-api==0.8.4", "teslemetry-stream==0.4.2"] + "requirements": ["tesla-fleet-api==0.8.5", "teslemetry-stream==0.4.2"] } diff --git a/homeassistant/components/tessie/manifest.json b/homeassistant/components/tessie/manifest.json index cab9f4c706d..2b8ae924fe3 100644 --- a/homeassistant/components/tessie/manifest.json +++ b/homeassistant/components/tessie/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/tessie", "iot_class": "cloud_polling", "loggers": ["tessie", "tesla-fleet-api"], - "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.8.4"] + "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.8.5"] } diff --git a/requirements_all.txt b/requirements_all.txt index 0e9b7e6d60b..bd85008b784 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2810,7 +2810,7 @@ temperusb==1.6.1 # homeassistant.components.tesla_fleet # homeassistant.components.teslemetry # homeassistant.components.tessie -tesla-fleet-api==0.8.4 +tesla-fleet-api==0.8.5 # homeassistant.components.powerwall tesla-powerwall==0.5.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 9b1787e40ba..db228c449b9 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2241,7 +2241,7 @@ temperusb==1.6.1 # homeassistant.components.tesla_fleet # homeassistant.components.teslemetry # homeassistant.components.tessie -tesla-fleet-api==0.8.4 +tesla-fleet-api==0.8.5 # homeassistant.components.powerwall tesla-powerwall==0.5.2 diff --git a/tests/components/tesla_fleet/snapshots/test_diagnostics.ambr b/tests/components/tesla_fleet/snapshots/test_diagnostics.ambr index 
eb8c57910a4..cdb24b1d2b5 100644 --- a/tests/components/tesla_fleet/snapshots/test_diagnostics.ambr +++ b/tests/components/tesla_fleet/snapshots/test_diagnostics.ambr @@ -165,6 +165,7 @@ 'openid', 'offline_access', 'vehicle_device_data', + 'vehicle_location', 'vehicle_cmds', 'vehicle_charging_cmds', 'energy_device_data', From edc857b365f40dfb2dd4e5b9c0cd5d87a0f0126e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 5 Dec 2024 19:50:02 -0600 Subject: [PATCH 0289/1198] Bump aiohttp to 3.11.10 (#132441) --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 8617ed58ed5..d57ed20ab6b 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -5,7 +5,7 @@ aiodiscover==2.1.0 aiodns==3.2.0 aiohasupervisor==0.2.1 aiohttp-fast-zlib==0.2.0 -aiohttp==3.11.9 +aiohttp==3.11.10 aiohttp_cors==0.7.0 aiozoneinfo==0.2.1 astral==2.2 diff --git a/pyproject.toml b/pyproject.toml index af910075b32..1707b92ede9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,7 +29,7 @@ dependencies = [ # change behavior based on presence of supervisor. Deprecated with #127228 # Lib can be removed with 2025.11 "aiohasupervisor==0.2.1", - "aiohttp==3.11.9", + "aiohttp==3.11.10", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.2.0", "aiozoneinfo==0.2.1", diff --git a/requirements.txt b/requirements.txt index e4aa6dc121a..761af716056 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,7 +5,7 @@ # Home Assistant Core aiodns==3.2.0 aiohasupervisor==0.2.1 -aiohttp==3.11.9 +aiohttp==3.11.10 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.2.0 aiozoneinfo==0.2.1 From 88eb611eef7add22e4438d87bdf0a96a805256bc Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 5 Dec 2024 20:52:48 -0600 Subject: [PATCH 0290/1198] Fix deprecated call to mimetypes.guess_type in CachingStaticResource (#132299) --- homeassistant/components/http/static.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/http/static.py b/homeassistant/components/http/static.py index 29c5840a4bf..9ca34af3741 100644 --- a/homeassistant/components/http/static.py +++ b/homeassistant/components/http/static.py @@ -4,6 +4,7 @@ from __future__ import annotations from collections.abc import Mapping from pathlib import Path +import sys from typing import Final from aiohttp.hdrs import CACHE_CONTROL, CONTENT_TYPE @@ -17,6 +18,15 @@ CACHE_HEADER = f"public, max-age={CACHE_TIME}" CACHE_HEADERS: Mapping[str, str] = {CACHE_CONTROL: CACHE_HEADER} RESPONSE_CACHE: LRU[tuple[str, Path], tuple[Path, str]] = LRU(512) +if sys.version_info >= (3, 13): + # guess_type is soft-deprecated in 3.13 + # for paths and should only be used for + # URLs. guess_file_type should be used + # for paths instead. + _GUESSER = CONTENT_TYPES.guess_file_type +else: + _GUESSER = CONTENT_TYPES.guess_type + class CachingStaticResource(StaticResource): """Static Resource handler that will add cache headers.""" @@ -37,9 +47,7 @@ class CachingStaticResource(StaticResource): # Must be directory index; ignore caching return response file_path = response._path # noqa: SLF001 - response.content_type = ( - CONTENT_TYPES.guess_type(file_path)[0] or FALLBACK_CONTENT_TYPE - ) + response.content_type = _GUESSER(file_path)[0] or FALLBACK_CONTENT_TYPE # Cache actual header after setter construction. 
content_type = response.headers[CONTENT_TYPE] RESPONSE_CACHE[key] = (file_path, content_type) From 909b13809e9742ba2e6887a70e5a8da8fed03b75 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 5 Dec 2024 21:23:24 -0600 Subject: [PATCH 0291/1198] Bump aioesphomeapi to 28.0.0 (#132447) --- homeassistant/components/esphome/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/esphome/manifest.json b/homeassistant/components/esphome/manifest.json index 77a3164d94c..775ffbff4c8 100644 --- a/homeassistant/components/esphome/manifest.json +++ b/homeassistant/components/esphome/manifest.json @@ -16,7 +16,7 @@ "loggers": ["aioesphomeapi", "noiseprotocol", "bleak_esphome"], "mqtt": ["esphome/discover/#"], "requirements": [ - "aioesphomeapi==27.0.3", + "aioesphomeapi==28.0.0", "esphome-dashboard-api==1.2.3", "bleak-esphome==1.1.0" ], diff --git a/requirements_all.txt b/requirements_all.txt index bd85008b784..e479c3a9630 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -243,7 +243,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==27.0.3 +aioesphomeapi==28.0.0 # homeassistant.components.flo aioflo==2021.11.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index db228c449b9..8e6375a67ad 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -231,7 +231,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==27.0.3 +aioesphomeapi==28.0.0 # homeassistant.components.flo aioflo==2021.11.0 From 28d6a21189f6987daa1e55d94c1e50e749f77327 Mon Sep 17 00:00:00 2001 From: Glenn Waters Date: Thu, 5 Dec 2024 22:32:33 -0500 Subject: [PATCH 0292/1198] Bump upb-lib to 0.5.9 (#132411) --- homeassistant/components/upb/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/upb/manifest.json b/homeassistant/components/upb/manifest.json index 6b49c859771..1e61747b3f1 100644 --- a/homeassistant/components/upb/manifest.json +++ b/homeassistant/components/upb/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/upb", "iot_class": "local_push", "loggers": ["upb_lib"], - "requirements": ["upb-lib==0.5.8"] + "requirements": ["upb-lib==0.5.9"] } diff --git a/requirements_all.txt b/requirements_all.txt index e479c3a9630..34fb994d330 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2915,7 +2915,7 @@ unifiled==0.11 universal-silabs-flasher==0.0.25 # homeassistant.components.upb -upb-lib==0.5.8 +upb-lib==0.5.9 # homeassistant.components.upcloud upcloud-api==2.6.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8e6375a67ad..dd5dda9a8e1 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2325,7 +2325,7 @@ unifi-discovery==1.2.0 universal-silabs-flasher==0.0.25 # homeassistant.components.upb -upb-lib==0.5.8 +upb-lib==0.5.9 # homeassistant.components.upcloud upcloud-api==2.6.0 From 60fd9d50270472f75319ddb0294a93b98ef063e5 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Fri, 6 Dec 2024 04:34:05 +0100 Subject: [PATCH 0293/1198] Update mypy-dev to 1.14.0a6 (#132440) --- mypy.ini | 3 +-- requirements_test.txt | 2 +- script/hassfest/mypy_config.py | 2 +- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/mypy.ini 
b/mypy.ini index 8e675ff6481..ce51adc3816 100644 --- a/mypy.ini +++ b/mypy.ini @@ -11,12 +11,11 @@ follow_imports = normal local_partial_types = true strict_equality = true no_implicit_optional = true -report_deprecated_as_error = true warn_incomplete_stub = true warn_redundant_casts = true warn_unused_configs = true warn_unused_ignores = true -enable_error_code = ignore-without-code, redundant-self, truthy-iterable +enable_error_code = deprecated, ignore-without-code, redundant-self, truthy-iterable disable_error_code = annotation-unchecked, import-not-found, import-untyped extra_checks = false check_untyped_defs = true diff --git a/requirements_test.txt b/requirements_test.txt index 2370bed8986..1725624a8cd 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -12,7 +12,7 @@ coverage==7.6.8 freezegun==1.5.1 license-expression==30.4.0 mock-open==1.4.0 -mypy-dev==1.14.0a5 +mypy-dev==1.14.0a6 pre-commit==4.0.0 pydantic==1.10.19 pylint==3.3.1 diff --git a/script/hassfest/mypy_config.py b/script/hassfest/mypy_config.py index 25fe875e437..ec4d4b3d3a9 100644 --- a/script/hassfest/mypy_config.py +++ b/script/hassfest/mypy_config.py @@ -43,13 +43,13 @@ GENERAL_SETTINGS: Final[dict[str, str]] = { "local_partial_types": "true", "strict_equality": "true", "no_implicit_optional": "true", - "report_deprecated_as_error": "true", "warn_incomplete_stub": "true", "warn_redundant_casts": "true", "warn_unused_configs": "true", "warn_unused_ignores": "true", "enable_error_code": ", ".join( # noqa: FLY002 [ + "deprecated", "ignore-without-code", "redundant-self", "truthy-iterable", From 9058e00aefb76f5e18cd78950ffd022387583751 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Joakim=20S=C3=B8rensen?= Date: Fri, 6 Dec 2024 08:20:06 +0100 Subject: [PATCH 0294/1198] Bump hass-nabucasa from 0.85.0 to 0.86.0 (#132456) Bump hass-nabucasa fro 0.85.0 to 0.86.0 --- homeassistant/components/cloud/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/cloud/manifest.json b/homeassistant/components/cloud/manifest.json index 60b105b401e..661edb67762 100644 --- a/homeassistant/components/cloud/manifest.json +++ b/homeassistant/components/cloud/manifest.json @@ -8,6 +8,6 @@ "integration_type": "system", "iot_class": "cloud_push", "loggers": ["hass_nabucasa"], - "requirements": ["hass-nabucasa==0.85.0"], + "requirements": ["hass-nabucasa==0.86.0"], "single_config_entry": true } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index d57ed20ab6b..1bef0eb6454 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -31,7 +31,7 @@ fnv-hash-fast==1.0.2 go2rtc-client==0.1.1 ha-ffmpeg==3.2.2 habluetooth==3.6.0 -hass-nabucasa==0.85.0 +hass-nabucasa==0.86.0 hassil==2.0.5 home-assistant-bluetooth==1.13.0 home-assistant-frontend==20241127.4 diff --git a/pyproject.toml b/pyproject.toml index 1707b92ede9..dcfd84b0fbe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,7 +45,7 @@ dependencies = [ "fnv-hash-fast==1.0.2", # hass-nabucasa is imported by helpers which don't depend on the cloud # integration - "hass-nabucasa==0.85.0", + "hass-nabucasa==0.86.0", # When bumping httpx, please check the version pins of # httpcore, anyio, and h11 in gen_requirements_all "httpx==0.27.2", diff --git a/requirements.txt b/requirements.txt index 
761af716056..4379d51e204 100644 --- a/requirements.txt +++ b/requirements.txt @@ -19,7 +19,7 @@ bcrypt==4.2.0 certifi>=2021.5.30 ciso8601==2.3.1 fnv-hash-fast==1.0.2 -hass-nabucasa==0.85.0 +hass-nabucasa==0.86.0 httpx==0.27.2 home-assistant-bluetooth==1.13.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 34fb994d330..c518282b70b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1087,7 +1087,7 @@ habitipy==0.3.3 habluetooth==3.6.0 # homeassistant.components.cloud -hass-nabucasa==0.85.0 +hass-nabucasa==0.86.0 # homeassistant.components.splunk hass-splunk==0.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index dd5dda9a8e1..e4c5b6aaf7c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -925,7 +925,7 @@ habitipy==0.3.3 habluetooth==3.6.0 # homeassistant.components.cloud -hass-nabucasa==0.85.0 +hass-nabucasa==0.86.0 # homeassistant.components.conversation hassil==2.0.5 From ef55a8e665444c44e0f814584e6674205fe317de Mon Sep 17 00:00:00 2001 From: Blake Bryant Date: Thu, 5 Dec 2024 23:28:02 -0800 Subject: [PATCH 0295/1198] Bump pydeako to 0.6.0 (#132432) feat: update deako integration to use improved version of pydeako Some things of note: - simplified errors - pydeako has introduced some connection improvements See here: https://github.com/DeakoLights/pydeako/releases/tag/0.6.0 --- homeassistant/components/deako/__init__.py | 11 ++----- homeassistant/components/deako/config_flow.py | 2 +- homeassistant/components/deako/light.py | 2 +- homeassistant/components/deako/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/deako/test_init.py | 31 ++----------------- 7 files changed, 11 insertions(+), 41 deletions(-) diff --git a/homeassistant/components/deako/__init__.py b/homeassistant/components/deako/__init__.py index fdcf09fad60..7a169defe01 100644 --- a/homeassistant/components/deako/__init__.py +++ b/homeassistant/components/deako/__init__.py @@ -4,8 +4,7 @@ from __future__ import annotations import logging -from pydeako.deako import Deako, DeviceListTimeout, FindDevicesTimeout -from pydeako.discover import DeakoDiscoverer +from pydeako import Deako, DeakoDiscoverer, FindDevicesError from homeassistant.components import zeroconf from homeassistant.config_entries import ConfigEntry @@ -30,12 +29,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: DeakoConfigEntry) -> boo await connection.connect() try: await connection.find_devices() - except DeviceListTimeout as exc: # device list never received - _LOGGER.warning("Device not responding to device list") - await connection.disconnect() - raise ConfigEntryNotReady(exc) from exc - except FindDevicesTimeout as exc: # total devices expected not received - _LOGGER.warning("Device not responding to device requests") + except FindDevicesError as exc: + _LOGGER.warning("Error finding devices: %s", exc) await connection.disconnect() raise ConfigEntryNotReady(exc) from exc diff --git a/homeassistant/components/deako/config_flow.py b/homeassistant/components/deako/config_flow.py index d0676fa81d9..273cbf2795e 100644 --- a/homeassistant/components/deako/config_flow.py +++ b/homeassistant/components/deako/config_flow.py @@ -1,6 +1,6 @@ """Config flow for deako.""" -from pydeako.discover import DeakoDiscoverer, DevicesNotFoundException +from pydeako import DeakoDiscoverer, DevicesNotFoundException from homeassistant.components import zeroconf from homeassistant.core import HomeAssistant diff --git 
a/homeassistant/components/deako/light.py b/homeassistant/components/deako/light.py index c7ff8765402..75b01935c9a 100644 --- a/homeassistant/components/deako/light.py +++ b/homeassistant/components/deako/light.py @@ -2,7 +2,7 @@ from typing import Any -from pydeako.deako import Deako +from pydeako import Deako from homeassistant.components.light import ATTR_BRIGHTNESS, ColorMode, LightEntity from homeassistant.core import HomeAssistant diff --git a/homeassistant/components/deako/manifest.json b/homeassistant/components/deako/manifest.json index e3099439b9d..f4f4782530b 100644 --- a/homeassistant/components/deako/manifest.json +++ b/homeassistant/components/deako/manifest.json @@ -7,7 +7,7 @@ "documentation": "https://www.home-assistant.io/integrations/deako", "iot_class": "local_polling", "loggers": ["pydeako"], - "requirements": ["pydeako==0.5.4"], + "requirements": ["pydeako==0.6.0"], "single_config_entry": true, "zeroconf": ["_deako._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index c518282b70b..b4a662e8d91 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1841,7 +1841,7 @@ pydaikin==2.13.7 pydanfossair==0.1.0 # homeassistant.components.deako -pydeako==0.5.4 +pydeako==0.6.0 # homeassistant.components.deconz pydeconz==118 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index e4c5b6aaf7c..1710b83fe69 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1491,7 +1491,7 @@ pycsspeechtts==1.0.8 pydaikin==2.13.7 # homeassistant.components.deako -pydeako==0.5.4 +pydeako==0.6.0 # homeassistant.components.deconz pydeconz==118 diff --git a/tests/components/deako/test_init.py b/tests/components/deako/test_init.py index b4c0e8bb1f7..c2291330feb 100644 --- a/tests/components/deako/test_init.py +++ b/tests/components/deako/test_init.py @@ -2,7 +2,7 @@ from unittest.mock import MagicMock -from pydeako.deako import DeviceListTimeout, FindDevicesTimeout +from pydeako import FindDevicesError from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -37,7 +37,7 @@ async def test_deako_async_setup_entry( assert mock_config_entry.runtime_data == pydeako_deako_mock.return_value -async def test_deako_async_setup_entry_device_list_timeout( +async def test_deako_async_setup_entry_devices_error( hass: HomeAssistant, mock_config_entry: MockConfigEntry, pydeako_deako_mock: MagicMock, @@ -47,32 +47,7 @@ async def test_deako_async_setup_entry_device_list_timeout( mock_config_entry.add_to_hass(hass) - pydeako_deako_mock.return_value.find_devices.side_effect = DeviceListTimeout() - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - pydeako_deako_mock.assert_called_once_with( - pydeako_discoverer_mock.return_value.get_address - ) - pydeako_deako_mock.return_value.connect.assert_called_once() - pydeako_deako_mock.return_value.find_devices.assert_called_once() - pydeako_deako_mock.return_value.disconnect.assert_called_once() - - assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_deako_async_setup_entry_find_devices_timeout( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - pydeako_deako_mock: MagicMock, - pydeako_discoverer_mock: MagicMock, -) -> None: - """Test async_setup_entry raises ConfigEntryNotReady when pydeako raises FindDevicesTimeout.""" - - mock_config_entry.add_to_hass(hass) - - pydeako_deako_mock.return_value.find_devices.side_effect = FindDevicesTimeout() + 
pydeako_deako_mock.return_value.find_devices.side_effect = FindDevicesError() await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() From ff46b3a2bb5f7ca9b94613f7c0ea83a98741ffbe Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 6 Dec 2024 08:29:09 +0100 Subject: [PATCH 0296/1198] Bump actions/cache from 4.1.2 to 4.2.0 (#132419) --- .github/workflows/ci.yaml | 44 +++++++++++++++++++-------------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 34c2fa838a6..43bdc7a671b 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -240,7 +240,7 @@ jobs: check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache@v4.1.2 + uses: actions/cache@v4.2.0 with: path: venv key: >- @@ -256,7 +256,7 @@ jobs: uv pip install "$(cat requirements_test.txt | grep pre-commit)" - name: Restore pre-commit environment from cache id: cache-precommit - uses: actions/cache@v4.1.2 + uses: actions/cache@v4.2.0 with: path: ${{ env.PRE_COMMIT_CACHE }} lookup-only: true @@ -286,7 +286,7 @@ jobs: check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -295,7 +295,7 @@ jobs: needs.info.outputs.pre-commit_cache_key }} - name: Restore pre-commit environment from cache id: cache-precommit - uses: actions/cache/restore@v4.1.2 + uses: actions/cache/restore@v4.2.0 with: path: ${{ env.PRE_COMMIT_CACHE }} fail-on-cache-miss: true @@ -326,7 +326,7 @@ jobs: check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -335,7 +335,7 @@ jobs: needs.info.outputs.pre-commit_cache_key }} - name: Restore pre-commit environment from cache id: cache-precommit - uses: actions/cache/restore@v4.1.2 + uses: actions/cache/restore@v4.2.0 with: path: ${{ env.PRE_COMMIT_CACHE }} fail-on-cache-miss: true @@ -366,7 +366,7 @@ jobs: check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -375,7 +375,7 @@ jobs: needs.info.outputs.pre-commit_cache_key }} - name: Restore pre-commit environment from cache id: cache-precommit - uses: actions/cache/restore@v4.1.2 + uses: actions/cache/restore@v4.2.0 with: path: ${{ env.PRE_COMMIT_CACHE }} fail-on-cache-miss: true @@ -482,7 +482,7 @@ jobs: env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache@v4.1.2 + uses: actions/cache@v4.2.0 with: path: venv key: >- @@ -490,7 +490,7 @@ jobs: needs.info.outputs.python_cache_key }} - name: Restore uv wheel cache if: steps.cache-venv.outputs.cache-hit != 'true' - uses: actions/cache@v4.1.2 + uses: actions/cache@v4.2.0 with: path: ${{ env.UV_CACHE_DIR }} key: >- @@ -578,7 +578,7 @@ jobs: check-latest: true - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -611,7 +611,7 @@ jobs: check-latest: true - name: Restore base Python virtual environment id: 
cache-venv - uses: actions/cache/restore@v4.1.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -649,7 +649,7 @@ jobs: check-latest: true - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -692,7 +692,7 @@ jobs: check-latest: true - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -739,7 +739,7 @@ jobs: check-latest: true - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -791,7 +791,7 @@ jobs: env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -799,7 +799,7 @@ jobs: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{ needs.info.outputs.python_cache_key }} - name: Restore mypy cache - uses: actions/cache@v4.1.2 + uses: actions/cache@v4.2.0 with: path: .mypy_cache key: >- @@ -865,7 +865,7 @@ jobs: check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -929,7 +929,7 @@ jobs: check-latest: true - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -1050,7 +1050,7 @@ jobs: check-latest: true - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -1179,7 +1179,7 @@ jobs: check-latest: true - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -1325,7 +1325,7 @@ jobs: check-latest: true - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true From ce3db31b30c21092ab808b6b4022b9fbdea69b89 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Fri, 6 Dec 2024 08:33:05 +0100 Subject: [PATCH 0297/1198] Fix nordpool dont have previous or next price (#132457) --- homeassistant/components/nordpool/sensor.py | 23 ++++++++++++++++----- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/nordpool/sensor.py b/homeassistant/components/nordpool/sensor.py index e7e655a6657..47617cc8e42 100644 --- a/homeassistant/components/nordpool/sensor.py +++ b/homeassistant/components/nordpool/sensor.py @@ -27,7 +27,9 @@ from .entity import NordpoolBaseEntity PARALLEL_UPDATES = 0 -def get_prices(data: DeliveryPeriodData) -> dict[str, tuple[float, float, float]]: +def get_prices( + data: DeliveryPeriodData, +) -> dict[str, tuple[float | None, float, float | None]]: """Return previous, current and next 
prices. Output: {"SE3": (10.0, 10.5, 12.1)} @@ -39,6 +41,7 @@ def get_prices(data: DeliveryPeriodData) -> dict[str, tuple[float, float, float] previous_time = current_time - timedelta(hours=1) next_time = current_time + timedelta(hours=1) price_data = data.entries + LOGGER.debug("Price data: %s", price_data) for entry in price_data: if entry.start <= current_time <= entry.end: current_price_entries = entry.entry @@ -46,10 +49,20 @@ def get_prices(data: DeliveryPeriodData) -> dict[str, tuple[float, float, float] last_price_entries = entry.entry if entry.start <= next_time <= entry.end: next_price_entries = entry.entry + LOGGER.debug( + "Last price %s, current price %s, next price %s", + last_price_entries, + current_price_entries, + next_price_entries, + ) result = {} for area, price in current_price_entries.items(): - result[area] = (last_price_entries[area], price, next_price_entries[area]) + result[area] = ( + last_price_entries.get(area), + price, + next_price_entries.get(area), + ) LOGGER.debug("Prices: %s", result) return result @@ -90,7 +103,7 @@ class NordpoolDefaultSensorEntityDescription(SensorEntityDescription): class NordpoolPricesSensorEntityDescription(SensorEntityDescription): """Describes Nord Pool prices sensor entity.""" - value_fn: Callable[[tuple[float, float, float]], float | None] + value_fn: Callable[[tuple[float | None, float, float | None]], float | None] @dataclass(frozen=True, kw_only=True) @@ -136,13 +149,13 @@ PRICES_SENSOR_TYPES: tuple[NordpoolPricesSensorEntityDescription, ...] = ( NordpoolPricesSensorEntityDescription( key="last_price", translation_key="last_price", - value_fn=lambda data: data[0] / 1000, + value_fn=lambda data: data[0] / 1000 if data[0] else None, suggested_display_precision=2, ), NordpoolPricesSensorEntityDescription( key="next_price", translation_key="next_price", - value_fn=lambda data: data[2] / 1000, + value_fn=lambda data: data[2] / 1000 if data[2] else None, suggested_display_precision=2, ), ) From 30f84f55a4c3f45e0df3fe239461169ac499d1ac Mon Sep 17 00:00:00 2001 From: Petar Petrov Date: Fri, 6 Dec 2024 10:35:48 +0200 Subject: [PATCH 0298/1198] Handle Z-Wave JS S2 inclusion via Inclusion Controller (#132073) * ZwaveJS: Handle S2 inclusion via Inclusion Controller * improved tests --- homeassistant/components/zwave_js/api.py | 35 +++++++++++++ tests/components/zwave_js/test_api.py | 62 ++++++++++++++++++++++++ 2 files changed, 97 insertions(+) diff --git a/homeassistant/components/zwave_js/api.py b/homeassistant/components/zwave_js/api.py index 88f8f25c8e2..1a1cd6ae9c1 100644 --- a/homeassistant/components/zwave_js/api.py +++ b/homeassistant/components/zwave_js/api.py @@ -396,6 +396,7 @@ def async_register_api(hass: HomeAssistant) -> None: websocket_api.async_register_command(hass, websocket_node_alerts) websocket_api.async_register_command(hass, websocket_add_node) websocket_api.async_register_command(hass, websocket_cancel_secure_bootstrap_s2) + websocket_api.async_register_command(hass, websocket_subscribe_s2_inclusion) websocket_api.async_register_command(hass, websocket_grant_security_classes) websocket_api.async_register_command(hass, websocket_validate_dsk_and_enter_pin) websocket_api.async_register_command(hass, websocket_provision_smart_start_node) @@ -863,6 +864,40 @@ async def websocket_cancel_secure_bootstrap_s2( connection.send_result(msg[ID]) +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required(TYPE): "zwave_js/subscribe_s2_inclusion", + vol.Required(ENTRY_ID): str, + } +) 
+@websocket_api.async_response +@async_handle_failed_command +@async_get_entry +async def websocket_subscribe_s2_inclusion( + hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], + entry: ConfigEntry, + client: Client, + driver: Driver, +) -> None: + """Subscribe to S2 inclusion initiated by the controller.""" + + @callback + def forward_dsk(event: dict) -> None: + connection.send_message( + websocket_api.event_message( + msg[ID], {"event": event["event"], "dsk": event["dsk"]} + ) + ) + + unsub = driver.controller.on("validate dsk and enter pin", forward_dsk) + connection.subscriptions[msg["id"]] = unsub + msg[DATA_UNSUBSCRIBE] = [unsub] + connection.send_result(msg[ID]) + + @websocket_api.require_admin @websocket_api.websocket_command( { diff --git a/tests/components/zwave_js/test_api.py b/tests/components/zwave_js/test_api.py index 3761ba6eaa6..a3f70e92dcf 100644 --- a/tests/components/zwave_js/test_api.py +++ b/tests/components/zwave_js/test_api.py @@ -5261,3 +5261,65 @@ async def test_cancel_secure_bootstrap_s2( assert not msg["success"] assert msg["error"]["code"] == ERR_NOT_FOUND + + +async def test_subscribe_s2_inclusion( + hass: HomeAssistant, integration, client, hass_ws_client: WebSocketGenerator +) -> None: + """Test the subscribe_s2_inclusion websocket command.""" + entry = integration + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/subscribe_s2_inclusion", + ENTRY_ID: entry.entry_id, + } + ) + + msg = await ws_client.receive_json() + assert msg["success"] + assert msg["result"] is None + + # Test receiving DSK request event + event = Event( + type="validate dsk and enter pin", + data={ + "source": "controller", + "event": "validate dsk and enter pin", + "dsk": "test_dsk", + }, + ) + client.driver.receive_event(event) + + msg = await ws_client.receive_json() + assert msg["event"] == { + "event": "validate dsk and enter pin", + "dsk": "test_dsk", + } + + # Test sending command with not loaded entry fails + await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/subscribe_s2_inclusion", + ENTRY_ID: entry.entry_id, + } + ) + msg = await ws_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_LOADED + + # Test invalid config entry id + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/subscribe_s2_inclusion", + ENTRY_ID: "INVALID", + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_FOUND From 4a7e6bc068e75c61f5ce4333bbda46f2f1431cbf Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 6 Dec 2024 09:55:00 +0100 Subject: [PATCH 0299/1198] Fix flaky CI from azure_event_hub (#132461) --- tests/components/azure_event_hub/test_init.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/components/azure_event_hub/test_init.py b/tests/components/azure_event_hub/test_init.py index 1b0550b147b..5ffc6106c11 100644 --- a/tests/components/azure_event_hub/test_init.py +++ b/tests/components/azure_event_hub/test_init.py @@ -112,6 +112,7 @@ async def test_send_batch_error( ) await hass.async_block_till_done() mock_send_batch.assert_called_once() + mock_send_batch.side_effect = None # Reset to avoid error in teardown async def test_late_event( From 0c8ebbe58850161e96f60de77ed3c2b42a39fe9d Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: 
Fri, 6 Dec 2024 09:56:28 +0100 Subject: [PATCH 0300/1198] Log warning on use of deprecated light constants (#132387) --- homeassistant/components/light/__init__.py | 81 ++++++++++++++++------ tests/components/light/test_init.py | 56 +++++++++++++++ 2 files changed, 115 insertions(+), 22 deletions(-) diff --git a/homeassistant/components/light/__init__.py b/homeassistant/components/light/__init__.py index 37ee6fe88fd..1a848232128 100644 --- a/homeassistant/components/light/__init__.py +++ b/homeassistant/components/light/__init__.py @@ -7,9 +7,10 @@ import csv import dataclasses from datetime import timedelta from enum import IntFlag, StrEnum +from functools import partial import logging import os -from typing import Any, Self, cast, final +from typing import Any, Final, Self, cast, final from propcache import cached_property import voluptuous as vol @@ -24,6 +25,13 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv, entity_registry as er +from homeassistant.helpers.deprecation import ( + DeprecatedConstant, + DeprecatedConstantEnum, + all_with_deprecated_constants, + check_if_deprecated_constant, + dir_with_deprecated_constants, +) from homeassistant.helpers.entity import ToggleEntity, ToggleEntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType, VolDictType @@ -51,12 +59,24 @@ class LightEntityFeature(IntFlag): # These SUPPORT_* constants are deprecated as of Home Assistant 2022.5. # Please use the LightEntityFeature enum instead. -SUPPORT_BRIGHTNESS = 1 # Deprecated, replaced by color modes -SUPPORT_COLOR_TEMP = 2 # Deprecated, replaced by color modes -SUPPORT_EFFECT = 4 -SUPPORT_FLASH = 8 -SUPPORT_COLOR = 16 # Deprecated, replaced by color modes -SUPPORT_TRANSITION = 32 +_DEPRECATED_SUPPORT_BRIGHTNESS: Final = DeprecatedConstant( + 1, "supported_color_modes", "2026.1" +) # Deprecated, replaced by color modes +_DEPRECATED_SUPPORT_COLOR_TEMP: Final = DeprecatedConstant( + 2, "supported_color_modes", "2026.1" +) # Deprecated, replaced by color modes +_DEPRECATED_SUPPORT_EFFECT: Final = DeprecatedConstantEnum( + LightEntityFeature.EFFECT, "2026.1" +) +_DEPRECATED_SUPPORT_FLASH: Final = DeprecatedConstantEnum( + LightEntityFeature.FLASH, "2026.1" +) +_DEPRECATED_SUPPORT_COLOR: Final = DeprecatedConstant( + 16, "supported_color_modes", "2026.1" +) # Deprecated, replaced by color modes +_DEPRECATED_SUPPORT_TRANSITION: Final = DeprecatedConstantEnum( + LightEntityFeature.TRANSITION, "2026.1" +) # Color mode of the light ATTR_COLOR_MODE = "color_mode" @@ -85,16 +105,22 @@ class ColorMode(StrEnum): # These COLOR_MODE_* constants are deprecated as of Home Assistant 2022.5. # Please use the LightEntityFeature enum instead. 
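# [Editorial note: illustrative sketch, not part of the upstream patch.]
# The _DEPRECATED_* objects above rely on the module-level __getattr__ hook added
# at the end of this file (__getattr__ = partial(check_if_deprecated_constant,
# module_globals=globals())). Importing one of the removed constants still resolves
# to its old value, but a deprecation warning naming the replacement is logged, roughly:
#
#     from homeassistant.components import light
#     light.SUPPORT_BRIGHTNESS   # still evaluates to 1, warns to use supported_color_modes
#     light.SUPPORT_EFFECT       # still evaluates to LightEntityFeature.EFFECT (4)
#
# New code should use the enums directly instead, for example:
#
#     features = light.LightEntityFeature.EFFECT | light.LightEntityFeature.TRANSITION
#     color_modes = {light.ColorMode.HS, light.ColorMode.COLOR_TEMP}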
-COLOR_MODE_UNKNOWN = "unknown" -COLOR_MODE_ONOFF = "onoff" -COLOR_MODE_BRIGHTNESS = "brightness" -COLOR_MODE_COLOR_TEMP = "color_temp" -COLOR_MODE_HS = "hs" -COLOR_MODE_XY = "xy" -COLOR_MODE_RGB = "rgb" -COLOR_MODE_RGBW = "rgbw" -COLOR_MODE_RGBWW = "rgbww" -COLOR_MODE_WHITE = "white" +_DEPRECATED_COLOR_MODE_UNKNOWN: Final = DeprecatedConstantEnum( + ColorMode.UNKNOWN, "2026.1" +) +_DEPRECATED_COLOR_MODE_ONOFF: Final = DeprecatedConstantEnum(ColorMode.ONOFF, "2026.1") +_DEPRECATED_COLOR_MODE_BRIGHTNESS: Final = DeprecatedConstantEnum( + ColorMode.BRIGHTNESS, "2026.1" +) +_DEPRECATED_COLOR_MODE_COLOR_TEMP: Final = DeprecatedConstantEnum( + ColorMode.COLOR_TEMP, "2026.1" +) +_DEPRECATED_COLOR_MODE_HS: Final = DeprecatedConstantEnum(ColorMode.HS, "2026.1") +_DEPRECATED_COLOR_MODE_XY: Final = DeprecatedConstantEnum(ColorMode.XY, "2026.1") +_DEPRECATED_COLOR_MODE_RGB: Final = DeprecatedConstantEnum(ColorMode.RGB, "2026.1") +_DEPRECATED_COLOR_MODE_RGBW: Final = DeprecatedConstantEnum(ColorMode.RGBW, "2026.1") +_DEPRECATED_COLOR_MODE_RGBWW: Final = DeprecatedConstantEnum(ColorMode.RGBWW, "2026.1") +_DEPRECATED_COLOR_MODE_WHITE: Final = DeprecatedConstantEnum(ColorMode.WHITE, "2026.1") VALID_COLOR_MODES = { ColorMode.ONOFF, @@ -1209,7 +1235,7 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): data[ATTR_BRIGHTNESS] = self.brightness else: data[ATTR_BRIGHTNESS] = None - elif supported_features_value & SUPPORT_BRIGHTNESS: + elif supported_features_value & _DEPRECATED_SUPPORT_BRIGHTNESS.value: # Backwards compatibility for ambiguous / incomplete states # Warning is printed by supported_features_compat, remove in 2025.1 if _is_on: @@ -1230,7 +1256,7 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): else: data[ATTR_COLOR_TEMP_KELVIN] = None data[ATTR_COLOR_TEMP] = None - elif supported_features_value & SUPPORT_COLOR_TEMP: + elif supported_features_value & _DEPRECATED_SUPPORT_COLOR_TEMP.value: # Backwards compatibility # Warning is printed by supported_features_compat, remove in 2025.1 if _is_on: @@ -1286,11 +1312,14 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): supported_features_value = supported_features.value supported_color_modes: set[ColorMode] = set() - if supported_features_value & SUPPORT_COLOR_TEMP: + if supported_features_value & _DEPRECATED_SUPPORT_COLOR_TEMP.value: supported_color_modes.add(ColorMode.COLOR_TEMP) - if supported_features_value & SUPPORT_COLOR: + if supported_features_value & _DEPRECATED_SUPPORT_COLOR.value: supported_color_modes.add(ColorMode.HS) - if not supported_color_modes and supported_features_value & SUPPORT_BRIGHTNESS: + if ( + not supported_color_modes + and supported_features_value & _DEPRECATED_SUPPORT_BRIGHTNESS.value + ): supported_color_modes = {ColorMode.BRIGHTNESS} if not supported_color_modes: @@ -1345,3 +1374,11 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): return True # philips_js has known issues, we don't need users to open issues return self.platform.platform_name not in {"philips_js"} + + +# These can be removed if no deprecated constant are in this module anymore +__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) +__dir__ = partial( + dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] +) +__all__ = all_with_deprecated_constants(globals()) diff --git a/tests/components/light/test_init.py b/tests/components/light/test_init.py index 61e7f4e6c29..280ec569d4d 100644 --- 
a/tests/components/light/test_init.py +++ b/tests/components/light/test_init.py @@ -1,5 +1,6 @@ """The tests for the Light component.""" +from types import ModuleType from typing import Literal from unittest.mock import MagicMock, mock_open, patch @@ -29,6 +30,9 @@ from tests.common import ( MockEntityPlatform, MockUser, async_mock_service, + help_test_all, + import_and_test_deprecated_constant, + import_and_test_deprecated_constant_enum, setup_test_component_platform, ) @@ -2802,3 +2806,55 @@ def test_report_invalid_color_modes( entity._async_calculate_state() expected_warning = "sets invalid supported color modes" assert (expected_warning in caplog.text) is warning_expected + + +@pytest.mark.parametrize( + "module", + [light], +) +def test_all(module: ModuleType) -> None: + """Test module.__all__ is correctly set.""" + help_test_all(module) + + +@pytest.mark.parametrize( + ("constant_name", "constant_value"), + [("SUPPORT_BRIGHTNESS", 1), ("SUPPORT_COLOR_TEMP", 2), ("SUPPORT_COLOR", 16)], +) +def test_deprecated_support_light_constants( + caplog: pytest.LogCaptureFixture, + constant_name: str, + constant_value: int, +) -> None: + """Test deprecated format constants.""" + import_and_test_deprecated_constant( + caplog, light, constant_name, "supported_color_modes", constant_value, "2026.1" + ) + + +@pytest.mark.parametrize( + "entity_feature", + list(light.LightEntityFeature), +) +def test_deprecated_support_light_constants_enums( + caplog: pytest.LogCaptureFixture, + entity_feature: light.LightEntityFeature, +) -> None: + """Test deprecated support light constants.""" + import_and_test_deprecated_constant_enum( + caplog, light, entity_feature, "SUPPORT_", "2026.1" + ) + + +@pytest.mark.parametrize( + "entity_feature", + list(light.ColorMode), +) +def test_deprecated_color_mode_constants_enums( + caplog: pytest.LogCaptureFixture, + entity_feature: light.LightEntityFeature, +) -> None: + """Test deprecated support light constants.""" + import_and_test_deprecated_constant_enum( + caplog, light, entity_feature, "COLOR_MODE_", "2026.1" + ) From b4d01dfd0c3279a87d31b5a1d9a96b25a1f436e5 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 6 Dec 2024 10:11:52 +0100 Subject: [PATCH 0301/1198] Adjust scope of zha global quirks fixture (#132463) --- tests/components/zha/conftest.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/tests/components/zha/conftest.py b/tests/components/zha/conftest.py index a9f4c51d75d..1b280ea499a 100644 --- a/tests/components/zha/conftest.py +++ b/tests/components/zha/conftest.py @@ -8,6 +8,7 @@ from unittest.mock import AsyncMock, MagicMock, create_autospec, patch import warnings import pytest +import zhaquirks import zigpy from zigpy.application import ControllerApplication import zigpy.backups @@ -38,7 +39,7 @@ FIXTURE_GRP_NAME = "fixture group" COUNTER_NAMES = ["counter_1", "counter_2", "counter_3"] -@pytest.fixture(scope="module", autouse=True) +@pytest.fixture(scope="package", autouse=True) def globally_load_quirks(): """Load quirks automatically so that ZHA tests run deterministically in isolation. @@ -47,8 +48,6 @@ def globally_load_quirks(): run. 
""" - import zhaquirks # pylint: disable=import-outside-toplevel - zhaquirks.setup() From bd9aefda6272a4ba93bfca62ed0ebe35213fe427 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Fri, 6 Dec 2024 11:01:00 +0100 Subject: [PATCH 0302/1198] Point to the Ecovacs issue in the library for unspoorted devices (#132470) Co-authored-by: Franck Nijhof --- homeassistant/components/ecovacs/controller.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/ecovacs/controller.py b/homeassistant/components/ecovacs/controller.py index 3a70ab2af5b..69dd0f0813f 100644 --- a/homeassistant/components/ecovacs/controller.py +++ b/homeassistant/components/ecovacs/controller.py @@ -99,8 +99,8 @@ class EcovacsController: for device_config in devices.not_supported: _LOGGER.warning( ( - 'Device "%s" not supported. Please add support for it to ' - "https://github.com/DeebotUniverse/client.py: %s" + 'Device "%s" not supported. More information at ' + "https://github.com/DeebotUniverse/client.py/issues/612: %s" ), device_config["deviceName"], device_config, From 2eaf206562e5859bf8952dfb341b46269055117c Mon Sep 17 00:00:00 2001 From: G Johansson Date: Fri, 6 Dec 2024 11:16:03 +0100 Subject: [PATCH 0303/1198] Implement new state property for vacuum which is using an enum (#126353) * Implement new state property for vacuum which is using an enum * Mod * Mod init * Mods * Fix integrations * Tests * Fix state * Add vacuum tests * Fix last test * Litterrobot tests * Fixes * Tests * Fixes * Fix VacuumEntity * Mods * Mods * Mods * Update demo * LG * Fix vacuum * Fix Matter * Fix deprecation version * Mods * Fixes * Fix ruff * Fix tests * Fix roomba * Fix breaking dates --- .../components/alexa/capabilities.py | 2 +- homeassistant/components/demo/vacuum.py | 35 +-- homeassistant/components/ecovacs/vacuum.py | 35 ++- .../components/google_assistant/trait.py | 10 +- homeassistant/components/group/registry.py | 8 +- .../components/homekit/type_switches.py | 4 +- homeassistant/components/lg_thinq/vacuum.py | 40 ++- .../components/litterrobot/vacuum.py | 29 +-- homeassistant/components/matter/vacuum.py | 21 +- homeassistant/components/mqtt/vacuum.py | 37 ++- homeassistant/components/neato/vacuum.py | 25 +- homeassistant/components/roborock/vacuum.py | 55 ++-- homeassistant/components/romy/vacuum.py | 15 +- homeassistant/components/roomba/vacuum.py | 47 ++-- homeassistant/components/sharkiq/vacuum.py | 18 +- .../components/switchbot_cloud/vacuum.py | 31 +-- homeassistant/components/template/vacuum.py | 21 +- homeassistant/components/tuya/vacuum.py | 55 ++-- homeassistant/components/vacuum/__init__.py | 103 +++++++- homeassistant/components/vacuum/const.py | 42 ++- .../components/vacuum/device_condition.py | 6 +- .../components/vacuum/device_trigger.py | 6 +- .../components/vacuum/reproduce_state.py | 24 +- .../components/xiaomi_miio/vacuum.py | 59 ++--- tests/components/demo/test_vacuum.py | 36 ++- .../components/google_assistant/test_trait.py | 12 +- .../components/homekit/test_type_switches.py | 7 +- tests/components/litterrobot/test_init.py | 4 +- tests/components/litterrobot/test_vacuum.py | 21 +- tests/components/mqtt/test_vacuum.py | 13 +- tests/components/sharkiq/test_vacuum.py | 15 +- tests/components/template/test_vacuum.py | 63 ++--- tests/components/vacuum/__init__.py | 18 +- tests/components/vacuum/conftest.py | 112 +++++++- .../vacuum/test_device_condition.py | 15 +- .../components/vacuum/test_device_trigger.py | 16 +- tests/components/vacuum/test_init.py | 243 ++++++++++++++++-- 
.../components/vacuum/test_reproduce_state.py | 43 ++-- tests/components/xiaomi_miio/test_vacuum.py | 7 +- 39 files changed, 844 insertions(+), 509 deletions(-) diff --git a/homeassistant/components/alexa/capabilities.py b/homeassistant/components/alexa/capabilities.py index b2cda8ad76e..8672512acde 100644 --- a/homeassistant/components/alexa/capabilities.py +++ b/homeassistant/components/alexa/capabilities.py @@ -436,7 +436,7 @@ class AlexaPowerController(AlexaCapability): elif self.entity.domain == remote.DOMAIN: is_on = self.entity.state not in (STATE_OFF, STATE_UNKNOWN) elif self.entity.domain == vacuum.DOMAIN: - is_on = self.entity.state == vacuum.STATE_CLEANING + is_on = self.entity.state == vacuum.VacuumActivity.CLEANING elif self.entity.domain == timer.DOMAIN: is_on = self.entity.state != STATE_IDLE elif self.entity.domain == water_heater.DOMAIN: diff --git a/homeassistant/components/demo/vacuum.py b/homeassistant/components/demo/vacuum.py index d4c3820d29e..3dd945ab82e 100644 --- a/homeassistant/components/demo/vacuum.py +++ b/homeassistant/components/demo/vacuum.py @@ -7,12 +7,8 @@ from typing import Any from homeassistant.components.vacuum import ( ATTR_CLEANED_AREA, - STATE_CLEANING, - STATE_DOCKED, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, StateVacuumEntity, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.config_entries import ConfigEntry @@ -91,16 +87,11 @@ class StateDemoVacuum(StateVacuumEntity): """Initialize the vacuum.""" self._attr_name = name self._attr_supported_features = supported_features - self._state = STATE_DOCKED + self._attr_activity = VacuumActivity.DOCKED self._fan_speed = FAN_SPEEDS[1] self._cleaned_area: float = 0 self._battery_level = 100 - @property - def state(self) -> str: - """Return the current state of the vacuum.""" - return self._state - @property def battery_level(self) -> int: """Return the current battery level of the vacuum.""" @@ -123,33 +114,33 @@ class StateDemoVacuum(StateVacuumEntity): def start(self) -> None: """Start or resume the cleaning task.""" - if self._state != STATE_CLEANING: - self._state = STATE_CLEANING + if self._attr_activity != VacuumActivity.CLEANING: + self._attr_activity = VacuumActivity.CLEANING self._cleaned_area += 1.32 self._battery_level -= 1 self.schedule_update_ha_state() def pause(self) -> None: """Pause the cleaning task.""" - if self._state == STATE_CLEANING: - self._state = STATE_PAUSED + if self._attr_activity == VacuumActivity.CLEANING: + self._attr_activity = VacuumActivity.PAUSED self.schedule_update_ha_state() def stop(self, **kwargs: Any) -> None: """Stop the cleaning task, do not return to dock.""" - self._state = STATE_IDLE + self._attr_activity = VacuumActivity.IDLE self.schedule_update_ha_state() def return_to_base(self, **kwargs: Any) -> None: """Return dock to charging base.""" - self._state = STATE_RETURNING + self._attr_activity = VacuumActivity.RETURNING self.schedule_update_ha_state() event.call_later(self.hass, 30, self.__set_state_to_dock) def clean_spot(self, **kwargs: Any) -> None: """Perform a spot clean-up.""" - self._state = STATE_CLEANING + self._attr_activity = VacuumActivity.CLEANING self._cleaned_area += 1.32 self._battery_level -= 1 self.schedule_update_ha_state() @@ -167,12 +158,12 @@ class StateDemoVacuum(StateVacuumEntity): "persistent_notification", service_data={"message": "I'm here!", "title": "Locate request"}, ) - self._state = STATE_IDLE + self._attr_activity = VacuumActivity.IDLE self.async_write_ha_state() async def async_clean_spot(self, **kwargs: 
Any) -> None: """Locate the vacuum's position.""" - self._state = STATE_CLEANING + self._attr_activity = VacuumActivity.CLEANING self.async_write_ha_state() async def async_send_command( @@ -182,9 +173,9 @@ class StateDemoVacuum(StateVacuumEntity): **kwargs: Any, ) -> None: """Send a command to the vacuum.""" - self._state = STATE_IDLE + self._attr_activity = VacuumActivity.IDLE self.async_write_ha_state() def __set_state_to_dock(self, _: datetime) -> None: - self._state = STATE_DOCKED + self._attr_activity = VacuumActivity.DOCKED self.schedule_update_ha_state() diff --git a/homeassistant/components/ecovacs/vacuum.py b/homeassistant/components/ecovacs/vacuum.py index 0d14267e08d..dde4fd64b56 100644 --- a/homeassistant/components/ecovacs/vacuum.py +++ b/homeassistant/components/ecovacs/vacuum.py @@ -13,14 +13,9 @@ from deebot_client.models import CleanAction, CleanMode, Room, State import sucks from homeassistant.components.vacuum import ( - STATE_CLEANING, - STATE_DOCKED, - STATE_ERROR, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, StateVacuumEntity, StateVacuumEntityDescription, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.core import HomeAssistant, SupportsResponse @@ -123,22 +118,22 @@ class EcovacsLegacyVacuum(EcovacsLegacyEntity, StateVacuumEntity): self.schedule_update_ha_state() @property - def state(self) -> str | None: + def activity(self) -> VacuumActivity | None: """Return the state of the vacuum cleaner.""" if self.error is not None: - return STATE_ERROR + return VacuumActivity.ERROR if self.device.is_cleaning: - return STATE_CLEANING + return VacuumActivity.CLEANING if self.device.is_charging: - return STATE_DOCKED + return VacuumActivity.DOCKED if self.device.vacuum_status == sucks.CLEAN_MODE_STOP: - return STATE_IDLE + return VacuumActivity.IDLE if self.device.vacuum_status == sucks.CHARGE_MODE_RETURNING: - return STATE_RETURNING + return VacuumActivity.RETURNING return None @@ -202,7 +197,7 @@ class EcovacsLegacyVacuum(EcovacsLegacyEntity, StateVacuumEntity): def set_fan_speed(self, fan_speed: str, **kwargs: Any) -> None: """Set fan speed.""" - if self.state == STATE_CLEANING: + if self.state == VacuumActivity.CLEANING: self.device.run(sucks.Clean(mode=self.device.clean_status, speed=fan_speed)) def send_command( @@ -225,12 +220,12 @@ class EcovacsLegacyVacuum(EcovacsLegacyEntity, StateVacuumEntity): _STATE_TO_VACUUM_STATE = { - State.IDLE: STATE_IDLE, - State.CLEANING: STATE_CLEANING, - State.RETURNING: STATE_RETURNING, - State.DOCKED: STATE_DOCKED, - State.ERROR: STATE_ERROR, - State.PAUSED: STATE_PAUSED, + State.IDLE: VacuumActivity.IDLE, + State.CLEANING: VacuumActivity.CLEANING, + State.RETURNING: VacuumActivity.RETURNING, + State.DOCKED: VacuumActivity.DOCKED, + State.ERROR: VacuumActivity.ERROR, + State.PAUSED: VacuumActivity.PAUSED, } _ATTR_ROOMS = "rooms" @@ -284,7 +279,7 @@ class EcovacsVacuum( self.async_write_ha_state() async def on_status(event: StateEvent) -> None: - self._attr_state = _STATE_TO_VACUUM_STATE[event.state] + self._attr_activity = _STATE_TO_VACUUM_STATE[event.state] self.async_write_ha_state() self._subscribe(self._capability.battery.event, on_battery) diff --git a/homeassistant/components/google_assistant/trait.py b/homeassistant/components/google_assistant/trait.py index f99f1574038..8025a291031 100644 --- a/homeassistant/components/google_assistant/trait.py +++ b/homeassistant/components/google_assistant/trait.py @@ -729,7 +729,7 @@ class DockTrait(_Trait): def query_attributes(self) -> dict[str, Any]: """Return dock query 
attributes.""" - return {"isDocked": self.state.state == vacuum.STATE_DOCKED} + return {"isDocked": self.state.state == vacuum.VacuumActivity.DOCKED} async def execute(self, command, data, params, challenge): """Execute a dock command.""" @@ -825,8 +825,8 @@ class EnergyStorageTrait(_Trait): "capacityUntilFull": [ {"rawValue": 100 - battery_level, "unit": "PERCENTAGE"} ], - "isCharging": self.state.state == vacuum.STATE_DOCKED, - "isPluggedIn": self.state.state == vacuum.STATE_DOCKED, + "isCharging": self.state.state == vacuum.VacuumActivity.DOCKED, + "isPluggedIn": self.state.state == vacuum.VacuumActivity.DOCKED, } async def execute(self, command, data, params, challenge): @@ -882,8 +882,8 @@ class StartStopTrait(_Trait): if domain == vacuum.DOMAIN: return { - "isRunning": state == vacuum.STATE_CLEANING, - "isPaused": state == vacuum.STATE_PAUSED, + "isRunning": state == vacuum.VacuumActivity.CLEANING, + "isPaused": state == vacuum.VacuumActivity.PAUSED, } if domain in COVER_VALVE_DOMAINS: diff --git a/homeassistant/components/group/registry.py b/homeassistant/components/group/registry.py index 7ac5770f171..2f3c4aa5221 100644 --- a/homeassistant/components/group/registry.py +++ b/homeassistant/components/group/registry.py @@ -11,7 +11,7 @@ from typing import Protocol from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.components.climate import HVACMode from homeassistant.components.lock import LockState -from homeassistant.components.vacuum import STATE_CLEANING, STATE_ERROR, STATE_RETURNING +from homeassistant.components.vacuum import VacuumActivity from homeassistant.components.water_heater import ( STATE_ECO, STATE_ELECTRIC, @@ -105,9 +105,9 @@ ON_OFF_STATES: dict[Platform | str, tuple[set[str], str, str]] = { Platform.VACUUM: ( { STATE_ON, - STATE_CLEANING, - STATE_RETURNING, - STATE_ERROR, + VacuumActivity.CLEANING, + VacuumActivity.RETURNING, + VacuumActivity.ERROR, }, STATE_ON, STATE_OFF, diff --git a/homeassistant/components/homekit/type_switches.py b/homeassistant/components/homekit/type_switches.py index 68df6c38ad6..0482a5956ac 100644 --- a/homeassistant/components/homekit/type_switches.py +++ b/homeassistant/components/homekit/type_switches.py @@ -21,7 +21,7 @@ from homeassistant.components.vacuum import ( DOMAIN as VACUUM_DOMAIN, SERVICE_RETURN_TO_BASE, SERVICE_START, - STATE_CLEANING, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.const import ( @@ -213,7 +213,7 @@ class Vacuum(Switch): @callback def async_update_state(self, new_state: State) -> None: """Update switch state after state changed.""" - current_state = new_state.state in (STATE_CLEANING, STATE_ON) + current_state = new_state.state in (VacuumActivity.CLEANING, STATE_ON) _LOGGER.debug("%s: Set current state to %s", self.entity_id, current_state) self.char_on.set_value(current_state) diff --git a/homeassistant/components/lg_thinq/vacuum.py b/homeassistant/components/lg_thinq/vacuum.py index 138b9ba55bf..6cbb731869c 100644 --- a/homeassistant/components/lg_thinq/vacuum.py +++ b/homeassistant/components/lg_thinq/vacuum.py @@ -9,15 +9,11 @@ from thinqconnect import DeviceType from thinqconnect.integration import ExtendedProperty from homeassistant.components.vacuum import ( - STATE_CLEANING, - STATE_DOCKED, - STATE_ERROR, - STATE_RETURNING, StateVacuumEntity, StateVacuumEntityDescription, + VacuumActivity, VacuumEntityFeature, ) -from homeassistant.const import STATE_IDLE, STATE_PAUSED from homeassistant.core import HomeAssistant from 
homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -46,21 +42,21 @@ class State(StrEnum): ROBOT_STATUS_TO_HA = { - "charging": STATE_DOCKED, - "diagnosis": STATE_IDLE, - "homing": STATE_RETURNING, - "initializing": STATE_IDLE, - "macrosector": STATE_IDLE, - "monitoring_detecting": STATE_IDLE, - "monitoring_moving": STATE_IDLE, - "monitoring_positioning": STATE_IDLE, - "pause": STATE_PAUSED, - "reservation": STATE_IDLE, - "setdate": STATE_IDLE, - "sleep": STATE_IDLE, - "standby": STATE_IDLE, - "working": STATE_CLEANING, - "error": STATE_ERROR, + "charging": VacuumActivity.DOCKED, + "diagnosis": VacuumActivity.IDLE, + "homing": VacuumActivity.RETURNING, + "initializing": VacuumActivity.IDLE, + "macrosector": VacuumActivity.IDLE, + "monitoring_detecting": VacuumActivity.IDLE, + "monitoring_moving": VacuumActivity.IDLE, + "monitoring_positioning": VacuumActivity.IDLE, + "pause": VacuumActivity.PAUSED, + "reservation": VacuumActivity.IDLE, + "setdate": VacuumActivity.IDLE, + "sleep": VacuumActivity.IDLE, + "standby": VacuumActivity.IDLE, + "working": VacuumActivity.CLEANING, + "error": VacuumActivity.ERROR, } ROBOT_BATT_TO_HA = { "moveless": 5, @@ -114,7 +110,7 @@ class ThinQStateVacuumEntity(ThinQEntity, StateVacuumEntity): super()._update_status() # Update state. - self._attr_state = ROBOT_STATUS_TO_HA[self.data.current_state] + self._attr_activity = ROBOT_STATUS_TO_HA[self.data.current_state] # Update battery. if (level := self.data.battery) is not None: @@ -135,7 +131,7 @@ class ThinQStateVacuumEntity(ThinQEntity, StateVacuumEntity): """Start the device.""" if self.data.current_state == State.SLEEP: value = State.WAKE_UP - elif self._attr_state == STATE_PAUSED: + elif self._attr_activity == VacuumActivity.PAUSED: value = State.RESUME else: value = State.START diff --git a/homeassistant/components/litterrobot/vacuum.py b/homeassistant/components/litterrobot/vacuum.py index f5553bf5d49..bd00c328233 100644 --- a/homeassistant/components/litterrobot/vacuum.py +++ b/homeassistant/components/litterrobot/vacuum.py @@ -10,12 +10,9 @@ from pylitterbot.enums import LitterBoxStatus import voluptuous as vol from homeassistant.components.vacuum import ( - STATE_CLEANING, - STATE_DOCKED, - STATE_ERROR, - STATE_PAUSED, StateVacuumEntity, StateVacuumEntityDescription, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.core import HomeAssistant @@ -29,16 +26,16 @@ from .entity import LitterRobotEntity SERVICE_SET_SLEEP_MODE = "set_sleep_mode" LITTER_BOX_STATUS_STATE_MAP = { - LitterBoxStatus.CLEAN_CYCLE: STATE_CLEANING, - LitterBoxStatus.EMPTY_CYCLE: STATE_CLEANING, - LitterBoxStatus.CLEAN_CYCLE_COMPLETE: STATE_DOCKED, - LitterBoxStatus.CAT_DETECTED: STATE_DOCKED, - LitterBoxStatus.CAT_SENSOR_TIMING: STATE_DOCKED, - LitterBoxStatus.DRAWER_FULL_1: STATE_DOCKED, - LitterBoxStatus.DRAWER_FULL_2: STATE_DOCKED, - LitterBoxStatus.READY: STATE_DOCKED, - LitterBoxStatus.CAT_SENSOR_INTERRUPTED: STATE_PAUSED, - LitterBoxStatus.OFF: STATE_DOCKED, + LitterBoxStatus.CLEAN_CYCLE: VacuumActivity.CLEANING, + LitterBoxStatus.EMPTY_CYCLE: VacuumActivity.CLEANING, + LitterBoxStatus.CLEAN_CYCLE_COMPLETE: VacuumActivity.DOCKED, + LitterBoxStatus.CAT_DETECTED: VacuumActivity.DOCKED, + LitterBoxStatus.CAT_SENSOR_TIMING: VacuumActivity.DOCKED, + LitterBoxStatus.DRAWER_FULL_1: VacuumActivity.DOCKED, + LitterBoxStatus.DRAWER_FULL_2: VacuumActivity.DOCKED, + LitterBoxStatus.READY: VacuumActivity.DOCKED, + LitterBoxStatus.CAT_SENSOR_INTERRUPTED: VacuumActivity.PAUSED, + LitterBoxStatus.OFF: 
VacuumActivity.DOCKED, } LITTER_BOX_ENTITY = StateVacuumEntityDescription( @@ -78,9 +75,9 @@ class LitterRobotCleaner(LitterRobotEntity[LitterRobot], StateVacuumEntity): ) @property - def state(self) -> str: + def activity(self) -> VacuumActivity: """Return the state of the cleaner.""" - return LITTER_BOX_STATUS_STATE_MAP.get(self.robot.status, STATE_ERROR) + return LITTER_BOX_STATUS_STATE_MAP.get(self.robot.status, VacuumActivity.ERROR) @property def status(self) -> str: diff --git a/homeassistant/components/matter/vacuum.py b/homeassistant/components/matter/vacuum.py index 2ecd7128df6..e98e1ad0bbd 100644 --- a/homeassistant/components/matter/vacuum.py +++ b/homeassistant/components/matter/vacuum.py @@ -9,16 +9,13 @@ from chip.clusters import Objects as clusters from matter_server.client.models import device_types from homeassistant.components.vacuum import ( - STATE_CLEANING, - STATE_DOCKED, - STATE_ERROR, - STATE_RETURNING, StateVacuumEntity, StateVacuumEntityDescription, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_IDLE, Platform +from homeassistant.const import Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -127,25 +124,25 @@ class MatterVacuum(MatterEntity, StateVacuumEntity): operational_state: int = self.get_matter_attribute_value( clusters.RvcOperationalState.Attributes.OperationalState ) - state: str | None = None + state: VacuumActivity | None = None if TYPE_CHECKING: assert self._supported_run_modes is not None if operational_state in (OperationalState.CHARGING, OperationalState.DOCKED): - state = STATE_DOCKED + state = VacuumActivity.DOCKED elif operational_state == OperationalState.SEEKING_CHARGER: - state = STATE_RETURNING + state = VacuumActivity.RETURNING elif operational_state in ( OperationalState.UNABLE_TO_COMPLETE_OPERATION, OperationalState.UNABLE_TO_START_OR_RESUME, ): - state = STATE_ERROR + state = VacuumActivity.ERROR elif (run_mode := self._supported_run_modes.get(run_mode_raw)) is not None: tags = {x.value for x in run_mode.modeTags} if ModeTag.CLEANING in tags: - state = STATE_CLEANING + state = VacuumActivity.CLEANING elif ModeTag.IDLE in tags: - state = STATE_IDLE - self._attr_state = state + state = VacuumActivity.IDLE + self._attr_activity = state @callback def _calculate_features(self) -> None: diff --git a/homeassistant/components/mqtt/vacuum.py b/homeassistant/components/mqtt/vacuum.py index ac6dca3cbbc..743bfb363f3 100644 --- a/homeassistant/components/mqtt/vacuum.py +++ b/homeassistant/components/mqtt/vacuum.py @@ -10,20 +10,12 @@ import voluptuous as vol from homeassistant.components import vacuum from homeassistant.components.vacuum import ( ENTITY_ID_FORMAT, - STATE_CLEANING, - STATE_DOCKED, - STATE_ERROR, - STATE_RETURNING, StateVacuumEntity, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_SUPPORTED_FEATURES, - CONF_NAME, - STATE_IDLE, - STATE_PAUSED, -) +from homeassistant.const import ATTR_SUPPORTED_FEATURES, CONF_NAME from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -45,13 +37,20 @@ BATTERY = "battery_level" FAN_SPEED = "fan_speed" STATE = "state" -POSSIBLE_STATES: dict[str, str] = { - STATE_IDLE: STATE_IDLE, - STATE_DOCKED: STATE_DOCKED, - STATE_ERROR: 
STATE_ERROR, - STATE_PAUSED: STATE_PAUSED, - STATE_RETURNING: STATE_RETURNING, - STATE_CLEANING: STATE_CLEANING, +STATE_IDLE = "idle" +STATE_DOCKED = "docked" +STATE_ERROR = "error" +STATE_PAUSED = "paused" +STATE_RETURNING = "returning" +STATE_CLEANING = "cleaning" + +POSSIBLE_STATES: dict[str, VacuumActivity] = { + STATE_IDLE: VacuumActivity.IDLE, + STATE_DOCKED: VacuumActivity.DOCKED, + STATE_ERROR: VacuumActivity.ERROR, + STATE_PAUSED: VacuumActivity.PAUSED, + STATE_RETURNING: VacuumActivity.RETURNING, + STATE_CLEANING: VacuumActivity.CLEANING, } CONF_SUPPORTED_FEATURES = ATTR_SUPPORTED_FEATURES @@ -265,7 +264,7 @@ class MqttStateVacuum(MqttEntity, StateVacuumEntity): if STATE in payload and ( (state := payload[STATE]) in POSSIBLE_STATES or state is None ): - self._attr_state = ( + self._attr_activity = ( POSSIBLE_STATES[cast(str, state)] if payload[STATE] else None ) del payload[STATE] @@ -277,7 +276,7 @@ class MqttStateVacuum(MqttEntity, StateVacuumEntity): self.add_subscription( CONF_STATE_TOPIC, self._state_message_received, - {"_attr_battery_level", "_attr_fan_speed", "_attr_state"}, + {"_attr_battery_level", "_attr_fan_speed", "_attr_activity"}, ) async def _subscribe_topics(self) -> None: diff --git a/homeassistant/components/neato/vacuum.py b/homeassistant/components/neato/vacuum.py index 77ca5346b10..1a9285964a2 100644 --- a/homeassistant/components/neato/vacuum.py +++ b/homeassistant/components/neato/vacuum.py @@ -12,15 +12,12 @@ import voluptuous as vol from homeassistant.components.vacuum import ( ATTR_STATUS, - STATE_CLEANING, - STATE_DOCKED, - STATE_ERROR, - STATE_RETURNING, StateVacuumEntity, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_MODE, STATE_IDLE, STATE_PAUSED +from homeassistant.const import ATTR_MODE from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.device_registry import DeviceInfo @@ -169,23 +166,23 @@ class NeatoConnectedVacuum(NeatoEntity, StateVacuumEntity): robot_alert = None if self._state["state"] == 1: if self._state["details"]["isCharging"]: - self._attr_state = STATE_DOCKED + self._attr_activity = VacuumActivity.DOCKED self._status_state = "Charging" elif ( self._state["details"]["isDocked"] and not self._state["details"]["isCharging"] ): - self._attr_state = STATE_DOCKED + self._attr_activity = VacuumActivity.DOCKED self._status_state = "Docked" else: - self._attr_state = STATE_IDLE + self._attr_activity = VacuumActivity.IDLE self._status_state = "Stopped" if robot_alert is not None: self._status_state = robot_alert elif self._state["state"] == 2: if robot_alert is None: - self._attr_state = STATE_CLEANING + self._attr_activity = VacuumActivity.CLEANING self._status_state = ( f"{MODE.get(self._state['cleaning']['mode'])} " f"{ACTION.get(self._state['action'])}" @@ -200,10 +197,10 @@ class NeatoConnectedVacuum(NeatoEntity, StateVacuumEntity): else: self._status_state = robot_alert elif self._state["state"] == 3: - self._attr_state = STATE_PAUSED + self._attr_activity = VacuumActivity.PAUSED self._status_state = "Paused" elif self._state["state"] == 4: - self._attr_state = STATE_ERROR + self._attr_activity = VacuumActivity.ERROR self._status_state = ERRORS.get(self._state["error"]) self._attr_battery_level = self._state["details"]["charge"] @@ -326,9 +323,9 @@ class NeatoConnectedVacuum(NeatoEntity, StateVacuumEntity): def return_to_base(self, **kwargs: Any) -> None: 
"""Set the vacuum cleaner to return to the dock.""" try: - if self._attr_state == STATE_CLEANING: + if self._attr_activity == VacuumActivity.CLEANING: self.robot.pause_cleaning() - self._attr_state = STATE_RETURNING + self._attr_activity = VacuumActivity.RETURNING self.robot.send_to_base() except NeatoRobotException as ex: _LOGGER.error( @@ -380,7 +377,7 @@ class NeatoConnectedVacuum(NeatoEntity, StateVacuumEntity): "Start cleaning zone '%s' with robot %s", zone, self.entity_id ) - self._attr_state = STATE_CLEANING + self._attr_activity = VacuumActivity.CLEANING try: self.robot.start_cleaning(mode, navigation, category, boundary_id) except NeatoRobotException as ex: diff --git a/homeassistant/components/roborock/vacuum.py b/homeassistant/components/roborock/vacuum.py index 3b873f259e4..d3413bd7cbd 100644 --- a/homeassistant/components/roborock/vacuum.py +++ b/homeassistant/components/roborock/vacuum.py @@ -8,13 +8,8 @@ from roborock.roborock_message import RoborockDataProtocol from roborock.roborock_typing import RoborockCommand from homeassistant.components.vacuum import ( - STATE_CLEANING, - STATE_DOCKED, - STATE_ERROR, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, StateVacuumEntity, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.core import HomeAssistant, ServiceResponse, SupportsResponse @@ -27,29 +22,29 @@ from .coordinator import RoborockDataUpdateCoordinator from .entity import RoborockCoordinatedEntityV1 STATE_CODE_TO_STATE = { - RoborockStateCode.starting: STATE_IDLE, # "Starting" - RoborockStateCode.charger_disconnected: STATE_IDLE, # "Charger disconnected" - RoborockStateCode.idle: STATE_IDLE, # "Idle" - RoborockStateCode.remote_control_active: STATE_CLEANING, # "Remote control active" - RoborockStateCode.cleaning: STATE_CLEANING, # "Cleaning" - RoborockStateCode.returning_home: STATE_RETURNING, # "Returning home" - RoborockStateCode.manual_mode: STATE_CLEANING, # "Manual mode" - RoborockStateCode.charging: STATE_DOCKED, # "Charging" - RoborockStateCode.charging_problem: STATE_ERROR, # "Charging problem" - RoborockStateCode.paused: STATE_PAUSED, # "Paused" - RoborockStateCode.spot_cleaning: STATE_CLEANING, # "Spot cleaning" - RoborockStateCode.error: STATE_ERROR, # "Error" - RoborockStateCode.shutting_down: STATE_IDLE, # "Shutting down" - RoborockStateCode.updating: STATE_DOCKED, # "Updating" - RoborockStateCode.docking: STATE_RETURNING, # "Docking" - RoborockStateCode.going_to_target: STATE_CLEANING, # "Going to target" - RoborockStateCode.zoned_cleaning: STATE_CLEANING, # "Zoned cleaning" - RoborockStateCode.segment_cleaning: STATE_CLEANING, # "Segment cleaning" - RoborockStateCode.emptying_the_bin: STATE_DOCKED, # "Emptying the bin" on s7+ - RoborockStateCode.washing_the_mop: STATE_DOCKED, # "Washing the mop" on s7maxV - RoborockStateCode.going_to_wash_the_mop: STATE_RETURNING, # "Going to wash the mop" on s7maxV - RoborockStateCode.charging_complete: STATE_DOCKED, # "Charging complete" - RoborockStateCode.device_offline: STATE_ERROR, # "Device offline" + RoborockStateCode.starting: VacuumActivity.IDLE, # "Starting" + RoborockStateCode.charger_disconnected: VacuumActivity.IDLE, # "Charger disconnected" + RoborockStateCode.idle: VacuumActivity.IDLE, # "Idle" + RoborockStateCode.remote_control_active: VacuumActivity.CLEANING, # "Remote control active" + RoborockStateCode.cleaning: VacuumActivity.CLEANING, # "Cleaning" + RoborockStateCode.returning_home: VacuumActivity.RETURNING, # "Returning home" + RoborockStateCode.manual_mode: VacuumActivity.CLEANING, # 
"Manual mode" + RoborockStateCode.charging: VacuumActivity.DOCKED, # "Charging" + RoborockStateCode.charging_problem: VacuumActivity.ERROR, # "Charging problem" + RoborockStateCode.paused: VacuumActivity.PAUSED, # "Paused" + RoborockStateCode.spot_cleaning: VacuumActivity.CLEANING, # "Spot cleaning" + RoborockStateCode.error: VacuumActivity.ERROR, # "Error" + RoborockStateCode.shutting_down: VacuumActivity.IDLE, # "Shutting down" + RoborockStateCode.updating: VacuumActivity.DOCKED, # "Updating" + RoborockStateCode.docking: VacuumActivity.RETURNING, # "Docking" + RoborockStateCode.going_to_target: VacuumActivity.CLEANING, # "Going to target" + RoborockStateCode.zoned_cleaning: VacuumActivity.CLEANING, # "Zoned cleaning" + RoborockStateCode.segment_cleaning: VacuumActivity.CLEANING, # "Segment cleaning" + RoborockStateCode.emptying_the_bin: VacuumActivity.DOCKED, # "Emptying the bin" on s7+ + RoborockStateCode.washing_the_mop: VacuumActivity.DOCKED, # "Washing the mop" on s7maxV + RoborockStateCode.going_to_wash_the_mop: VacuumActivity.RETURNING, # "Going to wash the mop" on s7maxV + RoborockStateCode.charging_complete: VacuumActivity.DOCKED, # "Charging complete" + RoborockStateCode.device_offline: VacuumActivity.ERROR, # "Device offline" } @@ -112,7 +107,7 @@ class RoborockVacuum(RoborockCoordinatedEntityV1, StateVacuumEntity): self._attr_fan_speed_list = self._device_status.fan_power_options @property - def state(self) -> str | None: + def activity(self) -> VacuumActivity | None: """Return the status of the vacuum cleaner.""" assert self._device_status.state is not None return STATE_CODE_TO_STATE.get(self._device_status.state) diff --git a/homeassistant/components/romy/vacuum.py b/homeassistant/components/romy/vacuum.py index de74d371f0e..49129daabbd 100644 --- a/homeassistant/components/romy/vacuum.py +++ b/homeassistant/components/romy/vacuum.py @@ -6,7 +6,11 @@ https://home-assistant.io/components/vacuum.romy/. 
from typing import Any -from homeassistant.components.vacuum import StateVacuumEntity, VacuumEntityFeature +from homeassistant.components.vacuum import ( + StateVacuumEntity, + VacuumActivity, + VacuumEntityFeature, +) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -75,7 +79,14 @@ class RomyVacuumEntity(RomyEntity, StateVacuumEntity): """Handle updated data from the coordinator.""" self._attr_fan_speed = FAN_SPEEDS[self.romy.fan_speed] self._attr_battery_level = self.romy.battery_level - self._attr_state = self.romy.status + if (status := self.romy.status) is None: + self._attr_activity = None + self.async_write_ha_state() + return + try: + self._attr_activity = VacuumActivity(status) + except ValueError: + self._attr_activity = None self.async_write_ha_state() diff --git a/homeassistant/components/roomba/vacuum.py b/homeassistant/components/roomba/vacuum.py index 9024e54087d..92063f74afa 100644 --- a/homeassistant/components/roomba/vacuum.py +++ b/homeassistant/components/roomba/vacuum.py @@ -8,15 +8,11 @@ from typing import Any from homeassistant.components.vacuum import ( ATTR_STATUS, - STATE_CLEANING, - STATE_DOCKED, - STATE_ERROR, - STATE_RETURNING, StateVacuumEntity, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_IDLE, STATE_PAUSED from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util @@ -39,16 +35,16 @@ SUPPORT_IROBOT = ( ) STATE_MAP = { - "": STATE_IDLE, - "charge": STATE_DOCKED, - "evac": STATE_RETURNING, # Emptying at cleanbase - "hmMidMsn": STATE_CLEANING, # Recharging at the middle of a cycle - "hmPostMsn": STATE_RETURNING, # Cycle finished - "hmUsrDock": STATE_RETURNING, - "pause": STATE_PAUSED, - "run": STATE_CLEANING, - "stop": STATE_IDLE, - "stuck": STATE_ERROR, + "": VacuumActivity.IDLE, + "charge": VacuumActivity.DOCKED, + "evac": VacuumActivity.RETURNING, # Emptying at cleanbase + "hmMidMsn": VacuumActivity.CLEANING, # Recharging at the middle of a cycle + "hmPostMsn": VacuumActivity.RETURNING, # Cycle finished + "hmUsrDock": VacuumActivity.RETURNING, + "pause": VacuumActivity.PAUSED, + "run": VacuumActivity.CLEANING, + "stop": VacuumActivity.IDLE, + "stuck": VacuumActivity.ERROR, } _LOGGER = logging.getLogger(__name__) @@ -130,7 +126,7 @@ class IRobotVacuum(IRobotEntity, StateVacuumEntity): self._cap_position = self.vacuum_state.get("cap", {}).get("pose") == 1 @property - def _robot_state(self): + def activity(self): """Return the state of the vacuum cleaner.""" clean_mission_status = self.vacuum_state.get("cleanMissionStatus", {}) cycle = clean_mission_status.get("cycle") @@ -138,16 +134,11 @@ class IRobotVacuum(IRobotEntity, StateVacuumEntity): try: state = STATE_MAP[phase] except KeyError: - return STATE_ERROR - if cycle != "none" and state in (STATE_IDLE, STATE_DOCKED): - state = STATE_PAUSED + return VacuumActivity.ERROR + if cycle != "none" and state in (VacuumActivity.IDLE, VacuumActivity.DOCKED): + state = VacuumActivity.PAUSED return state - @property - def state(self) -> str: - """Return the state of the vacuum cleaner.""" - return self._robot_state - @property def extra_state_attributes(self) -> dict[str, Any]: """Return the state attributes of the device.""" @@ -164,7 +155,7 @@ class IRobotVacuum(IRobotEntity, StateVacuumEntity): # Only 
add cleaning time and cleaned area attrs when the vacuum is # currently on - if self.state == STATE_CLEANING: + if self.state == VacuumActivity.CLEANING: # Get clean mission status ( state_attrs[ATTR_CLEANING_TIME], @@ -218,7 +209,7 @@ class IRobotVacuum(IRobotEntity, StateVacuumEntity): async def async_start(self) -> None: """Start or resume the cleaning task.""" - if self.state == STATE_PAUSED: + if self.state == VacuumActivity.PAUSED: await self.hass.async_add_executor_job(self.vacuum.send_command, "resume") else: await self.hass.async_add_executor_job(self.vacuum.send_command, "start") @@ -233,10 +224,10 @@ class IRobotVacuum(IRobotEntity, StateVacuumEntity): async def async_return_to_base(self, **kwargs): """Set the vacuum cleaner to return to the dock.""" - if self.state == STATE_CLEANING: + if self.state == VacuumActivity.CLEANING: await self.async_pause() for _ in range(10): - if self.state == STATE_PAUSED: + if self.state == VacuumActivity.PAUSED: break await asyncio.sleep(1) await self.hass.async_add_executor_job(self.vacuum.send_command, "dock") diff --git a/homeassistant/components/sharkiq/vacuum.py b/homeassistant/components/sharkiq/vacuum.py index 997d229e6b9..873d3fbd290 100644 --- a/homeassistant/components/sharkiq/vacuum.py +++ b/homeassistant/components/sharkiq/vacuum.py @@ -9,12 +9,8 @@ from sharkiq import OperatingModes, PowerModes, Properties, SharkIqVacuum import voluptuous as vol from homeassistant.components.vacuum import ( - STATE_CLEANING, - STATE_DOCKED, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, StateVacuumEntity, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.config_entries import ConfigEntry @@ -30,10 +26,10 @@ from .const import DOMAIN, LOGGER, SERVICE_CLEAN_ROOM, SHARK from .coordinator import SharkIqUpdateCoordinator OPERATING_STATE_MAP = { - OperatingModes.PAUSE: STATE_PAUSED, - OperatingModes.START: STATE_CLEANING, - OperatingModes.STOP: STATE_IDLE, - OperatingModes.RETURN: STATE_RETURNING, + OperatingModes.PAUSE: VacuumActivity.PAUSED, + OperatingModes.START: VacuumActivity.CLEANING, + OperatingModes.STOP: VacuumActivity.IDLE, + OperatingModes.RETURN: VacuumActivity.RETURNING, } FAN_SPEEDS_MAP = { @@ -156,7 +152,7 @@ class SharkVacuumEntity(CoordinatorEntity[SharkIqUpdateCoordinator], StateVacuum return self.sharkiq.get_property_value(Properties.RECHARGING_TO_RESUME) @property - def state(self) -> str | None: + def activity(self) -> VacuumActivity | None: """Get the current vacuum state. NB: Currently, we do not return an error state because they can be very, very stale. @@ -164,7 +160,7 @@ class SharkVacuumEntity(CoordinatorEntity[SharkIqUpdateCoordinator], StateVacuum user a notification. 
""" if self.sharkiq.get_property_value(Properties.CHARGING_STATUS): - return STATE_DOCKED + return VacuumActivity.DOCKED op_mode = self.sharkiq.get_property_value(Properties.OPERATING_MODE) return OPERATING_STATE_MAP.get(op_mode) diff --git a/homeassistant/components/switchbot_cloud/vacuum.py b/homeassistant/components/switchbot_cloud/vacuum.py index f9236507037..2d2a1783d73 100644 --- a/homeassistant/components/switchbot_cloud/vacuum.py +++ b/homeassistant/components/switchbot_cloud/vacuum.py @@ -5,13 +5,8 @@ from typing import Any from switchbot_api import Device, Remote, SwitchBotAPI, VacuumCommands from homeassistant.components.vacuum import ( - STATE_CLEANING, - STATE_DOCKED, - STATE_ERROR, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, StateVacuumEntity, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.config_entries import ConfigEntry @@ -43,17 +38,17 @@ async def async_setup_entry( ) -VACUUM_SWITCHBOT_STATE_TO_HA_STATE: dict[str, str] = { - "StandBy": STATE_IDLE, - "Clearing": STATE_CLEANING, - "Paused": STATE_PAUSED, - "GotoChargeBase": STATE_RETURNING, - "Charging": STATE_DOCKED, - "ChargeDone": STATE_DOCKED, - "Dormant": STATE_IDLE, - "InTrouble": STATE_ERROR, - "InRemoteControl": STATE_CLEANING, - "InDustCollecting": STATE_DOCKED, +VACUUM_SWITCHBOT_STATE_TO_HA_STATE: dict[str, VacuumActivity] = { + "StandBy": VacuumActivity.IDLE, + "Clearing": VacuumActivity.CLEANING, + "Paused": VacuumActivity.PAUSED, + "GotoChargeBase": VacuumActivity.RETURNING, + "Charging": VacuumActivity.DOCKED, + "ChargeDone": VacuumActivity.DOCKED, + "Dormant": VacuumActivity.IDLE, + "InTrouble": VacuumActivity.ERROR, + "InRemoteControl": VacuumActivity.CLEANING, + "InDustCollecting": VacuumActivity.DOCKED, } VACUUM_FAN_SPEED_TO_SWITCHBOT_FAN_SPEED: dict[str, str] = { @@ -114,7 +109,7 @@ class SwitchBotCloudVacuum(SwitchBotCloudEntity, StateVacuumEntity): self._attr_available = self.coordinator.data.get("onlineStatus") == "online" switchbot_state = str(self.coordinator.data.get("workingStatus")) - self._attr_state = VACUUM_SWITCHBOT_STATE_TO_HA_STATE.get(switchbot_state) + self._attr_activity = VACUUM_SWITCHBOT_STATE_TO_HA_STATE.get(switchbot_state) self.async_write_ha_state() diff --git a/homeassistant/components/template/vacuum.py b/homeassistant/components/template/vacuum.py index 1d021bcb571..19029cc708b 100644 --- a/homeassistant/components/template/vacuum.py +++ b/homeassistant/components/template/vacuum.py @@ -17,13 +17,8 @@ from homeassistant.components.vacuum import ( SERVICE_SET_FAN_SPEED, SERVICE_START, SERVICE_STOP, - STATE_CLEANING, - STATE_DOCKED, - STATE_ERROR, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, StateVacuumEntity, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.const import ( @@ -58,12 +53,12 @@ CONF_FAN_SPEED_TEMPLATE = "fan_speed_template" ENTITY_ID_FORMAT = VACUUM_DOMAIN + ".{}" _VALID_STATES = [ - STATE_CLEANING, - STATE_DOCKED, - STATE_PAUSED, - STATE_IDLE, - STATE_RETURNING, - STATE_ERROR, + VacuumActivity.CLEANING, + VacuumActivity.DOCKED, + VacuumActivity.PAUSED, + VacuumActivity.IDLE, + VacuumActivity.RETURNING, + VacuumActivity.ERROR, ] VACUUM_SCHEMA = vol.All( @@ -202,7 +197,7 @@ class TemplateVacuum(TemplateEntity, StateVacuumEntity): self._attr_fan_speed_list = config[CONF_FAN_SPEED_LIST] @property - def state(self) -> str | None: + def activity(self) -> VacuumActivity | None: """Return the status of the vacuum cleaner.""" return self._state diff --git a/homeassistant/components/tuya/vacuum.py b/homeassistant/components/tuya/vacuum.py 
index 2e0a154e670..738492102a1 100644 --- a/homeassistant/components/tuya/vacuum.py +++ b/homeassistant/components/tuya/vacuum.py @@ -7,13 +7,10 @@ from typing import Any from tuya_sharing import CustomerDevice, Manager from homeassistant.components.vacuum import ( - STATE_CLEANING, - STATE_DOCKED, - STATE_RETURNING, StateVacuumEntity, + VacuumActivity, VacuumEntityFeature, ) -from homeassistant.const import STATE_IDLE, STATE_PAUSED from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -24,29 +21,29 @@ from .entity import EnumTypeData, IntegerTypeData, TuyaEntity TUYA_MODE_RETURN_HOME = "chargego" TUYA_STATUS_TO_HA = { - "charge_done": STATE_DOCKED, - "chargecompleted": STATE_DOCKED, - "chargego": STATE_DOCKED, - "charging": STATE_DOCKED, - "cleaning": STATE_CLEANING, - "docking": STATE_RETURNING, - "goto_charge": STATE_RETURNING, - "goto_pos": STATE_CLEANING, - "mop_clean": STATE_CLEANING, - "part_clean": STATE_CLEANING, - "paused": STATE_PAUSED, - "pick_zone_clean": STATE_CLEANING, - "pos_arrived": STATE_CLEANING, - "pos_unarrive": STATE_CLEANING, - "random": STATE_CLEANING, - "sleep": STATE_IDLE, - "smart_clean": STATE_CLEANING, - "smart": STATE_CLEANING, - "spot_clean": STATE_CLEANING, - "standby": STATE_IDLE, - "wall_clean": STATE_CLEANING, - "wall_follow": STATE_CLEANING, - "zone_clean": STATE_CLEANING, + "charge_done": VacuumActivity.DOCKED, + "chargecompleted": VacuumActivity.DOCKED, + "chargego": VacuumActivity.DOCKED, + "charging": VacuumActivity.DOCKED, + "cleaning": VacuumActivity.CLEANING, + "docking": VacuumActivity.RETURNING, + "goto_charge": VacuumActivity.RETURNING, + "goto_pos": VacuumActivity.CLEANING, + "mop_clean": VacuumActivity.CLEANING, + "part_clean": VacuumActivity.CLEANING, + "paused": VacuumActivity.PAUSED, + "pick_zone_clean": VacuumActivity.CLEANING, + "pos_arrived": VacuumActivity.CLEANING, + "pos_unarrive": VacuumActivity.CLEANING, + "random": VacuumActivity.CLEANING, + "sleep": VacuumActivity.IDLE, + "smart_clean": VacuumActivity.CLEANING, + "smart": VacuumActivity.CLEANING, + "spot_clean": VacuumActivity.CLEANING, + "standby": VacuumActivity.IDLE, + "wall_clean": VacuumActivity.CLEANING, + "wall_follow": VacuumActivity.CLEANING, + "zone_clean": VacuumActivity.CLEANING, } @@ -137,12 +134,12 @@ class TuyaVacuumEntity(TuyaEntity, StateVacuumEntity): return self.device.status.get(DPCode.SUCTION) @property - def state(self) -> str | None: + def activity(self) -> VacuumActivity | None: """Return Tuya vacuum device state.""" if self.device.status.get(DPCode.PAUSE) and not ( self.device.status.get(DPCode.STATUS) ): - return STATE_PAUSED + return VacuumActivity.PAUSED if not (status := self.device.status.get(DPCode.STATUS)): return None return TUYA_STATUS_TO_HA.get(status) diff --git a/homeassistant/components/vacuum/__init__.py b/homeassistant/components/vacuum/__init__.py index a81dbeacee1..6fe2c3e2a5b 100644 --- a/homeassistant/components/vacuum/__init__.py +++ b/homeassistant/components/vacuum/__init__.py @@ -2,11 +2,12 @@ from __future__ import annotations +import asyncio from datetime import timedelta from enum import IntFlag from functools import partial import logging -from typing import Any +from typing import TYPE_CHECKING, Any, final from propcache import cached_property import voluptuous as vol @@ -18,11 +19,9 @@ from homeassistant.const import ( # noqa: F401 # STATE_PAUSED/IDLE are API SERVICE_TOGGLE, 
SERVICE_TURN_OFF, SERVICE_TURN_ON, - STATE_IDLE, STATE_ON, - STATE_PAUSED, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.deprecation import ( DeprecatedConstantEnum, @@ -32,12 +31,21 @@ from homeassistant.helpers.deprecation import ( ) from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent +from homeassistant.helpers.entity_platform import EntityPlatform +from homeassistant.helpers.frame import ReportBehavior, report_usage from homeassistant.helpers.icon import icon_for_battery_level from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass from homeassistant.util.hass_dict import HassKey -from .const import DOMAIN, STATE_CLEANING, STATE_DOCKED, STATE_ERROR, STATE_RETURNING +from .const import ( # noqa: F401 + _DEPRECATED_STATE_CLEANING, + _DEPRECATED_STATE_DOCKED, + _DEPRECATED_STATE_ERROR, + _DEPRECATED_STATE_RETURNING, + DOMAIN, + VacuumActivity, +) _LOGGER = logging.getLogger(__name__) @@ -64,11 +72,13 @@ SERVICE_START = "start" SERVICE_PAUSE = "pause" SERVICE_STOP = "stop" - -STATES = [STATE_CLEANING, STATE_DOCKED, STATE_RETURNING, STATE_ERROR] - DEFAULT_NAME = "Vacuum cleaner robot" +# These STATE_* constants are deprecated as of Home Assistant 2025.1. +# Please use the VacuumActivity enum instead. +_DEPRECATED_STATE_IDLE = DeprecatedConstantEnum(VacuumActivity.IDLE, "2026.1") +_DEPRECATED_STATE_PAUSED = DeprecatedConstantEnum(VacuumActivity.PAUSED, "2026.1") + class VacuumEntityFeature(IntFlag): """Supported features of the vacuum entity.""" @@ -216,7 +226,7 @@ STATE_VACUUM_CACHED_PROPERTIES_WITH_ATTR_ = { "battery_icon", "fan_speed", "fan_speed_list", - "state", + "activity", } @@ -233,9 +243,58 @@ class StateVacuumEntity( _attr_battery_level: int | None = None _attr_fan_speed: str | None = None _attr_fan_speed_list: list[str] - _attr_state: str | None = None + _attr_activity: VacuumActivity | None = None _attr_supported_features: VacuumEntityFeature = VacuumEntityFeature(0) + __vacuum_legacy_state: bool = False + + def __init_subclass__(cls, **kwargs: Any) -> None: + """Post initialisation processing.""" + super().__init_subclass__(**kwargs) + if any(method in cls.__dict__ for method in ("_attr_state", "state")): + # Integrations should use the 'activity' property instead of + # setting the state directly. + cls.__vacuum_legacy_state = True + + def __setattr__(self, name: str, value: Any) -> None: + """Set attribute. + + Deprecation warning if setting '_attr_state' directly + unless already reported. + """ + if name == "_attr_state": + self._report_deprecated_activity_handling() + return super().__setattr__(name, value) + + @callback + def add_to_platform_start( + self, + hass: HomeAssistant, + platform: EntityPlatform, + parallel_updates: asyncio.Semaphore | None, + ) -> None: + """Start adding an entity to a platform.""" + super().add_to_platform_start(hass, platform, parallel_updates) + if self.__vacuum_legacy_state: + self._report_deprecated_activity_handling() + + @callback + def _report_deprecated_activity_handling(self) -> None: + """Report on deprecated handling of vacuum state. + + Integrations should implement activity instead of using state directly. + """ + report_usage( + "is setting state directly." 
+ f" Entity {self.entity_id} ({type(self)}) should implement the 'activity'" + " property and return its state using the VacuumActivity enum", + core_integration_behavior=ReportBehavior.ERROR, + custom_integration_behavior=ReportBehavior.LOG, + breaks_in_ha_version="2026.1", + integration_domain=self.platform.platform_name if self.platform else None, + exclude_integrations={DOMAIN}, + ) + @cached_property def battery_level(self) -> int | None: """Return the battery level of the vacuum cleaner.""" @@ -244,7 +303,7 @@ class StateVacuumEntity( @property def battery_icon(self) -> str: """Return the battery icon for the vacuum cleaner.""" - charging = bool(self.state == STATE_DOCKED) + charging = bool(self.activity == VacuumActivity.DOCKED) return icon_for_battery_level( battery_level=self.battery_level, charging=charging @@ -282,10 +341,28 @@ class StateVacuumEntity( return data - @cached_property + @final + @property def state(self) -> str | None: """Return the state of the vacuum cleaner.""" - return self._attr_state + if (activity := self.activity) is not None: + return activity + if self._attr_state is not None: + # Backwards compatibility for integrations that set state directly + # Should be removed in 2026.1 + if TYPE_CHECKING: + assert isinstance(self._attr_state, str) + return self._attr_state + return None + + @cached_property + def activity(self) -> VacuumActivity | None: + """Return the current vacuum activity. + + Integrations should overwrite this or use the '_attr_activity' + attribute to set the vacuum activity using the 'VacuumActivity' enum. + """ + return self._attr_activity @cached_property def supported_features(self) -> VacuumEntityFeature: diff --git a/homeassistant/components/vacuum/const.py b/homeassistant/components/vacuum/const.py index af1558f8570..f153a11dcb9 100644 --- a/homeassistant/components/vacuum/const.py +++ b/homeassistant/components/vacuum/const.py @@ -1,10 +1,42 @@ """Support for vacuum cleaner robots (botvacs).""" +from __future__ import annotations + +from enum import StrEnum +from functools import partial + +from homeassistant.helpers.deprecation import ( + DeprecatedConstantEnum, + all_with_deprecated_constants, + check_if_deprecated_constant, + dir_with_deprecated_constants, +) + DOMAIN = "vacuum" -STATE_CLEANING = "cleaning" -STATE_DOCKED = "docked" -STATE_RETURNING = "returning" -STATE_ERROR = "error" -STATES = [STATE_CLEANING, STATE_DOCKED, STATE_RETURNING, STATE_ERROR] +class VacuumActivity(StrEnum): + """Vacuum activity states.""" + + CLEANING = "cleaning" + DOCKED = "docked" + IDLE = "idle" + PAUSED = "paused" + RETURNING = "returning" + ERROR = "error" + + +# These STATE_* constants are deprecated as of Home Assistant 2025.1. +# Please use the VacuumActivity enum instead. 
+_DEPRECATED_STATE_CLEANING = DeprecatedConstantEnum(VacuumActivity.CLEANING, "2026.1") +_DEPRECATED_STATE_DOCKED = DeprecatedConstantEnum(VacuumActivity.DOCKED, "2026.1") +_DEPRECATED_STATE_RETURNING = DeprecatedConstantEnum(VacuumActivity.RETURNING, "2026.1") +_DEPRECATED_STATE_ERROR = DeprecatedConstantEnum(VacuumActivity.ERROR, "2026.1") + + +# These can be removed if no deprecated constant are in this module anymore +__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) +__dir__ = partial( + dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] +) +__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/vacuum/device_condition.py b/homeassistant/components/vacuum/device_condition.py index f528b0918a1..4da64484bf7 100644 --- a/homeassistant/components/vacuum/device_condition.py +++ b/homeassistant/components/vacuum/device_condition.py @@ -20,7 +20,7 @@ from homeassistant.helpers import ( from homeassistant.helpers.config_validation import DEVICE_CONDITION_BASE_SCHEMA from homeassistant.helpers.typing import ConfigType, TemplateVarsType -from . import DOMAIN, STATE_CLEANING, STATE_DOCKED, STATE_RETURNING +from . import DOMAIN, VacuumActivity CONDITION_TYPES = {"is_cleaning", "is_docked"} @@ -62,9 +62,9 @@ def async_condition_from_config( ) -> condition.ConditionCheckerType: """Create a function to test a device condition.""" if config[CONF_TYPE] == "is_docked": - test_states = [STATE_DOCKED] + test_states = [VacuumActivity.DOCKED] else: - test_states = [STATE_CLEANING, STATE_RETURNING] + test_states = [VacuumActivity.CLEANING, VacuumActivity.RETURNING] registry = er.async_get(hass) entity_id = er.async_resolve_entity_id(registry, config[CONF_ENTITY_ID]) diff --git a/homeassistant/components/vacuum/device_trigger.py b/homeassistant/components/vacuum/device_trigger.py index 45b0696f871..fe682ef21d3 100644 --- a/homeassistant/components/vacuum/device_trigger.py +++ b/homeassistant/components/vacuum/device_trigger.py @@ -19,7 +19,7 @@ from homeassistant.helpers import config_validation as cv, entity_registry as er from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo from homeassistant.helpers.typing import ConfigType -from . import DOMAIN, STATE_CLEANING, STATE_DOCKED +from . import DOMAIN, VacuumActivity TRIGGER_TYPES = {"cleaning", "docked"} @@ -77,9 +77,9 @@ async def async_attach_trigger( ) -> CALLBACK_TYPE: """Attach a trigger.""" if config[CONF_TYPE] == "cleaning": - to_state = STATE_CLEANING + to_state = VacuumActivity.CLEANING else: - to_state = STATE_DOCKED + to_state = VacuumActivity.DOCKED state_config = { CONF_PLATFORM: "state", diff --git a/homeassistant/components/vacuum/reproduce_state.py b/homeassistant/components/vacuum/reproduce_state.py index 762cd6f2e90..ef3fb329686 100644 --- a/homeassistant/components/vacuum/reproduce_state.py +++ b/homeassistant/components/vacuum/reproduce_state.py @@ -11,10 +11,8 @@ from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON, - STATE_IDLE, STATE_OFF, STATE_ON, - STATE_PAUSED, ) from homeassistant.core import Context, HomeAssistant, State @@ -26,20 +24,18 @@ from . 
import ( SERVICE_SET_FAN_SPEED, SERVICE_START, SERVICE_STOP, - STATE_CLEANING, - STATE_DOCKED, - STATE_RETURNING, + VacuumActivity, ) _LOGGER = logging.getLogger(__name__) VALID_STATES_TOGGLE = {STATE_ON, STATE_OFF} VALID_STATES_STATE = { - STATE_CLEANING, - STATE_DOCKED, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, + VacuumActivity.CLEANING, + VacuumActivity.DOCKED, + VacuumActivity.IDLE, + VacuumActivity.PAUSED, + VacuumActivity.RETURNING, } @@ -75,13 +71,13 @@ async def _async_reproduce_state( service = SERVICE_TURN_ON elif state.state == STATE_OFF: service = SERVICE_TURN_OFF - elif state.state == STATE_CLEANING: + elif state.state == VacuumActivity.CLEANING: service = SERVICE_START - elif state.state in [STATE_DOCKED, STATE_RETURNING]: + elif state.state in [VacuumActivity.DOCKED, VacuumActivity.RETURNING]: service = SERVICE_RETURN_TO_BASE - elif state.state == STATE_IDLE: + elif state.state == VacuumActivity.IDLE: service = SERVICE_STOP - elif state.state == STATE_PAUSED: + elif state.state == VacuumActivity.PAUSED: service = SERVICE_PAUSE await hass.services.async_call( diff --git a/homeassistant/components/xiaomi_miio/vacuum.py b/homeassistant/components/xiaomi_miio/vacuum.py index b720cc90d2c..532eb9581cd 100644 --- a/homeassistant/components/xiaomi_miio/vacuum.py +++ b/homeassistant/components/xiaomi_miio/vacuum.py @@ -10,13 +10,8 @@ from miio import DeviceException import voluptuous as vol from homeassistant.components.vacuum import ( - STATE_CLEANING, - STATE_DOCKED, - STATE_ERROR, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, StateVacuumEntity, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.config_entries import ConfigEntry @@ -55,29 +50,29 @@ ATTR_ZONE_REPEATER = "repeats" ATTR_TIMERS = "timers" STATE_CODE_TO_STATE = { - 1: STATE_IDLE, # "Starting" - 2: STATE_IDLE, # "Charger disconnected" - 3: STATE_IDLE, # "Idle" - 4: STATE_CLEANING, # "Remote control active" - 5: STATE_CLEANING, # "Cleaning" - 6: STATE_RETURNING, # "Returning home" - 7: STATE_CLEANING, # "Manual mode" - 8: STATE_DOCKED, # "Charging" - 9: STATE_ERROR, # "Charging problem" - 10: STATE_PAUSED, # "Paused" - 11: STATE_CLEANING, # "Spot cleaning" - 12: STATE_ERROR, # "Error" - 13: STATE_IDLE, # "Shutting down" - 14: STATE_DOCKED, # "Updating" - 15: STATE_RETURNING, # "Docking" - 16: STATE_CLEANING, # "Going to target" - 17: STATE_CLEANING, # "Zoned cleaning" - 18: STATE_CLEANING, # "Segment cleaning" - 22: STATE_DOCKED, # "Emptying the bin" on s7+ - 23: STATE_DOCKED, # "Washing the mop" on s7maxV - 26: STATE_RETURNING, # "Going to wash the mop" on s7maxV - 100: STATE_DOCKED, # "Charging complete" - 101: STATE_ERROR, # "Device offline" + 1: VacuumActivity.IDLE, # "Starting" + 2: VacuumActivity.IDLE, # "Charger disconnected" + 3: VacuumActivity.IDLE, # "Idle" + 4: VacuumActivity.CLEANING, # "Remote control active" + 5: VacuumActivity.CLEANING, # "Cleaning" + 6: VacuumActivity.RETURNING, # "Returning home" + 7: VacuumActivity.CLEANING, # "Manual mode" + 8: VacuumActivity.DOCKED, # "Charging" + 9: VacuumActivity.ERROR, # "Charging problem" + 10: VacuumActivity.PAUSED, # "Paused" + 11: VacuumActivity.CLEANING, # "Spot cleaning" + 12: VacuumActivity.ERROR, # "Error" + 13: VacuumActivity.IDLE, # "Shutting down" + 14: VacuumActivity.DOCKED, # "Updating" + 15: VacuumActivity.RETURNING, # "Docking" + 16: VacuumActivity.CLEANING, # "Going to target" + 17: VacuumActivity.CLEANING, # "Zoned cleaning" + 18: VacuumActivity.CLEANING, # "Segment cleaning" + 22: VacuumActivity.DOCKED, # "Emptying the bin" on 
s7+ + 23: VacuumActivity.DOCKED, # "Washing the mop" on s7maxV + 26: VacuumActivity.RETURNING, # "Going to wash the mop" on s7maxV + 100: VacuumActivity.DOCKED, # "Charging complete" + 101: VacuumActivity.ERROR, # "Device offline" } @@ -211,7 +206,7 @@ class MiroboVacuum( ) -> None: """Initialize the Xiaomi vacuum cleaner robot handler.""" super().__init__(device, entry, unique_id, coordinator) - self._state: str | None = None + self._state: VacuumActivity | None = None async def async_added_to_hass(self) -> None: """Run when entity is about to be added to hass.""" @@ -219,12 +214,12 @@ class MiroboVacuum( self._handle_coordinator_update() @property - def state(self) -> str | None: + def activity(self) -> VacuumActivity | None: """Return the status of the vacuum cleaner.""" # The vacuum reverts back to an idle state after erroring out. # We want to keep returning an error until it has been cleared. if self.coordinator.data.status.got_error: - return STATE_ERROR + return VacuumActivity.ERROR return self._state diff --git a/tests/components/demo/test_vacuum.py b/tests/components/demo/test_vacuum.py index a4e4d6f0e1f..f910e6e53ac 100644 --- a/tests/components/demo/test_vacuum.py +++ b/tests/components/demo/test_vacuum.py @@ -22,11 +22,7 @@ from homeassistant.components.vacuum import ( DOMAIN as VACUUM_DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, - STATE_CLEANING, - STATE_DOCKED, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, + VacuumActivity, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -75,35 +71,35 @@ async def test_supported_features(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS - assert state.state == STATE_DOCKED + assert state.state == VacuumActivity.DOCKED state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 12412 assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS - assert state.state == STATE_DOCKED + assert state.state == VacuumActivity.DOCKED state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 12360 assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None - assert state.state == STATE_DOCKED + assert state.state == VacuumActivity.DOCKED state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None - assert state.state == STATE_DOCKED + assert state.state == VacuumActivity.DOCKED state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None - assert state.state == STATE_DOCKED + assert state.state == VacuumActivity.DOCKED async def test_methods(hass: HomeAssistant) -> None: @@ -111,29 +107,29 @@ async def test_methods(hass: HomeAssistant) -> None: await common.async_start(hass, ENTITY_VACUUM_BASIC) await hass.async_block_till_done() state = hass.states.get(ENTITY_VACUUM_BASIC) - 
assert state.state == STATE_CLEANING + assert state.state == VacuumActivity.CLEANING await common.async_stop(hass, ENTITY_VACUUM_BASIC) await hass.async_block_till_done() state = hass.states.get(ENTITY_VACUUM_BASIC) - assert state.state == STATE_IDLE + assert state.state == VacuumActivity.IDLE state = hass.states.get(ENTITY_VACUUM_COMPLETE) await hass.async_block_till_done() assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 - assert state.state == STATE_DOCKED + assert state.state == VacuumActivity.DOCKED await async_setup_component(hass, "notify", {}) await hass.async_block_till_done() await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) await hass.async_block_till_done() state = hass.states.get(ENTITY_VACUUM_COMPLETE) - assert state.state == STATE_IDLE + assert state.state == VacuumActivity.IDLE await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) await hass.async_block_till_done() state = hass.states.get(ENTITY_VACUUM_COMPLETE) - assert state.state == STATE_RETURNING + assert state.state == VacuumActivity.RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE @@ -145,21 +141,21 @@ async def test_methods(hass: HomeAssistant) -> None: await common.async_clean_spot(hass, ENTITY_VACUUM_COMPLETE) await hass.async_block_till_done() state = hass.states.get(ENTITY_VACUUM_COMPLETE) - assert state.state == STATE_CLEANING + assert state.state == VacuumActivity.CLEANING await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) await hass.async_block_till_done() state = hass.states.get(ENTITY_VACUUM_COMPLETE) - assert state.state == STATE_PAUSED + assert state.state == VacuumActivity.PAUSED await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) - assert state.state == STATE_RETURNING + assert state.state == VacuumActivity.RETURNING async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=31)) await hass.async_block_till_done() state = hass.states.get(ENTITY_VACUUM_COMPLETE) - assert state.state == STATE_DOCKED + assert state.state == VacuumActivity.DOCKED async def test_unsupported_methods(hass: HomeAssistant) -> None: @@ -251,4 +247,4 @@ async def test_send_command(hass: HomeAssistant) -> None: new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_complete != new_state_complete - assert new_state_complete.state == STATE_IDLE + assert new_state_complete.state == VacuumActivity.IDLE diff --git a/tests/components/google_assistant/test_trait.py b/tests/components/google_assistant/test_trait.py index 1e42edf8e7b..9e9c7015674 100644 --- a/tests/components/google_assistant/test_trait.py +++ b/tests/components/google_assistant/test_trait.py @@ -431,7 +431,9 @@ async def test_dock_vacuum(hass: HomeAssistant) -> None: assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None, None) - trt = trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) + trt = trait.DockTrait( + hass, State("vacuum.bla", vacuum.VacuumActivity.IDLE), BASIC_CONFIG + ) assert trt.sync_attributes() == {} @@ -454,7 +456,7 @@ async def test_locate_vacuum(hass: HomeAssistant) -> None: hass, State( "vacuum.bla", - vacuum.STATE_IDLE, + vacuum.VacuumActivity.IDLE, {ATTR_SUPPORTED_FEATURES: VacuumEntityFeature.LOCATE}, ), BASIC_CONFIG, @@ -485,7 +487,7 @@ async def test_energystorage_vacuum(hass: HomeAssistant) -> None: hass, State( "vacuum.bla", - vacuum.STATE_DOCKED, + vacuum.VacuumActivity.DOCKED, { 
ATTR_SUPPORTED_FEATURES: VacuumEntityFeature.BATTERY, ATTR_BATTERY_LEVEL: 100, @@ -511,7 +513,7 @@ async def test_energystorage_vacuum(hass: HomeAssistant) -> None: hass, State( "vacuum.bla", - vacuum.STATE_CLEANING, + vacuum.VacuumActivity.CLEANING, { ATTR_SUPPORTED_FEATURES: VacuumEntityFeature.BATTERY, ATTR_BATTERY_LEVEL: 20, @@ -551,7 +553,7 @@ async def test_startstop_vacuum(hass: HomeAssistant) -> None: hass, State( "vacuum.bla", - vacuum.STATE_PAUSED, + vacuum.VacuumActivity.PAUSED, {ATTR_SUPPORTED_FEATURES: VacuumEntityFeature.PAUSE}, ), BASIC_CONFIG, diff --git a/tests/components/homekit/test_type_switches.py b/tests/components/homekit/test_type_switches.py index 9b708f18b8a..0d19763e4c7 100644 --- a/tests/components/homekit/test_type_switches.py +++ b/tests/components/homekit/test_type_switches.py @@ -26,8 +26,7 @@ from homeassistant.components.vacuum import ( SERVICE_START, SERVICE_TURN_OFF, SERVICE_TURN_ON, - STATE_CLEANING, - STATE_DOCKED, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.const import ( @@ -295,7 +294,7 @@ async def test_vacuum_set_state_with_returnhome_and_start_support( hass.states.async_set( entity_id, - STATE_CLEANING, + VacuumActivity.CLEANING, { ATTR_SUPPORTED_FEATURES: VacuumEntityFeature.RETURN_HOME | VacuumEntityFeature.START @@ -306,7 +305,7 @@ async def test_vacuum_set_state_with_returnhome_and_start_support( hass.states.async_set( entity_id, - STATE_DOCKED, + VacuumActivity.DOCKED, { ATTR_SUPPORTED_FEATURES: VacuumEntityFeature.RETURN_HOME | VacuumEntityFeature.START diff --git a/tests/components/litterrobot/test_init.py b/tests/components/litterrobot/test_init.py index 21b16097603..1c8e0742b26 100644 --- a/tests/components/litterrobot/test_init.py +++ b/tests/components/litterrobot/test_init.py @@ -9,7 +9,7 @@ from homeassistant.components import litterrobot from homeassistant.components.vacuum import ( DOMAIN as VACUUM_DOMAIN, SERVICE_START, - STATE_DOCKED, + VacuumActivity, ) from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID @@ -30,7 +30,7 @@ async def test_unload_entry(hass: HomeAssistant, mock_account: MagicMock) -> Non vacuum = hass.states.get(VACUUM_ENTITY_ID) assert vacuum - assert vacuum.state == STATE_DOCKED + assert vacuum.state == VacuumActivity.DOCKED await hass.services.async_call( VACUUM_DOMAIN, diff --git a/tests/components/litterrobot/test_vacuum.py b/tests/components/litterrobot/test_vacuum.py index 735ee6653aa..f18098ccf1d 100644 --- a/tests/components/litterrobot/test_vacuum.py +++ b/tests/components/litterrobot/test_vacuum.py @@ -15,9 +15,7 @@ from homeassistant.components.vacuum import ( DOMAIN as PLATFORM_DOMAIN, SERVICE_START, SERVICE_STOP, - STATE_DOCKED, - STATE_ERROR, - STATE_PAUSED, + VacuumActivity, ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -53,7 +51,7 @@ async def test_vacuum( vacuum = hass.states.get(VACUUM_ENTITY_ID) assert vacuum - assert vacuum.state == STATE_DOCKED + assert vacuum.state == VacuumActivity.DOCKED assert vacuum.attributes["is_sleeping"] is False ent_reg_entry = entity_registry.async_get(VACUUM_ENTITY_ID) @@ -95,18 +93,21 @@ async def test_vacuum_with_error( vacuum = hass.states.get(VACUUM_ENTITY_ID) assert vacuum - assert vacuum.state == STATE_ERROR + assert vacuum.state == VacuumActivity.ERROR @pytest.mark.parametrize( ("robot_data", "expected_state"), [ - ({"displayCode": "DC_CAT_DETECT"}, STATE_DOCKED), - ({"isDFIFull": True}, STATE_ERROR), - ({"robotCycleState": 
"CYCLE_STATE_CAT_DETECT"}, STATE_PAUSED), + ({"displayCode": "DC_CAT_DETECT"}, VacuumActivity.DOCKED), + ({"isDFIFull": True}, VacuumActivity.ERROR), + ( + {"robotCycleState": "CYCLE_STATE_CAT_DETECT"}, + VacuumActivity.PAUSED, + ), ], ) -async def test_vacuum_states( +async def test_activities( hass: HomeAssistant, mock_account_with_litterrobot_4: MagicMock, robot_data: dict[str, str | bool], @@ -150,7 +151,7 @@ async def test_commands( vacuum = hass.states.get(VACUUM_ENTITY_ID) assert vacuum - assert vacuum.state == STATE_DOCKED + assert vacuum.state == VacuumActivity.DOCKED extra = extra or {} data = {ATTR_ENTITY_ID: VACUUM_ENTITY_ID, **extra.get("data", {})} diff --git a/tests/components/mqtt/test_vacuum.py b/tests/components/mqtt/test_vacuum.py index fef62c33a93..c1c662048d7 100644 --- a/tests/components/mqtt/test_vacuum.py +++ b/tests/components/mqtt/test_vacuum.py @@ -27,8 +27,7 @@ from homeassistant.components.vacuum import ( SERVICE_RETURN_TO_BASE, SERVICE_START, SERVICE_STOP, - STATE_CLEANING, - STATE_DOCKED, + VacuumActivity, ) from homeassistant.const import CONF_NAME, ENTITY_MATCH_ALL, STATE_UNKNOWN from homeassistant.core import HomeAssistant @@ -313,7 +312,7 @@ async def test_status( }""" async_fire_mqtt_message(hass, "vacuum/state", message) state = hass.states.get("vacuum.mqtttest") - assert state.state == STATE_CLEANING + assert state.state == VacuumActivity.CLEANING assert state.attributes.get(ATTR_BATTERY_LEVEL) == 54 assert state.attributes.get(ATTR_BATTERY_ICON) == "mdi:battery-50" assert state.attributes.get(ATTR_FAN_SPEED) == "max" @@ -326,7 +325,7 @@ async def test_status( async_fire_mqtt_message(hass, "vacuum/state", message) state = hass.states.get("vacuum.mqtttest") - assert state.state == STATE_DOCKED + assert state.state == VacuumActivity.DOCKED assert state.attributes.get(ATTR_BATTERY_ICON) == "mdi:battery-charging-60" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 61 assert state.attributes.get(ATTR_FAN_SPEED) == "min" @@ -366,7 +365,7 @@ async def test_no_fan_vacuum( }""" async_fire_mqtt_message(hass, "vacuum/state", message) state = hass.states.get("vacuum.mqtttest") - assert state.state == STATE_CLEANING + assert state.state == VacuumActivity.CLEANING assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) == 54 @@ -380,7 +379,7 @@ async def test_no_fan_vacuum( async_fire_mqtt_message(hass, "vacuum/state", message) state = hass.states.get("vacuum.mqtttest") - assert state.state == STATE_CLEANING + assert state.state == VacuumActivity.CLEANING assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None @@ -394,7 +393,7 @@ async def test_no_fan_vacuum( async_fire_mqtt_message(hass, "vacuum/state", message) state = hass.states.get("vacuum.mqtttest") - assert state.state == STATE_DOCKED + assert state.state == VacuumActivity.DOCKED assert state.attributes.get(ATTR_BATTERY_ICON) == "mdi:battery-charging-60" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 61 diff --git a/tests/components/sharkiq/test_vacuum.py b/tests/components/sharkiq/test_vacuum.py index 3748cfd6dc4..bfb2176026b 100644 --- a/tests/components/sharkiq/test_vacuum.py +++ b/tests/components/sharkiq/test_vacuum.py @@ -35,10 +35,7 @@ from homeassistant.components.vacuum import ( SERVICE_SET_FAN_SPEED, SERVICE_START, SERVICE_STOP, - STATE_CLEANING, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, + VacuumActivity, VacuumEntityFeature, ) from 
homeassistant.const import ( @@ -160,7 +157,7 @@ async def test_simple_properties( assert entity assert state - assert state.state == STATE_CLEANING + assert state.state == VacuumActivity.CLEANING assert entity.unique_id == "AC000Wxxxxxxxxx" @@ -189,10 +186,10 @@ async def test_initial_attributes( @pytest.mark.parametrize( ("service", "target_state"), [ - (SERVICE_STOP, STATE_IDLE), - (SERVICE_PAUSE, STATE_PAUSED), - (SERVICE_RETURN_TO_BASE, STATE_RETURNING), - (SERVICE_START, STATE_CLEANING), + (SERVICE_STOP, VacuumActivity.IDLE), + (SERVICE_PAUSE, VacuumActivity.PAUSED), + (SERVICE_RETURN_TO_BASE, VacuumActivity.RETURNING), + (SERVICE_START, VacuumActivity.CLEANING), ], ) async def test_cleaning_states( diff --git a/tests/components/template/test_vacuum.py b/tests/components/template/test_vacuum.py index ff428c5d4b4..6053a2bd9ec 100644 --- a/tests/components/template/test_vacuum.py +++ b/tests/components/template/test_vacuum.py @@ -3,14 +3,7 @@ import pytest from homeassistant import setup -from homeassistant.components.vacuum import ( - ATTR_BATTERY_LEVEL, - STATE_CLEANING, - STATE_DOCKED, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, -) +from homeassistant.components.vacuum import ATTR_BATTERY_LEVEL, VacuumActivity from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.exceptions import HomeAssistantError @@ -44,7 +37,7 @@ _BATTERY_LEVEL_INPUT_NUMBER = "input_number.battery_level" }, ), ( - STATE_CLEANING, + VacuumActivity.CLEANING, 100, { "vacuum": { @@ -149,10 +142,10 @@ async def test_templates_with_entities(hass: HomeAssistant) -> None: """Test templates with values from other entities.""" _verify(hass, STATE_UNKNOWN, None) - hass.states.async_set(_STATE_INPUT_SELECT, STATE_CLEANING) + hass.states.async_set(_STATE_INPUT_SELECT, VacuumActivity.CLEANING) hass.states.async_set(_BATTERY_LEVEL_INPUT_NUMBER, 100) await hass.async_block_till_done() - _verify(hass, STATE_CLEANING, 100) + _verify(hass, VacuumActivity.CLEANING, 100) @pytest.mark.parametrize( @@ -370,8 +363,8 @@ async def test_state_services(hass: HomeAssistant, calls: list[ServiceCall]) -> await hass.async_block_till_done() # verify - assert hass.states.get(_STATE_INPUT_SELECT).state == STATE_CLEANING - _verify(hass, STATE_CLEANING, None) + assert hass.states.get(_STATE_INPUT_SELECT).state == VacuumActivity.CLEANING + _verify(hass, VacuumActivity.CLEANING, None) assert len(calls) == 1 assert calls[-1].data["action"] == "start" assert calls[-1].data["caller"] == _TEST_VACUUM @@ -381,8 +374,8 @@ async def test_state_services(hass: HomeAssistant, calls: list[ServiceCall]) -> await hass.async_block_till_done() # verify - assert hass.states.get(_STATE_INPUT_SELECT).state == STATE_PAUSED - _verify(hass, STATE_PAUSED, None) + assert hass.states.get(_STATE_INPUT_SELECT).state == VacuumActivity.PAUSED + _verify(hass, VacuumActivity.PAUSED, None) assert len(calls) == 2 assert calls[-1].data["action"] == "pause" assert calls[-1].data["caller"] == _TEST_VACUUM @@ -392,8 +385,8 @@ async def test_state_services(hass: HomeAssistant, calls: list[ServiceCall]) -> await hass.async_block_till_done() # verify - assert hass.states.get(_STATE_INPUT_SELECT).state == STATE_IDLE - _verify(hass, STATE_IDLE, None) + assert hass.states.get(_STATE_INPUT_SELECT).state == VacuumActivity.IDLE + _verify(hass, VacuumActivity.IDLE, None) assert len(calls) == 3 assert calls[-1].data["action"] == "stop" assert calls[-1].data["caller"] == _TEST_VACUUM 
@@ -403,8 +396,8 @@ async def test_state_services(hass: HomeAssistant, calls: list[ServiceCall]) -> await hass.async_block_till_done() # verify - assert hass.states.get(_STATE_INPUT_SELECT).state == STATE_RETURNING - _verify(hass, STATE_RETURNING, None) + assert hass.states.get(_STATE_INPUT_SELECT).state == VacuumActivity.RETURNING + _verify(hass, VacuumActivity.RETURNING, None) assert len(calls) == 4 assert calls[-1].data["action"] == "return_to_base" assert calls[-1].data["caller"] == _TEST_VACUUM @@ -506,7 +499,11 @@ async def _register_basic_vacuum(hass: HomeAssistant) -> None: assert await setup.async_setup_component( hass, "input_select", - {"input_select": {"state": {"name": "State", "options": [STATE_CLEANING]}}}, + { + "input_select": { + "state": {"name": "State", "options": [VacuumActivity.CLEANING]} + } + }, ) with assert_setup_component(1, "vacuum"): @@ -522,7 +519,7 @@ async def _register_basic_vacuum(hass: HomeAssistant) -> None: "service": "input_select.select_option", "data": { "entity_id": _STATE_INPUT_SELECT, - "option": STATE_CLEANING, + "option": VacuumActivity.CLEANING, }, } } @@ -554,11 +551,11 @@ async def _register_components(hass: HomeAssistant) -> None: "state": { "name": "State", "options": [ - STATE_CLEANING, - STATE_DOCKED, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, + VacuumActivity.CLEANING, + VacuumActivity.DOCKED, + VacuumActivity.IDLE, + VacuumActivity.PAUSED, + VacuumActivity.RETURNING, ], }, "fan_speed": { @@ -578,7 +575,7 @@ async def _register_components(hass: HomeAssistant) -> None: "service": "input_select.select_option", "data": { "entity_id": _STATE_INPUT_SELECT, - "option": STATE_CLEANING, + "option": VacuumActivity.CLEANING, }, }, { @@ -592,7 +589,10 @@ async def _register_components(hass: HomeAssistant) -> None: "pause": [ { "service": "input_select.select_option", - "data": {"entity_id": _STATE_INPUT_SELECT, "option": STATE_PAUSED}, + "data": { + "entity_id": _STATE_INPUT_SELECT, + "option": VacuumActivity.PAUSED, + }, }, { "service": "test.automation", @@ -605,7 +605,10 @@ async def _register_components(hass: HomeAssistant) -> None: "stop": [ { "service": "input_select.select_option", - "data": {"entity_id": _STATE_INPUT_SELECT, "option": STATE_IDLE}, + "data": { + "entity_id": _STATE_INPUT_SELECT, + "option": VacuumActivity.IDLE, + }, }, { "service": "test.automation", @@ -620,7 +623,7 @@ async def _register_components(hass: HomeAssistant) -> None: "service": "input_select.select_option", "data": { "entity_id": _STATE_INPUT_SELECT, - "option": STATE_RETURNING, + "option": VacuumActivity.RETURNING, }, }, { diff --git a/tests/components/vacuum/__init__.py b/tests/components/vacuum/__init__.py index 0a681730cb2..26e31a87eee 100644 --- a/tests/components/vacuum/__init__.py +++ b/tests/components/vacuum/__init__.py @@ -4,12 +4,8 @@ from typing import Any from homeassistant.components.vacuum import ( DOMAIN, - STATE_CLEANING, - STATE_DOCKED, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, StateVacuumEntity, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.config_entries import ConfigEntry @@ -39,20 +35,20 @@ class MockVacuum(MockEntity, StateVacuumEntity): def __init__(self, **values: Any) -> None: """Initialize a mock vacuum entity.""" super().__init__(**values) - self._attr_state = STATE_DOCKED + self._attr_activity = VacuumActivity.DOCKED self._attr_fan_speed = "slow" def stop(self, **kwargs: Any) -> None: """Stop cleaning.""" - self._attr_state = STATE_IDLE + self._attr_activity = VacuumActivity.IDLE def return_to_base(self, 
**kwargs: Any) -> None: """Return to base.""" - self._attr_state = STATE_RETURNING + self._attr_activity = VacuumActivity.RETURNING def clean_spot(self, **kwargs: Any) -> None: """Clean a spot.""" - self._attr_state = STATE_CLEANING + self._attr_activity = VacuumActivity.CLEANING def set_fan_speed(self, fan_speed: str, **kwargs: Any) -> None: """Set the fan speed.""" @@ -60,11 +56,11 @@ class MockVacuum(MockEntity, StateVacuumEntity): def start(self) -> None: """Start cleaning.""" - self._attr_state = STATE_CLEANING + self._attr_activity = VacuumActivity.CLEANING def pause(self) -> None: """Pause cleaning.""" - self._attr_state = STATE_PAUSED + self._attr_activity = VacuumActivity.PAUSED async def help_async_setup_entry_init( diff --git a/tests/components/vacuum/conftest.py b/tests/components/vacuum/conftest.py index d298260c575..6e6639431d0 100644 --- a/tests/components/vacuum/conftest.py +++ b/tests/components/vacuum/conftest.py @@ -1,13 +1,28 @@ """Fixtures for Vacuum platform tests.""" -from collections.abc import Generator +from collections.abc import AsyncGenerator, Generator +from unittest.mock import MagicMock, patch import pytest -from homeassistant.config_entries import ConfigFlow +from homeassistant.components.vacuum import DOMAIN as VACUUM_DOMAIN, VacuumEntityFeature +from homeassistant.config_entries import ConfigEntry, ConfigFlow from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er, frame +from homeassistant.helpers.entity_platform import AddEntitiesCallback -from tests.common import mock_config_flow, mock_platform +from . import MockVacuum + +from tests.common import ( + MockConfigEntry, + MockModule, + MockPlatform, + mock_config_flow, + mock_integration, + mock_platform, +) + +TEST_DOMAIN = "test" class MockFlow(ConfigFlow): @@ -17,7 +32,94 @@ class MockFlow(ConfigFlow): @pytest.fixture def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: """Mock config flow.""" - mock_platform(hass, "test.config_flow") + mock_platform(hass, f"{TEST_DOMAIN}.config_flow") - with mock_config_flow("test", MockFlow): + with mock_config_flow(TEST_DOMAIN, MockFlow): + yield + + +@pytest.fixture(name="supported_features") +async def vacuum_supported_features() -> VacuumEntityFeature: + """Return the supported features for the test vacuum entity.""" + return ( + VacuumEntityFeature.PAUSE + | VacuumEntityFeature.STOP + | VacuumEntityFeature.RETURN_HOME + | VacuumEntityFeature.FAN_SPEED + | VacuumEntityFeature.BATTERY + | VacuumEntityFeature.CLEAN_SPOT + | VacuumEntityFeature.MAP + | VacuumEntityFeature.STATE + | VacuumEntityFeature.START + ) + + +@pytest.fixture(name="mock_vacuum_entity") +async def setup_vacuum_platform_test_entity( + hass: HomeAssistant, + config_flow_fixture: None, + entity_registry: er.EntityRegistry, + supported_features: VacuumEntityFeature, +) -> MagicMock: + """Set up vacuum entity using an entity platform.""" + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups( + config_entry, [VACUUM_DOMAIN] + ) + return True + + mock_integration( + hass, + MockModule( + TEST_DOMAIN, + async_setup_entry=async_setup_entry_init, + ), + ) + + entity = MockVacuum( + supported_features=supported_features, + ) + + async def async_setup_entry_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test vacuum platform via config 
entry.""" + async_add_entities([entity]) + + mock_platform( + hass, + f"{TEST_DOMAIN}.{VACUUM_DOMAIN}", + MockPlatform(async_setup_entry=async_setup_entry_platform), + ) + + config_entry = MockConfigEntry(domain=TEST_DOMAIN) + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get(entity.entity_id) + assert state is not None + + return entity + + +@pytest.fixture(name="mock_as_custom_component") +async def mock_frame(hass: HomeAssistant) -> AsyncGenerator[None]: + """Mock frame.""" + with patch( + "homeassistant.helpers.frame.get_integration_frame", + return_value=frame.IntegrationFrame( + custom_integration=True, + integration="alarm_control_panel", + module="test_init.py", + relative_filename="test_init.py", + frame=frame.get_current_frame(), + ), + ): yield diff --git a/tests/components/vacuum/test_device_condition.py b/tests/components/vacuum/test_device_condition.py index 9a2a67f7141..5a1b1fea7de 100644 --- a/tests/components/vacuum/test_device_condition.py +++ b/tests/components/vacuum/test_device_condition.py @@ -5,12 +5,7 @@ from pytest_unordered import unordered from homeassistant.components import automation from homeassistant.components.device_automation import DeviceAutomationType -from homeassistant.components.vacuum import ( - DOMAIN, - STATE_CLEANING, - STATE_DOCKED, - STATE_RETURNING, -) +from homeassistant.components.vacuum import DOMAIN, VacuumActivity from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -122,7 +117,7 @@ async def test_if_state( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_DOCKED) + hass.states.async_set(entry.entity_id, VacuumActivity.DOCKED) assert await async_setup_component( hass, @@ -174,7 +169,7 @@ async def test_if_state( assert len(service_calls) == 1 assert service_calls[0].data["some"] == "is_docked - event - test_event2" - hass.states.async_set(entry.entity_id, STATE_CLEANING) + hass.states.async_set(entry.entity_id, VacuumActivity.CLEANING) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() @@ -182,7 +177,7 @@ async def test_if_state( assert service_calls[1].data["some"] == "is_cleaning - event - test_event1" # Returning means it's still cleaning - hass.states.async_set(entry.entity_id, STATE_RETURNING) + hass.states.async_set(entry.entity_id, VacuumActivity.RETURNING) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() @@ -207,7 +202,7 @@ async def test_if_state_legacy( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_CLEANING) + hass.states.async_set(entry.entity_id, VacuumActivity.CLEANING) assert await async_setup_component( hass, diff --git a/tests/components/vacuum/test_device_trigger.py b/tests/components/vacuum/test_device_trigger.py index c186bd4d9eb..3a0cbafb4a1 100644 --- a/tests/components/vacuum/test_device_trigger.py +++ b/tests/components/vacuum/test_device_trigger.py @@ -7,7 +7,7 @@ from pytest_unordered import unordered from homeassistant.components import automation from homeassistant.components.device_automation import DeviceAutomationType -from homeassistant.components.vacuum import DOMAIN, STATE_CLEANING, STATE_DOCKED +from homeassistant.components.vacuum import DOMAIN, 
VacuumActivity from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -188,7 +188,7 @@ async def test_if_fires_on_state_change( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_DOCKED) + hass.states.async_set(entry.entity_id, VacuumActivity.DOCKED) assert await async_setup_component( hass, @@ -238,7 +238,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is cleaning - hass.states.async_set(entry.entity_id, STATE_CLEANING) + hass.states.async_set(entry.entity_id, VacuumActivity.CLEANING) await hass.async_block_till_done() assert len(service_calls) == 1 assert ( @@ -247,7 +247,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is docked - hass.states.async_set(entry.entity_id, STATE_DOCKED) + hass.states.async_set(entry.entity_id, VacuumActivity.DOCKED) await hass.async_block_till_done() assert len(service_calls) == 2 assert ( @@ -273,7 +273,7 @@ async def test_if_fires_on_state_change_legacy( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_DOCKED) + hass.states.async_set(entry.entity_id, VacuumActivity.DOCKED) assert await async_setup_component( hass, @@ -304,7 +304,7 @@ async def test_if_fires_on_state_change_legacy( ) # Fake that the entity is cleaning - hass.states.async_set(entry.entity_id, STATE_CLEANING) + hass.states.async_set(entry.entity_id, VacuumActivity.CLEANING) await hass.async_block_till_done() assert len(service_calls) == 1 assert ( @@ -330,7 +330,7 @@ async def test_if_fires_on_state_change_with_for( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_DOCKED) + hass.states.async_set(entry.entity_id, VacuumActivity.DOCKED) assert await async_setup_component( hass, @@ -365,7 +365,7 @@ async def test_if_fires_on_state_change_with_for( await hass.async_block_till_done() assert len(service_calls) == 0 - hass.states.async_set(entry.entity_id, STATE_CLEANING) + hass.states.async_set(entry.entity_id, VacuumActivity.CLEANING) await hass.async_block_till_done() assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) diff --git a/tests/components/vacuum/test_init.py b/tests/components/vacuum/test_init.py index d03f1d28b58..8babd9fa265 100644 --- a/tests/components/vacuum/test_init.py +++ b/tests/components/vacuum/test_init.py @@ -5,12 +5,13 @@ from __future__ import annotations from enum import Enum from types import ModuleType from typing import Any +from unittest.mock import patch import pytest from homeassistant.components import vacuum from homeassistant.components.vacuum import ( - DOMAIN, + DOMAIN as VACUUM_DOMAIN, SERVICE_CLEAN_SPOT, SERVICE_LOCATE, SERVICE_PAUSE, @@ -19,19 +20,19 @@ from homeassistant.components.vacuum import ( SERVICE_SET_FAN_SPEED, SERVICE_START, SERVICE_STOP, - STATE_CLEANING, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, StateVacuumEntity, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import frame from . 
import MockVacuum, help_async_setup_entry_init, help_async_unload_entry +from .common import async_start from tests.common import ( MockConfigEntry, + MockEntity, MockModule, help_test_all, import_and_test_deprecated_constant_enum, @@ -72,14 +73,33 @@ def test_deprecated_constants( ) +@pytest.mark.parametrize( + ("enum", "constant_prefix"), _create_tuples(vacuum.VacuumActivity, "STATE_") +) +@pytest.mark.parametrize( + "module", + [vacuum], +) +def test_deprecated_constants_for_state( + caplog: pytest.LogCaptureFixture, + enum: Enum, + constant_prefix: str, + module: ModuleType, +) -> None: + """Test deprecated constants.""" + import_and_test_deprecated_constant_enum( + caplog, module, enum, constant_prefix, "2026.1" + ) + + @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_CLEAN_SPOT, STATE_CLEANING), - (SERVICE_PAUSE, STATE_PAUSED), - (SERVICE_RETURN_TO_BASE, STATE_RETURNING), - (SERVICE_START, STATE_CLEANING), - (SERVICE_STOP, STATE_IDLE), + (SERVICE_CLEAN_SPOT, VacuumActivity.CLEANING), + (SERVICE_PAUSE, VacuumActivity.PAUSED), + (SERVICE_RETURN_TO_BASE, VacuumActivity.RETURNING), + (SERVICE_START, VacuumActivity.CLEANING), + (SERVICE_STOP, VacuumActivity.IDLE), ], ) async def test_state_services( @@ -101,18 +121,20 @@ async def test_state_services( async_unload_entry=help_async_unload_entry, ), ) - setup_test_component_platform(hass, DOMAIN, [mock_vacuum], from_config_entry=True) + setup_test_component_platform( + hass, VACUUM_DOMAIN, [mock_vacuum], from_config_entry=True + ) assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.services.async_call( - DOMAIN, + VACUUM_DOMAIN, service, {"entity_id": mock_vacuum.entity_id}, blocking=True, ) - vacuum_state = hass.states.get(mock_vacuum.entity_id) + activity = hass.states.get(mock_vacuum.entity_id) - assert vacuum_state.state == expected_state + assert activity.state == expected_state async def test_fan_speed(hass: HomeAssistant, config_flow_fixture: None) -> None: @@ -132,14 +154,16 @@ async def test_fan_speed(hass: HomeAssistant, config_flow_fixture: None) -> None async_unload_entry=help_async_unload_entry, ), ) - setup_test_component_platform(hass, DOMAIN, [mock_vacuum], from_config_entry=True) + setup_test_component_platform( + hass, VACUUM_DOMAIN, [mock_vacuum], from_config_entry=True + ) assert await hass.config_entries.async_setup(config_entry.entry_id) config_entry = MockConfigEntry(domain="test", data={}) config_entry.add_to_hass(hass) await hass.services.async_call( - DOMAIN, + VACUUM_DOMAIN, SERVICE_SET_FAN_SPEED, {"entity_id": mock_vacuum.entity_id, "fan_speed": "high"}, blocking=True, @@ -178,11 +202,13 @@ async def test_locate(hass: HomeAssistant, config_flow_fixture: None) -> None: async_unload_entry=help_async_unload_entry, ), ) - setup_test_component_platform(hass, DOMAIN, [mock_vacuum], from_config_entry=True) + setup_test_component_platform( + hass, VACUUM_DOMAIN, [mock_vacuum], from_config_entry=True + ) assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.services.async_call( - DOMAIN, + VACUUM_DOMAIN, SERVICE_LOCATE, {"entity_id": mock_vacuum.entity_id}, blocking=True, @@ -227,11 +253,13 @@ async def test_send_command(hass: HomeAssistant, config_flow_fixture: None) -> N async_unload_entry=help_async_unload_entry, ), ) - setup_test_component_platform(hass, DOMAIN, [mock_vacuum], from_config_entry=True) + setup_test_component_platform( + hass, VACUUM_DOMAIN, [mock_vacuum], from_config_entry=True + ) assert await 
hass.config_entries.async_setup(config_entry.entry_id) await hass.services.async_call( - DOMAIN, + VACUUM_DOMAIN, SERVICE_SEND_COMMAND, { "entity_id": mock_vacuum.entity_id, @@ -278,3 +306,178 @@ async def test_supported_features_compat(hass: HomeAssistant) -> None: "fan_speed_list": ["silent", "normal", "pet hair"] } assert entity._deprecated_supported_features_reported + + +async def test_vacuum_not_log_deprecated_state_warning( + hass: HomeAssistant, + mock_vacuum_entity: MockVacuum, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test correctly using activity doesn't log issue or raise repair.""" + state = hass.states.get(mock_vacuum_entity.entity_id) + assert state is not None + assert ( + "should implement the 'activity' property and return its state using the VacuumActivity enum" + not in caplog.text + ) + + +@pytest.mark.usefixtures("mock_as_custom_component") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_vacuum_log_deprecated_state_warning_using_state_prop( + hass: HomeAssistant, + config_flow_fixture: None, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test incorrectly using state property does log issue and raise repair.""" + + class MockLegacyVacuum(MockVacuum): + """Mocked vacuum entity.""" + + @property + def state(self) -> str: + """Return the state of the entity.""" + return VacuumActivity.CLEANING + + entity = MockLegacyVacuum( + name="Testing", + entity_id="vacuum.test", + ) + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=help_async_setup_entry_init, + async_unload_entry=help_async_unload_entry, + ), + ) + setup_test_component_platform(hass, VACUUM_DOMAIN, [entity], from_config_entry=True) + assert await hass.config_entries.async_setup(config_entry.entry_id) + + state = hass.states.get(entity.entity_id) + assert state is not None + + assert ( + "should implement the 'activity' property and return its state using the VacuumActivity enum" + in caplog.text + ) + + +@pytest.mark.usefixtures("mock_as_custom_component") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_vacuum_log_deprecated_state_warning_using_attr_state_attr( + hass: HomeAssistant, + config_flow_fixture: None, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test incorrectly using _attr_state attribute does log issue and raise repair.""" + + class MockLegacyVacuum(MockVacuum): + """Mocked vacuum entity.""" + + def start(self) -> None: + """Start cleaning.""" + self._attr_state = VacuumActivity.CLEANING + + entity = MockLegacyVacuum( + name="Testing", + entity_id="vacuum.test", + ) + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=help_async_setup_entry_init, + async_unload_entry=help_async_unload_entry, + ), + ) + setup_test_component_platform(hass, VACUUM_DOMAIN, [entity], from_config_entry=True) + assert await hass.config_entries.async_setup(config_entry.entry_id) + + state = hass.states.get(entity.entity_id) + assert state is not None + + assert ( + "should implement the 'activity' property and return its state using the VacuumActivity enum" + not in caplog.text + ) + + await async_start(hass, entity.entity_id) + + assert ( + "should implement the 'activity' property and return its state using the VacuumActivity enum" + in caplog.text + ) + caplog.clear() + await async_start(hass, entity.entity_id) + # Test we only log once + assert ( + 
"should implement the 'activity' property and return its state using the VacuumActivity enum" + not in caplog.text + ) + + +@pytest.mark.usefixtures("mock_as_custom_component") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_alarm_control_panel_deprecated_state_does_not_break_state( + hass: HomeAssistant, + config_flow_fixture: None, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test using _attr_state attribute does not break state.""" + + class MockLegacyVacuum(MockEntity, StateVacuumEntity): + """Mocked vacuum entity.""" + + _attr_supported_features = VacuumEntityFeature.STATE | VacuumEntityFeature.START + + def __init__(self, **values: Any) -> None: + """Initialize a mock vacuum entity.""" + super().__init__(**values) + self._attr_state = VacuumActivity.DOCKED + + def start(self) -> None: + """Start cleaning.""" + self._attr_state = VacuumActivity.CLEANING + + entity = MockLegacyVacuum( + name="Testing", + entity_id="vacuum.test", + ) + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=help_async_setup_entry_init, + async_unload_entry=help_async_unload_entry, + ), + ) + setup_test_component_platform(hass, VACUUM_DOMAIN, [entity], from_config_entry=True) + assert await hass.config_entries.async_setup(config_entry.entry_id) + + state = hass.states.get(entity.entity_id) + assert state is not None + assert state.state == "docked" + + await hass.services.async_call( + VACUUM_DOMAIN, + SERVICE_START, + { + "entity_id": entity.entity_id, + }, + blocking=True, + ) + await hass.async_block_till_done() + + state = hass.states.get(entity.entity_id) + assert state is not None + assert state.state == "cleaning" diff --git a/tests/components/vacuum/test_reproduce_state.py b/tests/components/vacuum/test_reproduce_state.py index ff8da28e98c..dc5d81e8f08 100644 --- a/tests/components/vacuum/test_reproduce_state.py +++ b/tests/components/vacuum/test_reproduce_state.py @@ -9,18 +9,9 @@ from homeassistant.components.vacuum import ( SERVICE_SET_FAN_SPEED, SERVICE_START, SERVICE_STOP, - STATE_CLEANING, - STATE_DOCKED, - STATE_RETURNING, -) -from homeassistant.const import ( - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - STATE_IDLE, - STATE_OFF, - STATE_ON, - STATE_PAUSED, + VacuumActivity, ) +from homeassistant.const import SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant, State from homeassistant.helpers.state import async_reproduce_state @@ -39,11 +30,11 @@ async def test_reproducing_states( hass.states.async_set( "vacuum.entity_on_fan", STATE_ON, {ATTR_FAN_SPEED: FAN_SPEED_LOW} ) - hass.states.async_set("vacuum.entity_cleaning", STATE_CLEANING, {}) - hass.states.async_set("vacuum.entity_docked", STATE_DOCKED, {}) - hass.states.async_set("vacuum.entity_idle", STATE_IDLE, {}) - hass.states.async_set("vacuum.entity_returning", STATE_RETURNING, {}) - hass.states.async_set("vacuum.entity_paused", STATE_PAUSED, {}) + hass.states.async_set("vacuum.entity_cleaning", VacuumActivity.CLEANING, {}) + hass.states.async_set("vacuum.entity_docked", VacuumActivity.DOCKED, {}) + hass.states.async_set("vacuum.entity_idle", VacuumActivity.IDLE, {}) + hass.states.async_set("vacuum.entity_returning", VacuumActivity.RETURNING, {}) + hass.states.async_set("vacuum.entity_paused", VacuumActivity.PAUSED, {}) turn_on_calls = async_mock_service(hass, "vacuum", SERVICE_TURN_ON) turn_off_calls = async_mock_service(hass, "vacuum", SERVICE_TURN_OFF) @@ -60,11 
+51,11 @@ async def test_reproducing_states( State("vacuum.entity_off", STATE_OFF), State("vacuum.entity_on", STATE_ON), State("vacuum.entity_on_fan", STATE_ON, {ATTR_FAN_SPEED: FAN_SPEED_LOW}), - State("vacuum.entity_cleaning", STATE_CLEANING), - State("vacuum.entity_docked", STATE_DOCKED), - State("vacuum.entity_idle", STATE_IDLE), - State("vacuum.entity_returning", STATE_RETURNING), - State("vacuum.entity_paused", STATE_PAUSED), + State("vacuum.entity_cleaning", VacuumActivity.CLEANING), + State("vacuum.entity_docked", VacuumActivity.DOCKED), + State("vacuum.entity_idle", VacuumActivity.IDLE), + State("vacuum.entity_returning", VacuumActivity.RETURNING), + State("vacuum.entity_paused", VacuumActivity.PAUSED), ], ) @@ -95,11 +86,11 @@ async def test_reproducing_states( State("vacuum.entity_off", STATE_ON), State("vacuum.entity_on", STATE_OFF), State("vacuum.entity_on_fan", STATE_ON, {ATTR_FAN_SPEED: FAN_SPEED_HIGH}), - State("vacuum.entity_cleaning", STATE_PAUSED), - State("vacuum.entity_docked", STATE_CLEANING), - State("vacuum.entity_idle", STATE_DOCKED), - State("vacuum.entity_returning", STATE_CLEANING), - State("vacuum.entity_paused", STATE_IDLE), + State("vacuum.entity_cleaning", VacuumActivity.PAUSED), + State("vacuum.entity_docked", VacuumActivity.CLEANING), + State("vacuum.entity_idle", VacuumActivity.DOCKED), + State("vacuum.entity_returning", VacuumActivity.CLEANING), + State("vacuum.entity_paused", VacuumActivity.IDLE), # Should not raise State("vacuum.non_existing", STATE_ON), ], diff --git a/tests/components/xiaomi_miio/test_vacuum.py b/tests/components/xiaomi_miio/test_vacuum.py index 76321a1a0a8..e58f21e387b 100644 --- a/tests/components/xiaomi_miio/test_vacuum.py +++ b/tests/components/xiaomi_miio/test_vacuum.py @@ -21,8 +21,7 @@ from homeassistant.components.vacuum import ( SERVICE_SET_FAN_SPEED, SERVICE_START, SERVICE_STOP, - STATE_CLEANING, - STATE_ERROR, + VacuumActivity, ) from homeassistant.components.xiaomi_miio.const import ( CONF_FLOW_TYPE, @@ -264,7 +263,7 @@ async def test_xiaomi_vacuum_services( # Check state attributes state = hass.states.get(entity_id) - assert state.state == STATE_ERROR + assert state.state == VacuumActivity.ERROR assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 14204 assert state.attributes.get(ATTR_ERROR) == "Error message" assert state.attributes.get(ATTR_BATTERY_ICON) == "mdi:battery-80" @@ -450,7 +449,7 @@ async def test_xiaomi_specific_services( # Check state attributes state = hass.states.get(entity_id) - assert state.state == STATE_CLEANING + assert state.state == VacuumActivity.CLEANING assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 14204 assert state.attributes.get(ATTR_ERROR) is None assert state.attributes.get(ATTR_BATTERY_ICON) == "mdi:battery-30" From 773ad6529ce211508b80312565ab4084cdf846c5 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Fri, 6 Dec 2024 12:22:05 +0100 Subject: [PATCH 0304/1198] Bump deebot-client to 9.2.0 (#132467) --- homeassistant/components/ecovacs/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index 546aba01d90..ad154b8f284 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", 
"deebot_client"], - "requirements": ["py-sucks==0.9.10", "deebot-client==9.1.0"] + "requirements": ["py-sucks==0.9.10", "deebot-client==9.2.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index b4a662e8d91..1d244f28316 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -735,7 +735,7 @@ debugpy==1.8.6 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==9.1.0 +deebot-client==9.2.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 1710b83fe69..45c63376bb9 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -625,7 +625,7 @@ dbus-fast==2.24.3 debugpy==1.8.6 # homeassistant.components.ecovacs -deebot-client==9.1.0 +deebot-client==9.2.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns From 0d1abc31b5a10a87cfaa5a5f72a98bb9ec677ed1 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Fri, 6 Dec 2024 12:22:42 +0100 Subject: [PATCH 0305/1198] Update frontend to 20241127.5 (#132475) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 97a67cbc082..b8033f3f1fd 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20241127.4"] + "requirements": ["home-assistant-frontend==20241127.5"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 1bef0eb6454..cf23e058d78 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -34,7 +34,7 @@ habluetooth==3.6.0 hass-nabucasa==0.86.0 hassil==2.0.5 home-assistant-bluetooth==1.13.0 -home-assistant-frontend==20241127.4 +home-assistant-frontend==20241127.5 home-assistant-intents==2024.12.4 httpx==0.27.2 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 1d244f28316..ff2fea84fe0 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1127,7 +1127,7 @@ hole==0.8.0 holidays==0.62 # homeassistant.components.frontend -home-assistant-frontend==20241127.4 +home-assistant-frontend==20241127.5 # homeassistant.components.conversation home-assistant-intents==2024.12.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 45c63376bb9..e01193c0cda 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -953,7 +953,7 @@ hole==0.8.0 holidays==0.62 # homeassistant.components.frontend -home-assistant-frontend==20241127.4 +home-assistant-frontend==20241127.5 # homeassistant.components.conversation home-assistant-intents==2024.12.4 From 4b4c886438e3036a9ca09aee4b850465f91f809d Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 6 Dec 2024 12:23:07 +0100 Subject: [PATCH 0306/1198] Bump samsungtvws to 2.7.2 (#132474) --- homeassistant/components/samsungtv/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/samsungtv/manifest.json b/homeassistant/components/samsungtv/manifest.json index 
041e9b8fe9b..1a6b5ed5313 100644 --- a/homeassistant/components/samsungtv/manifest.json +++ b/homeassistant/components/samsungtv/manifest.json @@ -37,7 +37,7 @@ "requirements": [ "getmac==0.9.4", "samsungctl[websocket]==0.7.1", - "samsungtvws[async,encrypted]==2.7.1", + "samsungtvws[async,encrypted]==2.7.2", "wakeonlan==2.1.0", "async-upnp-client==0.41.0" ], diff --git a/requirements_all.txt b/requirements_all.txt index ff2fea84fe0..dfdd11f26ce 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2610,7 +2610,7 @@ rxv==0.7.0 samsungctl[websocket]==0.7.1 # homeassistant.components.samsungtv -samsungtvws[async,encrypted]==2.7.1 +samsungtvws[async,encrypted]==2.7.2 # homeassistant.components.sanix sanix==1.0.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index e01193c0cda..eba507b39e6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2089,7 +2089,7 @@ rxv==0.7.0 samsungctl[websocket]==0.7.1 # homeassistant.components.samsungtv -samsungtvws[async,encrypted]==2.7.1 +samsungtvws[async,encrypted]==2.7.2 # homeassistant.components.sanix sanix==1.0.6 From 1a0a2ebdb1c6584c94ce6d2492fcc5d212cdadd3 Mon Sep 17 00:00:00 2001 From: "Steven B." <51370195+sdb9696@users.noreply.github.com> Date: Fri, 6 Dec 2024 14:27:52 +0000 Subject: [PATCH 0307/1198] Bump tplink python-kasa dependency to 0.8.1 (#132472) --- homeassistant/components/tplink/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/tplink/manifest.json b/homeassistant/components/tplink/manifest.json index 3f19f50cdb6..6ce46c0d488 100644 --- a/homeassistant/components/tplink/manifest.json +++ b/homeassistant/components/tplink/manifest.json @@ -300,5 +300,5 @@ "documentation": "https://www.home-assistant.io/integrations/tplink", "iot_class": "local_polling", "loggers": ["kasa"], - "requirements": ["python-kasa[speedups]==0.8.0"] + "requirements": ["python-kasa[speedups]==0.8.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index dfdd11f26ce..2fdfbea31ad 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2362,7 +2362,7 @@ python-join-api==0.0.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.8.0 +python-kasa[speedups]==0.8.1 # homeassistant.components.linkplay python-linkplay==0.0.20 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index eba507b39e6..ea5d92841fd 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1892,7 +1892,7 @@ python-izone==1.2.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.8.0 +python-kasa[speedups]==0.8.1 # homeassistant.components.linkplay python-linkplay==0.0.20 From 35438f65e5a36564e63205963845f5f431e360ed Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Fri, 6 Dec 2024 06:54:21 -0800 Subject: [PATCH 0308/1198] Update exception handling for python3.13 for getpass.getuser() (#132449) * Update exception handling for python3.13 for getpass.getuser() * Add comment Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> * Cleanup trailing space --------- Co-authored-by: Franck Nijhof Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- homeassistant/helpers/system_info.py | 5 ++++- tests/helpers/test_system_info.py | 9 ++++----- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/homeassistant/helpers/system_info.py b/homeassistant/helpers/system_info.py index 
df4c45cd5ed..53866428332 100644 --- a/homeassistant/helpers/system_info.py +++ b/homeassistant/helpers/system_info.py @@ -71,7 +71,10 @@ async def async_get_system_info(hass: HomeAssistant) -> dict[str, Any]: try: info_object["user"] = cached_get_user() - except KeyError: + except (KeyError, OSError): + # OSError on python >= 3.13, KeyError on python < 3.13 + # KeyError can be removed when 3.12 support is dropped + # see https://docs.python.org/3/whatsnew/3.13.html info_object["user"] = None if platform.system() == "Darwin": diff --git a/tests/helpers/test_system_info.py b/tests/helpers/test_system_info.py index 16b5b8b652b..2c4b95302fc 100644 --- a/tests/helpers/test_system_info.py +++ b/tests/helpers/test_system_info.py @@ -93,10 +93,9 @@ async def test_container_installationtype(hass: HomeAssistant) -> None: assert info["installation_type"] == "Unsupported Third Party Container" -async def test_getuser_keyerror(hass: HomeAssistant) -> None: - """Test getuser keyerror.""" - with patch( - "homeassistant.helpers.system_info.cached_get_user", side_effect=KeyError - ): +@pytest.mark.parametrize("error", [KeyError, OSError]) +async def test_getuser_oserror(hass: HomeAssistant, error: Exception) -> None: + """Test getuser oserror.""" + with patch("homeassistant.helpers.system_info.cached_get_user", side_effect=error): info = await async_get_system_info(hass) assert info["user"] is None From 20e09132867a103d231cfd420321b15450f7f754 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Fri, 6 Dec 2024 16:58:09 +0100 Subject: [PATCH 0309/1198] Update frontend to 20241127.6 (#132494) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index b8033f3f1fd..e68b9312081 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20241127.5"] + "requirements": ["home-assistant-frontend==20241127.6"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index cf23e058d78..34974b5e146 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -34,7 +34,7 @@ habluetooth==3.6.0 hass-nabucasa==0.86.0 hassil==2.0.5 home-assistant-bluetooth==1.13.0 -home-assistant-frontend==20241127.5 +home-assistant-frontend==20241127.6 home-assistant-intents==2024.12.4 httpx==0.27.2 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 2fdfbea31ad..4185c4be60c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1127,7 +1127,7 @@ hole==0.8.0 holidays==0.62 # homeassistant.components.frontend -home-assistant-frontend==20241127.5 +home-assistant-frontend==20241127.6 # homeassistant.components.conversation home-assistant-intents==2024.12.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index ea5d92841fd..46d84f17fe0 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -953,7 +953,7 @@ hole==0.8.0 holidays==0.62 # homeassistant.components.frontend -home-assistant-frontend==20241127.5 +home-assistant-frontend==20241127.6 # homeassistant.components.conversation 
home-assistant-intents==2024.12.4 From 7630ea4f096ebe660f2e0a4b218a9bf07ff5eedd Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Fri, 6 Dec 2024 07:58:48 -0800 Subject: [PATCH 0310/1198] Fix google tasks due date timezone handling (#132498) --- homeassistant/components/google_tasks/todo.py | 10 +++-- .../google_tasks/snapshots/test_todo.ambr | 31 ++++++++++++++- tests/components/google_tasks/test_todo.py | 38 ++++++++++++++++++- 3 files changed, 73 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/google_tasks/todo.py b/homeassistant/components/google_tasks/todo.py index 5196f89728d..86cb5e09300 100644 --- a/homeassistant/components/google_tasks/todo.py +++ b/homeassistant/components/google_tasks/todo.py @@ -2,7 +2,7 @@ from __future__ import annotations -from datetime import date, datetime, timedelta +from datetime import UTC, date, datetime, timedelta from typing import Any, cast from homeassistant.components.todo import ( @@ -39,8 +39,10 @@ def _convert_todo_item(item: TodoItem) -> dict[str, str | None]: else: result["status"] = TodoItemStatus.NEEDS_ACTION if (due := item.due) is not None: - # due API field is a timestamp string, but with only date resolution - result["due"] = dt_util.start_of_local_day(due).isoformat() + # due API field is a timestamp string, but with only date resolution. + # The time portion of the date is always discarded by the API, so we + # always set to UTC. + result["due"] = dt_util.start_of_local_day(due).replace(tzinfo=UTC).isoformat() else: result["due"] = None result["notes"] = item.description @@ -51,6 +53,8 @@ def _convert_api_item(item: dict[str, str]) -> TodoItem: """Convert tasks API items into a TodoItem.""" due: date | None = None if (due_str := item.get("due")) is not None: + # Due dates are returned always in UTC so we only need to + # parse the date portion which will be interpreted as a a local date. 
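The change above pins the serialized due date to UTC because the Tasks API keeps only the date portion of the due timestamp, and the line below reads back only that date. A minimal stdlib-only sketch of the round trip, illustrative only and using an assumed example timezone; the real integration uses the dt_util.start_of_local_day helper shown in the diff:

    from datetime import UTC, date, datetime
    from zoneinfo import ZoneInfo

    LOCAL_TZ = ZoneInfo("America/Regina")  # assumed example timezone

    def serialize_due(due: date) -> str:
        # Mirror _convert_todo_item: local midnight, then pinned to UTC so the
        # API (which discards the time portion) cannot shift the calendar day.
        local_midnight = datetime(due.year, due.month, due.day, tzinfo=LOCAL_TZ)
        return local_midnight.replace(tzinfo=UTC).isoformat()

    def parse_due(due_str: str) -> date:
        # Mirror _convert_api_item: the API answers in UTC, keep the date only.
        return datetime.fromisoformat(due_str).date()

    assert serialize_due(date(2024, 12, 5)) == "2024-12-05T00:00:00+00:00"
    assert parse_due("2024-12-05T00:00:00+00:00") == date(2024, 12, 5)
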
due = datetime.fromisoformat(due_str).date() return TodoItem( summary=item["title"], diff --git a/tests/components/google_tasks/snapshots/test_todo.ambr b/tests/components/google_tasks/snapshots/test_todo.ambr index 76611ba4a31..f32441354fc 100644 --- a/tests/components/google_tasks/snapshots/test_todo.ambr +++ b/tests/components/google_tasks/snapshots/test_todo.ambr @@ -15,7 +15,7 @@ ) # --- # name: test_create_todo_list_item[due].1 - '{"title": "Soda", "status": "needsAction", "due": "2023-11-18T00:00:00-08:00", "notes": null}' + '{"title": "Soda", "status": "needsAction", "due": "2023-11-18T00:00:00+00:00", "notes": null}' # --- # name: test_create_todo_list_item[summary] tuple( @@ -137,7 +137,7 @@ ) # --- # name: test_partial_update[due_date].1 - '{"title": "Water", "status": "needsAction", "due": "2023-11-18T00:00:00-08:00", "notes": null}' + '{"title": "Water", "status": "needsAction", "due": "2023-11-18T00:00:00+00:00", "notes": null}' # --- # name: test_partial_update[empty_description] tuple( @@ -166,6 +166,33 @@ # name: test_partial_update_status[api_responses0].1 '{"title": "Water", "status": "needsAction", "due": null, "notes": null}' # --- +# name: test_update_due_date[api_responses0-America/Regina] + tuple( + 'https://tasks.googleapis.com/tasks/v1/lists/task-list-id-1/tasks/some-task-id?alt=json', + 'PATCH', + ) +# --- +# name: test_update_due_date[api_responses0-America/Regina].1 + '{"title": "Water", "status": "needsAction", "due": "2024-12-05T00:00:00+00:00", "notes": null}' +# --- +# name: test_update_due_date[api_responses0-Asia/Tokyo] + tuple( + 'https://tasks.googleapis.com/tasks/v1/lists/task-list-id-1/tasks/some-task-id?alt=json', + 'PATCH', + ) +# --- +# name: test_update_due_date[api_responses0-Asia/Tokyo].1 + '{"title": "Water", "status": "needsAction", "due": "2024-12-05T00:00:00+00:00", "notes": null}' +# --- +# name: test_update_due_date[api_responses0-UTC] + tuple( + 'https://tasks.googleapis.com/tasks/v1/lists/task-list-id-1/tasks/some-task-id?alt=json', + 'PATCH', + ) +# --- +# name: test_update_due_date[api_responses0-UTC].1 + '{"title": "Water", "status": "needsAction", "due": "2024-12-05T00:00:00+00:00", "notes": null}' +# --- # name: test_update_todo_list_item[api_responses0] tuple( 'https://tasks.googleapis.com/tasks/v1/lists/task-list-id-1/tasks/some-task-id?alt=json', diff --git a/tests/components/google_tasks/test_todo.py b/tests/components/google_tasks/test_todo.py index b0ee135d4a9..c5ecc0ca2cf 100644 --- a/tests/components/google_tasks/test_todo.py +++ b/tests/components/google_tasks/test_todo.py @@ -239,6 +239,7 @@ def mock_http_response(response_handler: list | Callable) -> Mock: yield mock_response +@pytest.mark.parametrize("timezone", ["America/Regina", "UTC", "Asia/Tokyo"]) @pytest.mark.parametrize( "api_responses", [ @@ -251,7 +252,7 @@ def mock_http_response(response_handler: list | Callable) -> Mock: "title": "Task 1", "status": "needsAction", "position": "0000000000000001", - "due": "2023-11-18T00:00:00+00:00", + "due": "2023-11-18T00:00:00Z", }, { "id": "task-2", @@ -271,8 +272,10 @@ async def test_get_items( integration_setup: Callable[[], Awaitable[bool]], hass_ws_client: WebSocketGenerator, ws_get_items: Callable[[], Awaitable[dict[str, str]]], + timezone: str, ) -> None: """Test getting todo list items.""" + await hass.config.async_set_time_zone(timezone) assert await integration_setup() @@ -484,6 +487,39 @@ async def test_update_todo_list_item( assert call.kwargs.get("body") == snapshot +@pytest.mark.parametrize("timezone", 
["America/Regina", "UTC", "Asia/Tokyo"]) +@pytest.mark.parametrize("api_responses", [UPDATE_API_RESPONSES]) +async def test_update_due_date( + hass: HomeAssistant, + setup_credentials: None, + integration_setup: Callable[[], Awaitable[bool]], + mock_http_response: Any, + snapshot: SnapshotAssertion, + timezone: str, +) -> None: + """Test for updating the due date of a To-do item and timezone.""" + await hass.config.async_set_time_zone(timezone) + + assert await integration_setup() + + state = hass.states.get("todo.my_tasks") + assert state + assert state.state == "1" + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "some-task-id", ATTR_DUE_DATE: "2024-12-5"}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, + blocking=True, + ) + assert len(mock_http_response.call_args_list) == 4 + call = mock_http_response.call_args_list[2] + assert call + assert call.args == snapshot + assert call.kwargs.get("body") == snapshot + + @pytest.mark.parametrize( "api_responses", [ From 4de179c4c115a4b64f33bdfe534ea2d8be51eb00 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 6 Dec 2024 18:43:13 +0100 Subject: [PATCH 0311/1198] Bump codecov/codecov-action from 5.0.7 to 5.1.1 (#132455) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Martin Hjelmare --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 43bdc7a671b..9d6f207382d 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1273,7 +1273,7 @@ jobs: pattern: coverage-* - name: Upload coverage to Codecov if: needs.info.outputs.test_full_suite == 'true' - uses: codecov/codecov-action@v5.0.7 + uses: codecov/codecov-action@v5.1.1 with: fail_ci_if_error: true flags: full-suite @@ -1411,7 +1411,7 @@ jobs: pattern: coverage-* - name: Upload coverage to Codecov if: needs.info.outputs.test_full_suite == 'false' - uses: codecov/codecov-action@v5.0.7 + uses: codecov/codecov-action@v5.1.1 with: fail_ci_if_error: true token: ${{ secrets.CODECOV_TOKEN }} From 49621aedb0a95c4ac0c0edc3aa28e9774b3cd294 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Fri, 6 Dec 2024 20:22:48 +0100 Subject: [PATCH 0312/1198] Set parallel updates in Bring integration (#132504) --- homeassistant/components/bring/quality_scale.yaml | 2 +- homeassistant/components/bring/todo.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/bring/quality_scale.yaml b/homeassistant/components/bring/quality_scale.yaml index b99c1ed24a9..5d47a3577cc 100644 --- a/homeassistant/components/bring/quality_scale.yaml +++ b/homeassistant/components/bring/quality_scale.yaml @@ -35,7 +35,7 @@ rules: log-when-unavailable: status: done comment: handled by coordinator - parallel-updates: todo + parallel-updates: done reauthentication-flow: done test-coverage: done diff --git a/homeassistant/components/bring/todo.py b/homeassistant/components/bring/todo.py index 319aedc6b80..c53b5788b68 100644 --- a/homeassistant/components/bring/todo.py +++ b/homeassistant/components/bring/todo.py @@ -34,6 +34,8 @@ from .const import ( from .coordinator import BringData, BringDataUpdateCoordinator from .entity import BringBaseEntity +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, From 3c06fe1e21557cb7a46dd1def0d22c0cff33e144 Mon Sep 17 00:00:00 2001 From: epenet 
<6771947+epenet@users.noreply.github.com> Date: Fri, 6 Dec 2024 20:25:17 +0100 Subject: [PATCH 0313/1198] Move light constants to separate module (#132473) --- homeassistant/components/light/__init__.py | 67 ++++-------------- homeassistant/components/light/const.py | 68 +++++++++++++++++++ .../components/light/device_action.py | 3 +- .../components/light/device_condition.py | 2 +- .../components/light/device_trigger.py | 2 +- homeassistant/components/light/intent.py | 3 +- .../components/light/reproduce_state.py | 3 +- 7 files changed, 86 insertions(+), 62 deletions(-) create mode 100644 homeassistant/components/light/const.py diff --git a/homeassistant/components/light/__init__.py b/homeassistant/components/light/__init__.py index 1a848232128..60ea34cc754 100644 --- a/homeassistant/components/light/__init__.py +++ b/homeassistant/components/light/__init__.py @@ -5,8 +5,6 @@ from __future__ import annotations from collections.abc import Iterable import csv import dataclasses -from datetime import timedelta -from enum import IntFlag, StrEnum from functools import partial import logging import os @@ -37,24 +35,22 @@ from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType, VolDictType from homeassistant.loader import bind_hass import homeassistant.util.color as color_util -from homeassistant.util.hass_dict import HassKey -DOMAIN = "light" -DATA_COMPONENT: HassKey[EntityComponent[LightEntity]] = HassKey(DOMAIN) +from .const import ( # noqa: F401 + COLOR_MODES_BRIGHTNESS, + COLOR_MODES_COLOR, + DATA_COMPONENT, + DATA_PROFILES, + DOMAIN, + SCAN_INTERVAL, + VALID_COLOR_MODES, + ColorMode, + LightEntityFeature, +) + ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE -SCAN_INTERVAL = timedelta(seconds=30) - -DATA_PROFILES: HassKey[Profiles] = HassKey(f"{DOMAIN}_profiles") - - -class LightEntityFeature(IntFlag): - """Supported features of the light entity.""" - - EFFECT = 4 - FLASH = 8 - TRANSITION = 32 # These SUPPORT_* constants are deprecated as of Home Assistant 2022.5. @@ -83,26 +79,6 @@ ATTR_COLOR_MODE = "color_mode" # List of color modes supported by the light ATTR_SUPPORTED_COLOR_MODES = "supported_color_modes" - -class ColorMode(StrEnum): - """Possible light color modes.""" - - UNKNOWN = "unknown" - """Ambiguous color mode""" - ONOFF = "onoff" - """Must be the only supported mode""" - BRIGHTNESS = "brightness" - """Must be the only supported mode""" - COLOR_TEMP = "color_temp" - HS = "hs" - XY = "xy" - RGB = "rgb" - RGBW = "rgbw" - RGBWW = "rgbww" - WHITE = "white" - """Must *NOT* be the only supported mode""" - - # These COLOR_MODE_* constants are deprecated as of Home Assistant 2022.5. # Please use the LightEntityFeature enum instead. 
_DEPRECATED_COLOR_MODE_UNKNOWN: Final = DeprecatedConstantEnum( @@ -122,25 +98,6 @@ _DEPRECATED_COLOR_MODE_RGBW: Final = DeprecatedConstantEnum(ColorMode.RGBW, "202 _DEPRECATED_COLOR_MODE_RGBWW: Final = DeprecatedConstantEnum(ColorMode.RGBWW, "2026.1") _DEPRECATED_COLOR_MODE_WHITE: Final = DeprecatedConstantEnum(ColorMode.WHITE, "2026.1") -VALID_COLOR_MODES = { - ColorMode.ONOFF, - ColorMode.BRIGHTNESS, - ColorMode.COLOR_TEMP, - ColorMode.HS, - ColorMode.XY, - ColorMode.RGB, - ColorMode.RGBW, - ColorMode.RGBWW, - ColorMode.WHITE, -} -COLOR_MODES_BRIGHTNESS = VALID_COLOR_MODES - {ColorMode.ONOFF} -COLOR_MODES_COLOR = { - ColorMode.HS, - ColorMode.RGB, - ColorMode.RGBW, - ColorMode.RGBWW, - ColorMode.XY, -} # mypy: disallow-any-generics diff --git a/homeassistant/components/light/const.py b/homeassistant/components/light/const.py new file mode 100644 index 00000000000..19b8734038e --- /dev/null +++ b/homeassistant/components/light/const.py @@ -0,0 +1,68 @@ +"""Provides constants for lights.""" + +from __future__ import annotations + +from datetime import timedelta +from enum import IntFlag, StrEnum +from typing import TYPE_CHECKING + +from homeassistant.util.hass_dict import HassKey + +if TYPE_CHECKING: + from homeassistant.helpers.entity_component import EntityComponent + + from . import LightEntity, Profiles + +DOMAIN = "light" +DATA_COMPONENT: HassKey[EntityComponent[LightEntity]] = HassKey(DOMAIN) +SCAN_INTERVAL = timedelta(seconds=30) + +DATA_PROFILES: HassKey[Profiles] = HassKey(f"{DOMAIN}_profiles") + + +class LightEntityFeature(IntFlag): + """Supported features of the light entity.""" + + EFFECT = 4 + FLASH = 8 + TRANSITION = 32 + + +class ColorMode(StrEnum): + """Possible light color modes.""" + + UNKNOWN = "unknown" + """Ambiguous color mode""" + ONOFF = "onoff" + """Must be the only supported mode""" + BRIGHTNESS = "brightness" + """Must be the only supported mode""" + COLOR_TEMP = "color_temp" + HS = "hs" + XY = "xy" + RGB = "rgb" + RGBW = "rgbw" + RGBWW = "rgbww" + WHITE = "white" + """Must *NOT* be the only supported mode""" + + +VALID_COLOR_MODES = { + ColorMode.ONOFF, + ColorMode.BRIGHTNESS, + ColorMode.COLOR_TEMP, + ColorMode.HS, + ColorMode.XY, + ColorMode.RGB, + ColorMode.RGBW, + ColorMode.RGBWW, + ColorMode.WHITE, +} +COLOR_MODES_BRIGHTNESS = VALID_COLOR_MODES - {ColorMode.ONOFF} +COLOR_MODES_COLOR = { + ColorMode.HS, + ColorMode.RGB, + ColorMode.RGBW, + ColorMode.RGBWW, + ColorMode.XY, +} diff --git a/homeassistant/components/light/device_action.py b/homeassistant/components/light/device_action.py index 45e9731c5b8..56bf7485e68 100644 --- a/homeassistant/components/light/device_action.py +++ b/homeassistant/components/light/device_action.py @@ -27,14 +27,13 @@ from . import ( ATTR_BRIGHTNESS_PCT, ATTR_BRIGHTNESS_STEP_PCT, ATTR_FLASH, - DOMAIN, FLASH_SHORT, VALID_BRIGHTNESS_PCT, VALID_FLASH, - LightEntityFeature, brightness_supported, get_supported_color_modes, ) +from .const import DOMAIN, LightEntityFeature # mypy: disallow-any-generics diff --git a/homeassistant/components/light/device_condition.py b/homeassistant/components/light/device_condition.py index f9bb7c30bd7..6dc702f8551 100644 --- a/homeassistant/components/light/device_condition.py +++ b/homeassistant/components/light/device_condition.py @@ -10,7 +10,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.condition import ConditionCheckerType from homeassistant.helpers.typing import ConfigType -from . 
import DOMAIN +from .const import DOMAIN # mypy: disallow-any-generics diff --git a/homeassistant/components/light/device_trigger.py b/homeassistant/components/light/device_trigger.py index 033ea75357e..1f6bfdbe6e9 100644 --- a/homeassistant/components/light/device_trigger.py +++ b/homeassistant/components/light/device_trigger.py @@ -10,7 +10,7 @@ from homeassistant.core import CALLBACK_TYPE, HomeAssistant from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo from homeassistant.helpers.typing import ConfigType -from . import DOMAIN +from .const import DOMAIN TRIGGER_SCHEMA = vol.All( toggle_entity.TRIGGER_SCHEMA, diff --git a/homeassistant/components/light/intent.py b/homeassistant/components/light/intent.py index 458dbbde770..e496255029a 100644 --- a/homeassistant/components/light/intent.py +++ b/homeassistant/components/light/intent.py @@ -11,7 +11,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv, intent import homeassistant.util.color as color_util -from . import ATTR_BRIGHTNESS_PCT, ATTR_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, DOMAIN +from . import ATTR_BRIGHTNESS_PCT, ATTR_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR +from .const import DOMAIN _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/light/reproduce_state.py b/homeassistant/components/light/reproduce_state.py index 4024f2f84ba..c933b517ccc 100644 --- a/homeassistant/components/light/reproduce_state.py +++ b/homeassistant/components/light/reproduce_state.py @@ -28,9 +28,8 @@ from . import ( ATTR_TRANSITION, ATTR_WHITE, ATTR_XY_COLOR, - DOMAIN, - ColorMode, ) +from .const import DOMAIN, ColorMode _LOGGER = logging.getLogger(__name__) From 23461d2cfd0ee3daebea2cf7ea21c7976b927cac Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 6 Dec 2024 20:26:50 +0100 Subject: [PATCH 0314/1198] Add tests for media player support_* properties (#132458) --- tests/components/media_player/test_init.py | 40 ++++++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/tests/components/media_player/test_init.py b/tests/components/media_player/test_init.py index 47f0530f0ff..a45fa5b6668 100644 --- a/tests/components/media_player/test_init.py +++ b/tests/components/media_player/test_init.py @@ -97,6 +97,46 @@ def test_deprecated_constants_const( ) +@pytest.mark.parametrize( + "property_suffix", + [ + "play", + "pause", + "stop", + "seek", + "volume_set", + "volume_mute", + "previous_track", + "next_track", + "play_media", + "select_source", + "select_sound_mode", + "clear_playlist", + "shuffle_set", + "grouping", + ], +) +def test_support_properties(property_suffix: str) -> None: + """Test support_*** properties explicitly.""" + + all_features = media_player.MediaPlayerEntityFeature(653887) + feature = media_player.MediaPlayerEntityFeature[property_suffix.upper()] + + entity1 = MediaPlayerEntity() + entity1._attr_supported_features = media_player.MediaPlayerEntityFeature(0) + entity2 = MediaPlayerEntity() + entity2._attr_supported_features = all_features + entity3 = MediaPlayerEntity() + entity3._attr_supported_features = feature + entity4 = MediaPlayerEntity() + entity4._attr_supported_features = all_features - feature + + assert getattr(entity1, f"support_{property_suffix}") is False + assert getattr(entity2, f"support_{property_suffix}") is True + assert getattr(entity3, f"support_{property_suffix}") is True + assert getattr(entity4, f"support_{property_suffix}") is False + + async def test_get_image_http( hass: 
HomeAssistant, hass_client_no_auth: ClientSessionGenerator ) -> None: From 1f8913d6cd45d3ef9992a6fafc33ea88a4dd8173 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 6 Dec 2024 20:29:30 +0100 Subject: [PATCH 0315/1198] Remove deprecated supported features warning in LightEntity (#132371) --- homeassistant/components/light/__init__.py | 81 +---- tests/components/light/common.py | 3 +- tests/components/light/test_init.py | 347 +++------------------ 3 files changed, 50 insertions(+), 381 deletions(-) diff --git a/homeassistant/components/light/__init__.py b/homeassistant/components/light/__init__.py index 60ea34cc754..121732c918f 100644 --- a/homeassistant/components/light/__init__.py +++ b/homeassistant/components/light/__init__.py @@ -331,7 +331,7 @@ def filter_turn_off_params( if not params: return params - supported_features = light.supported_features_compat + supported_features = light.supported_features if LightEntityFeature.FLASH not in supported_features: params.pop(ATTR_FLASH, None) @@ -343,7 +343,7 @@ def filter_turn_off_params( def filter_turn_on_params(light: LightEntity, params: dict[str, Any]) -> dict[str, Any]: """Filter out params not supported by the light.""" - supported_features = light.supported_features_compat + supported_features = light.supported_features if LightEntityFeature.EFFECT not in supported_features: params.pop(ATTR_EFFECT, None) @@ -1006,7 +1006,7 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): def capability_attributes(self) -> dict[str, Any]: """Return capability attributes.""" data: dict[str, Any] = {} - supported_features = self.supported_features_compat + supported_features = self.supported_features supported_color_modes = self._light_internal_supported_color_modes if ColorMode.COLOR_TEMP in supported_color_modes: @@ -1168,12 +1168,11 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): def state_attributes(self) -> dict[str, Any] | None: """Return state attributes.""" data: dict[str, Any] = {} - supported_features = self.supported_features_compat + supported_features = self.supported_features supported_color_modes = self.supported_color_modes legacy_supported_color_modes = ( supported_color_modes or self._light_internal_supported_color_modes ) - supported_features_value = supported_features.value _is_on = self.is_on color_mode = self._light_internal_color_mode if _is_on else None @@ -1192,13 +1191,6 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): data[ATTR_BRIGHTNESS] = self.brightness else: data[ATTR_BRIGHTNESS] = None - elif supported_features_value & _DEPRECATED_SUPPORT_BRIGHTNESS.value: - # Backwards compatibility for ambiguous / incomplete states - # Warning is printed by supported_features_compat, remove in 2025.1 - if _is_on: - data[ATTR_BRIGHTNESS] = self.brightness - else: - data[ATTR_BRIGHTNESS] = None if color_temp_supported(supported_color_modes): if color_mode == ColorMode.COLOR_TEMP: @@ -1213,21 +1205,6 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): else: data[ATTR_COLOR_TEMP_KELVIN] = None data[ATTR_COLOR_TEMP] = None - elif supported_features_value & _DEPRECATED_SUPPORT_COLOR_TEMP.value: - # Backwards compatibility - # Warning is printed by supported_features_compat, remove in 2025.1 - if _is_on: - color_temp_kelvin = self.color_temp_kelvin - data[ATTR_COLOR_TEMP_KELVIN] = color_temp_kelvin - if color_temp_kelvin: - data[ATTR_COLOR_TEMP] = ( - 
color_util.color_temperature_kelvin_to_mired(color_temp_kelvin) - ) - else: - data[ATTR_COLOR_TEMP] = None - else: - data[ATTR_COLOR_TEMP_KELVIN] = None - data[ATTR_COLOR_TEMP] = None if color_supported(legacy_supported_color_modes) or color_temp_supported( legacy_supported_color_modes @@ -1265,24 +1242,7 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): type(self), report_issue, ) - supported_features = self.supported_features_compat - supported_features_value = supported_features.value - supported_color_modes: set[ColorMode] = set() - - if supported_features_value & _DEPRECATED_SUPPORT_COLOR_TEMP.value: - supported_color_modes.add(ColorMode.COLOR_TEMP) - if supported_features_value & _DEPRECATED_SUPPORT_COLOR.value: - supported_color_modes.add(ColorMode.HS) - if ( - not supported_color_modes - and supported_features_value & _DEPRECATED_SUPPORT_BRIGHTNESS.value - ): - supported_color_modes = {ColorMode.BRIGHTNESS} - - if not supported_color_modes: - supported_color_modes = {ColorMode.ONOFF} - - return supported_color_modes + return {ColorMode.ONOFF} @cached_property def supported_color_modes(self) -> set[ColorMode] | set[str] | None: @@ -1294,37 +1254,6 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): """Flag supported features.""" return self._attr_supported_features - @property - def supported_features_compat(self) -> LightEntityFeature: - """Return the supported features as LightEntityFeature. - - Remove this compatibility shim in 2025.1 or later. - """ - features = self.supported_features - if type(features) is not int: # noqa: E721 - return features - new_features = LightEntityFeature(features) - if self._deprecated_supported_features_reported is True: - return new_features - self._deprecated_supported_features_reported = True - report_issue = self._suggest_report_issue() - report_issue += ( - " and reference " - "https://developers.home-assistant.io/blog/2023/12/28/support-feature-magic-numbers-deprecation" - ) - _LOGGER.warning( - ( - "Entity %s (%s) is using deprecated supported features" - " values which will be removed in HA Core 2025.1. 
Instead it should use" - " %s and color modes, please %s" - ), - self.entity_id, - type(self), - repr(new_features), - report_issue, - ) - return new_features - def __should_report_light_issue(self) -> bool: """Return if light color mode issues should be reported.""" if not self.platform: diff --git a/tests/components/light/common.py b/tests/components/light/common.py index ba095a03642..147f2336876 100644 --- a/tests/components/light/common.py +++ b/tests/components/light/common.py @@ -25,6 +25,7 @@ from homeassistant.components.light import ( DOMAIN, ColorMode, LightEntity, + LightEntityFeature, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -251,7 +252,7 @@ class MockLight(MockToggleEntity, LightEntity): _attr_max_color_temp_kelvin = 6500 _attr_min_color_temp_kelvin = 2000 - supported_features = 0 + supported_features = LightEntityFeature(0) brightness = None color_temp_kelvin = None diff --git a/tests/components/light/test_init.py b/tests/components/light/test_init.py index 280ec569d4d..bf09774073b 100644 --- a/tests/components/light/test_init.py +++ b/tests/components/light/test_init.py @@ -1,7 +1,6 @@ """The tests for the Light component.""" from types import ModuleType -from typing import Literal from unittest.mock import MagicMock, mock_open, patch import pytest @@ -137,13 +136,8 @@ async def test_services( ent3.supported_color_modes = [light.ColorMode.HS] ent1.supported_features = light.LightEntityFeature.TRANSITION ent2.supported_features = ( - light.SUPPORT_COLOR - | light.LightEntityFeature.EFFECT - | light.LightEntityFeature.TRANSITION + light.LightEntityFeature.EFFECT | light.LightEntityFeature.TRANSITION ) - # Set color modes to none to trigger backwards compatibility in LightEntity - ent2.supported_color_modes = None - ent2.color_mode = None ent3.supported_features = ( light.LightEntityFeature.FLASH | light.LightEntityFeature.TRANSITION ) @@ -259,10 +253,7 @@ async def test_services( } _, data = ent2.last_call("turn_on") - assert data == { - light.ATTR_EFFECT: "fun_effect", - light.ATTR_HS_COLOR: (0, 0), - } + assert data == {light.ATTR_EFFECT: "fun_effect"} _, data = ent3.last_call("turn_on") assert data == {light.ATTR_FLASH: "short", light.ATTR_HS_COLOR: (71.059, 100)} @@ -346,8 +337,6 @@ async def test_services( _, data = ent2.last_call("turn_on") assert data == { - light.ATTR_BRIGHTNESS: 100, - light.ATTR_HS_COLOR: profile.hs_color, light.ATTR_TRANSITION: 1, } @@ -925,16 +914,12 @@ async def test_light_brightness_step(hass: HomeAssistant) -> None: setup_test_component_platform(hass, light.DOMAIN, entities) entity0 = entities[0] - entity0.supported_features = light.SUPPORT_BRIGHTNESS - # Set color modes to none to trigger backwards compatibility in LightEntity - entity0.supported_color_modes = None - entity0.color_mode = None + entity0.supported_color_modes = {light.ColorMode.BRIGHTNESS} + entity0.color_mode = light.ColorMode.BRIGHTNESS entity0.brightness = 100 entity1 = entities[1] - entity1.supported_features = light.SUPPORT_BRIGHTNESS - # Set color modes to none to trigger backwards compatibility in LightEntity - entity1.supported_color_modes = None - entity1.color_mode = None + entity1.supported_color_modes = {light.ColorMode.BRIGHTNESS} + entity1.color_mode = light.ColorMode.BRIGHTNESS entity1.brightness = 50 assert await async_setup_component(hass, "light", {"light": {"platform": "test"}}) await hass.async_block_till_done() @@ -995,10 +980,8 @@ async def test_light_brightness_pct_conversion( setup_test_component_platform(hass, light.DOMAIN, 
mock_light_entities) entity = mock_light_entities[0] - entity.supported_features = light.SUPPORT_BRIGHTNESS - # Set color modes to none to trigger backwards compatibility in LightEntity - entity.supported_color_modes = None - entity.color_mode = None + entity.supported_color_modes = {light.ColorMode.BRIGHTNESS} + entity.color_mode = light.ColorMode.BRIGHTNESS entity.brightness = 100 assert await async_setup_component(hass, "light", {"light": {"platform": "test"}}) await hass.async_block_till_done() @@ -1147,167 +1130,6 @@ invalid_no_brightness_no_color_no_transition,,, assert invalid_profile_name not in profiles.data -@pytest.mark.parametrize("light_state", [STATE_ON, STATE_OFF]) -async def test_light_backwards_compatibility_supported_color_modes( - hass: HomeAssistant, light_state: Literal["on", "off"] -) -> None: - """Test supported_color_modes if not implemented by the entity.""" - entities = [ - MockLight("Test_0", light_state), - MockLight("Test_1", light_state), - MockLight("Test_2", light_state), - MockLight("Test_3", light_state), - MockLight("Test_4", light_state), - ] - - entity0 = entities[0] - - entity1 = entities[1] - entity1.supported_features = light.SUPPORT_BRIGHTNESS - # Set color modes to none to trigger backwards compatibility in LightEntity - entity1.supported_color_modes = None - entity1.color_mode = None - - entity2 = entities[2] - entity2.supported_features = light.SUPPORT_BRIGHTNESS | light.SUPPORT_COLOR_TEMP - # Set color modes to none to trigger backwards compatibility in LightEntity - entity2.supported_color_modes = None - entity2.color_mode = None - - entity3 = entities[3] - entity3.supported_features = light.SUPPORT_BRIGHTNESS | light.SUPPORT_COLOR - # Set color modes to none to trigger backwards compatibility in LightEntity - entity3.supported_color_modes = None - entity3.color_mode = None - - entity4 = entities[4] - entity4.supported_features = ( - light.SUPPORT_BRIGHTNESS | light.SUPPORT_COLOR | light.SUPPORT_COLOR_TEMP - ) - # Set color modes to none to trigger backwards compatibility in LightEntity - entity4.supported_color_modes = None - entity4.color_mode = None - - setup_test_component_platform(hass, light.DOMAIN, entities) - - assert await async_setup_component(hass, "light", {"light": {"platform": "test"}}) - await hass.async_block_till_done() - - state = hass.states.get(entity0.entity_id) - assert state.attributes["supported_color_modes"] == [light.ColorMode.ONOFF] - if light_state == STATE_OFF: - assert state.attributes["color_mode"] is None - else: - assert state.attributes["color_mode"] == light.ColorMode.ONOFF - - state = hass.states.get(entity1.entity_id) - assert state.attributes["supported_color_modes"] == [light.ColorMode.BRIGHTNESS] - if light_state == STATE_OFF: - assert state.attributes["color_mode"] is None - else: - assert state.attributes["color_mode"] == light.ColorMode.UNKNOWN - - state = hass.states.get(entity2.entity_id) - assert state.attributes["supported_color_modes"] == [light.ColorMode.COLOR_TEMP] - if light_state == STATE_OFF: - assert state.attributes["color_mode"] is None - else: - assert state.attributes["color_mode"] == light.ColorMode.UNKNOWN - - state = hass.states.get(entity3.entity_id) - assert state.attributes["supported_color_modes"] == [light.ColorMode.HS] - if light_state == STATE_OFF: - assert state.attributes["color_mode"] is None - else: - assert state.attributes["color_mode"] == light.ColorMode.UNKNOWN - - state = hass.states.get(entity4.entity_id) - assert state.attributes["supported_color_modes"] == [ - 
light.ColorMode.COLOR_TEMP, - light.ColorMode.HS, - ] - if light_state == STATE_OFF: - assert state.attributes["color_mode"] is None - else: - assert state.attributes["color_mode"] == light.ColorMode.UNKNOWN - - -async def test_light_backwards_compatibility_color_mode(hass: HomeAssistant) -> None: - """Test color_mode if not implemented by the entity.""" - entities = [ - MockLight("Test_0", STATE_ON), - MockLight("Test_1", STATE_ON), - MockLight("Test_2", STATE_ON), - MockLight("Test_3", STATE_ON), - MockLight("Test_4", STATE_ON), - ] - - entity0 = entities[0] - - entity1 = entities[1] - entity1.supported_features = light.SUPPORT_BRIGHTNESS - # Set color modes to none to trigger backwards compatibility in LightEntity - entity1.supported_color_modes = None - entity1.color_mode = None - entity1.brightness = 100 - - entity2 = entities[2] - entity2.supported_features = light.SUPPORT_BRIGHTNESS | light.SUPPORT_COLOR_TEMP - # Set color modes to none to trigger backwards compatibility in LightEntity - entity2.supported_color_modes = None - entity2.color_mode = None - entity2.color_temp_kelvin = 10000 - - entity3 = entities[3] - entity3.supported_features = light.SUPPORT_BRIGHTNESS | light.SUPPORT_COLOR - # Set color modes to none to trigger backwards compatibility in LightEntity - entity3.supported_color_modes = None - entity3.color_mode = None - entity3.hs_color = (240, 100) - - entity4 = entities[4] - entity4.supported_features = ( - light.SUPPORT_BRIGHTNESS | light.SUPPORT_COLOR | light.SUPPORT_COLOR_TEMP - ) - # Set color modes to none to trigger backwards compatibility in LightEntity - entity4.supported_color_modes = None - entity4.color_mode = None - entity4.hs_color = (240, 100) - entity4.color_temp_kelvin = 10000 - - setup_test_component_platform(hass, light.DOMAIN, entities) - - assert await async_setup_component(hass, "light", {"light": {"platform": "test"}}) - await hass.async_block_till_done() - - state = hass.states.get(entity0.entity_id) - assert state.attributes["supported_color_modes"] == [light.ColorMode.ONOFF] - assert state.attributes["color_mode"] == light.ColorMode.ONOFF - - state = hass.states.get(entity1.entity_id) - assert state.attributes["supported_color_modes"] == [light.ColorMode.BRIGHTNESS] - assert state.attributes["color_mode"] == light.ColorMode.BRIGHTNESS - - state = hass.states.get(entity2.entity_id) - assert state.attributes["supported_color_modes"] == [light.ColorMode.COLOR_TEMP] - assert state.attributes["color_mode"] == light.ColorMode.COLOR_TEMP - assert state.attributes["rgb_color"] == (202, 218, 255) - assert state.attributes["hs_color"] == (221.575, 20.9) - assert state.attributes["xy_color"] == (0.278, 0.287) - - state = hass.states.get(entity3.entity_id) - assert state.attributes["supported_color_modes"] == [light.ColorMode.HS] - assert state.attributes["color_mode"] == light.ColorMode.HS - - state = hass.states.get(entity4.entity_id) - assert state.attributes["supported_color_modes"] == [ - light.ColorMode.COLOR_TEMP, - light.ColorMode.HS, - ] - # hs color prioritized over color_temp, light should report mode ColorMode.HS - assert state.attributes["color_mode"] == light.ColorMode.HS - - async def test_light_service_call_rgbw(hass: HomeAssistant) -> None: """Test rgbw functionality in service calls.""" entity0 = MockLight("Test_rgbw", STATE_ON) @@ -1363,7 +1185,7 @@ async def test_light_state_off(hass: HomeAssistant) -> None: "color_mode": None, "friendly_name": "Test_onoff", "supported_color_modes": [light.ColorMode.ONOFF], - "supported_features": 0, + 
"supported_features": light.LightEntityFeature(0), } state = hass.states.get(entity1.entity_id) @@ -1371,7 +1193,7 @@ async def test_light_state_off(hass: HomeAssistant) -> None: "color_mode": None, "friendly_name": "Test_brightness", "supported_color_modes": [light.ColorMode.BRIGHTNESS], - "supported_features": 0, + "supported_features": light.LightEntityFeature(0), "brightness": None, } @@ -1380,7 +1202,7 @@ async def test_light_state_off(hass: HomeAssistant) -> None: "color_mode": None, "friendly_name": "Test_ct", "supported_color_modes": [light.ColorMode.COLOR_TEMP], - "supported_features": 0, + "supported_features": light.LightEntityFeature(0), "brightness": None, "color_temp": None, "color_temp_kelvin": None, @@ -1398,7 +1220,7 @@ async def test_light_state_off(hass: HomeAssistant) -> None: "color_mode": None, "friendly_name": "Test_rgbw", "supported_color_modes": [light.ColorMode.RGBW], - "supported_features": 0, + "supported_features": light.LightEntityFeature(0), "brightness": None, "rgbw_color": None, "hs_color": None, @@ -1429,7 +1251,7 @@ async def test_light_state_rgbw(hass: HomeAssistant) -> None: "color_mode": light.ColorMode.RGBW, "friendly_name": "Test_rgbw", "supported_color_modes": [light.ColorMode.RGBW], - "supported_features": 0, + "supported_features": light.LightEntityFeature(0), "hs_color": (240.0, 25.0), "rgb_color": (3, 3, 4), "rgbw_color": (1, 2, 3, 4), @@ -1460,7 +1282,7 @@ async def test_light_state_rgbww(hass: HomeAssistant) -> None: "color_mode": light.ColorMode.RGBWW, "friendly_name": "Test_rgbww", "supported_color_modes": [light.ColorMode.RGBWW], - "supported_features": 0, + "supported_features": light.LightEntityFeature(0), "hs_color": (60.0, 20.0), "rgb_color": (5, 5, 4), "rgbww_color": (1, 2, 3, 4, 5), @@ -1476,7 +1298,6 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: MockLight("Test_rgb", STATE_ON), MockLight("Test_xy", STATE_ON), MockLight("Test_all", STATE_ON), - MockLight("Test_legacy", STATE_ON), MockLight("Test_rgbw", STATE_ON), MockLight("Test_rgbww", STATE_ON), MockLight("Test_temperature", STATE_ON), @@ -1500,19 +1321,13 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: } entity4 = entities[4] - entity4.supported_features = light.SUPPORT_COLOR - # Set color modes to none to trigger backwards compatibility in LightEntity - entity4.supported_color_modes = None - entity4.color_mode = None + entity4.supported_color_modes = {light.ColorMode.RGBW} entity5 = entities[5] - entity5.supported_color_modes = {light.ColorMode.RGBW} + entity5.supported_color_modes = {light.ColorMode.RGBWW} entity6 = entities[6] - entity6.supported_color_modes = {light.ColorMode.RGBWW} - - entity7 = entities[7] - entity7.supported_color_modes = {light.ColorMode.COLOR_TEMP} + entity6.supported_color_modes = {light.ColorMode.COLOR_TEMP} assert await async_setup_component(hass, "light", {"light": {"platform": "test"}}) await hass.async_block_till_done() @@ -1534,15 +1349,12 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: ] state = hass.states.get(entity4.entity_id) - assert state.attributes["supported_color_modes"] == [light.ColorMode.HS] - - state = hass.states.get(entity5.entity_id) assert state.attributes["supported_color_modes"] == [light.ColorMode.RGBW] - state = hass.states.get(entity6.entity_id) + state = hass.states.get(entity5.entity_id) assert state.attributes["supported_color_modes"] == [light.ColorMode.RGBWW] - state = hass.states.get(entity7.entity_id) + state = 
hass.states.get(entity6.entity_id) assert state.attributes["supported_color_modes"] == [light.ColorMode.COLOR_TEMP] await hass.services.async_call( @@ -1557,7 +1369,6 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: entity4.entity_id, entity5.entity_id, entity6.entity_id, - entity7.entity_id, ], "brightness_pct": 100, "hs_color": (240, 100), @@ -1573,12 +1384,10 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: _, data = entity3.last_call("turn_on") assert data == {"brightness": 255, "hs_color": (240.0, 100.0)} _, data = entity4.last_call("turn_on") - assert data == {"brightness": 255, "hs_color": (240.0, 100.0)} - _, data = entity5.last_call("turn_on") assert data == {"brightness": 255, "rgbw_color": (0, 0, 255, 0)} - _, data = entity6.last_call("turn_on") + _, data = entity5.last_call("turn_on") assert data == {"brightness": 255, "rgbww_color": (0, 0, 255, 0, 0)} - _, data = entity7.last_call("turn_on") + _, data = entity6.last_call("turn_on") assert data == {"brightness": 255, "color_temp_kelvin": 1739, "color_temp": 575} await hass.services.async_call( @@ -1593,7 +1402,6 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: entity4.entity_id, entity5.entity_id, entity6.entity_id, - entity7.entity_id, ], "brightness_pct": 100, "hs_color": (240, 0), @@ -1609,13 +1417,11 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: _, data = entity3.last_call("turn_on") assert data == {"brightness": 255, "hs_color": (240.0, 0.0)} _, data = entity4.last_call("turn_on") - assert data == {"brightness": 255, "hs_color": (240.0, 0.0)} - _, data = entity5.last_call("turn_on") assert data == {"brightness": 255, "rgbw_color": (0, 0, 0, 255)} - _, data = entity6.last_call("turn_on") + _, data = entity5.last_call("turn_on") # The midpoint of the white channels is warm, compensated by adding green + blue assert data == {"brightness": 255, "rgbww_color": (0, 76, 141, 255, 255)} - _, data = entity7.last_call("turn_on") + _, data = entity6.last_call("turn_on") assert data == {"brightness": 255, "color_temp_kelvin": 5962, "color_temp": 167} await hass.services.async_call( @@ -1630,7 +1436,6 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: entity4.entity_id, entity5.entity_id, entity6.entity_id, - entity7.entity_id, ], "brightness_pct": 50, "rgb_color": (128, 0, 0), @@ -1645,13 +1450,12 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: assert data == {"brightness": 128, "xy_color": (0.701, 0.299)} _, data = entity3.last_call("turn_on") assert data == {"brightness": 128, "rgb_color": (128, 0, 0)} + _, data = entity4.last_call("turn_on") - assert data == {"brightness": 128, "hs_color": (0.0, 100.0)} - _, data = entity5.last_call("turn_on") assert data == {"brightness": 128, "rgbw_color": (128, 0, 0, 0)} - _, data = entity6.last_call("turn_on") + _, data = entity5.last_call("turn_on") assert data == {"brightness": 128, "rgbww_color": (128, 0, 0, 0, 0)} - _, data = entity7.last_call("turn_on") + _, data = entity6.last_call("turn_on") assert data == {"brightness": 128, "color_temp_kelvin": 6279, "color_temp": 159} await hass.services.async_call( @@ -1666,7 +1470,6 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: entity4.entity_id, entity5.entity_id, entity6.entity_id, - entity7.entity_id, ], "brightness_pct": 50, "rgb_color": (255, 255, 255), @@ -1682,13 +1485,11 @@ async def 
test_light_service_call_color_conversion(hass: HomeAssistant) -> None: _, data = entity3.last_call("turn_on") assert data == {"brightness": 128, "rgb_color": (255, 255, 255)} _, data = entity4.last_call("turn_on") - assert data == {"brightness": 128, "hs_color": (0.0, 0.0)} - _, data = entity5.last_call("turn_on") assert data == {"brightness": 128, "rgbw_color": (0, 0, 0, 255)} - _, data = entity6.last_call("turn_on") + _, data = entity5.last_call("turn_on") # The midpoint the white channels is warm, compensated by adding green + blue assert data == {"brightness": 128, "rgbww_color": (0, 76, 141, 255, 255)} - _, data = entity7.last_call("turn_on") + _, data = entity6.last_call("turn_on") assert data == {"brightness": 128, "color_temp_kelvin": 5962, "color_temp": 167} await hass.services.async_call( @@ -1703,7 +1504,6 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: entity4.entity_id, entity5.entity_id, entity6.entity_id, - entity7.entity_id, ], "brightness_pct": 50, "xy_color": (0.1, 0.8), @@ -1719,12 +1519,10 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: _, data = entity3.last_call("turn_on") assert data == {"brightness": 128, "xy_color": (0.1, 0.8)} _, data = entity4.last_call("turn_on") - assert data == {"brightness": 128, "hs_color": (125.176, 100.0)} - _, data = entity5.last_call("turn_on") assert data == {"brightness": 128, "rgbw_color": (0, 255, 22, 0)} - _, data = entity6.last_call("turn_on") + _, data = entity5.last_call("turn_on") assert data == {"brightness": 128, "rgbww_color": (0, 255, 22, 0, 0)} - _, data = entity7.last_call("turn_on") + _, data = entity6.last_call("turn_on") assert data == {"brightness": 128, "color_temp_kelvin": 8645, "color_temp": 115} await hass.services.async_call( @@ -1739,7 +1537,6 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: entity4.entity_id, entity5.entity_id, entity6.entity_id, - entity7.entity_id, ], "brightness_pct": 50, "xy_color": (0.323, 0.329), @@ -1755,13 +1552,11 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: _, data = entity3.last_call("turn_on") assert data == {"brightness": 128, "xy_color": (0.323, 0.329)} _, data = entity4.last_call("turn_on") - assert data == {"brightness": 128, "hs_color": (0.0, 0.392)} - _, data = entity5.last_call("turn_on") assert data == {"brightness": 128, "rgbw_color": (1, 0, 0, 255)} - _, data = entity6.last_call("turn_on") + _, data = entity5.last_call("turn_on") # The midpoint the white channels is warm, compensated by adding green + blue assert data == {"brightness": 128, "rgbww_color": (0, 75, 140, 255, 255)} - _, data = entity7.last_call("turn_on") + _, data = entity6.last_call("turn_on") assert data == {"brightness": 128, "color_temp_kelvin": 5962, "color_temp": 167} await hass.services.async_call( @@ -1776,7 +1571,6 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: entity4.entity_id, entity5.entity_id, entity6.entity_id, - entity7.entity_id, ], "brightness_pct": 50, "rgbw_color": (128, 0, 0, 64), @@ -1792,13 +1586,11 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: _, data = entity3.last_call("turn_on") assert data == {"brightness": 128, "rgb_color": (128, 43, 43)} _, data = entity4.last_call("turn_on") - assert data == {"brightness": 128, "hs_color": (0.0, 66.406)} - _, data = entity5.last_call("turn_on") assert data == {"brightness": 128, "rgbw_color": (128, 0, 0, 64)} - _, data = 
entity6.last_call("turn_on") + _, data = entity5.last_call("turn_on") # The midpoint the white channels is warm, compensated by adding green + blue assert data == {"brightness": 128, "rgbww_color": (128, 0, 30, 117, 117)} - _, data = entity7.last_call("turn_on") + _, data = entity6.last_call("turn_on") assert data == {"brightness": 128, "color_temp_kelvin": 3011, "color_temp": 332} await hass.services.async_call( @@ -1813,7 +1605,6 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: entity4.entity_id, entity5.entity_id, entity6.entity_id, - entity7.entity_id, ], "brightness_pct": 50, "rgbw_color": (255, 255, 255, 255), @@ -1829,13 +1620,11 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: _, data = entity3.last_call("turn_on") assert data == {"brightness": 128, "rgb_color": (255, 255, 255)} _, data = entity4.last_call("turn_on") - assert data == {"brightness": 128, "hs_color": (0.0, 0.0)} - _, data = entity5.last_call("turn_on") assert data == {"brightness": 128, "rgbw_color": (255, 255, 255, 255)} - _, data = entity6.last_call("turn_on") + _, data = entity5.last_call("turn_on") # The midpoint the white channels is warm, compensated by adding green + blue assert data == {"brightness": 128, "rgbww_color": (0, 76, 141, 255, 255)} - _, data = entity7.last_call("turn_on") + _, data = entity6.last_call("turn_on") assert data == {"brightness": 128, "color_temp_kelvin": 5962, "color_temp": 167} await hass.services.async_call( @@ -1850,7 +1639,6 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: entity4.entity_id, entity5.entity_id, entity6.entity_id, - entity7.entity_id, ], "brightness_pct": 50, "rgbww_color": (128, 0, 0, 64, 32), @@ -1866,12 +1654,10 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: _, data = entity3.last_call("turn_on") assert data == {"brightness": 128, "rgb_color": (128, 33, 26)} _, data = entity4.last_call("turn_on") - assert data == {"brightness": 128, "hs_color": (4.118, 79.688)} - _, data = entity5.last_call("turn_on") assert data == {"brightness": 128, "rgbw_color": (128, 9, 0, 33)} - _, data = entity6.last_call("turn_on") + _, data = entity5.last_call("turn_on") assert data == {"brightness": 128, "rgbww_color": (128, 0, 0, 64, 32)} - _, data = entity7.last_call("turn_on") + _, data = entity6.last_call("turn_on") assert data == {"brightness": 128, "color_temp_kelvin": 3845, "color_temp": 260} await hass.services.async_call( @@ -1886,7 +1672,6 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: entity4.entity_id, entity5.entity_id, entity6.entity_id, - entity7.entity_id, ], "brightness_pct": 50, "rgbww_color": (255, 255, 255, 255, 255), @@ -1902,13 +1687,11 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: _, data = entity3.last_call("turn_on") assert data == {"brightness": 128, "rgb_color": (255, 217, 185)} _, data = entity4.last_call("turn_on") - assert data == {"brightness": 128, "hs_color": (27.429, 27.451)} - _, data = entity5.last_call("turn_on") # The midpoint the white channels is warm, compensated by decreasing green + blue assert data == {"brightness": 128, "rgbw_color": (96, 44, 0, 255)} - _, data = entity6.last_call("turn_on") + _, data = entity5.last_call("turn_on") assert data == {"brightness": 128, "rgbww_color": (255, 255, 255, 255, 255)} - _, data = entity7.last_call("turn_on") + _, data = entity6.last_call("turn_on") assert data == {"brightness": 128, 
"color_temp_kelvin": 3451, "color_temp": 289} @@ -1921,7 +1704,6 @@ async def test_light_service_call_color_conversion_named_tuple( MockLight("Test_rgb", STATE_ON), MockLight("Test_xy", STATE_ON), MockLight("Test_all", STATE_ON), - MockLight("Test_legacy", STATE_ON), MockLight("Test_rgbw", STATE_ON), MockLight("Test_rgbww", STATE_ON), ] @@ -1944,16 +1726,10 @@ async def test_light_service_call_color_conversion_named_tuple( } entity4 = entities[4] - entity4.supported_features = light.SUPPORT_COLOR - # Set color modes to none to trigger backwards compatibility in LightEntity - entity4.supported_color_modes = None - entity4.color_mode = None + entity4.supported_color_modes = {light.ColorMode.RGBW} entity5 = entities[5] - entity5.supported_color_modes = {light.ColorMode.RGBW} - - entity6 = entities[6] - entity6.supported_color_modes = {light.ColorMode.RGBWW} + entity5.supported_color_modes = {light.ColorMode.RGBWW} assert await async_setup_component(hass, "light", {"light": {"platform": "test"}}) await hass.async_block_till_done() @@ -1969,7 +1745,6 @@ async def test_light_service_call_color_conversion_named_tuple( entity3.entity_id, entity4.entity_id, entity5.entity_id, - entity6.entity_id, ], "brightness_pct": 25, "rgb_color": color_util.RGBColor(128, 0, 0), @@ -1985,10 +1760,8 @@ async def test_light_service_call_color_conversion_named_tuple( _, data = entity3.last_call("turn_on") assert data == {"brightness": 64, "rgb_color": (128, 0, 0)} _, data = entity4.last_call("turn_on") - assert data == {"brightness": 64, "hs_color": (0.0, 100.0)} - _, data = entity5.last_call("turn_on") assert data == {"brightness": 64, "rgbw_color": (128, 0, 0, 0)} - _, data = entity6.last_call("turn_on") + _, data = entity5.last_call("turn_on") assert data == {"brightness": 64, "rgbww_color": (128, 0, 0, 0, 0)} @@ -2357,13 +2130,6 @@ async def test_light_state_color_conversion(hass: HomeAssistant) -> None: entity2.rgb_color = "Invalid" # Should be ignored entity2.xy_color = (0.1, 0.8) - entity3 = entities[3] - entity3.hs_color = (240, 100) - entity3.supported_features = light.SUPPORT_COLOR - # Set color modes to none to trigger backwards compatibility in LightEntity - entity3.supported_color_modes = None - entity3.color_mode = None - assert await async_setup_component(hass, "light", {"light": {"platform": "test"}}) await hass.async_block_till_done() @@ -2385,12 +2151,6 @@ async def test_light_state_color_conversion(hass: HomeAssistant) -> None: assert state.attributes["rgb_color"] == (0, 255, 22) assert state.attributes["xy_color"] == (0.1, 0.8) - state = hass.states.get(entity3.entity_id) - assert state.attributes["color_mode"] == light.ColorMode.HS - assert state.attributes["hs_color"] == (240, 100) - assert state.attributes["rgb_color"] == (0, 0, 255) - assert state.attributes["xy_color"] == (0.136, 0.04) - async def test_services_filter_parameters( hass: HomeAssistant, @@ -2625,27 +2385,6 @@ def test_filter_supported_color_modes() -> None: assert light.filter_supported_color_modes(supported) == {light.ColorMode.BRIGHTNESS} -def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: - """Test deprecated supported features ints.""" - - class MockLightEntityEntity(light.LightEntity): - @property - def supported_features(self) -> int: - """Return supported features.""" - return 1 - - entity = MockLightEntityEntity() - assert entity.supported_features_compat is light.LightEntityFeature(1) - assert "MockLightEntityEntity" in caplog.text - assert "is using deprecated supported features 
values" in caplog.text - assert "Instead it should use" in caplog.text - assert "LightEntityFeature" in caplog.text - assert "and color modes" in caplog.text - caplog.clear() - assert entity.supported_features_compat is light.LightEntityFeature(1) - assert "is using deprecated supported features values" not in caplog.text - - @pytest.mark.parametrize( ("color_mode", "supported_color_modes", "warning_expected"), [ From 2fd3aac268fb7f57653a39c68f564a7b9e6d31f2 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Fri, 6 Dec 2024 20:39:50 +0100 Subject: [PATCH 0316/1198] Add check for unique id mismatch in reauth of Bring integration (#132499) --- homeassistant/components/bring/config_flow.py | 1 + .../components/bring/quality_scale.yaml | 4 +-- homeassistant/components/bring/strings.json | 3 ++- tests/components/bring/test_config_flow.py | 26 +++++++++++++++++++ 4 files changed, 30 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/bring/config_flow.py b/homeassistant/components/bring/config_flow.py index 606c280cf8d..b8ee9d1e6ae 100644 --- a/homeassistant/components/bring/config_flow.py +++ b/homeassistant/components/bring/config_flow.py @@ -85,6 +85,7 @@ class BringConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: if not (errors := await self.validate_input(user_input)): + self._abort_if_unique_id_mismatch() return self.async_update_reload_and_abort( self.reauth_entry, data=user_input ) diff --git a/homeassistant/components/bring/quality_scale.yaml b/homeassistant/components/bring/quality_scale.yaml index 5d47a3577cc..922306930f2 100644 --- a/homeassistant/components/bring/quality_scale.yaml +++ b/homeassistant/components/bring/quality_scale.yaml @@ -7,9 +7,7 @@ rules: brands: done common-modules: done config-flow-test-coverage: done - config-flow: - status: todo - comment: Check uuid match in reauth + config-flow: done dependency-transparency: done docs-actions: done docs-high-level-description: todo diff --git a/homeassistant/components/bring/strings.json b/homeassistant/components/bring/strings.json index c8c12090118..7331f68a161 100644 --- a/homeassistant/components/bring/strings.json +++ b/homeassistant/components/bring/strings.json @@ -26,7 +26,8 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "unique_id_mismatch": "The login details correspond to a different account. Please re-authenticate to the previously configured account." 
} }, "entity": { diff --git a/tests/components/bring/test_config_flow.py b/tests/components/bring/test_config_flow.py index 8d215a5d3ee..93e86051a75 100644 --- a/tests/components/bring/test_config_flow.py +++ b/tests/components/bring/test_config_flow.py @@ -188,3 +188,29 @@ async def test_flow_reauth_error_and_recover( assert result["reason"] == "reauth_successful" assert len(hass.config_entries.async_entries()) == 1 + + +async def test_flow_reauth_unique_id_mismatch( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, +) -> None: + """Test we abort reauth if unique id mismatch.""" + + mock_bring_client.uuid = "11111111-11111111-11111111-11111111" + + bring_config_entry.add_to_hass(hass) + + result = await bring_config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_EMAIL: "new-email", CONF_PASSWORD: "new-password"}, + ) + + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unique_id_mismatch" From b30795e1f4433c58ce9db9cce49d10be4d1fc823 Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Sat, 7 Dec 2024 05:42:52 +1000 Subject: [PATCH 0317/1198] Add more models to Tesla Fleet (#132430) --- homeassistant/components/tesla_fleet/const.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/homeassistant/components/tesla_fleet/const.py b/homeassistant/components/tesla_fleet/const.py index c70cc3291f7..9b3baf49bfb 100644 --- a/homeassistant/components/tesla_fleet/const.py +++ b/homeassistant/components/tesla_fleet/const.py @@ -33,6 +33,8 @@ MODELS = { "3": "Model 3", "X": "Model X", "Y": "Model Y", + "C": "Cybertruck", + "T": "Tesla Semi", } From 71f5f4bcddf8a98636f09eabb3219913fb9ddc45 Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Sat, 7 Dec 2024 05:43:37 +1000 Subject: [PATCH 0318/1198] Remove default OAuth implementation from Tesla Fleet (#132431) --- .../components/tesla_fleet/__init__.py | 7 -- .../components/tesla_fleet/config_flow.py | 6 -- homeassistant/components/tesla_fleet/oauth.py | 56 +------------ .../tesla_fleet/test_config_flow.py | 84 ++----------------- 4 files changed, 10 insertions(+), 143 deletions(-) diff --git a/homeassistant/components/tesla_fleet/__init__.py b/homeassistant/components/tesla_fleet/__init__.py index e7030b568b3..bc837aa4cac 100644 --- a/homeassistant/components/tesla_fleet/__init__.py +++ b/homeassistant/components/tesla_fleet/__init__.py @@ -34,7 +34,6 @@ from homeassistant.helpers.config_entry_oauth2_flow import ( import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import DeviceInfo -from .config_flow import OAuth2FlowHandler from .const import DOMAIN, LOGGER, MODELS from .coordinator import ( TeslaFleetEnergySiteInfoCoordinator, @@ -42,7 +41,6 @@ from .coordinator import ( TeslaFleetVehicleDataCoordinator, ) from .models import TeslaFleetData, TeslaFleetEnergyData, TeslaFleetVehicleData -from .oauth import TeslaSystemImplementation PLATFORMS: Final = [ Platform.BINARY_SENSOR, @@ -73,11 +71,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - scopes: list[Scope] = [Scope(s) for s in token["scp"]] region: str = token["ou_code"].lower() - OAuth2FlowHandler.async_register_implementation( - hass, - TeslaSystemImplementation(hass), - ) - implementation = await async_get_config_entry_implementation(hass, entry) 
oauth_session = OAuth2Session(hass, entry, implementation) refresh_lock = asyncio.Lock() diff --git a/homeassistant/components/tesla_fleet/config_flow.py b/homeassistant/components/tesla_fleet/config_flow.py index ca36c6f511b..feeb5e74ca6 100644 --- a/homeassistant/components/tesla_fleet/config_flow.py +++ b/homeassistant/components/tesla_fleet/config_flow.py @@ -12,7 +12,6 @@ from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.helpers import config_entry_oauth2_flow from .const import DOMAIN, LOGGER -from .oauth import TeslaSystemImplementation class OAuth2FlowHandler( @@ -31,11 +30,6 @@ class OAuth2FlowHandler( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow start.""" - self.async_register_implementation( - self.hass, - TeslaSystemImplementation(self.hass), - ) - return await super().async_step_user() async def async_oauth_create_entry( diff --git a/homeassistant/components/tesla_fleet/oauth.py b/homeassistant/components/tesla_fleet/oauth.py index 8b43460436b..b25c5216009 100644 --- a/homeassistant/components/tesla_fleet/oauth.py +++ b/homeassistant/components/tesla_fleet/oauth.py @@ -1,8 +1,5 @@ """Provide oauth implementations for the Tesla Fleet integration.""" -import base64 -import hashlib -import secrets from typing import Any from homeassistant.components.application_credentials import ( @@ -11,59 +8,8 @@ from homeassistant.components.application_credentials import ( ClientCredential, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_entry_oauth2_flow -from .const import AUTHORIZE_URL, CLIENT_ID, DOMAIN, SCOPES, TOKEN_URL - - -class TeslaSystemImplementation(config_entry_oauth2_flow.LocalOAuth2Implementation): - """Tesla Fleet API open source Oauth2 implementation.""" - - code_verifier: str - code_challenge: str - - def __init__(self, hass: HomeAssistant) -> None: - """Initialize open source Oauth2 implementation.""" - - # Setup PKCE - self.code_verifier = secrets.token_urlsafe(32) - hashed_verifier = hashlib.sha256(self.code_verifier.encode()).digest() - self.code_challenge = ( - base64.urlsafe_b64encode(hashed_verifier).decode().replace("=", "") - ) - super().__init__( - hass, - DOMAIN, - CLIENT_ID, - "", - AUTHORIZE_URL, - TOKEN_URL, - ) - - @property - def name(self) -> str: - """Name of the implementation.""" - return "Built-in open source client ID" - - @property - def extra_authorize_data(self) -> dict[str, Any]: - """Extra data that needs to be appended to the authorize url.""" - return { - "prompt": "login", - "scope": " ".join(SCOPES), - "code_challenge": self.code_challenge, # PKCE - } - - async def async_resolve_external_data(self, external_data: Any) -> dict: - """Resolve the authorization code to tokens.""" - return await self._token_request( - { - "grant_type": "authorization_code", - "code": external_data["code"], - "redirect_uri": external_data["state"]["redirect_uri"], - "code_verifier": self.code_verifier, # PKCE - } - ) +from .const import AUTHORIZE_URL, SCOPES, TOKEN_URL class TeslaUserImplementation(AuthImplementation): diff --git a/tests/components/tesla_fleet/test_config_flow.py b/tests/components/tesla_fleet/test_config_flow.py index b49e090cd5d..6cb8c60ac0c 100644 --- a/tests/components/tesla_fleet/test_config_flow.py +++ b/tests/components/tesla_fleet/test_config_flow.py @@ -11,7 +11,6 @@ from homeassistant.components.application_credentials import ( ) from homeassistant.components.tesla_fleet.const import ( AUTHORIZE_URL, - CLIENT_ID, 
DOMAIN, SCOPES, TOKEN_URL, @@ -52,69 +51,18 @@ async def access_token(hass: HomeAssistant) -> str: ) -@pytest.mark.usefixtures("current_request_with_host") -async def test_full_flow( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, - access_token: str, -) -> None: - """Check full flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - state = config_entry_oauth2_flow._encode_jwt( +@pytest.fixture(autouse=True) +async def create_credential(hass: HomeAssistant) -> None: + """Create a user credential.""" + # Create user application credential + assert await async_setup_component(hass, "application_credentials", {}) + await async_import_client_credential( hass, - { - "flow_id": result["flow_id"], - "redirect_uri": REDIRECT, - }, + DOMAIN, + ClientCredential("user_client_id", "user_client_secret"), + "user_cred", ) - assert result["type"] is FlowResultType.EXTERNAL_STEP - - assert result["url"].startswith(AUTHORIZE_URL) - parsed_url = urlparse(result["url"]) - parsed_query = parse_qs(parsed_url.query) - assert parsed_query["response_type"][0] == "code" - assert parsed_query["client_id"][0] == CLIENT_ID - assert parsed_query["redirect_uri"][0] == REDIRECT - assert parsed_query["state"][0] == state - assert parsed_query["scope"][0] == " ".join(SCOPES) - assert parsed_query["code_challenge"][0] is not None - - client = await hass_client_no_auth() - resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") - assert resp.status == 200 - assert resp.headers["content-type"] == "text/html; charset=utf-8" - - aioclient_mock.clear_requests() - aioclient_mock.post( - TOKEN_URL, - json={ - "refresh_token": "mock-refresh-token", - "access_token": access_token, - "type": "Bearer", - "expires_in": 60, - }, - ) - with patch( - "homeassistant.components.tesla_fleet.async_setup_entry", return_value=True - ) as mock_setup: - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - assert len(mock_setup.mock_calls) == 1 - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == UNIQUE_ID - assert "result" in result - assert result["result"].unique_id == UNIQUE_ID - assert "token" in result["result"].data - assert result["result"].data["token"]["access_token"] == access_token - assert result["result"].data["token"]["refresh_token"] == "mock-refresh-token" - @pytest.mark.usefixtures("current_request_with_host") async def test_full_flow_user_cred( @@ -125,24 +73,10 @@ async def test_full_flow_user_cred( ) -> None: """Check full flow.""" - # Create user application credential - assert await async_setup_component(hass, "application_credentials", {}) - await async_import_client_credential( - hass, - DOMAIN, - ClientCredential("user_client_id", "user_client_secret"), - "user_cred", - ) - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - assert result["type"] is FlowResultType.FORM - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], {"implementation": "user_cred"} - ) assert result["type"] is FlowResultType.EXTERNAL_STEP state = config_entry_oauth2_flow._encode_jwt( From a661e60511ea3b50aee2b9380eea1876b88798d5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 6 Dec 2024 20:50:13 +0100 Subject: [PATCH 0319/1198] Bump 
actions/attest-build-provenance from 1.4.4 to 2.0.0 (#132332) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/builder.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index f4e4de97e78..a6da4a05fa2 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -531,7 +531,7 @@ jobs: - name: Generate artifact attestation if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true' - uses: actions/attest-build-provenance@ef244123eb79f2f7a7e75d99086184180e6d0018 # v1.4.4 + uses: actions/attest-build-provenance@619dbb2e03e0189af0c55118e7d3c5e129e99726 # v2.0.0 with: subject-name: ${{ env.HASSFEST_IMAGE_NAME }} subject-digest: ${{ steps.push.outputs.digest }} From 0111205f816b811cb005d683f1ada463ef634d5b Mon Sep 17 00:00:00 2001 From: G Johansson Date: Fri, 6 Dec 2024 20:54:05 +0100 Subject: [PATCH 0320/1198] Remove migration for tag (#132200) --- homeassistant/components/tag/__init__.py | 6 +---- tests/components/tag/snapshots/test_init.ambr | 23 +++++++++++++++++-- tests/components/tag/test_init.py | 9 ++++++++ 3 files changed, 31 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/tag/__init__.py b/homeassistant/components/tag/__init__.py index 95efae3d386..47c1d14ce60 100644 --- a/homeassistant/components/tag/__init__.py +++ b/homeassistant/components/tag/__init__.py @@ -106,7 +106,6 @@ class TagStore(Store[collection.SerializedStorageCollection]): for tag in data["items"]: # Copy name in tag store to the entity registry _create_entry(entity_registry, tag[CONF_ID], tag.get(CONF_NAME)) - tag["migrated"] = True if old_major_version == 1 and old_minor_version < 3: # Version 1.3 removes tag_id from the store for tag in data["items"]: @@ -178,10 +177,7 @@ class TagStorageCollection(collection.DictStorageCollection): We don't store the name, it's stored in the entity registry. """ - # Preserve the name of migrated entries to allow downgrading to 2024.5 - # without losing tag names. This can be removed in HA Core 2025.1. 
- migrated = item_id in self.data and "migrated" in self.data[item_id] - return {k: v for k, v in item.items() if k != CONF_NAME or migrated} + return {k: v for k, v in item.items() if k != CONF_NAME} class TagDictStorageCollectionWebsocket( diff --git a/tests/components/tag/snapshots/test_init.ambr b/tests/components/tag/snapshots/test_init.ambr index 29a9a2665b8..caa88b8ca9a 100644 --- a/tests/components/tag/snapshots/test_init.ambr +++ b/tests/components/tag/snapshots/test_init.ambr @@ -5,8 +5,6 @@ 'items': list([ dict({ 'id': 'test tag id', - 'migrated': True, - 'name': 'test tag name', }), dict({ 'device_id': 'some_scanner', @@ -23,3 +21,24 @@ 'version': 1, }) # --- +# name: test_tag_scanned + dict({ + 'data': dict({ + 'items': list([ + dict({ + 'id': 'test tag id', + }), + dict({ + 'id': 'test tag id 2', + }), + dict({ + 'device_id': 'some_scanner', + 'id': 'new tag', + }), + ]), + }), + 'key': 'tag', + 'minor_version': 3, + 'version': 1, + }) +# --- diff --git a/tests/components/tag/test_init.py b/tests/components/tag/test_init.py index 5c1e80c2d8b..ac862e59f2d 100644 --- a/tests/components/tag/test_init.py +++ b/tests/components/tag/test_init.py @@ -6,6 +6,7 @@ from typing import Any from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.tag import DOMAIN, _create_entry, async_scan_tag from homeassistant.const import CONF_NAME, STATE_UNKNOWN @@ -165,7 +166,9 @@ async def test_tag_scanned( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, freezer: FrozenDateTimeFactory, + hass_storage: dict[str, Any], storage_setup, + snapshot: SnapshotAssertion, ) -> None: """Test scanning tags.""" assert await storage_setup() @@ -205,6 +208,12 @@ async def test_tag_scanned( }, ] + # Trigger store + freezer.tick(11) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert hass_storage[DOMAIN] == snapshot(exclude=props("last_scanned")) + def track_changes(coll: collection.ObservableCollection): """Create helper to track changes in a collection.""" From e54d929573fc48a81e2ce00e8c5ceb949316cece Mon Sep 17 00:00:00 2001 From: G Johansson Date: Fri, 6 Dec 2024 20:54:50 +0100 Subject: [PATCH 0321/1198] Small cleanup in sensibo (#132118) --- homeassistant/components/sensibo/climate.py | 35 +++++++-------------- 1 file changed, 12 insertions(+), 23 deletions(-) diff --git a/homeassistant/components/sensibo/climate.py b/homeassistant/components/sensibo/climate.py index 181b02e84ad..5bf455c3631 100644 --- a/homeassistant/components/sensibo/climate.py +++ b/homeassistant/components/sensibo/climate.py @@ -3,7 +3,7 @@ from __future__ import annotations from bisect import bisect_left -from typing import TYPE_CHECKING, Any +from typing import Any import voluptuous as vol @@ -231,10 +231,9 @@ class SensiboClimate(SensiboDeviceBaseEntity, ClimateEntity): @property def hvac_modes(self) -> list[HVACMode]: """Return the list of available hvac operation modes.""" - if TYPE_CHECKING: - assert self.device_data.hvac_modes - hvac_modes = [SENSIBO_TO_HA[mode] for mode in self.device_data.hvac_modes] - return hvac_modes if hvac_modes else [HVACMode.OFF] + if not self.device_data.hvac_modes: + return [HVACMode.OFF] + return [SENSIBO_TO_HA[mode] for mode in self.device_data.hvac_modes] @property def current_temperature(self) -> float | None: @@ -259,52 +258,42 @@ class SensiboClimate(SensiboDeviceBaseEntity, ClimateEntity): @property def target_temperature(self) -> float | None: """Return the 
temperature we try to reach."""
-        target_temp: int | None = self.device_data.target_temp
-        return target_temp
+        return self.device_data.target_temp
 
     @property
     def target_temperature_step(self) -> float | None:
         """Return the supported step of target temperature."""
-        target_temp_step: int = self.device_data.temp_step
-        return target_temp_step
+        return self.device_data.temp_step
 
     @property
     def fan_mode(self) -> str | None:
         """Return the fan setting."""
-        fan_mode: str | None = self.device_data.fan_mode
-        return fan_mode
+        return self.device_data.fan_mode
 
     @property
     def fan_modes(self) -> list[str] | None:
         """Return the list of available fan modes."""
-        if self.device_data.fan_modes:
-            return self.device_data.fan_modes
-        return None
+        return self.device_data.fan_modes
 
     @property
     def swing_mode(self) -> str | None:
         """Return the swing setting."""
-        swing_mode: str | None = self.device_data.swing_mode
-        return swing_mode
+        return self.device_data.swing_mode
 
     @property
     def swing_modes(self) -> list[str] | None:
         """Return the list of available swing modes."""
-        if self.device_data.swing_modes:
-            return self.device_data.swing_modes
-        return None
+        return self.device_data.swing_modes
 
     @property
     def min_temp(self) -> float:
         """Return the minimum temperature."""
-        min_temp: int = self.device_data.temp_list[0]
-        return min_temp
+        return self.device_data.temp_list[0]
 
     @property
     def max_temp(self) -> float:
         """Return the maximum temperature."""
-        max_temp: int = self.device_data.temp_list[-1]
-        return max_temp
+        return self.device_data.temp_list[-1]
 
     @property
     def available(self) -> bool:

From 9771998415a5be3e419ada2e8547956acf105fd6 Mon Sep 17 00:00:00 2001
From: epenet <6771947+epenet@users.noreply.github.com>
Date: Fri, 6 Dec 2024 20:55:34 +0100
Subject: [PATCH 0322/1198] Cache AST module parsing in hassfest (#132244)

---
 script/hassfest/__init__.py                          | 13 +++++++++++++
 script/hassfest/config_schema.py                     |  5 +++--
 script/hassfest/dependencies.py                      |  3 ++-
 .../config_entry_unloading.py                        |  3 ++-
 .../quality_scale_validation/diagnostics.py          |  3 ++-
 .../hassfest/quality_scale_validation/discovery.py   |  3 ++-
 .../reauthentication_flow.py                         |  3 ++-
 .../reconfiguration_flow.py                          |  3 ++-
 .../quality_scale_validation/runtime_data.py         |  3 ++-
 .../quality_scale_validation/unique_config_entry.py  |  3 ++-
 10 files changed, 32 insertions(+), 10 deletions(-)

diff --git a/script/hassfest/__init__.py b/script/hassfest/__init__.py
index 2fa7997162f..c8c9aa9ef39 100644
--- a/script/hassfest/__init__.py
+++ b/script/hassfest/__init__.py
@@ -1 +1,14 @@
 """Manifest validator."""
+
+import ast
+from functools import lru_cache
+from pathlib import Path
+
+
+@lru_cache
+def ast_parse_module(file_path: Path) -> ast.Module:
+    """Parse a module.
+
+    Cached to avoid parsing the same file for each plugin.
+    """
+    return ast.parse(file_path.read_text())
diff --git a/script/hassfest/config_schema.py b/script/hassfest/config_schema.py
index 6b863ab9ecd..70dff1194bc 100644
--- a/script/hassfest/config_schema.py
+++ b/script/hassfest/config_schema.py
@@ -6,6 +6,7 @@ import ast
 
 from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN
 
+from . 
import ast_parse_module from .model import Config, Integration CONFIG_SCHEMA_IGNORE = { @@ -60,7 +61,7 @@ def _validate_integration(config: Config, integration: Integration) -> None: # Virtual integrations don't have any implementation return - init = ast.parse(init_file.read_text()) + init = ast_parse_module(init_file) # No YAML Support if not _has_function( @@ -81,7 +82,7 @@ def _validate_integration(config: Config, integration: Integration) -> None: config_file = integration.path / "config.py" if config_file.is_file(): - config_module = ast.parse(config_file.read_text()) + config_module = ast_parse_module(config_file) if _has_function(config_module, ast.AsyncFunctionDef, "async_validate_config"): return diff --git a/script/hassfest/dependencies.py b/script/hassfest/dependencies.py index 0c7f4f11a8c..62644e19c5e 100644 --- a/script/hassfest/dependencies.py +++ b/script/hassfest/dependencies.py @@ -10,6 +10,7 @@ from pathlib import Path from homeassistant.const import Platform from homeassistant.requirements import DISCOVERY_INTEGRATIONS +from . import ast_parse_module from .model import Config, Integration @@ -33,7 +34,7 @@ class ImportCollector(ast.NodeVisitor): self._cur_fil_dir = fil.relative_to(self.integration.path) self.referenced[self._cur_fil_dir] = set() try: - self.visit(ast.parse(fil.read_text())) + self.visit(ast_parse_module(fil)) except SyntaxError as e: e.add_note(f"File: {fil}") raise diff --git a/script/hassfest/quality_scale_validation/config_entry_unloading.py b/script/hassfest/quality_scale_validation/config_entry_unloading.py index 50f42752bf6..b25a72e427f 100644 --- a/script/hassfest/quality_scale_validation/config_entry_unloading.py +++ b/script/hassfest/quality_scale_validation/config_entry_unloading.py @@ -5,6 +5,7 @@ https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/c import ast +from script.hassfest import ast_parse_module from script.hassfest.model import Integration @@ -20,7 +21,7 @@ def validate(integration: Integration) -> list[str] | None: """Validate that the integration has a config flow.""" init_file = integration.path / "__init__.py" - init = ast.parse(init_file.read_text()) + init = ast_parse_module(init_file) if not _has_unload_entry_function(init): return [ diff --git a/script/hassfest/quality_scale_validation/diagnostics.py b/script/hassfest/quality_scale_validation/diagnostics.py index 99f067d6500..d3ef38474f8 100644 --- a/script/hassfest/quality_scale_validation/diagnostics.py +++ b/script/hassfest/quality_scale_validation/diagnostics.py @@ -5,6 +5,7 @@ https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/d import ast +from script.hassfest import ast_parse_module from script.hassfest.model import Integration DIAGNOSTICS_FUNCTIONS = { @@ -31,7 +32,7 @@ def validate(integration: Integration) -> list[str] | None: "(is missing diagnostics.py)", ] - diagnostics = ast.parse(diagnostics_file.read_text()) + diagnostics = ast_parse_module(diagnostics_file) if not _has_diagnostics_function(diagnostics): return [ diff --git a/script/hassfest/quality_scale_validation/discovery.py b/script/hassfest/quality_scale_validation/discovery.py index d24005b6373..66a08456314 100644 --- a/script/hassfest/quality_scale_validation/discovery.py +++ b/script/hassfest/quality_scale_validation/discovery.py @@ -5,6 +5,7 @@ https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/d import ast +from script.hassfest import ast_parse_module from script.hassfest.model import Integration MANIFEST_KEYS = [ 
@@ -49,7 +50,7 @@ def validate(integration: Integration) -> list[str] | None: return None # Fallback => check config_flow step - config_flow = ast.parse(config_flow_file.read_text()) + config_flow = ast_parse_module(config_flow_file) if not (_has_discovery_function(config_flow)): return [ f"Integration is missing one of {CONFIG_FLOW_STEPS} " diff --git a/script/hassfest/quality_scale_validation/reauthentication_flow.py b/script/hassfest/quality_scale_validation/reauthentication_flow.py index 311f8a2429d..4ae8fed5696 100644 --- a/script/hassfest/quality_scale_validation/reauthentication_flow.py +++ b/script/hassfest/quality_scale_validation/reauthentication_flow.py @@ -5,6 +5,7 @@ https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/r import ast +from script.hassfest import ast_parse_module from script.hassfest.model import Integration @@ -20,7 +21,7 @@ def validate(integration: Integration) -> list[str] | None: """Validate that the integration has a reauthentication flow.""" config_flow_file = integration.path / "config_flow.py" - config_flow = ast.parse(config_flow_file.read_text()) + config_flow = ast_parse_module(config_flow_file) if not _has_step_reauth_function(config_flow): return [ diff --git a/script/hassfest/quality_scale_validation/reconfiguration_flow.py b/script/hassfest/quality_scale_validation/reconfiguration_flow.py index de3b5dcba62..19192cb28d0 100644 --- a/script/hassfest/quality_scale_validation/reconfiguration_flow.py +++ b/script/hassfest/quality_scale_validation/reconfiguration_flow.py @@ -5,6 +5,7 @@ https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/r import ast +from script.hassfest import ast_parse_module from script.hassfest.model import Integration @@ -20,7 +21,7 @@ def validate(integration: Integration) -> list[str] | None: """Validate that the integration has a reconfiguration flow.""" config_flow_file = integration.path / "config_flow.py" - config_flow = ast.parse(config_flow_file.read_text()) + config_flow = ast_parse_module(config_flow_file) if not _has_step_reconfigure_function(config_flow): return [ diff --git a/script/hassfest/quality_scale_validation/runtime_data.py b/script/hassfest/quality_scale_validation/runtime_data.py index 765db43d1e3..c426496636b 100644 --- a/script/hassfest/quality_scale_validation/runtime_data.py +++ b/script/hassfest/quality_scale_validation/runtime_data.py @@ -5,6 +5,7 @@ https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/r import ast +from script.hassfest import ast_parse_module from script.hassfest.model import Integration @@ -35,7 +36,7 @@ def _get_setup_entry_function(module: ast.Module) -> ast.AsyncFunctionDef | None def validate(integration: Integration) -> list[str] | None: """Validate correct use of ConfigEntry.runtime_data.""" init_file = integration.path / "__init__.py" - init = ast.parse(init_file.read_text()) + init = ast_parse_module(init_file) # Should not happen, but better to be safe if not (async_setup_entry := _get_setup_entry_function(init)): diff --git a/script/hassfest/quality_scale_validation/unique_config_entry.py b/script/hassfest/quality_scale_validation/unique_config_entry.py index eaa879bb05e..bf9991d5635 100644 --- a/script/hassfest/quality_scale_validation/unique_config_entry.py +++ b/script/hassfest/quality_scale_validation/unique_config_entry.py @@ -5,6 +5,7 @@ https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/u import ast +from script.hassfest import ast_parse_module from 
script.hassfest.model import Integration @@ -36,7 +37,7 @@ def validate(integration: Integration) -> list[str] | None: return None config_flow_file = integration.path / "config_flow.py" - config_flow = ast.parse(config_flow_file.read_text()) + config_flow = ast_parse_module(config_flow_file) if not ( _has_abort_entries_match(config_flow) From 40239945c1fbef878bcdb26451526d22f18d582f Mon Sep 17 00:00:00 2001 From: G Johansson Date: Fri, 6 Dec 2024 21:01:41 +0100 Subject: [PATCH 0323/1198] Remove not needed name from yale_smart_alarm (#132204) --- .../components/yale_smart_alarm/__init__.py | 12 ++- .../yale_smart_alarm/alarm_control_panel.py | 3 +- .../yale_smart_alarm/config_flow.py | 6 +- .../components/yale_smart_alarm/entity.py | 4 +- tests/components/yale_smart_alarm/conftest.py | 66 ++++++++----- .../snapshots/test_alarm_control_panel.ambr | 10 +- .../snapshots/test_binary_sensor.ambr | 40 ++++---- .../snapshots/test_button.ambr | 10 +- .../yale_smart_alarm/test_button.py | 4 +- .../yale_smart_alarm/test_config_flow.py | 58 ++++------- .../yale_smart_alarm/test_coordinator.py | 19 ++-- .../components/yale_smart_alarm/test_init.py | 99 +++++++++++++++++++ 12 files changed, 211 insertions(+), 120 deletions(-) create mode 100644 tests/components/yale_smart_alarm/test_init.py diff --git a/homeassistant/components/yale_smart_alarm/__init__.py b/homeassistant/components/yale_smart_alarm/__init__.py index b3fcc28ad49..d67e136be4a 100644 --- a/homeassistant/components/yale_smart_alarm/__init__.py +++ b/homeassistant/components/yale_smart_alarm/__init__.py @@ -4,7 +4,7 @@ from __future__ import annotations from homeassistant.components.lock import CONF_DEFAULT_CODE, DOMAIN as LOCK_DOMAIN from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_CODE +from homeassistant.const import CONF_CODE, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -42,6 +42,7 @@ async def async_migrate_entry(hass: HomeAssistant, entry: YaleConfigEntry) -> bo LOGGER.debug("Migrating from version %s", entry.version) if entry.version == 1: + new_options = entry.options.copy() if config_entry_default_code := entry.options.get(CONF_CODE): entity_reg = er.async_get(hass) entries = er.async_entries_for_config_entry(entity_reg, entry.entry_id) @@ -52,12 +53,15 @@ async def async_migrate_entry(hass: HomeAssistant, entry: YaleConfigEntry) -> bo LOCK_DOMAIN, {CONF_DEFAULT_CODE: config_entry_default_code}, ) - new_options = entry.options.copy() del new_options[CONF_CODE] - hass.config_entries.async_update_entry(entry, options=new_options) + hass.config_entries.async_update_entry(entry, options=new_options, version=2) - hass.config_entries.async_update_entry(entry, version=2) + if entry.version == 2 and entry.minor_version == 1: + # Removes name from entry data + new_data = entry.data.copy() + del new_data[CONF_NAME] + hass.config_entries.async_update_entry(entry, data=new_data, minor_version=2) LOGGER.debug("Migration to version %s successful", entry.version) diff --git a/homeassistant/components/yale_smart_alarm/alarm_control_panel.py b/homeassistant/components/yale_smart_alarm/alarm_control_panel.py index 868b186be9d..8244d96064a 100644 --- a/homeassistant/components/yale_smart_alarm/alarm_control_panel.py +++ b/homeassistant/components/yale_smart_alarm/alarm_control_panel.py @@ -15,7 +15,6 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntityFeature, AlarmControlPanelState, ) -from 
homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -84,7 +83,7 @@ class YaleAlarmDevice(YaleAlarmEntity, AlarmControlPanelEntity): translation_domain=DOMAIN, translation_key="set_alarm", translation_placeholders={ - "name": self.coordinator.config_entry.data[CONF_NAME], + "name": self.coordinator.config_entry.title, "error": str(error), }, ) from error diff --git a/homeassistant/components/yale_smart_alarm/config_flow.py b/homeassistant/components/yale_smart_alarm/config_flow.py index c71b7b33a08..3ceee367284 100644 --- a/homeassistant/components/yale_smart_alarm/config_flow.py +++ b/homeassistant/components/yale_smart_alarm/config_flow.py @@ -15,7 +15,7 @@ from homeassistant.config_entries import ( ConfigFlowResult, OptionsFlow, ) -from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import callback import homeassistant.helpers.config_validation as cv @@ -23,7 +23,6 @@ from .const import ( CONF_AREA_ID, CONF_LOCK_CODE_DIGITS, DEFAULT_AREA_ID, - DEFAULT_NAME, DOMAIN, YALE_BASE_ERRORS, ) @@ -67,6 +66,7 @@ class YaleConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Yale integration.""" VERSION = 2 + MINOR_VERSION = 2 @staticmethod @callback @@ -146,7 +146,6 @@ class YaleConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: username = user_input[CONF_USERNAME] password = user_input[CONF_PASSWORD] - name = DEFAULT_NAME area = user_input.get(CONF_AREA_ID, DEFAULT_AREA_ID) errors = await self.hass.async_add_executor_job( @@ -161,7 +160,6 @@ class YaleConfigFlow(ConfigFlow, domain=DOMAIN): data={ CONF_USERNAME: username, CONF_PASSWORD: password, - CONF_NAME: name, CONF_AREA_ID: area, }, ) diff --git a/homeassistant/components/yale_smart_alarm/entity.py b/homeassistant/components/yale_smart_alarm/entity.py index 4020c93de4e..2610f54f0a9 100644 --- a/homeassistant/components/yale_smart_alarm/entity.py +++ b/homeassistant/components/yale_smart_alarm/entity.py @@ -2,7 +2,7 @@ from yalesmartalarmclient import YaleLock -from homeassistant.const import CONF_NAME, CONF_USERNAME +from homeassistant.const import CONF_USERNAME from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo from homeassistant.helpers.entity import Entity from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -61,7 +61,7 @@ class YaleAlarmEntity(CoordinatorEntity[YaleDataUpdateCoordinator], Entity): identifiers={(DOMAIN, coordinator.config_entry.data[CONF_USERNAME])}, manufacturer=MANUFACTURER, model=MODEL, - name=coordinator.config_entry.data[CONF_NAME], + name=coordinator.config_entry.title, connections={(CONNECTION_NETWORK_MAC, panel_info["mac"])}, sw_version=panel_info["version"], ) diff --git a/tests/components/yale_smart_alarm/conftest.py b/tests/components/yale_smart_alarm/conftest.py index 7a7abcac67c..91c64c7a7a7 100644 --- a/tests/components/yale_smart_alarm/conftest.py +++ b/tests/components/yale_smart_alarm/conftest.py @@ -20,7 +20,6 @@ from tests.common import MockConfigEntry, load_fixture ENTRY_CONFIG = { "username": "test-username", "password": "new-test-password", - "name": "Yale Smart Alarm", "area_id": "1", } OPTIONS_CONFIG = {"lock_code_digits": 6} @@ -35,51 +34,64 @@ async def patch_platform_constant() -> list[Platform]: @pytest.fixture async def load_config_entry( hass: 
HomeAssistant, - get_data: YaleSmartAlarmData, - get_all_data: YaleSmartAlarmData, + get_client: Mock, load_platforms: list[Platform], ) -> tuple[MockConfigEntry, Mock]: """Set up the Yale Smart Living integration in Home Assistant.""" with patch("homeassistant.components.yale_smart_alarm.PLATFORMS", load_platforms): config_entry = MockConfigEntry( + title=ENTRY_CONFIG["username"], domain=DOMAIN, source=SOURCE_USER, data=ENTRY_CONFIG, options=OPTIONS_CONFIG, entry_id="1", unique_id="username", - version=1, + version=2, + minor_version=2, ) config_entry.add_to_hass(hass) - - cycle = get_data.cycle["data"] - data = {"data": cycle["device_status"]} - with patch( "homeassistant.components.yale_smart_alarm.coordinator.YaleSmartAlarmClient", - autospec=True, - ) as mock_client_class: - client = mock_client_class.return_value - client.auth = Mock() - client.auth.get_authenticated = Mock(return_value=data) - client.auth.post_authenticated = Mock(return_value={"code": "000"}) - client.auth.put_authenticated = Mock(return_value={"code": "000"}) - client.lock_api = YaleDoorManAPI(client.auth) - locks = [ - YaleLock(device, lock_api=client.lock_api) - for device in cycle["device_status"] - if device["type"] == YaleLock.DEVICE_TYPE - ] - client.get_locks.return_value = locks - client.get_all.return_value = get_all_data - client.get_information.return_value = get_data - client.get_armed_status.return_value = YALE_STATE_ARM_FULL - + return_value=get_client, + ): await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - return (config_entry, client) + return (config_entry, get_client) + + +@pytest.fixture(name="get_client") +async def mock_client( + get_data: YaleSmartAlarmData, + get_all_data: YaleSmartAlarmData, +) -> Mock: + """Mock the Yale client.""" + cycle = get_data.cycle["data"] + data = {"data": cycle["device_status"]} + + with patch( + "homeassistant.components.yale_smart_alarm.coordinator.YaleSmartAlarmClient", + autospec=True, + ) as mock_client_class: + client = mock_client_class.return_value + client.auth = Mock() + client.auth.get_authenticated = Mock(return_value=data) + client.auth.post_authenticated = Mock(return_value={"code": "000"}) + client.auth.put_authenticated = Mock(return_value={"code": "000"}) + client.lock_api = YaleDoorManAPI(client.auth) + locks = [ + YaleLock(device, lock_api=client.lock_api) + for device in cycle["device_status"] + if device["type"] == YaleLock.DEVICE_TYPE + ] + client.get_locks.return_value = locks + client.get_all.return_value = get_all_data + client.get_information.return_value = get_data + client.get_armed_status.return_value = YALE_STATE_ARM_FULL + + return client @pytest.fixture(name="loaded_fixture", scope="package") diff --git a/tests/components/yale_smart_alarm/snapshots/test_alarm_control_panel.ambr b/tests/components/yale_smart_alarm/snapshots/test_alarm_control_panel.ambr index 749e62252f3..fcdb7baca03 100644 --- a/tests/components/yale_smart_alarm/snapshots/test_alarm_control_panel.ambr +++ b/tests/components/yale_smart_alarm/snapshots/test_alarm_control_panel.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_alarm_control_panel[load_platforms0][alarm_control_panel.yale_smart_alarm-entry] +# name: test_alarm_control_panel[load_platforms0][alarm_control_panel.test_username-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -11,7 +11,7 @@ 'disabled_by': None, 'domain': 'alarm_control_panel', 'entity_category': None, - 'entity_id': 'alarm_control_panel.yale_smart_alarm', + 'entity_id': 
'alarm_control_panel.test_username', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -32,17 +32,17 @@ 'unit_of_measurement': None, }) # --- -# name: test_alarm_control_panel[load_platforms0][alarm_control_panel.yale_smart_alarm-state] +# name: test_alarm_control_panel[load_platforms0][alarm_control_panel.test_username-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'changed_by': None, 'code_arm_required': False, 'code_format': None, - 'friendly_name': 'Yale Smart Alarm', + 'friendly_name': 'test-username', 'supported_features': , }), 'context': , - 'entity_id': 'alarm_control_panel.yale_smart_alarm', + 'entity_id': 'alarm_control_panel.test_username', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/yale_smart_alarm/snapshots/test_binary_sensor.ambr b/tests/components/yale_smart_alarm/snapshots/test_binary_sensor.ambr index ed7e847439c..e519a880de9 100644 --- a/tests/components/yale_smart_alarm/snapshots/test_binary_sensor.ambr +++ b/tests/components/yale_smart_alarm/snapshots/test_binary_sensor.ambr @@ -281,7 +281,7 @@ 'state': 'off', }) # --- -# name: test_binary_sensor[load_platforms0][binary_sensor.yale_smart_alarm_battery-entry] +# name: test_binary_sensor[load_platforms0][binary_sensor.test_username_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -293,7 +293,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.yale_smart_alarm_battery', + 'entity_id': 'binary_sensor.test_username_battery', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -314,21 +314,21 @@ 'unit_of_measurement': None, }) # --- -# name: test_binary_sensor[load_platforms0][binary_sensor.yale_smart_alarm_battery-state] +# name: test_binary_sensor[load_platforms0][binary_sensor.test_username_battery-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'problem', - 'friendly_name': 'Yale Smart Alarm Battery', + 'friendly_name': 'test-username Battery', }), 'context': , - 'entity_id': 'binary_sensor.yale_smart_alarm_battery', + 'entity_id': 'binary_sensor.test_username_battery', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_binary_sensor[load_platforms0][binary_sensor.yale_smart_alarm_jam-entry] +# name: test_binary_sensor[load_platforms0][binary_sensor.test_username_jam-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -340,7 +340,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.yale_smart_alarm_jam', + 'entity_id': 'binary_sensor.test_username_jam', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -361,21 +361,21 @@ 'unit_of_measurement': None, }) # --- -# name: test_binary_sensor[load_platforms0][binary_sensor.yale_smart_alarm_jam-state] +# name: test_binary_sensor[load_platforms0][binary_sensor.test_username_jam-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'problem', - 'friendly_name': 'Yale Smart Alarm Jam', + 'friendly_name': 'test-username Jam', }), 'context': , - 'entity_id': 'binary_sensor.yale_smart_alarm_jam', + 'entity_id': 'binary_sensor.test_username_jam', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_binary_sensor[load_platforms0][binary_sensor.yale_smart_alarm_power_loss-entry] +# name: test_binary_sensor[load_platforms0][binary_sensor.test_username_power_loss-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -387,7 +387,7 @@ 'disabled_by': None, 
'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.yale_smart_alarm_power_loss', + 'entity_id': 'binary_sensor.test_username_power_loss', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -408,21 +408,21 @@ 'unit_of_measurement': None, }) # --- -# name: test_binary_sensor[load_platforms0][binary_sensor.yale_smart_alarm_power_loss-state] +# name: test_binary_sensor[load_platforms0][binary_sensor.test_username_power_loss-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'problem', - 'friendly_name': 'Yale Smart Alarm Power loss', + 'friendly_name': 'test-username Power loss', }), 'context': , - 'entity_id': 'binary_sensor.yale_smart_alarm_power_loss', + 'entity_id': 'binary_sensor.test_username_power_loss', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_binary_sensor[load_platforms0][binary_sensor.yale_smart_alarm_tamper-entry] +# name: test_binary_sensor[load_platforms0][binary_sensor.test_username_tamper-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -434,7 +434,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.yale_smart_alarm_tamper', + 'entity_id': 'binary_sensor.test_username_tamper', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -455,14 +455,14 @@ 'unit_of_measurement': None, }) # --- -# name: test_binary_sensor[load_platforms0][binary_sensor.yale_smart_alarm_tamper-state] +# name: test_binary_sensor[load_platforms0][binary_sensor.test_username_tamper-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'problem', - 'friendly_name': 'Yale Smart Alarm Tamper', + 'friendly_name': 'test-username Tamper', }), 'context': , - 'entity_id': 'binary_sensor.yale_smart_alarm_tamper', + 'entity_id': 'binary_sensor.test_username_tamper', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/yale_smart_alarm/snapshots/test_button.ambr b/tests/components/yale_smart_alarm/snapshots/test_button.ambr index 8abceb0affa..951caced170 100644 --- a/tests/components/yale_smart_alarm/snapshots/test_button.ambr +++ b/tests/components/yale_smart_alarm/snapshots/test_button.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_button[load_platforms0][button.yale_smart_alarm_panic_button-entry] +# name: test_button[load_platforms0][button.test_username_panic_button-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -11,7 +11,7 @@ 'disabled_by': None, 'domain': 'button', 'entity_category': None, - 'entity_id': 'button.yale_smart_alarm_panic_button', + 'entity_id': 'button.test_username_panic_button', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -32,13 +32,13 @@ 'unit_of_measurement': None, }) # --- -# name: test_button[load_platforms0][button.yale_smart_alarm_panic_button-state] +# name: test_button[load_platforms0][button.test_username_panic_button-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'Yale Smart Alarm Panic button', + 'friendly_name': 'test-username Panic button', }), 'context': , - 'entity_id': 'button.yale_smart_alarm_panic_button', + 'entity_id': 'button.test_username_panic_button', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/yale_smart_alarm/test_button.py b/tests/components/yale_smart_alarm/test_button.py index ad6074345d3..cb28e60ab22 100644 --- a/tests/components/yale_smart_alarm/test_button.py +++ b/tests/components/yale_smart_alarm/test_button.py @@ -37,7 +37,7 @@ async 
def test_button( BUTTON_DOMAIN, SERVICE_PRESS, { - ATTR_ENTITY_ID: "button.yale_smart_alarm_panic_button", + ATTR_ENTITY_ID: "button.test_username_panic_button", }, blocking=True, ) @@ -50,7 +50,7 @@ async def test_button( BUTTON_DOMAIN, SERVICE_PRESS, { - ATTR_ENTITY_ID: "button.yale_smart_alarm_panic_button", + ATTR_ENTITY_ID: "button.test_username_panic_button", }, blocking=True, ) diff --git a/tests/components/yale_smart_alarm/test_config_flow.py b/tests/components/yale_smart_alarm/test_config_flow.py index e5b59f79463..51106751f03 100644 --- a/tests/components/yale_smart_alarm/test_config_flow.py +++ b/tests/components/yale_smart_alarm/test_config_flow.py @@ -2,7 +2,7 @@ from __future__ import annotations -from unittest.mock import patch +from unittest.mock import Mock, patch import pytest from yalesmartalarmclient.exceptions import AuthenticationError, UnknownError @@ -48,7 +48,6 @@ async def test_form(hass: HomeAssistant) -> None: assert result2["data"] == { "username": "test-username", "password": "test-password", - "name": "Yale Smart Alarm", "area_id": "1", } assert len(mock_setup_entry.mock_calls) == 1 @@ -112,7 +111,6 @@ async def test_form_invalid_auth( assert result2["data"] == { "username": "test-username", "password": "test-password", - "name": "Yale Smart Alarm", "area_id": "1", } @@ -120,15 +118,16 @@ async def test_form_invalid_auth( async def test_reauth_flow(hass: HomeAssistant) -> None: """Test a reauthentication flow.""" entry = MockConfigEntry( + title="test-username", domain=DOMAIN, unique_id="test-username", data={ "username": "test-username", "password": "test-password", - "name": "Yale Smart Alarm", "area_id": "1", }, version=2, + minor_version=2, ) entry.add_to_hass(hass) @@ -159,7 +158,6 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: assert entry.data == { "username": "test-username", "password": "new-test-password", - "name": "Yale Smart Alarm", "area_id": "1", } @@ -181,15 +179,16 @@ async def test_reauth_flow_error( ) -> None: """Test a reauthentication flow.""" entry = MockConfigEntry( + title="test-username", domain=DOMAIN, unique_id="test-username", data={ "username": "test-username", "password": "test-password", - "name": "Yale Smart Alarm", "area_id": "1", }, version=2, + minor_version=2, ) entry.add_to_hass(hass) @@ -234,7 +233,6 @@ async def test_reauth_flow_error( assert entry.data == { "username": "test-username", "password": "new-test-password", - "name": "Yale Smart Alarm", "area_id": "1", } @@ -242,15 +240,16 @@ async def test_reauth_flow_error( async def test_reconfigure(hass: HomeAssistant) -> None: """Test reconfigure config flow.""" entry = MockConfigEntry( + title="test-username", domain=DOMAIN, unique_id="test-username", data={ "username": "test-username", "password": "test-password", - "name": "Yale Smart Alarm", "area_id": "1", }, version=2, + minor_version=2, ) entry.add_to_hass(hass) @@ -281,7 +280,6 @@ async def test_reconfigure(hass: HomeAssistant) -> None: assert entry.data == { "username": "test-username", "password": "new-test-password", - "name": "Yale Smart Alarm", "area_id": "2", } @@ -289,27 +287,29 @@ async def test_reconfigure(hass: HomeAssistant) -> None: async def test_reconfigure_username_exist(hass: HomeAssistant) -> None: """Test reconfigure config flow abort other username already exist.""" entry = MockConfigEntry( + title="test-username", domain=DOMAIN, unique_id="test-username", data={ "username": "test-username", "password": "test-password", - "name": "Yale Smart Alarm", "area_id": "1", }, version=2, + 
minor_version=2, ) entry.add_to_hass(hass) entry2 = MockConfigEntry( + title="other-username", domain=DOMAIN, unique_id="other-username", data={ "username": "other-username", "password": "test-password", - "name": "Yale Smart Alarm 2", "area_id": "1", }, version=2, + minor_version=2, ) entry2.add_to_hass(hass) @@ -362,7 +362,6 @@ async def test_reconfigure_username_exist(hass: HomeAssistant) -> None: assert result["reason"] == "reconfigure_successful" assert entry.data == { "username": "other-new-username", - "name": "Yale Smart Alarm", "password": "test-password", "area_id": "1", } @@ -382,15 +381,16 @@ async def test_reconfigure_flow_error( ) -> None: """Test a reauthentication flow.""" entry = MockConfigEntry( + title="test-username", domain=DOMAIN, unique_id="test-username", data={ "username": "test-username", "password": "test-password", - "name": "Yale Smart Alarm", "area_id": "1", }, version=2, + minor_version=2, ) entry.add_to_hass(hass) @@ -438,39 +438,17 @@ async def test_reconfigure_flow_error( assert result["reason"] == "reconfigure_successful" assert entry.data == { "username": "test-username", - "name": "Yale Smart Alarm", "password": "new-test-password", "area_id": "1", } -async def test_options_flow(hass: HomeAssistant) -> None: +async def test_options_flow( + hass: HomeAssistant, + load_config_entry: tuple[MockConfigEntry, Mock], +) -> None: """Test options config flow.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id="test-username", - data={ - "username": "test-username", - "password": "test-password", - "name": "Yale Smart Alarm", - "area_id": "1", - }, - version=2, - ) - entry.add_to_hass(hass) - - with ( - patch( - "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient", - return_value=True, - ), - patch( - "homeassistant.components.yale_smart_alarm.async_setup_entry", - return_value=True, - ), - ): - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() + entry = load_config_entry[0] result = await hass.config_entries.options.async_init(entry.entry_id) diff --git a/tests/components/yale_smart_alarm/test_coordinator.py b/tests/components/yale_smart_alarm/test_coordinator.py index 386e4ad72f7..8d30e8ad21a 100644 --- a/tests/components/yale_smart_alarm/test_coordinator.py +++ b/tests/components/yale_smart_alarm/test_coordinator.py @@ -48,7 +48,8 @@ async def test_coordinator_setup_errors( options=OPTIONS_CONFIG, entry_id="1", unique_id="username", - version=1, + version=2, + minor_version=2, ) config_entry.add_to_hass(hass) @@ -61,7 +62,7 @@ async def test_coordinator_setup_errors( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - state = hass.states.get("alarm_control_panel.yale_smart_alarm") + state = hass.states.get("alarm_control_panel.test_username") assert not state @@ -74,7 +75,7 @@ async def test_coordinator_setup_and_update_errors( client = load_config_entry[1] - state = hass.states.get("alarm_control_panel.yale_smart_alarm") + state = hass.states.get("alarm_control_panel.test_username") assert state.state == AlarmControlPanelState.ARMED_AWAY client.reset_mock() @@ -82,7 +83,7 @@ async def test_coordinator_setup_and_update_errors( async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=1)) await hass.async_block_till_done(wait_background_tasks=True) client.get_information.assert_called_once() - state = hass.states.get("alarm_control_panel.yale_smart_alarm") + state = hass.states.get("alarm_control_panel.test_username") assert 
state.state == STATE_UNAVAILABLE client.reset_mock() @@ -90,7 +91,7 @@ async def test_coordinator_setup_and_update_errors( async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=2)) await hass.async_block_till_done(wait_background_tasks=True) client.get_information.assert_called_once() - state = hass.states.get("alarm_control_panel.yale_smart_alarm") + state = hass.states.get("alarm_control_panel.test_username") assert state.state == STATE_UNAVAILABLE client.reset_mock() @@ -98,7 +99,7 @@ async def test_coordinator_setup_and_update_errors( async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=3)) await hass.async_block_till_done(wait_background_tasks=True) client.get_information.assert_called_once() - state = hass.states.get("alarm_control_panel.yale_smart_alarm") + state = hass.states.get("alarm_control_panel.test_username") assert state.state == STATE_UNAVAILABLE client.reset_mock() @@ -106,7 +107,7 @@ async def test_coordinator_setup_and_update_errors( async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=4)) await hass.async_block_till_done(wait_background_tasks=True) client.get_information.assert_called_once() - state = hass.states.get("alarm_control_panel.yale_smart_alarm") + state = hass.states.get("alarm_control_panel.test_username") assert state.state == STATE_UNAVAILABLE client.reset_mock() @@ -116,7 +117,7 @@ async def test_coordinator_setup_and_update_errors( async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=5)) await hass.async_block_till_done(wait_background_tasks=True) client.get_information.assert_called_once() - state = hass.states.get("alarm_control_panel.yale_smart_alarm") + state = hass.states.get("alarm_control_panel.test_username") assert state.state == AlarmControlPanelState.ARMED_AWAY client.reset_mock() @@ -124,5 +125,5 @@ async def test_coordinator_setup_and_update_errors( async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=6)) await hass.async_block_till_done(wait_background_tasks=True) client.get_information.assert_called_once() - state = hass.states.get("alarm_control_panel.yale_smart_alarm") + state = hass.states.get("alarm_control_panel.test_username") assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/yale_smart_alarm/test_init.py b/tests/components/yale_smart_alarm/test_init.py new file mode 100644 index 00000000000..c499320c29c --- /dev/null +++ b/tests/components/yale_smart_alarm/test_init.py @@ -0,0 +1,99 @@ +"""Test for Yale Smart Alarm component Init.""" + +from __future__ import annotations + +from unittest.mock import Mock, patch + +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN +from homeassistant.components.yale_smart_alarm.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER, ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import ENTRY_CONFIG, OPTIONS_CONFIG + +from tests.common import MockConfigEntry + + +async def test_setup_entry( + hass: HomeAssistant, + get_client: Mock, +) -> None: + """Test setup entry.""" + entry = MockConfigEntry( + title=ENTRY_CONFIG["username"], + domain=DOMAIN, + source=SOURCE_USER, + data=ENTRY_CONFIG, + options=OPTIONS_CONFIG, + entry_id="1", + unique_id="username", + version=2, + minor_version=2, + ) + entry.add_to_hass(hass) + + with patch( + "homeassistant.components.yale_smart_alarm.coordinator.YaleSmartAlarmClient", + return_value=get_client, + ): + await 
hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(entry.entry_id) + assert entry.state is ConfigEntryState.NOT_LOADED + + +async def test_migrate_entry( + hass: HomeAssistant, + get_client: Mock, + entity_registry: er.EntityRegistry, +) -> None: + """Test migrate entry unique id.""" + config = { + "username": "test-username", + "password": "new-test-password", + "name": "Yale Smart Alarm", + "area_id": "1", + } + options = {"lock_code_digits": 6, "code": "123456"} + entry = MockConfigEntry( + title=ENTRY_CONFIG["username"], + domain=DOMAIN, + source=SOURCE_USER, + data=config, + options=options, + entry_id="1", + unique_id="username", + version=1, + minor_version=1, + ) + entry.add_to_hass(hass) + lock = entity_registry.async_get_or_create( + LOCK_DOMAIN, + DOMAIN, + "1111", + config_entry=entry, + has_entity_name=True, + original_name="Device1", + ) + + with patch( + "homeassistant.components.yale_smart_alarm.coordinator.YaleSmartAlarmClient", + return_value=get_client, + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state is ConfigEntryState.LOADED + assert entry.version == 2 + assert entry.minor_version == 2 + assert entry.data == ENTRY_CONFIG + assert entry.options == OPTIONS_CONFIG + + lock_entity_id = entity_registry.async_get_entity_id(LOCK_DOMAIN, DOMAIN, "1111") + lock = entity_registry.async_get(lock_entity_id) + + assert lock.options == {"lock": {"default_code": "123456"}} From d26d483a2f503147255c5d77495d335c911ba5e4 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 6 Dec 2024 21:06:56 +0100 Subject: [PATCH 0324/1198] Improve recorder util resolve_period (#132264) --- homeassistant/components/recorder/util.py | 13 ++++++------- tests/components/recorder/test_util.py | 21 +++++++++++++++++++++ 2 files changed, 27 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/recorder/util.py b/homeassistant/components/recorder/util.py index 125b354211e..2e7ac0c092d 100644 --- a/homeassistant/components/recorder/util.py +++ b/homeassistant/components/recorder/util.py @@ -892,15 +892,14 @@ def resolve_period( start_time += timedelta(days=cal_offset * 7) end_time = start_time + timedelta(weeks=1) elif calendar_period == "month": - start_time = start_of_day.replace(day=28) - # This works for up to 48 months of offset - start_time = (start_time + timedelta(days=cal_offset * 31)).replace(day=1) + month_now = start_of_day.month + new_month = (month_now - 1 + cal_offset) % 12 + 1 + new_year = start_of_day.year + (month_now - 1 + cal_offset) // 12 + start_time = start_of_day.replace(year=new_year, month=new_month, day=1) end_time = (start_time + timedelta(days=31)).replace(day=1) else: # calendar_period = "year" - start_time = start_of_day.replace(month=12, day=31) - # This works for 100+ years of offset - start_time = (start_time + timedelta(days=cal_offset * 366)).replace( - month=1, day=1 + start_time = start_of_day.replace( + year=start_of_day.year + cal_offset, month=1, day=1 ) end_time = (start_time + timedelta(days=366)).replace(day=1) diff --git a/tests/components/recorder/test_util.py b/tests/components/recorder/test_util.py index 2514c38e105..99bd5083489 100644 --- a/tests/components/recorder/test_util.py +++ b/tests/components/recorder/test_util.py @@ -1062,14 +1062,25 @@ async def test_execute_stmt_lambda_element( { ("hour", 0): ("2022-10-21T07:00:00", 
"2022-10-21T08:00:00"), ("hour", -1): ("2022-10-21T06:00:00", "2022-10-21T07:00:00"), + ("hour", 1): ("2022-10-21T08:00:00", "2022-10-21T09:00:00"), ("day", 0): ("2022-10-21T07:00:00", "2022-10-22T07:00:00"), ("day", -1): ("2022-10-20T07:00:00", "2022-10-21T07:00:00"), + ("day", 1): ("2022-10-22T07:00:00", "2022-10-23T07:00:00"), ("week", 0): ("2022-10-17T07:00:00", "2022-10-24T07:00:00"), ("week", -1): ("2022-10-10T07:00:00", "2022-10-17T07:00:00"), + ("week", 1): ("2022-10-24T07:00:00", "2022-10-31T07:00:00"), ("month", 0): ("2022-10-01T07:00:00", "2022-11-01T07:00:00"), ("month", -1): ("2022-09-01T07:00:00", "2022-10-01T07:00:00"), + ("month", -12): ("2021-10-01T07:00:00", "2021-11-01T07:00:00"), + ("month", 1): ("2022-11-01T07:00:00", "2022-12-01T08:00:00"), + ("month", 2): ("2022-12-01T08:00:00", "2023-01-01T08:00:00"), + ("month", 3): ("2023-01-01T08:00:00", "2023-02-01T08:00:00"), + ("month", 12): ("2023-10-01T07:00:00", "2023-11-01T07:00:00"), + ("month", 13): ("2023-11-01T07:00:00", "2023-12-01T08:00:00"), + ("month", 14): ("2023-12-01T08:00:00", "2024-01-01T08:00:00"), ("year", 0): ("2022-01-01T08:00:00", "2023-01-01T08:00:00"), ("year", -1): ("2021-01-01T08:00:00", "2022-01-01T08:00:00"), + ("year", 1): ("2023-01-01T08:00:00", "2024-01-01T08:00:00"), }, ), ( @@ -1078,14 +1089,24 @@ async def test_execute_stmt_lambda_element( { ("hour", 0): ("2024-02-28T08:00:00", "2024-02-28T09:00:00"), ("hour", -1): ("2024-02-28T07:00:00", "2024-02-28T08:00:00"), + ("hour", 1): ("2024-02-28T09:00:00", "2024-02-28T10:00:00"), ("day", 0): ("2024-02-28T08:00:00", "2024-02-29T08:00:00"), ("day", -1): ("2024-02-27T08:00:00", "2024-02-28T08:00:00"), + ("day", 1): ("2024-02-29T08:00:00", "2024-03-01T08:00:00"), ("week", 0): ("2024-02-26T08:00:00", "2024-03-04T08:00:00"), ("week", -1): ("2024-02-19T08:00:00", "2024-02-26T08:00:00"), + ("week", 1): ("2024-03-04T08:00:00", "2024-03-11T07:00:00"), ("month", 0): ("2024-02-01T08:00:00", "2024-03-01T08:00:00"), ("month", -1): ("2024-01-01T08:00:00", "2024-02-01T08:00:00"), + ("month", -2): ("2023-12-01T08:00:00", "2024-01-01T08:00:00"), + ("month", -3): ("2023-11-01T07:00:00", "2023-12-01T08:00:00"), + ("month", -12): ("2023-02-01T08:00:00", "2023-03-01T08:00:00"), + ("month", -13): ("2023-01-01T08:00:00", "2023-02-01T08:00:00"), + ("month", -14): ("2022-12-01T08:00:00", "2023-01-01T08:00:00"), + ("month", 1): ("2024-03-01T08:00:00", "2024-04-01T07:00:00"), ("year", 0): ("2024-01-01T08:00:00", "2025-01-01T08:00:00"), ("year", -1): ("2023-01-01T08:00:00", "2024-01-01T08:00:00"), + ("year", 1): ("2025-01-01T08:00:00", "2026-01-01T08:00:00"), }, ), ], From 552613d9492e3f96d5d47e8486b77e6637fdb603 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 6 Dec 2024 21:08:08 +0100 Subject: [PATCH 0325/1198] Remove support for live recorder data migration of event type IDs (#131826) --- .../components/recorder/migration.py | 13 +- homeassistant/components/recorder/purge.py | 3 +- .../recorder/table_managers/event_types.py | 2 - .../recorder/test_migration_from_schema_32.py | 117 +++++++++++++----- tests/components/recorder/test_purge.py | 2 - 5 files changed, 86 insertions(+), 51 deletions(-) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 750b4adc563..ec9d290049f 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -2508,15 +2508,11 @@ class EventsContextIDMigration(BaseMigrationWithQuery, BaseOffLineMigration): return 
has_events_context_ids_to_migrate() -class EventTypeIDMigration(BaseMigrationWithQuery, BaseRunTimeMigration): +class EventTypeIDMigration(BaseMigrationWithQuery, BaseOffLineMigration): """Migration to migrate event_type to event_type_ids.""" required_schema_version = EVENT_TYPE_IDS_SCHEMA_VERSION migration_id = "event_type_id_migration" - task = CommitBeforeMigrationTask - # We have to commit before to make sure there are - # no new pending event_types about to be added to - # the db since this happens live def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: """Migrate event_type to event_type_ids, return True if completed.""" @@ -2576,11 +2572,6 @@ class EventTypeIDMigration(BaseMigrationWithQuery, BaseRunTimeMigration): _LOGGER.debug("Migrating event_types done=%s", is_done) return DataMigrationStatus(needs_migrate=not is_done, migration_done=is_done) - def migration_done(self, instance: Recorder, session: Session) -> None: - """Will be called after migrate returns True.""" - _LOGGER.debug("Activating event_types manager as all data is migrated") - instance.event_type_manager.active = True - def needs_migrate_query(self) -> StatementLambdaElement: """Check if the data is migrated.""" return has_event_type_to_migrate() @@ -2770,11 +2761,11 @@ class EntityIDPostMigration(BaseMigrationWithQuery, BaseRunTimeMigration): NON_LIVE_DATA_MIGRATORS = ( StatesContextIDMigration, # Introduced in HA Core 2023.4 EventsContextIDMigration, # Introduced in HA Core 2023.4 + EventTypeIDMigration, # Introduced in HA Core 2023.4 by PR #89465 EntityIDMigration, # Introduced in HA Core 2023.4 by PR #89557 ) LIVE_DATA_MIGRATORS = ( - EventTypeIDMigration, # Introduced in HA Core 2023.4 by PR #89465 EventIDPostMigration, # Introduced in HA Core 2023.4 by PR #89901 EntityIDPostMigration, # Introduced in HA Core 2023.4 by PR #89557 ) diff --git a/homeassistant/components/recorder/purge.py b/homeassistant/components/recorder/purge.py index 28a5a2ed32d..11f5accc978 100644 --- a/homeassistant/components/recorder/purge.py +++ b/homeassistant/components/recorder/purge.py @@ -116,8 +116,7 @@ def purge_old_data( # This purge cycle is finished, clean up old event types and # recorder runs - if instance.event_type_manager.active: - _purge_old_event_types(instance, session) + _purge_old_event_types(instance, session) if instance.states_meta_manager.active: _purge_old_entity_ids(instance, session) diff --git a/homeassistant/components/recorder/table_managers/event_types.py b/homeassistant/components/recorder/table_managers/event_types.py index 81bddce948d..266c970fe1f 100644 --- a/homeassistant/components/recorder/table_managers/event_types.py +++ b/homeassistant/components/recorder/table_managers/event_types.py @@ -28,8 +28,6 @@ CACHE_SIZE = 2048 class EventTypeManager(BaseLRUTableManager[EventTypes]): """Manage the EventTypes table.""" - active = False - def __init__(self, recorder: Recorder) -> None: """Initialize the event type manager.""" super().__init__(recorder, CACHE_SIZE) diff --git a/tests/components/recorder/test_migration_from_schema_32.py b/tests/components/recorder/test_migration_from_schema_32.py index e77fae7ffad..e42cd22e952 100644 --- a/tests/components/recorder/test_migration_from_schema_32.py +++ b/tests/components/recorder/test_migration_from_schema_32.py @@ -824,13 +824,13 @@ async def test_finish_migrate_states_context_ids( await hass.async_block_till_done() +@pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.parametrize("enable_migrate_event_type_ids", [True]) 
-@pytest.mark.usefixtures("db_schema_32") +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_event_type_ids( - hass: HomeAssistant, recorder_mock: Recorder + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Test we can migrate event_types to the EventTypes table.""" - await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE_32) old_db_schema = sys.modules[SCHEMA_MODULE_32] @@ -856,14 +856,24 @@ async def test_migrate_event_type_ids( ) ) - await recorder_mock.async_add_executor_job(_insert_events) + # Create database with old schema + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.EventTypeIDMigration, "migrate_data"), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await instance.async_add_executor_job(_insert_events) - await async_wait_recording_done(hass) - # This is a threadsafe way to add a task to the recorder - migrator = migration.EventTypeIDMigration(None, None) - recorder_mock.queue_task(migrator.task(migrator)) - await _async_wait_migration_done(hass) - await _async_wait_migration_done(hass) + await async_wait_recording_done(hass) + await _async_wait_migration_done(hass) + + await hass.async_stop() + await hass.async_block_till_done() def _fetch_migrated_events(): with session_scope(hass=hass, read_only=True) as session: @@ -894,23 +904,38 @@ async def test_migrate_event_type_ids( ) return result - events_by_type = await recorder_mock.async_add_executor_job(_fetch_migrated_events) - assert len(events_by_type["event_type_one"]) == 2 - assert len(events_by_type["event_type_two"]) == 1 - def _get_many(): with session_scope(hass=hass, read_only=True) as session: - return recorder_mock.event_type_manager.get_many( + return instance.event_type_manager.get_many( ("event_type_one", "event_type_two"), session ) - mapped = await recorder_mock.async_add_executor_job(_get_many) + # Run again with new schema, let migration run + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + events_by_type = await instance.async_add_executor_job(_fetch_migrated_events) + mapped = await instance.async_add_executor_job(_get_many) + migration_changes = await instance.async_add_executor_job( + _get_migration_id, hass + ) + + await hass.async_stop() + await hass.async_block_till_done() + + assert len(events_by_type["event_type_one"]) == 2 + assert len(events_by_type["event_type_two"]) == 1 + assert mapped["event_type_one"] is not None assert mapped["event_type_two"] is not None - migration_changes = await recorder_mock.async_add_executor_job( - _get_migration_id, hass - ) assert ( migration_changes[migration.EventTypeIDMigration.migration_id] == migration.EventTypeIDMigration.migration_version @@ -1214,13 +1239,13 @@ async def test_migrate_null_entity_ids( ) +@pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.parametrize("enable_migrate_event_type_ids", [True]) -@pytest.mark.usefixtures("db_schema_32") +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_null_event_type_ids( - hass: 
HomeAssistant, recorder_mock: Recorder + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Test we can migrate event_types to the EventTypes table when the event_type is NULL.""" - await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE_32) old_db_schema = sys.modules[SCHEMA_MODULE_32] @@ -1249,14 +1274,24 @@ async def test_migrate_null_event_type_ids( ), ) - await recorder_mock.async_add_executor_job(_insert_events) + # Create database with old schema + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.EventTypeIDMigration, "migrate_data"), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await instance.async_add_executor_job(_insert_events) - await _async_wait_migration_done(hass) - # This is a threadsafe way to add a task to the recorder - migrator = migration.EventTypeIDMigration(None, None) - recorder_mock.queue_task(migrator.task(migrator)) - await _async_wait_migration_done(hass) - await _async_wait_migration_done(hass) + await async_wait_recording_done(hass) + await _async_wait_migration_done(hass) + + await hass.async_stop() + await hass.async_block_till_done() def _fetch_migrated_events(): with session_scope(hass=hass, read_only=True) as session: @@ -1287,15 +1322,29 @@ async def test_migrate_null_event_type_ids( ) return result - events_by_type = await recorder_mock.async_add_executor_job(_fetch_migrated_events) - assert len(events_by_type["event_type_one"]) == 2 - assert len(events_by_type[migration._EMPTY_EVENT_TYPE]) == 1000 - def _get_migration_id(): with session_scope(hass=hass, read_only=True) as session: return dict(execute_stmt_lambda_element(session, get_migration_changes())) - migration_changes = await recorder_mock.async_add_executor_job(_get_migration_id) + # Run again with new schema, let migration run + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + events_by_type = await instance.async_add_executor_job(_fetch_migrated_events) + migration_changes = await instance.async_add_executor_job(_get_migration_id) + + await hass.async_stop() + await hass.async_block_till_done() + + assert len(events_by_type["event_type_one"]) == 2 + assert len(events_by_type[migration._EMPTY_EVENT_TYPE]) == 1000 assert ( migration_changes[migration.EventTypeIDMigration.migration_id] == migration.EventTypeIDMigration.migration_version diff --git a/tests/components/recorder/test_purge.py b/tests/components/recorder/test_purge.py index 076f6ae8bab..c3ff5027b70 100644 --- a/tests/components/recorder/test_purge.py +++ b/tests/components/recorder/test_purge.py @@ -1930,8 +1930,6 @@ async def test_purge_old_events_purges_the_event_type_ids( hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test deleting old events purges event type ids.""" - assert recorder_mock.event_type_manager.active is True - utcnow = dt_util.utcnow() five_days_ago = utcnow - timedelta(days=5) eleven_days_ago = utcnow - timedelta(days=11) From 3fb1b8e79ae91342de5516bd94187da3b43afc36 Mon Sep 17 00:00:00 2001 From: Erwin Douna Date: Fri, 6 Dec 2024 21:13:26 +0100 Subject: [PATCH 0326/1198] Fix PyTado dependency (#132510) --- 
homeassistant/components/tado/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/tado/manifest.json b/homeassistant/components/tado/manifest.json index 652d51f0261..b0c00c888b7 100644 --- a/homeassistant/components/tado/manifest.json +++ b/homeassistant/components/tado/manifest.json @@ -14,5 +14,5 @@ }, "iot_class": "cloud_polling", "loggers": ["PyTado"], - "requirements": ["python-tado==0.17.7"] + "requirements": ["python-tado==0.17.6"] } diff --git a/requirements_all.txt b/requirements_all.txt index 4185c4be60c..fbd865dab6d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2411,7 +2411,7 @@ python-smarttub==0.0.38 python-songpal==0.16.2 # homeassistant.components.tado -python-tado==0.17.7 +python-tado==0.17.6 # homeassistant.components.technove python-technove==1.3.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 46d84f17fe0..839e0849a41 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1935,7 +1935,7 @@ python-smarttub==0.0.38 python-songpal==0.16.2 # homeassistant.components.tado -python-tado==0.17.7 +python-tado==0.17.6 # homeassistant.components.technove python-technove==1.3.1 From 4fe8a43cc9c75cb47afcc9f976cbeba38359ad12 Mon Sep 17 00:00:00 2001 From: Jan Rieger <271149+jrieger@users.noreply.github.com> Date: Fri, 6 Dec 2024 21:23:45 +0100 Subject: [PATCH 0327/1198] Remove native_unit_of_measurement from Onewire counters (#132076) --- homeassistant/components/onewire/sensor.py | 1 - tests/components/onewire/snapshots/test_sensor.ambr | 12 ++++-------- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/onewire/sensor.py b/homeassistant/components/onewire/sensor.py index c9030cab8ea..2dca53af1cf 100644 --- a/homeassistant/components/onewire/sensor.py +++ b/homeassistant/components/onewire/sensor.py @@ -233,7 +233,6 @@ DEVICE_SENSORS: dict[str, tuple[OneWireSensorEntityDescription, ...]] = { "1D": tuple( OneWireSensorEntityDescription( key=f"counter.{device_key}", - native_unit_of_measurement="count", read_mode=READ_MODE_INT, state_class=SensorStateClass.TOTAL_INCREASING, translation_key="counter_id", diff --git a/tests/components/onewire/snapshots/test_sensor.ambr b/tests/components/onewire/snapshots/test_sensor.ambr index 5ad4cf2ef4b..261b081060c 100644 --- a/tests/components/onewire/snapshots/test_sensor.ambr +++ b/tests/components/onewire/snapshots/test_sensor.ambr @@ -363,7 +363,7 @@ 'supported_features': 0, 'translation_key': 'counter_id', 'unique_id': '/1D.111111111111/counter.A', - 'unit_of_measurement': 'count', + 'unit_of_measurement': None, }), EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -396,7 +396,7 @@ 'supported_features': 0, 'translation_key': 'counter_id', 'unique_id': '/1D.111111111111/counter.B', - 'unit_of_measurement': 'count', + 'unit_of_measurement': None, }), ]) # --- @@ -408,7 +408,6 @@ 'friendly_name': '1D.111111111111 Counter A', 'raw_value': 251123.0, 'state_class': , - 'unit_of_measurement': 'count', }), 'context': , 'entity_id': 'sensor.1d_111111111111_counter_a', @@ -423,7 +422,6 @@ 'friendly_name': '1D.111111111111 Counter B', 'raw_value': 248125.0, 'state_class': , - 'unit_of_measurement': 'count', }), 'context': , 'entity_id': 'sensor.1d_111111111111_counter_b', @@ -531,7 +529,7 @@ 'supported_features': 0, 'translation_key': 'counter_id', 'unique_id': '/1D.111111111111/counter.A', - 'unit_of_measurement': 'count', + 
'unit_of_measurement': None, }), EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -564,7 +562,7 @@ 'supported_features': 0, 'translation_key': 'counter_id', 'unique_id': '/1D.111111111111/counter.B', - 'unit_of_measurement': 'count', + 'unit_of_measurement': None, }), ]) # --- @@ -576,7 +574,6 @@ 'friendly_name': '1D.111111111111 Counter A', 'raw_value': 251123.0, 'state_class': , - 'unit_of_measurement': 'count', }), 'context': , 'entity_id': 'sensor.1d_111111111111_counter_a', @@ -591,7 +588,6 @@ 'friendly_name': '1D.111111111111 Counter B', 'raw_value': 248125.0, 'state_class': , - 'unit_of_measurement': 'count', }), 'context': , 'entity_id': 'sensor.1d_111111111111_counter_b', From f02989e631f747694a001a67989290406dfc8c51 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Fri, 6 Dec 2024 21:54:01 +0100 Subject: [PATCH 0328/1198] Removes previously deprecated simulated integration (#132111) --- .../components/simulated/__init__.py | 1 - .../components/simulated/manifest.json | 8 - homeassistant/components/simulated/sensor.py | 175 ------------------ .../components/simulated/strings.json | 8 - homeassistant/generated/integrations.json | 6 - tests/components/simulated/__init__.py | 1 - tests/components/simulated/test_sensor.py | 50 ----- 7 files changed, 249 deletions(-) delete mode 100644 homeassistant/components/simulated/__init__.py delete mode 100644 homeassistant/components/simulated/manifest.json delete mode 100644 homeassistant/components/simulated/sensor.py delete mode 100644 homeassistant/components/simulated/strings.json delete mode 100644 tests/components/simulated/__init__.py delete mode 100644 tests/components/simulated/test_sensor.py diff --git a/homeassistant/components/simulated/__init__.py b/homeassistant/components/simulated/__init__.py deleted file mode 100644 index 35c6d106d03..00000000000 --- a/homeassistant/components/simulated/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""The simulated component.""" diff --git a/homeassistant/components/simulated/manifest.json b/homeassistant/components/simulated/manifest.json deleted file mode 100644 index e76bf142086..00000000000 --- a/homeassistant/components/simulated/manifest.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "domain": "simulated", - "name": "Simulated", - "codeowners": [], - "documentation": "https://www.home-assistant.io/integrations/simulated", - "iot_class": "local_polling", - "quality_scale": "internal" -} diff --git a/homeassistant/components/simulated/sensor.py b/homeassistant/components/simulated/sensor.py deleted file mode 100644 index 22ce4bd7cea..00000000000 --- a/homeassistant/components/simulated/sensor.py +++ /dev/null @@ -1,175 +0,0 @@ -"""Adds a simulated sensor.""" - -from __future__ import annotations - -from datetime import datetime -import math -from random import Random - -import voluptuous as vol - -from homeassistant.components.sensor import ( - PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, - SensorEntity, -) -from homeassistant.const import CONF_NAME -from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -import homeassistant.util.dt as dt_util - -CONF_AMP = "amplitude" -CONF_FWHM = "spread" -CONF_MEAN = "mean" -CONF_PERIOD = "period" -CONF_PHASE = "phase" -CONF_SEED = "seed" -CONF_UNIT = "unit" -CONF_RELATIVE_TO_EPOCH = "relative_to_epoch" - -DEFAULT_AMP = 1 
-DEFAULT_FWHM = 0 -DEFAULT_MEAN = 0 -DEFAULT_NAME = "simulated" -DEFAULT_PERIOD = 60 -DEFAULT_PHASE = 0 -DEFAULT_SEED = 999 -DEFAULT_UNIT = "value" -DEFAULT_RELATIVE_TO_EPOCH = True - -DOMAIN = "simulated" - -PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( - { - vol.Optional(CONF_AMP, default=DEFAULT_AMP): vol.Coerce(float), - vol.Optional(CONF_FWHM, default=DEFAULT_FWHM): vol.Coerce(float), - vol.Optional(CONF_MEAN, default=DEFAULT_MEAN): vol.Coerce(float), - vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - vol.Optional(CONF_PERIOD, default=DEFAULT_PERIOD): cv.positive_int, - vol.Optional(CONF_PHASE, default=DEFAULT_PHASE): vol.Coerce(float), - vol.Optional(CONF_SEED, default=DEFAULT_SEED): cv.positive_int, - vol.Optional(CONF_UNIT, default=DEFAULT_UNIT): cv.string, - vol.Optional( - CONF_RELATIVE_TO_EPOCH, default=DEFAULT_RELATIVE_TO_EPOCH - ): cv.boolean, - } -) - - -async def async_setup_platform( - hass: HomeAssistant, - config: ConfigType, - async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up the simulated sensor.""" - # Simulated has been deprecated and will be removed in 2025.1 - - ir.async_create_issue( - hass, - DOMAIN, - DOMAIN, - breaks_in_ha_version="2025.1.0", - is_fixable=False, - severity=ir.IssueSeverity.WARNING, - translation_key="simulated_deprecation", - translation_placeholders={"integration": DOMAIN}, - learn_more_url="https://www.home-assistant.io/integrations/simulated", - ) - - name = config.get(CONF_NAME) - unit = config.get(CONF_UNIT) - amp = config.get(CONF_AMP) - mean = config.get(CONF_MEAN) - period = config.get(CONF_PERIOD) - phase = config.get(CONF_PHASE) - fwhm = config.get(CONF_FWHM) - seed = config.get(CONF_SEED) - relative_to_epoch = config.get(CONF_RELATIVE_TO_EPOCH) - - sensor = SimulatedSensor( - name, unit, amp, mean, period, phase, fwhm, seed, relative_to_epoch - ) - async_add_entities([sensor], True) - - -class SimulatedSensor(SensorEntity): - """Class for simulated sensor.""" - - _attr_icon = "mdi:chart-line" - - def __init__( - self, name, unit, amp, mean, period, phase, fwhm, seed, relative_to_epoch - ): - """Init the class.""" - self._name = name - self._unit = unit - self._amp = amp - self._mean = mean - self._period = period - self._phase = phase # phase in degrees - self._fwhm = fwhm - self._seed = seed - self._random = Random(seed) # A local seeded Random - self._start_time = ( - datetime(1970, 1, 1, tzinfo=dt_util.UTC) - if relative_to_epoch - else dt_util.utcnow() - ) - self._relative_to_epoch = relative_to_epoch - self._state = None - - def time_delta(self): - """Return the time delta.""" - dt0 = self._start_time - dt1 = dt_util.utcnow() - return dt1 - dt0 - - def signal_calc(self): - """Calculate the signal.""" - mean = self._mean - amp = self._amp - time_delta = self.time_delta().total_seconds() * 1e6 # to milliseconds - period = self._period * 1e6 # to milliseconds - fwhm = self._fwhm / 2 - phase = math.radians(self._phase) - if period == 0: - periodic = 0 - else: - periodic = amp * (math.sin((2 * math.pi * time_delta / period) + phase)) - noise = self._random.gauss(mu=0, sigma=fwhm) - return round(mean + periodic + noise, 3) - - async def async_update(self) -> None: - """Update the sensor.""" - self._state = self.signal_calc() - - @property - def name(self): - """Return the name of the sensor.""" - return self._name - - @property - def native_value(self): - """Return the state of the sensor.""" - return self._state - - @property - def 
native_unit_of_measurement(self): - """Return the unit this state is expressed in.""" - return self._unit - - @property - def extra_state_attributes(self): - """Return other details about the sensor state.""" - return { - "amplitude": self._amp, - "mean": self._mean, - "period": self._period, - "phase": self._phase, - "spread": self._fwhm, - "seed": self._seed, - "relative_to_epoch": self._relative_to_epoch, - } diff --git a/homeassistant/components/simulated/strings.json b/homeassistant/components/simulated/strings.json deleted file mode 100644 index d25a84f48a5..00000000000 --- a/homeassistant/components/simulated/strings.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "issues": { - "simulated_deprecation": { - "description": "The {integration} integration is deprecated", - "title": "The {integration} integration has been deprecated and will be removed in 2025.1. Please remove the {integration} from your configuration.yaml settings and restart Home Assistant to fix this issue." - } - } -} diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index c87218cb1b1..9494ab2e201 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -5599,12 +5599,6 @@ "integration_type": "virtual", "supported_by": "overkiz" }, - "simulated": { - "name": "Simulated", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling" - }, "sinch": { "name": "Sinch SMS", "integration_type": "hub", diff --git a/tests/components/simulated/__init__.py b/tests/components/simulated/__init__.py deleted file mode 100644 index 501fbab603a..00000000000 --- a/tests/components/simulated/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the simulated component.""" diff --git a/tests/components/simulated/test_sensor.py b/tests/components/simulated/test_sensor.py deleted file mode 100644 index b167147367a..00000000000 --- a/tests/components/simulated/test_sensor.py +++ /dev/null @@ -1,50 +0,0 @@ -"""The tests for the simulated sensor.""" - -from homeassistant.components.simulated.sensor import ( - CONF_AMP, - CONF_FWHM, - CONF_MEAN, - CONF_PERIOD, - CONF_PHASE, - CONF_RELATIVE_TO_EPOCH, - CONF_SEED, - CONF_UNIT, - DEFAULT_AMP, - DEFAULT_FWHM, - DEFAULT_MEAN, - DEFAULT_NAME, - DEFAULT_PHASE, - DEFAULT_RELATIVE_TO_EPOCH, - DEFAULT_SEED, - DOMAIN, -) -from homeassistant.const import CONF_FRIENDLY_NAME -from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir -from homeassistant.setup import async_setup_component - - -async def test_simulated_sensor_default_config( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test default config.""" - config = {"sensor": {"platform": "simulated"}} - assert await async_setup_component(hass, "sensor", config) - await hass.async_block_till_done() - - assert len(hass.states.async_entity_ids()) == 1 - state = hass.states.get("sensor.simulated") - - assert state.attributes.get(CONF_FRIENDLY_NAME) == DEFAULT_NAME - assert state.attributes.get(CONF_AMP) == DEFAULT_AMP - assert state.attributes.get(CONF_UNIT) is None - assert state.attributes.get(CONF_MEAN) == DEFAULT_MEAN - assert state.attributes.get(CONF_PERIOD) == 60.0 - assert state.attributes.get(CONF_PHASE) == DEFAULT_PHASE - assert state.attributes.get(CONF_FWHM) == DEFAULT_FWHM - assert state.attributes.get(CONF_SEED) == DEFAULT_SEED - assert state.attributes.get(CONF_RELATIVE_TO_EPOCH) == DEFAULT_RELATIVE_TO_EPOCH - - issue = issue_registry.async_get_issue(DOMAIN, 
DOMAIN) - assert issue.issue_id == DOMAIN - assert issue.translation_key == "simulated_deprecation" From 5bae000db566a5a446c6e247affd569960ca6685 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 6 Dec 2024 15:05:27 -0600 Subject: [PATCH 0329/1198] Bump pycups to 2.0.4 (#132514) --- homeassistant/components/cups/manifest.json | 2 +- requirements_all.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/cups/manifest.json b/homeassistant/components/cups/manifest.json index c4aa596f01e..c8f19236ce7 100644 --- a/homeassistant/components/cups/manifest.json +++ b/homeassistant/components/cups/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/cups", "iot_class": "local_polling", "quality_scale": "legacy", - "requirements": ["pycups==1.9.73"] + "requirements": ["pycups==2.0.4"] } diff --git a/requirements_all.txt b/requirements_all.txt index fbd865dab6d..e681eafea60 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1832,7 +1832,7 @@ pycountry==24.6.1 pycsspeechtts==1.0.8 # homeassistant.components.cups -# pycups==1.9.73 +# pycups==2.0.4 # homeassistant.components.daikin pydaikin==2.13.7 From 12be82fdbc5aac5839ebedc4a11300efe9735902 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 6 Dec 2024 22:40:29 +0100 Subject: [PATCH 0330/1198] Add parallel-updates rule to quality_scale validation (#132041) --- .../components/acaia/binary_sensor.py | 3 ++ homeassistant/components/acaia/sensor.py | 3 ++ .../components/elgato/quality_scale.yaml | 5 ++- homeassistant/components/elgato/sensor.py | 3 ++ .../husqvarna_automower/binary_sensor.py | 2 ++ .../husqvarna_automower/calendar.py | 2 ++ .../husqvarna_automower/device_tracker.py | 3 ++ .../components/husqvarna_automower/sensor.py | 2 ++ homeassistant/components/imap/sensor.py | 3 ++ homeassistant/components/iron_os/sensor.py | 3 ++ .../components/ista_ecotrend/sensor.py | 2 ++ .../components/lamarzocco/binary_sensor.py | 3 ++ .../components/lamarzocco/calendar.py | 3 ++ homeassistant/components/lamarzocco/sensor.py | 3 ++ .../components/mastodon/quality_scale.yaml | 5 ++- homeassistant/components/mastodon/sensor.py | 3 ++ .../components/tedee/binary_sensor.py | 3 ++ homeassistant/components/tedee/sensor.py | 3 ++ script/hassfest/quality_scale.py | 3 +- .../parallel_updates.py | 35 +++++++++++++++++++ 20 files changed, 89 insertions(+), 3 deletions(-) create mode 100644 script/hassfest/quality_scale_validation/parallel_updates.py diff --git a/homeassistant/components/acaia/binary_sensor.py b/homeassistant/components/acaia/binary_sensor.py index 9aa4b92e932..ecb7ac06eb5 100644 --- a/homeassistant/components/acaia/binary_sensor.py +++ b/homeassistant/components/acaia/binary_sensor.py @@ -16,6 +16,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .coordinator import AcaiaConfigEntry from .entity import AcaiaEntity +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(kw_only=True, frozen=True) class AcaiaBinarySensorEntityDescription(BinarySensorEntityDescription): diff --git a/homeassistant/components/acaia/sensor.py b/homeassistant/components/acaia/sensor.py index 6e6ce6afcb8..7ba44958eca 100644 --- a/homeassistant/components/acaia/sensor.py +++ b/homeassistant/components/acaia/sensor.py @@ -21,6 +21,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .coordinator import AcaiaConfigEntry from .entity import AcaiaEntity +# 
Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(kw_only=True, frozen=True) class AcaiaSensorEntityDescription(SensorEntityDescription): diff --git a/homeassistant/components/elgato/quality_scale.yaml b/homeassistant/components/elgato/quality_scale.yaml index 2910bdb4473..301d00931d2 100644 --- a/homeassistant/components/elgato/quality_scale.yaml +++ b/homeassistant/components/elgato/quality_scale.yaml @@ -33,7 +33,10 @@ rules: entity-unavailable: done integration-owner: done log-when-unavailable: done - parallel-updates: done + parallel-updates: + status: todo + comment: | + Does not set parallel-updates on button/switch action calls. reauthentication-flow: status: exempt comment: | diff --git a/homeassistant/components/elgato/sensor.py b/homeassistant/components/elgato/sensor.py index f794d26cf7f..a28ee01f505 100644 --- a/homeassistant/components/elgato/sensor.py +++ b/homeassistant/components/elgato/sensor.py @@ -25,6 +25,9 @@ from . import ElgatorConfigEntry from .coordinator import ElgatoData, ElgatoDataUpdateCoordinator from .entity import ElgatoEntity +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class ElgatoSensorEntityDescription(SensorEntityDescription): diff --git a/homeassistant/components/husqvarna_automower/binary_sensor.py b/homeassistant/components/husqvarna_automower/binary_sensor.py index f8b8f155458..3c23da76797 100644 --- a/homeassistant/components/husqvarna_automower/binary_sensor.py +++ b/homeassistant/components/husqvarna_automower/binary_sensor.py @@ -30,6 +30,8 @@ from .coordinator import AutomowerDataUpdateCoordinator from .entity import AutomowerBaseEntity _LOGGER = logging.getLogger(__name__) +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 def entity_used_in(hass: HomeAssistant, entity_id: str) -> list[str]: diff --git a/homeassistant/components/husqvarna_automower/calendar.py b/homeassistant/components/husqvarna_automower/calendar.py index d4162af0c5c..f3e82fde5d4 100644 --- a/homeassistant/components/husqvarna_automower/calendar.py +++ b/homeassistant/components/husqvarna_automower/calendar.py @@ -15,6 +15,8 @@ from .coordinator import AutomowerDataUpdateCoordinator from .entity import AutomowerBaseEntity _LOGGER = logging.getLogger(__name__) +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 async def async_setup_entry( diff --git a/homeassistant/components/husqvarna_automower/device_tracker.py b/homeassistant/components/husqvarna_automower/device_tracker.py index 5e84b7cc67d..520eaceb1d0 100644 --- a/homeassistant/components/husqvarna_automower/device_tracker.py +++ b/homeassistant/components/husqvarna_automower/device_tracker.py @@ -8,6 +8,9 @@ from . 
import AutomowerConfigEntry from .coordinator import AutomowerDataUpdateCoordinator from .entity import AutomowerBaseEntity +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/husqvarna_automower/sensor.py b/homeassistant/components/husqvarna_automower/sensor.py index 70b5510de36..fb8603623e4 100644 --- a/homeassistant/components/husqvarna_automower/sensor.py +++ b/homeassistant/components/husqvarna_automower/sensor.py @@ -35,6 +35,8 @@ from .entity import ( ) _LOGGER = logging.getLogger(__name__) +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 ATTR_WORK_AREA_ID_ASSIGNMENT = "work_area_id_assignment" diff --git a/homeassistant/components/imap/sensor.py b/homeassistant/components/imap/sensor.py index b484586e057..60892388252 100644 --- a/homeassistant/components/imap/sensor.py +++ b/homeassistant/components/imap/sensor.py @@ -17,6 +17,9 @@ from . import ImapConfigEntry from .const import DOMAIN from .coordinator import ImapDataUpdateCoordinator +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + IMAP_MAIL_COUNT_DESCRIPTION = SensorEntityDescription( key="imap_mail_count", entity_category=EntityCategory.DIAGNOSTIC, diff --git a/homeassistant/components/iron_os/sensor.py b/homeassistant/components/iron_os/sensor.py index 05d56db26d3..34f0f6af6b2 100644 --- a/homeassistant/components/iron_os/sensor.py +++ b/homeassistant/components/iron_os/sensor.py @@ -30,6 +30,9 @@ from . import IronOSConfigEntry from .const import OHM from .entity import IronOSBaseEntity +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + class PinecilSensor(StrEnum): """Pinecil Sensors.""" diff --git a/homeassistant/components/ista_ecotrend/sensor.py b/homeassistant/components/ista_ecotrend/sensor.py index 779a5d5c55f..eb06fabe373 100644 --- a/homeassistant/components/ista_ecotrend/sensor.py +++ b/homeassistant/components/ista_ecotrend/sensor.py @@ -40,6 +40,8 @@ from .coordinator import IstaCoordinator from .util import IstaConsumptionType, IstaValueType, get_native_value, get_statistics _LOGGER = logging.getLogger(__name__) +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 @dataclass(kw_only=True, frozen=True) diff --git a/homeassistant/components/lamarzocco/binary_sensor.py b/homeassistant/components/lamarzocco/binary_sensor.py index 444e4d0723b..0e11c54d896 100644 --- a/homeassistant/components/lamarzocco/binary_sensor.py +++ b/homeassistant/components/lamarzocco/binary_sensor.py @@ -17,6 +17,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .coordinator import LaMarzoccoConfigEntry from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class LaMarzoccoBinarySensorEntityDescription( diff --git a/homeassistant/components/lamarzocco/calendar.py b/homeassistant/components/lamarzocco/calendar.py index 0ec9b55a9a1..46bfe875c9f 100644 --- a/homeassistant/components/lamarzocco/calendar.py +++ b/homeassistant/components/lamarzocco/calendar.py @@ -13,6 +13,9 @@ from homeassistant.util import dt as dt_util from .coordinator import LaMarzoccoConfigEntry, LaMarzoccoUpdateCoordinator from .entity import LaMarzoccoBaseEntity +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + CALENDAR_KEY = "auto_on_off_schedule" DAY_OF_WEEK = [ diff 
--git a/homeassistant/components/lamarzocco/sensor.py b/homeassistant/components/lamarzocco/sensor.py index d9e858b8191..6dda6e69a02 100644 --- a/homeassistant/components/lamarzocco/sensor.py +++ b/homeassistant/components/lamarzocco/sensor.py @@ -19,6 +19,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .coordinator import LaMarzoccoConfigEntry from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class LaMarzoccoSensorEntityDescription( diff --git a/homeassistant/components/mastodon/quality_scale.yaml b/homeassistant/components/mastodon/quality_scale.yaml index f287b9a0c1f..315ef808701 100644 --- a/homeassistant/components/mastodon/quality_scale.yaml +++ b/homeassistant/components/mastodon/quality_scale.yaml @@ -39,7 +39,10 @@ rules: entity-unavailable: done integration-owner: done log-when-unavailable: done - parallel-updates: done + parallel-updates: + status: todo + comment: | + Does not set parallel-updates on notify platform. reauthentication-flow: status: todo comment: | diff --git a/homeassistant/components/mastodon/sensor.py b/homeassistant/components/mastodon/sensor.py index a7a1d40fcc4..1bb59ad7c05 100644 --- a/homeassistant/components/mastodon/sensor.py +++ b/homeassistant/components/mastodon/sensor.py @@ -23,6 +23,9 @@ from .const import ( ) from .entity import MastodonEntity +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class MastodonSensorEntityDescription(SensorEntityDescription): diff --git a/homeassistant/components/tedee/binary_sensor.py b/homeassistant/components/tedee/binary_sensor.py index b586db7c2a7..94d3f0b6831 100644 --- a/homeassistant/components/tedee/binary_sensor.py +++ b/homeassistant/components/tedee/binary_sensor.py @@ -18,6 +18,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .coordinator import TedeeConfigEntry from .entity import TedeeDescriptionEntity +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class TedeeBinarySensorEntityDescription( diff --git a/homeassistant/components/tedee/sensor.py b/homeassistant/components/tedee/sensor.py index 90f76317fff..d61e7360dc4 100644 --- a/homeassistant/components/tedee/sensor.py +++ b/homeassistant/components/tedee/sensor.py @@ -18,6 +18,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .coordinator import TedeeConfigEntry from .entity import TedeeDescriptionEntity +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class TedeeSensorEntityDescription(SensorEntityDescription): diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index c55915c19c1..b33649427c1 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -18,6 +18,7 @@ from .quality_scale_validation import ( config_flow, diagnostics, discovery, + parallel_updates, reauthentication_flow, reconfiguration_flow, runtime_data, @@ -67,7 +68,7 @@ ALL_RULES = [ Rule("entity-unavailable", ScaledQualityScaleTiers.SILVER), Rule("integration-owner", ScaledQualityScaleTiers.SILVER), Rule("log-when-unavailable", ScaledQualityScaleTiers.SILVER), - Rule("parallel-updates", ScaledQualityScaleTiers.SILVER), + Rule("parallel-updates", ScaledQualityScaleTiers.SILVER, parallel_updates), Rule( 
"reauthentication-flow", ScaledQualityScaleTiers.SILVER, reauthentication_flow ), diff --git a/script/hassfest/quality_scale_validation/parallel_updates.py b/script/hassfest/quality_scale_validation/parallel_updates.py new file mode 100644 index 00000000000..918d27a3fa8 --- /dev/null +++ b/script/hassfest/quality_scale_validation/parallel_updates.py @@ -0,0 +1,35 @@ +"""Enforce that the integration sets PARALLEL_UPDATES constant. + +https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/parallel-updates +""" + +import ast + +from homeassistant.const import Platform +from script.hassfest.model import Integration + + +def _has_parallel_updates_defined(module: ast.Module) -> bool: + """Test if the module defines `PARALLEL_UPDATES` constant.""" + return any( + type(item) is ast.Assign and item.targets[0].id == "PARALLEL_UPDATES" + for item in module.body + ) + + +def validate(integration: Integration) -> list[str] | None: + """Validate that the integration sets PARALLEL_UPDATES constant.""" + + errors = [] + for platform in Platform: + module_file = integration.path / f"{platform}.py" + if not module_file.exists(): + continue + module = ast.parse(module_file.read_text()) + + if not _has_parallel_updates_defined(module): + errors.append( + f"Integration does not set `PARALLEL_UPDATES` in {module_file}" + ) + + return errors From a248a6d9917380e7f0e8420474436b6d01b3426c Mon Sep 17 00:00:00 2001 From: Alex Date: Fri, 6 Dec 2024 22:43:57 +0100 Subject: [PATCH 0331/1198] Update pyrisco to 0.6.5 (#132493) --- homeassistant/components/risco/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/risco/manifest.json b/homeassistant/components/risco/manifest.json index c226c1c590d..149b8761589 100644 --- a/homeassistant/components/risco/manifest.json +++ b/homeassistant/components/risco/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/risco", "iot_class": "local_push", "loggers": ["pyrisco"], - "requirements": ["pyrisco==0.6.4"] + "requirements": ["pyrisco==0.6.5"] } diff --git a/requirements_all.txt b/requirements_all.txt index e681eafea60..90927951f64 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2203,7 +2203,7 @@ pyrecswitch==1.0.2 pyrepetierng==0.1.0 # homeassistant.components.risco -pyrisco==0.6.4 +pyrisco==0.6.5 # homeassistant.components.rituals_perfume_genie pyrituals==0.0.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 839e0849a41..5b17df3cc4f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1778,7 +1778,7 @@ pyqwikswitch==0.93 pyrainbird==6.0.1 # homeassistant.components.risco -pyrisco==0.6.4 +pyrisco==0.6.5 # homeassistant.components.rituals_perfume_genie pyrituals==0.0.6 From 5f3bb7e89eea52ccd5e25d8d9ed2d04ca0041a27 Mon Sep 17 00:00:00 2001 From: Duco Sebel <74970928+DCSBL@users.noreply.github.com> Date: Fri, 6 Dec 2024 22:55:39 +0100 Subject: [PATCH 0332/1198] Use build in unit of measurement in HomeWizard 'Water usage' sensor (#132261) --- homeassistant/components/homewizard/sensor.py | 3 ++- .../homewizard/snapshots/test_sensor.ambr | 16 ++++++++-------- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/homewizard/sensor.py b/homeassistant/components/homewizard/sensor.py index 24ed5933d06..8b822bffc50 100644 --- a/homeassistant/components/homewizard/sensor.py +++ 
b/homeassistant/components/homewizard/sensor.py @@ -27,6 +27,7 @@ from homeassistant.const import ( UnitOfPower, UnitOfReactivePower, UnitOfVolume, + UnitOfVolumeFlowRate, ) from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo @@ -565,7 +566,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = ( HomeWizardSensorEntityDescription( key="active_liter_lpm", translation_key="active_liter_lpm", - native_unit_of_measurement="l/min", + native_unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_MINUTE, state_class=SensorStateClass.MEASUREMENT, has_fn=lambda data: data.active_liter_lpm is not None, value_fn=lambda data: data.active_liter_lpm, diff --git a/tests/components/homewizard/snapshots/test_sensor.ambr b/tests/components/homewizard/snapshots/test_sensor.ambr index a91c87722d1..c5de96cbf8f 100644 --- a/tests/components/homewizard/snapshots/test_sensor.ambr +++ b/tests/components/homewizard/snapshots/test_sensor.ambr @@ -6468,7 +6468,7 @@ 'supported_features': 0, 'translation_key': 'active_liter_lpm', 'unique_id': 'HWE-P1_5c2fafabcdef_active_liter_lpm', - 'unit_of_measurement': 'l/min', + 'unit_of_measurement': , }) # --- # name: test_sensors[HWE-P1-entity_ids0][sensor.device_water_usage:state] @@ -6476,7 +6476,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'Device Water usage', 'state_class': , - 'unit_of_measurement': 'l/min', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_water_usage', @@ -10228,7 +10228,7 @@ 'supported_features': 0, 'translation_key': 'active_liter_lpm', 'unique_id': 'HWE-P1_5c2fafabcdef_active_liter_lpm', - 'unit_of_measurement': 'l/min', + 'unit_of_measurement': , }) # --- # name: test_sensors[HWE-P1-invalid-EAN-entity_ids9][sensor.device_water_usage:state] @@ -10236,7 +10236,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'Device Water usage', 'state_class': , - 'unit_of_measurement': 'l/min', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_water_usage', @@ -13562,7 +13562,7 @@ 'supported_features': 0, 'translation_key': 'active_liter_lpm', 'unique_id': 'HWE-P1_5c2fafabcdef_active_liter_lpm', - 'unit_of_measurement': 'l/min', + 'unit_of_measurement': , }) # --- # name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_water_usage:state] @@ -13570,7 +13570,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'Device Water usage', 'state_class': , - 'unit_of_measurement': 'l/min', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_water_usage', @@ -15301,7 +15301,7 @@ 'supported_features': 0, 'translation_key': 'active_liter_lpm', 'unique_id': 'HWE-P1_5c2fafabcdef_active_liter_lpm', - 'unit_of_measurement': 'l/min', + 'unit_of_measurement': , }) # --- # name: test_sensors[HWE-WTR-entity_ids4][sensor.device_water_usage:state] @@ -15309,7 +15309,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'Device Water usage', 'state_class': , - 'unit_of_measurement': 'l/min', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_water_usage', From 18e8b080e0ea1f8fa9d5a41d27c5befc37210731 Mon Sep 17 00:00:00 2001 From: Tom Date: Fri, 6 Dec 2024 22:56:45 +0100 Subject: [PATCH 0333/1198] Plugwise add missing translation (#132239) Co-authored-by: Bouwe Westerdijk --- homeassistant/components/plugwise/strings.json | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/plugwise/strings.json b/homeassistant/components/plugwise/strings.json index f74fc036e2a..20029298c4e 100644 --- 
a/homeassistant/components/plugwise/strings.json +++ b/homeassistant/components/plugwise/strings.json @@ -11,7 +11,10 @@ "username": "Smile Username" }, "data_description": { - "host": "Leave empty if using Auto Discovery" + "password": "The Smile ID printed on the label on the back of your Adam, Smile-T, or P1.", + "host": "The hostname or IP-address of your Smile. You can find it in your router or the Plugwise App.", + "port": "By default your Smile uses port 80, normally you should not have to change this.", + "username": "Default is `smile`, or `stretch` for the legacy Stretch." } } }, From 0d0ef6bf03706d492472e65eedd8164fac0775e2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=2E=20Diego=20Rodr=C3=ADguez=20Royo?= Date: Fri, 6 Dec 2024 22:58:13 +0100 Subject: [PATCH 0334/1198] Add exception handlers to Home Connect action calls (#131895) Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- .../components/home_connect/__init__.py | 91 +++++++++++++++---- .../components/home_connect/const.py | 5 +- .../components/home_connect/number.py | 7 +- .../components/home_connect/select.py | 3 +- .../components/home_connect/strings.json | 26 +++++- .../components/home_connect/switch.py | 7 +- homeassistant/components/home_connect/time.py | 7 +- tests/components/home_connect/conftest.py | 5 + tests/components/home_connect/test_init.py | 37 +++++++- 9 files changed, 151 insertions(+), 37 deletions(-) diff --git a/homeassistant/components/home_connect/__init__.py b/homeassistant/components/home_connect/__init__.py index 6e89fd2c9f7..818c4e6fe19 100644 --- a/homeassistant/components/home_connect/__init__.py +++ b/homeassistant/components/home_connect/__init__.py @@ -13,6 +13,7 @@ import voluptuous as vol from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_DEVICE_ID, Platform from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import ( config_entry_oauth2_flow, config_validation as cv, @@ -39,6 +40,9 @@ from .const import ( SERVICE_SELECT_PROGRAM, SERVICE_SETTING, SERVICE_START_PROGRAM, + SVE_TRANSLATION_PLACEHOLDER_KEY, + SVE_TRANSLATION_PLACEHOLDER_PROGRAM, + SVE_TRANSLATION_PLACEHOLDER_VALUE, ) type HomeConnectConfigEntry = ConfigEntry[api.ConfigEntryAuth] @@ -139,6 +143,43 @@ def _get_appliance( raise ValueError(f"Appliance for device id {device_entry.id} not found") +def _get_appliance_or_raise_service_validation_error( + hass: HomeAssistant, device_id: str +) -> api.HomeConnectAppliance: + """Return a Home Connect appliance instance or raise a service validation error.""" + try: + return _get_appliance(hass, device_id) + except (ValueError, AssertionError) as err: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="appliance_not_found", + translation_placeholders={ + "device_id": device_id, + }, + ) from err + + +async def _run_appliance_service[*_Ts]( + hass: HomeAssistant, + appliance: api.HomeConnectAppliance, + method: str, + *args: *_Ts, + error_translation_key: str, + error_translation_placeholders: dict[str, str], +) -> None: + try: + await hass.async_add_executor_job(getattr(appliance, method), args) + except api.HomeConnectError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key=error_translation_key, + translation_placeholders={ + **get_dict_from_home_connect_error(err), + **error_translation_placeholders, + }, + ) from err + + async def async_setup(hass: HomeAssistant, 
config: ConfigType) -> bool: """Set up Home Connect component.""" @@ -158,16 +199,31 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: option[ATTR_UNIT] = option_unit options.append(option) - - appliance = _get_appliance(hass, device_id) - await hass.async_add_executor_job(getattr(appliance, method), program, options) + await _run_appliance_service( + hass, + _get_appliance_or_raise_service_validation_error(hass, device_id), + method, + program, + options, + error_translation_key=method, + error_translation_placeholders={ + SVE_TRANSLATION_PLACEHOLDER_PROGRAM: program, + }, + ) async def _async_service_command(call, command): """Execute calls to services executing a command.""" device_id = call.data[ATTR_DEVICE_ID] - appliance = _get_appliance(hass, device_id) - await hass.async_add_executor_job(appliance.execute_command, command) + appliance = _get_appliance_or_raise_service_validation_error(hass, device_id) + await _run_appliance_service( + hass, + appliance, + "execute_command", + command, + error_translation_key="execute_command", + error_translation_placeholders={"command": command}, + ) async def _async_service_key_value(call, method): """Execute calls to services taking a key and value.""" @@ -176,20 +232,17 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: unit = call.data.get(ATTR_UNIT) device_id = call.data[ATTR_DEVICE_ID] - appliance = _get_appliance(hass, device_id) - if unit is not None: - await hass.async_add_executor_job( - getattr(appliance, method), - key, - value, - unit, - ) - else: - await hass.async_add_executor_job( - getattr(appliance, method), - key, - value, - ) + await _run_appliance_service( + hass, + _get_appliance_or_raise_service_validation_error(hass, device_id), + method, + *((key, value) if unit is None else (key, value, unit)), + error_translation_key=method, + error_translation_placeholders={ + SVE_TRANSLATION_PLACEHOLDER_KEY: key, + SVE_TRANSLATION_PLACEHOLDER_VALUE: str(value), + }, + ) async def async_service_option_active(call): """Service for setting an option for an active program.""" diff --git a/homeassistant/components/home_connect/const.py b/homeassistant/components/home_connect/const.py index e9f32b0e772..e20cf3b1fa0 100644 --- a/homeassistant/components/home_connect/const.py +++ b/homeassistant/components/home_connect/const.py @@ -127,9 +127,12 @@ ATTR_STEPSIZE = "stepsize" ATTR_UNIT = "unit" ATTR_VALUE = "value" +SVE_TRANSLATION_KEY_SET_SETTING = "set_setting_entity" + SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME = "appliance_name" SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID = "entity_id" -SVE_TRANSLATION_PLACEHOLDER_SETTING_KEY = "setting_key" +SVE_TRANSLATION_PLACEHOLDER_PROGRAM = "program" +SVE_TRANSLATION_PLACEHOLDER_KEY = "key" SVE_TRANSLATION_PLACEHOLDER_VALUE = "value" OLD_NEW_UNIQUE_ID_SUFFIX_MAP = { diff --git a/homeassistant/components/home_connect/number.py b/homeassistant/components/home_connect/number.py index fc53939b9d8..0703b4772bb 100644 --- a/homeassistant/components/home_connect/number.py +++ b/homeassistant/components/home_connect/number.py @@ -22,8 +22,9 @@ from .const import ( ATTR_UNIT, ATTR_VALUE, DOMAIN, + SVE_TRANSLATION_KEY_SET_SETTING, SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID, - SVE_TRANSLATION_PLACEHOLDER_SETTING_KEY, + SVE_TRANSLATION_PLACEHOLDER_KEY, SVE_TRANSLATION_PLACEHOLDER_VALUE, ) from .entity import HomeConnectEntity @@ -119,11 +120,11 @@ class HomeConnectNumberEntity(HomeConnectEntity, NumberEntity): except HomeConnectError as err: raise HomeAssistantError( 
translation_domain=DOMAIN, - translation_key="set_setting", + translation_key=SVE_TRANSLATION_KEY_SET_SETTING, translation_placeholders={ **get_dict_from_home_connect_error(err), SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, - SVE_TRANSLATION_PLACEHOLDER_SETTING_KEY: self.bsh_key, + SVE_TRANSLATION_PLACEHOLDER_KEY: self.bsh_key, SVE_TRANSLATION_PLACEHOLDER_VALUE: str(value), }, ) from err diff --git a/homeassistant/components/home_connect/select.py b/homeassistant/components/home_connect/select.py index 46b2bda24d6..c97b3db28e0 100644 --- a/homeassistant/components/home_connect/select.py +++ b/homeassistant/components/home_connect/select.py @@ -22,6 +22,7 @@ from .const import ( BSH_ACTIVE_PROGRAM, BSH_SELECTED_PROGRAM, DOMAIN, + SVE_TRANSLATION_PLACEHOLDER_PROGRAM, ) from .entity import HomeConnectEntity @@ -294,7 +295,7 @@ class HomeConnectProgramSelectEntity(HomeConnectEntity, SelectEntity): translation_key=translation_key, translation_placeholders={ **get_dict_from_home_connect_error(err), - "program": bsh_key, + SVE_TRANSLATION_PLACEHOLDER_PROGRAM: bsh_key, }, ) from err self.async_entity_update() diff --git a/homeassistant/components/home_connect/strings.json b/homeassistant/components/home_connect/strings.json index 5f5ed3cee54..e70f2f28c65 100644 --- a/homeassistant/components/home_connect/strings.json +++ b/homeassistant/components/home_connect/strings.json @@ -22,6 +22,9 @@ } }, "exceptions": { + "appliance_not_found": { + "message": "Appliance for device id {device_id} not found" + }, "turn_on_light": { "message": "Error turning on {entity_id}: {description}" }, @@ -37,14 +40,17 @@ "set_light_color": { "message": "Error setting color of {entity_id}: {description}" }, + "set_setting_entity": { + "message": "Error assigning the value \"{value}\" to the setting \"{key}\" for {entity_id}: {description}" + }, "set_setting": { - "message": "Error assigning the value \"{value}\" to the setting \"{setting_key}\" for {entity_id}: {description}" + "message": "Error assigning the value \"{value}\" to the setting \"{key}\": {description}" }, "turn_on": { - "message": "Error turning on {entity_id} ({setting_key}): {description}" + "message": "Error turning on {entity_id} ({key}): {description}" }, "turn_off": { - "message": "Error turning off {entity_id} ({setting_key}): {description}" + "message": "Error turning off {entity_id} ({key}): {description}" }, "select_program": { "message": "Error selecting program {program}: {description}" @@ -52,8 +58,20 @@ "start_program": { "message": "Error starting program {program}: {description}" }, + "pause_program": { + "message": "Error pausing program: {description}" + }, "stop_program": { - "message": "Error stopping program {program}: {description}" + "message": "Error stopping program: {description}" + }, + "set_options_active_program": { + "message": "Error setting options for the active program: {description}" + }, + "set_options_selected_program": { + "message": "Error setting options for the selected program: {description}" + }, + "execute_command": { + "message": "Error executing command {command}: {description}" }, "power_on": { "message": "Error turning on {appliance_name}: {description}" diff --git a/homeassistant/components/home_connect/switch.py b/homeassistant/components/home_connect/switch.py index 7e3a285912b..acb78e87db1 100644 --- a/homeassistant/components/home_connect/switch.py +++ b/homeassistant/components/home_connect/switch.py @@ -30,7 +30,7 @@ from .const import ( REFRIGERATION_SUPERMODEREFRIGERATOR, 
SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME, SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID, - SVE_TRANSLATION_PLACEHOLDER_SETTING_KEY, + SVE_TRANSLATION_PLACEHOLDER_KEY, SVE_TRANSLATION_PLACEHOLDER_VALUE, ) from .entity import HomeConnectDevice, HomeConnectEntity @@ -140,7 +140,7 @@ class HomeConnectSwitch(HomeConnectEntity, SwitchEntity): translation_placeholders={ **get_dict_from_home_connect_error(err), SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, - SVE_TRANSLATION_PLACEHOLDER_SETTING_KEY: self.bsh_key, + SVE_TRANSLATION_PLACEHOLDER_KEY: self.bsh_key, }, ) from err @@ -164,7 +164,7 @@ class HomeConnectSwitch(HomeConnectEntity, SwitchEntity): translation_placeholders={ **get_dict_from_home_connect_error(err), SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, - SVE_TRANSLATION_PLACEHOLDER_SETTING_KEY: self.bsh_key, + SVE_TRANSLATION_PLACEHOLDER_KEY: self.bsh_key, }, ) from err @@ -230,7 +230,6 @@ class HomeConnectProgramSwitch(HomeConnectEntity, SwitchEntity): translation_key="stop_program", translation_placeholders={ **get_dict_from_home_connect_error(err), - "program": self.program_name, }, ) from err self.async_entity_update() diff --git a/homeassistant/components/home_connect/time.py b/homeassistant/components/home_connect/time.py index cad16d63cb2..c1f125cd2f7 100644 --- a/homeassistant/components/home_connect/time.py +++ b/homeassistant/components/home_connect/time.py @@ -14,8 +14,9 @@ from . import HomeConnectConfigEntry, get_dict_from_home_connect_error from .const import ( ATTR_VALUE, DOMAIN, + SVE_TRANSLATION_KEY_SET_SETTING, SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID, - SVE_TRANSLATION_PLACEHOLDER_SETTING_KEY, + SVE_TRANSLATION_PLACEHOLDER_KEY, SVE_TRANSLATION_PLACEHOLDER_VALUE, ) from .entity import HomeConnectEntity @@ -82,11 +83,11 @@ class HomeConnectTimeEntity(HomeConnectEntity, TimeEntity): except HomeConnectError as err: raise HomeAssistantError( translation_domain=DOMAIN, - translation_key="set_setting", + translation_key=SVE_TRANSLATION_KEY_SET_SETTING, translation_placeholders={ **get_dict_from_home_connect_error(err), SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, - SVE_TRANSLATION_PLACEHOLDER_SETTING_KEY: self.bsh_key, + SVE_TRANSLATION_PLACEHOLDER_KEY: self.bsh_key, SVE_TRANSLATION_PLACEHOLDER_VALUE: str(value), }, ) from err diff --git a/tests/components/home_connect/conftest.py b/tests/components/home_connect/conftest.py index d2eff43e071..2ac8c851e1b 100644 --- a/tests/components/home_connect/conftest.py +++ b/tests/components/home_connect/conftest.py @@ -183,10 +183,15 @@ def mock_problematic_appliance(request: pytest.FixtureRequest) -> Mock: mock.get_programs_available.side_effect = HomeConnectError mock.start_program.side_effect = HomeConnectError mock.select_program.side_effect = HomeConnectError + mock.pause_program.side_effect = HomeConnectError mock.stop_program.side_effect = HomeConnectError + mock.set_options_active_program.side_effect = HomeConnectError + mock.set_options_selected_program.side_effect = HomeConnectError mock.get_status.side_effect = HomeConnectError mock.get_settings.side_effect = HomeConnectError mock.set_setting.side_effect = HomeConnectError + mock.set_setting.side_effect = HomeConnectError + mock.execute_command.side_effect = HomeConnectError return mock diff --git a/tests/components/home_connect/test_init.py b/tests/components/home_connect/test_init.py index 7c4f73b6f0a..69601efb42d 100644 --- a/tests/components/home_connect/test_init.py +++ b/tests/components/home_connect/test_init.py @@ -29,6 +29,7 @@ from 
homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import device_registry as dr, entity_registry as er from script.hassfest.translations import RE_TRANSLATION_KEY @@ -290,8 +291,40 @@ async def test_services( ) +@pytest.mark.parametrize( + "service_call", + SERVICE_KV_CALL_PARAMS + SERVICE_COMMAND_CALL_PARAMS + SERVICE_PROGRAM_CALL_PARAMS, +) @pytest.mark.usefixtures("bypass_throttle") async def test_services_exception( + service_call: list[dict[str, Any]], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: MagicMock, + problematic_appliance: Mock, + device_registry: dr.DeviceRegistry, +) -> None: + """Raise a HomeAssistantError when there is an API error.""" + get_appliances.return_value = [problematic_appliance] + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + device_entry = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, problematic_appliance.haId)}, + ) + + service_call["service_data"]["device_id"] = device_entry.id + + with pytest.raises(HomeAssistantError): + await hass.services.async_call(**service_call) + + +@pytest.mark.usefixtures("bypass_throttle") +async def test_services_appliance_not_found( hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], @@ -299,7 +332,7 @@ async def test_services_exception( get_appliances: MagicMock, appliance: Mock, ) -> None: - """Raise a ValueError when device id does not match.""" + """Raise a ServiceValidationError when device id does not match.""" get_appliances.return_value = [appliance] assert config_entry.state == ConfigEntryState.NOT_LOADED assert await integration_setup() @@ -309,7 +342,7 @@ async def test_services_exception( service_call["service_data"]["device_id"] = "DOES_NOT_EXISTS" - with pytest.raises(AssertionError): + with pytest.raises(ServiceValidationError, match=r"Appliance.*not found"): await hass.services.async_call(**service_call) From d2463b9e7bae76308c63a76ca3325b6df8f87a18 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 6 Dec 2024 23:08:12 +0100 Subject: [PATCH 0335/1198] Update go2rtc-client to 0.1.2 (#132517) --- homeassistant/components/go2rtc/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- script/hassfest/docker/Dockerfile | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/go2rtc/manifest.json b/homeassistant/components/go2rtc/manifest.json index bedee99f930..1cd9e8c1107 100644 --- a/homeassistant/components/go2rtc/manifest.json +++ b/homeassistant/components/go2rtc/manifest.json @@ -8,6 +8,6 @@ "integration_type": "system", "iot_class": "local_polling", "quality_scale": "legacy", - "requirements": ["go2rtc-client==0.1.1"], + "requirements": ["go2rtc-client==0.1.2"], "single_config_entry": true } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 34974b5e146..053e2b21279 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -28,7 +28,7 @@ 
ciso8601==2.3.1 cryptography==44.0.0 dbus-fast==2.24.3 fnv-hash-fast==1.0.2 -go2rtc-client==0.1.1 +go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 habluetooth==3.6.0 hass-nabucasa==0.86.0 diff --git a/requirements_all.txt b/requirements_all.txt index 90927951f64..b18cb451bd6 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -993,7 +993,7 @@ gitterpy==0.1.7 glances-api==0.8.0 # homeassistant.components.go2rtc -go2rtc-client==0.1.1 +go2rtc-client==0.1.2 # homeassistant.components.goalzero goalzero==0.2.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5b17df3cc4f..f22a979cee6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -843,7 +843,7 @@ gios==5.0.0 glances-api==0.8.0 # homeassistant.components.go2rtc -go2rtc-client==0.1.1 +go2rtc-client==0.1.2 # homeassistant.components.goalzero goalzero==0.2.2 diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index 9c3b14ad4df..70ee2971278 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -23,7 +23,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.5.4,source=/uv,target=/bin/uv \ -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ -r /usr/src/homeassistant/requirements.txt \ stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.1 \ - PyTurboJPEG==1.7.5 go2rtc-client==0.1.1 ha-ffmpeg==3.2.2 hassil==2.0.5 home-assistant-intents==2024.12.4 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 + PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.0.5 home-assistant-intents==2024.12.4 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 LABEL "name"="hassfest" LABEL "maintainer"="Home Assistant " From 16484dcee5dbffd6f0d1497c33c6b9beea480905 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 6 Dec 2024 23:26:24 +0100 Subject: [PATCH 0336/1198] Update debugpy to 1.8.8 (#132519) --- homeassistant/components/debugpy/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/debugpy/manifest.json b/homeassistant/components/debugpy/manifest.json index 1e31e002a81..c6e7f79be49 100644 --- a/homeassistant/components/debugpy/manifest.json +++ b/homeassistant/components/debugpy/manifest.json @@ -6,5 +6,5 @@ "integration_type": "service", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["debugpy==1.8.6"] + "requirements": ["debugpy==1.8.8"] } diff --git a/requirements_all.txt b/requirements_all.txt index b18cb451bd6..6aa081a720e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -726,7 +726,7 @@ datapoint==0.9.9 dbus-fast==2.24.3 # homeassistant.components.debugpy -debugpy==1.8.6 +debugpy==1.8.8 # homeassistant.components.decora_wifi # decora-wifi==1.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f22a979cee6..c479c95f7e9 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -622,7 +622,7 @@ datapoint==0.9.9 dbus-fast==2.24.3 # homeassistant.components.debugpy -debugpy==1.8.6 +debugpy==1.8.8 # homeassistant.components.ecovacs deebot-client==9.2.0 From 61fbfc3d4009926ab3e32ca618c62f1ec0b7d7dd Mon Sep 17 00:00:00 2001 From: Artur Pragacz <49985303+arturpragacz@users.noreply.github.com> Date: Sat, 7 Dec 2024 06:49:07 +0100 Subject: [PATCH 0337/1198] Use device area/floor in intent_script (#130644) * Use device area/floor in intent_script * Add test --- .../components/intent_script/__init__.py | 11 +++++++++- 
tests/components/intent_script/test_init.py | 22 +++++++++++++++---- 2 files changed, 28 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/intent_script/__init__.py b/homeassistant/components/intent_script/__init__.py index 6f47cadb04f..a4f84f6ff9e 100644 --- a/homeassistant/components/intent_script/__init__.py +++ b/homeassistant/components/intent_script/__init__.py @@ -148,6 +148,8 @@ class ScriptIntentHandler(intent.IntentHandler): vol.Any("name", "area", "floor"): cv.string, vol.Optional("domain"): vol.All(cv.ensure_list, [cv.string]), vol.Optional("device_class"): vol.All(cv.ensure_list, [cv.string]), + vol.Optional("preferred_area_id"): cv.string, + vol.Optional("preferred_floor_id"): cv.string, } def __init__(self, intent_type: str, config: ConfigType) -> None: @@ -205,7 +207,14 @@ class ScriptIntentHandler(intent.IntentHandler): ) if match_constraints.has_constraints: - match_result = intent.async_match_targets(hass, match_constraints) + match_preferences = intent.MatchTargetsPreferences( + area_id=slots.get("preferred_area_id"), + floor_id=slots.get("preferred_floor_id"), + ) + + match_result = intent.async_match_targets( + hass, match_constraints, match_preferences + ) if match_result.is_match: targets = {} diff --git a/tests/components/intent_script/test_init.py b/tests/components/intent_script/test_init.py index 26c575f0407..39084b9298b 100644 --- a/tests/components/intent_script/test_init.py +++ b/tests/components/intent_script/test_init.py @@ -4,7 +4,7 @@ from unittest.mock import patch from homeassistant import config as hass_config from homeassistant.components.intent_script import DOMAIN -from homeassistant.const import SERVICE_RELOAD +from homeassistant.const import ATTR_FRIENDLY_NAME, SERVICE_RELOAD from homeassistant.core import HomeAssistant from homeassistant.helpers import ( area_registry as ar, @@ -235,17 +235,31 @@ async def test_intent_script_targets( floor_1 = floor_registry.async_create("first floor") kitchen = area_registry.async_get_or_create("kitchen") area_registry.async_update(kitchen.id, floor_id=floor_1.floor_id) + bathroom = area_registry.async_get_or_create("bathroom") entity_registry.async_get_or_create( - "light", "demo", "1234", suggested_object_id="kitchen" + "light", "demo", "kitchen", suggested_object_id="kitchen" ) entity_registry.async_update_entity("light.kitchen", area_id=kitchen.id) - hass.states.async_set("light.kitchen", "off") + hass.states.async_set( + "light.kitchen", "off", attributes={ATTR_FRIENDLY_NAME: "overhead light"} + ) + entity_registry.async_get_or_create( + "light", "demo", "bathroom", suggested_object_id="bathroom" + ) + entity_registry.async_update_entity("light.bathroom", area_id=bathroom.id) + hass.states.async_set( + "light.bathroom", "off", attributes={ATTR_FRIENDLY_NAME: "overhead light"} + ) response = await intent.async_handle( hass, "test", "Targets", - {"name": {"value": "kitchen"}, "domain": {"value": "light"}}, + { + "name": {"value": "overhead light"}, + "domain": {"value": "light"}, + "preferred_area_id": {"value": "kitchen"}, + }, ) assert len(calls) == 1 assert calls[0].data["targets"] == {"entities": ["light.kitchen"]} From 35fa6e5121f512c6f1170ba51c8b68c43cee0449 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Benecke Date: Sat, 7 Dec 2024 09:57:18 +0100 Subject: [PATCH 0338/1198] Set PARALLEL_UPDATES in Bring sensor platform (#132538) * Set IQS `parallel-updates` to todo in Bring integration * Set parallel_updates in sensor --- homeassistant/components/bring/sensor.py | 2 ++ 1 file changed, 2 
insertions(+) diff --git a/homeassistant/components/bring/sensor.py b/homeassistant/components/bring/sensor.py index eddee46f3bc..bd33ce9bf88 100644 --- a/homeassistant/components/bring/sensor.py +++ b/homeassistant/components/bring/sensor.py @@ -24,6 +24,8 @@ from .coordinator import BringData, BringDataUpdateCoordinator from .entity import BringBaseEntity from .util import list_language, sum_attributes +PARALLEL_UPDATES = 0 + @dataclass(kw_only=True, frozen=True) class BringSensorEntityDescription(SensorEntityDescription): From acf207ad1ce6b18c1b93df79d6675a751d7e5736 Mon Sep 17 00:00:00 2001 From: Austin Mroczek Date: Sat, 7 Dec 2024 01:43:55 -0800 Subject: [PATCH 0339/1198] bump total_connect_client to 2024.12 (#132531) --- homeassistant/components/totalconnect/manifest.json | 2 +- homeassistant/components/totalconnect/quality_scale.yaml | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/totalconnect/manifest.json b/homeassistant/components/totalconnect/manifest.json index 87ec14621d9..33306a7adba 100644 --- a/homeassistant/components/totalconnect/manifest.json +++ b/homeassistant/components/totalconnect/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/totalconnect", "iot_class": "cloud_polling", "loggers": ["total_connect_client"], - "requirements": ["total-connect-client==2024.5"] + "requirements": ["total-connect-client==2024.12"] } diff --git a/homeassistant/components/totalconnect/quality_scale.yaml b/homeassistant/components/totalconnect/quality_scale.yaml index e52011d7d48..a8e5b60f7ee 100644 --- a/homeassistant/components/totalconnect/quality_scale.yaml +++ b/homeassistant/components/totalconnect/quality_scale.yaml @@ -10,7 +10,7 @@ rules: entity-unique-id: done has-entity-name: done entity-event-setup: todo - dependency-transparency: todo + dependency-transparency: done action-setup: todo common-modules: done docs-high-level-description: done diff --git a/requirements_all.txt b/requirements_all.txt index 6aa081a720e..7c4c461bb2a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2858,7 +2858,7 @@ tololib==1.1.0 toonapi==0.3.0 # homeassistant.components.totalconnect -total-connect-client==2024.5 +total-connect-client==2024.12 # homeassistant.components.tplink_lte tp-connected==0.0.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c479c95f7e9..504a0c18e7c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2277,7 +2277,7 @@ tololib==1.1.0 toonapi==0.3.0 # homeassistant.components.totalconnect -total-connect-client==2024.5 +total-connect-client==2024.12 # homeassistant.components.tplink_omada tplink-omada-client==1.4.3 From b9002d0c64a766962a92445a124b94df2c137f92 Mon Sep 17 00:00:00 2001 From: Josef Zweck <24647999+zweckj@users.noreply.github.com> Date: Sat, 7 Dec 2024 12:18:04 +0100 Subject: [PATCH 0340/1198] Bump pylamarzocco to 1.3.3 (#132534) --- homeassistant/components/lamarzocco/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lamarzocco/manifest.json b/homeassistant/components/lamarzocco/manifest.json index 54413ccf28f..00e76096e7f 100644 --- a/homeassistant/components/lamarzocco/manifest.json +++ b/homeassistant/components/lamarzocco/manifest.json @@ -36,5 +36,5 @@ "integration_type": "device", "iot_class": "cloud_polling", "loggers": 
["pylamarzocco"], - "requirements": ["pylamarzocco==1.3.2"] + "requirements": ["pylamarzocco==1.3.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index 7c4c461bb2a..45ba64f6dc8 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2027,7 +2027,7 @@ pykwb==0.0.8 pylacrosse==0.4 # homeassistant.components.lamarzocco -pylamarzocco==1.3.2 +pylamarzocco==1.3.3 # homeassistant.components.lastfm pylast==5.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 504a0c18e7c..f18e7e177a2 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1635,7 +1635,7 @@ pykrakenapi==0.1.8 pykulersky==0.5.2 # homeassistant.components.lamarzocco -pylamarzocco==1.3.2 +pylamarzocco==1.3.3 # homeassistant.components.lastfm pylast==5.1.0 From e04fd48a05f51f5c5e60a7a45165a6a5051b4171 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 7 Dec 2024 11:12:58 -0600 Subject: [PATCH 0341/1198] Bump yalexs-ble to 2.5.2 (#132560) --- homeassistant/components/august/manifest.json | 2 +- homeassistant/components/yale/manifest.json | 2 +- homeassistant/components/yalexs_ble/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index 96ed982e4ec..99dbbc0ed9c 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -28,5 +28,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.1"] + "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.2"] } diff --git a/homeassistant/components/yale/manifest.json b/homeassistant/components/yale/manifest.json index 50c2a0af457..474ed36e90c 100644 --- a/homeassistant/components/yale/manifest.json +++ b/homeassistant/components/yale/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/yale", "iot_class": "cloud_push", "loggers": ["socketio", "engineio", "yalexs"], - "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.1"] + "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.2"] } diff --git a/homeassistant/components/yalexs_ble/manifest.json b/homeassistant/components/yalexs_ble/manifest.json index c3d1a3d97f1..95d28cd5372 100644 --- a/homeassistant/components/yalexs_ble/manifest.json +++ b/homeassistant/components/yalexs_ble/manifest.json @@ -12,5 +12,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/yalexs_ble", "iot_class": "local_push", - "requirements": ["yalexs-ble==2.5.1"] + "requirements": ["yalexs-ble==2.5.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 45ba64f6dc8..489a3aa4333 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3044,7 +3044,7 @@ yalesmartalarmclient==0.4.3 # homeassistant.components.august # homeassistant.components.yale # homeassistant.components.yalexs_ble -yalexs-ble==2.5.1 +yalexs-ble==2.5.2 # homeassistant.components.august # homeassistant.components.yale diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f18e7e177a2..3b30f55b30c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2436,7 +2436,7 @@ yalesmartalarmclient==0.4.3 # homeassistant.components.august # homeassistant.components.yale # homeassistant.components.yalexs_ble -yalexs-ble==2.5.1 
+yalexs-ble==2.5.2 # homeassistant.components.august # homeassistant.components.yale From 09908153f8cc31be5dccfb966a33e4750eba2c93 Mon Sep 17 00:00:00 2001 From: Raphael Hehl <7577984+RaHehl@users.noreply.github.com> Date: Sat, 7 Dec 2024 19:22:35 +0100 Subject: [PATCH 0342/1198] Bump uiprotect to 6.7.0 (#132565) --- homeassistant/components/unifiprotect/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/unifiprotect/manifest.json b/homeassistant/components/unifiprotect/manifest.json index e8a8c062800..c4327e4a2f9 100644 --- a/homeassistant/components/unifiprotect/manifest.json +++ b/homeassistant/components/unifiprotect/manifest.json @@ -40,7 +40,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["uiprotect", "unifi_discovery"], - "requirements": ["uiprotect==6.6.5", "unifi-discovery==1.2.0"], + "requirements": ["uiprotect==6.7.0", "unifi-discovery==1.2.0"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/requirements_all.txt b/requirements_all.txt index 489a3aa4333..2423f4829b7 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2897,7 +2897,7 @@ typedmonarchmoney==0.3.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.6.5 +uiprotect==6.7.0 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3b30f55b30c..cfd00d70e76 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2313,7 +2313,7 @@ typedmonarchmoney==0.3.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.6.5 +uiprotect==6.7.0 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 From a8713af8b8f226a691754d513fbb6b8906a2228a Mon Sep 17 00:00:00 2001 From: Robert Svensson Date: Sat, 7 Dec 2024 22:31:11 +0100 Subject: [PATCH 0343/1198] Bump aiounifi to v81 to fix partitioned cookies on python 3.13 (#132540) --- homeassistant/components/unifi/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/unifi/manifest.json b/homeassistant/components/unifi/manifest.json index 66d0a53284b..ce573592153 100644 --- a/homeassistant/components/unifi/manifest.json +++ b/homeassistant/components/unifi/manifest.json @@ -7,7 +7,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["aiounifi"], - "requirements": ["aiounifi==80"], + "requirements": ["aiounifi==81"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/requirements_all.txt b/requirements_all.txt index 2423f4829b7..fcd28e2bc1a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -399,7 +399,7 @@ aiotedee==0.2.20 aiotractive==0.6.0 # homeassistant.components.unifi -aiounifi==80 +aiounifi==81 # homeassistant.components.vlc_telnet aiovlc==0.5.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index cfd00d70e76..4d06bbf79dd 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -381,7 +381,7 @@ aiotedee==0.2.20 aiotractive==0.6.0 # homeassistant.components.unifi -aiounifi==80 +aiounifi==81 # homeassistant.components.vlc_telnet aiovlc==0.5.1 From b40d8074c065f0b2e12b58cd3d11db9373bc11b9 Mon Sep 17 00:00:00 2001 From: mkmer Date: Sun, 8 Dec 2024 09:46:44 -0500 Subject: [PATCH 0344/1198] Use runtime_data in Whirlpool (#132613) Use runtime_data in whirlpool --- 
homeassistant/components/whirlpool/__init__.py | 16 ++++++---------- homeassistant/components/whirlpool/climate.py | 7 +++---- .../components/whirlpool/diagnostics.py | 8 +++----- homeassistant/components/whirlpool/sensor.py | 7 +++---- 4 files changed, 15 insertions(+), 23 deletions(-) diff --git a/homeassistant/components/whirlpool/__init__.py b/homeassistant/components/whirlpool/__init__.py index 36f8fbec59d..64adcda4742 100644 --- a/homeassistant/components/whirlpool/__init__.py +++ b/homeassistant/components/whirlpool/__init__.py @@ -20,8 +20,10 @@ _LOGGER = logging.getLogger(__name__) PLATFORMS = [Platform.CLIMATE, Platform.SENSOR] +type WhirlpoolConfigEntry = ConfigEntry[WhirlpoolData] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + +async def async_setup_entry(hass: HomeAssistant, entry: WhirlpoolConfigEntry) -> bool: """Set up Whirlpool Sixth Sense from a config entry.""" hass.data.setdefault(DOMAIN, {}) @@ -47,21 +49,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: _LOGGER.error("Cannot fetch appliances") return False - hass.data[DOMAIN][entry.entry_id] = WhirlpoolData( - appliances_manager, auth, backend_selector - ) + entry.runtime_data = WhirlpoolData(appliances_manager, auth, backend_selector) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: WhirlpoolConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) @dataclass diff --git a/homeassistant/components/whirlpool/climate.py b/homeassistant/components/whirlpool/climate.py index e1cedd38c04..943c5d1c956 100644 --- a/homeassistant/components/whirlpool/climate.py +++ b/homeassistant/components/whirlpool/climate.py @@ -23,7 +23,6 @@ from homeassistant.components.climate import ( ClimateEntityFeature, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -31,7 +30,7 @@ from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import generate_entity_id from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import WhirlpoolData +from . 
import WhirlpoolConfigEntry from .const import DOMAIN _LOGGER = logging.getLogger(__name__) @@ -70,11 +69,11 @@ SUPPORTED_TARGET_TEMPERATURE_STEP = 1 async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: WhirlpoolConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up entry.""" - whirlpool_data: WhirlpoolData = hass.data[DOMAIN][config_entry.entry_id] + whirlpool_data = config_entry.runtime_data aircons = [ AirConEntity( diff --git a/homeassistant/components/whirlpool/diagnostics.py b/homeassistant/components/whirlpool/diagnostics.py index 9b1dd00e7bd..87d6ea827e2 100644 --- a/homeassistant/components/whirlpool/diagnostics.py +++ b/homeassistant/components/whirlpool/diagnostics.py @@ -5,11 +5,9 @@ from __future__ import annotations from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from . import WhirlpoolData -from .const import DOMAIN +from . import WhirlpoolConfigEntry TO_REDACT = { "SERIAL_NUMBER", @@ -24,11 +22,11 @@ TO_REDACT = { async def async_get_config_entry_diagnostics( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: WhirlpoolConfigEntry, ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - whirlpool: WhirlpoolData = hass.data[DOMAIN][config_entry.entry_id] + whirlpool = config_entry.runtime_data diagnostics_data = { "Washer_dryers": { wd["NAME"]: dict(wd.items()) diff --git a/homeassistant/components/whirlpool/sensor.py b/homeassistant/components/whirlpool/sensor.py index 8c74f01298e..b84518cedf1 100644 --- a/homeassistant/components/whirlpool/sensor.py +++ b/homeassistant/components/whirlpool/sensor.py @@ -15,7 +15,6 @@ from homeassistant.components.sensor import ( SensorEntity, SensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.device_registry import DeviceInfo @@ -23,7 +22,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from homeassistant.util.dt import utcnow -from . import WhirlpoolData +from . 
import WhirlpoolConfigEntry from .const import DOMAIN TANK_FILL = { @@ -132,12 +131,12 @@ SENSOR_TIMER: tuple[SensorEntityDescription] = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: WhirlpoolConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Config flow entry for Whrilpool Laundry.""" entities: list = [] - whirlpool_data: WhirlpoolData = hass.data[DOMAIN][config_entry.entry_id] + whirlpool_data = config_entry.runtime_data for appliance in whirlpool_data.appliances_manager.washer_dryers: _wd = WasherDryer( whirlpool_data.backend_selector, From d32e69dcb6dd295da5c44204d92216c5b0624d38 Mon Sep 17 00:00:00 2001 From: Thomas55555 <59625598+Thomas55555@users.noreply.github.com> Date: Sun, 8 Dec 2024 15:59:27 +0100 Subject: [PATCH 0345/1198] Fix config flow in Husqvarna Automower (#132615) --- homeassistant/components/husqvarna_automower/config_flow.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/husqvarna_automower/config_flow.py b/homeassistant/components/husqvarna_automower/config_flow.py index 4da3bd14089..7efed529453 100644 --- a/homeassistant/components/husqvarna_automower/config_flow.py +++ b/homeassistant/components/husqvarna_automower/config_flow.py @@ -53,10 +53,10 @@ class HusqvarnaConfigFlowHandler( tz = await dt_util.async_get_time_zone(str(dt_util.DEFAULT_TIME_ZONE)) automower_api = AutomowerSession(AsyncConfigFlowAuth(websession, token), tz) try: - data = await automower_api.get_status() + status_data = await automower_api.get_status() except Exception: # noqa: BLE001 return self.async_abort(reason="unknown") - if data == {}: + if status_data == {}: return self.async_abort(reason="no_mower_connected") structured_token = structure_token(token[CONF_ACCESS_TOKEN]) From 2f0e6a6dc7bb53155ad8f537030bc8aaac33ca03 Mon Sep 17 00:00:00 2001 From: puddly <32534428+puddly@users.noreply.github.com> Date: Sun, 8 Dec 2024 15:32:39 -0500 Subject: [PATCH 0346/1198] Bump ZHA dependencies (#132630) --- homeassistant/components/zha/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index 1fbbd83bb9c..3a301be9b02 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -21,7 +21,7 @@ "zha", "universal_silabs_flasher" ], - "requirements": ["universal-silabs-flasher==0.0.25", "zha==0.0.41"], + "requirements": ["universal-silabs-flasher==0.0.25", "zha==0.0.42"], "usb": [ { "vid": "10C4", diff --git a/requirements_all.txt b/requirements_all.txt index fcd28e2bc1a..ed6b402bdad 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3081,7 +3081,7 @@ zeroconf==0.136.2 zeversolar==0.3.2 # homeassistant.components.zha -zha==0.0.41 +zha==0.0.42 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.13 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4d06bbf79dd..22afad01803 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2467,7 +2467,7 @@ zeroconf==0.136.2 zeversolar==0.3.2 # homeassistant.components.zha -zha==0.0.41 +zha==0.0.42 # homeassistant.components.zwave_js zwave-js-server-python==0.60.0 From a4ceed776e3715891cb70f70d7b0a271ade47089 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sun, 8 Dec 2024 22:50:22 +0100 Subject: [PATCH 0347/1198] Add tests to Nord Pool (#132468) --- 
tests/components/nordpool/test_sensor.py | 37 ++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/tests/components/nordpool/test_sensor.py b/tests/components/nordpool/test_sensor.py index c7a305c8a40..5c2d138cb34 100644 --- a/tests/components/nordpool/test_sensor.py +++ b/tests/components/nordpool/test_sensor.py @@ -6,6 +6,7 @@ import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.config_entries import ConfigEntry +from homeassistant.const import STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -23,3 +24,39 @@ async def test_sensor( """Test the Nord Pool sensor.""" await snapshot_platform(hass, entity_registry, snapshot, load_int.entry_id) + + +@pytest.mark.freeze_time("2024-11-05T23:00:00+00:00") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_no_next_price(hass: HomeAssistant, load_int: ConfigEntry) -> None: + """Test the Nord Pool sensor.""" + + current_price = hass.states.get("sensor.nord_pool_se3_current_price") + last_price = hass.states.get("sensor.nord_pool_se3_previous_price") + next_price = hass.states.get("sensor.nord_pool_se3_next_price") + + assert current_price is not None + assert last_price is not None + assert next_price is not None + assert current_price.state == "0.28914" + assert last_price.state == "0.28914" + assert next_price.state == STATE_UNKNOWN + + +@pytest.mark.freeze_time("2024-11-05T00:00:00+01:00") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_no_previous_price( + hass: HomeAssistant, load_int: ConfigEntry +) -> None: + """Test the Nord Pool sensor.""" + + current_price = hass.states.get("sensor.nord_pool_se3_current_price") + last_price = hass.states.get("sensor.nord_pool_se3_previous_price") + next_price = hass.states.get("sensor.nord_pool_se3_next_price") + + assert current_price is not None + assert last_price is not None + assert next_price is not None + assert current_price.state == "0.25073" + assert last_price.state == STATE_UNKNOWN + assert next_price.state == "0.07636" From 421e2411d3d29c7550edf4b6e4f5365d142a5215 Mon Sep 17 00:00:00 2001 From: Tom Date: Sun, 8 Dec 2024 22:58:17 +0100 Subject: [PATCH 0348/1198] Plugwise Quality improvements (#132175) --- homeassistant/components/plugwise/climate.py | 18 +++++----- .../components/plugwise/coordinator.py | 4 +-- .../components/plugwise/quality_scale.yaml | 34 +++++++------------ .../components/plugwise/strings.json | 8 +++++ tests/components/plugwise/test_climate.py | 2 +- tests/components/plugwise/test_init.py | 1 - 6 files changed, 33 insertions(+), 34 deletions(-) diff --git a/homeassistant/components/plugwise/climate.py b/homeassistant/components/plugwise/climate.py index b27fd1d4f0e..4090405650a 100644 --- a/homeassistant/components/plugwise/climate.py +++ b/homeassistant/components/plugwise/climate.py @@ -15,7 +15,7 @@ from homeassistant.components.climate import ( ) from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant, callback -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import PlugwiseConfigEntry @@ -226,12 +226,6 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): if ATTR_TARGET_TEMP_LOW in kwargs: data["setpoint_low"] = kwargs.get(ATTR_TARGET_TEMP_LOW) - for temperature in data.values(): - if temperature is None or not ( - self._attr_min_temp <= temperature <= self._attr_max_temp - ): - raise ValueError("Invalid temperature change requested") - if mode := kwargs.get(ATTR_HVAC_MODE): await self.async_set_hvac_mode(mode) @@ -241,7 +235,15 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: """Set the hvac mode.""" if hvac_mode not in self.hvac_modes: - raise HomeAssistantError("Unsupported hvac_mode") + hvac_modes = ", ".join(self.hvac_modes) + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="unsupported_hvac_mode_requested", + translation_placeholders={ + "hvac_mode": hvac_mode, + "hvac_modes": hvac_modes, + }, + ) if hvac_mode == self.hvac_mode: return diff --git a/homeassistant/components/plugwise/coordinator.py b/homeassistant/components/plugwise/coordinator.py index 6ce6855e7d6..bf9e7d31cc0 100644 --- a/homeassistant/components/plugwise/coordinator.py +++ b/homeassistant/components/plugwise/coordinator.py @@ -68,7 +68,6 @@ class PlugwiseDataUpdateCoordinator(DataUpdateCoordinator[PlugwiseData]): async def _async_update_data(self) -> PlugwiseData: """Fetch data from Plugwise.""" - data = PlugwiseData(devices={}, gateway={}) try: if not self._connected: await self._connect() @@ -85,9 +84,8 @@ class PlugwiseDataUpdateCoordinator(DataUpdateCoordinator[PlugwiseData]): raise UpdateFailed("Data incomplete or missing") from err except UnsupportedDeviceError as err: raise ConfigEntryError("Device with unsupported firmware") from err - else: - self._async_add_remove_devices(data, self.config_entry) + self._async_add_remove_devices(data, self.config_entry) return data def _async_add_remove_devices(self, data: PlugwiseData, entry: ConfigEntry) -> None: diff --git a/homeassistant/components/plugwise/quality_scale.yaml b/homeassistant/components/plugwise/quality_scale.yaml index 58a20046c5b..b2801319e91 100644 --- a/homeassistant/components/plugwise/quality_scale.yaml +++ b/homeassistant/components/plugwise/quality_scale.yaml @@ -8,26 +8,22 @@ rules: config-flow-test-coverage: status: todo comment: Cover test_form and zeroconf - runtime-data: - status: todo - comment: Clean up test_init for testing internals + runtime-data: done test-before-setup: done - appropriate-polling: - status: todo - comment: Clean up coordinator (L71) check for mypy happiness + appropriate-polling: done entity-unique-id: done has-entity-name: done entity-event-setup: done dependency-transparency: done action-setup: - status: todo - comment: Check if we have these, otherwise exempt + status: exempt + comment: Plugwise integration has no custom actions common-modules: status: todo comment: Verify entity for async_added_to_hass usage (discard?) 
docs-high-level-description: status: todo - comment: Rewrite top section + comment: Rewrite top section, docs PR prepared docs-installation-instructions: status: todo comment: Docs PR 36087 @@ -38,9 +34,7 @@ rules: config-entry-unloading: done log-when-unavailable: done entity-unavailable: done - action-exceptions: - status: todo - comment: Climate exception on ValueError should be ServiceValidationError + action-exceptions: done reauthentication-flow: status: exempt comment: The hubs have a hardcoded `Smile ID` printed on the sticker used as password, it can not be changed @@ -53,7 +47,7 @@ rules: integration-owner: done docs-installation-parameters: status: todo - comment: Docs PR 36087 (partial) + todo rewrite generically + comment: Docs PR 36087 (partial) + todo rewrite generically (PR prepared) docs-configuration-parameters: status: exempt comment: Plugwise has no options flow @@ -68,34 +62,32 @@ rules: diagnostics: done exception-translations: status: todo - comment: Add coordinator, util and climate exceptions + comment: Add coordinator, util exceptions (climate done in core 132175) icon-translations: done reconfiguration-flow: status: todo comment: This integration does not have any reconfiguration steps (yet) investigate how/why - dynamic-devices: - status: todo - comment: Add missing logic to button for unloading and creation + dynamic-devices: done discovery-update-info: done repair-issues: status: exempt comment: This integration does not have repairs docs-use-cases: status: todo - comment: Check for completeness + comment: Check for completeness, PR prepared docs-supported-devices: status: todo - comment: The list is there but could be improved for readability + comment: The list is there but could be improved for readability, PR prepared docs-supported-functions: status: todo comment: Check for completeness docs-data-update: done docs-known-limitations: status: todo - comment: Partial in 36087 but could be more elaborat + comment: Partial in 36087 but could be more elaborate docs-troubleshooting: status: todo - comment: Check for completeness + comment: Check for completeness, PR prepared docs-examples: status: todo comment: Check for completeness diff --git a/homeassistant/components/plugwise/strings.json b/homeassistant/components/plugwise/strings.json index 20029298c4e..badd522e78b 100644 --- a/homeassistant/components/plugwise/strings.json +++ b/homeassistant/components/plugwise/strings.json @@ -284,5 +284,13 @@ "name": "Relay" } } + }, + "exceptions": { + "invalid_temperature_change_requested": { + "message": "Invalid temperature change requested." + }, + "unsupported_hvac_mode_requested": { + "message": "Unsupported mode {hvac_mode} requested, valid modes are: {hvac_modes}." 
+ } } } diff --git a/tests/components/plugwise/test_climate.py b/tests/components/plugwise/test_climate.py index c0c1c00c68d..39dcec92195 100644 --- a/tests/components/plugwise/test_climate.py +++ b/tests/components/plugwise/test_climate.py @@ -233,7 +233,7 @@ async def test_adam_climate_entity_climate_changes( "c50f167537524366a5af7aa3942feb1e", "off" ) - with pytest.raises(HomeAssistantError): + with pytest.raises(ServiceValidationError): await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, diff --git a/tests/components/plugwise/test_init.py b/tests/components/plugwise/test_init.py index 3b9881c9e3d..99ff79263b6 100644 --- a/tests/components/plugwise/test_init.py +++ b/tests/components/plugwise/test_init.py @@ -78,7 +78,6 @@ async def test_load_unload_config_entry( await hass.config_entries.async_unload(mock_config_entry.entry_id) await hass.async_block_till_done() - assert not hass.data.get(DOMAIN) assert mock_config_entry.state is ConfigEntryState.NOT_LOADED From d166e5fdcc1c58bc2fc08b8076f91d40c885089e Mon Sep 17 00:00:00 2001 From: Hugo Ideler <547309+hugoideler@users.noreply.github.com> Date: Sun, 8 Dec 2024 23:29:43 +0100 Subject: [PATCH 0349/1198] Bump nsapi to 3.1.2 (#132596) --- homeassistant/components/nederlandse_spoorwegen/manifest.json | 2 +- requirements_all.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/nederlandse_spoorwegen/manifest.json b/homeassistant/components/nederlandse_spoorwegen/manifest.json index 8a8a20c453b..0ef9d8d86f3 100644 --- a/homeassistant/components/nederlandse_spoorwegen/manifest.json +++ b/homeassistant/components/nederlandse_spoorwegen/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/nederlandse_spoorwegen", "iot_class": "cloud_polling", "quality_scale": "legacy", - "requirements": ["nsapi==3.0.5"] + "requirements": ["nsapi==3.1.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index ed6b402bdad..7f3dda1ad3f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1478,7 +1478,7 @@ notifications-android-tv==0.1.5 notify-events==1.0.4 # homeassistant.components.nederlandse_spoorwegen -nsapi==3.0.5 +nsapi==3.1.2 # homeassistant.components.nsw_fuel_station nsw-fuel-api-client==1.1.0 From be10d79c75ac569505da8040bbcf60692dd53700 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sun, 8 Dec 2024 23:30:12 +0100 Subject: [PATCH 0350/1198] Update twentemilieu to 2.2.0 (#132554) --- homeassistant/components/twentemilieu/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/twentemilieu/manifest.json b/homeassistant/components/twentemilieu/manifest.json index a89091948c2..292887c6c5b 100644 --- a/homeassistant/components/twentemilieu/manifest.json +++ b/homeassistant/components/twentemilieu/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["twentemilieu"], - "requirements": ["twentemilieu==2.1.0"] + "requirements": ["twentemilieu==2.2.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 7f3dda1ad3f..0fc70baff9d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2882,7 +2882,7 @@ ttn_client==1.2.0 tuya-device-sharing-sdk==0.2.1 # homeassistant.components.twentemilieu -twentemilieu==2.1.0 +twentemilieu==2.2.0 # homeassistant.components.twilio twilio==6.32.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 
22afad01803..60fdb450ace 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2298,7 +2298,7 @@ ttn_client==1.2.0 tuya-device-sharing-sdk==0.2.1 # homeassistant.components.twentemilieu -twentemilieu==2.1.0 +twentemilieu==2.2.0 # homeassistant.components.twilio twilio==6.32.0 From ce8c5fc3a9aa53c5de41b528c9664852fae1054f Mon Sep 17 00:00:00 2001 From: Ravaka Razafimanantsoa <3774520+SeraphicRav@users.noreply.github.com> Date: Mon, 9 Dec 2024 07:35:41 +0900 Subject: [PATCH 0351/1198] Fix API change for AC not supporting floats in SwitchBot Cloud (#132231) --- homeassistant/components/switchbot_cloud/climate.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/switchbot_cloud/climate.py b/homeassistant/components/switchbot_cloud/climate.py index 90d8258d0a3..4e05e9e9a1e 100644 --- a/homeassistant/components/switchbot_cloud/climate.py +++ b/homeassistant/components/switchbot_cloud/climate.py @@ -79,6 +79,8 @@ class SwitchBotCloudAirConditioner(SwitchBotCloudEntity, ClimateEntity): _attr_hvac_mode = HVACMode.FAN_ONLY _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_target_temperature = 21 + _attr_target_temperature_step = 1 + _attr_precision = 1 _attr_name = None async def _do_send_command( @@ -96,7 +98,7 @@ class SwitchBotCloudAirConditioner(SwitchBotCloudEntity, ClimateEntity): ) await self.send_api_command( AirConditionerCommands.SET_ALL, - parameters=f"{new_temperature},{new_mode},{new_fan_speed},on", + parameters=f"{int(new_temperature)},{new_mode},{new_fan_speed},on", ) async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: From 0b7447c562b23dad963321612f29eb9594d0df67 Mon Sep 17 00:00:00 2001 From: Bouwe Westerdijk <11290930+bouwew@users.noreply.github.com> Date: Sun, 8 Dec 2024 23:36:55 +0100 Subject: [PATCH 0352/1198] Bump plugwise to v1.6.2 and adapt (#132608) --- homeassistant/components/plugwise/climate.py | 13 ++----------- homeassistant/components/plugwise/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../plugwise/fixtures/m_adam_heating/all_data.json | 2 +- .../plugwise/fixtures/m_adam_jip/all_data.json | 8 ++++---- .../m_adam_multiple_devices_per_zone/all_data.json | 7 ++++++- .../plugwise/snapshots/test_diagnostics.ambr | 7 ++++++- tests/components/plugwise/test_climate.py | 12 ++++-------- 9 files changed, 26 insertions(+), 29 deletions(-) diff --git a/homeassistant/components/plugwise/climate.py b/homeassistant/components/plugwise/climate.py index 4090405650a..fb0124e144d 100644 --- a/homeassistant/components/plugwise/climate.py +++ b/homeassistant/components/plugwise/climate.py @@ -188,17 +188,8 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): self._previous_action_mode(self.coordinator) # Adam provides the hvac_action for each thermostat - if self._gateway["smile_name"] == "Adam": - if (control_state := self.device.get("control_state")) == "cooling": - return HVACAction.COOLING - if control_state == "heating": - return HVACAction.HEATING - if control_state == "preheating": - return HVACAction.PREHEATING - if control_state == "off": - return HVACAction.IDLE - - return HVACAction.IDLE + if (action := self.device.get("control_state")) is not None: + return HVACAction(action) # Anna heater: str = self._gateway["heater_id"] diff --git a/homeassistant/components/plugwise/manifest.json b/homeassistant/components/plugwise/manifest.json index df35777ac54..d7fcec3bbae 100644 --- a/homeassistant/components/plugwise/manifest.json +++ 
b/homeassistant/components/plugwise/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["plugwise"], - "requirements": ["plugwise==1.6.1"], + "requirements": ["plugwise==1.6.2"], "zeroconf": ["_plugwise._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index 0fc70baff9d..93c9244b7db 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1622,7 +1622,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==1.6.1 +plugwise==1.6.2 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 60fdb450ace..13df06e7ff6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1332,7 +1332,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==1.6.1 +plugwise==1.6.2 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 diff --git a/tests/components/plugwise/fixtures/m_adam_heating/all_data.json b/tests/components/plugwise/fixtures/m_adam_heating/all_data.json index fab2cea5fdc..bb24faeebfa 100644 --- a/tests/components/plugwise/fixtures/m_adam_heating/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_heating/all_data.json @@ -176,7 +176,7 @@ "off" ], "climate_mode": "auto", - "control_state": "off", + "control_state": "idle", "dev_class": "climate", "model": "ThermoZone", "name": "Bathroom", diff --git a/tests/components/plugwise/fixtures/m_adam_jip/all_data.json b/tests/components/plugwise/fixtures/m_adam_jip/all_data.json index 4516ce2c2d0..1ca9e77010f 100644 --- a/tests/components/plugwise/fixtures/m_adam_jip/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_jip/all_data.json @@ -3,7 +3,7 @@ "06aecb3d00354375924f50c47af36bd2": { "active_preset": "no_frost", "climate_mode": "off", - "control_state": "off", + "control_state": "idle", "dev_class": "climate", "model": "ThermoZone", "name": "Slaapkamer", @@ -26,7 +26,7 @@ "13228dab8ce04617af318a2888b3c548": { "active_preset": "home", "climate_mode": "heat", - "control_state": "off", + "control_state": "idle", "dev_class": "climate", "model": "ThermoZone", "name": "Woonkamer", @@ -238,7 +238,7 @@ "d27aede973b54be484f6842d1b2802ad": { "active_preset": "home", "climate_mode": "heat", - "control_state": "off", + "control_state": "idle", "dev_class": "climate", "model": "ThermoZone", "name": "Kinderkamer", @@ -285,7 +285,7 @@ "d58fec52899f4f1c92e4f8fad6d8c48c": { "active_preset": "home", "climate_mode": "heat", - "control_state": "off", + "control_state": "idle", "dev_class": "climate", "model": "ThermoZone", "name": "Logeerkamer", diff --git a/tests/components/plugwise/fixtures/m_adam_multiple_devices_per_zone/all_data.json b/tests/components/plugwise/fixtures/m_adam_multiple_devices_per_zone/all_data.json index 67e8c235cc3..8da184a7a3e 100644 --- a/tests/components/plugwise/fixtures/m_adam_multiple_devices_per_zone/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_multiple_devices_per_zone/all_data.json @@ -32,6 +32,7 @@ "off" ], "climate_mode": "auto", + "control_state": "idle", "dev_class": "climate", "model": "ThermoZone", "name": "Badkamer", @@ -66,6 +67,7 @@ "off" ], "climate_mode": "heat", + "control_state": "idle", "dev_class": "climate", "model": "ThermoZone", "name": "Bios", @@ -112,6 +114,7 @@ "446ac08dd04d4eff8ac57489757b7314": { "active_preset": "no_frost", "climate_mode": "heat", + "control_state": "idle", "dev_class": "climate", "model": "ThermoZone", "name": 
"Garage", @@ -258,6 +261,7 @@ "off" ], "climate_mode": "auto", + "control_state": "idle", "dev_class": "climate", "model": "ThermoZone", "name": "Jessie", @@ -402,6 +406,7 @@ "off" ], "climate_mode": "auto", + "control_state": "heating", "dev_class": "climate", "model": "ThermoZone", "name": "Woonkamer", @@ -577,7 +582,7 @@ "cooling_present": false, "gateway_id": "fe799307f1624099878210aa0b9f1475", "heater_id": "90986d591dcd426cae3ec3e8111ff730", - "item_count": 364, + "item_count": 369, "notifications": { "af82e4ccf9c548528166d38e560662a4": { "warning": "Node Plug (with MAC address 000D6F000D13CB01, in room 'n.a.') has been unreachable since 23:03 2020-01-18. Please check the connection and restart the device." diff --git a/tests/components/plugwise/snapshots/test_diagnostics.ambr b/tests/components/plugwise/snapshots/test_diagnostics.ambr index bf7d4260a32..806c92fe7cb 100644 --- a/tests/components/plugwise/snapshots/test_diagnostics.ambr +++ b/tests/components/plugwise/snapshots/test_diagnostics.ambr @@ -34,6 +34,7 @@ 'off', ]), 'climate_mode': 'auto', + 'control_state': 'idle', 'dev_class': 'climate', 'model': 'ThermoZone', 'name': 'Badkamer', @@ -75,6 +76,7 @@ 'off', ]), 'climate_mode': 'heat', + 'control_state': 'idle', 'dev_class': 'climate', 'model': 'ThermoZone', 'name': 'Bios', @@ -131,6 +133,7 @@ '446ac08dd04d4eff8ac57489757b7314': dict({ 'active_preset': 'no_frost', 'climate_mode': 'heat', + 'control_state': 'idle', 'dev_class': 'climate', 'model': 'ThermoZone', 'name': 'Garage', @@ -286,6 +289,7 @@ 'off', ]), 'climate_mode': 'auto', + 'control_state': 'idle', 'dev_class': 'climate', 'model': 'ThermoZone', 'name': 'Jessie', @@ -440,6 +444,7 @@ 'off', ]), 'climate_mode': 'auto', + 'control_state': 'heating', 'dev_class': 'climate', 'model': 'ThermoZone', 'name': 'Woonkamer', @@ -625,7 +630,7 @@ 'cooling_present': False, 'gateway_id': 'fe799307f1624099878210aa0b9f1475', 'heater_id': '90986d591dcd426cae3ec3e8111ff730', - 'item_count': 364, + 'item_count': 369, 'notifications': dict({ 'af82e4ccf9c548528166d38e560662a4': dict({ 'warning': "Node Plug (with MAC address 000D6F000D13CB01, in room 'n.a.') has been unreachable since 23:03 2020-01-18. 
Please check the connection and restart the device.", diff --git a/tests/components/plugwise/test_climate.py b/tests/components/plugwise/test_climate.py index 39dcec92195..6320ab1f96b 100644 --- a/tests/components/plugwise/test_climate.py +++ b/tests/components/plugwise/test_climate.py @@ -31,15 +31,13 @@ async def test_adam_climate_entity_attributes( state = hass.states.get("climate.woonkamer") assert state assert state.state == HVACMode.AUTO + assert state.attributes["hvac_action"] == "heating" assert state.attributes["hvac_modes"] == [HVACMode.AUTO, HVACMode.HEAT] - # hvac_action is not asserted as the fixture is not in line with recent firmware functionality - assert "preset_modes" in state.attributes assert "no_frost" in state.attributes["preset_modes"] assert "home" in state.attributes["preset_modes"] - - assert state.attributes["current_temperature"] == 20.9 assert state.attributes["preset_mode"] == "home" + assert state.attributes["current_temperature"] == 20.9 assert state.attributes["supported_features"] == 17 assert state.attributes["temperature"] == 21.5 assert state.attributes["min_temp"] == 0.0 @@ -49,15 +47,13 @@ async def test_adam_climate_entity_attributes( state = hass.states.get("climate.jessie") assert state assert state.state == HVACMode.AUTO + assert state.attributes["hvac_action"] == "idle" assert state.attributes["hvac_modes"] == [HVACMode.AUTO, HVACMode.HEAT] - # hvac_action is not asserted as the fixture is not in line with recent firmware functionality - assert "preset_modes" in state.attributes assert "no_frost" in state.attributes["preset_modes"] assert "home" in state.attributes["preset_modes"] - - assert state.attributes["current_temperature"] == 17.2 assert state.attributes["preset_mode"] == "asleep" + assert state.attributes["current_temperature"] == 17.2 assert state.attributes["temperature"] == 15.0 assert state.attributes["min_temp"] == 0.0 assert state.attributes["max_temp"] == 35.0 From ed938ba315794a4e8abe75fb7404011b620f4d74 Mon Sep 17 00:00:00 2001 From: hahn-th <15319212+hahn-th@users.noreply.github.com> Date: Sun, 8 Dec 2024 23:38:23 +0100 Subject: [PATCH 0353/1198] Bump homematicip from 1.1.3 to 1.1.5 (#132537) --- homeassistant/components/homematicip_cloud/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/homematicip_cloud/manifest.json b/homeassistant/components/homematicip_cloud/manifest.json index 7878a8b4e0a..a44d0586952 100644 --- a/homeassistant/components/homematicip_cloud/manifest.json +++ b/homeassistant/components/homematicip_cloud/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/homematicip_cloud", "iot_class": "cloud_push", "loggers": ["homematicip"], - "requirements": ["homematicip==1.1.3"] + "requirements": ["homematicip==1.1.5"] } diff --git a/requirements_all.txt b/requirements_all.txt index 93c9244b7db..da41db79e06 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1136,7 +1136,7 @@ home-assistant-intents==2024.12.4 homeconnect==0.8.0 # homeassistant.components.homematicip_cloud -homematicip==1.1.3 +homematicip==1.1.5 # homeassistant.components.horizon horimote==0.4.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 13df06e7ff6..8e10a4e9b36 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -962,7 +962,7 @@ home-assistant-intents==2024.12.4 homeconnect==0.8.0 # homeassistant.components.homematicip_cloud 
-homematicip==1.1.3 +homematicip==1.1.5 # homeassistant.components.remember_the_milk httplib2==0.20.4 From 9f0356fcfed89b6ec134235f98b837ae853da1c7 Mon Sep 17 00:00:00 2001 From: Thomas55555 <59625598+Thomas55555@users.noreply.github.com> Date: Mon, 9 Dec 2024 00:20:53 +0100 Subject: [PATCH 0354/1198] Increase test coverage in apsystems coordinator (#132631) Co-authored-by: Joost Lekkerkerker --- tests/components/apsystems/test_init.py | 50 +++++++++++++++++++++++-- 1 file changed, 47 insertions(+), 3 deletions(-) diff --git a/tests/components/apsystems/test_init.py b/tests/components/apsystems/test_init.py index c85c4094e30..f127744dbf4 100644 --- a/tests/components/apsystems/test_init.py +++ b/tests/components/apsystems/test_init.py @@ -1,8 +1,11 @@ """Test the APSystem setup.""" +import datetime from unittest.mock import AsyncMock from APsystemsEZ1 import InverterReturnedError +from freezegun.api import FrozenDateTimeFactory +import pytest from homeassistant.components.apsystems.const import DOMAIN from homeassistant.config_entries import ConfigEntryState @@ -10,16 +13,57 @@ from homeassistant.core import HomeAssistant from . import setup_integration -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed + +SCAN_INTERVAL = datetime.timedelta(seconds=12) -async def test_update_failed( +@pytest.mark.usefixtures("mock_apsystems") +async def test_load_unload_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test load and unload entry.""" + await setup_integration(hass, mock_config_entry) + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_remove(mock_config_entry.entry_id) + await hass.async_block_till_done() + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_setup_failed( hass: HomeAssistant, mock_apsystems: AsyncMock, mock_config_entry: MockConfigEntry, ) -> None: """Test update failed.""" - mock_apsystems.get_output_data.side_effect = InverterReturnedError + mock_apsystems.get_device_info.side_effect = TimeoutError await setup_integration(hass, mock_config_entry) entry = hass.config_entries.async_entries(DOMAIN)[0] assert entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_update( + hass: HomeAssistant, + mock_apsystems: AsyncMock, + mock_config_entry: MockConfigEntry, + caplog: pytest.LogCaptureFixture, + freezer: FrozenDateTimeFactory, +) -> None: + """Test update data with an inverter error and recover.""" + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + assert "Inverter returned an error" not in caplog.text + mock_apsystems.get_output_data.side_effect = InverterReturnedError + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert "Error fetching APSystems Data data:" in caplog.text + caplog.clear() + mock_apsystems.get_output_data.side_effect = None + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert "Fetching APSystems Data data recovered" in caplog.text From 182c85cf23161592827b282a85058ab704982291 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=85ke=20Strandberg?= Date: Mon, 9 Dec 2024 07:51:03 +0100 Subject: [PATCH 0355/1198] Enable additional entities on myUplink model SMO20 (#131688) * Add a couple of entities to SMO 20 * Enable additional entities on SMO20 --- homeassistant/components/myuplink/helpers.py | 6 ++++++ 1 file changed, 6 
insertions(+) diff --git a/homeassistant/components/myuplink/helpers.py b/homeassistant/components/myuplink/helpers.py index de5486d8dea..bd875d8a872 100644 --- a/homeassistant/components/myuplink/helpers.py +++ b/homeassistant/components/myuplink/helpers.py @@ -95,11 +95,17 @@ PARAMETER_ID_TO_EXCLUDE_F730 = ( ) PARAMETER_ID_TO_INCLUDE_SMO20 = ( + "40013", + "40033", "40940", + "44069", + "44071", + "44073", "47011", "47015", "47028", "47032", + "47398", "50004", ) From 206cac681120949ef9de2def37af19d197b6fb60 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Dec 2024 08:17:15 +0100 Subject: [PATCH 0356/1198] Bump actions/attest-build-provenance from 2.0.0 to 2.0.1 (#132661) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/builder.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index a6da4a05fa2..c172e0b14eb 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -531,7 +531,7 @@ jobs: - name: Generate artifact attestation if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true' - uses: actions/attest-build-provenance@619dbb2e03e0189af0c55118e7d3c5e129e99726 # v2.0.0 + uses: actions/attest-build-provenance@c4fbc648846ca6f503a13a2281a5e7b98aa57202 # v2.0.1 with: subject-name: ${{ env.HASSFEST_IMAGE_NAME }} subject-digest: ${{ steps.push.outputs.digest }} From 644b07d08468fd94c55f4a2f1bb863da48c41f55 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 08:24:09 +0100 Subject: [PATCH 0357/1198] Remove deprecated supported features warning in Camera (#132640) --- homeassistant/components/camera/__init__.py | 26 ++++----------------- tests/components/camera/test_init.py | 20 ---------------- 2 files changed, 5 insertions(+), 41 deletions(-) diff --git a/homeassistant/components/camera/__init__.py b/homeassistant/components/camera/__init__.py index 4d718433fca..725fc84adc3 100644 --- a/homeassistant/components/camera/__init__.py +++ b/homeassistant/components/camera/__init__.py @@ -516,19 +516,6 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): """Flag supported features.""" return self._attr_supported_features - @property - def supported_features_compat(self) -> CameraEntityFeature: - """Return the supported features as CameraEntityFeature. - - Remove this compatibility shim in 2025.1 or later. 
- """ - features = self.supported_features - if type(features) is int: # noqa: E721 - new_features = CameraEntityFeature(features) - self._report_deprecated_supported_features_values(new_features) - return new_features - return features - @cached_property def is_recording(self) -> bool: """Return true if the device is recording.""" @@ -582,7 +569,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): self._deprecate_attr_frontend_stream_type_logged = True return self._attr_frontend_stream_type - if CameraEntityFeature.STREAM not in self.supported_features_compat: + if CameraEntityFeature.STREAM not in self.supported_features: return None if ( self._webrtc_provider @@ -811,9 +798,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): async def async_internal_added_to_hass(self) -> None: """Run when entity about to be added to hass.""" await super().async_internal_added_to_hass() - self.__supports_stream = ( - self.supported_features_compat & CameraEntityFeature.STREAM - ) + self.__supports_stream = self.supported_features & CameraEntityFeature.STREAM await self.async_refresh_providers(write_state=False) async def async_refresh_providers(self, *, write_state: bool = True) -> None: @@ -853,7 +838,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): self, fn: Callable[[HomeAssistant, Camera], Coroutine[None, None, _T | None]] ) -> _T | None: """Get first provider that supports this camera.""" - if CameraEntityFeature.STREAM not in self.supported_features_compat: + if CameraEntityFeature.STREAM not in self.supported_features: return None return await fn(self.hass, self) @@ -911,7 +896,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): def camera_capabilities(self) -> CameraCapabilities: """Return the camera capabilities.""" frontend_stream_types = set() - if CameraEntityFeature.STREAM in self.supported_features_compat: + if CameraEntityFeature.STREAM in self.supported_features: if self._supports_native_sync_webrtc or self._supports_native_async_webrtc: # The camera has a native WebRTC implementation frontend_stream_types.add(StreamType.WEB_RTC) @@ -931,8 +916,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): """ super().async_write_ha_state() if self.__supports_stream != ( - supports_stream := self.supported_features_compat - & CameraEntityFeature.STREAM + supports_stream := self.supported_features & CameraEntityFeature.STREAM ): self.__supports_stream = supports_stream self._invalidate_camera_capabilities_cache() diff --git a/tests/components/camera/test_init.py b/tests/components/camera/test_init.py index 32520fcad23..a3045e27cf1 100644 --- a/tests/components/camera/test_init.py +++ b/tests/components/camera/test_init.py @@ -826,26 +826,6 @@ def test_deprecated_state_constants( import_and_test_deprecated_constant_enum(caplog, module, enum, "STATE_", "2025.10") -def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: - """Test deprecated supported features ints.""" - - class MockCamera(camera.Camera): - @property - def supported_features(self) -> int: - """Return supported features.""" - return 1 - - entity = MockCamera() - assert entity.supported_features_compat is camera.CameraEntityFeature(1) - assert "MockCamera" in caplog.text - assert "is using deprecated supported features values" in caplog.text - assert "Instead it should use" in caplog.text - assert "CameraEntityFeature.ON_OFF" in caplog.text - caplog.clear() - assert 
entity.supported_features_compat is camera.CameraEntityFeature(1) - assert "is using deprecated supported features values" not in caplog.text - - @pytest.mark.usefixtures("mock_camera") async def test_entity_picture_url_changes_on_token_update(hass: HomeAssistant) -> None: """Test the token is rotated and entity entity picture cache is cleared.""" From 6c3e56748c331be8405ea88bf689fb810220f641 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 08:29:31 +0100 Subject: [PATCH 0358/1198] Use ast_parse_module in parallel_updates IQS rule (#132646) --- script/hassfest/quality_scale_validation/parallel_updates.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/script/hassfest/quality_scale_validation/parallel_updates.py b/script/hassfest/quality_scale_validation/parallel_updates.py index 918d27a3fa8..74ec55991f9 100644 --- a/script/hassfest/quality_scale_validation/parallel_updates.py +++ b/script/hassfest/quality_scale_validation/parallel_updates.py @@ -6,6 +6,7 @@ https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/p import ast from homeassistant.const import Platform +from script.hassfest import ast_parse_module from script.hassfest.model import Integration @@ -25,7 +26,7 @@ def validate(integration: Integration) -> list[str] | None: module_file = integration.path / f"{platform}.py" if not module_file.exists(): continue - module = ast.parse(module_file.read_text()) + module = ast_parse_module(module_file) if not _has_parallel_updates_defined(module): errors.append( From eddb416f6d7961a4ad677ff2f5800fb9f61de0b9 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Mon, 9 Dec 2024 08:30:18 +0100 Subject: [PATCH 0359/1198] Remove Stookalert integration (#132569) --- .strict-typing | 1 - CODEOWNERS | 2 - .../components/stookalert/__init__.py | 29 --------- .../components/stookalert/binary_sensor.py | 57 ------------------ .../components/stookalert/config_flow.py | 33 ----------- homeassistant/components/stookalert/const.py | 24 -------- .../components/stookalert/diagnostics.py | 20 ------- .../components/stookalert/manifest.json | 10 ---- .../components/stookalert/strings.json | 14 ----- homeassistant/generated/config_flows.py | 1 - homeassistant/generated/integrations.json | 6 -- mypy.ini | 10 ---- requirements_all.txt | 3 - requirements_test_all.txt | 3 - script/hassfest/quality_scale.py | 1 - tests/components/stookalert/__init__.py | 1 - .../components/stookalert/test_config_flow.py | 59 ------------------- 17 files changed, 274 deletions(-) delete mode 100644 homeassistant/components/stookalert/__init__.py delete mode 100644 homeassistant/components/stookalert/binary_sensor.py delete mode 100644 homeassistant/components/stookalert/config_flow.py delete mode 100644 homeassistant/components/stookalert/const.py delete mode 100644 homeassistant/components/stookalert/diagnostics.py delete mode 100644 homeassistant/components/stookalert/manifest.json delete mode 100644 homeassistant/components/stookalert/strings.json delete mode 100644 tests/components/stookalert/__init__.py delete mode 100644 tests/components/stookalert/test_config_flow.py diff --git a/.strict-typing b/.strict-typing index 42f35b52153..a45be32c3c6 100644 --- a/.strict-typing +++ b/.strict-typing @@ -440,7 +440,6 @@ homeassistant.components.ssdp.* homeassistant.components.starlink.* homeassistant.components.statistics.* homeassistant.components.steamist.* -homeassistant.components.stookalert.* homeassistant.components.stookwijzer.* 
homeassistant.components.stream.* homeassistant.components.streamlabswater.* diff --git a/CODEOWNERS b/CODEOWNERS index 916ff63e696..782f999601f 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1422,8 +1422,6 @@ build.json @home-assistant/supervisor /homeassistant/components/steamist/ @bdraco /tests/components/steamist/ @bdraco /homeassistant/components/stiebel_eltron/ @fucm -/homeassistant/components/stookalert/ @fwestenberg @frenck -/tests/components/stookalert/ @fwestenberg @frenck /homeassistant/components/stookwijzer/ @fwestenberg /tests/components/stookwijzer/ @fwestenberg /homeassistant/components/stream/ @hunterjm @uvjustin @allenporter diff --git a/homeassistant/components/stookalert/__init__.py b/homeassistant/components/stookalert/__init__.py deleted file mode 100644 index 0ef9c7fa845..00000000000 --- a/homeassistant/components/stookalert/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -"""The Stookalert integration.""" - -from __future__ import annotations - -import stookalert - -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant - -from .const import CONF_PROVINCE, DOMAIN - -PLATFORMS = [Platform.BINARY_SENSOR] - - -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Set up Stookalert from a config entry.""" - hass.data.setdefault(DOMAIN, {}) - hass.data[DOMAIN][entry.entry_id] = stookalert.stookalert(entry.data[CONF_PROVINCE]) - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - return True - - -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Unload Stookalert config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - del hass.data[DOMAIN][entry.entry_id] - return unload_ok diff --git a/homeassistant/components/stookalert/binary_sensor.py b/homeassistant/components/stookalert/binary_sensor.py deleted file mode 100644 index a2fff52f2a3..00000000000 --- a/homeassistant/components/stookalert/binary_sensor.py +++ /dev/null @@ -1,57 +0,0 @@ -"""Support for Stookalert Binary Sensor.""" - -from __future__ import annotations - -from datetime import timedelta - -import stookalert - -from homeassistant.components.binary_sensor import ( - BinarySensorDeviceClass, - BinarySensorEntity, -) -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from .const import CONF_PROVINCE, DOMAIN - -SCAN_INTERVAL = timedelta(minutes=60) - - -async def async_setup_entry( - hass: HomeAssistant, - entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up Stookalert binary sensor from a config entry.""" - client = hass.data[DOMAIN][entry.entry_id] - async_add_entities([StookalertBinarySensor(client, entry)], update_before_add=True) - - -class StookalertBinarySensor(BinarySensorEntity): - """Defines a Stookalert binary sensor.""" - - _attr_attribution = "Data provided by rivm.nl" - _attr_device_class = BinarySensorDeviceClass.SAFETY - _attr_has_entity_name = True - _attr_name = None - - def __init__(self, client: stookalert.stookalert, entry: ConfigEntry) -> None: - """Initialize a Stookalert device.""" - self._client = client - self._attr_unique_id = entry.unique_id - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, f"{entry.entry_id}")}, - 
name=f"Stookalert {entry.data[CONF_PROVINCE]}", - manufacturer="RIVM", - model="Stookalert", - entry_type=DeviceEntryType.SERVICE, - configuration_url="https://www.rivm.nl/stookalert", - ) - - def update(self) -> None: - """Update the data from the Stookalert handler.""" - self._client.get_alerts() - self._attr_is_on = self._client.state == 1 diff --git a/homeassistant/components/stookalert/config_flow.py b/homeassistant/components/stookalert/config_flow.py deleted file mode 100644 index 0d3bc0c1761..00000000000 --- a/homeassistant/components/stookalert/config_flow.py +++ /dev/null @@ -1,33 +0,0 @@ -"""Config flow to configure the Stookalert integration.""" - -from __future__ import annotations - -from typing import Any - -import voluptuous as vol - -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult - -from .const import CONF_PROVINCE, DOMAIN, PROVINCES - - -class StookalertFlowHandler(ConfigFlow, domain=DOMAIN): - """Config flow for Stookalert.""" - - VERSION = 1 - - async def async_step_user( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle a flow initialized by the user.""" - if user_input is not None: - await self.async_set_unique_id(user_input[CONF_PROVINCE]) - self._abort_if_unique_id_configured() - return self.async_create_entry( - title=user_input[CONF_PROVINCE], data=user_input - ) - - return self.async_show_form( - step_id="user", - data_schema=vol.Schema({vol.Required(CONF_PROVINCE): vol.In(PROVINCES)}), - ) diff --git a/homeassistant/components/stookalert/const.py b/homeassistant/components/stookalert/const.py deleted file mode 100644 index 9896eea212a..00000000000 --- a/homeassistant/components/stookalert/const.py +++ /dev/null @@ -1,24 +0,0 @@ -"""Constants for the Stookalert integration.""" - -import logging -from typing import Final - -DOMAIN: Final = "stookalert" -LOGGER = logging.getLogger(__package__) - -CONF_PROVINCE: Final = "province" - -PROVINCES: Final = ( - "Drenthe", - "Flevoland", - "Friesland", - "Gelderland", - "Groningen", - "Limburg", - "Noord-Brabant", - "Noord-Holland", - "Overijssel", - "Utrecht", - "Zeeland", - "Zuid-Holland", -) diff --git a/homeassistant/components/stookalert/diagnostics.py b/homeassistant/components/stookalert/diagnostics.py deleted file mode 100644 index c15e808ae19..00000000000 --- a/homeassistant/components/stookalert/diagnostics.py +++ /dev/null @@ -1,20 +0,0 @@ -"""Diagnostics support for Stookalert.""" - -from __future__ import annotations - -from typing import Any - -import stookalert - -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant - -from .const import DOMAIN - - -async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry -) -> dict[str, Any]: - """Return diagnostics for a config entry.""" - client: stookalert.stookalert = hass.data[DOMAIN][entry.entry_id] - return {"state": client.state} diff --git a/homeassistant/components/stookalert/manifest.json b/homeassistant/components/stookalert/manifest.json deleted file mode 100644 index 2bebc639720..00000000000 --- a/homeassistant/components/stookalert/manifest.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "domain": "stookalert", - "name": "RIVM Stookalert", - "codeowners": ["@fwestenberg", "@frenck"], - "config_flow": true, - "documentation": "https://www.home-assistant.io/integrations/stookalert", - "integration_type": "service", - "iot_class": "cloud_polling", - "requirements": ["stookalert==0.1.4"] -} diff --git 
a/homeassistant/components/stookalert/strings.json b/homeassistant/components/stookalert/strings.json deleted file mode 100644 index a05ae4e61e7..00000000000 --- a/homeassistant/components/stookalert/strings.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "config": { - "step": { - "user": { - "data": { - "province": "Province" - } - } - }, - "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" - } - } -} diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 5cd9dd786fe..37ffc8868fd 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -574,7 +574,6 @@ FLOWS = { "starlink", "steam_online", "steamist", - "stookalert", "stookwijzer", "streamlabswater", "subaru", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 9494ab2e201..b1b52332045 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -5951,12 +5951,6 @@ "config_flow": false, "iot_class": "local_polling" }, - "stookalert": { - "name": "RIVM Stookalert", - "integration_type": "service", - "config_flow": true, - "iot_class": "cloud_polling" - }, "stookwijzer": { "name": "Stookwijzer", "integration_type": "service", diff --git a/mypy.ini b/mypy.ini index ce51adc3816..fb58810515b 100644 --- a/mypy.ini +++ b/mypy.ini @@ -4156,16 +4156,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.stookalert.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.stookwijzer.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/requirements_all.txt b/requirements_all.txt index da41db79e06..02e2f1f048d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2742,9 +2742,6 @@ statsd==3.2.1 # homeassistant.components.steam_online steamodd==4.21 -# homeassistant.components.stookalert -stookalert==0.1.4 - # homeassistant.components.stookwijzer stookwijzer==1.5.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8e10a4e9b36..85b31f9c95b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2194,9 +2194,6 @@ statsd==3.2.1 # homeassistant.components.steam_online steamodd==4.21 -# homeassistant.components.stookalert -stookalert==0.1.4 - # homeassistant.components.stookwijzer stookwijzer==1.5.1 diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index b33649427c1..b1d7e597a07 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -990,7 +990,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "steam_online", "steamist", "stiebel_eltron", - "stookalert", "stream", "streamlabswater", "subaru", diff --git a/tests/components/stookalert/__init__.py b/tests/components/stookalert/__init__.py deleted file mode 100644 index 3785c76639a..00000000000 --- a/tests/components/stookalert/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the Stookalert integration.""" diff --git a/tests/components/stookalert/test_config_flow.py b/tests/components/stookalert/test_config_flow.py deleted file mode 100644 index 3664527cbcf..00000000000 --- a/tests/components/stookalert/test_config_flow.py +++ /dev/null @@ -1,59 +0,0 @@ -"""Tests for the 
Stookalert config flow.""" - -from unittest.mock import patch - -from homeassistant.components.stookalert.const import CONF_PROVINCE, DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from tests.common import MockConfigEntry - - -async def test_full_user_flow(hass: HomeAssistant) -> None: - """Test the full user configuration flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" - - with patch( - "homeassistant.components.stookalert.async_setup_entry", return_value=True - ) as mock_setup_entry: - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_PROVINCE: "Overijssel", - }, - ) - - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2.get("title") == "Overijssel" - assert result2.get("data") == { - CONF_PROVINCE: "Overijssel", - } - - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_already_configured(hass: HomeAssistant) -> None: - """Test we abort if the Stookalert province is already configured.""" - MockConfigEntry( - domain=DOMAIN, data={CONF_PROVINCE: "Overijssel"}, unique_id="Overijssel" - ).add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_PROVINCE: "Overijssel", - }, - ) - - assert result2.get("type") is FlowResultType.ABORT - assert result2.get("reason") == "already_configured" From e0bb0447828ab27bb4c6c72e49e2fd17dc781a86 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Mon, 9 Dec 2024 08:31:42 +0100 Subject: [PATCH 0360/1198] Remove not needed code check in yale_smart_alarm (#132649) --- homeassistant/components/yale_smart_alarm/lock.py | 8 +------- homeassistant/components/yale_smart_alarm/strings.json | 3 --- 2 files changed, 1 insertion(+), 10 deletions(-) diff --git a/homeassistant/components/yale_smart_alarm/lock.py b/homeassistant/components/yale_smart_alarm/lock.py index 243299658ed..7a93baf0827 100644 --- a/homeassistant/components/yale_smart_alarm/lock.py +++ b/homeassistant/components/yale_smart_alarm/lock.py @@ -9,7 +9,7 @@ from yalesmartalarmclient import YaleLock, YaleLockState from homeassistant.components.lock import LockEntity, LockState from homeassistant.const import ATTR_CODE from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import YaleConfigEntry @@ -65,12 +65,6 @@ class YaleDoorlock(YaleLockEntity, LockEntity): async def async_set_lock(self, state: YaleLockState, code: str | None) -> None: """Set lock.""" - if state is YaleLockState.UNLOCKED and not code: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="no_code", - ) - lock_state = False try: if state is YaleLockState.LOCKED: diff --git a/homeassistant/components/yale_smart_alarm/strings.json b/homeassistant/components/yale_smart_alarm/strings.json index 7f940e1139e..bd3ba0f0186 100644 --- a/homeassistant/components/yale_smart_alarm/strings.json +++ b/homeassistant/components/yale_smart_alarm/strings.json @@ -88,9 +88,6 @@ "set_lock": { "message": "Could not set lock for {name}: {error}" }, - "no_code": { - "message": "Can not unlock without code" - }, "could_not_change_lock": { "message": "Could not set lock, check system ready for lock" }, From f7ce11265399be456eab08fe7225cad6d21afc8f Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 08:32:30 +0100 Subject: [PATCH 0361/1198] Remove deprecated supported features warning in Remote (#132643) --- homeassistant/components/remote/__init__.py | 15 +------------- tests/components/remote/test_init.py | 22 --------------------- 2 files changed, 1 insertion(+), 36 deletions(-) diff --git a/homeassistant/components/remote/__init__.py b/homeassistant/components/remote/__init__.py index 9c54a40be70..36e482f0a29 100644 --- a/homeassistant/components/remote/__init__.py +++ b/homeassistant/components/remote/__init__.py @@ -170,19 +170,6 @@ class RemoteEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_) """Flag supported features.""" return self._attr_supported_features - @property - def supported_features_compat(self) -> RemoteEntityFeature: - """Return the supported features as RemoteEntityFeature. - - Remove this compatibility shim in 2025.1 or later. 
- """ - features = self.supported_features - if type(features) is int: # noqa: E721 - new_features = RemoteEntityFeature(features) - self._report_deprecated_supported_features_values(new_features) - return new_features - return features - @cached_property def current_activity(self) -> str | None: """Active activity.""" @@ -197,7 +184,7 @@ class RemoteEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_) @property def state_attributes(self) -> dict[str, Any] | None: """Return optional state attributes.""" - if RemoteEntityFeature.ACTIVITY not in self.supported_features_compat: + if RemoteEntityFeature.ACTIVITY not in self.supported_features: return None return { diff --git a/tests/components/remote/test_init.py b/tests/components/remote/test_init.py index 27219788906..51728d02ef3 100644 --- a/tests/components/remote/test_init.py +++ b/tests/components/remote/test_init.py @@ -1,7 +1,5 @@ """The tests for the Remote component, adapted from Light Test.""" -import pytest - from homeassistant.components import remote from homeassistant.components.remote import ( ATTR_ALTERNATIVE, @@ -142,23 +140,3 @@ async def test_delete_command(hass: HomeAssistant) -> None: assert call.domain == remote.DOMAIN assert call.service == SERVICE_DELETE_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_ID - - -def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: - """Test deprecated supported features ints.""" - - class MockRemote(remote.RemoteEntity): - @property - def supported_features(self) -> int: - """Return supported features.""" - return 1 - - entity = MockRemote() - assert entity.supported_features_compat is remote.RemoteEntityFeature(1) - assert "MockRemote" in caplog.text - assert "is using deprecated supported features values" in caplog.text - assert "Instead it should use" in caplog.text - assert "RemoteEntityFeature.LEARN_COMMAND" in caplog.text - caplog.clear() - assert entity.supported_features_compat is remote.RemoteEntityFeature(1) - assert "is using deprecated supported features values" not in caplog.text From 9ef9f2fafb86631c2222bfc571c7e67b73f9fcda Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 08:32:49 +0100 Subject: [PATCH 0362/1198] Remove deprecated supported features warning in Humidifier (#132641) --- .../components/humidifier/__init__.py | 17 ++----------- tests/components/humidifier/test_init.py | 25 ------------------- 2 files changed, 2 insertions(+), 40 deletions(-) diff --git a/homeassistant/components/humidifier/__init__.py b/homeassistant/components/humidifier/__init__.py index 1498c4f6e3d..8c892dca327 100644 --- a/homeassistant/components/humidifier/__init__.py +++ b/homeassistant/components/humidifier/__init__.py @@ -170,7 +170,7 @@ class HumidifierEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_AT ATTR_MAX_HUMIDITY: self.max_humidity, } - if HumidifierEntityFeature.MODES in self.supported_features_compat: + if HumidifierEntityFeature.MODES in self.supported_features: data[ATTR_AVAILABLE_MODES] = self.available_modes return data @@ -199,7 +199,7 @@ class HumidifierEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_AT if self.target_humidity is not None: data[ATTR_HUMIDITY] = self.target_humidity - if HumidifierEntityFeature.MODES in self.supported_features_compat: + if HumidifierEntityFeature.MODES in self.supported_features: data[ATTR_MODE] = self.mode return data @@ -266,19 +266,6 @@ class HumidifierEntity(ToggleEntity, 
cached_properties=CACHED_PROPERTIES_WITH_AT """Return the list of supported features.""" return self._attr_supported_features - @property - def supported_features_compat(self) -> HumidifierEntityFeature: - """Return the supported features as HumidifierEntityFeature. - - Remove this compatibility shim in 2025.1 or later. - """ - features = self.supported_features - if type(features) is int: # noqa: E721 - new_features = HumidifierEntityFeature(features) - self._report_deprecated_supported_features_values(new_features) - return new_features - return features - async def async_service_humidity_set( entity: HumidifierEntity, service_call: ServiceCall diff --git a/tests/components/humidifier/test_init.py b/tests/components/humidifier/test_init.py index 9c10d5e39e1..ce54863736b 100644 --- a/tests/components/humidifier/test_init.py +++ b/tests/components/humidifier/test_init.py @@ -6,7 +6,6 @@ import pytest from homeassistant.components.humidifier import ( ATTR_HUMIDITY, - ATTR_MODE, DOMAIN as HUMIDIFIER_DOMAIN, MODE_ECO, MODE_NORMAL, @@ -51,30 +50,6 @@ async def test_sync_turn_off(hass: HomeAssistant) -> None: assert humidifier.turn_off.called -def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: - """Test deprecated supported features ints.""" - - class MockHumidifierEntity(HumidifierEntity): - _attr_mode = "mode1" - - @property - def supported_features(self) -> int: - """Return supported features.""" - return 1 - - entity = MockHumidifierEntity() - assert entity.supported_features_compat is HumidifierEntityFeature(1) - assert "MockHumidifierEntity" in caplog.text - assert "is using deprecated supported features values" in caplog.text - assert "Instead it should use" in caplog.text - assert "HumidifierEntityFeature.MODES" in caplog.text - caplog.clear() - assert entity.supported_features_compat is HumidifierEntityFeature(1) - assert "is using deprecated supported features values" not in caplog.text - - assert entity.state_attributes[ATTR_MODE] == "mode1" - - async def test_humidity_validation( hass: HomeAssistant, register_test_integration: MockConfigEntry, From 1ec91e7c8968cbb3100ad2094e070b410692d73d Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 08:45:36 +0100 Subject: [PATCH 0363/1198] Remove deprecated supported features warning in Lock (#132642) --- homeassistant/components/lock/__init__.py | 7 +------ tests/components/lock/test_init.py | 17 ----------------- 2 files changed, 1 insertion(+), 23 deletions(-) diff --git a/homeassistant/components/lock/__init__.py b/homeassistant/components/lock/__init__.py index 9363d388637..39d5d3c350d 100644 --- a/homeassistant/components/lock/__init__.py +++ b/homeassistant/components/lock/__init__.py @@ -285,12 +285,7 @@ class LockEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): @cached_property def supported_features(self) -> LockEntityFeature: """Return the list of supported features.""" - features = self._attr_supported_features - if type(features) is int: # noqa: E721 - new_features = LockEntityFeature(features) - self._report_deprecated_supported_features_values(new_features) - return new_features - return features + return self._attr_supported_features async def async_internal_added_to_hass(self) -> None: """Call when the sensor entity is added to hass.""" diff --git a/tests/components/lock/test_init.py b/tests/components/lock/test_init.py index a1fed9fe7e1..68af8c7d482 100644 --- a/tests/components/lock/test_init.py +++ 
b/tests/components/lock/test_init.py @@ -417,20 +417,3 @@ def test_deprecated_constants( import_and_test_deprecated_constant_enum( caplog, lock, enum, constant_prefix, remove_in_version ) - - -def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: - """Test deprecated supported features ints.""" - - class MockLockEntity(lock.LockEntity): - _attr_supported_features = 1 - - entity = MockLockEntity() - assert entity.supported_features is lock.LockEntityFeature(1) - assert "MockLockEntity" in caplog.text - assert "is using deprecated supported features values" in caplog.text - assert "Instead it should use" in caplog.text - assert "LockEntityFeature.OPEN" in caplog.text - caplog.clear() - assert entity.supported_features is lock.LockEntityFeature(1) - assert "is using deprecated supported features values" not in caplog.text From 24b1eeb900e064481b72eadcd1cbaea49b9412fa Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 10:15:01 +0100 Subject: [PATCH 0364/1198] Remove YAML support from vizio (#132351) --- homeassistant/components/vizio/__init__.py | 43 +-- homeassistant/components/vizio/config_flow.py | 94 +---- homeassistant/components/vizio/const.py | 48 --- tests/components/vizio/const.py | 25 -- tests/components/vizio/test_config_flow.py | 331 +----------------- tests/components/vizio/test_init.py | 11 - tests/components/vizio/test_media_player.py | 24 +- 7 files changed, 11 insertions(+), 565 deletions(-) diff --git a/homeassistant/components/vizio/__init__.py b/homeassistant/components/vizio/__init__.py index 09d6f3be090..4af42d76b62 100644 --- a/homeassistant/components/vizio/__init__.py +++ b/homeassistant/components/vizio/__init__.py @@ -4,55 +4,18 @@ from __future__ import annotations from typing import Any -import voluptuous as vol - from homeassistant.components.media_player import MediaPlayerDeviceClass -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry, ConfigEntryState -from homeassistant.const import Platform +from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.const import CONF_DEVICE_CLASS, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_validation as cv from homeassistant.helpers.storage import Store -from homeassistant.helpers.typing import ConfigType -from .const import CONF_APPS, CONF_DEVICE_CLASS, DOMAIN, VIZIO_SCHEMA +from .const import CONF_APPS, DOMAIN from .coordinator import VizioAppsDataUpdateCoordinator - -def validate_apps(config: ConfigType) -> ConfigType: - """Validate CONF_APPS is only used when CONF_DEVICE_CLASS is MediaPlayerDeviceClass.TV.""" - if ( - config.get(CONF_APPS) is not None - and config[CONF_DEVICE_CLASS] != MediaPlayerDeviceClass.TV - ): - raise vol.Invalid( - f"'{CONF_APPS}' can only be used if {CONF_DEVICE_CLASS}' is" - f" '{MediaPlayerDeviceClass.TV}'" - ) - - return config - - -CONFIG_SCHEMA = vol.Schema( - {DOMAIN: vol.All(cv.ensure_list, [vol.All(VIZIO_SCHEMA, validate_apps)])}, - extra=vol.ALLOW_EXTRA, -) - PLATFORMS = [Platform.MEDIA_PLAYER] -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Component setup, run import config flow for each entry in config.""" - if DOMAIN in config: - for entry in config[DOMAIN]: - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=entry - ) - ) - - return True - - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) 
-> bool: """Load the saved entities.""" diff --git a/homeassistant/components/vizio/config_flow.py b/homeassistant/components/vizio/config_flow.py index 54031930503..d3921061d8e 100644 --- a/homeassistant/components/vizio/config_flow.py +++ b/homeassistant/components/vizio/config_flow.py @@ -14,8 +14,6 @@ import voluptuous as vol from homeassistant.components import zeroconf from homeassistant.components.media_player import MediaPlayerDeviceClass from homeassistant.config_entries import ( - SOURCE_IGNORE, - SOURCE_IMPORT, SOURCE_ZEROCONF, ConfigEntry, ConfigFlow, @@ -231,7 +229,7 @@ class VizioConfigFlow(ConfigFlow, domain=DOMAIN): errors[CONF_HOST] = "existing_config_entry_found" if not errors: - if self._must_show_form and self.source == SOURCE_ZEROCONF: + if self._must_show_form and self.context["source"] == SOURCE_ZEROCONF: # Discovery should always display the config form before trying to # create entry so that user can update default config options self._must_show_form = False @@ -251,98 +249,13 @@ class VizioConfigFlow(ConfigFlow, domain=DOMAIN): if not errors: return await self._create_entry(user_input) - elif self._must_show_form and self.source == SOURCE_IMPORT: - # Import should always display the config form if CONF_ACCESS_TOKEN - # wasn't included but is needed so that the user can choose to update - # their configuration.yaml or to proceed with config flow pairing. We - # will also provide contextual message to user explaining why - _LOGGER.warning( - ( - "Couldn't complete configuration.yaml import: '%s' key is " - "missing. Either provide '%s' key in configuration.yaml or " - "finish setup by completing configuration via frontend" - ), - CONF_ACCESS_TOKEN, - CONF_ACCESS_TOKEN, - ) - self._must_show_form = False else: self._data = copy.deepcopy(user_input) return await self.async_step_pair_tv() schema = self._user_schema or _get_config_schema() - - if errors and self.source == SOURCE_IMPORT: - # Log an error message if import config flow fails since otherwise failure is silent - _LOGGER.error( - "Importing from configuration.yaml failed: %s", - ", ".join(errors.values()), - ) - return self.async_show_form(step_id="user", data_schema=schema, errors=errors) - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Import a config entry from configuration.yaml.""" - # Check if new config entry matches any existing config entries - for entry in self._async_current_entries(): - # If source is ignore bypass host check and continue through loop - if entry.source == SOURCE_IGNORE: - continue - - if await self.hass.async_add_executor_job( - _host_is_same, entry.data[CONF_HOST], import_data[CONF_HOST] - ): - updated_options: dict[str, Any] = {} - updated_data: dict[str, Any] = {} - remove_apps = False - - if entry.data[CONF_HOST] != import_data[CONF_HOST]: - updated_data[CONF_HOST] = import_data[CONF_HOST] - - if entry.data[CONF_NAME] != import_data[CONF_NAME]: - updated_data[CONF_NAME] = import_data[CONF_NAME] - - # Update entry.data[CONF_APPS] if import_config[CONF_APPS] differs, and - # pop entry.data[CONF_APPS] if import_config[CONF_APPS] is not specified - if entry.data.get(CONF_APPS) != import_data.get(CONF_APPS): - if not import_data.get(CONF_APPS): - remove_apps = True - else: - updated_options[CONF_APPS] = import_data[CONF_APPS] - - if entry.data.get(CONF_VOLUME_STEP) != import_data[CONF_VOLUME_STEP]: - updated_options[CONF_VOLUME_STEP] = import_data[CONF_VOLUME_STEP] - - if updated_options or updated_data or remove_apps: - new_data = 
entry.data.copy() - new_options = entry.options.copy() - - if remove_apps: - new_data.pop(CONF_APPS) - new_options.pop(CONF_APPS) - - if updated_data: - new_data.update(updated_data) - - # options are stored in entry options and data so update both - if updated_options: - new_data.update(updated_options) - new_options.update(updated_options) - - self.hass.config_entries.async_update_entry( - entry=entry, data=new_data, options=new_options - ) - return self.async_abort(reason="updated_entry") - - return self.async_abort(reason="already_configured_device") - - self._must_show_form = True - # Store config key/value pairs that are not configurable in user step so they - # don't get lost on user step - if import_data.get(CONF_APPS): - self._apps = copy.deepcopy(import_data[CONF_APPS]) - return await self.async_step_user(user_input=import_data) - async def async_step_zeroconf( self, discovery_info: zeroconf.ZeroconfServiceInfo ) -> ConfigFlowResult: @@ -433,11 +346,6 @@ class VizioConfigFlow(ConfigFlow, domain=DOMAIN): if pair_data: self._data[CONF_ACCESS_TOKEN] = pair_data.auth_token self._must_show_form = True - - if self.source == SOURCE_IMPORT: - # If user is pairing via config import, show different message - return await self.async_step_pairing_complete_import() - return await self.async_step_pairing_complete() # If no data was retrieved, it's assumed that the pairing attempt was not diff --git a/homeassistant/components/vizio/const.py b/homeassistant/components/vizio/const.py index 4eb96256d2e..8451ae747de 100644 --- a/homeassistant/components/vizio/const.py +++ b/homeassistant/components/vizio/const.py @@ -10,14 +10,6 @@ from homeassistant.components.media_player import ( MediaPlayerDeviceClass, MediaPlayerEntityFeature, ) -from homeassistant.const import ( - CONF_ACCESS_TOKEN, - CONF_DEVICE_CLASS, - CONF_EXCLUDE, - CONF_HOST, - CONF_INCLUDE, - CONF_NAME, -) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.typing import VolDictType @@ -84,43 +76,3 @@ VIZIO_DEVICE_CLASSES = { MediaPlayerDeviceClass.SPEAKER: VIZIO_DEVICE_CLASS_SPEAKER, MediaPlayerDeviceClass.TV: VIZIO_DEVICE_CLASS_TV, } - -VIZIO_SCHEMA = { - vol.Required(CONF_HOST): cv.string, - vol.Optional(CONF_ACCESS_TOKEN): cv.string, - vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - vol.Optional(CONF_DEVICE_CLASS, default=DEFAULT_DEVICE_CLASS): vol.All( - cv.string, - vol.Lower, - vol.In([MediaPlayerDeviceClass.TV, MediaPlayerDeviceClass.SPEAKER]), - ), - vol.Optional(CONF_VOLUME_STEP, default=DEFAULT_VOLUME_STEP): vol.All( - vol.Coerce(int), vol.Range(min=1, max=10) - ), - vol.Optional(CONF_APPS): vol.All( - { - vol.Exclusive(CONF_INCLUDE, "apps_filter"): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Exclusive(CONF_EXCLUDE, "apps_filter"): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(CONF_ADDITIONAL_CONFIGS): vol.All( - cv.ensure_list, - [ - { - vol.Required(CONF_NAME): cv.string, - vol.Required(CONF_CONFIG): { - vol.Required(CONF_APP_ID): cv.string, - vol.Required(CONF_NAME_SPACE): vol.Coerce(int), - vol.Optional(CONF_MESSAGE, default=None): vol.Or( - cv.string, None - ), - }, - }, - ], - ), - }, - cv.has_at_least_one_key(CONF_INCLUDE, CONF_EXCLUDE, CONF_ADDITIONAL_CONFIGS), - ), -} diff --git a/tests/components/vizio/const.py b/tests/components/vizio/const.py index 3e7b0c83c70..51151ae8f42 100644 --- a/tests/components/vizio/const.py +++ b/tests/components/vizio/const.py @@ -112,14 +112,6 @@ MOCK_OPTIONS = { CONF_VOLUME_STEP: VOLUME_STEP, } -MOCK_IMPORT_VALID_TV_CONFIG = 
{ - CONF_NAME: NAME, - CONF_HOST: HOST, - CONF_DEVICE_CLASS: MediaPlayerDeviceClass.TV, - CONF_ACCESS_TOKEN: ACCESS_TOKEN, - CONF_VOLUME_STEP: VOLUME_STEP, -} - MOCK_TV_WITH_INCLUDE_CONFIG = { CONF_NAME: NAME, CONF_HOST: HOST, @@ -147,23 +139,6 @@ MOCK_TV_WITH_ADDITIONAL_APPS_CONFIG = { CONF_APPS: {CONF_ADDITIONAL_CONFIGS: [ADDITIONAL_APP_CONFIG]}, } -MOCK_SPEAKER_APPS_FAILURE = { - CONF_NAME: NAME, - CONF_HOST: HOST, - CONF_DEVICE_CLASS: MediaPlayerDeviceClass.SPEAKER, - CONF_ACCESS_TOKEN: ACCESS_TOKEN, - CONF_VOLUME_STEP: VOLUME_STEP, - CONF_APPS: {CONF_ADDITIONAL_CONFIGS: [ADDITIONAL_APP_CONFIG]}, -} - -MOCK_TV_APPS_FAILURE = { - CONF_NAME: NAME, - CONF_HOST: HOST, - CONF_DEVICE_CLASS: MediaPlayerDeviceClass.TV, - CONF_ACCESS_TOKEN: ACCESS_TOKEN, - CONF_VOLUME_STEP: VOLUME_STEP, - CONF_APPS: None, -} MOCK_TV_APPS_WITH_VALID_APPS_CONFIG = { CONF_HOST: HOST, diff --git a/tests/components/vizio/test_config_flow.py b/tests/components/vizio/test_config_flow.py index 42d4394ca80..2ef7c18bd04 100644 --- a/tests/components/vizio/test_config_flow.py +++ b/tests/components/vizio/test_config_flow.py @@ -3,30 +3,20 @@ import dataclasses import pytest -import voluptuous as vol from homeassistant.components.media_player import MediaPlayerDeviceClass -from homeassistant.components.vizio.config_flow import _get_config_schema from homeassistant.components.vizio.const import ( CONF_APPS, CONF_APPS_TO_INCLUDE_OR_EXCLUDE, - CONF_INCLUDE, CONF_VOLUME_STEP, - DEFAULT_NAME, - DEFAULT_VOLUME_STEP, DOMAIN, - VIZIO_SCHEMA, -) -from homeassistant.config_entries import ( - SOURCE_IGNORE, - SOURCE_IMPORT, - SOURCE_USER, - SOURCE_ZEROCONF, ) +from homeassistant.config_entries import SOURCE_IGNORE, SOURCE_USER, SOURCE_ZEROCONF from homeassistant.const import ( CONF_ACCESS_TOKEN, CONF_DEVICE_CLASS, CONF_HOST, + CONF_INCLUDE, CONF_NAME, CONF_PIN, ) @@ -38,14 +28,11 @@ from .const import ( CURRENT_APP, HOST, HOST2, - MOCK_IMPORT_VALID_TV_CONFIG, MOCK_INCLUDE_APPS, MOCK_INCLUDE_NO_APPS, MOCK_PIN_CONFIG, MOCK_SPEAKER_CONFIG, MOCK_TV_CONFIG_NO_TOKEN, - MOCK_TV_WITH_ADDITIONAL_APPS_CONFIG, - MOCK_TV_WITH_EXCLUDE_CONFIG, MOCK_USER_VALID_TV_CONFIG, MOCK_ZEROCONF_SERVICE_INFO, NAME, @@ -370,297 +357,6 @@ async def test_user_ignore(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") -async def test_import_flow_minimum_fields(hass: HomeAssistant) -> None: - """Test import config flow with minimum fields.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=vol.Schema(VIZIO_SCHEMA)( - {CONF_HOST: HOST, CONF_DEVICE_CLASS: MediaPlayerDeviceClass.SPEAKER} - ), - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == DEFAULT_NAME - assert result["data"][CONF_NAME] == DEFAULT_NAME - assert result["data"][CONF_HOST] == HOST - assert result["data"][CONF_DEVICE_CLASS] == MediaPlayerDeviceClass.SPEAKER - assert result["data"][CONF_VOLUME_STEP] == DEFAULT_VOLUME_STEP - - -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") -async def test_import_flow_all_fields(hass: HomeAssistant) -> None: - """Test import config flow with all fields.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_IMPORT_VALID_TV_CONFIG), - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == NAME - assert result["data"][CONF_NAME] == NAME - assert 
result["data"][CONF_HOST] == HOST - assert result["data"][CONF_DEVICE_CLASS] == MediaPlayerDeviceClass.TV - assert result["data"][CONF_ACCESS_TOKEN] == ACCESS_TOKEN - assert result["data"][CONF_VOLUME_STEP] == VOLUME_STEP - - -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") -async def test_import_entity_already_configured(hass: HomeAssistant) -> None: - """Test entity is already configured during import setup.""" - entry = MockConfigEntry( - domain=DOMAIN, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_SPEAKER_CONFIG), - options={CONF_VOLUME_STEP: VOLUME_STEP}, - ) - entry.add_to_hass(hass) - fail_entry = vol.Schema(VIZIO_SCHEMA)(MOCK_SPEAKER_CONFIG.copy()) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=fail_entry - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured_device" - - -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") -async def test_import_flow_update_options(hass: HomeAssistant) -> None: - """Test import config flow with updated options.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_SPEAKER_CONFIG), - ) - await hass.async_block_till_done() - - assert result["result"].options == {CONF_VOLUME_STEP: DEFAULT_VOLUME_STEP} - assert result["type"] is FlowResultType.CREATE_ENTRY - entry_id = result["result"].entry_id - - updated_config = MOCK_SPEAKER_CONFIG.copy() - updated_config[CONF_VOLUME_STEP] = VOLUME_STEP + 1 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=vol.Schema(VIZIO_SCHEMA)(updated_config), - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "updated_entry" - config_entry = hass.config_entries.async_get_entry(entry_id) - assert config_entry.options[CONF_VOLUME_STEP] == VOLUME_STEP + 1 - - -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") -async def test_import_flow_update_name_and_apps(hass: HomeAssistant) -> None: - """Test import config flow with updated name and apps.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_IMPORT_VALID_TV_CONFIG), - ) - await hass.async_block_till_done() - - assert result["result"].data[CONF_NAME] == NAME - assert result["type"] is FlowResultType.CREATE_ENTRY - entry_id = result["result"].entry_id - - updated_config = MOCK_IMPORT_VALID_TV_CONFIG.copy() - updated_config[CONF_NAME] = NAME2 - updated_config[CONF_APPS] = {CONF_INCLUDE: [CURRENT_APP]} - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=vol.Schema(VIZIO_SCHEMA)(updated_config), - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "updated_entry" - config_entry = hass.config_entries.async_get_entry(entry_id) - assert config_entry.data[CONF_NAME] == NAME2 - assert config_entry.data[CONF_APPS] == {CONF_INCLUDE: [CURRENT_APP]} - assert config_entry.options[CONF_APPS] == {CONF_INCLUDE: [CURRENT_APP]} - - -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") -async def test_import_flow_update_remove_apps(hass: HomeAssistant) -> None: - """Test import config flow with removed apps.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_TV_WITH_EXCLUDE_CONFIG), - ) - await 
hass.async_block_till_done() - - assert result["result"].data[CONF_NAME] == NAME - assert result["type"] is FlowResultType.CREATE_ENTRY - config_entry = hass.config_entries.async_get_entry(result["result"].entry_id) - assert CONF_APPS in config_entry.data - assert CONF_APPS in config_entry.options - - updated_config = MOCK_TV_WITH_EXCLUDE_CONFIG.copy() - updated_config.pop(CONF_APPS) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=vol.Schema(VIZIO_SCHEMA)(updated_config), - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "updated_entry" - assert CONF_APPS not in config_entry.data - assert CONF_APPS not in config_entry.options - - -@pytest.mark.usefixtures( - "vizio_connect", "vizio_bypass_setup", "vizio_complete_pairing" -) -async def test_import_needs_pairing(hass: HomeAssistant) -> None: - """Test pairing config flow when access token not provided for tv during import.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=MOCK_TV_CONFIG_NO_TOKEN - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=MOCK_TV_CONFIG_NO_TOKEN - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "pair_tv" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=MOCK_PIN_CONFIG - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "pairing_complete_import" - - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == NAME - assert result["data"][CONF_NAME] == NAME - assert result["data"][CONF_HOST] == HOST - assert result["data"][CONF_DEVICE_CLASS] == MediaPlayerDeviceClass.TV - - -@pytest.mark.usefixtures( - "vizio_connect", "vizio_bypass_setup", "vizio_complete_pairing" -) -async def test_import_with_apps_needs_pairing(hass: HomeAssistant) -> None: - """Test pairing config flow when access token not provided for tv but apps are included during import.""" - import_config = MOCK_TV_CONFIG_NO_TOKEN.copy() - import_config[CONF_APPS] = {CONF_INCLUDE: [CURRENT_APP]} - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=import_config - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - # Mock inputting info without apps to make sure apps get stored - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input=_get_config_schema(MOCK_TV_CONFIG_NO_TOKEN)(MOCK_TV_CONFIG_NO_TOKEN), - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "pair_tv" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=MOCK_PIN_CONFIG - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "pairing_complete_import" - - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == NAME - assert result["data"][CONF_NAME] == NAME - assert result["data"][CONF_HOST] == HOST - assert result["data"][CONF_DEVICE_CLASS] == MediaPlayerDeviceClass.TV - assert result["data"][CONF_APPS][CONF_INCLUDE] == [CURRENT_APP] - - -@pytest.mark.usefixtures("vizio_connect", 
"vizio_bypass_update") -async def test_import_flow_additional_configs(hass: HomeAssistant) -> None: - """Test import config flow with additional configs defined in CONF_APPS.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_TV_WITH_ADDITIONAL_APPS_CONFIG), - ) - await hass.async_block_till_done() - - assert result["result"].data[CONF_NAME] == NAME - assert result["type"] is FlowResultType.CREATE_ENTRY - config_entry = hass.config_entries.async_get_entry(result["result"].entry_id) - assert CONF_APPS in config_entry.data - assert CONF_APPS not in config_entry.options - - -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") -async def test_import_error( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test that error is logged when import config has an error.""" - entry = MockConfigEntry( - domain=DOMAIN, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_SPEAKER_CONFIG), - options={CONF_VOLUME_STEP: VOLUME_STEP}, - unique_id=UNIQUE_ID, - ) - entry.add_to_hass(hass) - fail_entry = MOCK_SPEAKER_CONFIG.copy() - fail_entry[CONF_HOST] = "0.0.0.0" - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=vol.Schema(VIZIO_SCHEMA)(fail_entry), - ) - - assert result["type"] is FlowResultType.FORM - - # Ensure error gets logged - vizio_log_list = [ - log - for log in caplog.records - if log.name == "homeassistant.components.vizio.config_flow" - ] - assert len(vizio_log_list) == 1 - - -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") -async def test_import_ignore(hass: HomeAssistant) -> None: - """Test import config flow doesn't throw an error when there's an existing ignored source.""" - entry = MockConfigEntry( - domain=DOMAIN, - data=MOCK_SPEAKER_CONFIG, - options={CONF_VOLUME_STEP: VOLUME_STEP}, - source=SOURCE_IGNORE, - ) - entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_SPEAKER_CONFIG), - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - - @pytest.mark.usefixtures( "vizio_connect", "vizio_bypass_setup", "vizio_guess_device_type" ) @@ -854,26 +550,3 @@ async def test_zeroconf_flow_already_configured_hostname(hass: HomeAssistant) -> # Flow should abort because device is already setup assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - - -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup", "vizio_hostname_check") -async def test_import_flow_already_configured_hostname(hass: HomeAssistant) -> None: - """Test entity is already configured during import setup when existing entry uses hostname.""" - config = MOCK_SPEAKER_CONFIG.copy() - config[CONF_HOST] = "hostname" - entry = MockConfigEntry( - domain=DOMAIN, data=config, options={CONF_VOLUME_STEP: VOLUME_STEP} - ) - entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_SPEAKER_CONFIG), - ) - - # Flow should abort because device was updated - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "updated_entry" - - assert entry.data[CONF_HOST] == HOST diff --git a/tests/components/vizio/test_init.py b/tests/components/vizio/test_init.py index c2b19377809..e004255ec6d 100644 --- a/tests/components/vizio/test_init.py +++ b/tests/components/vizio/test_init.py @@ -7,7 
+7,6 @@ import pytest from homeassistant.components.vizio.const import DOMAIN from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from .const import MOCK_SPEAKER_CONFIG, MOCK_USER_VALID_TV_CONFIG, UNIQUE_ID @@ -15,16 +14,6 @@ from .const import MOCK_SPEAKER_CONFIG, MOCK_USER_VALID_TV_CONFIG, UNIQUE_ID from tests.common import MockConfigEntry, async_fire_time_changed -@pytest.mark.usefixtures("vizio_connect", "vizio_update") -async def test_setup_component(hass: HomeAssistant) -> None: - """Test component setup.""" - assert await async_setup_component( - hass, DOMAIN, {DOMAIN: MOCK_USER_VALID_TV_CONFIG} - ) - await hass.async_block_till_done() - assert len(hass.states.async_entity_ids(Platform.MEDIA_PLAYER)) == 1 - - @pytest.mark.usefixtures("vizio_connect", "vizio_update") async def test_tv_load_and_unload(hass: HomeAssistant) -> None: """Test loading and unloading TV entry.""" diff --git a/tests/components/vizio/test_media_player.py b/tests/components/vizio/test_media_player.py index 12e19077c8e..a76dfa3fa2d 100644 --- a/tests/components/vizio/test_media_player.py +++ b/tests/components/vizio/test_media_player.py @@ -19,7 +19,6 @@ from pyvizio.const import ( MAX_VOLUME, UNKNOWN_APP, ) -import voluptuous as vol from homeassistant.components.media_player import ( ATTR_INPUT_SOURCE, @@ -42,7 +41,6 @@ from homeassistant.components.media_player import ( SERVICE_VOLUME_UP, MediaPlayerDeviceClass, ) -from homeassistant.components.vizio import validate_apps from homeassistant.components.vizio.const import ( CONF_ADDITIONAL_CONFIGS, CONF_APPS, @@ -50,7 +48,6 @@ from homeassistant.components.vizio.const import ( DEFAULT_VOLUME_STEP, DOMAIN, SERVICE_UPDATE_SETTING, - VIZIO_SCHEMA, ) from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant @@ -69,9 +66,7 @@ from .const import ( EQ_LIST, INPUT_LIST, INPUT_LIST_WITH_APPS, - MOCK_SPEAKER_APPS_FAILURE, MOCK_SPEAKER_CONFIG, - MOCK_TV_APPS_FAILURE, MOCK_TV_WITH_ADDITIONAL_APPS_CONFIG, MOCK_TV_WITH_EXCLUDE_CONFIG, MOCK_TV_WITH_INCLUDE_CONFIG, @@ -155,7 +150,7 @@ async def _test_setup_tv(hass: HomeAssistant, vizio_power_state: bool | None) -> config_entry = MockConfigEntry( domain=DOMAIN, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_USER_VALID_TV_CONFIG), + data=MOCK_USER_VALID_TV_CONFIG, unique_id=UNIQUE_ID, ) @@ -181,7 +176,7 @@ async def _test_setup_speaker( config_entry = MockConfigEntry( domain=DOMAIN, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_SPEAKER_CONFIG), + data=MOCK_SPEAKER_CONFIG, unique_id=UNIQUE_ID, ) @@ -215,7 +210,7 @@ async def _cm_for_test_setup_tv_with_apps( ) -> AsyncIterator[None]: """Context manager to setup test for Vizio TV with support for apps.""" config_entry = MockConfigEntry( - domain=DOMAIN, data=vol.Schema(VIZIO_SCHEMA)(device_config), unique_id=UNIQUE_ID + domain=DOMAIN, data=device_config, unique_id=UNIQUE_ID ) async with _cm_for_test_setup_without_apps( @@ -641,15 +636,6 @@ async def test_setup_with_apps_additional_apps_config( assert not service_call2.called -def test_invalid_apps_config(hass: HomeAssistant) -> None: - """Test that schema validation fails on certain conditions.""" - with pytest.raises(vol.Invalid): - vol.Schema(vol.All(VIZIO_SCHEMA, validate_apps))(MOCK_TV_APPS_FAILURE) - - with pytest.raises(vol.Invalid): - vol.Schema(vol.All(VIZIO_SCHEMA, validate_apps))(MOCK_SPEAKER_APPS_FAILURE) - - 
@pytest.mark.usefixtures("vizio_connect", "vizio_update_with_apps") async def test_setup_with_unknown_app_config( hass: HomeAssistant, @@ -687,7 +673,7 @@ async def test_setup_tv_without_mute(hass: HomeAssistant) -> None: """Test Vizio TV entity setup when mute property isn't returned by Vizio API.""" config_entry = MockConfigEntry( domain=DOMAIN, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_USER_VALID_TV_CONFIG), + data=MOCK_USER_VALID_TV_CONFIG, unique_id=UNIQUE_ID, ) @@ -742,7 +728,7 @@ async def test_vizio_update_with_apps_on_input(hass: HomeAssistant) -> None: """Test a vizio TV with apps that is on a TV input.""" config_entry = MockConfigEntry( domain=DOMAIN, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_USER_VALID_TV_CONFIG), + data=MOCK_USER_VALID_TV_CONFIG, unique_id=UNIQUE_ID, ) await _add_config_entry_to_hass(hass, config_entry) From 427db020298ee112e64704401ee0f635f5f4490a Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 10:16:48 +0100 Subject: [PATCH 0365/1198] Remove deprecated supported features warning in AlarmControlPanel (#132665) --- .../alarm_control_panel/__init__.py | 7 +----- .../alarm_control_panel/test_init.py | 23 ------------------- 2 files changed, 1 insertion(+), 29 deletions(-) diff --git a/homeassistant/components/alarm_control_panel/__init__.py b/homeassistant/components/alarm_control_panel/__init__.py index 5bb00360177..4c5e201df8f 100644 --- a/homeassistant/components/alarm_control_panel/__init__.py +++ b/homeassistant/components/alarm_control_panel/__init__.py @@ -355,12 +355,7 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A @cached_property def supported_features(self) -> AlarmControlPanelEntityFeature: """Return the list of supported features.""" - features = self._attr_supported_features - if type(features) is int: # noqa: E721 - new_features = AlarmControlPanelEntityFeature(features) - self._report_deprecated_supported_features_values(new_features) - return new_features - return features + return self._attr_supported_features @final @property diff --git a/tests/components/alarm_control_panel/test_init.py b/tests/components/alarm_control_panel/test_init.py index 84d27a96db2..168d7ecc269 100644 --- a/tests/components/alarm_control_panel/test_init.py +++ b/tests/components/alarm_control_panel/test_init.py @@ -54,29 +54,6 @@ async def help_test_async_alarm_control_panel_service( await hass.async_block_till_done() -def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: - """Test deprecated supported features ints.""" - - class MockAlarmControlPanelEntity(alarm_control_panel.AlarmControlPanelEntity): - _attr_supported_features = 1 - - entity = MockAlarmControlPanelEntity() - assert ( - entity.supported_features - is alarm_control_panel.AlarmControlPanelEntityFeature(1) - ) - assert "MockAlarmControlPanelEntity" in caplog.text - assert "is using deprecated supported features values" in caplog.text - assert "Instead it should use" in caplog.text - assert "AlarmControlPanelEntityFeature.ARM_HOME" in caplog.text - caplog.clear() - assert ( - entity.supported_features - is alarm_control_panel.AlarmControlPanelEntityFeature(1) - ) - assert "is using deprecated supported features values" not in caplog.text - - async def test_set_mock_alarm_control_panel_options( hass: HomeAssistant, entity_registry: er.EntityRegistry, From 31150bf897ffbdbb15cf419ffee5f0eebf9ca119 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 
2024 10:19:07 +0100 Subject: [PATCH 0366/1198] Remove deprecated supported features warning in Siren (#132666) --- homeassistant/components/siren/__init__.py | 7 +------ tests/components/siren/test_init.py | 18 ------------------ 2 files changed, 1 insertion(+), 24 deletions(-) diff --git a/homeassistant/components/siren/__init__.py b/homeassistant/components/siren/__init__.py index 8fab0dfe96d..9ce6898fd93 100644 --- a/homeassistant/components/siren/__init__.py +++ b/homeassistant/components/siren/__init__.py @@ -191,9 +191,4 @@ class SirenEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): @cached_property def supported_features(self) -> SirenEntityFeature: """Return the list of supported features.""" - features = self._attr_supported_features - if type(features) is int: # noqa: E721 - new_features = SirenEntityFeature(features) - self._report_deprecated_supported_features_values(new_features) - return new_features - return features + return self._attr_supported_features diff --git a/tests/components/siren/test_init.py b/tests/components/siren/test_init.py index 68a4eb03998..b78d25366fa 100644 --- a/tests/components/siren/test_init.py +++ b/tests/components/siren/test_init.py @@ -4,7 +4,6 @@ from unittest.mock import MagicMock import pytest -from homeassistant.components import siren from homeassistant.components.siren import ( SirenEntity, SirenEntityDescription, @@ -106,20 +105,3 @@ async def test_missing_tones_dict(hass: HomeAssistant) -> None: siren.hass = hass with pytest.raises(ValueError): process_turn_on_params(siren, {"tone": 3}) - - -def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: - """Test deprecated supported features ints.""" - - class MockSirenEntity(siren.SirenEntity): - _attr_supported_features = 1 - - entity = MockSirenEntity() - assert entity.supported_features is siren.SirenEntityFeature(1) - assert "MockSirenEntity" in caplog.text - assert "is using deprecated supported features values" in caplog.text - assert "Instead it should use" in caplog.text - assert "SirenEntityFeature.TURN_ON" in caplog.text - caplog.clear() - assert entity.supported_features is siren.SirenEntityFeature(1) - assert "is using deprecated supported features values" not in caplog.text From 57d5d7d2f2436282b936b40260c309502c954697 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 10:47:38 +0100 Subject: [PATCH 0367/1198] Remove deprecated supported features warning in Vacuum (#132670) --- homeassistant/components/vacuum/__init__.py | 17 ++-------- tests/components/vacuum/test_init.py | 36 --------------------- 2 files changed, 2 insertions(+), 51 deletions(-) diff --git a/homeassistant/components/vacuum/__init__.py b/homeassistant/components/vacuum/__init__.py index 6fe2c3e2a5b..46e35bb3e11 100644 --- a/homeassistant/components/vacuum/__init__.py +++ b/homeassistant/components/vacuum/__init__.py @@ -312,7 +312,7 @@ class StateVacuumEntity( @property def capability_attributes(self) -> dict[str, Any] | None: """Return capability attributes.""" - if VacuumEntityFeature.FAN_SPEED in self.supported_features_compat: + if VacuumEntityFeature.FAN_SPEED in self.supported_features: return {ATTR_FAN_SPEED_LIST: self.fan_speed_list} return None @@ -330,7 +330,7 @@ class StateVacuumEntity( def state_attributes(self) -> dict[str, Any]: """Return the state attributes of the vacuum cleaner.""" data: dict[str, Any] = {} - supported_features = self.supported_features_compat + supported_features = 
self.supported_features if VacuumEntityFeature.BATTERY in supported_features: data[ATTR_BATTERY_LEVEL] = self.battery_level @@ -369,19 +369,6 @@ class StateVacuumEntity( """Flag vacuum cleaner features that are supported.""" return self._attr_supported_features - @property - def supported_features_compat(self) -> VacuumEntityFeature: - """Return the supported features as VacuumEntityFeature. - - Remove this compatibility shim in 2025.1 or later. - """ - features = self.supported_features - if type(features) is int: # noqa: E721 - new_features = VacuumEntityFeature(features) - self._report_deprecated_supported_features_values(new_features) - return new_features - return features - def stop(self, **kwargs: Any) -> None: """Stop the vacuum cleaner.""" raise NotImplementedError diff --git a/tests/components/vacuum/test_init.py b/tests/components/vacuum/test_init.py index 8babd9fa265..db6cd242f3f 100644 --- a/tests/components/vacuum/test_init.py +++ b/tests/components/vacuum/test_init.py @@ -272,42 +272,6 @@ async def test_send_command(hass: HomeAssistant, config_flow_fixture: None) -> N assert "test" in strings -async def test_supported_features_compat(hass: HomeAssistant) -> None: - """Test StateVacuumEntity using deprecated feature constants features.""" - - features = ( - VacuumEntityFeature.BATTERY - | VacuumEntityFeature.FAN_SPEED - | VacuumEntityFeature.START - | VacuumEntityFeature.STOP - | VacuumEntityFeature.PAUSE - ) - - class _LegacyConstantsStateVacuum(StateVacuumEntity): - _attr_supported_features = int(features) - _attr_fan_speed_list = ["silent", "normal", "pet hair"] - - entity = _LegacyConstantsStateVacuum() - assert isinstance(entity.supported_features, int) - assert entity.supported_features == int(features) - assert entity.supported_features_compat is ( - VacuumEntityFeature.BATTERY - | VacuumEntityFeature.FAN_SPEED - | VacuumEntityFeature.START - | VacuumEntityFeature.STOP - | VacuumEntityFeature.PAUSE - ) - assert entity.state_attributes == { - "battery_level": None, - "battery_icon": "mdi:battery-unknown", - "fan_speed": None, - } - assert entity.capability_attributes == { - "fan_speed_list": ["silent", "normal", "pet hair"] - } - assert entity._deprecated_supported_features_reported - - async def test_vacuum_not_log_deprecated_state_warning( hass: HomeAssistant, mock_vacuum_entity: MockVacuum, From 5e8012f3f5617919ff941e9439c2a7c96dd018f2 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 10:48:40 +0100 Subject: [PATCH 0368/1198] Remove deprecated supported features warning in WaterHeater (#132668) --- .../components/water_heater/__init__.py | 17 ++---------- tests/components/water_heater/test_init.py | 27 ------------------- 2 files changed, 2 insertions(+), 42 deletions(-) diff --git a/homeassistant/components/water_heater/__init__.py b/homeassistant/components/water_heater/__init__.py index dbd697f2367..43a9364e59d 100644 --- a/homeassistant/components/water_heater/__init__.py +++ b/homeassistant/components/water_heater/__init__.py @@ -194,7 +194,7 @@ class WaterHeaterEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): ), } - if WaterHeaterEntityFeature.OPERATION_MODE in self.supported_features_compat: + if WaterHeaterEntityFeature.OPERATION_MODE in self.supported_features: data[ATTR_OPERATION_LIST] = self.operation_list return data @@ -230,7 +230,7 @@ class WaterHeaterEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): ), } - supported_features = self.supported_features_compat + 
supported_features = self.supported_features if WaterHeaterEntityFeature.OPERATION_MODE in supported_features: data[ATTR_OPERATION_MODE] = self.current_operation @@ -379,19 +379,6 @@ class WaterHeaterEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): """Return the list of supported features.""" return self._attr_supported_features - @property - def supported_features_compat(self) -> WaterHeaterEntityFeature: - """Return the supported features as WaterHeaterEntityFeature. - - Remove this compatibility shim in 2025.1 or later. - """ - features = self.supported_features - if type(features) is int: # noqa: E721 - new_features = WaterHeaterEntityFeature(features) - self._report_deprecated_supported_features_values(new_features) - return new_features - return features - async def async_service_away_mode( entity: WaterHeaterEntity, service: ServiceCall diff --git a/tests/components/water_heater/test_init.py b/tests/components/water_heater/test_init.py index 0c5651058ed..78efd94ef8e 100644 --- a/tests/components/water_heater/test_init.py +++ b/tests/components/water_heater/test_init.py @@ -9,8 +9,6 @@ import pytest import voluptuous as vol from homeassistant.components.water_heater import ( - ATTR_OPERATION_LIST, - ATTR_OPERATION_MODE, DOMAIN, SERVICE_SET_OPERATION_MODE, SET_TEMPERATURE_SCHEMA, @@ -206,28 +204,3 @@ async def test_operation_mode_validation( ) await hass.async_block_till_done() water_heater_entity.set_operation_mode.assert_has_calls([mock.call("eco")]) - - -def test_deprecated_supported_features_ints( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test deprecated supported features ints.""" - - class MockWaterHeaterEntity(WaterHeaterEntity): - _attr_operation_list = ["mode1", "mode2"] - _attr_temperature_unit = UnitOfTemperature.CELSIUS - _attr_current_operation = "mode1" - _attr_supported_features = WaterHeaterEntityFeature.OPERATION_MODE.value - - entity = MockWaterHeaterEntity() - entity.hass = hass - assert entity.supported_features_compat is WaterHeaterEntityFeature(2) - assert "MockWaterHeaterEntity" in caplog.text - assert "is using deprecated supported features values" in caplog.text - assert "Instead it should use" in caplog.text - assert "WaterHeaterEntityFeature.OPERATION_MODE" in caplog.text - caplog.clear() - assert entity.supported_features_compat is WaterHeaterEntityFeature(2) - assert "is using deprecated supported features values" not in caplog.text - assert entity.state_attributes[ATTR_OPERATION_MODE] == "mode1" - assert entity.capability_attributes[ATTR_OPERATION_LIST] == ["mode1", "mode2"] From ee8f7202536b05cbf6a8318299da14fe1ef51ba7 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Mon, 9 Dec 2024 10:50:37 +0100 Subject: [PATCH 0369/1198] Add tip connected detection to IronOS (#131946) * Add binary platform and tip connected detection to IronOS * suggested changes * fix * fix mypy * revert accidental overwriting * Remove binary sensor * snapshot --- .../components/iron_os/coordinator.py | 11 ++++++ homeassistant/components/iron_os/sensor.py | 36 +++++++++++-------- tests/components/iron_os/test_sensor.py | 35 ++++++++++++++++-- 3 files changed, 65 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/iron_os/coordinator.py b/homeassistant/components/iron_os/coordinator.py index 690dd6f1893..82c7c3b99cd 100644 --- a/homeassistant/components/iron_os/coordinator.py +++ b/homeassistant/components/iron_os/coordinator.py @@ -92,6 +92,17 @@ class 
IronOSLiveDataCoordinator(IronOSBaseCoordinator[LiveDataResponse]): except CommunicationError as e: raise UpdateFailed("Cannot connect to device") from e + @property + def has_tip(self) -> bool: + """Return True if the tip is connected.""" + if ( + self.data.max_tip_temp_ability is not None + and self.data.live_temp is not None + ): + threshold = self.data.max_tip_temp_ability - 5 + return self.data.live_temp <= threshold + return False + class IronOSFirmwareUpdateCoordinator(DataUpdateCoordinator[GitHubReleaseModel]): """IronOS coordinator for retrieving update information from github.""" diff --git a/homeassistant/components/iron_os/sensor.py b/homeassistant/components/iron_os/sensor.py index 34f0f6af6b2..d178b46723f 100644 --- a/homeassistant/components/iron_os/sensor.py +++ b/homeassistant/components/iron_os/sensor.py @@ -28,6 +28,7 @@ from homeassistant.helpers.typing import StateType from . import IronOSConfigEntry from .const import OHM +from .coordinator import IronOSLiveDataCoordinator from .entity import IronOSBaseEntity # Coordinator is used to centralize the data updates @@ -57,7 +58,7 @@ class PinecilSensor(StrEnum): class IronOSSensorEntityDescription(SensorEntityDescription): """IronOS sensor entity descriptions.""" - value_fn: Callable[[LiveDataResponse], StateType] + value_fn: Callable[[LiveDataResponse, bool], StateType] PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = ( @@ -67,7 +68,7 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = ( native_unit_of_measurement=UnitOfTemperature.CELSIUS, device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda data: data.live_temp, + value_fn=lambda data, has_tip: data.live_temp if has_tip else None, ), IronOSSensorEntityDescription( key=PinecilSensor.DC_VOLTAGE, @@ -75,7 +76,7 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = ( native_unit_of_measurement=UnitOfElectricPotential.VOLT, device_class=SensorDeviceClass.VOLTAGE, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda data: data.dc_voltage, + value_fn=lambda data, _: data.dc_voltage, entity_category=EntityCategory.DIAGNOSTIC, ), IronOSSensorEntityDescription( @@ -84,7 +85,7 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = ( native_unit_of_measurement=UnitOfTemperature.CELSIUS, device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda data: data.handle_temp, + value_fn=lambda data, _: data.handle_temp, ), IronOSSensorEntityDescription( key=PinecilSensor.PWMLEVEL, @@ -93,7 +94,7 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = ( suggested_display_precision=0, device_class=SensorDeviceClass.POWER_FACTOR, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda data: data.pwm_level, + value_fn=lambda data, _: data.pwm_level, entity_category=EntityCategory.DIAGNOSTIC, ), IronOSSensorEntityDescription( @@ -101,14 +102,16 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] 
= ( translation_key=PinecilSensor.POWER_SRC, device_class=SensorDeviceClass.ENUM, options=[item.name.lower() for item in PowerSource], - value_fn=lambda data: data.power_src.name.lower() if data.power_src else None, + value_fn=( + lambda data, _: data.power_src.name.lower() if data.power_src else None + ), entity_category=EntityCategory.DIAGNOSTIC, ), IronOSSensorEntityDescription( key=PinecilSensor.TIP_RESISTANCE, translation_key=PinecilSensor.TIP_RESISTANCE, native_unit_of_measurement=OHM, - value_fn=lambda data: data.tip_resistance, + value_fn=lambda data, has_tip: data.tip_resistance if has_tip else None, entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, ), @@ -118,7 +121,7 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = ( native_unit_of_measurement=UnitOfTime.SECONDS, device_class=SensorDeviceClass.DURATION, state_class=SensorStateClass.TOTAL_INCREASING, - value_fn=lambda data: data.uptime, + value_fn=lambda data, _: data.uptime, entity_category=EntityCategory.DIAGNOSTIC, ), IronOSSensorEntityDescription( @@ -127,7 +130,7 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = ( native_unit_of_measurement=UnitOfTime.SECONDS, device_class=SensorDeviceClass.DURATION, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda data: data.movement_time, + value_fn=lambda data, _: data.movement_time, entity_category=EntityCategory.DIAGNOSTIC, ), IronOSSensorEntityDescription( @@ -135,7 +138,7 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = ( translation_key=PinecilSensor.MAX_TIP_TEMP_ABILITY, native_unit_of_measurement=UnitOfTemperature.CELSIUS, device_class=SensorDeviceClass.TEMPERATURE, - value_fn=lambda data: data.max_tip_temp_ability, + value_fn=lambda data, has_tip: data.max_tip_temp_ability if has_tip else None, entity_category=EntityCategory.DIAGNOSTIC, ), IronOSSensorEntityDescription( @@ -145,7 +148,7 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = ( device_class=SensorDeviceClass.VOLTAGE, state_class=SensorStateClass.MEASUREMENT, suggested_display_precision=0, - value_fn=lambda data: data.tip_voltage, + value_fn=lambda data, has_tip: data.tip_voltage if has_tip else None, entity_category=EntityCategory.DIAGNOSTIC, ), IronOSSensorEntityDescription( @@ -153,7 +156,7 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = ( translation_key=PinecilSensor.HALL_SENSOR, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, - value_fn=lambda data: data.hall_sensor, + value_fn=lambda data, _: data.hall_sensor, entity_category=EntityCategory.DIAGNOSTIC, ), IronOSSensorEntityDescription( @@ -162,7 +165,7 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = ( device_class=SensorDeviceClass.ENUM, options=[item.name.lower() for item in OperatingMode], value_fn=( - lambda data: data.operating_mode.name.lower() + lambda data, _: data.operating_mode.name.lower() if data.operating_mode else None ), @@ -173,7 +176,7 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] 
= ( native_unit_of_measurement=UnitOfPower.WATT, device_class=SensorDeviceClass.POWER, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda data: data.estimated_power, + value_fn=lambda data, _: data.estimated_power, ), ) @@ -196,8 +199,11 @@ class IronOSSensorEntity(IronOSBaseEntity, SensorEntity): """Representation of a IronOS sensor entity.""" entity_description: IronOSSensorEntityDescription + coordinator: IronOSLiveDataCoordinator @property def native_value(self) -> StateType: """Return sensor state.""" - return self.entity_description.value_fn(self.coordinator.data) + return self.entity_description.value_fn( + self.coordinator.data, self.coordinator.has_tip + ) diff --git a/tests/components/iron_os/test_sensor.py b/tests/components/iron_os/test_sensor.py index 2f79487a7fd..fec111c5799 100644 --- a/tests/components/iron_os/test_sensor.py +++ b/tests/components/iron_os/test_sensor.py @@ -4,13 +4,13 @@ from collections.abc import AsyncGenerator from unittest.mock import AsyncMock, MagicMock, patch from freezegun.api import FrozenDateTimeFactory -from pynecil import CommunicationError +from pynecil import CommunicationError, LiveDataResponse import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.iron_os.coordinator import SCAN_INTERVAL from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -71,3 +71,34 @@ async def test_sensors_unavailable( ) for entity_entry in entity_entries: assert hass.states.get(entity_entry.entity_id).state == STATE_UNAVAILABLE + + +@pytest.mark.usefixtures( + "entity_registry_enabled_by_default", "ble_device", "mock_pynecil" +) +async def test_tip_detection( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pynecil: AsyncMock, + ble_device: MagicMock, +) -> None: + """Test sensor state is unknown when tip is disconnected.""" + + mock_pynecil.get_live_data.return_value = LiveDataResponse( + live_temp=479, + max_tip_temp_ability=460, + ) + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + entities = { + "sensor.pinecil_tip_temperature", + "sensor.pinecil_max_tip_temperature", + "sensor.pinecil_raw_tip_voltage", + "sensor.pinecil_tip_resistance", + } + for entity_id in entities: + assert hass.states.get(entity_id).state == STATE_UNKNOWN From 6cf10cd0b221dc92a41dbdefd9e2580a1b856873 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 11:25:18 +0100 Subject: [PATCH 0370/1198] Remove deprecated supported features warning in Update (#132667) --- homeassistant/components/update/__init__.py | 25 ++---- tests/components/update/test_init.py | 94 +-------------------- 2 files changed, 7 insertions(+), 112 deletions(-) diff --git a/homeassistant/components/update/__init__.py b/homeassistant/components/update/__init__.py index 6f0b56b14e8..8ef9f44237f 100644 --- a/homeassistant/components/update/__init__.py +++ b/homeassistant/components/update/__init__.py @@ -136,7 +136,7 @@ async def async_install(entity: UpdateEntity, service_call: ServiceCall) -> None # If version is specified, but not supported by the entity. 
if ( version is not None - and UpdateEntityFeature.SPECIFIC_VERSION not in entity.supported_features_compat + and UpdateEntityFeature.SPECIFIC_VERSION not in entity.supported_features ): raise HomeAssistantError( f"Installing a specific version is not supported for {entity.entity_id}" @@ -145,7 +145,7 @@ async def async_install(entity: UpdateEntity, service_call: ServiceCall) -> None # If backup is requested, but not supported by the entity. if ( backup := service_call.data[ATTR_BACKUP] - ) and UpdateEntityFeature.BACKUP not in entity.supported_features_compat: + ) and UpdateEntityFeature.BACKUP not in entity.supported_features: raise HomeAssistantError(f"Backup is not supported for {entity.entity_id}") # Update is already in progress. @@ -279,7 +279,7 @@ class UpdateEntity( return self._attr_entity_category if hasattr(self, "entity_description"): return self.entity_description.entity_category - if UpdateEntityFeature.INSTALL in self.supported_features_compat: + if UpdateEntityFeature.INSTALL in self.supported_features: return EntityCategory.CONFIG return EntityCategory.DIAGNOSTIC @@ -337,19 +337,6 @@ class UpdateEntity( """ return self._attr_title - @property - def supported_features_compat(self) -> UpdateEntityFeature: - """Return the supported features as UpdateEntityFeature. - - Remove this compatibility shim in 2025.1 or later. - """ - features = self.supported_features - if type(features) is int: # noqa: E721 - new_features = UpdateEntityFeature(features) - self._report_deprecated_supported_features_values(new_features) - return new_features - return features - @cached_property def update_percentage(self) -> int | float | None: """Update installation progress. @@ -451,7 +438,7 @@ class UpdateEntity( # If entity supports progress, return the in_progress value. # Otherwise, we use the internal progress value. - if UpdateEntityFeature.PROGRESS in self.supported_features_compat: + if UpdateEntityFeature.PROGRESS in self.supported_features: in_progress = self.in_progress update_percentage = self.update_percentage if in_progress else None if type(in_progress) is not bool and isinstance(in_progress, int): @@ -494,7 +481,7 @@ class UpdateEntity( Handles setting the in_progress state in case the entity doesn't support it natively. 
""" - if UpdateEntityFeature.PROGRESS not in self.supported_features_compat: + if UpdateEntityFeature.PROGRESS not in self.supported_features: self.__in_progress = True self.async_write_ha_state() @@ -539,7 +526,7 @@ async def websocket_release_notes( ) return - if UpdateEntityFeature.RELEASE_NOTES not in entity.supported_features_compat: + if UpdateEntityFeature.RELEASE_NOTES not in entity.supported_features: connection.send_error( msg["id"], websocket_api.ERR_NOT_SUPPORTED, diff --git a/tests/components/update/test_init.py b/tests/components/update/test_init.py index a35f7bb0f12..d4916de8039 100644 --- a/tests/components/update/test_init.py +++ b/tests/components/update/test_init.py @@ -896,98 +896,6 @@ async def test_name(hass: HomeAssistant) -> None: assert expected.items() <= state.attributes.items() -def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: - """Test deprecated supported features ints.""" - - class MockUpdateEntity(UpdateEntity): - @property - def supported_features(self) -> int: - """Return supported features.""" - return 1 - - entity = MockUpdateEntity() - assert entity.supported_features_compat is UpdateEntityFeature(1) - assert "MockUpdateEntity" in caplog.text - assert "is using deprecated supported features values" in caplog.text - assert "Instead it should use" in caplog.text - assert "UpdateEntityFeature.INSTALL" in caplog.text - caplog.clear() - assert entity.supported_features_compat is UpdateEntityFeature(1) - assert "is using deprecated supported features values" not in caplog.text - - -async def test_deprecated_supported_features_ints_with_service_call( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test deprecated supported features ints with install service.""" - - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - mock_platform(hass, f"{TEST_DOMAIN}.config_flow") - mock_integration( - hass, - MockModule( - TEST_DOMAIN, - async_setup_entry=async_setup_entry_init, - ), - ) - - class MockUpdateEntity(UpdateEntity): - _attr_supported_features = 1 | 2 - - def install(self, version: str | None = None, backup: bool = False) -> None: - """Install an update.""" - - entity = MockUpdateEntity() - entity.entity_id = ( - "update.test_deprecated_supported_features_ints_with_service_call" - ) - - async def async_setup_entry_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test update platform via config entry.""" - async_add_entities([entity]) - - mock_platform( - hass, - f"{TEST_DOMAIN}.{DOMAIN}", - MockPlatform(async_setup_entry=async_setup_entry_platform), - ) - - config_entry = MockConfigEntry(domain=TEST_DOMAIN) - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert "is using deprecated supported features values" in caplog.text - - assert isinstance(entity.supported_features, int) - - with pytest.raises( - HomeAssistantError, - match="Backup is not supported for update.test_deprecated_supported_features_ints_with_service_call", - ): - await hass.services.async_call( - DOMAIN, - SERVICE_INSTALL, - { - ATTR_VERSION: "0.9.9", - ATTR_BACKUP: True, - ATTR_ENTITY_ID: "update.test_deprecated_supported_features_ints_with_service_call", - }, - blocking=True, - ) - - 
async def test_custom_version_is_newer(hass: HomeAssistant) -> None: """Test UpdateEntity with overridden version_is_newer method.""" @@ -1032,7 +940,7 @@ async def test_custom_version_is_newer(hass: HomeAssistant) -> None: ("supported_features", "extra_expected_attributes"), [ ( - 0, + UpdateEntityFeature(0), [ {}, {}, From 97cd3cd7dc388c6e1b87ab7e9bf8b0a35c7b238e Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Mon, 9 Dec 2024 11:51:58 +0100 Subject: [PATCH 0371/1198] Add slightly more detailed descriptions for Counter actions (#132576) --- homeassistant/components/counter/strings.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/counter/strings.json b/homeassistant/components/counter/strings.json index fb1f6467f4a..2c52fb43b9f 100644 --- a/homeassistant/components/counter/strings.json +++ b/homeassistant/components/counter/strings.json @@ -29,19 +29,19 @@ "services": { "decrement": { "name": "Decrement", - "description": "Decrements a counter." + "description": "Decrements a counter by its step size." }, "increment": { "name": "Increment", - "description": "Increments a counter." + "description": "Increments a counter by its step size." }, "reset": { "name": "Reset", - "description": "Resets a counter." + "description": "Resets a counter to its initial value." }, "set_value": { "name": "Set", - "description": "Sets the counter value.", + "description": "Sets the counter to a specific value.", "fields": { "value": { "name": "Value", From ad34082435c57065da78a07f150c8fdff1ebb429 Mon Sep 17 00:00:00 2001 From: Thomas55555 <59625598+Thomas55555@users.noreply.github.com> Date: Mon, 9 Dec 2024 12:18:45 +0100 Subject: [PATCH 0372/1198] Set quality scale to silver for Husqvarna Automower (#132293) --- homeassistant/components/husqvarna_automower/manifest.json | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/husqvarna_automower/manifest.json b/homeassistant/components/husqvarna_automower/manifest.json index d22d23583ba..0f35e60c219 100644 --- a/homeassistant/components/husqvarna_automower/manifest.json +++ b/homeassistant/components/husqvarna_automower/manifest.json @@ -7,5 +7,6 @@ "documentation": "https://www.home-assistant.io/integrations/husqvarna_automower", "iot_class": "cloud_push", "loggers": ["aioautomower"], + "quality_scale": "silver", "requirements": ["aioautomower==2024.10.3"] } From fa9ee2adc66f7d99a0d85a4901292d013a2690be Mon Sep 17 00:00:00 2001 From: Bouwe Westerdijk <11290930+bouwew@users.noreply.github.com> Date: Mon, 9 Dec 2024 12:27:15 +0100 Subject: [PATCH 0373/1198] Bump plugwise to v1.6.3 (#132673) --- homeassistant/components/plugwise/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/plugwise/manifest.json b/homeassistant/components/plugwise/manifest.json index d7fcec3bbae..60de4496779 100644 --- a/homeassistant/components/plugwise/manifest.json +++ b/homeassistant/components/plugwise/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["plugwise"], - "requirements": ["plugwise==1.6.2"], + "requirements": ["plugwise==1.6.3"], "zeroconf": ["_plugwise._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index 02e2f1f048d..35affc2b491 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1622,7 +1622,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==1.6.2 
+plugwise==1.6.3 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 85b31f9c95b..3c0b93ec31a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1332,7 +1332,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==1.6.2 +plugwise==1.6.3 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 From b1791aae637015f46cccb02d6e66ccfe6bf2bf0f Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 12:53:24 +0100 Subject: [PATCH 0374/1198] Use ATTR_COLOR_TEMP_KELVIN in emulated_hue light (#132693) --- homeassistant/components/emulated_hue/hue_api.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/emulated_hue/hue_api.py b/homeassistant/components/emulated_hue/hue_api.py index 8194d31823d..e13112f20bb 100644 --- a/homeassistant/components/emulated_hue/hue_api.py +++ b/homeassistant/components/emulated_hue/hue_api.py @@ -39,7 +39,7 @@ from homeassistant.components.http import KEY_HASS, HomeAssistantView from homeassistant.components.humidifier import ATTR_HUMIDITY, SERVICE_SET_HUMIDITY from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ATTR_TRANSITION, ATTR_XY_COLOR, @@ -67,6 +67,7 @@ from homeassistant.const import ( ) from homeassistant.core import Event, EventStateChangedData, State from homeassistant.helpers.event import async_track_state_change_event +from homeassistant.util import color as color_util from homeassistant.util.json import json_loads from homeassistant.util.network import is_local @@ -500,7 +501,11 @@ class HueOneLightChangeView(HomeAssistantView): light.color_temp_supported(color_modes) and parsed[STATE_COLOR_TEMP] is not None ): - data[ATTR_COLOR_TEMP] = parsed[STATE_COLOR_TEMP] + data[ATTR_COLOR_TEMP_KELVIN] = ( + color_util.color_temperature_mired_to_kelvin( + parsed[STATE_COLOR_TEMP] + ) + ) if ( entity_features & LightEntityFeature.TRANSITION @@ -702,7 +707,12 @@ def _build_entity_state_dict(entity: State) -> dict[str, Any]: else: data[STATE_HUE] = HUE_API_STATE_HUE_MIN data[STATE_SATURATION] = HUE_API_STATE_SAT_MIN - data[STATE_COLOR_TEMP] = attributes.get(ATTR_COLOR_TEMP) or 0 + kelvin = attributes.get(ATTR_COLOR_TEMP_KELVIN) + data[STATE_COLOR_TEMP] = ( + color_util.color_temperature_kelvin_to_mired(kelvin) + if kelvin is not None + else 0 + ) else: data[STATE_BRIGHTNESS] = 0 From 549afbc27ec482ddfdb0166c00ef6efbbe19e393 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 12:55:39 +0100 Subject: [PATCH 0375/1198] Use ATTR_COLOR_TEMP_KELVIN in baf light (#132692) --- homeassistant/components/baf/light.py | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/baf/light.py b/homeassistant/components/baf/light.py index 2fb36ed874f..10450df1ba2 100644 --- a/homeassistant/components/baf/light.py +++ b/homeassistant/components/baf/light.py @@ -8,16 +8,13 @@ from aiobafi6 import Device, OffOnAuto from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ColorMode, LightEntity, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util.color import ( - color_temperature_kelvin_to_mired, - color_temperature_mired_to_kelvin, 
-) +from homeassistant.util.color import color_temperature_kelvin_to_mired from . import BAFConfigEntry from .entity import BAFEntity @@ -94,8 +91,6 @@ class BAFStandaloneLight(BAFLight): async def async_turn_on(self, **kwargs: Any) -> None: """Turn on the light.""" - if (color_temp := kwargs.get(ATTR_COLOR_TEMP)) is not None: - self._device.light_color_temperature = color_temperature_mired_to_kelvin( - color_temp - ) + if (color_temp := kwargs.get(ATTR_COLOR_TEMP_KELVIN)) is not None: + self._device.light_color_temperature = color_temp await super().async_turn_on(**kwargs) From 4bb3d6123deac7f7921547093350888935a85bb0 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Benecke Date: Mon, 9 Dec 2024 13:37:17 +0100 Subject: [PATCH 0376/1198] Move SABnzbd action setup to async_setup (#132629) --- homeassistant/components/sabnzbd/__init__.py | 30 ++++++++++++-------- 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/sabnzbd/__init__.py b/homeassistant/components/sabnzbd/__init__.py index e6a99c858c3..2e3d6dd613c 100644 --- a/homeassistant/components/sabnzbd/__init__.py +++ b/homeassistant/components/sabnzbd/__init__.py @@ -14,6 +14,7 @@ from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError from homeassistant.helpers import config_validation as cv import homeassistant.helpers.issue_registry as ir +from homeassistant.helpers.typing import ConfigType from .const import ( ATTR_API_KEY, @@ -48,6 +49,8 @@ SERVICE_SPEED_SCHEMA = SERVICE_BASE_SCHEMA.extend( } ) +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) + @callback def async_get_entry_for_service_call( @@ -63,17 +66,9 @@ def async_get_entry_for_service_call( raise ValueError(f"No api for API key: {call_data_api_key}") -async def async_setup_entry(hass: HomeAssistant, entry: SabnzbdConfigEntry) -> bool: +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the SabNzbd Component.""" - sab_api = await get_client(hass, entry.data) - if not sab_api: - raise ConfigEntryNotReady - - coordinator = SabnzbdUpdateCoordinator(hass, entry, sab_api) - await coordinator.async_config_entry_first_refresh() - entry.runtime_data = coordinator - @callback def extract_api( func: Callable[ @@ -147,11 +142,22 @@ async def async_setup_entry(hass: HomeAssistant, entry: SabnzbdConfigEntry) -> b (SERVICE_RESUME, async_resume_queue, SERVICE_BASE_SCHEMA), (SERVICE_SET_SPEED, async_set_queue_speed, SERVICE_SPEED_SCHEMA), ): - if hass.services.has_service(DOMAIN, service): - continue - hass.services.async_register(DOMAIN, service, method, schema=schema) + return True + + +async def async_setup_entry(hass: HomeAssistant, entry: SabnzbdConfigEntry) -> bool: + """Set up the SabNzbd Component.""" + + sab_api = await get_client(hass, entry.data) + if not sab_api: + raise ConfigEntryNotReady + + coordinator = SabnzbdUpdateCoordinator(hass, entry, sab_api) + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True From f4e48c31bd61666559fe1f61505482ae53497a0a Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Mon, 9 Dec 2024 13:37:38 +0100 Subject: [PATCH 0377/1198] Add binary platform to IronOS (#132691) Co-authored-by: Joost Lekkerkerker --- homeassistant/components/iron_os/__init__.py | 7 +- .../components/iron_os/binary_sensor.py | 54 +++++++++++++ 
homeassistant/components/iron_os/icons.json | 8 ++ homeassistant/components/iron_os/strings.json | 5 ++ .../iron_os/snapshots/test_binary_sensor.ambr | 48 ++++++++++++ .../components/iron_os/test_binary_sensor.py | 77 +++++++++++++++++++ 6 files changed, 198 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/iron_os/binary_sensor.py create mode 100644 tests/components/iron_os/snapshots/test_binary_sensor.ambr create mode 100644 tests/components/iron_os/test_binary_sensor.py diff --git a/homeassistant/components/iron_os/__init__.py b/homeassistant/components/iron_os/__init__.py index 35b426d11ab..225bf0ff582 100644 --- a/homeassistant/components/iron_os/__init__.py +++ b/homeassistant/components/iron_os/__init__.py @@ -26,7 +26,12 @@ from .coordinator import ( IronOSSettingsCoordinator, ) -PLATFORMS: list[Platform] = [Platform.NUMBER, Platform.SENSOR, Platform.UPDATE] +PLATFORMS: list[Platform] = [ + Platform.BINARY_SENSOR, + Platform.NUMBER, + Platform.SENSOR, + Platform.UPDATE, +] CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) diff --git a/homeassistant/components/iron_os/binary_sensor.py b/homeassistant/components/iron_os/binary_sensor.py new file mode 100644 index 00000000000..81ba0e08c95 --- /dev/null +++ b/homeassistant/components/iron_os/binary_sensor.py @@ -0,0 +1,54 @@ +"""Binary sensor platform for IronOS integration.""" + +from __future__ import annotations + +from enum import StrEnum + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import IronOSConfigEntry +from .coordinator import IronOSLiveDataCoordinator +from .entity import IronOSBaseEntity + +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + + +class PinecilBinarySensor(StrEnum): + """Pinecil Binary Sensors.""" + + TIP_CONNECTED = "tip_connected" + + +async def async_setup_entry( + hass: HomeAssistant, + entry: IronOSConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up binary sensors from a config entry.""" + coordinator = entry.runtime_data.live_data + + entity_description = BinarySensorEntityDescription( + key=PinecilBinarySensor.TIP_CONNECTED, + translation_key=PinecilBinarySensor.TIP_CONNECTED, + device_class=BinarySensorDeviceClass.CONNECTIVITY, + ) + + async_add_entities([IronOSBinarySensorEntity(coordinator, entity_description)]) + + +class IronOSBinarySensorEntity(IronOSBaseEntity, BinarySensorEntity): + """Representation of a IronOS binary sensor entity.""" + + coordinator: IronOSLiveDataCoordinator + + @property + def is_on(self) -> bool | None: + """Return true if the binary sensor is on.""" + return self.coordinator.has_tip diff --git a/homeassistant/components/iron_os/icons.json b/homeassistant/components/iron_os/icons.json index 24d27457689..eadcc17bb37 100644 --- a/homeassistant/components/iron_os/icons.json +++ b/homeassistant/components/iron_os/icons.json @@ -1,5 +1,13 @@ { "entity": { + "binary_sensor": { + "tip_connected": { + "default": "mdi:pencil-outline", + "state": { + "off": "mdi:pencil-off-outline" + } + } + }, "number": { "setpoint_temperature": { "default": "mdi:thermometer" diff --git a/homeassistant/components/iron_os/strings.json b/homeassistant/components/iron_os/strings.json index c474b704677..13528104f8c 100644 --- a/homeassistant/components/iron_os/strings.json +++ 
b/homeassistant/components/iron_os/strings.json @@ -20,6 +20,11 @@ } }, "entity": { + "binary_sensor": { + "tip_connected": { + "name": "Soldering tip" + } + }, "number": { "setpoint_temperature": { "name": "Setpoint temperature" diff --git a/tests/components/iron_os/snapshots/test_binary_sensor.ambr b/tests/components/iron_os/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..17b49c1d687 --- /dev/null +++ b/tests/components/iron_os/snapshots/test_binary_sensor.ambr @@ -0,0 +1,48 @@ +# serializer version: 1 +# name: test_binary_sensors[binary_sensor.pinecil_soldering_tip-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.pinecil_soldering_tip', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Soldering tip', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_tip_connected', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.pinecil_soldering_tip-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Pinecil Soldering tip', + }), + 'context': , + 'entity_id': 'binary_sensor.pinecil_soldering_tip', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/iron_os/test_binary_sensor.py b/tests/components/iron_os/test_binary_sensor.py new file mode 100644 index 00000000000..291fbf80573 --- /dev/null +++ b/tests/components/iron_os/test_binary_sensor.py @@ -0,0 +1,77 @@ +"""Tests for the Pinecil Binary Sensors.""" + +from collections.abc import AsyncGenerator +from datetime import timedelta +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +from pynecil import LiveDataResponse +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.binary_sensor import STATE_OFF, STATE_ON +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +@pytest.fixture(autouse=True) +async def binary_sensor_only() -> AsyncGenerator[None]: + """Enable only the binary sensor platform.""" + with patch( + "homeassistant.components.iron_os.PLATFORMS", + [Platform.BINARY_SENSOR], + ): + yield + + +@pytest.mark.usefixtures( + "entity_registry_enabled_by_default", "mock_pynecil", "ble_device" +) +async def test_binary_sensors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test the Pinecil binary sensor platform.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.usefixtures( + "entity_registry_enabled_by_default", "ble_device", 
"mock_pynecil" +) +async def test_tip_on_off( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pynecil: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test tip_connected binary sensor on/off states.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + assert hass.states.get("binary_sensor.pinecil_soldering_tip").state == STATE_ON + + mock_pynecil.get_live_data.return_value = LiveDataResponse( + live_temp=479, + max_tip_temp_ability=460, + ) + freezer.tick(timedelta(seconds=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("binary_sensor.pinecil_soldering_tip").state == STATE_OFF From 4e2e6619d0d7766ff9c7104a48dc5090f8f1f9ff Mon Sep 17 00:00:00 2001 From: G Johansson Date: Mon, 9 Dec 2024 13:52:51 +0100 Subject: [PATCH 0378/1198] Increase test coverage in yale_smart_alarm (#132650) --- .../yale_smart_alarm/test_config_flow.py | 17 +++++++++++----- .../yale_smart_alarm/test_switch.py | 20 +++++++++++++++++-- 2 files changed, 30 insertions(+), 7 deletions(-) diff --git a/tests/components/yale_smart_alarm/test_config_flow.py b/tests/components/yale_smart_alarm/test_config_flow.py index 51106751f03..0b008d4c696 100644 --- a/tests/components/yale_smart_alarm/test_config_flow.py +++ b/tests/components/yale_smart_alarm/test_config_flow.py @@ -455,10 +455,17 @@ async def test_options_flow( assert result["type"] is FlowResultType.FORM assert result["step_id"] == "init" - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={"lock_code_digits": 6}, - ) + with patch( + "homeassistant.components.yale_smart_alarm.coordinator.YaleSmartAlarmClient", + return_value=load_config_entry[1], + ): + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"lock_code_digits": 4}, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == {"lock_code_digits": 6} + assert result["data"] == {"lock_code_digits": 4} + + assert entry.state == config_entries.ConfigEntryState.LOADED diff --git a/tests/components/yale_smart_alarm/test_switch.py b/tests/components/yale_smart_alarm/test_switch.py index b189a3fd003..369f8f8f10c 100644 --- a/tests/components/yale_smart_alarm/test_switch.py +++ b/tests/components/yale_smart_alarm/test_switch.py @@ -8,8 +8,12 @@ import pytest from syrupy.assertion import SnapshotAssertion from yalesmartalarmclient import YaleSmartAlarmData -from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN, SERVICE_TURN_OFF -from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, Platform +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -44,3 +48,15 @@ async def test_switch( state = hass.states.get("switch.device1_autolock") assert state.state == STATE_OFF + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: "switch.device1_autolock", + }, + blocking=True, + ) + + state = hass.states.get("switch.device1_autolock") + assert state.state == STATE_ON From bd0da03eb9d77848262cb40e8e34f7002281c3af Mon Sep 17 00:00:00 2001 From: dotvav Date: Mon, 9 Dec 
2024 14:02:17 +0100 Subject: [PATCH 0379/1198] Palazzetti power control (#131833) * Add number entity * Catch exceptions * Add test coverage * Add translation * Fix exception message * Simplify number.py * Remove dead code --- .../components/palazzetti/__init__.py | 2 +- homeassistant/components/palazzetti/number.py | 66 +++++++++++++++++ .../components/palazzetti/strings.json | 8 +++ tests/components/palazzetti/conftest.py | 2 + .../palazzetti/snapshots/test_number.ambr | 57 +++++++++++++++ tests/components/palazzetti/test_number.py | 72 +++++++++++++++++++ 6 files changed, 206 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/palazzetti/number.py create mode 100644 tests/components/palazzetti/snapshots/test_number.ambr create mode 100644 tests/components/palazzetti/test_number.py diff --git a/homeassistant/components/palazzetti/__init__.py b/homeassistant/components/palazzetti/__init__.py index 4bea4434496..f20b3d11261 100644 --- a/homeassistant/components/palazzetti/__init__.py +++ b/homeassistant/components/palazzetti/__init__.py @@ -7,7 +7,7 @@ from homeassistant.core import HomeAssistant from .coordinator import PalazzettiConfigEntry, PalazzettiDataUpdateCoordinator -PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.SENSOR] +PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.NUMBER, Platform.SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: PalazzettiConfigEntry) -> bool: diff --git a/homeassistant/components/palazzetti/number.py b/homeassistant/components/palazzetti/number.py new file mode 100644 index 00000000000..06114bfef54 --- /dev/null +++ b/homeassistant/components/palazzetti/number.py @@ -0,0 +1,66 @@ +"""Number platform for Palazzetti settings.""" + +from __future__ import annotations + +from pypalazzetti.exceptions import CommunicationError, ValidationError + +from homeassistant.components.number import NumberDeviceClass, NumberEntity +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import PalazzettiConfigEntry +from .const import DOMAIN +from .coordinator import PalazzettiDataUpdateCoordinator +from .entity import PalazzettiEntity + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: PalazzettiConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Palazzetti number platform.""" + async_add_entities([PalazzettiCombustionPowerEntity(config_entry.runtime_data)]) + + +class PalazzettiCombustionPowerEntity(PalazzettiEntity, NumberEntity): + """Representation of Palazzetti number entity for Combustion power.""" + + _attr_translation_key = "combustion_power" + _attr_device_class = NumberDeviceClass.POWER_FACTOR + _attr_native_min_value = 1 + _attr_native_max_value = 5 + _attr_native_step = 1 + + def __init__( + self, + coordinator: PalazzettiDataUpdateCoordinator, + ) -> None: + """Initialize the Palazzetti number entity.""" + super().__init__(coordinator) + self._attr_unique_id = f"{coordinator.config_entry.unique_id}-combustion_power" + + @property + def native_value(self) -> float: + """Return the state of the setting entity.""" + return self.coordinator.client.power_mode + + async def async_set_native_value(self, value: float) -> None: + """Update the setting.""" + try: + await self.coordinator.client.set_power_mode(int(value)) + except CommunicationError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, translation_key="cannot_connect" + ) from err + except ValidationError as err: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_combustion_power", + translation_placeholders={ + "value": str(value), + }, + ) from err + + await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/palazzetti/strings.json b/homeassistant/components/palazzetti/strings.json index 435ec0aab85..60c6e20c402 100644 --- a/homeassistant/components/palazzetti/strings.json +++ b/homeassistant/components/palazzetti/strings.json @@ -30,6 +30,9 @@ "invalid_target_temperature": { "message": "Target temperature {value} is invalid." }, + "invalid_combustion_power": { + "message": "Combustion power {value} is invalid." + }, "cannot_connect": { "message": "Could not connect to the device." 
} @@ -48,6 +51,11 @@ } } }, + "number": { + "combustion_power": { + "name": "Combustion power" + } + }, "sensor": { "pellet_quantity": { "name": "Pellet quantity" diff --git a/tests/components/palazzetti/conftest.py b/tests/components/palazzetti/conftest.py index ec58afc324a..a9f76b259c3 100644 --- a/tests/components/palazzetti/conftest.py +++ b/tests/components/palazzetti/conftest.py @@ -87,6 +87,8 @@ def mock_palazzetti_client() -> Generator[AsyncMock]: mock_client.set_fan_silent.return_value = True mock_client.set_fan_high.return_value = True mock_client.set_fan_auto.return_value = True + mock_client.set_power_mode.return_value = True + mock_client.power_mode = 3 mock_client.list_temperatures.return_value = [ TemperatureDefinition( description_key=TemperatureDescriptionKey.ROOM_TEMP, diff --git a/tests/components/palazzetti/snapshots/test_number.ambr b/tests/components/palazzetti/snapshots/test_number.ambr new file mode 100644 index 00000000000..0a25a1cfa8b --- /dev/null +++ b/tests/components/palazzetti/snapshots/test_number.ambr @@ -0,0 +1,57 @@ +# serializer version: 1 +# name: test_all_entities[number.stove_combustion_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 5, + 'min': 1, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.stove_combustion_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Combustion power', + 'platform': 'palazzetti', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'combustion_power', + 'unique_id': '11:22:33:44:55:66-combustion_power', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[number.stove_combustion_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Stove Combustion power', + 'max': 5, + 'min': 1, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.stove_combustion_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3', + }) +# --- diff --git a/tests/components/palazzetti/test_number.py b/tests/components/palazzetti/test_number.py new file mode 100644 index 00000000000..939c7c72c19 --- /dev/null +++ b/tests/components/palazzetti/test_number.py @@ -0,0 +1,72 @@ +"""Tests for the Palazzetti sensor platform.""" + +from unittest.mock import AsyncMock, patch + +from pypalazzetti.exceptions import CommunicationError, ValidationError +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.number import DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + +ENTITY_ID = "number.stove_combustion_power" + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_palazzetti_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.palazzetti.PLATFORMS", [Platform.NUMBER]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_async_set_data( + hass: HomeAssistant, + mock_palazzetti_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setting number data via service call.""" + await setup_integration(hass, mock_config_entry) + + # Set value: Success + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: ENTITY_ID, "value": 1}, + blocking=True, + ) + mock_palazzetti_client.set_power_mode.assert_called_once_with(1) + mock_palazzetti_client.set_on.reset_mock() + + # Set value: Error + mock_palazzetti_client.set_power_mode.side_effect = CommunicationError() + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: ENTITY_ID, "value": 1}, + blocking=True, + ) + mock_palazzetti_client.set_on.reset_mock() + + mock_palazzetti_client.set_power_mode.side_effect = ValidationError() + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: ENTITY_ID, "value": 1}, + blocking=True, + ) From 72de5d0fa34c4cc799384081d781b761c69d96dc Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 15:14:24 +0100 Subject: [PATCH 0380/1198] Fix reading of max mireds from Matter lights (#132710) --- homeassistant/components/matter/light.py | 2 +- tests/components/matter/fixtures/config_entry_diagnostics.json | 2 +- .../matter/fixtures/config_entry_diagnostics_redacted.json | 2 +- tests/components/matter/fixtures/nodes/device_diagnostics.json | 2 +- .../components/matter/fixtures/nodes/multi_endpoint_light.json | 2 +- .../components/matter/fixtures/nodes/onoff_light_alt_name.json | 2 +- tests/components/matter/fixtures/nodes/onoff_light_no_name.json | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/matter/light.py b/homeassistant/components/matter/light.py index 6d184bcc01f..6d83fc31722 100644 --- a/homeassistant/components/matter/light.py +++ b/homeassistant/components/matter/light.py @@ -372,7 +372,7 @@ class MatterLight(MatterEntity, LightEntity): max_mireds = self.get_matter_attribute_value( clusters.ColorControl.Attributes.ColorTempPhysicalMaxMireds ) - if min_mireds > 0: + if max_mireds > 0: self._attr_max_mireds = max_mireds supported_color_modes = filter_supported_color_modes(supported_color_modes) diff --git a/tests/components/matter/fixtures/config_entry_diagnostics.json b/tests/components/matter/fixtures/config_entry_diagnostics.json index 000b0d4e2e6..8cc9d068caf 100644 --- a/tests/components/matter/fixtures/config_entry_diagnostics.json +++ b/tests/components/matter/fixtures/config_entry_diagnostics.json @@ -647,7 +647,7 @@ "1/768/16390": 0, "1/768/16394": 31, "1/768/16395": 0, - "1/768/16396": 65279, + "1/768/16396": 0, "1/768/16397": 0, "1/768/16400": 0, "1/768/65532": 31, diff --git 
a/tests/components/matter/fixtures/config_entry_diagnostics_redacted.json b/tests/components/matter/fixtures/config_entry_diagnostics_redacted.json index 95447783bbc..28c93de5e11 100644 --- a/tests/components/matter/fixtures/config_entry_diagnostics_redacted.json +++ b/tests/components/matter/fixtures/config_entry_diagnostics_redacted.json @@ -460,7 +460,7 @@ "1/768/16390": 0, "1/768/16394": 31, "1/768/16395": 0, - "1/768/16396": 65279, + "1/768/16396": 0, "1/768/16397": 0, "1/768/16400": 0, "1/768/65532": 31, diff --git a/tests/components/matter/fixtures/nodes/device_diagnostics.json b/tests/components/matter/fixtures/nodes/device_diagnostics.json index 1d1d450e1f0..5600a7e801b 100644 --- a/tests/components/matter/fixtures/nodes/device_diagnostics.json +++ b/tests/components/matter/fixtures/nodes/device_diagnostics.json @@ -444,7 +444,7 @@ "1/768/16390": 0, "1/768/16394": 31, "1/768/16395": 0, - "1/768/16396": 65279, + "1/768/16396": 0, "1/768/16397": 0, "1/768/16400": 0, "1/768/65532": 31, diff --git a/tests/components/matter/fixtures/nodes/multi_endpoint_light.json b/tests/components/matter/fixtures/nodes/multi_endpoint_light.json index e3a01da9e7c..3b9be24d9ab 100644 --- a/tests/components/matter/fixtures/nodes/multi_endpoint_light.json +++ b/tests/components/matter/fixtures/nodes/multi_endpoint_light.json @@ -1620,7 +1620,7 @@ "6/768/16385": 0, "6/768/16394": 25, "6/768/16395": 0, - "6/768/16396": 65279, + "6/768/16396": 0, "6/768/16397": 0, "6/768/16400": 0, "6/768/65532": 25, diff --git a/tests/components/matter/fixtures/nodes/onoff_light_alt_name.json b/tests/components/matter/fixtures/nodes/onoff_light_alt_name.json index 46575640adf..ac462cd7951 100644 --- a/tests/components/matter/fixtures/nodes/onoff_light_alt_name.json +++ b/tests/components/matter/fixtures/nodes/onoff_light_alt_name.json @@ -384,7 +384,7 @@ "1/768/16390": 0, "1/768/16394": 31, "1/768/16395": 0, - "1/768/16396": 65279, + "1/768/16396": 0, "1/768/16397": 0, "1/768/16400": 0, "1/768/65532": 31, diff --git a/tests/components/matter/fixtures/nodes/onoff_light_no_name.json b/tests/components/matter/fixtures/nodes/onoff_light_no_name.json index a6c73564af0..19cd58bf5cb 100644 --- a/tests/components/matter/fixtures/nodes/onoff_light_no_name.json +++ b/tests/components/matter/fixtures/nodes/onoff_light_no_name.json @@ -384,7 +384,7 @@ "1/768/16390": 0, "1/768/16394": 31, "1/768/16395": 0, - "1/768/16396": 65279, + "1/768/16396": 0, "1/768/16397": 0, "1/768/16400": 0, "1/768/65532": 31, From 74eddce3d3378f0b7b213a2e9e040c909d2058fb Mon Sep 17 00:00:00 2001 From: G Johansson Date: Mon, 9 Dec 2024 15:23:21 +0100 Subject: [PATCH 0381/1198] Change to module function in statistics (#132648) --- .../components/statistics/config_flow.py | 4 +- homeassistant/components/statistics/sensor.py | 635 ++++++++++-------- 2 files changed, 375 insertions(+), 264 deletions(-) diff --git a/homeassistant/components/statistics/config_flow.py b/homeassistant/components/statistics/config_flow.py index 4280c92131a..4c78afbde9c 100644 --- a/homeassistant/components/statistics/config_flow.py +++ b/homeassistant/components/statistics/config_flow.py @@ -57,9 +57,9 @@ async def get_state_characteristics(handler: SchemaCommonFlowHandler) -> vol.Sch split_entity_id(handler.options[CONF_ENTITY_ID])[0] == BINARY_SENSOR_DOMAIN ) if is_binary: - options = STATS_BINARY_SUPPORT + options = list(STATS_BINARY_SUPPORT) else: - options = STATS_NUMERIC_SUPPORT + options = list(STATS_NUMERIC_SUPPORT) return vol.Schema( { diff --git 
a/homeassistant/components/statistics/sensor.py b/homeassistant/components/statistics/sensor.py index b6f1844f774..8988e0cdd63 100644 --- a/homeassistant/components/statistics/sensor.py +++ b/homeassistant/components/statistics/sensor.py @@ -53,7 +53,7 @@ from homeassistant.helpers.event import ( async_track_state_report_event, ) from homeassistant.helpers.reload import async_setup_reload_service -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType +from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util import dt as dt_util from homeassistant.util.enum import try_parse_enum @@ -97,47 +97,379 @@ STAT_VALUE_MAX = "value_max" STAT_VALUE_MIN = "value_min" STAT_VARIANCE = "variance" + +def _callable_characteristic_fn( + characteristic: str, binary: bool +) -> Callable[ + [deque[bool | float], deque[datetime], int], float | int | datetime | None +]: + """Return the function callable of one characteristic function.""" + Callable[[deque[bool | float], deque[datetime], int], datetime | int | float | None] + if binary: + return STATS_BINARY_SUPPORT[characteristic] + return STATS_NUMERIC_SUPPORT[characteristic] + + +# Statistics for numeric sensor + + +def _stat_average_linear( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> float | None: + if len(states) == 1: + return states[0] + if len(states) >= 2: + area: float = 0 + for i in range(1, len(states)): + area += ( + 0.5 + * (states[i] + states[i - 1]) + * (ages[i] - ages[i - 1]).total_seconds() + ) + age_range_seconds = (ages[-1] - ages[0]).total_seconds() + return area / age_range_seconds + return None + + +def _stat_average_step( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> float | None: + if len(states) == 1: + return states[0] + if len(states) >= 2: + area: float = 0 + for i in range(1, len(states)): + area += states[i - 1] * (ages[i] - ages[i - 1]).total_seconds() + age_range_seconds = (ages[-1] - ages[0]).total_seconds() + return area / age_range_seconds + return None + + +def _stat_average_timeless( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> float | None: + return _stat_mean(states, ages, percentile) + + +def _stat_change( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> float | None: + if len(states) > 0: + return states[-1] - states[0] + return None + + +def _stat_change_sample( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> float | None: + if len(states) > 1: + return (states[-1] - states[0]) / (len(states) - 1) + return None + + +def _stat_change_second( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> float | None: + if len(states) > 1: + age_range_seconds = (ages[-1] - ages[0]).total_seconds() + if age_range_seconds > 0: + return (states[-1] - states[0]) / age_range_seconds + return None + + +def _stat_count( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> int | None: + return len(states) + + +def _stat_datetime_newest( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> datetime | None: + if len(states) > 0: + return ages[-1] + return None + + +def _stat_datetime_oldest( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> datetime | None: + if len(states) > 0: + return ages[0] + return None + + +def _stat_datetime_value_max( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> datetime | None: + if 
len(states) > 0: + return ages[states.index(max(states))] + return None + + +def _stat_datetime_value_min( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> datetime | None: + if len(states) > 0: + return ages[states.index(min(states))] + return None + + +def _stat_distance_95_percent_of_values( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> float | None: + if len(states) >= 1: + return ( + 2 * 1.96 * cast(float, _stat_standard_deviation(states, ages, percentile)) + ) + return None + + +def _stat_distance_99_percent_of_values( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> float | None: + if len(states) >= 1: + return ( + 2 * 2.58 * cast(float, _stat_standard_deviation(states, ages, percentile)) + ) + return None + + +def _stat_distance_absolute( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> float | None: + if len(states) > 0: + return max(states) - min(states) + return None + + +def _stat_mean( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> float | None: + if len(states) > 0: + return statistics.mean(states) + return None + + +def _stat_mean_circular( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> float | None: + if len(states) > 0: + sin_sum = sum(math.sin(math.radians(x)) for x in states) + cos_sum = sum(math.cos(math.radians(x)) for x in states) + return (math.degrees(math.atan2(sin_sum, cos_sum)) + 360) % 360 + return None + + +def _stat_median( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> float | None: + if len(states) > 0: + return statistics.median(states) + return None + + +def _stat_noisiness( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> float | None: + if len(states) == 1: + return 0.0 + if len(states) >= 2: + return cast(float, _stat_sum_differences(states, ages, percentile)) / ( + len(states) - 1 + ) + return None + + +def _stat_percentile( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> float | None: + if len(states) == 1: + return states[0] + if len(states) >= 2: + percentiles = statistics.quantiles(states, n=100, method="exclusive") + return percentiles[percentile - 1] + return None + + +def _stat_standard_deviation( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> float | None: + if len(states) == 1: + return 0.0 + if len(states) >= 2: + return statistics.stdev(states) + return None + + +def _stat_sum( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> float | None: + if len(states) > 0: + return sum(states) + return None + + +def _stat_sum_differences( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> float | None: + if len(states) == 1: + return 0.0 + if len(states) >= 2: + return sum( + abs(j - i) for i, j in zip(list(states), list(states)[1:], strict=False) + ) + return None + + +def _stat_sum_differences_nonnegative( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> float | None: + if len(states) == 1: + return 0.0 + if len(states) >= 2: + return sum( + (j - i if j >= i else j - 0) + for i, j in zip(list(states), list(states)[1:], strict=False) + ) + return None + + +def _stat_total( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> float | None: + return _stat_sum(states, ages, percentile) + + +def _stat_value_max( + states: deque[bool | float], ages: deque[datetime], percentile: 
int +) -> float | None: + if len(states) > 0: + return max(states) + return None + + +def _stat_value_min( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> float | None: + if len(states) > 0: + return min(states) + return None + + +def _stat_variance( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> float | None: + if len(states) == 1: + return 0.0 + if len(states) >= 2: + return statistics.variance(states) + return None + + +# Statistics for binary sensor + + +def _stat_binary_average_step( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> float | None: + if len(states) == 1: + return 100.0 * int(states[0] is True) + if len(states) >= 2: + on_seconds: float = 0 + for i in range(1, len(states)): + if states[i - 1] is True: + on_seconds += (ages[i] - ages[i - 1]).total_seconds() + age_range_seconds = (ages[-1] - ages[0]).total_seconds() + return 100 / age_range_seconds * on_seconds + return None + + +def _stat_binary_average_timeless( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> float | None: + return _stat_binary_mean(states, ages, percentile) + + +def _stat_binary_count( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> int | None: + return len(states) + + +def _stat_binary_count_on( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> int | None: + return states.count(True) + + +def _stat_binary_count_off( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> int | None: + return states.count(False) + + +def _stat_binary_datetime_newest( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> datetime | None: + return _stat_datetime_newest(states, ages, percentile) + + +def _stat_binary_datetime_oldest( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> datetime | None: + return _stat_datetime_oldest(states, ages, percentile) + + +def _stat_binary_mean( + states: deque[bool | float], ages: deque[datetime], percentile: int +) -> float | None: + if len(states) > 0: + return 100.0 / len(states) * states.count(True) + return None + + # Statistics supported by a sensor source (numeric) STATS_NUMERIC_SUPPORT = { - STAT_AVERAGE_LINEAR, - STAT_AVERAGE_STEP, - STAT_AVERAGE_TIMELESS, - STAT_CHANGE_SAMPLE, - STAT_CHANGE_SECOND, - STAT_CHANGE, - STAT_COUNT, - STAT_DATETIME_NEWEST, - STAT_DATETIME_OLDEST, - STAT_DATETIME_VALUE_MAX, - STAT_DATETIME_VALUE_MIN, - STAT_DISTANCE_95P, - STAT_DISTANCE_99P, - STAT_DISTANCE_ABSOLUTE, - STAT_MEAN, - STAT_MEAN_CIRCULAR, - STAT_MEDIAN, - STAT_NOISINESS, - STAT_PERCENTILE, - STAT_STANDARD_DEVIATION, - STAT_SUM, - STAT_SUM_DIFFERENCES, - STAT_SUM_DIFFERENCES_NONNEGATIVE, - STAT_TOTAL, - STAT_VALUE_MAX, - STAT_VALUE_MIN, - STAT_VARIANCE, + STAT_AVERAGE_LINEAR: _stat_average_linear, + STAT_AVERAGE_STEP: _stat_average_step, + STAT_AVERAGE_TIMELESS: _stat_average_timeless, + STAT_CHANGE_SAMPLE: _stat_change_sample, + STAT_CHANGE_SECOND: _stat_change_second, + STAT_CHANGE: _stat_change, + STAT_COUNT: _stat_count, + STAT_DATETIME_NEWEST: _stat_datetime_newest, + STAT_DATETIME_OLDEST: _stat_datetime_oldest, + STAT_DATETIME_VALUE_MAX: _stat_datetime_value_max, + STAT_DATETIME_VALUE_MIN: _stat_datetime_value_min, + STAT_DISTANCE_95P: _stat_distance_95_percent_of_values, + STAT_DISTANCE_99P: _stat_distance_99_percent_of_values, + STAT_DISTANCE_ABSOLUTE: _stat_distance_absolute, + STAT_MEAN: _stat_mean, + STAT_MEAN_CIRCULAR: _stat_mean_circular, + 
STAT_MEDIAN: _stat_median, + STAT_NOISINESS: _stat_noisiness, + STAT_PERCENTILE: _stat_percentile, + STAT_STANDARD_DEVIATION: _stat_standard_deviation, + STAT_SUM: _stat_sum, + STAT_SUM_DIFFERENCES: _stat_sum_differences, + STAT_SUM_DIFFERENCES_NONNEGATIVE: _stat_sum_differences_nonnegative, + STAT_TOTAL: _stat_total, + STAT_VALUE_MAX: _stat_value_max, + STAT_VALUE_MIN: _stat_value_min, + STAT_VARIANCE: _stat_variance, } # Statistics supported by a binary_sensor source STATS_BINARY_SUPPORT = { - STAT_AVERAGE_STEP, - STAT_AVERAGE_TIMELESS, - STAT_COUNT, - STAT_COUNT_BINARY_ON, - STAT_COUNT_BINARY_OFF, - STAT_DATETIME_NEWEST, - STAT_DATETIME_OLDEST, - STAT_MEAN, + STAT_AVERAGE_STEP: _stat_binary_average_step, + STAT_AVERAGE_TIMELESS: _stat_binary_average_timeless, + STAT_COUNT: _stat_binary_count, + STAT_COUNT_BINARY_ON: _stat_binary_count_on, + STAT_COUNT_BINARY_OFF: _stat_binary_count_off, + STAT_DATETIME_NEWEST: _stat_binary_datetime_newest, + STAT_DATETIME_OLDEST: _stat_binary_datetime_oldest, + STAT_MEAN: _stat_binary_mean, } STATS_NOT_A_NUMBER = { @@ -366,9 +698,10 @@ class StatisticsSensor(SensorEntity): self.ages: deque[datetime] = deque(maxlen=self._samples_max_buffer_size) self._attr_extra_state_attributes = {} - self._state_characteristic_fn: Callable[[], float | int | datetime | None] = ( - self._callable_characteristic_fn(self._state_characteristic) - ) + self._state_characteristic_fn: Callable[ + [deque[bool | float], deque[datetime], int], + float | int | datetime | None, + ] = _callable_characteristic_fn(self._state_characteristic, self.is_binary) self._update_listener: CALLBACK_TYPE | None = None self._preview_callback: Callable[[str, Mapping[str, Any]], None] | None = None @@ -754,7 +1087,7 @@ class StatisticsSensor(SensorEntity): One of the _stat_*() functions is represented by self._state_characteristic_fn(). 
""" - value = self._state_characteristic_fn() + value = self._state_characteristic_fn(self.states, self.ages, self._percentile) _LOGGER.debug( "Updating value: states: %s, ages: %s => %s", self.states, self.ages, value ) @@ -764,225 +1097,3 @@ class StatisticsSensor(SensorEntity): if self._precision == 0: value = int(value) self._attr_native_value = value - - def _callable_characteristic_fn( - self, characteristic: str - ) -> Callable[[], float | int | datetime | None]: - """Return the function callable of one characteristic function.""" - function: Callable[[], float | int | datetime | None] = getattr( - self, - f"_stat_binary_{characteristic}" - if self.is_binary - else f"_stat_{characteristic}", - ) - return function - - # Statistics for numeric sensor - - def _stat_average_linear(self) -> StateType: - if len(self.states) == 1: - return self.states[0] - if len(self.states) >= 2: - area: float = 0 - for i in range(1, len(self.states)): - area += ( - 0.5 - * (self.states[i] + self.states[i - 1]) - * (self.ages[i] - self.ages[i - 1]).total_seconds() - ) - age_range_seconds = (self.ages[-1] - self.ages[0]).total_seconds() - return area / age_range_seconds - return None - - def _stat_average_step(self) -> StateType: - if len(self.states) == 1: - return self.states[0] - if len(self.states) >= 2: - area: float = 0 - for i in range(1, len(self.states)): - area += ( - self.states[i - 1] - * (self.ages[i] - self.ages[i - 1]).total_seconds() - ) - age_range_seconds = (self.ages[-1] - self.ages[0]).total_seconds() - return area / age_range_seconds - return None - - def _stat_average_timeless(self) -> StateType: - return self._stat_mean() - - def _stat_change(self) -> StateType: - if len(self.states) > 0: - return self.states[-1] - self.states[0] - return None - - def _stat_change_sample(self) -> StateType: - if len(self.states) > 1: - return (self.states[-1] - self.states[0]) / (len(self.states) - 1) - return None - - def _stat_change_second(self) -> StateType: - if len(self.states) > 1: - age_range_seconds = (self.ages[-1] - self.ages[0]).total_seconds() - if age_range_seconds > 0: - return (self.states[-1] - self.states[0]) / age_range_seconds - return None - - def _stat_count(self) -> StateType: - return len(self.states) - - def _stat_datetime_newest(self) -> datetime | None: - if len(self.states) > 0: - return self.ages[-1] - return None - - def _stat_datetime_oldest(self) -> datetime | None: - if len(self.states) > 0: - return self.ages[0] - return None - - def _stat_datetime_value_max(self) -> datetime | None: - if len(self.states) > 0: - return self.ages[self.states.index(max(self.states))] - return None - - def _stat_datetime_value_min(self) -> datetime | None: - if len(self.states) > 0: - return self.ages[self.states.index(min(self.states))] - return None - - def _stat_distance_95_percent_of_values(self) -> StateType: - if len(self.states) >= 1: - return 2 * 1.96 * cast(float, self._stat_standard_deviation()) - return None - - def _stat_distance_99_percent_of_values(self) -> StateType: - if len(self.states) >= 1: - return 2 * 2.58 * cast(float, self._stat_standard_deviation()) - return None - - def _stat_distance_absolute(self) -> StateType: - if len(self.states) > 0: - return max(self.states) - min(self.states) - return None - - def _stat_mean(self) -> StateType: - if len(self.states) > 0: - return statistics.mean(self.states) - return None - - def _stat_mean_circular(self) -> StateType: - if len(self.states) > 0: - sin_sum = sum(math.sin(math.radians(x)) for x in self.states) - cos_sum = 
sum(math.cos(math.radians(x)) for x in self.states) - return (math.degrees(math.atan2(sin_sum, cos_sum)) + 360) % 360 - return None - - def _stat_median(self) -> StateType: - if len(self.states) > 0: - return statistics.median(self.states) - return None - - def _stat_noisiness(self) -> StateType: - if len(self.states) == 1: - return 0.0 - if len(self.states) >= 2: - return cast(float, self._stat_sum_differences()) / (len(self.states) - 1) - return None - - def _stat_percentile(self) -> StateType: - if len(self.states) == 1: - return self.states[0] - if len(self.states) >= 2: - percentiles = statistics.quantiles(self.states, n=100, method="exclusive") - return percentiles[self._percentile - 1] - return None - - def _stat_standard_deviation(self) -> StateType: - if len(self.states) == 1: - return 0.0 - if len(self.states) >= 2: - return statistics.stdev(self.states) - return None - - def _stat_sum(self) -> StateType: - if len(self.states) > 0: - return sum(self.states) - return None - - def _stat_sum_differences(self) -> StateType: - if len(self.states) == 1: - return 0.0 - if len(self.states) >= 2: - return sum( - abs(j - i) - for i, j in zip(list(self.states), list(self.states)[1:], strict=False) - ) - return None - - def _stat_sum_differences_nonnegative(self) -> StateType: - if len(self.states) == 1: - return 0.0 - if len(self.states) >= 2: - return sum( - (j - i if j >= i else j - 0) - for i, j in zip(list(self.states), list(self.states)[1:], strict=False) - ) - return None - - def _stat_total(self) -> StateType: - return self._stat_sum() - - def _stat_value_max(self) -> StateType: - if len(self.states) > 0: - return max(self.states) - return None - - def _stat_value_min(self) -> StateType: - if len(self.states) > 0: - return min(self.states) - return None - - def _stat_variance(self) -> StateType: - if len(self.states) == 1: - return 0.0 - if len(self.states) >= 2: - return statistics.variance(self.states) - return None - - # Statistics for binary sensor - - def _stat_binary_average_step(self) -> StateType: - if len(self.states) == 1: - return 100.0 * int(self.states[0] is True) - if len(self.states) >= 2: - on_seconds: float = 0 - for i in range(1, len(self.states)): - if self.states[i - 1] is True: - on_seconds += (self.ages[i] - self.ages[i - 1]).total_seconds() - age_range_seconds = (self.ages[-1] - self.ages[0]).total_seconds() - return 100 / age_range_seconds * on_seconds - return None - - def _stat_binary_average_timeless(self) -> StateType: - return self._stat_binary_mean() - - def _stat_binary_count(self) -> StateType: - return len(self.states) - - def _stat_binary_count_on(self) -> StateType: - return self.states.count(True) - - def _stat_binary_count_off(self) -> StateType: - return self.states.count(False) - - def _stat_binary_datetime_newest(self) -> datetime | None: - return self._stat_datetime_newest() - - def _stat_binary_datetime_oldest(self) -> datetime | None: - return self._stat_datetime_oldest() - - def _stat_binary_mean(self) -> StateType: - if len(self.states) > 0: - return 100.0 / len(self.states) * self.states.count(True) - return None From 8d72443fd6191462f1fb91e0e2403bb9fd56dda0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=85ke=20Strandberg?= Date: Mon, 9 Dec 2024 15:47:40 +0100 Subject: [PATCH 0382/1198] Set unique_id in myuplink config entry (#132672) --- homeassistant/components/myuplink/__init__.py | 28 +++++++++++++++ .../components/myuplink/config_flow.py | 13 +++++++ .../components/myuplink/strings.json | 1 + tests/components/myuplink/conftest.py | 24 
+++++++++++-- tests/components/myuplink/const.py | 1 + tests/components/myuplink/test_config_flow.py | 8 +++-- tests/components/myuplink/test_init.py | 36 ++++++++++++++++++- 7 files changed, 105 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/myuplink/__init__.py b/homeassistant/components/myuplink/__init__.py index d801f27817d..c3ff8b6988b 100644 --- a/homeassistant/components/myuplink/__init__.py +++ b/homeassistant/components/myuplink/__init__.py @@ -3,8 +3,10 @@ from __future__ import annotations from http import HTTPStatus +import logging from aiohttp import ClientError, ClientResponseError +import jwt from myuplink import MyUplinkAPI, get_manufacturer, get_model, get_system_name from homeassistant.config_entries import ConfigEntry @@ -22,6 +24,8 @@ from .api import AsyncConfigEntryAuth from .const import DOMAIN, OAUTH2_SCOPES from .coordinator import MyUplinkDataCoordinator +_LOGGER = logging.getLogger(__name__) + PLATFORMS: list[Platform] = [ Platform.BINARY_SENSOR, Platform.NUMBER, @@ -109,3 +113,27 @@ async def async_remove_config_entry_device( return not device_entry.identifiers.intersection( (DOMAIN, device_id) for device_id in myuplink_data.data.devices ) + + +async def async_migrate_entry( + hass: HomeAssistant, config_entry: MyUplinkConfigEntry +) -> bool: + """Migrate old entry.""" + + # Use sub(ject) from access_token as unique_id + if config_entry.version == 1 and config_entry.minor_version == 1: + token = jwt.decode( + config_entry.data["token"]["access_token"], + options={"verify_signature": False}, + ) + uid = token["sub"] + hass.config_entries.async_update_entry( + config_entry, unique_id=uid, minor_version=2 + ) + _LOGGER.info( + "Migration to version %s.%s successful", + config_entry.version, + config_entry.minor_version, + ) + + return True diff --git a/homeassistant/components/myuplink/config_flow.py b/homeassistant/components/myuplink/config_flow.py index 554347cfd19..15bff643185 100644 --- a/homeassistant/components/myuplink/config_flow.py +++ b/homeassistant/components/myuplink/config_flow.py @@ -4,6 +4,8 @@ from collections.abc import Mapping import logging from typing import Any +import jwt + from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.helpers import config_entry_oauth2_flow @@ -15,6 +17,8 @@ class OAuth2FlowHandler( ): """Config flow to handle myUplink OAuth2 authentication.""" + VERSION = 1 + MINOR_VERSION = 2 DOMAIN = DOMAIN @property @@ -46,8 +50,17 @@ class OAuth2FlowHandler( async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult: """Create or update the config entry.""" + + token = jwt.decode( + data["token"]["access_token"], options={"verify_signature": False} + ) + uid = token["sub"] + await self.async_set_unique_id(uid) + if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch(reason="account_mismatch") return self.async_update_reload_and_abort( self._get_reauth_entry(), data=data ) + self._abort_if_unique_id_configured() return await super().async_oauth_create_entry(data) diff --git a/homeassistant/components/myuplink/strings.json b/homeassistant/components/myuplink/strings.json index 997c6fe54b6..bd60a3c7bb3 100644 --- a/homeassistant/components/myuplink/strings.json +++ b/homeassistant/components/myuplink/strings.json @@ -23,6 +23,7 @@ "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", "reauth_successful": 
"[%key:common::config_flow::abort::reauth_successful%]", + "account_mismatch": "The used account does not match the original account", "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]" }, "create_entry": { diff --git a/tests/components/myuplink/conftest.py b/tests/components/myuplink/conftest.py index 9ede11146ef..3ab186b61a8 100644 --- a/tests/components/myuplink/conftest.py +++ b/tests/components/myuplink/conftest.py @@ -15,10 +15,11 @@ from homeassistant.components.application_credentials import ( ) from homeassistant.components.myuplink.const import DOMAIN from homeassistant.core import HomeAssistant +from homeassistant.helpers import config_entry_oauth2_flow from homeassistant.setup import async_setup_component from homeassistant.util.json import json_loads -from .const import CLIENT_ID, CLIENT_SECRET +from .const import CLIENT_ID, CLIENT_SECRET, UNIQUE_ID from tests.common import MockConfigEntry, load_fixture @@ -33,7 +34,7 @@ def mock_expires_at() -> float: def mock_config_entry(hass: HomeAssistant, expires_at: float) -> MockConfigEntry: """Return the default mocked config entry.""" config_entry = MockConfigEntry( - version=1, + minor_version=2, domain=DOMAIN, title="myUplink test", data={ @@ -48,6 +49,7 @@ def mock_config_entry(hass: HomeAssistant, expires_at: float) -> MockConfigEntry }, }, entry_id="myuplink_test", + unique_id=UNIQUE_ID, ) config_entry.add_to_hass(hass) return config_entry @@ -189,3 +191,21 @@ async def setup_platform( assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() yield + + +@pytest.fixture +async def access_token(hass: HomeAssistant) -> str: + """Return a valid access token.""" + return config_entry_oauth2_flow._encode_jwt( + hass, + { + "sub": UNIQUE_ID, + "aud": [], + "scp": [ + "WRITESYSTEM", + "READSYSTEM", + "offline_access", + ], + "ou_code": "NA", + }, + ) diff --git a/tests/components/myuplink/const.py b/tests/components/myuplink/const.py index 6001cb151c0..4cb6db952f1 100644 --- a/tests/components/myuplink/const.py +++ b/tests/components/myuplink/const.py @@ -2,3 +2,4 @@ CLIENT_ID = "12345" CLIENT_SECRET = "67890" +UNIQUE_ID = "uid" diff --git a/tests/components/myuplink/test_config_flow.py b/tests/components/myuplink/test_config_flow.py index c24d26057de..509af19db8c 100644 --- a/tests/components/myuplink/test_config_flow.py +++ b/tests/components/myuplink/test_config_flow.py @@ -29,6 +29,7 @@ async def test_full_flow( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, + access_token: str, setup_credentials, ) -> None: """Check full flow.""" @@ -59,7 +60,7 @@ async def test_full_flow( OAUTH2_TOKEN, json={ "refresh_token": "mock-refresh-token", - "access_token": "mock-access-token", + "access_token": access_token, "type": "Bearer", "expires_in": 60, }, @@ -81,6 +82,7 @@ async def test_flow_reauth( aioclient_mock: AiohttpClientMocker, setup_credentials: None, mock_config_entry: MockConfigEntry, + access_token: str, expires_at: float, ) -> None: """Test reauth step.""" @@ -89,7 +91,7 @@ async def test_flow_reauth( OLD_SCOPE_TOKEN = { "auth_implementation": DOMAIN, "token": { - "access_token": "Fake_token", + "access_token": access_token, "scope": OLD_SCOPE, "expires_in": 86399, "refresh_token": "3012bc9f-7a65-4240-b817-9154ffdcc30f", @@ -137,7 +139,7 @@ async def test_flow_reauth( OAUTH2_TOKEN, json={ "refresh_token": "updated-refresh-token", - "access_token": "updated-access-token", + 
"access_token": access_token, "type": "Bearer", "expires_in": "60", "scope": CURRENT_SCOPE, diff --git a/tests/components/myuplink/test_init.py b/tests/components/myuplink/test_init.py index b474db731d1..440002311e9 100644 --- a/tests/components/myuplink/test_init.py +++ b/tests/components/myuplink/test_init.py @@ -12,6 +12,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from . import setup_integration +from .const import UNIQUE_ID from tests.common import MockConfigEntry, load_fixture from tests.test_util.aiohttp import AiohttpClientMocker @@ -92,7 +93,40 @@ async def test_devices_multiple_created_count( mock_myuplink_client: MagicMock, mock_config_entry: MockConfigEntry, ) -> None: - """Test that multiple device are created.""" + """Test that multiple devices are created.""" await setup_integration(hass, mock_config_entry) assert len(device_registry.devices) == 2 + + +async def test_migrate_config_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_myuplink_client: MagicMock, + expires_at: float, + access_token: str, +) -> None: + """Test migration of config entry.""" + mock_entry_v1_1 = MockConfigEntry( + version=1, + minor_version=1, + domain=DOMAIN, + title="myUplink test", + data={ + "auth_implementation": DOMAIN, + "token": { + "access_token": access_token, + "scope": "WRITESYSTEM READSYSTEM offline_access", + "expires_in": 86399, + "refresh_token": "3012bc9f-7a65-4240-b817-9154ffdcc30f", + "token_type": "Bearer", + "expires_at": expires_at, + }, + }, + entry_id="myuplink_test", + ) + + await setup_integration(hass, mock_entry_v1_1) + assert mock_entry_v1_1.version == 1 + assert mock_entry_v1_1.minor_version == 2 + assert mock_entry_v1_1.unique_id == UNIQUE_ID From ac791bdd2088b6d47511d8ababa8142e358852d6 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 15:55:07 +0100 Subject: [PATCH 0383/1198] Migrate opple lights to use Kelvin (#132697) --- homeassistant/components/opple/light.py | 38 +++++++------------------ 1 file changed, 11 insertions(+), 27 deletions(-) diff --git a/homeassistant/components/opple/light.py b/homeassistant/components/opple/light.py index a4aa98bbf69..da2993d1996 100644 --- a/homeassistant/components/opple/light.py +++ b/homeassistant/components/opple/light.py @@ -10,7 +10,7 @@ import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, PLATFORM_SCHEMA as LIGHT_PLATFORM_SCHEMA, ColorMode, LightEntity, @@ -20,10 +20,6 @@ from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.util.color import ( - color_temperature_kelvin_to_mired as kelvin_to_mired, - color_temperature_mired_to_kelvin as mired_to_kelvin, -) _LOGGER = logging.getLogger(__name__) @@ -58,6 +54,8 @@ class OppleLight(LightEntity): _attr_color_mode = ColorMode.COLOR_TEMP _attr_supported_color_modes = {ColorMode.COLOR_TEMP} + _attr_min_color_temp_kelvin = 3000 # 333 Mireds + _attr_max_color_temp_kelvin = 5700 # 175 Mireds def __init__(self, name, host): """Initialize an Opple light.""" @@ -67,7 +65,6 @@ class OppleLight(LightEntity): self._name = name self._is_on = None self._brightness = None - self._color_temp = None @property def available(self) -> bool: @@ -94,21 +91,6 @@ class 
OppleLight(LightEntity): """Return the brightness of the light.""" return self._brightness - @property - def color_temp(self): - """Return the color temperature of this light.""" - return kelvin_to_mired(self._color_temp) - - @property - def min_mireds(self): - """Return minimum supported color temperature.""" - return 175 - - @property - def max_mireds(self): - """Return maximum supported color temperature.""" - return 333 - def turn_on(self, **kwargs: Any) -> None: """Instruct the light to turn on.""" _LOGGER.debug("Turn on light %s %s", self._device.ip, kwargs) @@ -118,9 +100,11 @@ class OppleLight(LightEntity): if ATTR_BRIGHTNESS in kwargs and self.brightness != kwargs[ATTR_BRIGHTNESS]: self._device.brightness = kwargs[ATTR_BRIGHTNESS] - if ATTR_COLOR_TEMP in kwargs and self.color_temp != kwargs[ATTR_COLOR_TEMP]: - color_temp = mired_to_kelvin(kwargs[ATTR_COLOR_TEMP]) - self._device.color_temperature = color_temp + if ( + ATTR_COLOR_TEMP_KELVIN in kwargs + and self.color_temp_kelvin != kwargs[ATTR_COLOR_TEMP_KELVIN] + ): + self._device.color_temperature = kwargs[ATTR_COLOR_TEMP_KELVIN] def turn_off(self, **kwargs: Any) -> None: """Instruct the light to turn off.""" @@ -136,7 +120,7 @@ class OppleLight(LightEntity): prev_available == self.available and self._is_on == self._device.power_on and self._brightness == self._device.brightness - and self._color_temp == self._device.color_temperature + and self._attr_color_temp_kelvin == self._device.color_temperature ): return @@ -146,7 +130,7 @@ class OppleLight(LightEntity): self._is_on = self._device.power_on self._brightness = self._device.brightness - self._color_temp = self._device.color_temperature + self._attr_color_temp_kelvin = self._device.color_temperature if not self.is_on: _LOGGER.debug("Update light %s success: power off", self._device.ip) @@ -155,5 +139,5 @@ class OppleLight(LightEntity): "Update light %s success: power on brightness %s color temperature %s", self._device.ip, self._brightness, - self._color_temp, + self._attr_color_temp_kelvin, ) From 786a417ac9227c64331524ff7886693c8a2d0389 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 16:00:59 +0100 Subject: [PATCH 0384/1198] Use kelvin attributes in baf (#132725) --- homeassistant/components/baf/light.py | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/baf/light.py b/homeassistant/components/baf/light.py index 10450df1ba2..4c0b1e353fe 100644 --- a/homeassistant/components/baf/light.py +++ b/homeassistant/components/baf/light.py @@ -14,7 +14,6 @@ from homeassistant.components.light import ( ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util.color import color_temperature_kelvin_to_mired from . 
import BAFConfigEntry from .entity import BAFEntity @@ -74,20 +73,14 @@ class BAFStandaloneLight(BAFLight): def __init__(self, device: Device) -> None: """Init a standalone light.""" super().__init__(device) - self._attr_min_mireds = color_temperature_kelvin_to_mired( - device.light_warmest_color_temperature - ) - self._attr_max_mireds = color_temperature_kelvin_to_mired( - device.light_coolest_color_temperature - ) + self._attr_max_color_temp_kelvin = device.light_warmest_color_temperature + self._attr_min_color_temp_kelvin = device.light_coolest_color_temperature @callback def _async_update_attrs(self) -> None: """Update attrs from device.""" super()._async_update_attrs() - self._attr_color_temp = color_temperature_kelvin_to_mired( - self._device.light_color_temperature - ) + self._attr_color_temp_kelvin = self._device.light_color_temperature async def async_turn_on(self, **kwargs: Any) -> None: """Turn on the light.""" From 3be0d0d0858fc4af5da93ce8ae2e835f7071f7ca Mon Sep 17 00:00:00 2001 From: G Johansson Date: Mon, 9 Dec 2024 16:04:47 +0100 Subject: [PATCH 0385/1198] Add myself as code owner to statistics (#132719) --- CODEOWNERS | 4 ++-- homeassistant/components/statistics/manifest.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CODEOWNERS b/CODEOWNERS index 782f999601f..8adb39b464b 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1415,8 +1415,8 @@ build.json @home-assistant/supervisor /tests/components/starline/ @anonym-tsk /homeassistant/components/starlink/ @boswelja /tests/components/starlink/ @boswelja -/homeassistant/components/statistics/ @ThomDietrich -/tests/components/statistics/ @ThomDietrich +/homeassistant/components/statistics/ @ThomDietrich @gjohansson-ST +/tests/components/statistics/ @ThomDietrich @gjohansson-ST /homeassistant/components/steam_online/ @tkdrob /tests/components/steam_online/ @tkdrob /homeassistant/components/steamist/ @bdraco diff --git a/homeassistant/components/statistics/manifest.json b/homeassistant/components/statistics/manifest.json index 24d4b4914cb..8eaed552edd 100644 --- a/homeassistant/components/statistics/manifest.json +++ b/homeassistant/components/statistics/manifest.json @@ -2,7 +2,7 @@ "domain": "statistics", "name": "Statistics", "after_dependencies": ["recorder"], - "codeowners": ["@ThomDietrich"], + "codeowners": ["@ThomDietrich", "@gjohansson-ST"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/statistics", "integration_type": "helper", From 49800f9aaa473428e7038710996044efd22c7a82 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Mon, 9 Dec 2024 16:05:40 +0100 Subject: [PATCH 0386/1198] Update pylint to 3.3.2 and astroid to 3.3.6 (#132718) * Update pylint to 3.3.2 and astroid to 3.3.6 * Fix --- homeassistant/components/music_assistant/media_player.py | 1 - requirements_test.txt | 4 ++-- tests/components/samsungtv/test_config_flow.py | 2 -- 3 files changed, 2 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/music_assistant/media_player.py b/homeassistant/components/music_assistant/media_player.py index fdf3a0c0c48..847a71b0061 100644 --- a/homeassistant/components/music_assistant/media_player.py +++ b/homeassistant/components/music_assistant/media_player.py @@ -545,7 +545,6 @@ class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity): self, player: Player, queue: PlayerQueue | None ) -> None: """Update media attributes for the active queue item.""" - # pylint: disable=too-many-statements 
self._attr_media_artist = None self._attr_media_album_artist = None self._attr_media_album_name = None diff --git a/requirements_test.txt b/requirements_test.txt index 1725624a8cd..06a0fd035d3 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -7,7 +7,7 @@ -c homeassistant/package_constraints.txt -r requirements_test_pre_commit.txt -astroid==3.3.5 +astroid==3.3.6 coverage==7.6.8 freezegun==1.5.1 license-expression==30.4.0 @@ -15,7 +15,7 @@ mock-open==1.4.0 mypy-dev==1.14.0a6 pre-commit==4.0.0 pydantic==1.10.19 -pylint==3.3.1 +pylint==3.3.2 pylint-per-file-ignores==1.3.2 pipdeptree==2.23.4 pytest-asyncio==0.24.0 diff --git a/tests/components/samsungtv/test_config_flow.py b/tests/components/samsungtv/test_config_flow.py index 3a849c9d4b1..eb78332b7b3 100644 --- a/tests/components/samsungtv/test_config_flow.py +++ b/tests/components/samsungtv/test_config_flow.py @@ -14,8 +14,6 @@ from samsungtvws.exceptions import ( UnauthorizedError, ) from websockets import frames - -# pylint: disable-next=no-name-in-module from websockets.exceptions import ConnectionClosedError, WebSocketException from homeassistant import config_entries From 21a2ce6b59bf616036e16033340d2b5ab5fece84 Mon Sep 17 00:00:00 2001 From: David Rapan Date: Mon, 9 Dec 2024 16:19:23 +0100 Subject: [PATCH 0387/1198] Add Starlink consumption sensors (#132262) --- .../components/starlink/coordinator.py | 8 +- homeassistant/components/starlink/sensor.py | 16 +++ .../fixtures/history_stats_success.json | 112 ++++++++++++++++++ tests/components/starlink/patchers.py | 5 + .../starlink/snapshots/test_diagnostics.ambr | 7 ++ tests/components/starlink/test_diagnostics.py | 2 + tests/components/starlink/test_init.py | 3 + 7 files changed, 151 insertions(+), 2 deletions(-) create mode 100644 tests/components/starlink/fixtures/history_stats_success.json diff --git a/homeassistant/components/starlink/coordinator.py b/homeassistant/components/starlink/coordinator.py index a891941fb8e..81ee56db3b4 100644 --- a/homeassistant/components/starlink/coordinator.py +++ b/homeassistant/components/starlink/coordinator.py @@ -14,8 +14,10 @@ from starlink_grpc import ( GrpcError, LocationDict, ObstructionDict, + PowerDict, StatusDict, get_sleep_config, + history_stats, location_data, reboot, set_sleep_config, @@ -39,6 +41,7 @@ class StarlinkData: status: StatusDict obstruction: ObstructionDict alert: AlertDict + consumption: PowerDict class StarlinkUpdateCoordinator(DataUpdateCoordinator[StarlinkData]): @@ -58,10 +61,11 @@ class StarlinkUpdateCoordinator(DataUpdateCoordinator[StarlinkData]): def _get_starlink_data(self) -> StarlinkData: """Retrieve Starlink data.""" channel_context = self.channel_context - status = status_data(channel_context) location = location_data(channel_context) sleep = get_sleep_config(channel_context) - return StarlinkData(location, sleep, *status) + status, obstruction, alert = status_data(channel_context) + statistics = history_stats(parse_samples=-1, context=channel_context) + return StarlinkData(location, sleep, status, obstruction, alert, statistics[-1]) async def _async_update_data(self) -> StarlinkData: async with asyncio.timeout(4): diff --git a/homeassistant/components/starlink/sensor.py b/homeassistant/components/starlink/sensor.py index 21f2400022c..4b33a7f4337 100644 --- a/homeassistant/components/starlink/sensor.py +++ b/homeassistant/components/starlink/sensor.py @@ -18,6 +18,8 @@ from homeassistant.const import ( PERCENTAGE, EntityCategory, UnitOfDataRate, + UnitOfEnergy, + UnitOfPower, UnitOfTime, ) from 
homeassistant.core import HomeAssistant @@ -120,4 +122,18 @@ SENSORS: tuple[StarlinkSensorEntityDescription, ...] = ( native_unit_of_measurement=PERCENTAGE, value_fn=lambda data: data.status["pop_ping_drop_rate"] * 100, ), + StarlinkSensorEntityDescription( + key="power", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, + value_fn=lambda data: data.consumption["latest_power"], + ), + StarlinkSensorEntityDescription( + key="energy", + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value_fn=lambda data: data.consumption["total_energy"], + ), ) diff --git a/tests/components/starlink/fixtures/history_stats_success.json b/tests/components/starlink/fixtures/history_stats_success.json new file mode 100644 index 00000000000..5a228dd34af --- /dev/null +++ b/tests/components/starlink/fixtures/history_stats_success.json @@ -0,0 +1,112 @@ +[ + { + "samples": 900, + "end_counter": 119395 + }, + { + "total_ping_drop": 2.4592087380588055, + "count_full_ping_drop": 0, + "count_obstructed": 0, + "total_obstructed_ping_drop": 0, + "count_full_obstructed_ping_drop": 0, + "count_unscheduled": 0, + "total_unscheduled_ping_drop": 0, + "count_full_unscheduled_ping_drop": 0 + }, + { + "init_run_fragment": 0, + "final_run_fragment": 0, + "run_seconds[1,]": [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + ], + "run_minutes[1,]": [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + ] + }, + { + "mean_all_ping_latency": 31.55747121333472, + "deciles_all_ping_latency[]": [ + 21.005102157592773, 22.67989158630371, 25.310760498046875, + 26.85667610168457, 27.947458267211914, 29.192155838012695, + 31.26323890686035, 34.226768493652344, 38.54373550415039, + 42.308048248291016, 60.11151885986328 + ], + "mean_full_ping_latency": 31.526783029284427, + "deciles_full_ping_latency[]": [ + 21.070240020751953, 22.841461181640625, 25.34041976928711, + 26.908039093017578, 27.947458267211914, 29.135879516601562, + 31.122955322265625, 34.1280403137207, 38.49388122558594, + 42.308048248291016, 60.11151885986328 + ], + "stdev_full_ping_latency": 7.8141330200011785 + }, + { + "load_bucket_samples[]": [738, 24, 39, 55, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], + "load_bucket_min_latency[]": [ + 21.070240020751953, + 21.35713768005371, + 21.156545639038086, + 24.763751983642578, + 24.7109317779541, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null + ], + "load_bucket_median_latency[]": [ + 29.2450590133667, + 27.031108856201172, + 25.726211547851562, + 31.845806121826172, + 28.919479370117188, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null + ], + "load_bucket_max_latency[]": [ + 60.11151885986328, + 40.572628021240234, + 48.063961029052734, + 53.505126953125, + 38.7435302734375, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null + ] + }, + { + "download_usage": 72504227, + "upload_usage": 5719755 + }, + { + "latest_power": 27.54502296447754, + "mean_power": 31.449254739549424, + "min_power": 21.826229095458984, + "max_power": 41.71160888671875, + "total_energy": 0.007862313684887356 + } +] diff --git 
a/tests/components/starlink/patchers.py b/tests/components/starlink/patchers.py index f8179f07bed..08e82548ef8 100644 --- a/tests/components/starlink/patchers.py +++ b/tests/components/starlink/patchers.py @@ -24,6 +24,11 @@ SLEEP_DATA_SUCCESS_PATCHER = patch( return_value=json.loads(load_fixture("sleep_data_success.json", "starlink")), ) +HISTORY_STATS_SUCCESS_PATCHER = patch( + "homeassistant.components.starlink.coordinator.history_stats", + return_value=json.loads(load_fixture("history_stats_success.json", "starlink")), +) + DEVICE_FOUND_PATCHER = patch( "homeassistant.components.starlink.config_flow.get_id", return_value="some-valid-id" ) diff --git a/tests/components/starlink/snapshots/test_diagnostics.ambr b/tests/components/starlink/snapshots/test_diagnostics.ambr index 4c85ad84ca7..c0b1b93085b 100644 --- a/tests/components/starlink/snapshots/test_diagnostics.ambr +++ b/tests/components/starlink/snapshots/test_diagnostics.ambr @@ -16,6 +16,13 @@ 'alert_thermal_throttle': False, 'alert_unexpected_location': False, }), + 'consumption': dict({ + 'latest_power': 27.54502296447754, + 'max_power': 41.71160888671875, + 'mean_power': 31.449254739549424, + 'min_power': 21.826229095458984, + 'total_energy': 0.007862313684887356, + }), 'location': dict({ 'altitude': '**REDACTED**', 'latitude': '**REDACTED**', diff --git a/tests/components/starlink/test_diagnostics.py b/tests/components/starlink/test_diagnostics.py index c5876e5e9f2..cd36dd0367e 100644 --- a/tests/components/starlink/test_diagnostics.py +++ b/tests/components/starlink/test_diagnostics.py @@ -7,6 +7,7 @@ from homeassistant.const import CONF_IP_ADDRESS from homeassistant.core import HomeAssistant from .patchers import ( + HISTORY_STATS_SUCCESS_PATCHER, LOCATION_DATA_SUCCESS_PATCHER, SLEEP_DATA_SUCCESS_PATCHER, STATUS_DATA_SUCCESS_PATCHER, @@ -32,6 +33,7 @@ async def test_diagnostics( STATUS_DATA_SUCCESS_PATCHER, LOCATION_DATA_SUCCESS_PATCHER, SLEEP_DATA_SUCCESS_PATCHER, + HISTORY_STATS_SUCCESS_PATCHER, ): entry.add_to_hass(hass) diff --git a/tests/components/starlink/test_init.py b/tests/components/starlink/test_init.py index 62a1ee41236..7e04c21562a 100644 --- a/tests/components/starlink/test_init.py +++ b/tests/components/starlink/test_init.py @@ -6,6 +6,7 @@ from homeassistant.const import CONF_IP_ADDRESS from homeassistant.core import HomeAssistant from .patchers import ( + HISTORY_STATS_SUCCESS_PATCHER, LOCATION_DATA_SUCCESS_PATCHER, SLEEP_DATA_SUCCESS_PATCHER, STATUS_DATA_SUCCESS_PATCHER, @@ -25,6 +26,7 @@ async def test_successful_entry(hass: HomeAssistant) -> None: STATUS_DATA_SUCCESS_PATCHER, LOCATION_DATA_SUCCESS_PATCHER, SLEEP_DATA_SUCCESS_PATCHER, + HISTORY_STATS_SUCCESS_PATCHER, ): entry.add_to_hass(hass) @@ -46,6 +48,7 @@ async def test_unload_entry(hass: HomeAssistant) -> None: STATUS_DATA_SUCCESS_PATCHER, LOCATION_DATA_SUCCESS_PATCHER, SLEEP_DATA_SUCCESS_PATCHER, + HISTORY_STATS_SUCCESS_PATCHER, ): entry.add_to_hass(hass) From a20347963e09b76dcc4818319a8719e3f0a1fd42 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 16:25:15 +0100 Subject: [PATCH 0388/1198] Migrate flux_led lights to use Kelvin (#132687) --- homeassistant/components/flux_led/light.py | 21 ++++++++------------- 1 file changed, 8 insertions(+), 13 deletions(-) diff --git a/homeassistant/components/flux_led/light.py b/homeassistant/components/flux_led/light.py index f4982a13c3a..ca7fb7aeea2 100644 --- a/homeassistant/components/flux_led/light.py +++ b/homeassistant/components/flux_led/light.py @@ 
-14,7 +14,7 @@ import voluptuous as vol from homeassistant import config_entries from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, @@ -30,10 +30,6 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import VolDictType from homeassistant.helpers.update_coordinator import CoordinatorEntity -from homeassistant.util.color import ( - color_temperature_kelvin_to_mired, - color_temperature_mired_to_kelvin, -) from .const import ( CONF_COLORS, @@ -67,7 +63,7 @@ _LOGGER = logging.getLogger(__name__) MODE_ATTRS = { ATTR_EFFECT, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, @@ -205,8 +201,8 @@ class FluxLight( ) -> None: """Initialize the light.""" super().__init__(coordinator, base_unique_id, None) - self._attr_min_mireds = color_temperature_kelvin_to_mired(self._device.max_temp) - self._attr_max_mireds = color_temperature_kelvin_to_mired(self._device.min_temp) + self._attr_min_color_temp_kelvin = self._device.min_temp + self._attr_max_color_temp_kelvin = self._device.max_temp self._attr_supported_color_modes = _hass_color_modes(self._device) custom_effects: list[str] = [] if custom_effect_colors: @@ -222,9 +218,9 @@ class FluxLight( return self._device.brightness @property - def color_temp(self) -> int: - """Return the kelvin value of this light in mired.""" - return color_temperature_kelvin_to_mired(self._device.color_temp) + def color_temp_kelvin(self) -> int: + """Return the kelvin value of this light.""" + return self._device.color_temp @property def rgb_color(self) -> tuple[int, int, int]: @@ -304,8 +300,7 @@ class FluxLight( await self._async_set_effect(effect, brightness) return # Handle switch to CCT Color Mode - if color_temp_mired := kwargs.get(ATTR_COLOR_TEMP): - color_temp_kelvin = color_temperature_mired_to_kelvin(color_temp_mired) + if color_temp_kelvin := kwargs.get(ATTR_COLOR_TEMP_KELVIN): if ( ATTR_BRIGHTNESS not in kwargs and self.color_mode in MULTI_BRIGHTNESS_COLOR_MODES From 46e513615e3604970f11c7f5c81d84b94a02a855 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 16:25:25 +0100 Subject: [PATCH 0389/1198] Migrate switchbot lights to use Kelvin (#132695) --- homeassistant/components/switchbot/light.py | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/switchbot/light.py b/homeassistant/components/switchbot/light.py index 836ba1bd4f3..927ad5120c7 100644 --- a/homeassistant/components/switchbot/light.py +++ b/homeassistant/components/switchbot/light.py @@ -8,17 +8,13 @@ from switchbot import ColorMode as SwitchBotColorMode, SwitchbotBaseLight from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ColorMode, LightEntity, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util.color import ( - color_temperature_kelvin_to_mired, - color_temperature_mired_to_kelvin, -) from .coordinator import SwitchbotConfigEntry, SwitchbotDataUpdateCoordinator from .entity import SwitchbotEntity @@ -50,8 +46,8 @@ class SwitchbotLightEntity(SwitchbotEntity, LightEntity): """Initialize the Switchbot light.""" super().__init__(coordinator) device = self._device - 
self._attr_min_mireds = color_temperature_kelvin_to_mired(device.max_temp) - self._attr_max_mireds = color_temperature_kelvin_to_mired(device.min_temp) + self._attr_max_color_temp_kelvin = device.max_temp + self._attr_min_color_temp_kelvin = device.min_temp self._attr_supported_color_modes = { SWITCHBOT_COLOR_MODE_TO_HASS[mode] for mode in device.color_modes } @@ -64,7 +60,7 @@ class SwitchbotLightEntity(SwitchbotEntity, LightEntity): self._attr_is_on = self._device.on self._attr_brightness = max(0, min(255, round(device.brightness * 2.55))) if device.color_mode == SwitchBotColorMode.COLOR_TEMP: - self._attr_color_temp = color_temperature_kelvin_to_mired(device.color_temp) + self._attr_color_temp_kelvin = device.color_temp self._attr_color_mode = ColorMode.COLOR_TEMP return self._attr_rgb_color = device.rgb @@ -77,10 +73,9 @@ class SwitchbotLightEntity(SwitchbotEntity, LightEntity): if ( self.supported_color_modes and ColorMode.COLOR_TEMP in self.supported_color_modes - and ATTR_COLOR_TEMP in kwargs + and ATTR_COLOR_TEMP_KELVIN in kwargs ): - color_temp = kwargs[ATTR_COLOR_TEMP] - kelvin = max(2700, min(6500, color_temperature_mired_to_kelvin(color_temp))) + kelvin = max(2700, min(6500, kwargs[ATTR_COLOR_TEMP_KELVIN])) await self._device.set_color_temp(brightness, kelvin) return if ATTR_RGB_COLOR in kwargs: From 887f1621e586162883a8a23e098e9374975c2718 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Mon, 9 Dec 2024 10:08:58 -0600 Subject: [PATCH 0390/1198] Bump intents to 2024.12.9 (#132726) --- homeassistant/components/conversation/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- script/hassfest/docker/Dockerfile | 2 +- tests/components/conversation/snapshots/test_http.ambr | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index 72e1cebf462..41c9a2d2691 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", "quality_scale": "internal", - "requirements": ["hassil==2.0.5", "home-assistant-intents==2024.12.4"] + "requirements": ["hassil==2.0.5", "home-assistant-intents==2024.12.9"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 053e2b21279..050a6267b85 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -35,7 +35,7 @@ hass-nabucasa==0.86.0 hassil==2.0.5 home-assistant-bluetooth==1.13.0 home-assistant-frontend==20241127.6 -home-assistant-intents==2024.12.4 +home-assistant-intents==2024.12.9 httpx==0.27.2 ifaddr==0.2.0 Jinja2==3.1.4 diff --git a/requirements_all.txt b/requirements_all.txt index 35affc2b491..9dc0995640f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1130,7 +1130,7 @@ holidays==0.62 home-assistant-frontend==20241127.6 # homeassistant.components.conversation -home-assistant-intents==2024.12.4 +home-assistant-intents==2024.12.9 # homeassistant.components.home_connect homeconnect==0.8.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3c0b93ec31a..28e250ec867 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -956,7 +956,7 @@ holidays==0.62 home-assistant-frontend==20241127.6 # homeassistant.components.conversation 
-home-assistant-intents==2024.12.4 +home-assistant-intents==2024.12.9 # homeassistant.components.home_connect homeconnect==0.8.0 diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index 70ee2971278..98edb9c458f 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -23,7 +23,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.5.4,source=/uv,target=/bin/uv \ -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ -r /usr/src/homeassistant/requirements.txt \ stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.1 \ - PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.0.5 home-assistant-intents==2024.12.4 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 + PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.0.5 home-assistant-intents==2024.12.9 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 LABEL "name"="hassfest" LABEL "maintainer"="Home Assistant " diff --git a/tests/components/conversation/snapshots/test_http.ambr b/tests/components/conversation/snapshots/test_http.ambr index a3edd4fa51c..8023d1ee6fa 100644 --- a/tests/components/conversation/snapshots/test_http.ambr +++ b/tests/components/conversation/snapshots/test_http.ambr @@ -571,7 +571,7 @@ 'name': 'HassGetState', }), 'match': True, - 'sentence_template': '[tell me] how many {on_off_domains:domain} (is|are) {on_off_states:state} [in ]', + 'sentence_template': '[tell me] how many {on_off_domains:domain} (is|are) {on_off_states:state} []', 'slots': dict({ 'area': 'kitchen', 'domain': 'lights', From 241026ef675fd035a7f26ab15413f066d96d61e3 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Mon, 9 Dec 2024 17:09:17 +0100 Subject: [PATCH 0391/1198] Bump yt-dlp to 2024.12.06 (#132684) --- homeassistant/components/media_extractor/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/media_extractor/manifest.json b/homeassistant/components/media_extractor/manifest.json index f85f1561bb9..195dc678bc2 100644 --- a/homeassistant/components/media_extractor/manifest.json +++ b/homeassistant/components/media_extractor/manifest.json @@ -8,6 +8,6 @@ "iot_class": "calculated", "loggers": ["yt_dlp"], "quality_scale": "internal", - "requirements": ["yt-dlp[default]==2024.12.03"], + "requirements": ["yt-dlp[default]==2024.12.06"], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index 9dc0995640f..f807275c415 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3063,7 +3063,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2024.12.03 +yt-dlp[default]==2024.12.06 # homeassistant.components.zamg zamg==0.3.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 28e250ec867..50d6aa2a575 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2452,7 +2452,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2024.12.03 +yt-dlp[default]==2024.12.06 # homeassistant.components.zamg zamg==0.3.6 From 5b06acfabdab2caa6fba726a3ea921f6a43a899d Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Mon, 9 Dec 2024 17:10:52 +0100 Subject: [PATCH 0392/1198] Update frontend to 20241127.7 (#132729) Co-authored-by: Franck Nijhof --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt 
| 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index e68b9312081..bfc08c6e11e 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20241127.6"] + "requirements": ["home-assistant-frontend==20241127.7"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 050a6267b85..2a580edf3a2 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -34,7 +34,7 @@ habluetooth==3.6.0 hass-nabucasa==0.86.0 hassil==2.0.5 home-assistant-bluetooth==1.13.0 -home-assistant-frontend==20241127.6 +home-assistant-frontend==20241127.7 home-assistant-intents==2024.12.9 httpx==0.27.2 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index f807275c415..509662800a9 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1127,7 +1127,7 @@ hole==0.8.0 holidays==0.62 # homeassistant.components.frontend -home-assistant-frontend==20241127.6 +home-assistant-frontend==20241127.7 # homeassistant.components.conversation home-assistant-intents==2024.12.9 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 50d6aa2a575..a74942be69e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -953,7 +953,7 @@ hole==0.8.0 holidays==0.62 # homeassistant.components.frontend -home-assistant-frontend==20241127.6 +home-assistant-frontend==20241127.7 # homeassistant.components.conversation home-assistant-intents==2024.12.9 From 85ed1d2ac826fe0be41c20c0f88186d97e8adc5e Mon Sep 17 00:00:00 2001 From: Simone Rescio Date: Mon, 9 Dec 2024 17:19:10 +0100 Subject: [PATCH 0393/1198] Revert "Bump pyezviz to 0.2.2.3" (#132715) --- homeassistant/components/ezviz/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/ezviz/manifest.json b/homeassistant/components/ezviz/manifest.json index 7c796c74ef7..53976bf3002 100644 --- a/homeassistant/components/ezviz/manifest.json +++ b/homeassistant/components/ezviz/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/ezviz", "iot_class": "cloud_polling", "loggers": ["paho_mqtt", "pyezviz"], - "requirements": ["pyezviz==0.2.2.3"] + "requirements": ["pyezviz==0.2.1.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 509662800a9..f5ac42950bf 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1907,7 +1907,7 @@ pyeverlights==0.1.0 pyevilgenius==2.0.0 # homeassistant.components.ezviz -pyezviz==0.2.2.3 +pyezviz==0.2.1.2 # homeassistant.components.fibaro pyfibaro==0.8.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a74942be69e..737742350bb 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1539,7 +1539,7 @@ pyeverlights==0.1.0 pyevilgenius==2.0.0 # homeassistant.components.ezviz -pyezviz==0.2.2.3 +pyezviz==0.2.1.2 # homeassistant.components.fibaro pyfibaro==0.8.0 From 9d79d905a4dd8d18c7116467cead5a92ac2443c7 Mon Sep 17 00:00:00 2001 From: Raphael Hehl <7577984+RaHehl@users.noreply.github.com> Date: Mon, 9 Dec 2024 17:44:13 +0100 Subject: 
[PATCH 0394/1198] Bump uiprotect to 6.8.0 (#132735) Update uiprotect to version 6.8.0 --- homeassistant/components/unifiprotect/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/unifiprotect/manifest.json b/homeassistant/components/unifiprotect/manifest.json index c4327e4a2f9..9e8a0ea6c21 100644 --- a/homeassistant/components/unifiprotect/manifest.json +++ b/homeassistant/components/unifiprotect/manifest.json @@ -40,7 +40,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["uiprotect", "unifi_discovery"], - "requirements": ["uiprotect==6.7.0", "unifi-discovery==1.2.0"], + "requirements": ["uiprotect==6.8.0", "unifi-discovery==1.2.0"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/requirements_all.txt b/requirements_all.txt index f5ac42950bf..87806eed8bd 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2894,7 +2894,7 @@ typedmonarchmoney==0.3.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.7.0 +uiprotect==6.8.0 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 737742350bb..a0f2d85d3de 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2310,7 +2310,7 @@ typedmonarchmoney==0.3.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.7.0 +uiprotect==6.8.0 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 From b1217f5792b58e3c96815ed78d8bcc85f15dfaa9 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 18:01:24 +0100 Subject: [PATCH 0395/1198] Use ATTR_COLOR_TEMP_KELVIN in alexa (#132733) --- homeassistant/components/alexa/handlers.py | 2 +- tests/components/alexa/test_capabilities.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/alexa/handlers.py b/homeassistant/components/alexa/handlers.py index 89e47673f07..21365076def 100644 --- a/homeassistant/components/alexa/handlers.py +++ b/homeassistant/components/alexa/handlers.py @@ -359,7 +359,7 @@ async def async_api_set_color_temperature( await hass.services.async_call( entity.domain, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_KELVIN: kelvin}, + {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP_KELVIN: kelvin}, blocking=False, context=context, ) diff --git a/tests/components/alexa/test_capabilities.py b/tests/components/alexa/test_capabilities.py index a41c2f47b2d..823afd515b2 100644 --- a/tests/components/alexa/test_capabilities.py +++ b/tests/components/alexa/test_capabilities.py @@ -159,7 +159,7 @@ async def test_api_set_color_temperature(hass: HomeAssistant) -> None: assert len(call_light) == 1 assert call_light[0].data["entity_id"] == "light.test" - assert call_light[0].data["kelvin"] == 7500 + assert call_light[0].data["color_temp_kelvin"] == 7500 assert msg["header"]["name"] == "Response" From 0c08e88953941b62c15d0e5b85b61132dd95ef38 Mon Sep 17 00:00:00 2001 From: Tom Date: Mon, 9 Dec 2024 19:00:51 +0100 Subject: [PATCH 0396/1198] Improve Plugwise tests (#132677) --- .../components/plugwise/quality_scale.yaml | 8 +-- tests/components/plugwise/test_config_flow.py | 68 +++++++++++++++++++ 2 files changed, 70 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/plugwise/quality_scale.yaml b/homeassistant/components/plugwise/quality_scale.yaml index 
b2801319e91..ea5cb61bc14 100644 --- a/homeassistant/components/plugwise/quality_scale.yaml +++ b/homeassistant/components/plugwise/quality_scale.yaml @@ -2,12 +2,8 @@ rules: ## Bronze config-flow: done test-before-configure: done - unique-config-entry: - status: todo - comment: Add tests preventing second entry for same device - config-flow-test-coverage: - status: todo - comment: Cover test_form and zeroconf + unique-config-entry: done + config-flow-test-coverage: done runtime-data: done test-before-setup: done appropriate-polling: done diff --git a/tests/components/plugwise/test_config_flow.py b/tests/components/plugwise/test_config_flow.py index baf6edea9c7..9e1e29f4a48 100644 --- a/tests/components/plugwise/test_config_flow.py +++ b/tests/components/plugwise/test_config_flow.py @@ -35,6 +35,7 @@ TEST_PASSWORD = "test_password" TEST_PORT = 81 TEST_USERNAME = "smile" TEST_USERNAME2 = "stretch" +MOCK_SMILE_ID = "smile12345" TEST_DISCOVERY = ZeroconfServiceInfo( ip_address=ip_address(TEST_HOST), @@ -128,6 +129,8 @@ async def test_form( assert len(mock_setup_entry.mock_calls) == 1 assert len(mock_smile_config_flow.connect.mock_calls) == 1 + assert result2["result"].unique_id == MOCK_SMILE_ID + @pytest.mark.parametrize( ("discovery", "username"), @@ -172,6 +175,8 @@ async def test_zeroconf_flow( assert len(mock_setup_entry.mock_calls) == 1 assert len(mock_smile_config_flow.connect.mock_calls) == 1 + assert result2["result"].unique_id == MOCK_SMILE_ID + async def test_zeroconf_flow_stretch( hass: HomeAssistant, @@ -311,6 +316,69 @@ async def test_flow_errors( assert len(mock_smile_config_flow.connect.mock_calls) == 2 +async def test_user_abort_existing_anna( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_smile_config_flow: MagicMock, +) -> None: + """Test the full user configuration flow.""" + entry = MockConfigEntry( + domain=DOMAIN, + title=CONF_NAME, + data={ + CONF_HOST: TEST_HOST, + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + }, + unique_id=MOCK_SMILE_ID, + ) + entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={CONF_SOURCE: SOURCE_USER} + ) + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: TEST_HOST, + CONF_PASSWORD: TEST_PASSWORD, + }, + ) + await hass.async_block_till_done() + + assert result2.get("type") is FlowResultType.ABORT + assert result2.get("reason") == "already_configured" + + +async def test_zeroconf_abort_existing_anna( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_smile_config_flow: MagicMock, +) -> None: + """Test the full user configuration flow.""" + entry = MockConfigEntry( + domain=DOMAIN, + title=CONF_NAME, + data={ + CONF_HOST: TEST_HOST, + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + }, + unique_id=TEST_HOSTNAME, + ) + entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={CONF_SOURCE: SOURCE_ZEROCONF}, + data=TEST_DISCOVERY_ANNA, + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "already_configured" + + async def test_zeroconf_abort_anna_with_existing_config_entries( hass: HomeAssistant, mock_smile_adam: MagicMock, From 674d42d8a018576e39b97a8242147edb50494c8d Mon Sep 17 00:00:00 2001 From: Tom Date: Mon, 9 Dec 2024 19:05:10 +0100 Subject: [PATCH 0397/1198] Plugwise improve exception translations (#132663) --- .../components/plugwise/coordinator.py | 23 +++++++++++++++---- 
.../components/plugwise/quality_scale.yaml | 16 ++++++------- .../components/plugwise/strings.json | 19 +++++++++++++-- homeassistant/components/plugwise/util.py | 11 ++++++--- 4 files changed, 50 insertions(+), 19 deletions(-) diff --git a/homeassistant/components/plugwise/coordinator.py b/homeassistant/components/plugwise/coordinator.py index bf9e7d31cc0..7ac0cc21c51 100644 --- a/homeassistant/components/plugwise/coordinator.py +++ b/homeassistant/components/plugwise/coordinator.py @@ -73,17 +73,30 @@ class PlugwiseDataUpdateCoordinator(DataUpdateCoordinator[PlugwiseData]): await self._connect() data = await self.api.async_update() except ConnectionFailedError as err: - raise UpdateFailed("Failed to connect") from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="failed_to_connect", + ) from err except InvalidAuthentication as err: - raise ConfigEntryError("Authentication failed") from err + raise ConfigEntryError( + translation_domain=DOMAIN, + translation_key="authentication_failed", + ) from err except (InvalidXMLError, ResponseError) as err: raise UpdateFailed( - "Invalid XML data, or error indication received from the Plugwise Adam/Smile/Stretch" + translation_domain=DOMAIN, + translation_key="invalid_xml_data", ) from err except PlugwiseError as err: - raise UpdateFailed("Data incomplete or missing") from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="data_incomplete_or_missing", + ) from err except UnsupportedDeviceError as err: - raise ConfigEntryError("Device with unsupported firmware") from err + raise ConfigEntryError( + translation_domain=DOMAIN, + translation_key="unsupported_firmware", + ) from err self._async_add_remove_devices(data, self.config_entry) return data diff --git a/homeassistant/components/plugwise/quality_scale.yaml b/homeassistant/components/plugwise/quality_scale.yaml index ea5cb61bc14..4bbafc09004 100644 --- a/homeassistant/components/plugwise/quality_scale.yaml +++ b/homeassistant/components/plugwise/quality_scale.yaml @@ -19,7 +19,7 @@ rules: comment: Verify entity for async_added_to_hass usage (discard?) 
docs-high-level-description: status: todo - comment: Rewrite top section, docs PR prepared + comment: Rewrite top section, docs PR prepared waiting for 36087 merge docs-installation-instructions: status: todo comment: Docs PR 36087 @@ -56,9 +56,7 @@ rules: discovery: done stale-devices: done diagnostics: done - exception-translations: - status: todo - comment: Add coordinator, util exceptions (climate done in core 132175) + exception-translations: done icon-translations: done reconfiguration-flow: status: todo @@ -70,23 +68,23 @@ rules: comment: This integration does not have repairs docs-use-cases: status: todo - comment: Check for completeness, PR prepared + comment: Check for completeness, PR prepared waiting for 36087 merge docs-supported-devices: status: todo - comment: The list is there but could be improved for readability, PR prepared + comment: The list is there but could be improved for readability, PR prepared waiting for 36087 merge docs-supported-functions: status: todo - comment: Check for completeness + comment: Check for completeness, PR prepared waiting for 36087 merge docs-data-update: done docs-known-limitations: status: todo comment: Partial in 36087 but could be more elaborate docs-troubleshooting: status: todo - comment: Check for completeness, PR prepared + comment: Check for completeness, PR prepared waiting for 36087 merge docs-examples: status: todo - comment: Check for completeness + comment: Check for completeness, PR prepared waiting for 36087 merge ## Platinum async-dependency: done inject-websession: done diff --git a/homeassistant/components/plugwise/strings.json b/homeassistant/components/plugwise/strings.json index badd522e78b..87a8e120591 100644 --- a/homeassistant/components/plugwise/strings.json +++ b/homeassistant/components/plugwise/strings.json @@ -286,8 +286,23 @@ } }, "exceptions": { - "invalid_temperature_change_requested": { - "message": "Invalid temperature change requested." + "authentication_failed": { + "message": "[%key:common::config_flow::error::invalid_auth%]" + }, + "data_incomplete_or_missing": { + "message": "Data incomplete or missing." + }, + "error_communicating_with_api": { + "message": "Error communicating with API: {error}." + }, + "failed_to_connect": { + "message": "[%key:common::config_flow::error::cannot_connect%]" + }, + "invalid_xml_data": { + "message": "[%key:component::plugwise::config::error::response_error%]" + }, + "unsupported_firmware": { + "message": "[%key:component::plugwise::config::error::unsupported%]" }, "unsupported_hvac_mode_requested": { "message": "Unsupported mode {hvac_mode} requested, valid modes are: {hvac_modes}." 
diff --git a/homeassistant/components/plugwise/util.py b/homeassistant/components/plugwise/util.py index d998711f2b9..c830e5f69f3 100644 --- a/homeassistant/components/plugwise/util.py +++ b/homeassistant/components/plugwise/util.py @@ -7,6 +7,7 @@ from plugwise.exceptions import PlugwiseException from homeassistant.exceptions import HomeAssistantError +from .const import DOMAIN from .entity import PlugwiseEntity @@ -24,10 +25,14 @@ def plugwise_command[_PlugwiseEntityT: PlugwiseEntity, **_P, _R]( ) -> _R: try: return await func(self, *args, **kwargs) - except PlugwiseException as error: + except PlugwiseException as err: raise HomeAssistantError( - f"Error communicating with API: {error}" - ) from error + translation_domain=DOMAIN, + translation_key="error_communicating_with_api", + translation_placeholders={ + "error": str(err), + }, + ) from err finally: await self.coordinator.async_request_refresh() From c6bcd5a036c3f181880b59e1a5cf76699ed6248f Mon Sep 17 00:00:00 2001 From: adam-the-hero <132444842+adam-the-hero@users.noreply.github.com> Date: Mon, 9 Dec 2024 19:40:13 +0100 Subject: [PATCH 0398/1198] Add Watergate Sonic Local Integration (#129686) Co-authored-by: Mark Breen --- CODEOWNERS | 2 + .../components/watergate/__init__.py | 107 ++++++++++++++++++ .../components/watergate/config_flow.py | 62 ++++++++++ homeassistant/components/watergate/const.py | 5 + .../components/watergate/coordinator.py | 35 ++++++ homeassistant/components/watergate/entity.py | 30 +++++ .../components/watergate/manifest.json | 11 ++ .../components/watergate/quality_scale.yaml | 43 +++++++ .../components/watergate/strings.json | 21 ++++ homeassistant/components/watergate/valve.py | 82 ++++++++++++++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 6 + requirements_all.txt | 3 + requirements_test_all.txt | 3 + tests/components/watergate/__init__.py | 11 ++ tests/components/watergate/conftest.py | 77 +++++++++++++ tests/components/watergate/const.py | 27 +++++ .../watergate/snapshots/test_valve.ambr | 16 +++ .../components/watergate/test_config_flow.py | 107 ++++++++++++++++++ tests/components/watergate/test_init.py | 81 +++++++++++++ tests/components/watergate/test_valve.py | 72 ++++++++++++ 21 files changed, 802 insertions(+) create mode 100644 homeassistant/components/watergate/__init__.py create mode 100644 homeassistant/components/watergate/config_flow.py create mode 100644 homeassistant/components/watergate/const.py create mode 100644 homeassistant/components/watergate/coordinator.py create mode 100644 homeassistant/components/watergate/entity.py create mode 100644 homeassistant/components/watergate/manifest.json create mode 100644 homeassistant/components/watergate/quality_scale.yaml create mode 100644 homeassistant/components/watergate/strings.json create mode 100644 homeassistant/components/watergate/valve.py create mode 100644 tests/components/watergate/__init__.py create mode 100644 tests/components/watergate/conftest.py create mode 100644 tests/components/watergate/const.py create mode 100644 tests/components/watergate/snapshots/test_valve.ambr create mode 100644 tests/components/watergate/test_config_flow.py create mode 100644 tests/components/watergate/test_init.py create mode 100644 tests/components/watergate/test_valve.py diff --git a/CODEOWNERS b/CODEOWNERS index 8adb39b464b..16e9c7d8062 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1644,6 +1644,8 @@ build.json @home-assistant/supervisor /tests/components/waqi/ @joostlek 
/homeassistant/components/water_heater/ @home-assistant/core /tests/components/water_heater/ @home-assistant/core +/homeassistant/components/watergate/ @adam-the-hero +/tests/components/watergate/ @adam-the-hero /homeassistant/components/watson_tts/ @rutkai /homeassistant/components/watttime/ @bachya /tests/components/watttime/ @bachya diff --git a/homeassistant/components/watergate/__init__.py b/homeassistant/components/watergate/__init__.py new file mode 100644 index 00000000000..1cf38876556 --- /dev/null +++ b/homeassistant/components/watergate/__init__.py @@ -0,0 +1,107 @@ +"""The Watergate integration.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from http import HTTPStatus +import logging + +from watergate_local_api import WatergateLocalApiClient +from watergate_local_api.models import WebhookEvent + +from homeassistant.components.http import HomeAssistantView +from homeassistant.components.webhook import ( + Request, + Response, + async_generate_url, + async_register, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_IP_ADDRESS, CONF_WEBHOOK_ID, Platform +from homeassistant.core import HomeAssistant + +from .const import DOMAIN +from .coordinator import WatergateDataCoordinator + +_LOGGER = logging.getLogger(__name__) + + +PLATFORMS: list[Platform] = [ + Platform.VALVE, +] + +type WatergateConfigEntry = ConfigEntry[WatergateDataCoordinator] + + +async def async_setup_entry(hass: HomeAssistant, entry: WatergateConfigEntry) -> bool: + """Set up Watergate from a config entry.""" + sonic_address = entry.data[CONF_IP_ADDRESS] + webhook_id = entry.data[CONF_WEBHOOK_ID] + + _LOGGER.debug( + "Setting up watergate local api integration for device: IP: %s)", + sonic_address, + ) + + watergate_client = WatergateLocalApiClient( + sonic_address if sonic_address.startswith("http") else f"http://{sonic_address}" + ) + + coordinator = WatergateDataCoordinator(hass, watergate_client) + entry.runtime_data = coordinator + + async_register( + hass, DOMAIN, "Watergate", webhook_id, get_webhook_handler(coordinator) + ) + + _LOGGER.debug("Registered webhook: %s", webhook_id) + + await coordinator.async_config_entry_first_refresh() + + await watergate_client.async_set_webhook_url( + async_generate_url(hass, webhook_id, allow_ip=True, prefer_external=False) + ) + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: WatergateConfigEntry) -> bool: + """Unload a config entry.""" + webhook_id = entry.data[CONF_WEBHOOK_ID] + hass.components.webhook.async_unregister(webhook_id) + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +def get_webhook_handler( + coordinator: WatergateDataCoordinator, +) -> Callable[[HomeAssistant, str, Request], Awaitable[Response | None]]: + """Return webhook handler.""" + + async def async_webhook_handler( + hass: HomeAssistant, webhook_id: str, request: Request + ) -> Response | None: + # Handle http post calls to the path. 
+ if not request.body_exists: + return HomeAssistantView.json( + result="No Body", status_code=HTTPStatus.BAD_REQUEST + ) + + body = await request.json() + + _LOGGER.debug("Received webhook: %s", body) + + data = WebhookEvent.parse_webhook_event(body) + + body_type = body.get("type") + + coordinator_data = coordinator.data + if body_type == Platform.VALVE and coordinator_data: + coordinator_data.valve_state = data.state + + coordinator.async_set_updated_data(coordinator_data) + + return HomeAssistantView.json(result="OK", status_code=HTTPStatus.OK) + + return async_webhook_handler diff --git a/homeassistant/components/watergate/config_flow.py b/homeassistant/components/watergate/config_flow.py new file mode 100644 index 00000000000..de8494053a3 --- /dev/null +++ b/homeassistant/components/watergate/config_flow.py @@ -0,0 +1,62 @@ +"""Config flow for Watergate.""" + +import logging + +import voluptuous as vol +from watergate_local_api.watergate_api import ( + WatergateApiException, + WatergateLocalApiClient, +) + +from homeassistant.components.webhook import async_generate_id as webhook_generate_id +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_IP_ADDRESS, CONF_WEBHOOK_ID + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + +SONIC = "Sonic" +WATERGATE_SCHEMA = vol.Schema( + { + vol.Required(CONF_IP_ADDRESS): str, + } +) + + +class WatergateConfigFlow(ConfigFlow, domain=DOMAIN): + """Watergate config flow.""" + + async def async_step_user( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: + """Handle a flow initiated by the user.""" + errors: dict[str, str] = {} + if user_input is not None: + watergate_client = WatergateLocalApiClient( + self.prepare_ip_address(user_input[CONF_IP_ADDRESS]) + ) + try: + state = await watergate_client.async_get_device_state() + except WatergateApiException as exception: + _LOGGER.error("Error connecting to Watergate device: %s", exception) + errors[CONF_IP_ADDRESS] = "cannot_connect" + else: + if state is None: + _LOGGER.error("Device state returned as None") + errors[CONF_IP_ADDRESS] = "cannot_connect" + else: + await self.async_set_unique_id(state.serial_number) + self._abort_if_unique_id_configured() + return self.async_create_entry( + data={**user_input, CONF_WEBHOOK_ID: webhook_generate_id()}, + title=SONIC, + ) + + return self.async_show_form( + step_id="user", data_schema=WATERGATE_SCHEMA, errors=errors + ) + + def prepare_ip_address(self, ip_address: str) -> str: + """Prepare the IP address for the Watergate device.""" + return ip_address if ip_address.startswith("http") else f"http://{ip_address}" diff --git a/homeassistant/components/watergate/const.py b/homeassistant/components/watergate/const.py new file mode 100644 index 00000000000..22a14330af9 --- /dev/null +++ b/homeassistant/components/watergate/const.py @@ -0,0 +1,5 @@ +"""Constants for the Watergate integration.""" + +DOMAIN = "watergate" + +MANUFACTURER = "Watergate" diff --git a/homeassistant/components/watergate/coordinator.py b/homeassistant/components/watergate/coordinator.py new file mode 100644 index 00000000000..c0b87feed30 --- /dev/null +++ b/homeassistant/components/watergate/coordinator.py @@ -0,0 +1,35 @@ +"""Coordinator for Watergate API.""" + +from datetime import timedelta +import logging + +from watergate_local_api import WatergateApiException, WatergateLocalApiClient +from watergate_local_api.models import DeviceState + +from homeassistant.core import HomeAssistant +from 
homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +class WatergateDataCoordinator(DataUpdateCoordinator[DeviceState]): + """Class to manage fetching watergate data.""" + + def __init__(self, hass: HomeAssistant, api: WatergateLocalApiClient) -> None: + """Initialize.""" + super().__init__( + hass, + _LOGGER, + name=DOMAIN, + update_interval=timedelta(minutes=2), + ) + self.api = api + + async def _async_update_data(self) -> DeviceState: + try: + state = await self.api.async_get_device_state() + except WatergateApiException as exc: + raise UpdateFailed from exc + return state diff --git a/homeassistant/components/watergate/entity.py b/homeassistant/components/watergate/entity.py new file mode 100644 index 00000000000..977a7fbedb4 --- /dev/null +++ b/homeassistant/components/watergate/entity.py @@ -0,0 +1,30 @@ +"""Watergate Base Entity Definition.""" + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN, MANUFACTURER +from .coordinator import WatergateDataCoordinator + + +class WatergateEntity(CoordinatorEntity[WatergateDataCoordinator]): + """Define a base Watergate entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: WatergateDataCoordinator, + entity_name: str, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self._api_client = coordinator.api + self._attr_unique_id = f"{coordinator.data.serial_number}.{entity_name}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, coordinator.data.serial_number)}, + name="Sonic", + serial_number=coordinator.data.serial_number, + manufacturer=MANUFACTURER, + sw_version=coordinator.data.firmware_version if coordinator.data else None, + ) diff --git a/homeassistant/components/watergate/manifest.json b/homeassistant/components/watergate/manifest.json new file mode 100644 index 00000000000..46a80e15671 --- /dev/null +++ b/homeassistant/components/watergate/manifest.json @@ -0,0 +1,11 @@ +{ + "domain": "watergate", + "name": "Watergate", + "codeowners": ["@adam-the-hero"], + "config_flow": true, + "dependencies": ["http", "webhook"], + "documentation": "https://www.home-assistant.io/integrations/watergate", + "iot_class": "local_push", + "quality_scale": "bronze", + "requirements": ["watergate-local-api==2024.4.1"] +} diff --git a/homeassistant/components/watergate/quality_scale.yaml b/homeassistant/components/watergate/quality_scale.yaml new file mode 100644 index 00000000000..c6027f6a548 --- /dev/null +++ b/homeassistant/components/watergate/quality_scale.yaml @@ -0,0 +1,43 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + Entities of this integration does not explicitly subscribe to events. 
+ entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + # Silver + config-entry-unloading: done + log-when-unavailable: todo + entity-unavailable: done + action-exceptions: done + reauthentication-flow: + status: exempt + comment: | + This integration does not require authentication. + parallel-updates: done + test-coverage: done + integration-owner: done + docs-installation-parameters: todo + docs-configuration-parameters: todo diff --git a/homeassistant/components/watergate/strings.json b/homeassistant/components/watergate/strings.json new file mode 100644 index 00000000000..2a75c4d103d --- /dev/null +++ b/homeassistant/components/watergate/strings.json @@ -0,0 +1,21 @@ +{ + "config": { + "step": { + "user": { + "data": { + "ip_address": "[%key:common::config_flow::data::ip%]" + }, + "title": "Configure Watergate device", + "data_description": { + "ip_address": "Provide an IP address of your Watergate device." + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + } +} diff --git a/homeassistant/components/watergate/valve.py b/homeassistant/components/watergate/valve.py new file mode 100644 index 00000000000..aecaf3fbca9 --- /dev/null +++ b/homeassistant/components/watergate/valve.py @@ -0,0 +1,82 @@ +"""Support for Watergate Valve.""" + +from homeassistant.components.sensor import Any, HomeAssistant +from homeassistant.components.valve import ( + ValveDeviceClass, + ValveEntity, + ValveEntityFeature, + ValveState, +) +from homeassistant.core import callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import WatergateConfigEntry +from .coordinator import WatergateDataCoordinator +from .entity import WatergateEntity + +ENTITY_NAME = "valve" +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: WatergateConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up all entries for Watergate Platform.""" + + async_add_entities([SonicValve(config_entry.runtime_data)]) + + +class SonicValve(WatergateEntity, ValveEntity): + """Define a Sonic Valve entity.""" + + _attr_supported_features = ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE + _attr_reports_position = False + _valve_state: str | None = None + _attr_device_class = ValveDeviceClass.WATER + _attr_name = None + + def __init__( + self, + coordinator: WatergateDataCoordinator, + ) -> None: + """Initialize the sensor.""" + super().__init__(coordinator, ENTITY_NAME) + self._valve_state = coordinator.data.valve_state if coordinator.data else None + + @property + def is_closed(self) -> bool: + """Return if the valve is closed or not.""" + return self._valve_state == ValveState.CLOSED + + @property + def is_opening(self) -> bool | None: + """Return if the valve is opening or not.""" + return self._valve_state == ValveState.OPENING + + @property + def is_closing(self) -> bool | None: + """Return if the valve is closing or not.""" + return self._valve_state == ValveState.CLOSING + + @callback + def _handle_coordinator_update(self) -> None: + """Handle data update.""" + self._attr_available = self.coordinator.data is not None + self._valve_state = ( + self.coordinator.data.valve_state if self.coordinator.data else None + ) + self.async_write_ha_state() + + async def async_open_valve(self, **kwargs: Any) -> None: + """Open the valve.""" + await self._api_client.async_set_valve_state(ValveState.OPEN) + self._valve_state = ValveState.OPENING + self.async_write_ha_state() + + async def async_close_valve(self, **kwargs: Any) -> None: + """Close the valve.""" + await self._api_client.async_set_valve_state(ValveState.CLOSED) + self._valve_state = ValveState.CLOSING + self.async_write_ha_state() diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 37ffc8868fd..e710480caaa 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -665,6 +665,7 @@ FLOWS = { "wake_on_lan", "wallbox", "waqi", + "watergate", "watttime", "waze_travel_time", "weatherflow", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index b1b52332045..d708660b32b 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -6892,6 +6892,12 @@ "config_flow": false, "iot_class": "cloud_polling" }, + "watergate": { + "name": "Watergate", + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_push" + }, "watttime": { "name": "WattTime", "integration_type": "service", diff --git a/requirements_all.txt b/requirements_all.txt index 87806eed8bd..18099e9f462 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2977,6 +2977,9 @@ watchdog==2.3.1 # homeassistant.components.waterfurnace waterfurnace==1.1.0 +# homeassistant.components.watergate +watergate-local-api==2024.4.1 + # homeassistant.components.weatherflow_cloud weatherflow4py==1.0.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a0f2d85d3de..edddf1256bf 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2378,6 +2378,9 
@@ wallbox==0.7.0 # homeassistant.components.folder_watcher watchdog==2.3.1 +# homeassistant.components.watergate +watergate-local-api==2024.4.1 + # homeassistant.components.weatherflow_cloud weatherflow4py==1.0.6 diff --git a/tests/components/watergate/__init__.py b/tests/components/watergate/__init__.py new file mode 100644 index 00000000000..c69129e4720 --- /dev/null +++ b/tests/components/watergate/__init__.py @@ -0,0 +1,11 @@ +"""Tests for the Watergate integration.""" + +from homeassistant.core import HomeAssistant + + +async def init_integration(hass: HomeAssistant, mock_entry) -> None: + """Set up the Watergate integration in Home Assistant.""" + mock_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/watergate/conftest.py b/tests/components/watergate/conftest.py new file mode 100644 index 00000000000..d29b90431a4 --- /dev/null +++ b/tests/components/watergate/conftest.py @@ -0,0 +1,77 @@ +"""Fixtures for watergate platform tests.""" + +from collections.abc import Generator + +import pytest + +from homeassistant.components.watergate.const import DOMAIN +from homeassistant.const import CONF_IP_ADDRESS + +from .const import ( + DEFAULT_DEVICE_STATE, + DEFAULT_SERIAL_NUMBER, + MOCK_CONFIG, + MOCK_WEBHOOK_ID, +) + +from tests.common import AsyncMock, MockConfigEntry, patch + + +@pytest.fixture +def mock_watergate_client() -> Generator[AsyncMock]: + """Fixture to mock WatergateLocalApiClient.""" + with ( + patch( + "homeassistant.components.watergate.WatergateLocalApiClient", + autospec=True, + ) as mock_client_main, + patch( + "homeassistant.components.watergate.config_flow.WatergateLocalApiClient", + new=mock_client_main, + ), + ): + mock_client_instance = mock_client_main.return_value + + mock_client_instance.async_get_device_state = AsyncMock( + return_value=DEFAULT_DEVICE_STATE + ) + yield mock_client_instance + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.watergate.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_webhook_id_generation() -> Generator[None]: + """Fixture to mock webhook_id generation.""" + with patch( + "homeassistant.components.watergate.config_flow.webhook_generate_id", + return_value=MOCK_WEBHOOK_ID, + ): + yield + + +@pytest.fixture +def mock_entry() -> MockConfigEntry: + """Create full mocked entry to be used in config_flow tests.""" + return MockConfigEntry( + domain=DOMAIN, + title="Sonic", + data=MOCK_CONFIG, + entry_id="12345", + unique_id=DEFAULT_SERIAL_NUMBER, + ) + + +@pytest.fixture +def user_input() -> dict[str, str]: + """Create user input for config_flow tests.""" + return { + CONF_IP_ADDRESS: "192.168.1.100", + } diff --git a/tests/components/watergate/const.py b/tests/components/watergate/const.py new file mode 100644 index 00000000000..4297b3321ad --- /dev/null +++ b/tests/components/watergate/const.py @@ -0,0 +1,27 @@ +"""Constants for the Watergate tests.""" + +from watergate_local_api.models import DeviceState + +from homeassistant.const import CONF_IP_ADDRESS, CONF_NAME, CONF_WEBHOOK_ID + +MOCK_WEBHOOK_ID = "webhook_id" + +MOCK_CONFIG = { + CONF_NAME: "Sonic", + CONF_IP_ADDRESS: "http://localhost", + CONF_WEBHOOK_ID: MOCK_WEBHOOK_ID, +} + +DEFAULT_SERIAL_NUMBER = "a63182948ce2896a" + +DEFAULT_DEVICE_STATE = DeviceState( + "open", + "on", + True, + True, + "battery", + 
"1.0.0", + 100, + {"volume": 1.2, "duration": 100}, + DEFAULT_SERIAL_NUMBER, +) diff --git a/tests/components/watergate/snapshots/test_valve.ambr b/tests/components/watergate/snapshots/test_valve.ambr new file mode 100644 index 00000000000..1df1a0c748d --- /dev/null +++ b/tests/components/watergate/snapshots/test_valve.ambr @@ -0,0 +1,16 @@ +# serializer version: 1 +# name: test_change_valve_state_snapshot + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'water', + 'friendly_name': 'Sonic', + 'supported_features': , + }), + 'context': , + 'entity_id': 'valve.sonic', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- diff --git a/tests/components/watergate/test_config_flow.py b/tests/components/watergate/test_config_flow.py new file mode 100644 index 00000000000..176047f5e23 --- /dev/null +++ b/tests/components/watergate/test_config_flow.py @@ -0,0 +1,107 @@ +"""Tests for the Watergate config flow.""" + +from collections.abc import Generator + +import pytest +from watergate_local_api import WatergateApiException + +from homeassistant.components.watergate.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_IP_ADDRESS, CONF_WEBHOOK_ID +from homeassistant.data_entry_flow import FlowResultType + +from .const import DEFAULT_DEVICE_STATE, DEFAULT_SERIAL_NUMBER, MOCK_WEBHOOK_ID + +from tests.common import AsyncMock, HomeAssistant, MockConfigEntry + + +async def test_step_user_form( + hass: HomeAssistant, + mock_watergate_client: Generator[AsyncMock], + mock_webhook_id_generation: Generator[None], + user_input: dict[str, str], +) -> None: + """Test checking if registration form works end to end.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert CONF_IP_ADDRESS in result["data_schema"].schema + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Sonic" + assert result["data"] == {**user_input, CONF_WEBHOOK_ID: MOCK_WEBHOOK_ID} + assert result["result"].unique_id == DEFAULT_SERIAL_NUMBER + + +@pytest.mark.parametrize( + "client_result", + [AsyncMock(return_value=None), AsyncMock(side_effect=WatergateApiException)], +) +async def test_step_user_form_with_exception( + hass: HomeAssistant, + mock_watergate_client: Generator[AsyncMock], + user_input: dict[str, str], + client_result: AsyncMock, + mock_webhook_id_generation: Generator[None], +) -> None: + """Test checking if errors will be displayed when Exception is thrown while checking device state.""" + mock_watergate_client.async_get_device_state = client_result + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"][CONF_IP_ADDRESS] == "cannot_connect" + + mock_watergate_client.async_get_device_state = AsyncMock( + return_value=DEFAULT_DEVICE_STATE + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Sonic" + assert result["data"] 
== {**user_input, CONF_WEBHOOK_ID: MOCK_WEBHOOK_ID} + + +async def test_abort_if_id_is_not_unique( + hass: HomeAssistant, + mock_watergate_client: Generator[AsyncMock], + mock_entry: MockConfigEntry, + user_input: dict[str, str], +) -> None: + """Test checking if we will inform user that this entity is already registered.""" + mock_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert CONF_IP_ADDRESS in result["data_schema"].schema + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/watergate/test_init.py b/tests/components/watergate/test_init.py new file mode 100644 index 00000000000..71eb99d6470 --- /dev/null +++ b/tests/components/watergate/test_init.py @@ -0,0 +1,81 @@ +"""Tests for the Watergate integration init module.""" + +from collections.abc import Generator +from unittest.mock import patch + +from homeassistant.components.valve import ValveState +from homeassistant.components.watergate.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . import init_integration +from .const import MOCK_WEBHOOK_ID + +from tests.common import ANY, AsyncMock, MockConfigEntry +from tests.typing import ClientSessionGenerator + + +async def test_async_setup_entry( + hass: HomeAssistant, + mock_entry: MockConfigEntry, + mock_watergate_client: Generator[AsyncMock], +) -> None: + """Test setting up the Watergate integration.""" + hass.config.internal_url = "http://hassio.local" + + with ( + patch("homeassistant.components.watergate.async_register") as mock_webhook, + ): + await init_integration(hass, mock_entry) + + assert mock_entry.state is ConfigEntryState.LOADED + + mock_webhook.assert_called_once_with( + hass, + DOMAIN, + "Watergate", + MOCK_WEBHOOK_ID, + ANY, + ) + mock_watergate_client.async_set_webhook_url.assert_called_once_with( + f"http://hassio.local/api/webhook/{MOCK_WEBHOOK_ID}" + ) + mock_watergate_client.async_get_device_state.assert_called_once() + + +async def test_handle_webhook( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + mock_entry: MockConfigEntry, + mock_watergate_client: Generator[AsyncMock], +) -> None: + """Test handling webhook events.""" + await init_integration(hass, mock_entry) + + entity_id = "valve.sonic" + + registered_entity = hass.states.get(entity_id) + assert registered_entity + assert registered_entity.state == ValveState.OPEN + + valve_change_data = { + "type": "valve", + "data": {"state": "closed"}, + } + client = await hass_client_no_auth() + await client.post(f"/api/webhook/{MOCK_WEBHOOK_ID}", json=valve_change_data) + + await hass.async_block_till_done() # Ensure the webhook is processed + + assert hass.states.get(entity_id).state == ValveState.CLOSED + + valve_change_data = { + "type": "valve", + "data": {"state": "open"}, + } + + await client.post(f"/api/webhook/{MOCK_WEBHOOK_ID}", json=valve_change_data) + + await hass.async_block_till_done() # Ensure the webhook is processed + + assert hass.states.get(entity_id).state == ValveState.OPEN diff --git a/tests/components/watergate/test_valve.py b/tests/components/watergate/test_valve.py new file mode 100644 index 00000000000..b22f6967665 --- /dev/null +++ 
b/tests/components/watergate/test_valve.py
@@ -0,0 +1,72 @@
+"""Tests for the Watergate valve platform."""
+
+from collections.abc import Generator
+
+from syrupy.assertion import SnapshotAssertion
+
+from homeassistant.components.valve import DOMAIN as VALVE_DOMAIN, ValveState
+from homeassistant.const import ATTR_ENTITY_ID, SERVICE_CLOSE_VALVE, SERVICE_OPEN_VALVE
+from homeassistant.core import HomeAssistant
+
+from . import init_integration
+
+from tests.common import AsyncMock, MockConfigEntry
+
+
+async def test_change_valve_state_snapshot(
+    hass: HomeAssistant,
+    snapshot: SnapshotAssertion,
+    mock_watergate_client: Generator[AsyncMock],
+    mock_entry: MockConfigEntry,
+) -> None:
+    """Test the valve entity state against the snapshot."""
+    await init_integration(hass, mock_entry)
+
+    entity_id = "valve.sonic"
+
+    registered_entity = hass.states.get(entity_id)
+    assert registered_entity
+    assert registered_entity.state == ValveState.OPEN
+    assert registered_entity == snapshot
+
+
+async def test_change_valve_state(
+    hass: HomeAssistant,
+    mock_watergate_client: Generator[AsyncMock],
+    mock_entry: MockConfigEntry,
+) -> None:
+    """Test opening and closing the valve."""
+    await init_integration(hass, mock_entry)
+
+    entity_id = "valve.sonic"
+
+    registered_entity = hass.states.get(entity_id)
+    assert registered_entity
+    assert registered_entity.state == ValveState.OPEN
+
+    await hass.services.async_call(
+        VALVE_DOMAIN,
+        SERVICE_CLOSE_VALVE,
+        {ATTR_ENTITY_ID: entity_id},
+        blocking=True,
+    )
+
+    registered_entity = hass.states.get(entity_id)
+    assert registered_entity
+    assert registered_entity.state == ValveState.CLOSING
+
+    mock_watergate_client.async_set_valve_state.assert_called_once_with("closed")
+    mock_watergate_client.async_set_valve_state.reset_mock()
+
+    await hass.services.async_call(
+        VALVE_DOMAIN,
+        SERVICE_OPEN_VALVE,
+        {ATTR_ENTITY_ID: entity_id},
+        blocking=True,
+    )
+
+    registered_entity = hass.states.get(entity_id)
+    assert registered_entity
+    assert registered_entity.state == ValveState.OPENING
+
+    mock_watergate_client.async_set_valve_state.assert_called_once_with("open")

From 7ba50385091234fd95bae390ace3835931a56bbe Mon Sep 17 00:00:00 2001
From: epenet <6771947+epenet@users.noreply.github.com>
Date: Mon, 9 Dec 2024 20:15:46 +0100
Subject: [PATCH 0399/1198] Remove YAML support from cert_expiry (#132350)

* Deprecate yaml import in cert_expiry

* Simplify

* Do full cleanup

* Cleanup more

---
 .../components/cert_expiry/config_flow.py     |   7 -
 .../components/cert_expiry/sensor.py          |  53 +------
 .../cert_expiry/test_config_flow.py           | 129 +-----------------
 tests/components/cert_expiry/test_init.py     |  37 +----
 4 files changed, 7 insertions(+), 219 deletions(-)

diff --git a/homeassistant/components/cert_expiry/config_flow.py b/homeassistant/components/cert_expiry/config_flow.py
index 3fbb1c08c9b..c351435a73e 100644
--- a/homeassistant/components/cert_expiry/config_flow.py
+++ b/homeassistant/components/cert_expiry/config_flow.py
@@ -94,10 +94,3 @@ class CertexpiryConfigFlow(ConfigFlow, domain=DOMAIN):
             ),
             errors=self._errors,
         )
-
-    async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
-        """Import a config entry.
- - Only host was required in the yaml file all other fields are optional - """ - return await self.async_step_user(import_data) diff --git a/homeassistant/components/cert_expiry/sensor.py b/homeassistant/components/cert_expiry/sensor.py index a6f163b51be..4fd0846f0f3 100644 --- a/homeassistant/components/cert_expiry/sensor.py +++ b/homeassistant/components/cert_expiry/sensor.py @@ -2,63 +2,18 @@ from __future__ import annotations -from datetime import datetime, timedelta +from datetime import datetime -import voluptuous as vol - -from homeassistant.components.sensor import ( - PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, - SensorDeviceClass, - SensorEntity, -) -from homeassistant.config_entries import SOURCE_IMPORT -from homeassistant.const import CONF_HOST, CONF_PORT, EVENT_HOMEASSISTANT_START -from homeassistant.core import Event, HomeAssistant, callback -import homeassistant.helpers.config_validation as cv +from homeassistant.components.sensor import SensorDeviceClass, SensorEntity +from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.event import async_call_later -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . import CertExpiryConfigEntry -from .const import DEFAULT_PORT, DOMAIN +from .const import DOMAIN from .coordinator import CertExpiryDataUpdateCoordinator from .entity import CertExpiryEntity -SCAN_INTERVAL = timedelta(hours=12) - -PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_HOST): cv.string, - vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, - } -) - - -async def async_setup_platform( - hass: HomeAssistant, - config: ConfigType, - async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up certificate expiry sensor.""" - - @callback - def schedule_import(_: Event) -> None: - """Schedule delayed import after HA is fully started.""" - async_call_later(hass, 10, do_import) - - @callback - def do_import(_: datetime) -> None: - """Process YAML import.""" - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=dict(config) - ) - ) - - hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, schedule_import) - async def async_setup_entry( hass: HomeAssistant, diff --git a/tests/components/cert_expiry/test_config_flow.py b/tests/components/cert_expiry/test_config_flow.py index 3fd696f5953..907071d8b1f 100644 --- a/tests/components/cert_expiry/test_config_flow.py +++ b/tests/components/cert_expiry/test_config_flow.py @@ -7,13 +7,12 @@ from unittest.mock import patch import pytest from homeassistant import config_entries -from homeassistant.components.cert_expiry.const import DEFAULT_PORT, DOMAIN -from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT +from homeassistant.components.cert_expiry.const import DOMAIN +from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from .const import HOST, PORT -from .helpers import future_timestamp from tests.common import MockConfigEntry @@ -64,122 +63,6 @@ async def test_user_with_bad_cert(hass: HomeAssistant) -> None: assert result["result"].unique_id == f"{HOST}:{PORT}" -async def test_import_host_only(hass: HomeAssistant) -> None: - """Test import with host only.""" - with ( - patch( - 
"homeassistant.components.cert_expiry.config_flow.get_cert_expiry_timestamp" - ), - patch( - "homeassistant.components.cert_expiry.coordinator.get_cert_expiry_timestamp", - return_value=future_timestamp(1), - ), - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_HOST: HOST}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == HOST - assert result["data"][CONF_HOST] == HOST - assert result["data"][CONF_PORT] == DEFAULT_PORT - assert result["result"].unique_id == f"{HOST}:{DEFAULT_PORT}" - - -async def test_import_host_and_port(hass: HomeAssistant) -> None: - """Test import with host and port.""" - with ( - patch( - "homeassistant.components.cert_expiry.config_flow.get_cert_expiry_timestamp" - ), - patch( - "homeassistant.components.cert_expiry.coordinator.get_cert_expiry_timestamp", - return_value=future_timestamp(1), - ), - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_HOST: HOST, CONF_PORT: PORT}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == HOST - assert result["data"][CONF_HOST] == HOST - assert result["data"][CONF_PORT] == PORT - assert result["result"].unique_id == f"{HOST}:{PORT}" - - -async def test_import_non_default_port(hass: HomeAssistant) -> None: - """Test import with host and non-default port.""" - with ( - patch( - "homeassistant.components.cert_expiry.config_flow.get_cert_expiry_timestamp" - ), - patch( - "homeassistant.components.cert_expiry.coordinator.get_cert_expiry_timestamp", - return_value=future_timestamp(1), - ), - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_HOST: HOST, CONF_PORT: 888}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == f"{HOST}:888" - assert result["data"][CONF_HOST] == HOST - assert result["data"][CONF_PORT] == 888 - assert result["result"].unique_id == f"{HOST}:888" - - -async def test_import_with_name(hass: HomeAssistant) -> None: - """Test import with name (deprecated).""" - with ( - patch( - "homeassistant.components.cert_expiry.config_flow.get_cert_expiry_timestamp" - ), - patch( - "homeassistant.components.cert_expiry.coordinator.get_cert_expiry_timestamp", - return_value=future_timestamp(1), - ), - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_NAME: "legacy", CONF_HOST: HOST, CONF_PORT: PORT}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == HOST - assert result["data"][CONF_HOST] == HOST - assert result["data"][CONF_PORT] == PORT - assert result["result"].unique_id == f"{HOST}:{PORT}" - - -async def test_bad_import(hass: HomeAssistant) -> None: - """Test import step.""" - with patch( - "homeassistant.components.cert_expiry.helper.async_get_cert", - side_effect=ConnectionRefusedError(), - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_HOST: HOST}, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "import_failed" - - async def test_abort_if_already_setup(hass: HomeAssistant) -> None: 
"""Test we abort if the cert is already setup.""" MockConfigEntry( @@ -188,14 +71,6 @@ async def test_abort_if_already_setup(hass: HomeAssistant) -> None: unique_id=f"{HOST}:{PORT}", ).add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_HOST: HOST, CONF_PORT: PORT}, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, diff --git a/tests/components/cert_expiry/test_init.py b/tests/components/cert_expiry/test_init.py index e2c333cc6f3..5ba63ad1af1 100644 --- a/tests/components/cert_expiry/test_init.py +++ b/tests/components/cert_expiry/test_init.py @@ -1,59 +1,24 @@ """Tests for Cert Expiry setup.""" -from datetime import timedelta from unittest.mock import patch from freezegun import freeze_time from homeassistant.components.cert_expiry.const import DOMAIN -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( CONF_HOST, CONF_PORT, - EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STARTED, STATE_UNAVAILABLE, ) from homeassistant.core import CoreState, HomeAssistant from homeassistant.setup import async_setup_component -import homeassistant.util.dt as dt_util from .const import HOST, PORT from .helpers import future_timestamp, static_datetime -from tests.common import MockConfigEntry, async_fire_time_changed - - -async def test_setup_with_config(hass: HomeAssistant) -> None: - """Test setup component with config.""" - assert hass.state is CoreState.running - - config = { - SENSOR_DOMAIN: [ - {"platform": DOMAIN, CONF_HOST: HOST, CONF_PORT: PORT}, - {"platform": DOMAIN, CONF_HOST: HOST, CONF_PORT: 888}, - ], - } - - with ( - patch( - "homeassistant.components.cert_expiry.config_flow.get_cert_expiry_timestamp" - ), - patch( - "homeassistant.components.cert_expiry.coordinator.get_cert_expiry_timestamp", - return_value=future_timestamp(1), - ), - ): - assert await async_setup_component(hass, SENSOR_DOMAIN, config) is True - await hass.async_block_till_done() - hass.bus.async_fire(EVENT_HOMEASSISTANT_START) - await hass.async_block_till_done() - next_update = dt_util.utcnow() + timedelta(seconds=20) - async_fire_time_changed(hass, next_update) - await hass.async_block_till_done(wait_background_tasks=True) - - assert len(hass.config_entries.async_entries(DOMAIN)) == 2 +from tests.common import MockConfigEntry async def test_update_unique_id(hass: HomeAssistant) -> None: From e91cb99512b01e8c1c7a262d030386c4be699122 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Mon, 9 Dec 2024 20:18:21 +0100 Subject: [PATCH 0400/1198] Improve name and description of Include list, fix `holidays` keyword name (#132188) * Improve description of Include list, fix the keyword name * Use "Days to include / exclude" to make more user-friendly * Reworded both descriptions as suggested * Updated up the exclude description, re-added reference to docs --- homeassistant/components/workday/strings.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/workday/strings.json b/homeassistant/components/workday/strings.json index e74dc0160d9..87fa294dbba 100644 --- a/homeassistant/components/workday/strings.json +++ b/homeassistant/components/workday/strings.json @@ -14,9 +14,9 @@ "options": { "description": "Set additional 
options for {name} configured for country {country}", "data": { - "excludes": "Excludes", + "excludes": "Days to exclude", "days_offset": "Offset", - "workdays": "Workdays", + "workdays": "Days to include", "add_holidays": "Add holidays", "remove_holidays": "Remove Holidays", "province": "Subdivision of country", @@ -24,9 +24,9 @@ "category": "Additional category as holiday" }, "data_description": { - "excludes": "List of workdays to exclude, notice the keyword `holiday` and read the documentation on how to use it correctly", + "excludes": "Select which weekdays to exclude as workdays.\nThe key `holidays` adds those for the configured country, customizable by all the settings below. Read the documentation on how to use them correctly.", "days_offset": "Days offset from current day", - "workdays": "List of working days", + "workdays": "Select which weekdays to include as possible workdays.", "add_holidays": "Add custom holidays as YYYY-MM-DD or as range using `,` as separator", "remove_holidays": "Remove holidays as YYYY-MM-DD, as range using `,` as separator or by using partial of name", "province": "State, territory, province or region of country", From d3fab7d87acfa1a696ae10440ef502ff9c945afb Mon Sep 17 00:00:00 2001 From: Assaf Inbal Date: Mon, 9 Dec 2024 21:19:15 +0200 Subject: [PATCH 0401/1198] Add Ituran integration (#129067) --- CODEOWNERS | 2 + homeassistant/components/ituran/__init__.py | 28 +++ .../components/ituran/config_flow.py | 109 +++++++++ homeassistant/components/ituran/const.py | 13 ++ .../components/ituran/coordinator.py | 76 +++++++ .../components/ituran/device_tracker.py | 49 ++++ homeassistant/components/ituran/entity.py | 47 ++++ homeassistant/components/ituran/icons.json | 9 + homeassistant/components/ituran/manifest.json | 10 + .../components/ituran/quality_scale.yaml | 92 ++++++++ homeassistant/components/ituran/strings.json | 41 ++++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 6 + requirements_all.txt | 3 + requirements_test_all.txt | 3 + tests/components/ituran/__init__.py | 13 ++ tests/components/ituran/conftest.py | 83 +++++++ tests/components/ituran/const.py | 24 ++ .../ituran/snapshots/test_device_tracker.ambr | 51 +++++ .../ituran/snapshots/test_init.ambr | 35 +++ tests/components/ituran/test_config_flow.py | 211 ++++++++++++++++++ .../components/ituran/test_device_tracker.py | 61 +++++ tests/components/ituran/test_init.py | 113 ++++++++++ 23 files changed, 1080 insertions(+) create mode 100644 homeassistant/components/ituran/__init__.py create mode 100644 homeassistant/components/ituran/config_flow.py create mode 100644 homeassistant/components/ituran/const.py create mode 100644 homeassistant/components/ituran/coordinator.py create mode 100644 homeassistant/components/ituran/device_tracker.py create mode 100644 homeassistant/components/ituran/entity.py create mode 100644 homeassistant/components/ituran/icons.json create mode 100644 homeassistant/components/ituran/manifest.json create mode 100644 homeassistant/components/ituran/quality_scale.yaml create mode 100644 homeassistant/components/ituran/strings.json create mode 100644 tests/components/ituran/__init__.py create mode 100644 tests/components/ituran/conftest.py create mode 100644 tests/components/ituran/const.py create mode 100644 tests/components/ituran/snapshots/test_device_tracker.ambr create mode 100644 tests/components/ituran/snapshots/test_init.ambr create mode 100644 tests/components/ituran/test_config_flow.py create mode 100644 
tests/components/ituran/test_device_tracker.py
 create mode 100644 tests/components/ituran/test_init.py

diff --git a/CODEOWNERS b/CODEOWNERS
index 16e9c7d8062..3a407308275 100644
--- a/CODEOWNERS
+++ b/CODEOWNERS
@@ -753,6 +753,8 @@ build.json @home-assistant/supervisor
 /tests/components/ista_ecotrend/ @tr4nt0r
 /homeassistant/components/isy994/ @bdraco @shbatm
 /tests/components/isy994/ @bdraco @shbatm
+/homeassistant/components/ituran/ @shmuelzon
+/tests/components/ituran/ @shmuelzon
 /homeassistant/components/izone/ @Swamp-Ig
 /tests/components/izone/ @Swamp-Ig
 /homeassistant/components/jellyfin/ @j-stienstra @ctalkington
diff --git a/homeassistant/components/ituran/__init__.py b/homeassistant/components/ituran/__init__.py
new file mode 100644
index 00000000000..b0a26cf7db2
--- /dev/null
+++ b/homeassistant/components/ituran/__init__.py
@@ -0,0 +1,28 @@
+"""The Ituran integration."""
+
+from __future__ import annotations
+
+from homeassistant.const import Platform
+from homeassistant.core import HomeAssistant
+
+from .coordinator import IturanConfigEntry, IturanDataUpdateCoordinator
+
+PLATFORMS: list[Platform] = [
+    Platform.DEVICE_TRACKER,
+]
+
+
+async def async_setup_entry(hass: HomeAssistant, entry: IturanConfigEntry) -> bool:
+    """Set up Ituran from a config entry."""
+
+    coordinator = IturanDataUpdateCoordinator(hass, entry=entry)
+    await coordinator.async_config_entry_first_refresh()
+    entry.runtime_data = coordinator
+    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
+
+    return True
+
+
+async def async_unload_entry(hass: HomeAssistant, entry: IturanConfigEntry) -> bool:
+    """Unload a config entry."""
+    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
diff --git a/homeassistant/components/ituran/config_flow.py b/homeassistant/components/ituran/config_flow.py
new file mode 100644
index 00000000000..48e898a9d0a
--- /dev/null
+++ b/homeassistant/components/ituran/config_flow.py
@@ -0,0 +1,109 @@
+"""Config flow for Ituran integration."""
+
+from __future__ import annotations
+
+import logging
+from typing import Any
+
+from pyituran import Ituran
+from pyituran.exceptions import IturanApiError, IturanAuthError
+import voluptuous as vol
+
+from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
+
+from .const import (
+    CONF_ID_OR_PASSPORT,
+    CONF_MOBILE_ID,
+    CONF_OTP,
+    CONF_PHONE_NUMBER,
+    DOMAIN,
+)
+
+_LOGGER = logging.getLogger(__name__)
+
+STEP_USER_DATA_SCHEMA = vol.Schema(
+    {
+        vol.Required(CONF_ID_OR_PASSPORT): str,
+        vol.Required(CONF_PHONE_NUMBER): str,
+    }
+)
+
+STEP_OTP_DATA_SCHEMA = vol.Schema(
+    {
+        vol.Required(CONF_OTP): str,
+    }
+)
+
+
+class IturanConfigFlow(ConfigFlow, domain=DOMAIN):
+    """Handle a config flow for Ituran."""
+
+    _user_info: dict[str, Any]
+
+    async def async_step_user(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Handle the initial step."""
+        errors: dict[str, str] = {}
+        if user_input is not None:
+            await self.async_set_unique_id(user_input[CONF_ID_OR_PASSPORT])
+            self._abort_if_unique_id_configured()
+
+            ituran = Ituran(
+                user_input[CONF_ID_OR_PASSPORT],
+                user_input[CONF_PHONE_NUMBER],
+            )
+            user_input[CONF_MOBILE_ID] = ituran.mobile_id
+            try:
+                authenticated = await ituran.is_authenticated()
+                if not authenticated:
+                    await ituran.request_otp()
+            except IturanApiError:
+                errors["base"] = "cannot_connect"
+            except IturanAuthError:
+                errors["base"] = "invalid_auth"
+            except Exception:
+                _LOGGER.exception("Unexpected exception")
+                errors["base"] =
"unknown" + else: + if authenticated: + return self.async_create_entry( + title=f"Ituran {user_input[CONF_ID_OR_PASSPORT]}", + data=user_input, + ) + self._user_info = user_input + return await self.async_step_otp() + + return self.async_show_form( + step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + ) + + async def async_step_otp( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the inial step.""" + errors: dict[str, str] = {} + if user_input is not None: + ituran = Ituran( + self._user_info[CONF_ID_OR_PASSPORT], + self._user_info[CONF_PHONE_NUMBER], + self._user_info[CONF_MOBILE_ID], + ) + try: + await ituran.authenticate(user_input[CONF_OTP]) + except IturanApiError: + errors["base"] = "cannot_connect" + except IturanAuthError: + errors["base"] = "invalid_otp" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + return self.async_create_entry( + title=f"Ituran {self._user_info[CONF_ID_OR_PASSPORT]}", + data=self._user_info, + ) + + return self.async_show_form( + step_id="otp", data_schema=STEP_OTP_DATA_SCHEMA, errors=errors + ) diff --git a/homeassistant/components/ituran/const.py b/homeassistant/components/ituran/const.py new file mode 100644 index 00000000000..b17271490ee --- /dev/null +++ b/homeassistant/components/ituran/const.py @@ -0,0 +1,13 @@ +"""Constants for the Ituran integration.""" + +from datetime import timedelta +from typing import Final + +DOMAIN = "ituran" + +CONF_ID_OR_PASSPORT: Final = "id_or_passport" +CONF_PHONE_NUMBER: Final = "phone_number" +CONF_MOBILE_ID: Final = "mobile_id" +CONF_OTP: Final = "otp" + +UPDATE_INTERVAL = timedelta(seconds=300) diff --git a/homeassistant/components/ituran/coordinator.py b/homeassistant/components/ituran/coordinator.py new file mode 100644 index 00000000000..93d07b71267 --- /dev/null +++ b/homeassistant/components/ituran/coordinator.py @@ -0,0 +1,76 @@ +"""Coordinator for Ituran.""" + +import logging + +from pyituran import Ituran, Vehicle +from pyituran.exceptions import IturanApiError, IturanAuthError + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryError +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import ( + CONF_ID_OR_PASSPORT, + CONF_MOBILE_ID, + CONF_PHONE_NUMBER, + DOMAIN, + UPDATE_INTERVAL, +) + +_LOGGER = logging.getLogger(__name__) + +type IturanConfigEntry = ConfigEntry[IturanDataUpdateCoordinator] + + +class IturanDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Vehicle]]): + """Class to manage fetching Ituran data.""" + + config_entry: IturanConfigEntry + + def __init__(self, hass: HomeAssistant, entry: IturanConfigEntry) -> None: + """Initialize account-wide Ituran data updater.""" + super().__init__( + hass, + _LOGGER, + name=f"{DOMAIN}-{entry.data[CONF_ID_OR_PASSPORT]}", + update_interval=UPDATE_INTERVAL, + config_entry=entry, + ) + self.ituran = Ituran( + entry.data[CONF_ID_OR_PASSPORT], + entry.data[CONF_PHONE_NUMBER], + entry.data[CONF_MOBILE_ID], + ) + + async def _async_update_data(self) -> dict[str, Vehicle]: + """Fetch data from Ituran.""" + + try: + vehicles = await self.ituran.get_vehicles() + except IturanApiError as e: + raise UpdateFailed( + translation_domain=DOMAIN, translation_key="api_error" + ) from e + except IturanAuthError as e: + raise ConfigEntryError( + 
translation_domain=DOMAIN, translation_key="auth_error" + ) from e + + updated_data = {vehicle.license_plate: vehicle for vehicle in vehicles} + self._cleanup_removed_vehicles(updated_data) + + return updated_data + + def _cleanup_removed_vehicles(self, data: dict[str, Vehicle]) -> None: + account_vehicles = {(DOMAIN, license_plate) for license_plate in data} + device_registry = dr.async_get(self.hass) + device_entries = dr.async_entries_for_config_entry( + device_registry, config_entry_id=self.config_entry.entry_id + ) + for device in device_entries: + if not device.identifiers.intersection(account_vehicles): + device_registry.async_update_device( + device.id, remove_config_entry_id=self.config_entry.entry_id + ) diff --git a/homeassistant/components/ituran/device_tracker.py b/homeassistant/components/ituran/device_tracker.py new file mode 100644 index 00000000000..37796570c61 --- /dev/null +++ b/homeassistant/components/ituran/device_tracker.py @@ -0,0 +1,49 @@ +"""Device tracker for Ituran vehicles.""" + +from __future__ import annotations + +from homeassistant.components.device_tracker import TrackerEntity +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import IturanConfigEntry +from .coordinator import IturanDataUpdateCoordinator +from .entity import IturanBaseEntity + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: IturanConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Ituran tracker from config entry.""" + coordinator = config_entry.runtime_data + async_add_entities( + IturanDeviceTracker(coordinator, license_plate) + for license_plate in coordinator.data + ) + + +class IturanDeviceTracker(IturanBaseEntity, TrackerEntity): + """Ituran device tracker.""" + + _attr_translation_key = "car" + _attr_name = None + + def __init__( + self, + coordinator: IturanDataUpdateCoordinator, + license_plate: str, + ) -> None: + """Initialize the device tracker.""" + super().__init__(coordinator, license_plate, "device_tracker") + + @property + def latitude(self) -> float | None: + """Return latitude value of the device.""" + return self.vehicle.gps_coordinates[0] + + @property + def longitude(self) -> float | None: + """Return longitude value of the device.""" + return self.vehicle.gps_coordinates[1] diff --git a/homeassistant/components/ituran/entity.py b/homeassistant/components/ituran/entity.py new file mode 100644 index 00000000000..597cdac9513 --- /dev/null +++ b/homeassistant/components/ituran/entity.py @@ -0,0 +1,47 @@ +"""Base for all turan entities.""" + +from __future__ import annotations + +from pyituran import Vehicle + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import IturanDataUpdateCoordinator + + +class IturanBaseEntity(CoordinatorEntity[IturanDataUpdateCoordinator]): + """Common base for Ituran entities.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: IturanDataUpdateCoordinator, + license_plate: str, + unique_key: str, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + + self._license_plate = license_plate + self._attr_unique_id = f"{license_plate}-{unique_key}" + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self.vehicle.license_plate)}, + manufacturer=self.vehicle.make, + model=self.vehicle.model, + name=self.vehicle.model, + 
serial_number=self.vehicle.license_plate, + ) + + @property + def available(self) -> bool: + """Return True if vehicle is still included in the account.""" + return super().available and self._license_plate in self.coordinator.data + + @property + def vehicle(self) -> Vehicle: + """Return the vehicle information associated with this entity.""" + return self.coordinator.data[self._license_plate] diff --git a/homeassistant/components/ituran/icons.json b/homeassistant/components/ituran/icons.json new file mode 100644 index 00000000000..a20ea5b7304 --- /dev/null +++ b/homeassistant/components/ituran/icons.json @@ -0,0 +1,9 @@ +{ + "entity": { + "device_tracker": { + "car": { + "default": "mdi:car" + } + } + } +} diff --git a/homeassistant/components/ituran/manifest.json b/homeassistant/components/ituran/manifest.json new file mode 100644 index 00000000000..570b4582a8a --- /dev/null +++ b/homeassistant/components/ituran/manifest.json @@ -0,0 +1,10 @@ +{ + "domain": "ituran", + "name": "Ituran", + "codeowners": ["@shmuelzon"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/ituran", + "integration_type": "hub", + "iot_class": "cloud_polling", + "requirements": ["pyituran==0.1.3"] +} diff --git a/homeassistant/components/ituran/quality_scale.yaml b/homeassistant/components/ituran/quality_scale.yaml new file mode 100644 index 00000000000..71f82aa1971 --- /dev/null +++ b/homeassistant/components/ituran/quality_scale.yaml @@ -0,0 +1,92 @@ +rules: + # Bronze + config-flow: done + test-before-configure: done + unique-config-entry: done + config-flow-test-coverage: done + runtime-data: done + test-before-setup: done + appropriate-polling: done + entity-unique-id: done + has-entity-name: done + entity-event-setup: + status: exempt + comment: | + Entities of this integration does not explicitly subscribe to events. + dependency-transparency: done + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + common-modules: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. + brands: done + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: + status: exempt + comment: | + This integration does not provide additional actions. + reauthentication-flow: todo + parallel-updates: + status: exempt + comment: | + Read only platforms and coordinator. + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: + status: exempt + comment: | + No options flow. + # Gold + entity-translations: done + entity-device-class: + status: exempt + comment: | + Only device_tracker platform. + devices: done + entity-category: todo + entity-disabled-by-default: + status: exempt + comment: | + No noisy entities + discovery: + status: exempt + comment: | + This integration cannot be discovered, it is a connecting to a service + provider, which uses the users credentials to get the data. + stale-devices: todo + diagnostics: todo + exception-translations: done + icon-translations: done + reconfiguration-flow: todo + dynamic-devices: done + discovery-update-info: + status: exempt + comment: | + This integration cannot be discovered, it is a connecting to a service + provider, which uses the users credentials to get the data. 
+  repair-issues:
+    status: exempt
+    comment: |
+      No repairs/issues.
+  docs-use-cases: todo
+  docs-supported-devices: done
+  docs-supported-functions: done
+  docs-data-update: done
+  docs-known-limitations: done
+  docs-troubleshooting: todo
+  docs-examples: todo
+  # Platinum
+  async-dependency: done
+  inject-websession: todo
+  strict-typing: todo
diff --git a/homeassistant/components/ituran/strings.json b/homeassistant/components/ituran/strings.json
new file mode 100644
index 00000000000..e9f785289b8
--- /dev/null
+++ b/homeassistant/components/ituran/strings.json
@@ -0,0 +1,41 @@
+{
+  "config": {
+    "step": {
+      "user": {
+        "data": {
+          "id_or_passport": "ID or passport number",
+          "phone_number": "Mobile phone number"
+        },
+        "data_description": {
+          "id_or_passport": "The government ID or passport number provided when registering with Ituran.",
+          "phone_number": "The mobile phone number provided when registering with Ituran. A one-time password will be sent to this mobile number."
+        }
+      },
+      "otp": {
+        "data": {
+          "otp": "OTP"
+        },
+        "data_description": {
+          "otp": "A one-time password sent as a text message to the mobile phone number provided before."
+        }
+      }
+    },
+    "error": {
+      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
+      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
+      "invalid_otp": "OTP invalid",
+      "unknown": "[%key:common::config_flow::error::unknown%]"
+    },
+    "abort": {
+      "already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
+    }
+  },
+  "exceptions": {
+    "api_error": {
+      "message": "An error occurred while communicating with the Ituran service."
+    },
+    "auth_error": {
+      "message": "Failed authenticating with the Ituran service, please remove and re-add the integration."
+    }
+  }
+}
diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py
index e710480caaa..a3858fd176f 100644
--- a/homeassistant/generated/config_flows.py
+++ b/homeassistant/generated/config_flows.py
@@ -296,6 +296,7 @@ FLOWS = {
     "iss",
     "ista_ecotrend",
     "isy994",
+    "ituran",
    "izone",
     "jellyfin",
     "jewish_calendar",
diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json
index d708660b32b..5128578b606 100644
--- a/homeassistant/generated/integrations.json
+++ b/homeassistant/generated/integrations.json
@@ -2983,6 +2983,12 @@
       "config_flow": true,
       "iot_class": "local_push"
     },
+    "ituran": {
+      "name": "Ituran",
+      "integration_type": "hub",
+      "config_flow": true,
+      "iot_class": "cloud_polling"
+    },
     "izone": {
       "name": "iZone",
       "integration_type": "hub",
diff --git a/requirements_all.txt b/requirements_all.txt
index 18099e9f462..87baa60f52a 100644
--- a/requirements_all.txt
+++ b/requirements_all.txt
@@ -1996,6 +1996,9 @@ pyisy==3.1.14
 # homeassistant.components.itach
 pyitachip2ir==0.0.7

+# homeassistant.components.ituran
+pyituran==0.1.3
+
 # homeassistant.components.jvc_projector
 pyjvcprojector==1.1.2

diff --git a/requirements_test_all.txt b/requirements_test_all.txt
index edddf1256bf..a2b73f7e272 100644
--- a/requirements_test_all.txt
+++ b/requirements_test_all.txt
@@ -1610,6 +1610,9 @@ pyiss==1.0.1
 # homeassistant.components.isy994
 pyisy==3.1.14

+# homeassistant.components.ituran
+pyituran==0.1.3
+
 # homeassistant.components.jvc_projector
 pyjvcprojector==1.1.2

diff --git a/tests/components/ituran/__init__.py b/tests/components/ituran/__init__.py
new file mode 100644
index 00000000000..52fccaad138
--- /dev/null
+++ b/tests/components/ituran/__init__.py
@@ -0,0 +1,13 @@
+"""Tests for the Ituran integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/ituran/conftest.py b/tests/components/ituran/conftest.py new file mode 100644 index 00000000000..ef22c90591d --- /dev/null +++ b/tests/components/ituran/conftest.py @@ -0,0 +1,83 @@ +"""Mocks for the Ituran integration.""" + +from collections.abc import Generator +from datetime import datetime +from unittest.mock import AsyncMock, PropertyMock, patch + +import pytest + +from homeassistant.components.ituran.const import ( + CONF_ID_OR_PASSPORT, + CONF_MOBILE_ID, + CONF_PHONE_NUMBER, + DOMAIN, +) + +from .const import MOCK_CONFIG_DATA + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.ituran.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + title=f"Ituran {MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT]}", + domain=DOMAIN, + data={ + CONF_ID_OR_PASSPORT: MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT], + CONF_PHONE_NUMBER: MOCK_CONFIG_DATA[CONF_PHONE_NUMBER], + CONF_MOBILE_ID: MOCK_CONFIG_DATA[CONF_MOBILE_ID], + }, + unique_id=MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT], + ) + + +class MockVehicle: + """Mock vehicle.""" + + def __init__(self) -> None: + """Initialize mock vehicle.""" + self.license_plate = "12345678" + self.make = "mock make" + self.model = "mock model" + self.mileage = 1000 + self.speed = 20 + self.gps_coordinates = (25.0, -71.0) + self.address = "Bermuda Triangle" + self.heading = 150 + self.last_update = datetime(2024, 1, 1, 0, 0, 0) + + +@pytest.fixture +def mock_ituran() -> Generator[AsyncMock]: + """Return a mocked PalazzettiClient.""" + with ( + patch( + "homeassistant.components.ituran.coordinator.Ituran", + autospec=True, + ) as ituran, + patch( + "homeassistant.components.ituran.config_flow.Ituran", + new=ituran, + ), + ): + mock_ituran = ituran.return_value + mock_ituran.is_authenticated.return_value = False + mock_ituran.authenticate.return_value = True + mock_ituran.get_vehicles.return_value = [MockVehicle()] + type(mock_ituran).mobile_id = PropertyMock( + return_value=MOCK_CONFIG_DATA[CONF_MOBILE_ID] + ) + + yield mock_ituran diff --git a/tests/components/ituran/const.py b/tests/components/ituran/const.py new file mode 100644 index 00000000000..b566caebbbe --- /dev/null +++ b/tests/components/ituran/const.py @@ -0,0 +1,24 @@ +"""Constants for tests of the Ituran component.""" + +from typing import Any + +from homeassistant.components.ituran.const import ( + CONF_ID_OR_PASSPORT, + CONF_MOBILE_ID, + CONF_PHONE_NUMBER, + DOMAIN, +) + +MOCK_CONFIG_DATA: dict[str, str] = { + CONF_ID_OR_PASSPORT: "12345678", + CONF_PHONE_NUMBER: "0501234567", + CONF_MOBILE_ID: "0123456789abcdef", +} + +MOCK_CONFIG_ENTRY: dict[str, Any] = { + "domain": DOMAIN, + "entry_id": "1", + "source": "user", + "title": MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT], + "data": MOCK_CONFIG_DATA, +} diff --git a/tests/components/ituran/snapshots/test_device_tracker.ambr 
b/tests/components/ituran/snapshots/test_device_tracker.ambr new file mode 100644 index 00000000000..3b650f7927f --- /dev/null +++ b/tests/components/ituran/snapshots/test_device_tracker.ambr @@ -0,0 +1,51 @@ +# serializer version: 1 +# name: test_device_tracker[device_tracker.mock_model-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'device_tracker', + 'entity_category': , + 'entity_id': 'device_tracker.mock_model', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'ituran', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'car', + 'unique_id': '12345678-device_tracker', + 'unit_of_measurement': None, + }) +# --- +# name: test_device_tracker[device_tracker.mock_model-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'mock model', + 'gps_accuracy': 0, + 'latitude': 25.0, + 'longitude': -71.0, + 'source_type': , + }), + 'context': , + 'entity_id': 'device_tracker.mock_model', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_home', + }) +# --- diff --git a/tests/components/ituran/snapshots/test_init.ambr b/tests/components/ituran/snapshots/test_init.ambr new file mode 100644 index 00000000000..1e64ef9e850 --- /dev/null +++ b/tests/components/ituran/snapshots/test_init.ambr @@ -0,0 +1,35 @@ +# serializer version: 1 +# name: test_device + list([ + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'ituran', + '12345678', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'mock make', + 'model': 'mock model', + 'model_id': None, + 'name': 'mock model', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '12345678', + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }), + ]) +# --- diff --git a/tests/components/ituran/test_config_flow.py b/tests/components/ituran/test_config_flow.py new file mode 100644 index 00000000000..0e0f6f63b9a --- /dev/null +++ b/tests/components/ituran/test_config_flow.py @@ -0,0 +1,211 @@ +"""Test the Ituran config flow.""" + +from unittest.mock import AsyncMock + +from pyituran.exceptions import IturanApiError, IturanAuthError +import pytest + +from homeassistant.components.ituran.const import ( + CONF_ID_OR_PASSPORT, + CONF_MOBILE_ID, + CONF_OTP, + CONF_PHONE_NUMBER, + DOMAIN, +) +from homeassistant.config_entries import SOURCE_USER, ConfigFlowResult +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .const import MOCK_CONFIG_DATA + + +async def __do_successful_user_step( + hass: HomeAssistant, result: ConfigFlowResult, mock_ituran: AsyncMock +): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_ID_OR_PASSPORT: MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT], + CONF_PHONE_NUMBER: MOCK_CONFIG_DATA[CONF_PHONE_NUMBER], + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "otp" + assert result["errors"] == {} + + return result + + +async def __do_successful_otp_step( + 
hass: HomeAssistant, + result: ConfigFlowResult, + mock_ituran: AsyncMock, +): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_OTP: "123456", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == f"Ituran {MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT]}" + assert result["data"][CONF_ID_OR_PASSPORT] == MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT] + assert result["data"][CONF_PHONE_NUMBER] == MOCK_CONFIG_DATA[CONF_PHONE_NUMBER] + assert result["data"][CONF_MOBILE_ID] is not None + assert result["result"].unique_id == MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT] + assert len(mock_ituran.is_authenticated.mock_calls) > 0 + assert len(mock_ituran.authenticate.mock_calls) > 0 + + return result + + +async def test_full_user_flow( + hass: HomeAssistant, mock_ituran: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test the full user configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await __do_successful_user_step(hass, result, mock_ituran) + await __do_successful_otp_step(hass, result, mock_ituran) + + +async def test_invalid_auth( + hass: HomeAssistant, mock_ituran: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test invalid credentials configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + mock_ituran.request_otp.side_effect = IturanAuthError + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_ID_OR_PASSPORT: MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT], + CONF_PHONE_NUMBER: MOCK_CONFIG_DATA[CONF_PHONE_NUMBER], + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "invalid_auth"} + + mock_ituran.request_otp.side_effect = None + result = await __do_successful_user_step(hass, result, mock_ituran) + await __do_successful_otp_step(hass, result, mock_ituran) + + +async def test_invalid_otp( + hass: HomeAssistant, mock_ituran: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test invalid OTP configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await __do_successful_user_step(hass, result, mock_ituran) + + mock_ituran.authenticate.side_effect = IturanAuthError + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_OTP: "123456", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "invalid_otp"} + + mock_ituran.authenticate.side_effect = None + await __do_successful_otp_step(hass, result, mock_ituran) + + +@pytest.mark.parametrize( + ("exception", "expected_error"), + [(IturanApiError, "cannot_connect"), (Exception, "unknown")], +) +async def test_errors( + hass: HomeAssistant, + mock_ituran: AsyncMock, + mock_setup_entry: AsyncMock, + exception: Exception, + expected_error: str, +) -> None: + """Test connection errors during configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert 
result["step_id"] == "user" + + mock_ituran.request_otp.side_effect = exception + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_ID_OR_PASSPORT: MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT], + CONF_PHONE_NUMBER: MOCK_CONFIG_DATA[CONF_PHONE_NUMBER], + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": expected_error} + + mock_ituran.request_otp.side_effect = None + result = await __do_successful_user_step(hass, result, mock_ituran) + + mock_ituran.authenticate.side_effect = exception + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_OTP: "123456", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": expected_error} + + mock_ituran.authenticate.side_effect = None + await __do_successful_otp_step(hass, result, mock_ituran) + + +async def test_already_authenticated( + hass: HomeAssistant, mock_ituran: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test user already authenticated configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + mock_ituran.is_authenticated.return_value = True + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_ID_OR_PASSPORT: MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT], + CONF_PHONE_NUMBER: MOCK_CONFIG_DATA[CONF_PHONE_NUMBER], + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == f"Ituran {MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT]}" + assert result["data"][CONF_ID_OR_PASSPORT] == MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT] + assert result["data"][CONF_PHONE_NUMBER] == MOCK_CONFIG_DATA[CONF_PHONE_NUMBER] + assert result["data"][CONF_MOBILE_ID] == MOCK_CONFIG_DATA[CONF_MOBILE_ID] + assert result["result"].unique_id == MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT] diff --git a/tests/components/ituran/test_device_tracker.py b/tests/components/ituran/test_device_tracker.py new file mode 100644 index 00000000000..7bcb314cde7 --- /dev/null +++ b/tests/components/ituran/test_device_tracker.py @@ -0,0 +1,61 @@ +"""Test the Ituran device_tracker.""" + +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +from pyituran.exceptions import IturanApiError +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.ituran.const import UPDATE_INTERVAL +from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +async def test_device_tracker( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_ituran: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test state of device_tracker.""" + await setup_integration(hass, mock_config_entry) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_availability( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_ituran: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test device is marked as unavailable when we can't reach the Ituran service.""" + entity_id = "device_tracker.mock_model" + await setup_integration(hass, mock_config_entry) + + state = hass.states.get(entity_id) + assert state + assert state.state != STATE_UNAVAILABLE + + mock_ituran.get_vehicles.side_effect = IturanApiError + freezer.tick(UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_UNAVAILABLE + + mock_ituran.get_vehicles.side_effect = None + freezer.tick(UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state + assert state.state != STATE_UNAVAILABLE diff --git a/tests/components/ituran/test_init.py b/tests/components/ituran/test_init.py new file mode 100644 index 00000000000..3dfe946cdf9 --- /dev/null +++ b/tests/components/ituran/test_init.py @@ -0,0 +1,113 @@ +"""Tests for the Ituran integration.""" + +from unittest.mock import AsyncMock + +from pyituran.exceptions import IturanApiError, IturanAuthError +from syrupy.assertion import SnapshotAssertion + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +async def test_load_unload_config_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_ituran: AsyncMock, +) -> None: + """Test the Ituran configuration entry loading/unloading.""" + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_device( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_ituran: AsyncMock, + snapshot: SnapshotAssertion, + device_registry: dr.DeviceRegistry, +) -> None: + """Test the device information.""" + await setup_integration(hass, mock_config_entry) + + device_entries = dr.async_entries_for_config_entry( + device_registry, mock_config_entry.entry_id + ) + assert device_entries == snapshot + + +async def test_remove_stale_devices( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_ituran: AsyncMock, + device_registry: dr.DeviceRegistry, +) -> None: + """Test that devices not returned by the service are removed.""" + await setup_integration(hass, mock_config_entry) + device_entries = dr.async_entries_for_config_entry( + device_registry, mock_config_entry.entry_id + ) + + assert len(device_entries) == 1 + + mock_ituran.get_vehicles.return_value = [] + await mock_config_entry.runtime_data.async_refresh() + await hass.async_block_till_done() + device_entries = dr.async_entries_for_config_entry( + device_registry, mock_config_entry.entry_id + ) + + assert len(device_entries) == 0 + + +async def test_recover_from_errors( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_ituran: AsyncMock, + device_registry: dr.DeviceRegistry, +) -> None: + """Verify we can recover from service Errors.""" + + await setup_integration(hass, mock_config_entry) + device_entries = dr.async_entries_for_config_entry( + device_registry, mock_config_entry.entry_id + ) + + assert len(device_entries) == 1 + + mock_ituran.get_vehicles.side_effect = IturanApiError + await mock_config_entry.runtime_data.async_refresh() + await hass.async_block_till_done() + device_entries = dr.async_entries_for_config_entry( + device_registry, mock_config_entry.entry_id + ) + + assert len(device_entries) == 1 + + mock_ituran.get_vehicles.side_effect = IturanAuthError + await mock_config_entry.runtime_data.async_refresh() + await hass.async_block_till_done() + device_entries = dr.async_entries_for_config_entry( + device_registry, mock_config_entry.entry_id + ) + + assert len(device_entries) == 1 + + mock_ituran.get_vehicles.side_effect = None + await mock_config_entry.runtime_data.async_refresh() + await hass.async_block_till_done() + device_entries = dr.async_entries_for_config_entry( + device_registry, mock_config_entry.entry_id + ) + + assert len(device_entries) == 1 From aa7b69afd49fbbd29f21dcdda9b4ac97c58b207d Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Mon, 9 Dec 2024 14:39:09 -0500 Subject: [PATCH 0402/1198] Add reconfigure flow to Cambridge Audio (#131091) * Add reconfigure flow to Cambridge Audio * Update * Add reconfigure flow to Cambridge Audio * Fix * Add helper method to reconfigure tests * Update quality scale --- .../components/cambridge_audio/config_flow.py | 27 +++++++++- .../cambridge_audio/quality_scale.yaml | 2 +- 
.../components/cambridge_audio/strings.json | 11 ++++ .../cambridge_audio/test_config_flow.py | 54 ++++++++++++++++++- 4 files changed, 90 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/cambridge_audio/config_flow.py b/homeassistant/components/cambridge_audio/config_flow.py index ca587ee9a48..6f5a92feac0 100644 --- a/homeassistant/components/cambridge_audio/config_flow.py +++ b/homeassistant/components/cambridge_audio/config_flow.py @@ -7,12 +7,18 @@ from aiostreammagic import StreamMagicClient import voluptuous as vol from homeassistant.components import zeroconf -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ( + SOURCE_RECONFIGURE, + ConfigFlow, + ConfigFlowResult, +) from homeassistant.const import CONF_HOST, CONF_NAME from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import CONNECT_TIMEOUT, DOMAIN, STREAM_MAGIC_EXCEPTIONS +DATA_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str}) + class CambridgeAudioConfigFlow(ConfigFlow, domain=DOMAIN): """Cambridge Audio configuration flow.""" @@ -64,6 +70,17 @@ class CambridgeAudioConfigFlow(ConfigFlow, domain=DOMAIN): }, ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of the integration.""" + if not user_input: + return self.async_show_form( + step_id="reconfigure", + data_schema=DATA_SCHEMA, + ) + return await self.async_step_user(user_input) + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -82,6 +99,12 @@ class CambridgeAudioConfigFlow(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id( client.info.unit_id, raise_on_progress=False ) + if self.source == SOURCE_RECONFIGURE: + self._abort_if_unique_id_mismatch(reason="wrong_device") + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), + data_updates={CONF_HOST: user_input[CONF_HOST]}, + ) self._abort_if_unique_id_configured() return self.async_create_entry( title=client.info.name, @@ -91,6 +114,6 @@ class CambridgeAudioConfigFlow(ConfigFlow, domain=DOMAIN): await client.disconnect() return self.async_show_form( step_id="user", - data_schema=vol.Schema({vol.Required(CONF_HOST): str}), + data_schema=DATA_SCHEMA, errors=errors, ) diff --git a/homeassistant/components/cambridge_audio/quality_scale.yaml b/homeassistant/components/cambridge_audio/quality_scale.yaml index 65b921268f4..e5cafdd6368 100644 --- a/homeassistant/components/cambridge_audio/quality_scale.yaml +++ b/homeassistant/components/cambridge_audio/quality_scale.yaml @@ -56,7 +56,7 @@ rules: diagnostics: done exception-translations: done icon-translations: done - reconfiguration-flow: todo + reconfiguration-flow: done dynamic-devices: status: exempt comment: | diff --git a/homeassistant/components/cambridge_audio/strings.json b/homeassistant/components/cambridge_audio/strings.json index c368ba060a7..9f5e031815b 100644 --- a/homeassistant/components/cambridge_audio/strings.json +++ b/homeassistant/components/cambridge_audio/strings.json @@ -13,12 +13,23 @@ }, "discovery_confirm": { "description": "Do you want to setup {name}?" 
+      },
+      "reconfigure": {
+        "description": "Reconfigure your Cambridge Audio Streamer.",
+        "data": {
+          "host": "[%key:common::config_flow::data::host%]"
+        },
+        "data_description": {
+          "host": "[%key:component::cambridge_audio::config::step::user::data_description::host%]"
+        }
       }
     },
     "error": {
       "cannot_connect": "Failed to connect to Cambridge Audio device. Please make sure the device is powered up and connected to the network. Try power-cycling the device if it does not connect."
     },
     "abort": {
+      "wrong_device": "This Cambridge Audio device does not match the existing device id. Please make sure you entered the correct IP address.",
+      "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
     }
diff --git a/tests/components/cambridge_audio/test_config_flow.py b/tests/components/cambridge_audio/test_config_flow.py
index 9a2d077b8f8..8d01db6e015 100644
--- a/tests/components/cambridge_audio/test_config_flow.py
+++ b/tests/components/cambridge_audio/test_config_flow.py
@@ -7,7 +7,7 @@ from aiostreammagic import StreamMagicError
 
 from homeassistant.components.cambridge_audio.const import DOMAIN
 from homeassistant.components.zeroconf import ZeroconfServiceInfo
-from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF
+from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF, ConfigFlowResult
 from homeassistant.const import CONF_HOST
 from homeassistant.core import HomeAssistant
 from homeassistant.data_entry_flow import FlowResultType
@@ -192,3 +192,55 @@ async def test_zeroconf_duplicate(
     )
     assert result["type"] is FlowResultType.ABORT
     assert result["reason"] == "already_configured"
+
+
+async def _start_reconfigure_flow(
+    hass: HomeAssistant, mock_config_entry: MockConfigEntry
+) -> ConfigFlowResult:
+    """Initialize a reconfigure flow."""
+    mock_config_entry.add_to_hass(hass)
+
+    reconfigure_result = await mock_config_entry.start_reconfigure_flow(hass)
+
+    assert reconfigure_result["type"] is FlowResultType.FORM
+    assert reconfigure_result["step_id"] == "reconfigure"
+
+    return await hass.config_entries.flow.async_configure(
+        reconfigure_result["flow_id"],
+        {CONF_HOST: "192.168.20.219"},
+    )
+
+
+async def test_reconfigure_flow(
+    hass: HomeAssistant,
+    mock_stream_magic_client: AsyncMock,
+    mock_setup_entry: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+) -> None:
+    """Test reconfigure flow."""
+
+    result = await _start_reconfigure_flow(hass, mock_config_entry)
+
+    assert result["type"] is FlowResultType.ABORT
+    assert result["reason"] == "reconfigure_successful"
+
+    entry = hass.config_entries.async_get_entry(mock_config_entry.entry_id)
+    assert entry
+    assert entry.data == {
+        CONF_HOST: "192.168.20.219",
+    }
+
+
+async def test_reconfigure_unique_id_mismatch(
+    hass: HomeAssistant,
+    mock_stream_magic_client: AsyncMock,
+    mock_setup_entry: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+) -> None:
+    """Ensure reconfigure flow aborts when the device changes."""
+    mock_stream_magic_client.info.unit_id = "different_udn"
+
+    result = await _start_reconfigure_flow(hass, mock_config_entry)
+
+    assert result["type"] is FlowResultType.ABORT
+    assert result["reason"] == "wrong_device"
From e4ba94f93994dff1aa10cb37ed8bc43be0df5d1a Mon Sep 17 00:00:00 2001
From: "J.
Nick Koston" Date: Mon, 9 Dec 2024 21:41:08 +0100 Subject: [PATCH 0403/1198] Fix `LazyState` compatibility with `State` `under_cached_property` change (#132752) --- homeassistant/components/recorder/models/state.py | 15 +++++++++++++++ tests/components/recorder/test_models.py | 4 ++++ 2 files changed, 19 insertions(+) diff --git a/homeassistant/components/recorder/models/state.py b/homeassistant/components/recorder/models/state.py index 89281a85c15..f5e49881b8f 100644 --- a/homeassistant/components/recorder/models/state.py +++ b/homeassistant/components/recorder/models/state.py @@ -96,6 +96,21 @@ class LazyState(State): assert self._last_updated_ts is not None return dt_util.utc_from_timestamp(self._last_updated_ts) + @cached_property + def last_updated_timestamp(self) -> float: # type: ignore[override] + """Last updated timestamp.""" + if TYPE_CHECKING: + assert self._last_updated_ts is not None + return self._last_updated_ts + + @cached_property + def last_changed_timestamp(self) -> float: # type: ignore[override] + """Last changed timestamp.""" + ts = self._last_changed_ts or self._last_updated_ts + if TYPE_CHECKING: + assert ts is not None + return ts + def as_dict(self) -> dict[str, Any]: # type: ignore[override] """Return a dict representation of the LazyState. diff --git a/tests/components/recorder/test_models.py b/tests/components/recorder/test_models.py index 9078b2e861c..a0703f1f2c5 100644 --- a/tests/components/recorder/test_models.py +++ b/tests/components/recorder/test_models.py @@ -346,6 +346,8 @@ async def test_lazy_state_handles_different_last_updated_and_last_changed( "last_updated": "2021-06-12T03:04:01.000323+00:00", "state": "off", } + assert lstate.last_changed_timestamp == row.last_changed_ts + assert lstate.last_updated_timestamp == row.last_updated_ts async def test_lazy_state_handles_same_last_updated_and_last_changed( @@ -379,3 +381,5 @@ async def test_lazy_state_handles_same_last_updated_and_last_changed( "last_updated": "2021-06-12T03:04:01.000323+00:00", "state": "off", } + assert lstate.last_changed_timestamp == row.last_changed_ts + assert lstate.last_updated_timestamp == row.last_updated_ts From b139af9a9c8ed581f18b25e7bc79c2df998583f7 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 21:46:46 +0100 Subject: [PATCH 0404/1198] Migrate deconz lights to use Kelvin (#132698) * Use ATTR_COLOR_TEMP_KELVIN in kelvin light * Adjust --- homeassistant/components/deconz/light.py | 36 ++++++++++++++++-------- 1 file changed, 24 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/deconz/light.py b/homeassistant/components/deconz/light.py index 95a97959d5b..acfbff98297 100644 --- a/homeassistant/components/deconz/light.py +++ b/homeassistant/components/deconz/light.py @@ -12,7 +12,7 @@ from pydeconz.models.light.light import Light, LightAlert, LightColorMode, Light from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, @@ -30,7 +30,11 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util.color import color_hs_to_xy +from homeassistant.util.color import ( + color_hs_to_xy, + color_temperature_kelvin_to_mired, + color_temperature_mired_to_kelvin, +) from .const import DOMAIN as DECONZ_DOMAIN, POWER_PLUGS 
from .entity import DeconzDevice @@ -256,9 +260,11 @@ class DeconzBaseLight[_LightDeviceT: Group | Light]( return self._device.brightness @property - def color_temp(self) -> int | None: + def color_temp_kelvin(self) -> int | None: """Return the CT color value.""" - return self._device.color_temp + if self._device.color_temp is None: + return None + return color_temperature_mired_to_kelvin(self._device.color_temp) @property def hs_color(self) -> tuple[float, float] | None: @@ -284,8 +290,10 @@ class DeconzBaseLight[_LightDeviceT: Group | Light]( if ATTR_BRIGHTNESS in kwargs: data["brightness"] = kwargs[ATTR_BRIGHTNESS] - if ATTR_COLOR_TEMP in kwargs: - data["color_temperature"] = kwargs[ATTR_COLOR_TEMP] + if ATTR_COLOR_TEMP_KELVIN in kwargs: + data["color_temperature"] = color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ) if ATTR_HS_COLOR in kwargs: if ColorMode.XY in self._attr_supported_color_modes: @@ -338,14 +346,18 @@ class DeconzLight(DeconzBaseLight[Light]): """Representation of a deCONZ light.""" @property - def max_mireds(self) -> int: - """Return the warmest color_temp that this light supports.""" - return self._device.max_color_temp or super().max_mireds + def min_color_temp_kelvin(self) -> int: + """Return the warmest color_temp_kelvin that this light supports.""" + if max_color_temp_mireds := self._device.max_color_temp: + return color_temperature_mired_to_kelvin(max_color_temp_mireds) + return super().min_color_temp_kelvin @property - def min_mireds(self) -> int: - """Return the coldest color_temp that this light supports.""" - return self._device.min_color_temp or super().min_mireds + def max_color_temp_kelvin(self) -> int: + """Return the coldest color_temp_kelvin that this light supports.""" + if min_color_temp_mireds := self._device.min_color_temp: + return color_temperature_mired_to_kelvin(min_color_temp_mireds) + return super().max_color_temp_kelvin @callback def async_update_callback(self) -> None: From af7caeae53eec79800ceb22899e71cd1727c0905 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=85ke=20Strandberg?= Date: Mon, 9 Dec 2024 22:20:23 +0100 Subject: [PATCH 0405/1198] Add quality scale to myUplink - reflect current state (#131686) --- .../components/myuplink/quality_scale.yaml | 100 ++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 100 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/myuplink/quality_scale.yaml diff --git a/homeassistant/components/myuplink/quality_scale.yaml b/homeassistant/components/myuplink/quality_scale.yaml new file mode 100644 index 00000000000..b876f4c329c --- /dev/null +++ b/homeassistant/components/myuplink/quality_scale.yaml @@ -0,0 +1,100 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + No custom actions are defined. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: todo + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + No custom actions are defined. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + No explicit event subscriptions. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: | + No custom actions are defined. 
+ config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: No configuration parameters + docs-installation-parameters: + status: done + comment: Described in installation instructions + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: + status: exempt + comment: Handled by coordinator + reauthentication-flow: done + test-coverage: + status: todo + comment: PR is pending review + + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: exempt + comment: | + Not possible to discover these devices. + discovery: + status: exempt + comment: | + Not possible to discover these devices. + docs-data-update: done + docs-examples: todo + docs-known-limitations: done + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: done + docs-use-cases: todo + dynamic-devices: todo + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: + status: done + comment: | + Datapoint names are read from the API metadata and used as entity names in HA. + It is not feasible to use the API names as translation keys as they can change between + firmware and API upgrades and the number of appliance models and firmware releases are huge. + Entity names translations are therefore not implemented for the time being. + exception-translations: + status: todo + comment: PR pending review \#191937 + icon-translations: done + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + No repair-issues are raised. + stale-devices: + status: done + comment: | + There is no way for the integration to know if a device is gone temporarily or permanently. User is allowed to delete a stale device from GUI. 
+ + # Platinum + async-dependency: done + inject-websession: done + strict-typing: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index b1d7e597a07..ff67bbbe416 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -700,7 +700,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "mysensors", "mystrom", "mythicbeastsdns", - "myuplink", "nad", "nam", "namecheapdns", From 3a65d1b611e718f3a9bff9aeaf1fb43e1fc2aaa7 Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Mon, 9 Dec 2024 16:28:14 -0500 Subject: [PATCH 0406/1198] Mark Cambridge Audio quality scale as platinum (#132762) --- homeassistant/components/cambridge_audio/manifest.json | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/cambridge_audio/manifest.json b/homeassistant/components/cambridge_audio/manifest.json index 7b7e341e3c6..14a389587d2 100644 --- a/homeassistant/components/cambridge_audio/manifest.json +++ b/homeassistant/components/cambridge_audio/manifest.json @@ -7,6 +7,7 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["aiostreammagic"], + "quality_scale": "platinum", "requirements": ["aiostreammagic==2.10.0"], "zeroconf": ["_stream-magic._tcp.local.", "_smoip._tcp.local."] } From 2d4fe5853f44cdc20736a4ed5e5d823ce5590d61 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Mon, 9 Dec 2024 22:37:32 +0100 Subject: [PATCH 0407/1198] Add clearer descriptions to all Timer actions (#132571) Co-authored-by: Franck Nijhof --- homeassistant/components/timer/strings.json | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/timer/strings.json b/homeassistant/components/timer/strings.json index 064ec81df1d..4fd80f565a2 100644 --- a/homeassistant/components/timer/strings.json +++ b/homeassistant/components/timer/strings.json @@ -34,33 +34,33 @@ "services": { "start": { "name": "[%key:common::action::start%]", - "description": "Starts a timer.", + "description": "Starts a timer or restarts it with a provided duration.", "fields": { "duration": { "name": "Duration", - "description": "Duration the timer requires to finish. [optional]." + "description": "Custom duration to restart the timer with." } } }, "pause": { "name": "[%key:common::action::pause%]", - "description": "Pauses a timer." + "description": "Pauses a running timer, retaining the remaining duration for later continuation." }, "cancel": { "name": "Cancel", - "description": "Cancels a timer." + "description": "Resets a timer's duration to the last known initial value without firing the timer finished event." }, "finish": { "name": "Finish", - "description": "Finishes a timer." + "description": "Finishes a running timer earlier than scheduled." }, "change": { "name": "Change", - "description": "Changes a timer.", + "description": "Changes a timer by adding or subtracting a given duration.", "fields": { "duration": { "name": "Duration", - "description": "Duration to add or subtract to the running timer." + "description": "Duration to add to or subtract from the running timer." 
} } }, From da0454e24ef24b1dd42850af04e5b9b8f57a9b95 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 22:40:16 +0100 Subject: [PATCH 0408/1198] Migrate limitlessled lights to use Kelvin (#132689) --- homeassistant/components/limitlessled/light.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/limitlessled/light.py b/homeassistant/components/limitlessled/light.py index c6b3301081d..5f771a53e86 100644 --- a/homeassistant/components/limitlessled/light.py +++ b/homeassistant/components/limitlessled/light.py @@ -19,7 +19,7 @@ import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, @@ -38,7 +38,11 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.util.color import color_hs_to_RGB, color_temperature_mired_to_kelvin +from homeassistant.util.color import ( + color_hs_to_RGB, + color_temperature_kelvin_to_mired, + color_temperature_mired_to_kelvin, +) _LOGGER = logging.getLogger(__name__) @@ -325,12 +329,14 @@ class LimitlessLEDGroup(LightEntity, RestoreEntity): else: args["color"] = self.limitlessled_color() - if ATTR_COLOR_TEMP in kwargs: + if ATTR_COLOR_TEMP_KELVIN in kwargs: assert self.supported_color_modes if ColorMode.HS in self.supported_color_modes: pipeline.white() self._attr_hs_color = WHITE - self._attr_color_temp = kwargs[ATTR_COLOR_TEMP] + self._attr_color_temp = color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ) args["temperature"] = self.limitlessled_temperature() if args: From 07d877887085d2d78934c5b23a43105fedd509cc Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 22:49:47 +0100 Subject: [PATCH 0409/1198] Remove old compatibility code (and add new warning) in lifx (#132730) --- homeassistant/components/lifx/util.py | 15 ++++----------- tests/components/lifx/test_light.py | 12 +----------- 2 files changed, 5 insertions(+), 22 deletions(-) diff --git a/homeassistant/components/lifx/util.py b/homeassistant/components/lifx/util.py index 9782fe4adba..62d0ea66f81 100644 --- a/homeassistant/components/lifx/util.py +++ b/homeassistant/components/lifx/util.py @@ -16,10 +16,8 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_BRIGHTNESS_PCT, ATTR_COLOR_NAME, - ATTR_COLOR_TEMP, ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, - ATTR_KELVIN, ATTR_RGB_COLOR, ATTR_XY_COLOR, ) @@ -114,18 +112,13 @@ def find_hsbk(hass: HomeAssistant, **kwargs: Any) -> list[float | int | None] | saturation = int(saturation / 100 * 65535) kelvin = 3500 - if ATTR_KELVIN in kwargs: + if "color_temp" in kwargs: # old ATTR_COLOR_TEMP + # added in 2025.1, can be removed in 2026.1 _LOGGER.warning( - "The 'kelvin' parameter is deprecated. Please use 'color_temp_kelvin' for" + "The 'color_temp' parameter is deprecated. 
Please use 'color_temp_kelvin' for" " all service calls" ) - kelvin = kwargs.pop(ATTR_KELVIN) - saturation = 0 - - if ATTR_COLOR_TEMP in kwargs: - kelvin = color_util.color_temperature_mired_to_kelvin( - kwargs.pop(ATTR_COLOR_TEMP) - ) + kelvin = color_util.color_temperature_mired_to_kelvin(kwargs.pop("color_temp")) saturation = 0 if ATTR_COLOR_TEMP_KELVIN in kwargs: diff --git a/tests/components/lifx/test_light.py b/tests/components/lifx/test_light.py index 084ea0c674b..88c2115ce47 100644 --- a/tests/components/lifx/test_light.py +++ b/tests/components/lifx/test_light.py @@ -35,7 +35,6 @@ from homeassistant.components.light import ( ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, - ATTR_KELVIN, ATTR_RGB_COLOR, ATTR_SUPPORTED_COLOR_MODES, ATTR_TRANSITION, @@ -1719,7 +1718,7 @@ async def test_lifx_set_state_color(hass: HomeAssistant) -> None: async def test_lifx_set_state_kelvin(hass: HomeAssistant) -> None: - """Test set_state works with old and new kelvin parameter names.""" + """Test set_state works with kelvin parameter names.""" already_migrated_config_entry = MockConfigEntry( domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=SERIAL ) @@ -1748,15 +1747,6 @@ async def test_lifx_set_state_kelvin(hass: HomeAssistant) -> None: assert bulb.set_power.calls[0][0][0] is False bulb.set_power.reset_mock() - await hass.services.async_call( - DOMAIN, - "set_state", - {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 255, ATTR_KELVIN: 3500}, - blocking=True, - ) - assert bulb.set_color.calls[0][0][0] == [32000, 0, 65535, 3500] - bulb.set_color.reset_mock() - await hass.services.async_call( DOMAIN, "set_state", From abc79a9f1c32580d39f110ab5fa76fee1db55487 Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Mon, 9 Dec 2024 22:53:17 +0100 Subject: [PATCH 0410/1198] Bump reolink-aio to 0.11.5 (#132757) --- homeassistant/components/reolink/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/reolink/manifest.json b/homeassistant/components/reolink/manifest.json index 72bf21ccfd9..7aced174e30 100644 --- a/homeassistant/components/reolink/manifest.json +++ b/homeassistant/components/reolink/manifest.json @@ -18,5 +18,5 @@ "documentation": "https://www.home-assistant.io/integrations/reolink", "iot_class": "local_push", "loggers": ["reolink_aio"], - "requirements": ["reolink-aio==0.11.4"] + "requirements": ["reolink-aio==0.11.5"] } diff --git a/requirements_all.txt b/requirements_all.txt index 87baa60f52a..b14d35e09a6 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2559,7 +2559,7 @@ renault-api==0.2.8 renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.11.4 +reolink-aio==0.11.5 # homeassistant.components.idteck_prox rfk101py==0.0.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a2b73f7e272..63eda9070b3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2053,7 +2053,7 @@ renault-api==0.2.8 renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.11.4 +reolink-aio==0.11.5 # homeassistant.components.rflink rflink==0.0.66 From dcbedb5ae572bd78c8241463a57ad6df0e607955 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 22:55:06 +0100 Subject: [PATCH 0411/1198] Migrate smartthings lights to use Kelvin (#132699) --- homeassistant/components/smartthings/light.py | 20 ++++++++----------- 1 file changed, 8 insertions(+), 12 deletions(-) 
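Several light-platform patches in this series (deconz, limitlessled, hive, and the smartthings change that follows) replace mired-based color temperature attributes with Kelvin ones. The two units are reciprocals scaled by 1,000,000, so the migrations only wrap existing values in conversion helpers. The sketch below illustrates that relationship; the helper names and the rounding behaviour are illustrative assumptions, not the exact homeassistant.util.color API.

def mired_to_kelvin(mired: float) -> int:
    # 1 mired = 1,000,000 / K, so the conversion is its own inverse.
    return round(1_000_000 / mired)


def kelvin_to_mired(kelvin: float) -> int:
    return round(1_000_000 / kelvin)


# Nominal bounds used by the hive patch later in this series:
# 370 mireds ~ 2700 K (warmest), 153 mireds ~ 6500 K (coldest).
print(mired_to_kelvin(370), mired_to_kelvin(153))  # 2703 6536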
diff --git a/homeassistant/components/smartthings/light.py b/homeassistant/components/smartthings/light.py index fd4b87f0ee7..eb7c9af246b 100644 --- a/homeassistant/components/smartthings/light.py +++ b/homeassistant/components/smartthings/light.py @@ -10,7 +10,7 @@ from pysmartthings import Capability from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ATTR_TRANSITION, ColorMode, @@ -21,7 +21,6 @@ from homeassistant.components.light import ( from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -import homeassistant.util.color as color_util from .const import DATA_BROKERS, DOMAIN from .entity import SmartThingsEntity @@ -79,12 +78,12 @@ class SmartThingsLight(SmartThingsEntity, LightEntity): # SmartThings does not expose this attribute, instead it's # implemented within each device-type handler. This value is the # lowest kelvin found supported across 20+ handlers. - _attr_max_mireds = 500 # 2000K + _attr_min_color_temp_kelvin = 2000 # 500 mireds # SmartThings does not expose this attribute, instead it's # implemented within each device-type handler. This value is the # highest kelvin found supported across 20+ handlers. - _attr_min_mireds = 111 # 9000K + _attr_max_color_temp_kelvin = 9000 # 111 mireds def __init__(self, device): """Initialize a SmartThingsLight.""" @@ -122,8 +121,8 @@ class SmartThingsLight(SmartThingsEntity, LightEntity): """Turn the light on.""" tasks = [] # Color temperature - if ATTR_COLOR_TEMP in kwargs: - tasks.append(self.async_set_color_temp(kwargs[ATTR_COLOR_TEMP])) + if ATTR_COLOR_TEMP_KELVIN in kwargs: + tasks.append(self.async_set_color_temp(kwargs[ATTR_COLOR_TEMP_KELVIN])) # Color if ATTR_HS_COLOR in kwargs: tasks.append(self.async_set_color(kwargs[ATTR_HS_COLOR])) @@ -164,9 +163,7 @@ class SmartThingsLight(SmartThingsEntity, LightEntity): ) # Color Temperature if ColorMode.COLOR_TEMP in self._attr_supported_color_modes: - self._attr_color_temp = color_util.color_temperature_kelvin_to_mired( - self._device.status.color_temperature - ) + self._attr_color_temp_kelvin = self._device.status.color_temperature # Color if ColorMode.HS in self._attr_supported_color_modes: self._attr_hs_color = ( @@ -181,10 +178,9 @@ class SmartThingsLight(SmartThingsEntity, LightEntity): saturation = max(min(float(hs_color[1]), 100.0), 0.0) await self._device.set_color(hue, saturation, set_status=True) - async def async_set_color_temp(self, value: float): + async def async_set_color_temp(self, value: int): """Set the color temperature of the device.""" - kelvin = color_util.color_temperature_mired_to_kelvin(value) - kelvin = max(min(kelvin, 30000), 1) + kelvin = max(min(value, 30000), 1) await self._device.set_color_temperature(kelvin, set_status=True) async def async_set_level(self, brightness: int, transition: int): From 4cb23ce56248d5be3b62d36982c2ba62a5999ea5 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 22:59:21 +0100 Subject: [PATCH 0412/1198] Migrate hive lights to use Kelvin (#132686) --- homeassistant/components/hive/light.py | 27 +++++++++++++++++--------- 1 file changed, 18 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/hive/light.py b/homeassistant/components/hive/light.py index 10de781bf1d..b510569eb47 100644 --- a/homeassistant/components/hive/light.py +++ b/homeassistant/components/hive/light.py @@ -7,7 +7,7 @@ 
from typing import TYPE_CHECKING, Any from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ColorMode, LightEntity, @@ -43,6 +43,9 @@ async def async_setup_entry( class HiveDeviceLight(HiveEntity, LightEntity): """Hive Active Light Device.""" + _attr_min_color_temp_kelvin = 2700 # 370 Mireds + _attr_max_color_temp_kelvin = 6500 # 153 Mireds + def __init__(self, hive: Hive, hive_device: dict[str, Any]) -> None: """Initialise hive light.""" super().__init__(hive, hive_device) @@ -56,9 +59,6 @@ class HiveDeviceLight(HiveEntity, LightEntity): self._attr_supported_color_modes = {ColorMode.COLOR_TEMP, ColorMode.HS} self._attr_color_mode = ColorMode.UNKNOWN - self._attr_min_mireds = 153 - self._attr_max_mireds = 370 - @refresh_system async def async_turn_on(self, **kwargs: Any) -> None: """Instruct the light to turn on.""" @@ -71,9 +71,8 @@ class HiveDeviceLight(HiveEntity, LightEntity): new_brightness = int(round(percentage_brightness / 5.0) * 5.0) if new_brightness == 0: new_brightness = 5 - if ATTR_COLOR_TEMP in kwargs: - tmp_new_color_temp = kwargs[ATTR_COLOR_TEMP] - new_color_temp = round(1000000 / tmp_new_color_temp) + if ATTR_COLOR_TEMP_KELVIN in kwargs: + new_color_temp = kwargs[ATTR_COLOR_TEMP_KELVIN] if ATTR_HS_COLOR in kwargs: get_new_color = kwargs[ATTR_HS_COLOR] hue = int(get_new_color[0]) @@ -102,12 +101,22 @@ class HiveDeviceLight(HiveEntity, LightEntity): self._attr_is_on = self.device["status"]["state"] self._attr_brightness = self.device["status"]["brightness"] if self.device["hiveType"] == "tuneablelight": - self._attr_color_temp = self.device["status"].get("color_temp") + color_temp = self.device["status"].get("color_temp") + self._attr_color_temp_kelvin = ( + None + if color_temp is None + else color_util.color_temperature_mired_to_kelvin(color_temp) + ) + if self.device["hiveType"] == "colourtuneablelight": if self.device["status"]["mode"] == "COLOUR": rgb = self.device["status"]["hs_color"] self._attr_hs_color = color_util.color_RGB_to_hs(*rgb) self._attr_color_mode = ColorMode.HS else: - self._attr_color_temp = self.device["status"].get("color_temp") + self._attr_color_temp_kelvin = ( + None + if color_temp is None + else color_util.color_temperature_mired_to_kelvin(color_temp) + ) self._attr_color_mode = ColorMode.COLOR_TEMP From 772b047d44ffe9b8d37f971672c08580e86522bc Mon Sep 17 00:00:00 2001 From: Richard Kroegel <42204099+rikroe@users.noreply.github.com> Date: Mon, 9 Dec 2024 23:00:38 +0100 Subject: [PATCH 0413/1198] Change BMW reauth/reconfigure to only allow password (#132767) Co-authored-by: Joost Lekkerkerker --- .../bmw_connected_drive/config_flow.py | 35 ++++++- .../bmw_connected_drive/strings.json | 10 +- .../bmw_connected_drive/test_config_flow.py | 94 ++----------------- 3 files changed, 45 insertions(+), 94 deletions(-) diff --git a/homeassistant/components/bmw_connected_drive/config_flow.py b/homeassistant/components/bmw_connected_drive/config_flow.py index 8831895c71e..95fec101c9d 100644 --- a/homeassistant/components/bmw_connected_drive/config_flow.py +++ b/homeassistant/components/bmw_connected_drive/config_flow.py @@ -53,6 +53,12 @@ DATA_SCHEMA = vol.Schema( }, extra=vol.REMOVE_EXTRA, ) +RECONFIGURE_SCHEMA = vol.Schema( + { + vol.Required(CONF_PASSWORD): str, + }, + extra=vol.REMOVE_EXTRA, +) CAPTCHA_SCHEMA = vol.Schema( { vol.Required(CONF_CAPTCHA_TOKEN): str, @@ -111,9 +117,8 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN): unique_id = 
f"{user_input[CONF_REGION]}-{user_input[CONF_USERNAME]}" await self.async_set_unique_id(unique_id) - if self.source in {SOURCE_REAUTH, SOURCE_RECONFIGURE}: - self._abort_if_unique_id_mismatch(reason="account_mismatch") - else: + # Unique ID cannot change for reauth/reconfigure + if self.source not in {SOURCE_REAUTH, SOURCE_RECONFIGURE}: self._abort_if_unique_id_configured() # Store user input for later use @@ -166,19 +171,39 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="user", data_schema=schema, errors=errors) + async def async_step_change_password( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Show the change password step.""" + existing_data = ( + dict(self._existing_entry_data) if self._existing_entry_data else {} + ) + + if user_input is not None: + return await self.async_step_user(existing_data | user_input) + + return self.async_show_form( + step_id="change_password", + data_schema=RECONFIGURE_SCHEMA, + description_placeholders={ + CONF_USERNAME: existing_data[CONF_USERNAME], + CONF_REGION: existing_data[CONF_REGION], + }, + ) + async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" self._existing_entry_data = entry_data - return await self.async_step_user() + return await self.async_step_change_password() async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a reconfiguration flow initialized by the user.""" self._existing_entry_data = self._get_reconfigure_entry().data - return await self.async_step_user() + return await self.async_step_change_password() async def async_step_captcha( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/bmw_connected_drive/strings.json b/homeassistant/components/bmw_connected_drive/strings.json index 8078971acd1..93abce5d73f 100644 --- a/homeassistant/components/bmw_connected_drive/strings.json +++ b/homeassistant/components/bmw_connected_drive/strings.json @@ -2,6 +2,7 @@ "config": { "step": { "user": { + "description": "Enter your MyBMW/MINI Connected credentials.", "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", @@ -17,6 +18,12 @@ "data_description": { "captcha_token": "One-time token retrieved from the captcha challenge." 
} + }, + "change_password": { + "description": "Update your MyBMW/MINI Connected password for account `{username}` in region `{region}`.", + "data": { + "password": "[%key:common::config_flow::data::password%]" + } } }, "error": { @@ -27,8 +34,7 @@ "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", - "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", - "account_mismatch": "Username and region are not allowed to change" + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" } }, "options": { diff --git a/tests/components/bmw_connected_drive/test_config_flow.py b/tests/components/bmw_connected_drive/test_config_flow.py index 8fa9d9be22b..9c124261392 100644 --- a/tests/components/bmw_connected_drive/test_config_flow.py +++ b/tests/components/bmw_connected_drive/test_config_flow.py @@ -15,7 +15,7 @@ from homeassistant.components.bmw_connected_drive.const import ( CONF_READ_ONLY, CONF_REFRESH_TOKEN, ) -from homeassistant.const import CONF_PASSWORD, CONF_REGION, CONF_USERNAME +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -224,19 +224,11 @@ async def test_reauth(hass: HomeAssistant) -> None: result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - - suggested_values = { - key: key.description.get("suggested_value") - for key in result["data_schema"].schema - } - assert suggested_values[CONF_USERNAME] == FIXTURE_USER_INPUT[CONF_USERNAME] - assert suggested_values[CONF_PASSWORD] == wrong_password - assert suggested_values[CONF_REGION] == FIXTURE_USER_INPUT[CONF_REGION] + assert result["step_id"] == "change_password" + assert set(result["data_schema"].schema) == {CONF_PASSWORD} result = await hass.config_entries.flow.async_configure( - result["flow_id"], deepcopy(FIXTURE_USER_INPUT) + result["flow_id"], {CONF_PASSWORD: FIXTURE_USER_INPUT[CONF_PASSWORD]} ) await hass.async_block_till_done() @@ -254,41 +246,6 @@ async def test_reauth(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 2 -async def test_reauth_unique_id_abort(hass: HomeAssistant) -> None: - """Test aborting the reauth form if unique_id changes.""" - with patch( - "bimmer_connected.api.authentication.MyBMWAuthentication.login", - side_effect=login_sideeffect, - autospec=True, - ): - wrong_password = "wrong" - - config_entry_with_wrong_password = deepcopy(FIXTURE_CONFIG_ENTRY) - config_entry_with_wrong_password["data"][CONF_PASSWORD] = wrong_password - - config_entry = MockConfigEntry(**config_entry_with_wrong_password) - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.data == config_entry_with_wrong_password["data"] - - result = await config_entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], {**FIXTURE_USER_INPUT, CONF_REGION: "north_america"} - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "account_mismatch" - assert config_entry.data == 
config_entry_with_wrong_password["data"] - - async def test_reconfigure(hass: HomeAssistant) -> None: """Test the reconfiguration form.""" with patch( @@ -304,19 +261,11 @@ async def test_reconfigure(hass: HomeAssistant) -> None: result = await config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - - suggested_values = { - key: key.description.get("suggested_value") - for key in result["data_schema"].schema - } - assert suggested_values[CONF_USERNAME] == FIXTURE_USER_INPUT[CONF_USERNAME] - assert suggested_values[CONF_PASSWORD] == FIXTURE_USER_INPUT[CONF_PASSWORD] - assert suggested_values[CONF_REGION] == FIXTURE_USER_INPUT[CONF_REGION] + assert result["step_id"] == "change_password" + assert set(result["data_schema"].schema) == {CONF_PASSWORD} result = await hass.config_entries.flow.async_configure( - result["flow_id"], FIXTURE_USER_INPUT + result["flow_id"], {CONF_PASSWORD: FIXTURE_USER_INPUT[CONF_PASSWORD]} ) await hass.async_block_till_done() @@ -330,32 +279,3 @@ async def test_reconfigure(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reconfigure_successful" assert config_entry.data == FIXTURE_COMPLETE_ENTRY - - -async def test_reconfigure_unique_id_abort(hass: HomeAssistant) -> None: - """Test aborting the reconfiguration form if unique_id changes.""" - with patch( - "bimmer_connected.api.authentication.MyBMWAuthentication.login", - side_effect=login_sideeffect, - autospec=True, - ): - config_entry = MockConfigEntry(**FIXTURE_CONFIG_ENTRY) - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - result = await config_entry.start_reconfigure_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {**FIXTURE_USER_INPUT, CONF_USERNAME: "somebody@email.com"}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "account_mismatch" - assert config_entry.data == FIXTURE_COMPLETE_ENTRY From f2500e5a3226558411c25a01432e7e72cf71a666 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 23:03:55 +0100 Subject: [PATCH 0414/1198] Remove deprecated supported features warning in MediaPlayer (#132365) --- .../components/media_player/__init__.py | 51 +++++++------------ homeassistant/helpers/entity.py | 27 +--------- tests/components/media_player/test_init.py | 22 +------- tests/helpers/test_entity.py | 26 ---------- 4 files changed, 20 insertions(+), 106 deletions(-) diff --git a/homeassistant/components/media_player/__init__.py b/homeassistant/components/media_player/__init__.py index 291b1ec1e2a..e7bbe1d19bd 100644 --- a/homeassistant/components/media_player/__init__.py +++ b/homeassistant/components/media_player/__init__.py @@ -773,19 +773,6 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): """Flag media player features that are supported.""" return self._attr_supported_features - @property - def supported_features_compat(self) -> MediaPlayerEntityFeature: - """Return the supported features as MediaPlayerEntityFeature. - - Remove this compatibility shim in 2025.1 or later. 
- """ - features = self.supported_features - if type(features) is int: # noqa: E721 - new_features = MediaPlayerEntityFeature(features) - self._report_deprecated_supported_features_values(new_features) - return new_features - return features - def turn_on(self) -> None: """Turn the media player on.""" raise NotImplementedError @@ -925,87 +912,85 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): @property def support_play(self) -> bool: """Boolean if play is supported.""" - return MediaPlayerEntityFeature.PLAY in self.supported_features_compat + return MediaPlayerEntityFeature.PLAY in self.supported_features @final @property def support_pause(self) -> bool: """Boolean if pause is supported.""" - return MediaPlayerEntityFeature.PAUSE in self.supported_features_compat + return MediaPlayerEntityFeature.PAUSE in self.supported_features @final @property def support_stop(self) -> bool: """Boolean if stop is supported.""" - return MediaPlayerEntityFeature.STOP in self.supported_features_compat + return MediaPlayerEntityFeature.STOP in self.supported_features @final @property def support_seek(self) -> bool: """Boolean if seek is supported.""" - return MediaPlayerEntityFeature.SEEK in self.supported_features_compat + return MediaPlayerEntityFeature.SEEK in self.supported_features @final @property def support_volume_set(self) -> bool: """Boolean if setting volume is supported.""" - return MediaPlayerEntityFeature.VOLUME_SET in self.supported_features_compat + return MediaPlayerEntityFeature.VOLUME_SET in self.supported_features @final @property def support_volume_mute(self) -> bool: """Boolean if muting volume is supported.""" - return MediaPlayerEntityFeature.VOLUME_MUTE in self.supported_features_compat + return MediaPlayerEntityFeature.VOLUME_MUTE in self.supported_features @final @property def support_previous_track(self) -> bool: """Boolean if previous track command supported.""" - return MediaPlayerEntityFeature.PREVIOUS_TRACK in self.supported_features_compat + return MediaPlayerEntityFeature.PREVIOUS_TRACK in self.supported_features @final @property def support_next_track(self) -> bool: """Boolean if next track command supported.""" - return MediaPlayerEntityFeature.NEXT_TRACK in self.supported_features_compat + return MediaPlayerEntityFeature.NEXT_TRACK in self.supported_features @final @property def support_play_media(self) -> bool: """Boolean if play media command supported.""" - return MediaPlayerEntityFeature.PLAY_MEDIA in self.supported_features_compat + return MediaPlayerEntityFeature.PLAY_MEDIA in self.supported_features @final @property def support_select_source(self) -> bool: """Boolean if select source command supported.""" - return MediaPlayerEntityFeature.SELECT_SOURCE in self.supported_features_compat + return MediaPlayerEntityFeature.SELECT_SOURCE in self.supported_features @final @property def support_select_sound_mode(self) -> bool: """Boolean if select sound mode command supported.""" - return ( - MediaPlayerEntityFeature.SELECT_SOUND_MODE in self.supported_features_compat - ) + return MediaPlayerEntityFeature.SELECT_SOUND_MODE in self.supported_features @final @property def support_clear_playlist(self) -> bool: """Boolean if clear playlist command supported.""" - return MediaPlayerEntityFeature.CLEAR_PLAYLIST in self.supported_features_compat + return MediaPlayerEntityFeature.CLEAR_PLAYLIST in self.supported_features @final @property def support_shuffle_set(self) -> bool: """Boolean if shuffle is supported.""" - return 
MediaPlayerEntityFeature.SHUFFLE_SET in self.supported_features_compat + return MediaPlayerEntityFeature.SHUFFLE_SET in self.supported_features @final @property def support_grouping(self) -> bool: """Boolean if player grouping is supported.""" - return MediaPlayerEntityFeature.GROUPING in self.supported_features_compat + return MediaPlayerEntityFeature.GROUPING in self.supported_features async def async_toggle(self) -> None: """Toggle the power on the media player.""" @@ -1034,7 +1019,7 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): if ( self.volume_level is not None and self.volume_level < 1 - and MediaPlayerEntityFeature.VOLUME_SET in self.supported_features_compat + and MediaPlayerEntityFeature.VOLUME_SET in self.supported_features ): await self.async_set_volume_level( min(1, self.volume_level + self.volume_step) @@ -1052,7 +1037,7 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): if ( self.volume_level is not None and self.volume_level > 0 - and MediaPlayerEntityFeature.VOLUME_SET in self.supported_features_compat + and MediaPlayerEntityFeature.VOLUME_SET in self.supported_features ): await self.async_set_volume_level( max(0, self.volume_level - self.volume_step) @@ -1095,7 +1080,7 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): def capability_attributes(self) -> dict[str, Any]: """Return capability attributes.""" data: dict[str, Any] = {} - supported_features = self.supported_features_compat + supported_features = self.supported_features if ( source_list := self.source_list @@ -1301,7 +1286,7 @@ async def websocket_browse_media( connection.send_error(msg["id"], "entity_not_found", "Entity not found") return - if MediaPlayerEntityFeature.BROWSE_MEDIA not in player.supported_features_compat: + if MediaPlayerEntityFeature.BROWSE_MEDIA not in player.supported_features: connection.send_message( websocket_api.error_message( msg["id"], ERR_NOT_SUPPORTED, "Player does not support browsing media" diff --git a/homeassistant/helpers/entity.py b/homeassistant/helpers/entity.py index 19076c4edc0..91845cdf521 100644 --- a/homeassistant/helpers/entity.py +++ b/homeassistant/helpers/entity.py @@ -7,7 +7,7 @@ import asyncio from collections import deque from collections.abc import Callable, Coroutine, Iterable, Mapping import dataclasses -from enum import Enum, IntFlag, auto +from enum import Enum, auto import functools as ft import logging import math @@ -1639,31 +1639,6 @@ class Entity( self.hass, integration_domain=platform_name, module=type(self).__module__ ) - @callback - def _report_deprecated_supported_features_values( - self, replacement: IntFlag - ) -> None: - """Report deprecated supported features values.""" - if self._deprecated_supported_features_reported is True: - return - self._deprecated_supported_features_reported = True - report_issue = self._suggest_report_issue() - report_issue += ( - " and reference " - "https://developers.home-assistant.io/blog/2023/12/28/support-feature-magic-numbers-deprecation" - ) - _LOGGER.warning( - ( - "Entity %s (%s) is using deprecated supported features" - " values which will be removed in HA Core 2025.1. 
Instead it should use" - " %s, please %s" - ), - self.entity_id, - type(self), - repr(replacement), - report_issue, - ) - class ToggleEntityDescription(EntityDescription, frozen_or_thawed=True): """A class that describes toggle entities.""" diff --git a/tests/components/media_player/test_init.py b/tests/components/media_player/test_init.py index a45fa5b6668..7c64f846df1 100644 --- a/tests/components/media_player/test_init.py +++ b/tests/components/media_player/test_init.py @@ -129,7 +129,7 @@ def test_support_properties(property_suffix: str) -> None: entity3 = MediaPlayerEntity() entity3._attr_supported_features = feature entity4 = MediaPlayerEntity() - entity4._attr_supported_features = all_features - feature + entity4._attr_supported_features = all_features & ~feature assert getattr(entity1, f"support_{property_suffix}") is False assert getattr(entity2, f"support_{property_suffix}") is True @@ -447,23 +447,3 @@ async def test_get_async_get_browse_image_quoting( url = player.get_browse_image_url("album", media_content_id) await client.get(url) mock_browse_image.assert_called_with("album", media_content_id, None) - - -def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: - """Test deprecated supported features ints.""" - - class MockMediaPlayerEntity(MediaPlayerEntity): - @property - def supported_features(self) -> int: - """Return supported features.""" - return 1 - - entity = MockMediaPlayerEntity() - assert entity.supported_features_compat is MediaPlayerEntityFeature(1) - assert "MockMediaPlayerEntity" in caplog.text - assert "is using deprecated supported features values" in caplog.text - assert "Instead it should use" in caplog.text - assert "MediaPlayerEntityFeature.PAUSE" in caplog.text - caplog.clear() - assert entity.supported_features_compat is MediaPlayerEntityFeature(1) - assert "is using deprecated supported features values" not in caplog.text diff --git a/tests/helpers/test_entity.py b/tests/helpers/test_entity.py index 2bf441f70fd..dc579ab6e8d 100644 --- a/tests/helpers/test_entity.py +++ b/tests/helpers/test_entity.py @@ -4,7 +4,6 @@ import asyncio from collections.abc import Iterable import dataclasses from datetime import timedelta -from enum import IntFlag import logging import threading from typing import Any @@ -2486,31 +2485,6 @@ async def test_cached_entity_property_override(hass: HomeAssistant) -> None: return "🤡" -async def test_entity_report_deprecated_supported_features_values( - caplog: pytest.LogCaptureFixture, -) -> None: - """Test reporting deprecated supported feature values only happens once.""" - ent = entity.Entity() - - class MockEntityFeatures(IntFlag): - VALUE1 = 1 - VALUE2 = 2 - - ent._report_deprecated_supported_features_values(MockEntityFeatures(2)) - assert ( - "is using deprecated supported features values which will be removed" - in caplog.text - ) - assert "MockEntityFeatures.VALUE2" in caplog.text - - caplog.clear() - ent._report_deprecated_supported_features_values(MockEntityFeatures(2)) - assert ( - "is using deprecated supported features values which will be removed" - not in caplog.text - ) - - async def test_remove_entity_registry( hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: From be34d302df5bcaf7dca1d916c472b989ecb449cf Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 23:04:32 +0100 Subject: [PATCH 0415/1198] Use local ATTR_KELVIN constant in yeelight (#132731) --- homeassistant/components/yeelight/light.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/homeassistant/components/yeelight/light.py b/homeassistant/components/yeelight/light.py index d0d53510859..7f705da68d1 100644 --- a/homeassistant/components/yeelight/light.py +++ b/homeassistant/components/yeelight/light.py @@ -20,7 +20,6 @@ from homeassistant.components.light import ( ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, - ATTR_KELVIN, ATTR_RGB_COLOR, ATTR_TRANSITION, FLASH_LONG, @@ -71,6 +70,7 @@ from .entity import YeelightEntity _LOGGER = logging.getLogger(__name__) ATTR_MINUTES = "minutes" +ATTR_KELVIN = "kelvin" SERVICE_SET_MODE = "set_mode" SERVICE_SET_MUSIC_MODE = "set_music_mode" From f177336025bf47334696186497563359984bcd59 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 9 Dec 2024 23:08:01 +0100 Subject: [PATCH 0416/1198] Add missing `last_reported_timestamp` to `LazyState` (#132761) followup to #132752 --- .../components/recorder/models/state.py | 8 ++++ tests/components/recorder/test_models.py | 37 +++++++++++++++++++ 2 files changed, 45 insertions(+) diff --git a/homeassistant/components/recorder/models/state.py b/homeassistant/components/recorder/models/state.py index f5e49881b8f..fbf73e75025 100644 --- a/homeassistant/components/recorder/models/state.py +++ b/homeassistant/components/recorder/models/state.py @@ -111,6 +111,14 @@ class LazyState(State): assert ts is not None return ts + @cached_property + def last_reported_timestamp(self) -> float: # type: ignore[override] + """Last reported timestamp.""" + ts = self._last_reported_ts or self._last_updated_ts + if TYPE_CHECKING: + assert ts is not None + return ts + def as_dict(self) -> dict[str, Any]: # type: ignore[override] """Return a dict representation of the LazyState. diff --git a/tests/components/recorder/test_models.py b/tests/components/recorder/test_models.py index a0703f1f2c5..b2894883ff2 100644 --- a/tests/components/recorder/test_models.py +++ b/tests/components/recorder/test_models.py @@ -325,6 +325,7 @@ async def test_lazy_state_handles_different_last_updated_and_last_changed( state="off", attributes='{"shared":true}', last_updated_ts=now.timestamp(), + last_reported_ts=now.timestamp(), last_changed_ts=(now - timedelta(seconds=60)).timestamp(), ) lstate = LazyState( @@ -339,6 +340,7 @@ async def test_lazy_state_handles_different_last_updated_and_last_changed( } assert lstate.last_updated.timestamp() == row.last_updated_ts assert lstate.last_changed.timestamp() == row.last_changed_ts + assert lstate.last_reported.timestamp() == row.last_updated_ts assert lstate.as_dict() == { "attributes": {"shared": True}, "entity_id": "sensor.valid", @@ -348,6 +350,7 @@ async def test_lazy_state_handles_different_last_updated_and_last_changed( } assert lstate.last_changed_timestamp == row.last_changed_ts assert lstate.last_updated_timestamp == row.last_updated_ts + assert lstate.last_reported_timestamp == row.last_updated_ts async def test_lazy_state_handles_same_last_updated_and_last_changed( @@ -361,6 +364,7 @@ async def test_lazy_state_handles_same_last_updated_and_last_changed( attributes='{"shared":true}', last_updated_ts=now.timestamp(), last_changed_ts=now.timestamp(), + last_reported_ts=None, ) lstate = LazyState( row, {}, None, row.entity_id, row.state, row.last_updated_ts, False @@ -374,6 +378,7 @@ async def test_lazy_state_handles_same_last_updated_and_last_changed( } assert lstate.last_updated.timestamp() == row.last_updated_ts assert lstate.last_changed.timestamp() == row.last_changed_ts + assert lstate.last_reported.timestamp() == row.last_updated_ts 
assert lstate.as_dict() == { "attributes": {"shared": True}, "entity_id": "sensor.valid", @@ -383,3 +388,35 @@ async def test_lazy_state_handles_same_last_updated_and_last_changed( } assert lstate.last_changed_timestamp == row.last_changed_ts assert lstate.last_updated_timestamp == row.last_updated_ts + assert lstate.last_reported_timestamp == row.last_updated_ts + + +async def test_lazy_state_handles_different_last_reported( + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that the LazyState handles last_reported different from last_updated.""" + now = datetime(2021, 6, 12, 3, 4, 1, 323, tzinfo=dt_util.UTC) + row = PropertyMock( + entity_id="sensor.valid", + state="off", + attributes='{"shared":true}', + last_updated_ts=(now - timedelta(seconds=60)).timestamp(), + last_reported_ts=now.timestamp(), + last_changed_ts=(now - timedelta(seconds=60)).timestamp(), + ) + lstate = LazyState( + row, {}, None, row.entity_id, row.state, row.last_updated_ts, False + ) + assert lstate.as_dict() == { + "attributes": {"shared": True}, + "entity_id": "sensor.valid", + "last_changed": "2021-06-12T03:03:01.000323+00:00", + "last_updated": "2021-06-12T03:03:01.000323+00:00", + "state": "off", + } + assert lstate.last_updated.timestamp() == row.last_updated_ts + assert lstate.last_changed.timestamp() == row.last_changed_ts + assert lstate.last_reported.timestamp() == row.last_reported_ts + assert lstate.last_changed_timestamp == row.last_changed_ts + assert lstate.last_updated_timestamp == row.last_updated_ts + assert lstate.last_reported_timestamp == row.last_reported_ts From 1929b368fe08c1037f97c2cdd67acd3db1292008 Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Tue, 10 Dec 2024 08:11:23 +1000 Subject: [PATCH 0417/1198] Remove legacy behavior from Teslemetry (#132760) --- .../components/teslemetry/coordinator.py | 20 ------ .../teslemetry/fixtures/products.json | 2 +- .../teslemetry/fixtures/vehicle_data.json | 2 +- .../teslemetry/fixtures/vehicle_data_alt.json | 2 +- .../snapshots/test_binary_sensors.ambr | 46 +++++++------- .../teslemetry/snapshots/test_button.ambr | 12 ++-- .../teslemetry/snapshots/test_climate.ambr | 16 ++--- .../teslemetry/snapshots/test_cover.ambr | 30 ++++----- .../snapshots/test_device_tracker.ambr | 4 +- .../teslemetry/snapshots/test_init.ambr | 8 +-- .../teslemetry/snapshots/test_lock.ambr | 4 +- .../snapshots/test_media_player.ambr | 4 +- .../teslemetry/snapshots/test_number.ambr | 4 +- .../teslemetry/snapshots/test_select.ambr | 16 ++--- .../teslemetry/snapshots/test_sensor.ambr | 60 +++++++++--------- .../teslemetry/snapshots/test_switch.ambr | 12 ++-- .../teslemetry/snapshots/test_update.ambr | 4 +- tests/components/teslemetry/test_init.py | 62 +------------------ 18 files changed, 114 insertions(+), 194 deletions(-) diff --git a/homeassistant/components/teslemetry/coordinator.py b/homeassistant/components/teslemetry/coordinator.py index f37d0613de9..63f1bc27c5f 100644 --- a/homeassistant/components/teslemetry/coordinator.py +++ b/homeassistant/components/teslemetry/coordinator.py @@ -60,8 +60,6 @@ class TeslemetryVehicleDataCoordinator(DataUpdateCoordinator[dict[str, Any]]): async def _async_update_data(self) -> dict[str, Any]: """Update vehicle data using Teslemetry API.""" - self.update_interval = VEHICLE_INTERVAL - try: if self.data["state"] != TeslemetryState.ONLINE: response = await self.api.vehicle() @@ -85,24 +83,6 @@ class TeslemetryVehicleDataCoordinator(DataUpdateCoordinator[dict[str, Any]]): self.updated_once = True - if self.api.pre2021 and data["state"] 
== TeslemetryState.ONLINE: - # Handle pre-2021 vehicles which cannot sleep by themselves - if ( - data["charge_state"].get("charging_state") == "Charging" - or data["vehicle_state"].get("is_user_present") - or data["vehicle_state"].get("sentry_mode") - ): - # Vehicle is active, reset timer - self.last_active = datetime.now() - else: - elapsed = datetime.now() - self.last_active - if elapsed > timedelta(minutes=20): - # Vehicle didn't sleep, try again in 15 minutes - self.last_active = datetime.now() - elif elapsed > timedelta(minutes=15): - # Let vehicle go to sleep now - self.update_interval = VEHICLE_WAIT - return flatten(data) diff --git a/tests/components/teslemetry/fixtures/products.json b/tests/components/teslemetry/fixtures/products.json index 8da921a33f4..56497a6d936 100644 --- a/tests/components/teslemetry/fixtures/products.json +++ b/tests/components/teslemetry/fixtures/products.json @@ -4,7 +4,7 @@ "id": 1234, "user_id": 1234, "vehicle_id": 1234, - "vin": "LRWXF7EK4KC700000", + "vin": "LRW3F7EK4NC700000", "color": null, "access_type": "OWNER", "display_name": "Test", diff --git a/tests/components/teslemetry/fixtures/vehicle_data.json b/tests/components/teslemetry/fixtures/vehicle_data.json index d99bc8de5a8..fcfa0707b2c 100644 --- a/tests/components/teslemetry/fixtures/vehicle_data.json +++ b/tests/components/teslemetry/fixtures/vehicle_data.json @@ -3,7 +3,7 @@ "id": 1234, "user_id": 1234, "vehicle_id": 1234, - "vin": "LRWXF7EK4KC700000", + "vin": "LRW3F7EK4NC700000", "color": null, "access_type": "OWNER", "granular_access": { diff --git a/tests/components/teslemetry/fixtures/vehicle_data_alt.json b/tests/components/teslemetry/fixtures/vehicle_data_alt.json index 76416982eba..9a74508833a 100644 --- a/tests/components/teslemetry/fixtures/vehicle_data_alt.json +++ b/tests/components/teslemetry/fixtures/vehicle_data_alt.json @@ -3,7 +3,7 @@ "id": 1234, "user_id": 1234, "vehicle_id": 1234, - "vin": "LRWXF7EK4KC700000", + "vin": "LRW3F7EK4NC700000", "color": null, "access_type": "OWNER", "granular_access": { diff --git a/tests/components/teslemetry/snapshots/test_binary_sensors.ambr b/tests/components/teslemetry/snapshots/test_binary_sensors.ambr index 383db58b336..95330840109 100644 --- a/tests/components/teslemetry/snapshots/test_binary_sensors.ambr +++ b/tests/components/teslemetry/snapshots/test_binary_sensors.ambr @@ -212,7 +212,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_battery_heater_on', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_battery_heater_on', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_battery_heater_on', 'unit_of_measurement': None, }) # --- @@ -259,7 +259,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_cabin_overheat_protection_actively_cooling', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_cabin_overheat_protection_actively_cooling', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_cabin_overheat_protection_actively_cooling', 'unit_of_measurement': None, }) # --- @@ -306,7 +306,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_conn_charge_cable', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_conn_charge_cable', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_conn_charge_cable', 'unit_of_measurement': None, }) # --- @@ -353,7 +353,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_charger_phases', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charger_phases', + 'unique_id': 
'LRW3F7EK4NC700000-charge_state_charger_phases', 'unit_of_measurement': None, }) # --- @@ -399,7 +399,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_dashcam_state', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_dashcam_state', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_dashcam_state', 'unit_of_measurement': None, }) # --- @@ -446,7 +446,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_df', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_df', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_df', 'unit_of_measurement': None, }) # --- @@ -493,7 +493,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_fd_window', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_fd_window', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_fd_window', 'unit_of_measurement': None, }) # --- @@ -540,7 +540,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_pf', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_pf', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_pf', 'unit_of_measurement': None, }) # --- @@ -587,7 +587,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_fp_window', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_fp_window', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_fp_window', 'unit_of_measurement': None, }) # --- @@ -634,7 +634,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_is_preconditioning', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_is_preconditioning', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_is_preconditioning', 'unit_of_measurement': None, }) # --- @@ -680,7 +680,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_preconditioning_enabled', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_preconditioning_enabled', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_preconditioning_enabled', 'unit_of_measurement': None, }) # --- @@ -726,7 +726,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_dr', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_dr', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_dr', 'unit_of_measurement': None, }) # --- @@ -773,7 +773,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_rd_window', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_rd_window', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_rd_window', 'unit_of_measurement': None, }) # --- @@ -820,7 +820,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_pr', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_pr', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_pr', 'unit_of_measurement': None, }) # --- @@ -867,7 +867,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_rp_window', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_rp_window', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_rp_window', 'unit_of_measurement': None, }) # --- @@ -914,7 +914,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_scheduled_charging_pending', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_scheduled_charging_pending', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_scheduled_charging_pending', 'unit_of_measurement': None, }) # --- @@ -960,7 +960,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'state', 
- 'unique_id': 'LRWXF7EK4KC700000-state', + 'unique_id': 'LRW3F7EK4NC700000-state', 'unit_of_measurement': None, }) # --- @@ -1007,7 +1007,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_soft_warning_fl', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_soft_warning_fl', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_tpms_soft_warning_fl', 'unit_of_measurement': None, }) # --- @@ -1054,7 +1054,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_soft_warning_fr', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_soft_warning_fr', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_tpms_soft_warning_fr', 'unit_of_measurement': None, }) # --- @@ -1101,7 +1101,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_soft_warning_rl', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_soft_warning_rl', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_tpms_soft_warning_rl', 'unit_of_measurement': None, }) # --- @@ -1148,7 +1148,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_soft_warning_rr', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_soft_warning_rr', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_tpms_soft_warning_rr', 'unit_of_measurement': None, }) # --- @@ -1195,7 +1195,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_trip_charging', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_trip_charging', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_trip_charging', 'unit_of_measurement': None, }) # --- @@ -1241,7 +1241,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_is_user_present', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_is_user_present', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_is_user_present', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/teslemetry/snapshots/test_button.ambr b/tests/components/teslemetry/snapshots/test_button.ambr index 84cf4c21078..6d3016186ae 100644 --- a/tests/components/teslemetry/snapshots/test_button.ambr +++ b/tests/components/teslemetry/snapshots/test_button.ambr @@ -28,7 +28,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'flash_lights', - 'unique_id': 'LRWXF7EK4KC700000-flash_lights', + 'unique_id': 'LRW3F7EK4NC700000-flash_lights', 'unit_of_measurement': None, }) # --- @@ -74,7 +74,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'homelink', - 'unique_id': 'LRWXF7EK4KC700000-homelink', + 'unique_id': 'LRW3F7EK4NC700000-homelink', 'unit_of_measurement': None, }) # --- @@ -120,7 +120,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'honk', - 'unique_id': 'LRWXF7EK4KC700000-honk', + 'unique_id': 'LRW3F7EK4NC700000-honk', 'unit_of_measurement': None, }) # --- @@ -166,7 +166,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'enable_keyless_driving', - 'unique_id': 'LRWXF7EK4KC700000-enable_keyless_driving', + 'unique_id': 'LRW3F7EK4NC700000-enable_keyless_driving', 'unit_of_measurement': None, }) # --- @@ -212,7 +212,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'boombox', - 'unique_id': 'LRWXF7EK4KC700000-boombox', + 'unique_id': 'LRW3F7EK4NC700000-boombox', 'unit_of_measurement': None, }) # --- @@ -258,7 +258,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wake', - 'unique_id': 
'LRWXF7EK4KC700000-wake', + 'unique_id': 'LRW3F7EK4NC700000-wake', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/teslemetry/snapshots/test_climate.ambr b/tests/components/teslemetry/snapshots/test_climate.ambr index 9d5e3827ffc..ab66ae7241d 100644 --- a/tests/components/teslemetry/snapshots/test_climate.ambr +++ b/tests/components/teslemetry/snapshots/test_climate.ambr @@ -43,7 +43,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'climate_state_cabin_overheat_protection', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_cabin_overheat_protection', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_cabin_overheat_protection', 'unit_of_measurement': None, }) # --- @@ -113,7 +113,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': , - 'unique_id': 'LRWXF7EK4KC700000-driver_temp', + 'unique_id': 'LRW3F7EK4NC700000-driver_temp', 'unit_of_measurement': None, }) # --- @@ -184,7 +184,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'climate_state_cabin_overheat_protection', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_cabin_overheat_protection', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_cabin_overheat_protection', 'unit_of_measurement': None, }) # --- @@ -253,7 +253,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': , - 'unique_id': 'LRWXF7EK4KC700000-driver_temp', + 'unique_id': 'LRW3F7EK4NC700000-driver_temp', 'unit_of_measurement': None, }) # --- @@ -322,7 +322,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_cabin_overheat_protection', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_cabin_overheat_protection', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_cabin_overheat_protection', 'unit_of_measurement': None, }) # --- @@ -361,7 +361,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': , - 'unique_id': 'LRWXF7EK4KC700000-driver_temp', + 'unique_id': 'LRW3F7EK4NC700000-driver_temp', 'unit_of_measurement': None, }) # --- @@ -403,7 +403,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'climate_state_cabin_overheat_protection', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_cabin_overheat_protection', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_cabin_overheat_protection', 'unit_of_measurement': None, }) # --- @@ -472,7 +472,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': , - 'unique_id': 'LRWXF7EK4KC700000-driver_temp', + 'unique_id': 'LRW3F7EK4NC700000-driver_temp', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/teslemetry/snapshots/test_cover.ambr b/tests/components/teslemetry/snapshots/test_cover.ambr index 7ffb9c4a1f9..24e1b02a5f8 100644 --- a/tests/components/teslemetry/snapshots/test_cover.ambr +++ b/tests/components/teslemetry/snapshots/test_cover.ambr @@ -28,7 +28,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'charge_state_charge_port_door_open', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_port_door_open', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_charge_port_door_open', 'unit_of_measurement': None, }) # --- @@ -76,7 +76,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'vehicle_state_ft', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_ft', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_ft', 'unit_of_measurement': None, }) # --- @@ -124,7 +124,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 
'vehicle_state_sun_roof_state', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sun_roof_state', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_sun_roof_state', 'unit_of_measurement': None, }) # --- @@ -172,7 +172,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'vehicle_state_rt', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_rt', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_rt', 'unit_of_measurement': None, }) # --- @@ -220,7 +220,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'windows', - 'unique_id': 'LRWXF7EK4KC700000-windows', + 'unique_id': 'LRW3F7EK4NC700000-windows', 'unit_of_measurement': None, }) # --- @@ -268,7 +268,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'charge_state_charge_port_door_open', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_port_door_open', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_charge_port_door_open', 'unit_of_measurement': None, }) # --- @@ -316,7 +316,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'vehicle_state_ft', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_ft', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_ft', 'unit_of_measurement': None, }) # --- @@ -364,7 +364,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'vehicle_state_sun_roof_state', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sun_roof_state', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_sun_roof_state', 'unit_of_measurement': None, }) # --- @@ -412,7 +412,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'vehicle_state_rt', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_rt', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_rt', 'unit_of_measurement': None, }) # --- @@ -460,7 +460,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'windows', - 'unique_id': 'LRWXF7EK4KC700000-windows', + 'unique_id': 'LRW3F7EK4NC700000-windows', 'unit_of_measurement': None, }) # --- @@ -508,7 +508,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_charge_port_door_open', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_port_door_open', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_charge_port_door_open', 'unit_of_measurement': None, }) # --- @@ -556,7 +556,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_ft', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_ft', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_ft', 'unit_of_measurement': None, }) # --- @@ -604,7 +604,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_sun_roof_state', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sun_roof_state', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_sun_roof_state', 'unit_of_measurement': None, }) # --- @@ -652,7 +652,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_rt', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_rt', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_rt', 'unit_of_measurement': None, }) # --- @@ -700,7 +700,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'windows', - 'unique_id': 'LRWXF7EK4KC700000-windows', + 'unique_id': 'LRW3F7EK4NC700000-windows', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/teslemetry/snapshots/test_device_tracker.ambr b/tests/components/teslemetry/snapshots/test_device_tracker.ambr index 6c18cdf75c6..2b1f3d6175c 100644 --- 
a/tests/components/teslemetry/snapshots/test_device_tracker.ambr +++ b/tests/components/teslemetry/snapshots/test_device_tracker.ambr @@ -28,7 +28,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'location', - 'unique_id': 'LRWXF7EK4KC700000-location', + 'unique_id': 'LRW3F7EK4NC700000-location', 'unit_of_measurement': None, }) # --- @@ -78,7 +78,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'route', - 'unique_id': 'LRWXF7EK4KC700000-route', + 'unique_id': 'LRW3F7EK4NC700000-route', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/teslemetry/snapshots/test_init.ambr b/tests/components/teslemetry/snapshots/test_init.ambr index e07f075b7d8..7d60ed82859 100644 --- a/tests/components/teslemetry/snapshots/test_init.ambr +++ b/tests/components/teslemetry/snapshots/test_init.ambr @@ -31,7 +31,7 @@ 'via_device_id': None, }) # --- -# name: test_devices[{('teslemetry', 'LRWXF7EK4KC700000')}] +# name: test_devices[{('teslemetry', 'LRW3F7EK4NC700000')}] DeviceRegistryEntrySnapshot({ 'area_id': None, 'config_entries': , @@ -45,19 +45,19 @@ 'identifiers': set({ tuple( 'teslemetry', - 'LRWXF7EK4KC700000', + 'LRW3F7EK4NC700000', ), }), 'is_new': False, 'labels': set({ }), 'manufacturer': 'Tesla', - 'model': 'Model X', + 'model': 'Model 3', 'model_id': None, 'name': 'Test', 'name_by_user': None, 'primary_config_entry': , - 'serial_number': 'LRWXF7EK4KC700000', + 'serial_number': 'LRW3F7EK4NC700000', 'suggested_area': None, 'sw_version': None, 'via_device_id': None, diff --git a/tests/components/teslemetry/snapshots/test_lock.ambr b/tests/components/teslemetry/snapshots/test_lock.ambr index deaabbae904..2130c4d9574 100644 --- a/tests/components/teslemetry/snapshots/test_lock.ambr +++ b/tests/components/teslemetry/snapshots/test_lock.ambr @@ -28,7 +28,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_charge_port_latch', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_port_latch', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_charge_port_latch', 'unit_of_measurement': None, }) # --- @@ -75,7 +75,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_locked', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_locked', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_locked', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/teslemetry/snapshots/test_media_player.ambr b/tests/components/teslemetry/snapshots/test_media_player.ambr index 06500437701..dc31a270b5e 100644 --- a/tests/components/teslemetry/snapshots/test_media_player.ambr +++ b/tests/components/teslemetry/snapshots/test_media_player.ambr @@ -29,7 +29,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'media', - 'unique_id': 'LRWXF7EK4KC700000-media', + 'unique_id': 'LRW3F7EK4NC700000-media', 'unit_of_measurement': None, }) # --- @@ -107,7 +107,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'media', - 'unique_id': 'LRWXF7EK4KC700000-media', + 'unique_id': 'LRW3F7EK4NC700000-media', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/teslemetry/snapshots/test_number.ambr b/tests/components/teslemetry/snapshots/test_number.ambr index f33b5e15d30..0f30daf635e 100644 --- a/tests/components/teslemetry/snapshots/test_number.ambr +++ b/tests/components/teslemetry/snapshots/test_number.ambr @@ -149,7 +149,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 
'charge_state_charge_current_request', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_current_request', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_charge_current_request', 'unit_of_measurement': , }) # --- @@ -206,7 +206,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_charge_limit_soc', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_limit_soc', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_charge_limit_soc', 'unit_of_measurement': '%', }) # --- diff --git a/tests/components/teslemetry/snapshots/test_select.ambr b/tests/components/teslemetry/snapshots/test_select.ambr index 4e6feda7e5d..234c885e81a 100644 --- a/tests/components/teslemetry/snapshots/test_select.ambr +++ b/tests/components/teslemetry/snapshots/test_select.ambr @@ -149,7 +149,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_seat_heater_left', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_left', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_seat_heater_left', 'unit_of_measurement': None, }) # --- @@ -208,7 +208,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_seat_heater_right', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_right', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_seat_heater_right', 'unit_of_measurement': None, }) # --- @@ -267,7 +267,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_seat_heater_rear_center', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_rear_center', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_seat_heater_rear_center', 'unit_of_measurement': None, }) # --- @@ -326,7 +326,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_seat_heater_rear_left', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_rear_left', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_seat_heater_rear_left', 'unit_of_measurement': None, }) # --- @@ -385,7 +385,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_seat_heater_rear_right', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_rear_right', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_seat_heater_rear_right', 'unit_of_measurement': None, }) # --- @@ -444,7 +444,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_seat_heater_third_row_left', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_third_row_left', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_seat_heater_third_row_left', 'unit_of_measurement': None, }) # --- @@ -503,7 +503,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_seat_heater_third_row_right', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_third_row_right', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_seat_heater_third_row_right', 'unit_of_measurement': None, }) # --- @@ -561,7 +561,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_steering_wheel_heat_level', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_steering_wheel_heat_level', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_steering_wheel_heat_level', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/teslemetry/snapshots/test_sensor.ambr b/tests/components/teslemetry/snapshots/test_sensor.ambr index 96cebc2b01f..acff157bfea 100644 --- a/tests/components/teslemetry/snapshots/test_sensor.ambr +++ 
b/tests/components/teslemetry/snapshots/test_sensor.ambr @@ -2422,7 +2422,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_battery_level', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_battery_level', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_battery_level', 'unit_of_measurement': '%', }) # --- @@ -2495,7 +2495,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_battery_range', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_battery_range', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_battery_range', 'unit_of_measurement': , }) # --- @@ -2560,7 +2560,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_conn_charge_cable', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_conn_charge_cable', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_conn_charge_cable', 'unit_of_measurement': None, }) # --- @@ -2624,7 +2624,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_charge_energy_added', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_energy_added', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_charge_energy_added', 'unit_of_measurement': , }) # --- @@ -2694,7 +2694,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_charge_rate', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_rate', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_charge_rate', 'unit_of_measurement': , }) # --- @@ -2761,7 +2761,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_charger_actual_current', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charger_actual_current', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_charger_actual_current', 'unit_of_measurement': , }) # --- @@ -2828,7 +2828,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_charger_power', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charger_power', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_charger_power', 'unit_of_measurement': , }) # --- @@ -2895,7 +2895,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_charger_voltage', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charger_voltage', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_charger_voltage', 'unit_of_measurement': , }) # --- @@ -2969,7 +2969,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_charging_state', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charging_state', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_charging_state', 'unit_of_measurement': None, }) # --- @@ -3051,7 +3051,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'drive_state_active_route_miles_to_arrival', - 'unique_id': 'LRWXF7EK4KC700000-drive_state_active_route_miles_to_arrival', + 'unique_id': 'LRW3F7EK4NC700000-drive_state_active_route_miles_to_arrival', 'unit_of_measurement': , }) # --- @@ -3121,7 +3121,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_driver_temp_setting', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_driver_temp_setting', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_driver_temp_setting', 'unit_of_measurement': , }) # --- @@ -3194,7 +3194,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_est_battery_range', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_est_battery_range', + 'unique_id': 
'LRW3F7EK4NC700000-charge_state_est_battery_range', 'unit_of_measurement': , }) # --- @@ -3259,7 +3259,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_fast_charger_type', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_fast_charger_type', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_fast_charger_type', 'unit_of_measurement': None, }) # --- @@ -3326,7 +3326,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_ideal_battery_range', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_ideal_battery_range', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_ideal_battery_range', 'unit_of_measurement': , }) # --- @@ -3396,7 +3396,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_inside_temp', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_inside_temp', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_inside_temp', 'unit_of_measurement': , }) # --- @@ -3469,7 +3469,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_odometer', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_odometer', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_odometer', 'unit_of_measurement': , }) # --- @@ -3539,7 +3539,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_outside_temp', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_outside_temp', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_outside_temp', 'unit_of_measurement': , }) # --- @@ -3609,7 +3609,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_passenger_temp_setting', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_passenger_temp_setting', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_passenger_temp_setting', 'unit_of_measurement': , }) # --- @@ -3676,7 +3676,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'drive_state_power', - 'unique_id': 'LRWXF7EK4KC700000-drive_state_power', + 'unique_id': 'LRW3F7EK4NC700000-drive_state_power', 'unit_of_measurement': , }) # --- @@ -3748,7 +3748,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'drive_state_shift_state', - 'unique_id': 'LRWXF7EK4KC700000-drive_state_shift_state', + 'unique_id': 'LRW3F7EK4NC700000-drive_state_shift_state', 'unit_of_measurement': None, }) # --- @@ -3826,7 +3826,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'drive_state_speed', - 'unique_id': 'LRWXF7EK4KC700000-drive_state_speed', + 'unique_id': 'LRW3F7EK4NC700000-drive_state_speed', 'unit_of_measurement': , }) # --- @@ -3893,7 +3893,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'drive_state_active_route_energy_at_arrival', - 'unique_id': 'LRWXF7EK4KC700000-drive_state_active_route_energy_at_arrival', + 'unique_id': 'LRW3F7EK4NC700000-drive_state_active_route_energy_at_arrival', 'unit_of_measurement': '%', }) # --- @@ -3958,7 +3958,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'drive_state_active_route_minutes_to_arrival', - 'unique_id': 'LRWXF7EK4KC700000-drive_state_active_route_minutes_to_arrival', + 'unique_id': 'LRW3F7EK4NC700000-drive_state_active_route_minutes_to_arrival', 'unit_of_measurement': None, }) # --- @@ -4019,7 +4019,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_minutes_to_full_charge', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_minutes_to_full_charge', + 'unique_id': 
'LRW3F7EK4NC700000-charge_state_minutes_to_full_charge', 'unit_of_measurement': None, }) # --- @@ -4088,7 +4088,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_pressure_fl', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_pressure_fl', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_tpms_pressure_fl', 'unit_of_measurement': , }) # --- @@ -4161,7 +4161,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_pressure_fr', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_pressure_fr', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_tpms_pressure_fr', 'unit_of_measurement': , }) # --- @@ -4234,7 +4234,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_pressure_rl', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_pressure_rl', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_tpms_pressure_rl', 'unit_of_measurement': , }) # --- @@ -4307,7 +4307,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_pressure_rr', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_pressure_rr', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_tpms_pressure_rr', 'unit_of_measurement': , }) # --- @@ -4374,7 +4374,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'drive_state_active_route_traffic_minutes_delay', - 'unique_id': 'LRWXF7EK4KC700000-drive_state_active_route_traffic_minutes_delay', + 'unique_id': 'LRW3F7EK4NC700000-drive_state_active_route_traffic_minutes_delay', 'unit_of_measurement': , }) # --- @@ -4441,7 +4441,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_usable_battery_level', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_usable_battery_level', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_usable_battery_level', 'unit_of_measurement': '%', }) # --- diff --git a/tests/components/teslemetry/snapshots/test_switch.ambr b/tests/components/teslemetry/snapshots/test_switch.ambr index f55cbae6a54..5693d4bdd5e 100644 --- a/tests/components/teslemetry/snapshots/test_switch.ambr +++ b/tests/components/teslemetry/snapshots/test_switch.ambr @@ -122,7 +122,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_auto_seat_climate_left', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_auto_seat_climate_left', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_auto_seat_climate_left', 'unit_of_measurement': None, }) # --- @@ -169,7 +169,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_auto_seat_climate_right', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_auto_seat_climate_right', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_auto_seat_climate_right', 'unit_of_measurement': None, }) # --- @@ -216,7 +216,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_auto_steering_wheel_heat', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_auto_steering_wheel_heat', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_auto_steering_wheel_heat', 'unit_of_measurement': None, }) # --- @@ -263,7 +263,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_user_charge_enable_request', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_user_charge_enable_request', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_user_charge_enable_request', 'unit_of_measurement': None, }) # --- @@ -310,7 +310,7 @@ 'previous_unique_id': None, 
'supported_features': 0, 'translation_key': 'climate_state_defrost_mode', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_defrost_mode', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_defrost_mode', 'unit_of_measurement': None, }) # --- @@ -357,7 +357,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_sentry_mode', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sentry_mode', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_sentry_mode', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/teslemetry/snapshots/test_update.ambr b/tests/components/teslemetry/snapshots/test_update.ambr index a1213f3d94b..0777f4ccdb9 100644 --- a/tests/components/teslemetry/snapshots/test_update.ambr +++ b/tests/components/teslemetry/snapshots/test_update.ambr @@ -28,7 +28,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'vehicle_state_software_update_status', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_software_update_status', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_software_update_status', 'unit_of_measurement': None, }) # --- @@ -86,7 +86,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'vehicle_state_software_update_status', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_software_update_status', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_software_update_status', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/teslemetry/test_init.py b/tests/components/teslemetry/test_init.py index 2a33e1def66..52fd6a77368 100644 --- a/tests/components/teslemetry/test_init.py +++ b/tests/components/teslemetry/test_init.py @@ -12,10 +12,7 @@ from tesla_fleet_api.exceptions import ( VehicleOffline, ) -from homeassistant.components.teslemetry.coordinator import ( - VEHICLE_INTERVAL, - VEHICLE_WAIT, -) +from homeassistant.components.teslemetry.coordinator import VEHICLE_INTERVAL from homeassistant.components.teslemetry.models import TeslemetryData from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_OFF, STATE_ON, Platform @@ -117,63 +114,6 @@ async def test_vehicle_refresh_error( assert entry.state is state -async def test_vehicle_sleep( - hass: HomeAssistant, mock_vehicle_data: AsyncMock, freezer: FrozenDateTimeFactory -) -> None: - """Test coordinator refresh with an error.""" - await setup_platform(hass, [Platform.CLIMATE]) - assert mock_vehicle_data.call_count == 1 - - freezer.tick(VEHICLE_WAIT + VEHICLE_INTERVAL) - async_fire_time_changed(hass) - # Let vehicle sleep, no updates for 15 minutes - await hass.async_block_till_done() - assert mock_vehicle_data.call_count == 2 - - freezer.tick(VEHICLE_INTERVAL) - async_fire_time_changed(hass) - # No polling, call_count should not increase - await hass.async_block_till_done() - assert mock_vehicle_data.call_count == 2 - - freezer.tick(VEHICLE_INTERVAL) - async_fire_time_changed(hass) - # No polling, call_count should not increase - await hass.async_block_till_done() - assert mock_vehicle_data.call_count == 2 - - freezer.tick(VEHICLE_WAIT) - async_fire_time_changed(hass) - # Vehicle didn't sleep, go back to normal - await hass.async_block_till_done() - assert mock_vehicle_data.call_count == 3 - - freezer.tick(VEHICLE_INTERVAL) - async_fire_time_changed(hass) - # Regular polling - await hass.async_block_till_done() - assert mock_vehicle_data.call_count == 4 - - mock_vehicle_data.return_value = VEHICLE_DATA_ALT - freezer.tick(VEHICLE_INTERVAL) - async_fire_time_changed(hass) - # Vehicle active 
- await hass.async_block_till_done() - assert mock_vehicle_data.call_count == 5 - - freezer.tick(VEHICLE_WAIT) - async_fire_time_changed(hass) - # Dont let sleep when active - await hass.async_block_till_done() - assert mock_vehicle_data.call_count == 6 - - freezer.tick(VEHICLE_WAIT) - async_fire_time_changed(hass) - # Dont let sleep when active - await hass.async_block_till_done() - assert mock_vehicle_data.call_count == 7 - - # Test Energy Live Coordinator @pytest.mark.parametrize(("side_effect", "state"), ERRORS) async def test_energy_live_refresh_error( From 1256a7ea9621bb94cd3a654d30b454a26fde681b Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Mon, 9 Dec 2024 23:11:30 +0100 Subject: [PATCH 0418/1198] Update demetriek to v1.0.0 (#132765) --- homeassistant/components/lametric/diagnostics.py | 2 +- homeassistant/components/lametric/manifest.json | 2 +- homeassistant/components/lametric/notify.py | 12 ++++++++++-- homeassistant/components/lametric/services.py | 10 ++++++++-- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/lametric/conftest.py | 12 ++++++------ .../lametric/snapshots/test_diagnostics.ambr | 3 +++ tests/components/lametric/test_notify.py | 2 +- tests/components/lametric/test_services.py | 2 +- 10 files changed, 33 insertions(+), 16 deletions(-) diff --git a/homeassistant/components/lametric/diagnostics.py b/homeassistant/components/lametric/diagnostics.py index 69c681e911a..c14ed998ace 100644 --- a/homeassistant/components/lametric/diagnostics.py +++ b/homeassistant/components/lametric/diagnostics.py @@ -26,5 +26,5 @@ async def async_get_config_entry_diagnostics( """Return diagnostics for a config entry.""" coordinator: LaMetricDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] # Round-trip via JSON to trigger serialization - data = json.loads(coordinator.data.json()) + data = json.loads(coordinator.data.to_json()) return async_redact_data(data, TO_REDACT) diff --git a/homeassistant/components/lametric/manifest.json b/homeassistant/components/lametric/manifest.json index b0c6f8fd96e..b930192caf0 100644 --- a/homeassistant/components/lametric/manifest.json +++ b/homeassistant/components/lametric/manifest.json @@ -13,7 +13,7 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["demetriek"], - "requirements": ["demetriek==0.4.0"], + "requirements": ["demetriek==1.0.0"], "ssdp": [ { "deviceType": "urn:schemas-upnp-org:device:LaMetric:1" diff --git a/homeassistant/components/lametric/notify.py b/homeassistant/components/lametric/notify.py index 7362f0ca402..195924e2da5 100644 --- a/homeassistant/components/lametric/notify.py +++ b/homeassistant/components/lametric/notify.py @@ -5,12 +5,14 @@ from __future__ import annotations from typing import Any from demetriek import ( + AlarmSound, LaMetricDevice, LaMetricError, Model, Notification, NotificationIconType, NotificationPriority, + NotificationSound, Simple, Sound, ) @@ -18,8 +20,9 @@ from demetriek import ( from homeassistant.components.notify import ATTR_DATA, BaseNotificationService from homeassistant.const import CONF_ICON from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from homeassistant.util.enum import try_parse_enum from .const import CONF_CYCLES, CONF_ICON_TYPE, CONF_PRIORITY, CONF_SOUND, DOMAIN from .coordinator import LaMetricDataUpdateCoordinator @@ 
-53,7 +56,12 @@ class LaMetricNotificationService(BaseNotificationService): sound = None if CONF_SOUND in data: - sound = Sound(sound=data[CONF_SOUND], category=None) + snd: AlarmSound | NotificationSound | None + if (snd := try_parse_enum(AlarmSound, data[CONF_SOUND])) is None and ( + snd := try_parse_enum(NotificationSound, data[CONF_SOUND]) + ) is None: + raise ServiceValidationError("Unknown sound provided") + sound = Sound(sound=snd, category=None) notification = Notification( icon_type=NotificationIconType(data.get(CONF_ICON_TYPE, "none")), diff --git a/homeassistant/components/lametric/services.py b/homeassistant/components/lametric/services.py index d5191e0a434..2d9cd8f222d 100644 --- a/homeassistant/components/lametric/services.py +++ b/homeassistant/components/lametric/services.py @@ -19,8 +19,9 @@ import voluptuous as vol from homeassistant.const import CONF_DEVICE_ID, CONF_ICON from homeassistant.core import HomeAssistant, ServiceCall, callback -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import config_validation as cv +from homeassistant.util.enum import try_parse_enum from .const import ( CONF_CYCLES, @@ -118,7 +119,12 @@ async def async_send_notification( """Send a notification to an LaMetric device.""" sound = None if CONF_SOUND in call.data: - sound = Sound(sound=call.data[CONF_SOUND], category=None) + snd: AlarmSound | NotificationSound | None + if (snd := try_parse_enum(AlarmSound, call.data[CONF_SOUND])) is None and ( + snd := try_parse_enum(NotificationSound, call.data[CONF_SOUND]) + ) is None: + raise ServiceValidationError("Unknown sound provided") + sound = Sound(sound=snd, category=None) notification = Notification( icon_type=NotificationIconType(call.data[CONF_ICON_TYPE]), diff --git a/requirements_all.txt b/requirements_all.txt index b14d35e09a6..0b71ddbd283 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -746,7 +746,7 @@ defusedxml==0.7.1 deluge-client==1.10.2 # homeassistant.components.lametric -demetriek==0.4.0 +demetriek==1.0.0 # homeassistant.components.denonavr denonavr==1.0.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 63eda9070b3..cdc8d07958e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -636,7 +636,7 @@ defusedxml==0.7.1 deluge-client==1.10.2 # homeassistant.components.lametric -demetriek==0.4.0 +demetriek==1.0.0 # homeassistant.components.denonavr denonavr==1.0.1 diff --git a/tests/components/lametric/conftest.py b/tests/components/lametric/conftest.py index e8ba727f3db..c460834be6c 100644 --- a/tests/components/lametric/conftest.py +++ b/tests/components/lametric/conftest.py @@ -6,7 +6,6 @@ from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from demetriek import CloudDevice, Device -from pydantic import parse_raw_as # pylint: disable=no-name-in-module import pytest from homeassistant.components.application_credentials import ( @@ -18,7 +17,7 @@ from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_MAC from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, load_fixture +from tests.common import MockConfigEntry, load_fixture, load_json_array_fixture @pytest.fixture(autouse=True) @@ -61,9 +60,10 @@ def mock_lametric_cloud() -> Generator[MagicMock]: "homeassistant.components.lametric.config_flow.LaMetricCloud", autospec=True ) as 
lametric_mock: lametric = lametric_mock.return_value - lametric.devices.return_value = parse_raw_as( - list[CloudDevice], load_fixture("cloud_devices.json", DOMAIN) - ) + lametric.devices.return_value = [ + CloudDevice.from_dict(cloud_device) + for cloud_device in load_json_array_fixture("cloud_devices.json", DOMAIN) + ] yield lametric @@ -89,7 +89,7 @@ def mock_lametric(device_fixture: str) -> Generator[MagicMock]: lametric = lametric_mock.return_value lametric.api_key = "mock-api-key" lametric.host = "127.0.0.1" - lametric.device.return_value = Device.parse_raw( + lametric.device.return_value = Device.from_json( load_fixture(f"{device_fixture}.json", DOMAIN) ) yield lametric diff --git a/tests/components/lametric/snapshots/test_diagnostics.ambr b/tests/components/lametric/snapshots/test_diagnostics.ambr index cadd0e37566..15b35576ad4 100644 --- a/tests/components/lametric/snapshots/test_diagnostics.ambr +++ b/tests/components/lametric/snapshots/test_diagnostics.ambr @@ -26,6 +26,9 @@ 'brightness_mode': 'auto', 'display_type': 'mixed', 'height': 8, + 'screensaver': dict({ + 'enabled': False, + }), 'width': 37, }), 'mode': 'auto', diff --git a/tests/components/lametric/test_notify.py b/tests/components/lametric/test_notify.py index a46d97f8f81..d30a8c86543 100644 --- a/tests/components/lametric/test_notify.py +++ b/tests/components/lametric/test_notify.py @@ -100,7 +100,7 @@ async def test_notification_options( assert len(notification.model.frames) == 1 frame = notification.model.frames[0] assert type(frame) is Simple - assert frame.icon == 1234 + assert frame.icon == "1234" assert frame.text == "The secret of getting ahead is getting started" diff --git a/tests/components/lametric/test_services.py b/tests/components/lametric/test_services.py index d3fbd0a18e0..b9b5c4c8b3a 100644 --- a/tests/components/lametric/test_services.py +++ b/tests/components/lametric/test_services.py @@ -190,7 +190,7 @@ async def test_service_message( assert len(notification.model.frames) == 1 frame = notification.model.frames[0] assert type(frame) is Simple - assert frame.icon == 6916 + assert frame.icon == "6916" assert frame.text == "Meow!" mock_lametric.notify.side_effect = LaMetricError From bd4e21aa9d275e3ebcc09c28cc37de7c79980eee Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Mon, 9 Dec 2024 23:15:23 +0100 Subject: [PATCH 0419/1198] Improve description of 'vapid_email' field (#131349) --- homeassistant/components/html5/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/html5/strings.json b/homeassistant/components/html5/strings.json index 40bdbb36261..2c68223581a 100644 --- a/homeassistant/components/html5/strings.json +++ b/homeassistant/components/html5/strings.json @@ -7,7 +7,7 @@ "vapid_prv_key": "VAPID private key" }, "data_description": { - "vapid_email": "Email to use for html5 push notifications.", + "vapid_email": "This contact address will be included in the metadata of each notification.", "vapid_prv_key": "If not specified, one will be automatically generated." 
} } From d2478b40582bdfc61d4b2e61a269d25a1ea637c9 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Mon, 9 Dec 2024 23:16:23 +0100 Subject: [PATCH 0420/1198] Use consistent UI name for system_log.clear action (#132083) --- homeassistant/components/system_log/strings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/system_log/strings.json b/homeassistant/components/system_log/strings.json index ed1ca79fe07..db71cd6ace4 100644 --- a/homeassistant/components/system_log/strings.json +++ b/homeassistant/components/system_log/strings.json @@ -1,8 +1,8 @@ { "services": { "clear": { - "name": "Clear all", - "description": "Clears all log entries." + "name": "Clear", + "description": "Deletes all log entries." }, "write": { "name": "Write", From 879e082b540eceeb61c4b609304ab59b84a5bc83 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 23:17:57 +0100 Subject: [PATCH 0421/1198] Migrate osramlightify lights to use Kelvin (#132688) --- .../components/osramlightify/light.py | 38 ++++--------------- 1 file changed, 8 insertions(+), 30 deletions(-) diff --git a/homeassistant/components/osramlightify/light.py b/homeassistant/components/osramlightify/light.py index 0254c478b42..6ddd392af7b 100644 --- a/homeassistant/components/osramlightify/light.py +++ b/homeassistant/components/osramlightify/light.py @@ -11,7 +11,7 @@ import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, ATTR_TRANSITION, @@ -191,10 +191,7 @@ class Luminary(LightEntity): self._effect_list = [] self._is_on = False self._available = True - self._min_mireds = None - self._max_mireds = None self._brightness = None - self._color_temp = None self._rgb_color = None self._device_attributes = None @@ -256,11 +253,6 @@ class Luminary(LightEntity): """Return last hs color value set.""" return color_util.color_RGB_to_hs(*self._rgb_color) - @property - def color_temp(self): - """Return the color temperature.""" - return self._color_temp - @property def brightness(self): """Return brightness of the luminary (0..255).""" @@ -276,16 +268,6 @@ class Luminary(LightEntity): """List of supported effects.""" return self._effect_list - @property - def min_mireds(self): - """Return the coldest color_temp that this light supports.""" - return self._min_mireds - - @property - def max_mireds(self): - """Return the warmest color_temp that this light supports.""" - return self._max_mireds - @property def unique_id(self): """Return a unique ID.""" @@ -326,12 +308,10 @@ class Luminary(LightEntity): self._rgb_color = color_util.color_hs_to_RGB(*kwargs[ATTR_HS_COLOR]) self._luminary.set_rgb(*self._rgb_color, transition) - if ATTR_COLOR_TEMP in kwargs: - self._color_temp = kwargs[ATTR_COLOR_TEMP] - self._luminary.set_temperature( - int(color_util.color_temperature_mired_to_kelvin(self._color_temp)), - transition, - ) + if ATTR_COLOR_TEMP_KELVIN in kwargs: + color_temp_kelvin = kwargs[ATTR_COLOR_TEMP_KELVIN] + self._attr_color_temp_kelvin = color_temp_kelvin + self._luminary.set_temperature(color_temp_kelvin, transition) self._is_on = True if ATTR_BRIGHTNESS in kwargs: @@ -362,10 +342,10 @@ class Luminary(LightEntity): self._attr_supported_features = self._get_supported_features() self._effect_list = self._get_effect_list() if ColorMode.COLOR_TEMP in self._attr_supported_color_modes: - self._min_mireds = color_util.color_temperature_kelvin_to_mired( + 
self._attr_max_color_temp_kelvin = ( self._luminary.max_temp() or DEFAULT_KELVIN ) - self._max_mireds = color_util.color_temperature_kelvin_to_mired( + self._attr_min_color_temp_kelvin = ( self._luminary.min_temp() or DEFAULT_KELVIN ) if len(self._attr_supported_color_modes) == 1: @@ -380,9 +360,7 @@ class Luminary(LightEntity): self._brightness = int(self._luminary.lum() * 2.55) if ColorMode.COLOR_TEMP in self._attr_supported_color_modes: - self._color_temp = color_util.color_temperature_kelvin_to_mired( - self._luminary.temp() or DEFAULT_KELVIN - ) + self._attr_color_temp_kelvin = self._luminary.temp() or DEFAULT_KELVIN if ColorMode.HS in self._attr_supported_color_modes: self._rgb_color = self._luminary.rgb() From 020db5f8222eb6ce4ce27652be7b590302e6d88d Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 23:43:45 +0100 Subject: [PATCH 0422/1198] Migrate matter lights to use Kelvin (#132685) --- homeassistant/components/matter/light.py | 38 ++++++++++++++++-------- 1 file changed, 26 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/matter/light.py b/homeassistant/components/matter/light.py index 6d83fc31722..153e154e64e 100644 --- a/homeassistant/components/matter/light.py +++ b/homeassistant/components/matter/light.py @@ -9,7 +9,7 @@ from matter_server.client.models import device_types from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ATTR_TRANSITION, ATTR_XY_COLOR, @@ -23,6 +23,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import color as color_util from .const import LOGGER from .entity import MatterEntity @@ -131,12 +132,16 @@ class MatterLight(MatterEntity, LightEntity): ) ) - async def _set_color_temp(self, color_temp: int, transition: float = 0.0) -> None: + async def _set_color_temp( + self, color_temp_kelvin: int, transition: float = 0.0 + ) -> None: """Set color temperature.""" - + color_temp_mired = color_util.color_temperature_kelvin_to_mired( + color_temp_kelvin + ) await self.send_device_command( clusters.ColorControl.Commands.MoveToColorTemperature( - colorTemperatureMireds=color_temp, + colorTemperatureMireds=color_temp_mired, # transition in matter is measured in tenths of a second transitionTime=int(transition * 10), # allow setting the color while the light is off, @@ -286,7 +291,7 @@ class MatterLight(MatterEntity, LightEntity): hs_color = kwargs.get(ATTR_HS_COLOR) xy_color = kwargs.get(ATTR_XY_COLOR) - color_temp = kwargs.get(ATTR_COLOR_TEMP) + color_temp_kelvin = kwargs.get(ATTR_COLOR_TEMP_KELVIN) brightness = kwargs.get(ATTR_BRIGHTNESS) transition = kwargs.get(ATTR_TRANSITION, 0) if self._transitions_disabled: @@ -298,10 +303,10 @@ class MatterLight(MatterEntity, LightEntity): elif xy_color is not None and ColorMode.XY in self.supported_color_modes: await self._set_xy_color(xy_color, transition) elif ( - color_temp is not None + color_temp_kelvin is not None and ColorMode.COLOR_TEMP in self.supported_color_modes ): - await self._set_color_temp(color_temp, transition) + await self._set_color_temp(color_temp_kelvin, transition) if brightness is not None and self._supports_brightness: await self._set_brightness(brightness, transition) @@ -368,12 +373,16 @@ class MatterLight(MatterEntity, LightEntity): 
clusters.ColorControl.Attributes.ColorTempPhysicalMinMireds ) if min_mireds > 0: - self._attr_min_mireds = min_mireds + self._attr_max_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(min_mireds) + ) max_mireds = self.get_matter_attribute_value( clusters.ColorControl.Attributes.ColorTempPhysicalMaxMireds ) if max_mireds > 0: - self._attr_max_mireds = max_mireds + self._attr_min_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(max_mireds) + ) supported_color_modes = filter_supported_color_modes(supported_color_modes) self._attr_supported_color_modes = supported_color_modes @@ -399,8 +408,13 @@ class MatterLight(MatterEntity, LightEntity): if self._supports_brightness: self._attr_brightness = self._get_brightness() - if self._supports_color_temperature: - self._attr_color_temp = self._get_color_temperature() + if ( + self._supports_color_temperature + and (color_temperature := self._get_color_temperature()) > 0 + ): + self._attr_color_temp_kelvin = color_util.color_temperature_mired_to_kelvin( + color_temperature + ) if self._supports_color: self._attr_color_mode = color_mode = self._get_color_mode() @@ -414,7 +428,7 @@ class MatterLight(MatterEntity, LightEntity): and color_mode == ColorMode.XY ): self._attr_xy_color = self._get_xy_color() - elif self._attr_color_temp is not None: + elif self._attr_color_temp_kelvin is not None: self._attr_color_mode = ColorMode.COLOR_TEMP elif self._attr_brightness is not None: self._attr_color_mode = ColorMode.BRIGHTNESS From b1c17334f65b90a555a8b13ad5afd4543f920fa7 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Mon, 9 Dec 2024 23:48:23 +0100 Subject: [PATCH 0423/1198] Set Nord Pool device as a service (#132717) --- homeassistant/components/nordpool/entity.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/nordpool/entity.py b/homeassistant/components/nordpool/entity.py index 32240aad12c..ec3264cd2e3 100644 --- a/homeassistant/components/nordpool/entity.py +++ b/homeassistant/components/nordpool/entity.py @@ -2,7 +2,7 @@ from __future__ import annotations -from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -29,4 +29,5 @@ class NordpoolBaseEntity(CoordinatorEntity[NordPoolDataUpdateCoordinator]): self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, area)}, name=f"Nord Pool {area}", + entry_type=DeviceEntryType.SERVICE, ) From f210b74790d11a8d42b027689a4c8a9fdcbae0b1 Mon Sep 17 00:00:00 2001 From: jb101010-2 <168106462+jb101010-2@users.noreply.github.com> Date: Mon, 9 Dec 2024 23:50:04 +0100 Subject: [PATCH 0424/1198] Suez_water: close session after config flow (#132714) --- .../components/suez_water/config_flow.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/suez_water/config_flow.py b/homeassistant/components/suez_water/config_flow.py index ac09cf4a1d3..2a1edea35f1 100644 --- a/homeassistant/components/suez_water/config_flow.py +++ b/homeassistant/components/suez_water/config_flow.py @@ -37,16 +37,19 @@ async def validate_input(data: dict[str, Any]) -> None: data[CONF_PASSWORD], counter_id, ) - if not await client.check_credentials(): - raise InvalidAuth - except PySuezError as ex: - raise CannotConnect from ex - - if counter_id is None: try: - data[CONF_COUNTER_ID] = await 
client.find_counter() + if not await client.check_credentials(): + raise InvalidAuth except PySuezError as ex: - raise CounterNotFound from ex + raise CannotConnect from ex + + if counter_id is None: + try: + data[CONF_COUNTER_ID] = await client.find_counter() + except PySuezError as ex: + raise CounterNotFound from ex + finally: + await client.close_session() class SuezWaterConfigFlow(ConfigFlow, domain=DOMAIN): From cd39e4ac80a82a350396b624b62d3ef67d1e2386 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Dec 2024 23:51:27 +0100 Subject: [PATCH 0425/1198] Migrate abode lights to use Kelvin (#132690) --- homeassistant/components/abode/light.py | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/abode/light.py b/homeassistant/components/abode/light.py index d69aad80875..9b21ee4eb74 100644 --- a/homeassistant/components/abode/light.py +++ b/homeassistant/components/abode/light.py @@ -9,7 +9,7 @@ from jaraco.abode.devices.light import Light from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ColorMode, LightEntity, @@ -17,10 +17,6 @@ from homeassistant.components.light import ( from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util.color import ( - color_temperature_kelvin_to_mired, - color_temperature_mired_to_kelvin, -) from . import AbodeSystem from .const import DOMAIN @@ -47,10 +43,8 @@ class AbodeLight(AbodeDevice, LightEntity): def turn_on(self, **kwargs: Any) -> None: """Turn on the light.""" - if ATTR_COLOR_TEMP in kwargs and self._device.is_color_capable: - self._device.set_color_temp( - int(color_temperature_mired_to_kelvin(kwargs[ATTR_COLOR_TEMP])) - ) + if ATTR_COLOR_TEMP_KELVIN in kwargs and self._device.is_color_capable: + self._device.set_color_temp(kwargs[ATTR_COLOR_TEMP_KELVIN]) return if ATTR_HS_COLOR in kwargs and self._device.is_color_capable: @@ -85,10 +79,10 @@ class AbodeLight(AbodeDevice, LightEntity): return None @property - def color_temp(self) -> int | None: + def color_temp_kelvin(self) -> int | None: """Return the color temp of the light.""" if self._device.has_color: - return color_temperature_kelvin_to_mired(self._device.color_temp) + return int(self._device.color_temp) return None @property From 5062a7fec8ec952387f10ba07e89a1045ee117c0 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Mon, 9 Dec 2024 23:21:27 -0500 Subject: [PATCH 0426/1198] Add new api to fetch sentence triggers (#132764) * Add new api to fetch sentence triggers * With latest packages --- .../components/conversation/default_agent.py | 12 +++++----- homeassistant/components/conversation/http.py | 22 +++++++++++++++++++ .../conversation/snapshots/test_http.ambr | 8 +++++++ tests/components/conversation/test_http.py | 13 +++++++++++ 4 files changed, 49 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/conversation/default_agent.py b/homeassistant/components/conversation/default_agent.py index 624fa3c3555..66ffb25fa1a 100644 --- a/homeassistant/components/conversation/default_agent.py +++ b/homeassistant/components/conversation/default_agent.py @@ -246,7 +246,7 @@ class DefaultAgent(ConversationEntity): self._unexposed_names_trie: Trie | None = None # Sentences that will trigger a callback (skipping intent recognition) - self._trigger_sentences: list[TriggerData] = [] + 
self.trigger_sentences: list[TriggerData] = [] self._trigger_intents: Intents | None = None self._unsub_clear_slot_list: list[Callable[[], None]] | None = None self._load_intents_lock = asyncio.Lock() @@ -1188,7 +1188,7 @@ class DefaultAgent(ConversationEntity): ) -> core.CALLBACK_TYPE: """Register a list of sentences that will trigger a callback when recognized.""" trigger_data = TriggerData(sentences=sentences, callback=callback) - self._trigger_sentences.append(trigger_data) + self.trigger_sentences.append(trigger_data) # Force rebuild on next use self._trigger_intents = None @@ -1205,7 +1205,7 @@ class DefaultAgent(ConversationEntity): # This works because the intents are rebuilt on every # register/unregister. str(trigger_id): {"data": [{"sentences": trigger_data.sentences}]} - for trigger_id, trigger_data in enumerate(self._trigger_sentences) + for trigger_id, trigger_data in enumerate(self.trigger_sentences) }, } @@ -1228,7 +1228,7 @@ class DefaultAgent(ConversationEntity): @core.callback def _unregister_trigger(self, trigger_data: TriggerData) -> None: """Unregister a set of trigger sentences.""" - self._trigger_sentences.remove(trigger_data) + self.trigger_sentences.remove(trigger_data) # Force rebuild on next use self._trigger_intents = None @@ -1241,7 +1241,7 @@ class DefaultAgent(ConversationEntity): Calls the registered callbacks if there's a match and returns a sentence trigger result. """ - if not self._trigger_sentences: + if not self.trigger_sentences: # No triggers registered return None @@ -1286,7 +1286,7 @@ class DefaultAgent(ConversationEntity): # Gather callback responses in parallel trigger_callbacks = [ - self._trigger_sentences[trigger_id].callback(user_input, trigger_result) + self.trigger_sentences[trigger_id].callback(user_input, trigger_result) for trigger_id, trigger_result in result.matched_triggers.items() ] diff --git a/homeassistant/components/conversation/http.py b/homeassistant/components/conversation/http.py index ebc5d70f1ef..d9873c5cbce 100644 --- a/homeassistant/components/conversation/http.py +++ b/homeassistant/components/conversation/http.py @@ -36,6 +36,7 @@ def async_setup(hass: HomeAssistant) -> None: websocket_api.async_register_command(hass, websocket_process) websocket_api.async_register_command(hass, websocket_prepare) websocket_api.async_register_command(hass, websocket_list_agents) + websocket_api.async_register_command(hass, websocket_list_sentences) websocket_api.async_register_command(hass, websocket_hass_agent_debug) @@ -150,6 +151,27 @@ async def websocket_list_agents( connection.send_message(websocket_api.result_message(msg["id"], {"agents": agents})) +@websocket_api.websocket_command( + { + vol.Required("type"): "conversation/sentences/list", + } +) +@websocket_api.require_admin +@websocket_api.async_response +async def websocket_list_sentences( + hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict +) -> None: + """List custom registered sentences.""" + agent = hass.data.get(DATA_DEFAULT_ENTITY) + assert isinstance(agent, DefaultAgent) + + sentences = [] + for trigger_data in agent.trigger_sentences: + sentences.extend(trigger_data.sentences) + + connection.send_result(msg["id"], {"trigger_sentences": sentences}) + + @websocket_api.websocket_command( { vol.Required("type"): "conversation/agent/homeassistant/debug", diff --git a/tests/components/conversation/snapshots/test_http.ambr b/tests/components/conversation/snapshots/test_http.ambr index 8023d1ee6fa..9cebfd9abd1 100644 --- 
a/tests/components/conversation/snapshots/test_http.ambr +++ b/tests/components/conversation/snapshots/test_http.ambr @@ -693,6 +693,14 @@ }) # --- # name: test_ws_hass_agent_debug_sentence_trigger + dict({ + 'trigger_sentences': list([ + 'hello', + 'hello[ world]', + ]), + }) +# --- +# name: test_ws_hass_agent_debug_sentence_trigger.1 dict({ 'results': list([ dict({ diff --git a/tests/components/conversation/test_http.py b/tests/components/conversation/test_http.py index e792d8c6913..6d69ec3c739 100644 --- a/tests/components/conversation/test_http.py +++ b/tests/components/conversation/test_http.py @@ -501,6 +501,19 @@ async def test_ws_hass_agent_debug_sentence_trigger( client = await hass_ws_client(hass) + # List sentence + await client.send_json_auto_id( + { + "type": "conversation/sentences/list", + } + ) + await hass.async_block_till_done() + + msg = await client.receive_json() + + assert msg["success"] + assert msg["result"] == snapshot + # Use trigger sentence await client.send_json_auto_id( { From e83a50b88d53a650ce12d90833bb914e860b3f7e Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 10 Dec 2024 08:15:47 +0100 Subject: [PATCH 0427/1198] Migrate elgato lights to use Kelvin (#132789) --- homeassistant/components/elgato/light.py | 31 ++++++++++++------- .../elgato/snapshots/test_light.ambr | 28 ++++++++--------- 2 files changed, 34 insertions(+), 25 deletions(-) diff --git a/homeassistant/components/elgato/light.py b/homeassistant/components/elgato/light.py index a62a26f21d3..9a85c572e2c 100644 --- a/homeassistant/components/elgato/light.py +++ b/homeassistant/components/elgato/light.py @@ -8,7 +8,7 @@ from elgato import ElgatoError from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ColorMode, LightEntity, @@ -19,6 +19,7 @@ from homeassistant.helpers.entity_platform import ( AddEntitiesCallback, async_get_current_platform, ) +from homeassistant.util import color as color_util from . 
import ElgatorConfigEntry from .const import SERVICE_IDENTIFY @@ -49,8 +50,8 @@ class ElgatoLight(ElgatoEntity, LightEntity): """Defines an Elgato Light.""" _attr_name = None - _attr_min_mireds = 143 - _attr_max_mireds = 344 + _attr_min_color_temp_kelvin = 2900 # 344 Mireds + _attr_max_color_temp_kelvin = 7000 # 143 Mireds def __init__(self, coordinator: ElgatoDataUpdateCoordinator) -> None: """Initialize Elgato Light.""" @@ -69,8 +70,8 @@ class ElgatoLight(ElgatoEntity, LightEntity): or self.coordinator.data.state.hue is not None ): self._attr_supported_color_modes = {ColorMode.COLOR_TEMP, ColorMode.HS} - self._attr_min_mireds = 153 - self._attr_max_mireds = 285 + self._attr_min_color_temp_kelvin = 3500 # 285 Mireds + self._attr_max_color_temp_kelvin = 6500 # 153 Mireds @property def brightness(self) -> int | None: @@ -78,9 +79,11 @@ class ElgatoLight(ElgatoEntity, LightEntity): return round((self.coordinator.data.state.brightness * 255) / 100) @property - def color_temp(self) -> int | None: - """Return the CT color value in mireds.""" - return self.coordinator.data.state.temperature + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" + if (mired_temperature := self.coordinator.data.state.temperature) is None: + return None + return color_util.color_temperature_mired_to_kelvin(mired_temperature) @property def color_mode(self) -> str | None: @@ -116,7 +119,7 @@ class ElgatoLight(ElgatoEntity, LightEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Turn on the light.""" - temperature = kwargs.get(ATTR_COLOR_TEMP) + temperature_kelvin = kwargs.get(ATTR_COLOR_TEMP_KELVIN) hue = None saturation = None @@ -133,12 +136,18 @@ class ElgatoLight(ElgatoEntity, LightEntity): if ( brightness and ATTR_HS_COLOR not in kwargs - and ATTR_COLOR_TEMP not in kwargs + and ATTR_COLOR_TEMP_KELVIN not in kwargs and self.supported_color_modes and ColorMode.HS in self.supported_color_modes and self.color_mode == ColorMode.COLOR_TEMP ): - temperature = self.color_temp + temperature_kelvin = self.color_temp_kelvin + + temperature = ( + None + if temperature_kelvin is None + else color_util.color_temperature_kelvin_to_mired(temperature_kelvin) + ) try: await self.coordinator.client.light( diff --git a/tests/components/elgato/snapshots/test_light.ambr b/tests/components/elgato/snapshots/test_light.ambr index 009feefc145..4bb4644ab86 100644 --- a/tests/components/elgato/snapshots/test_light.ambr +++ b/tests/components/elgato/snapshots/test_light.ambr @@ -11,10 +11,10 @@ 27.316, 47.743, ), - 'max_color_temp_kelvin': 6993, + 'max_color_temp_kelvin': 7000, 'max_mireds': 344, - 'min_color_temp_kelvin': 2906, - 'min_mireds': 143, + 'min_color_temp_kelvin': 2900, + 'min_mireds': 142, 'rgb_color': tuple( 255, 189, @@ -43,10 +43,10 @@ }), 'area_id': None, 'capabilities': dict({ - 'max_color_temp_kelvin': 6993, + 'max_color_temp_kelvin': 7000, 'max_mireds': 344, - 'min_color_temp_kelvin': 2906, - 'min_mireds': 143, + 'min_color_temp_kelvin': 2900, + 'min_mireds': 142, 'supported_color_modes': list([ , ]), @@ -126,9 +126,9 @@ 27.316, 47.743, ), - 'max_color_temp_kelvin': 6535, + 'max_color_temp_kelvin': 6500, 'max_mireds': 285, - 'min_color_temp_kelvin': 3508, + 'min_color_temp_kelvin': 3500, 'min_mireds': 153, 'rgb_color': tuple( 255, @@ -159,9 +159,9 @@ }), 'area_id': None, 'capabilities': dict({ - 'max_color_temp_kelvin': 6535, + 'max_color_temp_kelvin': 6500, 'max_mireds': 285, - 'min_color_temp_kelvin': 3508, + 'min_color_temp_kelvin': 3500, 'min_mireds': 153, 
'supported_color_modes': list([ , @@ -243,9 +243,9 @@ 358.0, 6.0, ), - 'max_color_temp_kelvin': 6535, + 'max_color_temp_kelvin': 6500, 'max_mireds': 285, - 'min_color_temp_kelvin': 3508, + 'min_color_temp_kelvin': 3500, 'min_mireds': 153, 'rgb_color': tuple( 255, @@ -276,9 +276,9 @@ }), 'area_id': None, 'capabilities': dict({ - 'max_color_temp_kelvin': 6535, + 'max_color_temp_kelvin': 6500, 'max_mireds': 285, - 'min_color_temp_kelvin': 3508, + 'min_color_temp_kelvin': 3500, 'min_mireds': 153, 'supported_color_modes': list([ , From 580a8d66b275bd3b3dcb8752e309f7567b392451 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Tue, 10 Dec 2024 08:20:28 +0100 Subject: [PATCH 0428/1198] Change fields allowed to change in options flow for Mold indicator (#132400) --- .../components/mold_indicator/config_flow.py | 18 +++++++++--------- .../mold_indicator/test_config_flow.py | 3 +++ 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/mold_indicator/config_flow.py b/homeassistant/components/mold_indicator/config_flow.py index e6f795ecc91..5e5512a60bf 100644 --- a/homeassistant/components/mold_indicator/config_flow.py +++ b/homeassistant/components/mold_indicator/config_flow.py @@ -51,15 +51,6 @@ async def validate_input( DATA_SCHEMA_OPTIONS = vol.Schema( { - vol.Required(CONF_CALIBRATION_FACTOR): NumberSelector( - NumberSelectorConfig(step=0.1, mode=NumberSelectorMode.BOX) - ) - } -) - -DATA_SCHEMA_CONFIG = vol.Schema( - { - vol.Required(CONF_NAME, default=DEFAULT_NAME): TextSelector(), vol.Required(CONF_INDOOR_TEMP): EntitySelector( EntitySelectorConfig( domain=Platform.SENSOR, device_class=SensorDeviceClass.TEMPERATURE @@ -75,6 +66,15 @@ DATA_SCHEMA_CONFIG = vol.Schema( domain=Platform.SENSOR, device_class=SensorDeviceClass.TEMPERATURE ) ), + vol.Required(CONF_CALIBRATION_FACTOR): NumberSelector( + NumberSelectorConfig(step=0.1, mode=NumberSelectorMode.BOX) + ), + } +) + +DATA_SCHEMA_CONFIG = vol.Schema( + { + vol.Required(CONF_NAME, default=DEFAULT_NAME): TextSelector(), } ).extend(DATA_SCHEMA_OPTIONS.schema) diff --git a/tests/components/mold_indicator/test_config_flow.py b/tests/components/mold_indicator/test_config_flow.py index 9df0e18d9ed..bb8362b5e0d 100644 --- a/tests/components/mold_indicator/test_config_flow.py +++ b/tests/components/mold_indicator/test_config_flow.py @@ -70,6 +70,9 @@ async def test_options_flow(hass: HomeAssistant, loaded_entry: MockConfigEntry) result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={ + CONF_INDOOR_TEMP: "sensor.indoor_temp", + CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", + CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", CONF_CALIBRATION_FACTOR: 3.0, }, ) From 397091cc7d424f3ecbea035e1ae18e923b654dd6 Mon Sep 17 00:00:00 2001 From: David Rapan Date: Tue, 10 Dec 2024 08:26:42 +0100 Subject: [PATCH 0429/1198] Add Starlink usage sensors (#132738) * Add usage metrics returned from history_stats * Add upload and download usage sensors * Add strings for upload and download usage sensors * Add usage to test_diagnostics.ambr * Add icons for upload and download usage sensors * Add suggested_unit_of_measurement to GIGABYTES --- .../components/starlink/coordinator.py | 17 +++++++++++------ homeassistant/components/starlink/icons.json | 6 ++++++ homeassistant/components/starlink/sensor.py | 19 +++++++++++++++++++ .../components/starlink/strings.json | 6 ++++++ .../starlink/snapshots/test_diagnostics.ambr | 4 ++++ 5 files changed, 46 insertions(+), 6 deletions(-) diff --git 
a/homeassistant/components/starlink/coordinator.py b/homeassistant/components/starlink/coordinator.py index 81ee56db3b4..89d03a4fadc 100644 --- a/homeassistant/components/starlink/coordinator.py +++ b/homeassistant/components/starlink/coordinator.py @@ -16,6 +16,7 @@ from starlink_grpc import ( ObstructionDict, PowerDict, StatusDict, + UsageDict, get_sleep_config, history_stats, location_data, @@ -41,6 +42,7 @@ class StarlinkData: status: StatusDict obstruction: ObstructionDict alert: AlertDict + usage: UsageDict consumption: PowerDict @@ -60,12 +62,15 @@ class StarlinkUpdateCoordinator(DataUpdateCoordinator[StarlinkData]): def _get_starlink_data(self) -> StarlinkData: """Retrieve Starlink data.""" - channel_context = self.channel_context - location = location_data(channel_context) - sleep = get_sleep_config(channel_context) - status, obstruction, alert = status_data(channel_context) - statistics = history_stats(parse_samples=-1, context=channel_context) - return StarlinkData(location, sleep, status, obstruction, alert, statistics[-1]) + context = self.channel_context + status = status_data(context) + location = location_data(context) + sleep = get_sleep_config(context) + status, obstruction, alert = status_data(context) + usage, consumption = history_stats(parse_samples=-1, context=context)[-2:] + return StarlinkData( + location, sleep, status, obstruction, alert, usage, consumption + ) async def _async_update_data(self) -> StarlinkData: async with asyncio.timeout(4): diff --git a/homeassistant/components/starlink/icons.json b/homeassistant/components/starlink/icons.json index 65cb273e24b..02de62aeb8a 100644 --- a/homeassistant/components/starlink/icons.json +++ b/homeassistant/components/starlink/icons.json @@ -18,6 +18,12 @@ }, "last_boot_time": { "default": "mdi:clock" + }, + "upload": { + "default": "mdi:upload" + }, + "download": { + "default": "mdi:download" } } } diff --git a/homeassistant/components/starlink/sensor.py b/homeassistant/components/starlink/sensor.py index 4b33a7f4337..5481e310fbd 100644 --- a/homeassistant/components/starlink/sensor.py +++ b/homeassistant/components/starlink/sensor.py @@ -19,6 +19,7 @@ from homeassistant.const import ( EntityCategory, UnitOfDataRate, UnitOfEnergy, + UnitOfInformation, UnitOfPower, UnitOfTime, ) @@ -122,6 +123,24 @@ SENSORS: tuple[StarlinkSensorEntityDescription, ...] 
= ( native_unit_of_measurement=PERCENTAGE, value_fn=lambda data: data.status["pop_ping_drop_rate"] * 100, ), + StarlinkSensorEntityDescription( + key="upload", + translation_key="upload", + device_class=SensorDeviceClass.DATA_SIZE, + state_class=SensorStateClass.TOTAL_INCREASING, + native_unit_of_measurement=UnitOfInformation.BYTES, + suggested_unit_of_measurement=UnitOfInformation.GIGABYTES, + value_fn=lambda data: data.usage["upload_usage"], + ), + StarlinkSensorEntityDescription( + key="download", + translation_key="download", + device_class=SensorDeviceClass.DATA_SIZE, + state_class=SensorStateClass.TOTAL_INCREASING, + native_unit_of_measurement=UnitOfInformation.BYTES, + suggested_unit_of_measurement=UnitOfInformation.GIGABYTES, + value_fn=lambda data: data.usage["download_usage"], + ), StarlinkSensorEntityDescription( key="power", device_class=SensorDeviceClass.POWER, diff --git a/homeassistant/components/starlink/strings.json b/homeassistant/components/starlink/strings.json index 36a4f176e70..395b6288c71 100644 --- a/homeassistant/components/starlink/strings.json +++ b/homeassistant/components/starlink/strings.json @@ -70,6 +70,12 @@ }, "ping_drop_rate": { "name": "Ping drop rate" + }, + "upload": { + "name": "Upload" + }, + "download": { + "name": "Download" } }, "switch": { diff --git a/tests/components/starlink/snapshots/test_diagnostics.ambr b/tests/components/starlink/snapshots/test_diagnostics.ambr index c0b1b93085b..c54e0b2df6d 100644 --- a/tests/components/starlink/snapshots/test_diagnostics.ambr +++ b/tests/components/starlink/snapshots/test_diagnostics.ambr @@ -86,5 +86,9 @@ 'uplink_throughput_bps': 11802.771484375, 'uptime': 804138, }), + 'usage': dict({ + 'download_usage': 72504227, + 'upload_usage': 5719755, + }), }) # --- From 53e528e9b697718a8bb7958e34fc5747c4178017 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 10 Dec 2024 08:27:05 +0100 Subject: [PATCH 0430/1198] Bump actions/attest-build-provenance from 2.0.1 to 2.1.0 (#132788) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/builder.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index c172e0b14eb..9d3ab18f7c1 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -531,7 +531,7 @@ jobs: - name: Generate artifact attestation if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true' - uses: actions/attest-build-provenance@c4fbc648846ca6f503a13a2281a5e7b98aa57202 # v2.0.1 + uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0 with: subject-name: ${{ env.HASSFEST_IMAGE_NAME }} subject-digest: ${{ steps.push.outputs.digest }} From 1ee3b68824297077de392970950a5aebaddc2204 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 10 Dec 2024 08:28:38 +0100 Subject: [PATCH 0431/1198] Migrate homekit_controller lights to use Kelvin (#132792) --- .../components/homekit_controller/light.py | 58 ++++++++++++------- 1 file changed, 36 insertions(+), 22 deletions(-) diff --git a/homeassistant/components/homekit_controller/light.py b/homeassistant/components/homekit_controller/light.py index 472ccfbd550..d8c48d81333 100644 --- a/homeassistant/components/homekit_controller/light.py +++ b/homeassistant/components/homekit_controller/light.py @@ -10,7 +10,7 @@ from propcache import cached_property from 
homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ColorMode, LightEntity, @@ -57,7 +57,12 @@ class HomeKitLight(HomeKitEntity, LightEntity): def _async_reconfigure(self) -> None: """Reconfigure entity.""" self._async_clear_property_cache( - ("supported_features", "min_mireds", "max_mireds", "supported_color_modes") + ( + "supported_features", + "min_color_temp_kelvin", + "max_color_temp_kelvin", + "supported_color_modes", + ) ) super()._async_reconfigure() @@ -90,25 +95,35 @@ class HomeKitLight(HomeKitEntity, LightEntity): ) @cached_property - def min_mireds(self) -> int: - """Return minimum supported color temperature.""" + def max_color_temp_kelvin(self) -> int: + """Return the coldest color_temp_kelvin that this light supports.""" if not self.service.has(CharacteristicsTypes.COLOR_TEMPERATURE): - return super().min_mireds - min_value = self.service[CharacteristicsTypes.COLOR_TEMPERATURE].minValue - return int(min_value) if min_value else super().min_mireds + return super().max_color_temp_kelvin + min_value_mireds = self.service[CharacteristicsTypes.COLOR_TEMPERATURE].minValue + return ( + color_util.color_temperature_mired_to_kelvin(min_value_mireds) + if min_value_mireds + else super().max_color_temp_kelvin + ) @cached_property - def max_mireds(self) -> int: - """Return the maximum color temperature.""" + def min_color_temp_kelvin(self) -> int: + """Return the warmest color_temp_kelvin that this light supports.""" if not self.service.has(CharacteristicsTypes.COLOR_TEMPERATURE): - return super().max_mireds - max_value = self.service[CharacteristicsTypes.COLOR_TEMPERATURE].maxValue - return int(max_value) if max_value else super().max_mireds + return super().min_color_temp_kelvin + max_value_mireds = self.service[CharacteristicsTypes.COLOR_TEMPERATURE].maxValue + return ( + color_util.color_temperature_mired_to_kelvin(max_value_mireds) + if max_value_mireds + else super().min_color_temp_kelvin + ) @property - def color_temp(self) -> int: - """Return the color temperature.""" - return self.service.value(CharacteristicsTypes.COLOR_TEMPERATURE) + def color_temp_kelvin(self) -> int: + """Return the color temperature value in Kelvin.""" + return color_util.color_temperature_mired_to_kelvin( + self.service.value(CharacteristicsTypes.COLOR_TEMPERATURE) + ) @property def color_mode(self) -> str: @@ -153,7 +168,7 @@ class HomeKitLight(HomeKitEntity, LightEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Turn the specified light on.""" hs_color = kwargs.get(ATTR_HS_COLOR) - temperature = kwargs.get(ATTR_COLOR_TEMP) + temperature_kelvin = kwargs.get(ATTR_COLOR_TEMP_KELVIN) brightness = kwargs.get(ATTR_BRIGHTNESS) characteristics: dict[str, Any] = {} @@ -167,19 +182,18 @@ class HomeKitLight(HomeKitEntity, LightEntity): # does not support both, temperature will win. This is not # expected to happen in the UI, but it is possible via a manual # service call. 
- if temperature is not None: + if temperature_kelvin is not None: if self.service.has(CharacteristicsTypes.COLOR_TEMPERATURE): - characteristics[CharacteristicsTypes.COLOR_TEMPERATURE] = int( - temperature + characteristics[CharacteristicsTypes.COLOR_TEMPERATURE] = ( + color_util.color_temperature_kelvin_to_mired(temperature_kelvin) ) + elif hs_color is None: # Some HomeKit devices implement color temperature with HS # since the spec "technically" does not permit the COLOR_TEMPERATURE # characteristic and the HUE and SATURATION characteristics to be # present at the same time. - hue_sat = color_util.color_temperature_to_hs( - color_util.color_temperature_mired_to_kelvin(temperature) - ) + hue_sat = color_util.color_temperature_to_hs(temperature_kelvin) characteristics[CharacteristicsTypes.HUE] = hue_sat[0] characteristics[CharacteristicsTypes.SATURATION] = hue_sat[1] From 17521f25b627dd9ce7d492eb444f91f4d089bc84 Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Tue, 10 Dec 2024 17:35:53 +1000 Subject: [PATCH 0432/1198] Remove sleep and forbidden handling from Teslemetry (#132784) --- .../components/teslemetry/__init__.py | 1 - .../components/teslemetry/binary_sensor.py | 13 +- .../components/teslemetry/climate.py | 4 +- .../components/teslemetry/coordinator.py | 40 +--- homeassistant/components/teslemetry/cover.py | 3 - homeassistant/components/teslemetry/lock.py | 2 - homeassistant/components/teslemetry/select.py | 6 +- homeassistant/components/teslemetry/switch.py | 6 +- tests/components/teslemetry/const.py | 2 + .../teslemetry/fixtures/vehicle_data_alt.json | 15 +- .../teslemetry/snapshots/test_climate.ambr | 142 +------------- .../snapshots/test_device_tracker.ambr | 34 ++++ .../snapshots/test_media_player.ambr | 1 - .../teslemetry/snapshots/test_select.ambr | 175 ------------------ .../teslemetry/test_binary_sensors.py | 15 +- tests/components/teslemetry/test_climate.py | 31 +--- tests/components/teslemetry/test_cover.py | 15 +- .../teslemetry/test_device_tracker.py | 25 +-- tests/components/teslemetry/test_init.py | 18 +- tests/components/teslemetry/test_lock.py | 17 +- .../teslemetry/test_media_player.py | 13 -- tests/components/teslemetry/test_number.py | 15 +- tests/components/teslemetry/test_select.py | 33 ++-- tests/components/teslemetry/test_switch.py | 21 +-- tests/components/teslemetry/test_update.py | 15 +- 25 files changed, 107 insertions(+), 555 deletions(-) diff --git a/homeassistant/components/teslemetry/__init__.py b/homeassistant/components/teslemetry/__init__.py index aa1d2b42660..0b61120877a 100644 --- a/homeassistant/components/teslemetry/__init__.py +++ b/homeassistant/components/teslemetry/__init__.py @@ -253,7 +253,6 @@ def create_handle_vehicle_stream(vin: str, coordinator) -> Callable[[dict], None """Handle vehicle data from the stream.""" if "vehicle_data" in data: LOGGER.debug("Streaming received vehicle data from %s", vin) - coordinator.updated_once = True coordinator.async_set_updated_data(flatten(data["vehicle_data"])) elif "state" in data: LOGGER.debug("Streaming received state from %s", vin) diff --git a/homeassistant/components/teslemetry/binary_sensor.py b/homeassistant/components/teslemetry/binary_sensor.py index b51a67a0b4e..29ebfea4db1 100644 --- a/homeassistant/components/teslemetry/binary_sensor.py +++ b/homeassistant/components/teslemetry/binary_sensor.py @@ -223,15 +223,12 @@ class TeslemetryVehicleBinarySensorEntity(TeslemetryVehicleEntity, BinarySensorE def _async_update_attrs(self) -> None: """Update the attributes of the binary sensor.""" - 
if self.coordinator.updated_once: - if self._value is None: - self._attr_available = False - self._attr_is_on = None - else: - self._attr_available = True - self._attr_is_on = self.entity_description.is_on(self._value) - else: + if self._value is None: + self._attr_available = False self._attr_is_on = None + else: + self._attr_available = True + self._attr_is_on = self.entity_description.is_on(self._value) class TeslemetryEnergyLiveBinarySensorEntity( diff --git a/homeassistant/components/teslemetry/climate.py b/homeassistant/components/teslemetry/climate.py index 020085140cc..95b769a1c2d 100644 --- a/homeassistant/components/teslemetry/climate.py +++ b/homeassistant/components/teslemetry/climate.py @@ -96,9 +96,7 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): def _async_update_attrs(self) -> None: """Update the attributes of the entity.""" value = self.get("climate_state_is_climate_on") - if value is None: - self._attr_hvac_mode = None - elif value: + if value: self._attr_hvac_mode = HVACMode.HEAT_COOL else: self._attr_hvac_mode = HVACMode.OFF diff --git a/homeassistant/components/teslemetry/coordinator.py b/homeassistant/components/teslemetry/coordinator.py index 63f1bc27c5f..e7232d0f87c 100644 --- a/homeassistant/components/teslemetry/coordinator.py +++ b/homeassistant/components/teslemetry/coordinator.py @@ -6,18 +6,16 @@ from typing import Any from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import TeslaEnergyPeriod, VehicleDataEndpoint from tesla_fleet_api.exceptions import ( - Forbidden, InvalidToken, SubscriptionRequired, TeslaFleetError, - VehicleOffline, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import ENERGY_HISTORY_FIELDS, LOGGER, TeslemetryState +from .const import ENERGY_HISTORY_FIELDS, LOGGER from .helpers import flatten VEHICLE_INTERVAL = timedelta(seconds=30) @@ -39,7 +37,6 @@ ENDPOINTS = [ class TeslemetryVehicleDataCoordinator(DataUpdateCoordinator[dict[str, Any]]): """Class to manage fetching data from the Teslemetry API.""" - updated_once: bool last_active: datetime def __init__( @@ -54,43 +51,24 @@ class TeslemetryVehicleDataCoordinator(DataUpdateCoordinator[dict[str, Any]]): ) self.api = api self.data = flatten(product) - self.updated_once = False self.last_active = datetime.now() async def _async_update_data(self) -> dict[str, Any]: """Update vehicle data using Teslemetry API.""" try: - if self.data["state"] != TeslemetryState.ONLINE: - response = await self.api.vehicle() - self.data["state"] = response["response"]["state"] - - if self.data["state"] != TeslemetryState.ONLINE: - return self.data - - response = await self.api.vehicle_data(endpoints=ENDPOINTS) - data = response["response"] - - except VehicleOffline: - self.data["state"] = TeslemetryState.OFFLINE - return self.data - except InvalidToken as e: - raise ConfigEntryAuthFailed from e - except SubscriptionRequired as e: + data = (await self.api.vehicle_data(endpoints=ENDPOINTS))["response"] + except (InvalidToken, SubscriptionRequired) as e: raise ConfigEntryAuthFailed from e except TeslaFleetError as e: raise UpdateFailed(e.message) from e - self.updated_once = True - return flatten(data) class TeslemetryEnergySiteLiveCoordinator(DataUpdateCoordinator[dict[str, Any]]): """Class to manage fetching energy site live status from the Teslemetry API.""" - updated_once: bool - def 
__init__(self, hass: HomeAssistant, api: EnergySpecific) -> None: """Initialize Teslemetry Energy Site Live coordinator.""" super().__init__( @@ -106,7 +84,7 @@ class TeslemetryEnergySiteLiveCoordinator(DataUpdateCoordinator[dict[str, Any]]) try: data = (await self.api.live_status())["response"] - except (InvalidToken, Forbidden, SubscriptionRequired) as e: + except (InvalidToken, SubscriptionRequired) as e: raise ConfigEntryAuthFailed from e except TeslaFleetError as e: raise UpdateFailed(e.message) from e @@ -122,8 +100,6 @@ class TeslemetryEnergySiteLiveCoordinator(DataUpdateCoordinator[dict[str, Any]]) class TeslemetryEnergySiteInfoCoordinator(DataUpdateCoordinator[dict[str, Any]]): """Class to manage fetching energy site info from the Teslemetry API.""" - updated_once: bool - def __init__(self, hass: HomeAssistant, api: EnergySpecific, product: dict) -> None: """Initialize Teslemetry Energy Info coordinator.""" super().__init__( @@ -140,7 +116,7 @@ class TeslemetryEnergySiteInfoCoordinator(DataUpdateCoordinator[dict[str, Any]]) try: data = (await self.api.site_info())["response"] - except (InvalidToken, Forbidden, SubscriptionRequired) as e: + except (InvalidToken, SubscriptionRequired) as e: raise ConfigEntryAuthFailed from e except TeslaFleetError as e: raise UpdateFailed(e.message) from e @@ -151,8 +127,6 @@ class TeslemetryEnergySiteInfoCoordinator(DataUpdateCoordinator[dict[str, Any]]) class TeslemetryEnergyHistoryCoordinator(DataUpdateCoordinator[dict[str, Any]]): """Class to manage fetching energy site info from the Teslemetry API.""" - updated_once: bool - def __init__(self, hass: HomeAssistant, api: EnergySpecific) -> None: """Initialize Teslemetry Energy Info coordinator.""" super().__init__( @@ -168,13 +142,11 @@ class TeslemetryEnergyHistoryCoordinator(DataUpdateCoordinator[dict[str, Any]]): try: data = (await self.api.energy_history(TeslaEnergyPeriod.DAY))["response"] - except (InvalidToken, Forbidden, SubscriptionRequired) as e: + except (InvalidToken, SubscriptionRequired) as e: raise ConfigEntryAuthFailed from e except TeslaFleetError as e: raise UpdateFailed(e.message) from e - self.updated_once = True - # Add all time periods together output = {key: 0 for key in ENERGY_HISTORY_FIELDS} for period in data.get("time_series", []): diff --git a/homeassistant/components/teslemetry/cover.py b/homeassistant/components/teslemetry/cover.py index 8775da931d5..d14ef385b9c 100644 --- a/homeassistant/components/teslemetry/cover.py +++ b/homeassistant/components/teslemetry/cover.py @@ -73,9 +73,6 @@ class TeslemetryWindowEntity(TeslemetryVehicleEntity, CoverEntity): # All closed set to closed elif CLOSED == fd == fp == rd == rp: self._attr_is_closed = True - # Otherwise, set to unknown - else: - self._attr_is_closed = None async def async_open_cover(self, **kwargs: Any) -> None: """Vent windows.""" diff --git a/homeassistant/components/teslemetry/lock.py b/homeassistant/components/teslemetry/lock.py index 0a7a557ed88..4600391145b 100644 --- a/homeassistant/components/teslemetry/lock.py +++ b/homeassistant/components/teslemetry/lock.py @@ -82,8 +82,6 @@ class TeslemetryCableLockEntity(TeslemetryVehicleEntity, LockEntity): def _async_update_attrs(self) -> None: """Update entity attributes.""" - if self._value is None: - self._attr_is_locked = None self._attr_is_locked = self._value == ENGAGED async def async_lock(self, **kwargs: Any) -> None: diff --git a/homeassistant/components/teslemetry/select.py b/homeassistant/components/teslemetry/select.py index 192e2b194a8..baf1d80ac6c 
100644 --- a/homeassistant/components/teslemetry/select.py +++ b/homeassistant/components/teslemetry/select.py @@ -90,10 +90,12 @@ async def async_setup_entry( ) for description in SEAT_HEATER_DESCRIPTIONS for vehicle in entry.runtime_data.vehicles + if description.key in vehicle.coordinator.data ), ( TeslemetryWheelHeaterSelectEntity(vehicle, entry.runtime_data.scopes) for vehicle in entry.runtime_data.vehicles + if vehicle.coordinator.data.get("climate_state_steering_wheel_heater") ), ( TeslemetryOperationSelectEntity(energysite, entry.runtime_data.scopes) @@ -137,7 +139,7 @@ class TeslemetrySeatHeaterSelectEntity(TeslemetryVehicleEntity, SelectEntity): """Handle updated data from the coordinator.""" self._attr_available = self.entity_description.available_fn(self) value = self._value - if value is None: + if not isinstance(value, int): self._attr_current_option = None else: self._attr_current_option = self._attr_options[value] @@ -182,7 +184,7 @@ class TeslemetryWheelHeaterSelectEntity(TeslemetryVehicleEntity, SelectEntity): """Handle updated data from the coordinator.""" value = self._value - if value is None: + if not isinstance(value, int): self._attr_current_option = None else: self._attr_current_option = self._attr_options[value] diff --git a/homeassistant/components/teslemetry/switch.py b/homeassistant/components/teslemetry/switch.py index 91ef3074bae..6a1cff4c5da 100644 --- a/homeassistant/components/teslemetry/switch.py +++ b/homeassistant/components/teslemetry/switch.py @@ -102,6 +102,7 @@ async def async_setup_entry( ) for vehicle in entry.runtime_data.vehicles for description in VEHICLE_DESCRIPTIONS + if description.key in vehicle.coordinator.data ), ( TeslemetryChargeSwitchEntity( @@ -150,10 +151,7 @@ class TeslemetryVehicleSwitchEntity(TeslemetryVehicleEntity, TeslemetrySwitchEnt def _async_update_attrs(self) -> None: """Update the attributes of the sensor.""" - if self._value is None: - self._attr_is_on = None - else: - self._attr_is_on = bool(self._value) + self._attr_is_on = bool(self._value) async def async_turn_on(self, **kwargs: Any) -> None: """Turn on the Switch.""" diff --git a/tests/components/teslemetry/const.py b/tests/components/teslemetry/const.py index e459379ccf7..bf483d576cd 100644 --- a/tests/components/teslemetry/const.py +++ b/tests/components/teslemetry/const.py @@ -12,6 +12,8 @@ WAKE_UP_ASLEEP = {"response": {"state": TeslemetryState.ASLEEP}, "error": None} PRODUCTS = load_json_object_fixture("products.json", DOMAIN) VEHICLE_DATA = load_json_object_fixture("vehicle_data.json", DOMAIN) +VEHICLE_DATA_ASLEEP = load_json_object_fixture("vehicle_data.json", DOMAIN) +VEHICLE_DATA_ASLEEP["response"]["state"] = TeslemetryState.OFFLINE VEHICLE_DATA_ALT = load_json_object_fixture("vehicle_data_alt.json", DOMAIN) LIVE_STATUS = load_json_object_fixture("live_status.json", DOMAIN) SITE_INFO = load_json_object_fixture("site_info.json", DOMAIN) diff --git a/tests/components/teslemetry/fixtures/vehicle_data_alt.json b/tests/components/teslemetry/fixtures/vehicle_data_alt.json index 9a74508833a..5ef5ea92a74 100644 --- a/tests/components/teslemetry/fixtures/vehicle_data_alt.json +++ b/tests/components/teslemetry/fixtures/vehicle_data_alt.json @@ -24,7 +24,6 @@ "battery_range": 266.87, "charge_amps": 16, "charge_current_request": 16, - "charge_current_request_max": 16, "charge_enable_request": true, "charge_energy_added": 0, "charge_limit_soc": 80, @@ -72,16 +71,16 @@ "user_charge_enable_request": true }, "climate_state": { - "allow_cabin_overheat_protection": true, + 
"allow_cabin_overheat_protection": null, "auto_seat_climate_left": false, "auto_seat_climate_right": false, "auto_steering_wheel_heat": false, "battery_heater": true, "battery_heater_no_power": null, - "cabin_overheat_protection": "Off", + "cabin_overheat_protection": null, "cabin_overheat_protection_actively_cooling": false, "climate_keeper_mode": "off", - "cop_activation_temperature": "Low", + "cop_activation_temperature": null, "defrost_mode": 0, "driver_temp_setting": 22, "fan_status": 0, @@ -106,7 +105,7 @@ "seat_heater_right": 0, "side_mirror_heaters": false, "steering_wheel_heat_level": 0, - "steering_wheel_heater": false, + "steering_wheel_heater": true, "supports_fan_only_cabin_overheat_protection": true, "timestamp": 1705707520649, "wiper_blade_heater": false @@ -204,9 +203,9 @@ "is_user_present": true, "locked": false, "media_info": { - "audio_volume": 2.6667, - "audio_volume_increment": 0.333333, - "audio_volume_max": 10.333333, + "audio_volume": null, + "audio_volume_increment": null, + "audio_volume_max": null, "media_playback_status": "Stopped", "now_playing_album": "", "now_playing_artist": "", diff --git a/tests/components/teslemetry/snapshots/test_climate.ambr b/tests/components/teslemetry/snapshots/test_climate.ambr index ab66ae7241d..7064309e98b 100644 --- a/tests/components/teslemetry/snapshots/test_climate.ambr +++ b/tests/components/teslemetry/snapshots/test_climate.ambr @@ -208,7 +208,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'off', + 'state': 'unknown', }) # --- # name: test_climate_alt[climate.test_climate-entry] @@ -365,146 +365,6 @@ 'unit_of_measurement': None, }) # --- -# name: test_climate_offline[climate.test_cabin_overheat_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - , - ]), - 'max_temp': 40, - 'min_temp': 30, - 'target_temp_step': 5, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.test_cabin_overheat_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cabin overheat protection', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'climate_state_cabin_overheat_protection', - 'unique_id': 'LRW3F7EK4NC700000-climate_state_cabin_overheat_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_climate_offline[climate.test_cabin_overheat_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': None, - 'friendly_name': 'Test Cabin overheat protection', - 'hvac_modes': list([ - , - , - , - ]), - 'max_temp': 40, - 'min_temp': 30, - 'supported_features': , - 'target_temp_step': 5, - }), - 'context': , - 'entity_id': 'climate.test_cabin_overheat_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_climate_offline[climate.test_climate-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 28.0, - 'min_temp': 15.0, - 'preset_modes': list([ - 'off', - 'keep', - 'dog', - 'camp', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, 
- 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.test_climate', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Climate', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': , - 'unique_id': 'LRW3F7EK4NC700000-driver_temp', - 'unit_of_measurement': None, - }) -# --- -# name: test_climate_offline[climate.test_climate-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': None, - 'friendly_name': 'Test Climate', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 28.0, - 'min_temp': 15.0, - 'preset_mode': None, - 'preset_modes': list([ - 'off', - 'keep', - 'dog', - 'camp', - ]), - 'supported_features': , - 'temperature': None, - }), - 'context': , - 'entity_id': 'climate.test_climate', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- # name: test_invalid_error[error] 'Command returned exception: The data request or command is unknown.' # --- diff --git a/tests/components/teslemetry/snapshots/test_device_tracker.ambr b/tests/components/teslemetry/snapshots/test_device_tracker.ambr index 2b1f3d6175c..ac4c388873f 100644 --- a/tests/components/teslemetry/snapshots/test_device_tracker.ambr +++ b/tests/components/teslemetry/snapshots/test_device_tracker.ambr @@ -99,3 +99,37 @@ 'state': 'home', }) # --- +# name: test_device_tracker_alt[device_tracker.test_location-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Location', + 'gps_accuracy': 0, + 'latitude': -30.222626, + 'longitude': -97.6236871, + 'source_type': , + }), + 'context': , + 'entity_id': 'device_tracker.test_location', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_home', + }) +# --- +# name: test_device_tracker_alt[device_tracker.test_route-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Route', + 'gps_accuracy': 0, + 'latitude': 30.2226265, + 'longitude': -97.6236871, + 'source_type': , + }), + 'context': , + 'entity_id': 'device_tracker.test_route', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_home', + }) +# --- diff --git a/tests/components/teslemetry/snapshots/test_media_player.ambr b/tests/components/teslemetry/snapshots/test_media_player.ambr index dc31a270b5e..a9d2569c637 100644 --- a/tests/components/teslemetry/snapshots/test_media_player.ambr +++ b/tests/components/teslemetry/snapshots/test_media_player.ambr @@ -67,7 +67,6 @@ 'media_title': '', 'source': 'Spotify', 'supported_features': , - 'volume_level': 0.25806775026025003, }), 'context': , 'entity_id': 'media_player.test_media_player', diff --git a/tests/components/teslemetry/snapshots/test_select.ambr b/tests/components/teslemetry/snapshots/test_select.ambr index 234c885e81a..0c2547f309d 100644 --- a/tests/components/teslemetry/snapshots/test_select.ambr +++ b/tests/components/teslemetry/snapshots/test_select.ambr @@ -408,178 +408,3 @@ 'state': 'off', }) # --- -# name: test_select[select.test_seat_heater_third_row_left-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'off', - 'low', - 'medium', - 'high', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': 
None, - 'entity_id': 'select.test_seat_heater_third_row_left', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Seat heater third row left', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_seat_heater_third_row_left', - 'unique_id': 'LRW3F7EK4NC700000-climate_state_seat_heater_third_row_left', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[select.test_seat_heater_third_row_left-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Seat heater third row left', - 'options': list([ - 'off', - 'low', - 'medium', - 'high', - ]), - }), - 'context': , - 'entity_id': 'select.test_seat_heater_third_row_left', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_select[select.test_seat_heater_third_row_right-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'off', - 'low', - 'medium', - 'high', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.test_seat_heater_third_row_right', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Seat heater third row right', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_seat_heater_third_row_right', - 'unique_id': 'LRW3F7EK4NC700000-climate_state_seat_heater_third_row_right', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[select.test_seat_heater_third_row_right-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Seat heater third row right', - 'options': list([ - 'off', - 'low', - 'medium', - 'high', - ]), - }), - 'context': , - 'entity_id': 'select.test_seat_heater_third_row_right', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_select[select.test_steering_wheel_heater-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'off', - 'low', - 'high', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.test_steering_wheel_heater', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Steering wheel heater', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_steering_wheel_heat_level', - 'unique_id': 'LRW3F7EK4NC700000-climate_state_steering_wheel_heat_level', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[select.test_steering_wheel_heater-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Steering wheel heater', - 'options': list([ - 'off', - 'low', - 'high', - ]), - }), - 'context': , - 'entity_id': 
'select.test_steering_wheel_heater', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/teslemetry/test_binary_sensors.py b/tests/components/teslemetry/test_binary_sensors.py index 95fccde5f25..0a47dce9537 100644 --- a/tests/components/teslemetry/test_binary_sensors.py +++ b/tests/components/teslemetry/test_binary_sensors.py @@ -5,10 +5,9 @@ from unittest.mock import AsyncMock from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion -from tesla_fleet_api.exceptions import VehicleOffline from homeassistant.components.teslemetry.coordinator import VEHICLE_INTERVAL -from homeassistant.const import STATE_UNKNOWN, Platform +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -49,15 +48,3 @@ async def test_binary_sensor_refresh( await hass.async_block_till_done() assert_entities_alt(hass, entry.entry_id, entity_registry, snapshot) - - -async def test_binary_sensor_offline( - hass: HomeAssistant, - mock_vehicle_data: AsyncMock, -) -> None: - """Tests that the binary sensor entities are correct when offline.""" - - mock_vehicle_data.side_effect = VehicleOffline - await setup_platform(hass, [Platform.BINARY_SENSOR]) - state = hass.states.get("binary_sensor.test_status") - assert state.state == STATE_UNKNOWN diff --git a/tests/components/teslemetry/test_climate.py b/tests/components/teslemetry/test_climate.py index 55f99caa13c..33f2e134806 100644 --- a/tests/components/teslemetry/test_climate.py +++ b/tests/components/teslemetry/test_climate.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion -from tesla_fleet_api.exceptions import InvalidCommand, VehicleOffline +from tesla_fleet_api.exceptions import InvalidCommand from homeassistant.components.climate import ( ATTR_HVAC_MODE, @@ -19,7 +19,6 @@ from homeassistant.components.climate import ( SERVICE_TURN_ON, HVACMode, ) -from homeassistant.components.teslemetry.coordinator import VEHICLE_INTERVAL from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError @@ -31,12 +30,11 @@ from .const import ( COMMAND_IGNORED_REASON, METADATA_NOSCOPE, VEHICLE_DATA_ALT, + VEHICLE_DATA_ASLEEP, WAKE_UP_ASLEEP, WAKE_UP_ONLINE, ) -from tests.common import async_fire_time_changed - @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_climate( @@ -205,20 +203,6 @@ async def test_climate_alt( assert_entities(hass, entry.entry_id, entity_registry, snapshot) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_climate_offline( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - mock_vehicle_data: AsyncMock, -) -> None: - """Tests that the climate entity is correct.""" - - mock_vehicle_data.side_effect = VehicleOffline - entry = await setup_platform(hass, [Platform.CLIMATE]) - assert_entities(hass, entry.entry_id, entity_registry, snapshot) - - async def test_invalid_error(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: """Tests service error is handled.""" @@ -296,18 +280,9 @@ async def test_asleep_or_offline( ) -> None: """Tests asleep is handled.""" + mock_vehicle_data.return_value = VEHICLE_DATA_ASLEEP await 
setup_platform(hass, [Platform.CLIMATE]) entity_id = "climate.test_climate" - mock_vehicle_data.assert_called_once() - - # Put the vehicle alseep - mock_vehicle_data.reset_mock() - mock_vehicle_data.side_effect = VehicleOffline - freezer.tick(VEHICLE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - mock_vehicle_data.assert_called_once() - mock_wake_up.reset_mock() # Run a command but fail trying to wake up the vehicle mock_wake_up.side_effect = InvalidCommand diff --git a/tests/components/teslemetry/test_cover.py b/tests/components/teslemetry/test_cover.py index 5801a356ac5..7dbdcfa5747 100644 --- a/tests/components/teslemetry/test_cover.py +++ b/tests/components/teslemetry/test_cover.py @@ -4,7 +4,6 @@ from unittest.mock import AsyncMock, patch import pytest from syrupy.assertion import SnapshotAssertion -from tesla_fleet_api.exceptions import VehicleOffline from homeassistant.components.cover import ( DOMAIN as COVER_DOMAIN, @@ -13,7 +12,7 @@ from homeassistant.components.cover import ( SERVICE_STOP_COVER, CoverState, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -61,18 +60,6 @@ async def test_cover_noscope( assert_entities(hass, entry.entry_id, entity_registry, snapshot) -async def test_cover_offline( - hass: HomeAssistant, - mock_vehicle_data: AsyncMock, -) -> None: - """Tests that the cover entities are correct when offline.""" - - mock_vehicle_data.side_effect = VehicleOffline - await setup_platform(hass, [Platform.COVER]) - state = hass.states.get("cover.test_windows") - assert state.state == STATE_UNKNOWN - - @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_cover_services( hass: HomeAssistant, diff --git a/tests/components/teslemetry/test_device_tracker.py b/tests/components/teslemetry/test_device_tracker.py index a3fcd428c66..d86c3ca8596 100644 --- a/tests/components/teslemetry/test_device_tracker.py +++ b/tests/components/teslemetry/test_device_tracker.py @@ -1,13 +1,15 @@ """Test the Teslemetry device tracker platform.""" -from syrupy.assertion import SnapshotAssertion -from tesla_fleet_api.exceptions import VehicleOffline +from unittest.mock import AsyncMock -from homeassistant.const import STATE_UNKNOWN, Platform +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from . import assert_entities, setup_platform +from . 
import assert_entities, assert_entities_alt, setup_platform +from .const import VEHICLE_DATA_ALT async def test_device_tracker( @@ -21,13 +23,14 @@ async def test_device_tracker( assert_entities(hass, entry.entry_id, entity_registry, snapshot) -async def test_device_tracker_offline( +async def test_device_tracker_alt( hass: HomeAssistant, - mock_vehicle_data, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_vehicle_data: AsyncMock, ) -> None: - """Tests that the device tracker entities are correct when offline.""" + """Tests that the device tracker entities are correct.""" - mock_vehicle_data.side_effect = VehicleOffline - await setup_platform(hass, [Platform.DEVICE_TRACKER]) - state = hass.states.get("device_tracker.test_location") - assert state.state == STATE_UNKNOWN + mock_vehicle_data.return_value = VEHICLE_DATA_ALT + entry = await setup_platform(hass, [Platform.DEVICE_TRACKER]) + assert_entities_alt(hass, entry.entry_id, entity_registry, snapshot) diff --git a/tests/components/teslemetry/test_init.py b/tests/components/teslemetry/test_init.py index 52fd6a77368..6d4e04c21b4 100644 --- a/tests/components/teslemetry/test_init.py +++ b/tests/components/teslemetry/test_init.py @@ -20,7 +20,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from . import setup_platform -from .const import VEHICLE_DATA_ALT, WAKE_UP_ASLEEP +from .const import VEHICLE_DATA_ALT from tests.common import async_fire_time_changed @@ -69,22 +69,6 @@ async def test_devices( assert device == snapshot(name=f"{device.identifiers}") -# Vehicle Coordinator -async def test_vehicle_refresh_asleep( - hass: HomeAssistant, - mock_vehicle: AsyncMock, - mock_vehicle_data: AsyncMock, - freezer: FrozenDateTimeFactory, -) -> None: - """Test coordinator refresh with an error.""" - - mock_vehicle.return_value = WAKE_UP_ASLEEP - entry = await setup_platform(hass, [Platform.CLIMATE]) - assert entry.state is ConfigEntryState.LOADED - mock_vehicle.assert_called_once() - mock_vehicle_data.assert_not_called() - - async def test_vehicle_refresh_offline( hass: HomeAssistant, mock_vehicle_data: AsyncMock, freezer: FrozenDateTimeFactory ) -> None: diff --git a/tests/components/teslemetry/test_lock.py b/tests/components/teslemetry/test_lock.py index b1460e870f0..f7c9fea1400 100644 --- a/tests/components/teslemetry/test_lock.py +++ b/tests/components/teslemetry/test_lock.py @@ -1,10 +1,9 @@ """Test the Teslemetry lock platform.""" -from unittest.mock import AsyncMock, patch +from unittest.mock import patch import pytest from syrupy.assertion import SnapshotAssertion -from tesla_fleet_api.exceptions import VehicleOffline from homeassistant.components.lock import ( DOMAIN as LOCK_DOMAIN, @@ -12,7 +11,7 @@ from homeassistant.components.lock import ( SERVICE_UNLOCK, LockState, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_registry as er @@ -32,18 +31,6 @@ async def test_lock( assert_entities(hass, entry.entry_id, entity_registry, snapshot) -async def test_lock_offline( - hass: HomeAssistant, - mock_vehicle_data: AsyncMock, -) -> None: - """Tests that the lock entities are correct when offline.""" - - mock_vehicle_data.side_effect = VehicleOffline - await setup_platform(hass, [Platform.LOCK]) - state = hass.states.get("lock.test_lock") - 
assert state.state == STATE_UNKNOWN - - async def test_lock_services( hass: HomeAssistant, ) -> None: diff --git a/tests/components/teslemetry/test_media_player.py b/tests/components/teslemetry/test_media_player.py index 0d30750d10d..ae462bfd026 100644 --- a/tests/components/teslemetry/test_media_player.py +++ b/tests/components/teslemetry/test_media_player.py @@ -3,7 +3,6 @@ from unittest.mock import AsyncMock, patch from syrupy.assertion import SnapshotAssertion -from tesla_fleet_api.exceptions import VehicleOffline from homeassistant.components.media_player import ( ATTR_MEDIA_VOLUME_LEVEL, @@ -47,18 +46,6 @@ async def test_media_player_alt( assert_entities_alt(hass, entry.entry_id, entity_registry, snapshot) -async def test_media_player_offline( - hass: HomeAssistant, - mock_vehicle_data: AsyncMock, -) -> None: - """Tests that the media player entities are correct when offline.""" - - mock_vehicle_data.side_effect = VehicleOffline - await setup_platform(hass, [Platform.MEDIA_PLAYER]) - state = hass.states.get("media_player.test_media_player") - assert state.state == MediaPlayerState.OFF - - async def test_media_player_noscope( hass: HomeAssistant, snapshot: SnapshotAssertion, diff --git a/tests/components/teslemetry/test_number.py b/tests/components/teslemetry/test_number.py index 5df948b475c..65c03514d22 100644 --- a/tests/components/teslemetry/test_number.py +++ b/tests/components/teslemetry/test_number.py @@ -4,14 +4,13 @@ from unittest.mock import AsyncMock, patch import pytest from syrupy.assertion import SnapshotAssertion -from tesla_fleet_api.exceptions import VehicleOffline from homeassistant.components.number import ( ATTR_VALUE, DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -31,18 +30,6 @@ async def test_number( assert_entities(hass, entry.entry_id, entity_registry, snapshot) -async def test_number_offline( - hass: HomeAssistant, - mock_vehicle_data: AsyncMock, -) -> None: - """Tests that the number entities are correct when offline.""" - - mock_vehicle_data.side_effect = VehicleOffline - await setup_platform(hass, [Platform.NUMBER]) - state = hass.states.get("number.test_charge_current") - assert state.state == STATE_UNKNOWN - - @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_number_services( hass: HomeAssistant, mock_vehicle_data: AsyncMock diff --git a/tests/components/teslemetry/test_select.py b/tests/components/teslemetry/test_select.py index caf0b9c1deb..005a6a2004e 100644 --- a/tests/components/teslemetry/test_select.py +++ b/tests/components/teslemetry/test_select.py @@ -5,7 +5,6 @@ from unittest.mock import AsyncMock, patch import pytest from syrupy.assertion import SnapshotAssertion from tesla_fleet_api.const import EnergyExportMode, EnergyOperationMode -from tesla_fleet_api.exceptions import VehicleOffline from homeassistant.components.select import ( ATTR_OPTION, @@ -33,18 +32,6 @@ async def test_select( assert_entities(hass, entry.entry_id, entity_registry, snapshot) -async def test_select_offline( - hass: HomeAssistant, - mock_vehicle_data: AsyncMock, -) -> None: - """Tests that the select entities are correct when offline.""" - - mock_vehicle_data.side_effect = VehicleOffline - await setup_platform(hass, [Platform.SELECT]) - state = hass.states.get("select.test_seat_heater_front_left") - assert 
state.state == STATE_UNKNOWN - - async def test_select_services(hass: HomeAssistant, mock_vehicle_data) -> None: """Tests that the select services work.""" mock_vehicle_data.return_value = VEHICLE_DATA_ALT @@ -112,3 +99,23 @@ async def test_select_services(hass: HomeAssistant, mock_vehicle_data) -> None: state = hass.states.get(entity_id) assert state.state == EnergyExportMode.BATTERY_OK.value call.assert_called_once() + + +async def test_select_invalid_data( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_vehicle_data: AsyncMock, +) -> None: + """Tests that the select entities handle invalid data.""" + + broken_data = VEHICLE_DATA_ALT.copy() + broken_data["response"]["climate_state"]["seat_heater_left"] = "green" + broken_data["response"]["climate_state"]["steering_wheel_heat_level"] = "yellow" + + mock_vehicle_data.return_value = broken_data + await setup_platform(hass, [Platform.SELECT]) + state = hass.states.get("select.test_seat_heater_front_left") + assert state.state == STATE_UNKNOWN + state = hass.states.get("select.test_steering_wheel_heater") + assert state.state == STATE_UNKNOWN diff --git a/tests/components/teslemetry/test_switch.py b/tests/components/teslemetry/test_switch.py index dae3ce6fbf8..6a1ddb430ce 100644 --- a/tests/components/teslemetry/test_switch.py +++ b/tests/components/teslemetry/test_switch.py @@ -4,20 +4,13 @@ from unittest.mock import AsyncMock, patch import pytest from syrupy.assertion import SnapshotAssertion -from tesla_fleet_api.exceptions import VehicleOffline from homeassistant.components.switch import ( DOMAIN as SWITCH_DOMAIN, SERVICE_TURN_OFF, SERVICE_TURN_ON, ) -from homeassistant.const import ( - ATTR_ENTITY_ID, - STATE_OFF, - STATE_ON, - STATE_UNKNOWN, - Platform, -) +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -49,18 +42,6 @@ async def test_switch_alt( assert_entities_alt(hass, entry.entry_id, entity_registry, snapshot) -async def test_switch_offline( - hass: HomeAssistant, - mock_vehicle_data: AsyncMock, -) -> None: - """Tests that the switch entities are correct when offline.""" - - mock_vehicle_data.side_effect = VehicleOffline - await setup_platform(hass, [Platform.SWITCH]) - state = hass.states.get("switch.test_auto_seat_climate_left") - assert state.state == STATE_UNKNOWN - - @pytest.mark.usefixtures("entity_registry_enabled_by_default") @pytest.mark.parametrize( ("name", "on", "off"), diff --git a/tests/components/teslemetry/test_update.py b/tests/components/teslemetry/test_update.py index f02f09cd19a..448f31afd67 100644 --- a/tests/components/teslemetry/test_update.py +++ b/tests/components/teslemetry/test_update.py @@ -5,12 +5,11 @@ from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory from syrupy.assertion import SnapshotAssertion -from tesla_fleet_api.exceptions import VehicleOffline from homeassistant.components.teslemetry.coordinator import VEHICLE_INTERVAL from homeassistant.components.teslemetry.update import INSTALLING from homeassistant.components.update import DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -44,18 +43,6 @@ async def test_update_alt( assert_entities(hass, 
entry.entry_id, entity_registry, snapshot) -async def test_update_offline( - hass: HomeAssistant, - mock_vehicle_data: AsyncMock, -) -> None: - """Tests that the update entities are correct when offline.""" - - mock_vehicle_data.side_effect = VehicleOffline - await setup_platform(hass, [Platform.UPDATE]) - state = hass.states.get("update.test_update") - assert state.state == STATE_UNKNOWN - - async def test_update_services( hass: HomeAssistant, mock_vehicle_data: AsyncMock, From 3d1258ddc1d8eaab6a8b121c27044afee5781ebe Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 10 Dec 2024 08:36:43 +0100 Subject: [PATCH 0433/1198] Migrate eufy lights to use Kelvin (#132790) --- homeassistant/components/eufy/light.py | 33 ++++++++------------------ 1 file changed, 10 insertions(+), 23 deletions(-) diff --git a/homeassistant/components/eufy/light.py b/homeassistant/components/eufy/light.py index c1506c00cdc..95ad8a15d1c 100644 --- a/homeassistant/components/eufy/light.py +++ b/homeassistant/components/eufy/light.py @@ -8,7 +8,7 @@ import lakeside from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ColorMode, LightEntity, @@ -17,10 +17,6 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType import homeassistant.util.color as color_util -from homeassistant.util.color import ( - color_temperature_kelvin_to_mired as kelvin_to_mired, - color_temperature_mired_to_kelvin as mired_to_kelvin, -) EUFYHOME_MAX_KELVIN = 6500 EUFYHOME_MIN_KELVIN = 2700 @@ -41,6 +37,9 @@ def setup_platform( class EufyHomeLight(LightEntity): """Representation of a EufyHome light.""" + _attr_min_color_temp_kelvin = EUFYHOME_MIN_KELVIN + _attr_max_color_temp_kelvin = EUFYHOME_MAX_KELVIN + def __init__(self, device): """Initialize the light.""" @@ -96,23 +95,12 @@ class EufyHomeLight(LightEntity): return int(self._brightness * 255 / 100) @property - def min_mireds(self) -> int: - """Return minimum supported color temperature.""" - return kelvin_to_mired(EUFYHOME_MAX_KELVIN) - - @property - def max_mireds(self) -> int: - """Return maximum supported color temperature.""" - return kelvin_to_mired(EUFYHOME_MIN_KELVIN) - - @property - def color_temp(self): - """Return the color temperature of this light.""" - temp_in_k = int( + def color_temp_kelvin(self) -> int: + """Return the color temperature value in Kelvin.""" + return int( EUFYHOME_MIN_KELVIN + (self._temp * (EUFYHOME_MAX_KELVIN - EUFYHOME_MIN_KELVIN) / 100) ) - return kelvin_to_mired(temp_in_k) @property def hs_color(self): @@ -134,7 +122,7 @@ class EufyHomeLight(LightEntity): def turn_on(self, **kwargs: Any) -> None: """Turn the specified light on.""" brightness = kwargs.get(ATTR_BRIGHTNESS) - colortemp = kwargs.get(ATTR_COLOR_TEMP) + color_temp_kelvin = kwargs.get(ATTR_COLOR_TEMP_KELVIN) hs = kwargs.get(ATTR_HS_COLOR) if brightness is not None: @@ -144,10 +132,9 @@ class EufyHomeLight(LightEntity): self._brightness = 100 brightness = self._brightness - if colortemp is not None: + if color_temp_kelvin is not None: self._colormode = False - temp_in_k = mired_to_kelvin(colortemp) - relative_temp = temp_in_k - EUFYHOME_MIN_KELVIN + relative_temp = color_temp_kelvin - EUFYHOME_MIN_KELVIN temp = int( relative_temp * 100 / (EUFYHOME_MAX_KELVIN - EUFYHOME_MIN_KELVIN) ) From cd420aee88d308744b35f75321fcafb5f51d0f59 Mon Sep 17 00:00:00 2001 From: 
David Knowles Date: Tue, 10 Dec 2024 02:38:34 -0500 Subject: [PATCH 0434/1198] Catch Hydrawise authorization errors in the correct place (#132727) --- .../components/hydrawise/config_flow.py | 15 ++++--- tests/components/hydrawise/conftest.py | 1 - .../components/hydrawise/test_config_flow.py | 39 +++++++++++++++---- 3 files changed, 41 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/hydrawise/config_flow.py b/homeassistant/components/hydrawise/config_flow.py index 242763e81e3..419927d6d42 100644 --- a/homeassistant/components/hydrawise/config_flow.py +++ b/homeassistant/components/hydrawise/config_flow.py @@ -6,7 +6,7 @@ from collections.abc import Callable, Mapping from typing import Any from aiohttp import ClientError -from pydrawise import auth, client +from pydrawise import auth as pydrawise_auth, client from pydrawise.exceptions import NotAuthorizedError import voluptuous as vol @@ -29,16 +29,21 @@ class HydrawiseConfigFlow(ConfigFlow, domain=DOMAIN): on_failure: Callable[[str], ConfigFlowResult], ) -> ConfigFlowResult: """Create the config entry.""" - # Verify that the provided credentials work.""" - api = client.Hydrawise(auth.Auth(username, password)) + auth = pydrawise_auth.Auth(username, password) try: - # Don't fetch zones because we don't need them yet. - user = await api.get_user(fetch_zones=False) + await auth.token() except NotAuthorizedError: return on_failure("invalid_auth") except TimeoutError: return on_failure("timeout_connect") + + try: + api = client.Hydrawise(auth) + # Don't fetch zones because we don't need them yet. + user = await api.get_user(fetch_zones=False) + except TimeoutError: + return on_failure("timeout_connect") except ClientError as ex: LOGGER.error("Unable to connect to Hydrawise cloud service: %s", ex) return on_failure("cannot_connect") diff --git a/tests/components/hydrawise/conftest.py b/tests/components/hydrawise/conftest.py index a938322414b..2de7fb1da9a 100644 --- a/tests/components/hydrawise/conftest.py +++ b/tests/components/hydrawise/conftest.py @@ -56,7 +56,6 @@ def mock_legacy_pydrawise( @pytest.fixture def mock_pydrawise( - mock_auth: AsyncMock, user: User, controller: Controller, zones: list[Zone], diff --git a/tests/components/hydrawise/test_config_flow.py b/tests/components/hydrawise/test_config_flow.py index e85b1b9b249..4d25fd5840b 100644 --- a/tests/components/hydrawise/test_config_flow.py +++ b/tests/components/hydrawise/test_config_flow.py @@ -21,6 +21,7 @@ pytestmark = pytest.mark.usefixtures("mock_setup_entry") async def test_form( hass: HomeAssistant, mock_setup_entry: AsyncMock, + mock_auth: AsyncMock, mock_pydrawise: AsyncMock, user: User, ) -> None: @@ -46,11 +47,12 @@ async def test_form( CONF_PASSWORD: "__password__", } assert len(mock_setup_entry.mock_calls) == 1 - mock_pydrawise.get_user.assert_called_once_with(fetch_zones=False) + mock_auth.token.assert_awaited_once_with() + mock_pydrawise.get_user.assert_awaited_once_with(fetch_zones=False) async def test_form_api_error( - hass: HomeAssistant, mock_pydrawise: AsyncMock, user: User + hass: HomeAssistant, mock_auth: AsyncMock, mock_pydrawise: AsyncMock, user: User ) -> None: """Test we handle API errors.""" mock_pydrawise.get_user.side_effect = ClientError("XXX") @@ -71,8 +73,29 @@ async def test_form_api_error( assert result2["type"] is FlowResultType.CREATE_ENTRY -async def test_form_connect_timeout( - hass: HomeAssistant, mock_pydrawise: AsyncMock, user: User +async def test_form_auth_connect_timeout( + hass: HomeAssistant, mock_auth: AsyncMock, 
mock_pydrawise: AsyncMock +) -> None: + """Test we handle API errors.""" + mock_auth.token.side_effect = TimeoutError + init_result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + data = {CONF_USERNAME: "asdf@asdf.com", CONF_PASSWORD: "__password__"} + result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], data + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "timeout_connect"} + + mock_auth.token.reset_mock(side_effect=True) + result2 = await hass.config_entries.flow.async_configure(result["flow_id"], data) + assert result2["type"] is FlowResultType.CREATE_ENTRY + + +async def test_form_client_connect_timeout( + hass: HomeAssistant, mock_auth: AsyncMock, mock_pydrawise: AsyncMock, user: User ) -> None: """Test we handle API errors.""" mock_pydrawise.get_user.side_effect = TimeoutError @@ -94,10 +117,10 @@ async def test_form_connect_timeout( async def test_form_not_authorized_error( - hass: HomeAssistant, mock_pydrawise: AsyncMock, user: User + hass: HomeAssistant, mock_auth: AsyncMock, mock_pydrawise: AsyncMock ) -> None: """Test we handle API errors.""" - mock_pydrawise.get_user.side_effect = NotAuthorizedError + mock_auth.token.side_effect = NotAuthorizedError init_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -109,8 +132,7 @@ async def test_form_not_authorized_error( assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": "invalid_auth"} - mock_pydrawise.get_user.reset_mock(side_effect=True) - mock_pydrawise.get_user.return_value = user + mock_auth.token.reset_mock(side_effect=True) result2 = await hass.config_entries.flow.async_configure(result["flow_id"], data) assert result2["type"] is FlowResultType.CREATE_ENTRY @@ -118,6 +140,7 @@ async def test_form_not_authorized_error( async def test_reauth( hass: HomeAssistant, user: User, + mock_auth: AsyncMock, mock_pydrawise: AsyncMock, ) -> None: """Test that re-authorization works.""" From a11bf5cce11e17e94f3ac30c80df1175b06fcf5a Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 10 Dec 2024 08:43:07 +0100 Subject: [PATCH 0435/1198] Migrate blebox lights to use Kelvin (#132787) --- homeassistant/components/blebox/light.py | 27 ++++++++++++++---------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/blebox/light.py b/homeassistant/components/blebox/light.py index 33fff1d71da..c3c9de8be51 100644 --- a/homeassistant/components/blebox/light.py +++ b/homeassistant/components/blebox/light.py @@ -11,7 +11,7 @@ from blebox_uniapi.light import BleboxColorMode from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, @@ -22,6 +22,7 @@ from homeassistant.components.light import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import color as color_util from . 
import BleBoxConfigEntry from .entity import BleBoxEntity @@ -58,8 +59,8 @@ COLOR_MODE_MAP = { class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity): """Representation of BleBox lights.""" - _attr_max_mireds = 370 # 1,000,000 divided by 2700 Kelvin = 370 Mireds - _attr_min_mireds = 154 # 1,000,000 divided by 6500 Kelvin = 154 Mireds + _attr_min_color_temp_kelvin = 2700 # 370 Mireds + _attr_max_color_temp_kelvin = 6500 # 154 Mireds def __init__(self, feature: blebox_uniapi.light.Light) -> None: """Initialize a BleBox light.""" @@ -78,9 +79,9 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity): return self._feature.brightness @property - def color_temp(self): - """Return color temperature.""" - return self._feature.color_temp + def color_temp_kelvin(self) -> int: + """Return the color temperature value in Kelvin.""" + return color_util.color_temperature_mired_to_kelvin(self._feature.color_temp) @property def color_mode(self): @@ -136,7 +137,7 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity): rgbw = kwargs.get(ATTR_RGBW_COLOR) brightness = kwargs.get(ATTR_BRIGHTNESS) effect = kwargs.get(ATTR_EFFECT) - color_temp = kwargs.get(ATTR_COLOR_TEMP) + color_temp_kelvin = kwargs.get(ATTR_COLOR_TEMP_KELVIN) rgbww = kwargs.get(ATTR_RGBWW_COLOR) feature = self._feature value = feature.sensible_on_value @@ -144,9 +145,10 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity): if rgbw is not None: value = list(rgbw) - if color_temp is not None: + if color_temp_kelvin is not None: value = feature.return_color_temp_with_brightness( - int(color_temp), self.brightness + int(color_util.color_temperature_kelvin_to_mired(color_temp_kelvin)), + self.brightness, ) if rgbww is not None: @@ -158,9 +160,12 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity): value = list(rgb) if brightness is not None: - if self.color_mode == ATTR_COLOR_TEMP: + if self.color_mode == ColorMode.COLOR_TEMP: value = feature.return_color_temp_with_brightness( - self.color_temp, brightness + color_util.color_temperature_kelvin_to_mired( + self.color_temp_kelvin + ), + brightness, ) else: value = feature.apply_brightness(value, brightness) From 82692f9a8f203b5bc481321b10ff671f8c00ac89 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 10 Dec 2024 09:20:35 +0100 Subject: [PATCH 0436/1198] Migrate mired attributes to kelvin in limitlessled (#132785) --- .../components/limitlessled/light.py | 27 +++++++------------ 1 file changed, 10 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/limitlessled/light.py b/homeassistant/components/limitlessled/light.py index 5f771a53e86..4b2b75be9d7 100644 --- a/homeassistant/components/limitlessled/light.py +++ b/homeassistant/components/limitlessled/light.py @@ -38,11 +38,7 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.util.color import ( - color_hs_to_RGB, - color_temperature_kelvin_to_mired, - color_temperature_mired_to_kelvin, -) +from homeassistant.util.color import color_hs_to_RGB _LOGGER = logging.getLogger(__name__) @@ -221,8 +217,8 @@ class LimitlessLEDGroup(LightEntity, RestoreEntity): """Representation of a LimitessLED group.""" _attr_assumed_state = True - _attr_max_mireds = 
370 - _attr_min_mireds = 154 + _attr_min_color_temp_kelvin = 2700 # 370 Mireds + _attr_max_color_temp_kelvin = 6500 # 154 Mireds _attr_should_poll = False def __init__(self, group: Group, config: dict[str, Any]) -> None: @@ -265,7 +261,9 @@ class LimitlessLEDGroup(LightEntity, RestoreEntity): if last_state := await self.async_get_last_state(): self._attr_is_on = last_state.state == STATE_ON self._attr_brightness = last_state.attributes.get("brightness") - self._attr_color_temp = last_state.attributes.get("color_temp") + self._attr_color_temp_kelvin = last_state.attributes.get( + "color_temp_kelvin" + ) self._attr_hs_color = last_state.attributes.get("hs_color") @property @@ -334,9 +332,7 @@ class LimitlessLEDGroup(LightEntity, RestoreEntity): if ColorMode.HS in self.supported_color_modes: pipeline.white() self._attr_hs_color = WHITE - self._attr_color_temp = color_temperature_kelvin_to_mired( - kwargs[ATTR_COLOR_TEMP_KELVIN] - ) + self._attr_color_temp_kelvin = kwargs[ATTR_COLOR_TEMP_KELVIN] args["temperature"] = self.limitlessled_temperature() if args: @@ -360,12 +356,9 @@ class LimitlessLEDGroup(LightEntity, RestoreEntity): def limitlessled_temperature(self) -> float: """Convert Home Assistant color temperature units to percentage.""" - max_kelvin = color_temperature_mired_to_kelvin(self.min_mireds) - min_kelvin = color_temperature_mired_to_kelvin(self.max_mireds) - width = max_kelvin - min_kelvin - assert self.color_temp is not None - kelvin = color_temperature_mired_to_kelvin(self.color_temp) - temperature = (kelvin - min_kelvin) / width + width = self.max_color_temp_kelvin - self.min_color_temp_kelvin + assert self.color_temp_kelvin is not None + temperature = (self.color_temp_kelvin - self.min_color_temp_kelvin) / width return max(0, min(1, temperature)) def limitlessled_brightness(self) -> float: From b0b3f04a0509f9c7703500294495d554a7ad89f8 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 10 Dec 2024 09:34:15 +0100 Subject: [PATCH 0437/1198] Migrate iglo lights to use Kelvin (#132796) --- homeassistant/components/iglo/light.py | 32 ++++++++++---------------- 1 file changed, 12 insertions(+), 20 deletions(-) diff --git a/homeassistant/components/iglo/light.py b/homeassistant/components/iglo/light.py index a31183f4489..0d20761c6e5 100644 --- a/homeassistant/components/iglo/light.py +++ b/homeassistant/components/iglo/light.py @@ -2,7 +2,6 @@ from __future__ import annotations -import math from typing import Any from iglo import Lamp @@ -11,7 +10,7 @@ import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, PLATFORM_SCHEMA as LIGHT_PLATFORM_SCHEMA, @@ -83,23 +82,19 @@ class IGloLamp(LightEntity): return ColorMode.HS @property - def color_temp(self): - """Return the color temperature.""" - return color_util.color_temperature_kelvin_to_mired(self._lamp.state()["white"]) + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" + return self._lamp.state()["white"] @property - def min_mireds(self) -> int: - """Return the coldest color_temp that this light supports.""" - return math.ceil( - color_util.color_temperature_kelvin_to_mired(self._lamp.max_kelvin) - ) + def max_color_temp_kelvin(self) -> int: + """Return the coldest color_temp_kelvin that this light supports.""" + return self._lamp.max_kelvin @property - def max_mireds(self) -> int: - """Return the warmest color_temp that this light supports.""" - 
return math.ceil( - color_util.color_temperature_kelvin_to_mired(self._lamp.min_kelvin) - ) + def min_color_temp_kelvin(self) -> int: + """Return the warmest color_temp_kelvin that this light supports.""" + return self._lamp.min_kelvin @property def hs_color(self): @@ -135,11 +130,8 @@ class IGloLamp(LightEntity): self._lamp.rgb(*rgb) return - if ATTR_COLOR_TEMP in kwargs: - kelvin = int( - color_util.color_temperature_mired_to_kelvin(kwargs[ATTR_COLOR_TEMP]) - ) - self._lamp.white(kelvin) + if ATTR_COLOR_TEMP_KELVIN in kwargs: + self._lamp.white(kwargs[ATTR_COLOR_TEMP_KELVIN]) return if ATTR_EFFECT in kwargs: From 988ca114a06f0a8bd9e13be94c510e7ef6c19636 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 10 Dec 2024 09:35:01 +0100 Subject: [PATCH 0438/1198] Update ciso8601 to v2.3.2 (#132793) --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 2a580edf3a2..cd45f15fe7c 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -24,7 +24,7 @@ bluetooth-auto-recovery==1.4.2 bluetooth-data-tools==1.20.0 cached-ipaddress==0.8.0 certifi>=2021.5.30 -ciso8601==2.3.1 +ciso8601==2.3.2 cryptography==44.0.0 dbus-fast==2.24.3 fnv-hash-fast==1.0.2 diff --git a/pyproject.toml b/pyproject.toml index dcfd84b0fbe..5239874e2f6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,7 +41,7 @@ dependencies = [ "awesomeversion==24.6.0", "bcrypt==4.2.0", "certifi>=2021.5.30", - "ciso8601==2.3.1", + "ciso8601==2.3.2", "fnv-hash-fast==1.0.2", # hass-nabucasa is imported by helpers which don't depend on the cloud # integration diff --git a/requirements.txt b/requirements.txt index 4379d51e204..7ed445c6b65 100644 --- a/requirements.txt +++ b/requirements.txt @@ -17,7 +17,7 @@ audioop-lts==0.2.1;python_version>='3.13' awesomeversion==24.6.0 bcrypt==4.2.0 certifi>=2021.5.30 -ciso8601==2.3.1 +ciso8601==2.3.2 fnv-hash-fast==1.0.2 hass-nabucasa==0.86.0 httpx==0.27.2 From 3bf4ef095d47917048c26ba0f8bd31cacb389b92 Mon Sep 17 00:00:00 2001 From: Assaf Inbal Date: Tue, 10 Dec 2024 10:39:33 +0200 Subject: [PATCH 0439/1198] bump pyituran to 0.1.4 (#132791) --- homeassistant/components/ituran/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/ituran/manifest.json b/homeassistant/components/ituran/manifest.json index 570b4582a8a..93860427a77 100644 --- a/homeassistant/components/ituran/manifest.json +++ b/homeassistant/components/ituran/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ituran", "integration_type": "hub", "iot_class": "cloud_polling", - "requirements": ["pyituran==0.1.3"] + "requirements": ["pyituran==0.1.4"] } diff --git a/requirements_all.txt b/requirements_all.txt index 0b71ddbd283..0152d65111a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1997,7 +1997,7 @@ pyisy==3.1.14 pyitachip2ir==0.0.7 # homeassistant.components.ituran -pyituran==0.1.3 +pyituran==0.1.4 # homeassistant.components.jvc_projector pyjvcprojector==1.1.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index cdc8d07958e..6e46edf9680 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1611,7 +1611,7 @@ pyiss==1.0.1 pyisy==3.1.14 # homeassistant.components.ituran -pyituran==0.1.3 +pyituran==0.1.4 # 
homeassistant.components.jvc_projector pyjvcprojector==1.1.2 From bcedb004be68ebeb63a21f2b288042499c2b4cf6 Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Tue, 10 Dec 2024 03:40:51 -0500 Subject: [PATCH 0440/1198] Add diagnostics platform to Russound RIO (#132776) --- .../components/russound_rio/diagnostics.py | 14 ++++ tests/components/russound_rio/conftest.py | 1 + .../russound_rio/fixtures/get_state.json | 75 +++++++++++++++++ .../snapshots/test_diagnostics.ambr | 81 +++++++++++++++++++ .../russound_rio/test_diagnostics.py | 29 +++++++ 5 files changed, 200 insertions(+) create mode 100644 homeassistant/components/russound_rio/diagnostics.py create mode 100644 tests/components/russound_rio/fixtures/get_state.json create mode 100644 tests/components/russound_rio/snapshots/test_diagnostics.ambr create mode 100644 tests/components/russound_rio/test_diagnostics.py diff --git a/homeassistant/components/russound_rio/diagnostics.py b/homeassistant/components/russound_rio/diagnostics.py new file mode 100644 index 00000000000..0e96413c41a --- /dev/null +++ b/homeassistant/components/russound_rio/diagnostics.py @@ -0,0 +1,14 @@ +"""Diagnostics platform for Russound RIO.""" + +from typing import Any + +from homeassistant.core import HomeAssistant + +from . import RussoundConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: RussoundConfigEntry +) -> dict[str, Any]: + """Return diagnostics for the provided config entry.""" + return entry.runtime_data.state diff --git a/tests/components/russound_rio/conftest.py b/tests/components/russound_rio/conftest.py index 09cccd7d83f..deb7bfccdf0 100644 --- a/tests/components/russound_rio/conftest.py +++ b/tests/components/russound_rio/conftest.py @@ -54,6 +54,7 @@ def mock_russound_client() -> Generator[AsyncMock]: int(k): Source.from_dict(v) for k, v in load_json_object_fixture("get_sources.json", DOMAIN).items() } + client.state = load_json_object_fixture("get_state.json", DOMAIN) for k, v in zones.items(): v.device_str = zone_device_str(1, k) v.fetch_current_source = Mock( diff --git a/tests/components/russound_rio/fixtures/get_state.json b/tests/components/russound_rio/fixtures/get_state.json new file mode 100644 index 00000000000..931b7611d01 --- /dev/null +++ b/tests/components/russound_rio/fixtures/get_state.json @@ -0,0 +1,75 @@ +{ + "S": { + "3": { + "name": "Streamer", + "type": "Misc Audio" + }, + "2": { + "name": "Liv. Rm TV", + "type": "Misc Audio" + }, + "5": { + "name": "Source 5", + "type": null + }, + "4": { + "name": "Basement TV", + "type": null + }, + "1": { + "name": "Tuner", + "type": "DMS-3.1 Media Streamer", + "channelName": null, + "coverArtURL": null, + "mode": "Unknown", + "shuffleMode": null, + "repeatMode": null, + "volume": "0", + "rating": null, + "playlistName": "Please Wait...", + "artistName": null, + "albumName": null, + "songName": "Connecting to media source." 
+ }, + "6": { + "name": "Source 6", + "type": null + }, + "8": { + "name": "Source 8", + "type": null + }, + "7": { + "name": "Source 7", + "type": null + } + }, + "System": { + "status": "OFF" + }, + "C": { + "1": { + "Z": { + "1": { + "name": "Deck", + "treble": "0", + "balance": "0", + "loudness": "OFF", + "turnOnVolume": "10", + "doNotDisturb": "OFF", + "currentSource": "2", + "volume": "0", + "status": "OFF", + "mute": "OFF", + "partyMode": "OFF", + "bass": "0", + "page": "OFF", + "sharedSource": "OFF", + "sleepTimeRemaining": "0", + "lastError": null, + "enabled_sources": [3, 2] + } + } + } + } +} diff --git a/tests/components/russound_rio/snapshots/test_diagnostics.ambr b/tests/components/russound_rio/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..ff3a8bf757f --- /dev/null +++ b/tests/components/russound_rio/snapshots/test_diagnostics.ambr @@ -0,0 +1,81 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'C': dict({ + '1': dict({ + 'Z': dict({ + '1': dict({ + 'balance': '0', + 'bass': '0', + 'currentSource': '2', + 'doNotDisturb': 'OFF', + 'enabled_sources': list([ + 3, + 2, + ]), + 'lastError': None, + 'loudness': 'OFF', + 'mute': 'OFF', + 'name': 'Deck', + 'page': 'OFF', + 'partyMode': 'OFF', + 'sharedSource': 'OFF', + 'sleepTimeRemaining': '0', + 'status': 'OFF', + 'treble': '0', + 'turnOnVolume': '10', + 'volume': '0', + }), + }), + }), + }), + 'S': dict({ + '1': dict({ + 'albumName': None, + 'artistName': None, + 'channelName': None, + 'coverArtURL': None, + 'mode': 'Unknown', + 'name': 'Tuner', + 'playlistName': 'Please Wait...', + 'rating': None, + 'repeatMode': None, + 'shuffleMode': None, + 'songName': 'Connecting to media source.', + 'type': 'DMS-3.1 Media Streamer', + 'volume': '0', + }), + '2': dict({ + 'name': 'Liv. Rm TV', + 'type': 'Misc Audio', + }), + '3': dict({ + 'name': 'Streamer', + 'type': 'Misc Audio', + }), + '4': dict({ + 'name': 'Basement TV', + 'type': None, + }), + '5': dict({ + 'name': 'Source 5', + 'type': None, + }), + '6': dict({ + 'name': 'Source 6', + 'type': None, + }), + '7': dict({ + 'name': 'Source 7', + 'type': None, + }), + '8': dict({ + 'name': 'Source 8', + 'type': None, + }), + }), + 'System': dict({ + 'status': 'OFF', + }), + }) +# --- diff --git a/tests/components/russound_rio/test_diagnostics.py b/tests/components/russound_rio/test_diagnostics.py new file mode 100644 index 00000000000..c6c5441128d --- /dev/null +++ b/tests/components/russound_rio/test_diagnostics.py @@ -0,0 +1,29 @@ +"""Tests for the diagnostics data provided by the Russound RIO integration.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_russound_client: AsyncMock, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + await setup_integration(hass, mock_config_entry) + + result = await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) + assert result == snapshot From 790edea4a0e322b64b694d6120edba49986192be Mon Sep 17 00:00:00 2001 From: YogevBokobza Date: Tue, 10 Dec 2024 10:43:09 +0200 Subject: [PATCH 0441/1198] Bump aioswitcher to 5.1.0 (#132753) * Bump aioswitcher to 5.0.0 * fix tests --- homeassistant/components/switcher_kis/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/switcher_kis/consts.py | 6 ++++++ 4 files changed, 9 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/switcher_kis/manifest.json b/homeassistant/components/switcher_kis/manifest.json index 987dac65077..d0731c5ae3b 100644 --- a/homeassistant/components/switcher_kis/manifest.json +++ b/homeassistant/components/switcher_kis/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/switcher_kis", "iot_class": "local_push", "loggers": ["aioswitcher"], - "requirements": ["aioswitcher==5.0.0"], + "requirements": ["aioswitcher==5.1.0"], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index 0152d65111a..160e72e2b19 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -384,7 +384,7 @@ aiosteamist==1.0.0 aiostreammagic==2.10.0 # homeassistant.components.switcher_kis -aioswitcher==5.0.0 +aioswitcher==5.1.0 # homeassistant.components.syncthing aiosyncthing==0.5.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 6e46edf9680..596998b1dd2 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -366,7 +366,7 @@ aiosteamist==1.0.0 aiostreammagic==2.10.0 # homeassistant.components.switcher_kis -aioswitcher==5.0.0 +aioswitcher==5.1.0 # homeassistant.components.syncthing aiosyncthing==0.5.1 diff --git a/tests/components/switcher_kis/consts.py b/tests/components/switcher_kis/consts.py index e9d96673e24..defe970c674 100644 --- a/tests/components/switcher_kis/consts.py +++ b/tests/components/switcher_kis/consts.py @@ -3,6 +3,7 @@ from aioswitcher.device import ( DeviceState, DeviceType, + ShutterChildLock, ShutterDirection, SwitcherDualShutterSingleLight, SwitcherLight, @@ -90,6 +91,8 @@ DUMMY_POSITION = [54] DUMMY_POSITION_2 = [54, 54] DUMMY_DIRECTION = [ShutterDirection.SHUTTER_STOP] DUMMY_DIRECTION_2 = [ShutterDirection.SHUTTER_STOP, ShutterDirection.SHUTTER_STOP] +DUMMY_CHILD_LOCK = [ShutterChildLock.OFF] +DUMMY_CHILD_LOCK_2 = [ShutterChildLock.OFF, ShutterChildLock.OFF] DUMMY_USERNAME = "email" DUMMY_TOKEN = "zvVvd7JxtN7CgvkD1Psujw==" DUMMY_LIGHT = [DeviceState.ON] @@ -135,6 +138,7 @@ DUMMY_SHUTTER_DEVICE = SwitcherShutter( DUMMY_TOKEN_NEEDED4, DUMMY_POSITION, DUMMY_DIRECTION, + DUMMY_CHILD_LOCK, ) DUMMY_SINGLE_SHUTTER_DUAL_LIGHT_DEVICE = SwitcherSingleShutterDualLight( @@ -148,6 +152,7 @@ DUMMY_SINGLE_SHUTTER_DUAL_LIGHT_DEVICE = SwitcherSingleShutterDualLight( DUMMY_TOKEN_NEEDED5, DUMMY_POSITION, DUMMY_DIRECTION, + DUMMY_CHILD_LOCK, DUMMY_LIGHT_2, ) @@ -162,6 +167,7 @@ 
DUMMY_DUAL_SHUTTER_SINGLE_LIGHT_DEVICE = SwitcherDualShutterSingleLight( DUMMY_TOKEN_NEEDED6, DUMMY_POSITION_2, DUMMY_DIRECTION_2, + DUMMY_CHILD_LOCK_2, DUMMY_LIGHT, ) From 2a127d19dd9def22c816261e536641550b5d9f80 Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Tue, 10 Dec 2024 09:50:53 +0100 Subject: [PATCH 0442/1198] Use UnitOfEnergy.KILO_CALORIE in Tractive integration (#131909) --- homeassistant/components/tractive/sensor.py | 3 ++- tests/components/tractive/snapshots/test_sensor.ambr | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/tractive/sensor.py b/homeassistant/components/tractive/sensor.py index a92efa660b6..a3c1893267c 100644 --- a/homeassistant/components/tractive/sensor.py +++ b/homeassistant/components/tractive/sensor.py @@ -16,6 +16,7 @@ from homeassistant.const import ( ATTR_BATTERY_LEVEL, PERCENTAGE, EntityCategory, + UnitOfEnergy, UnitOfTime, ) from homeassistant.core import HomeAssistant, callback @@ -127,7 +128,7 @@ SENSOR_TYPES: tuple[TractiveSensorEntityDescription, ...] = ( TractiveSensorEntityDescription( key=ATTR_CALORIES, translation_key="calories", - native_unit_of_measurement="kcal", + native_unit_of_measurement=UnitOfEnergy.KILO_CALORIE, signal_prefix=TRACKER_WELLNESS_STATUS_UPDATED, state_class=SensorStateClass.TOTAL, ), diff --git a/tests/components/tractive/snapshots/test_sensor.ambr b/tests/components/tractive/snapshots/test_sensor.ambr index f1ed397450e..f10cfb29226 100644 --- a/tests/components/tractive/snapshots/test_sensor.ambr +++ b/tests/components/tractive/snapshots/test_sensor.ambr @@ -139,7 +139,7 @@ 'supported_features': 0, 'translation_key': 'calories', 'unique_id': 'pet_id_123_calories', - 'unit_of_measurement': 'kcal', + 'unit_of_measurement': , }) # --- # name: test_sensor[sensor.test_pet_calories_burned-state] @@ -147,7 +147,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'Test Pet Calories burned', 'state_class': , - 'unit_of_measurement': 'kcal', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_pet_calories_burned', From e31e4c5d75acec2a4f9916c6e71d831cc26e29ef Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 10 Dec 2024 10:07:02 +0100 Subject: [PATCH 0443/1198] Migrate wiz lights to use Kelvin (#132809) --- homeassistant/components/wiz/light.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/wiz/light.py b/homeassistant/components/wiz/light.py index a3f36d580d2..9ef4cd57b3d 100644 --- a/homeassistant/components/wiz/light.py +++ b/homeassistant/components/wiz/light.py @@ -10,7 +10,7 @@ from pywizlight.scenes import get_id_from_scene_name from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, @@ -21,10 +21,6 @@ from homeassistant.components.light import ( ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util.color import ( - color_temperature_kelvin_to_mired, - color_temperature_mired_to_kelvin, -) from . 
import WizConfigEntry from .entity import WizToggleEntity @@ -43,10 +39,10 @@ def _async_pilot_builder(**kwargs: Any) -> PilotBuilder: if ATTR_RGBW_COLOR in kwargs: return PilotBuilder(brightness=brightness, rgbw=kwargs[ATTR_RGBW_COLOR]) - if ATTR_COLOR_TEMP in kwargs: + if ATTR_COLOR_TEMP_KELVIN in kwargs: return PilotBuilder( brightness=brightness, - colortemp=color_temperature_mired_to_kelvin(kwargs[ATTR_COLOR_TEMP]), + colortemp=kwargs[ATTR_COLOR_TEMP_KELVIN], ) if ATTR_EFFECT in kwargs: @@ -93,8 +89,8 @@ class WizBulbEntity(WizToggleEntity, LightEntity): self._attr_effect_list = wiz_data.scenes if bulb_type.bulb_type != BulbClass.DW: kelvin = bulb_type.kelvin_range - self._attr_min_mireds = color_temperature_kelvin_to_mired(kelvin.max) - self._attr_max_mireds = color_temperature_kelvin_to_mired(kelvin.min) + self._attr_max_color_temp_kelvin = kelvin.max + self._attr_min_color_temp_kelvin = kelvin.min if bulb_type.features.effect: self._attr_supported_features = LightEntityFeature.EFFECT self._async_update_attrs() @@ -111,7 +107,7 @@ class WizBulbEntity(WizToggleEntity, LightEntity): color_temp := state.get_colortemp() ): self._attr_color_mode = ColorMode.COLOR_TEMP - self._attr_color_temp = color_temperature_kelvin_to_mired(color_temp) + self._attr_color_temp_kelvin = color_temp elif ( ColorMode.RGBWW in color_modes and (rgbww := state.get_rgbww()) is not None ): From bd6df06248d5619e8502d0936c469225c130aee9 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 10 Dec 2024 10:07:36 +0100 Subject: [PATCH 0444/1198] Migrate wemo lights to use Kelvin (#132808) --- homeassistant/components/wemo/light.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/wemo/light.py b/homeassistant/components/wemo/light.py index 26dec417631..b39f4829605 100644 --- a/homeassistant/components/wemo/light.py +++ b/homeassistant/components/wemo/light.py @@ -8,7 +8,7 @@ from pywemo import Bridge, BridgeLight, Dimmer from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ATTR_TRANSITION, ColorMode, @@ -123,9 +123,11 @@ class WemoLight(WemoEntity, LightEntity): return self.light.state.get("color_xy") @property - def color_temp(self) -> int | None: - """Return the color temperature of this light in mireds.""" - return self.light.state.get("temperature_mireds") + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" + if not (mireds := self.light.state.get("temperature_mireds")): + return None + return color_util.color_temperature_mired_to_kelvin(mireds) @property def color_mode(self) -> ColorMode: @@ -165,7 +167,7 @@ class WemoLight(WemoEntity, LightEntity): xy_color = None brightness = kwargs.get(ATTR_BRIGHTNESS, self.brightness or 255) - color_temp = kwargs.get(ATTR_COLOR_TEMP) + color_temp_kelvin = kwargs.get(ATTR_COLOR_TEMP_KELVIN) hs_color = kwargs.get(ATTR_HS_COLOR) transition_time = int(kwargs.get(ATTR_TRANSITION, 0)) @@ -182,9 +184,9 @@ class WemoLight(WemoEntity, LightEntity): if xy_color is not None: self.light.set_color(xy_color, transition=transition_time) - if color_temp is not None: + if color_temp_kelvin is not None: self.light.set_temperature( - mireds=color_temp, transition=transition_time + kelvin=color_temp_kelvin, transition=transition_time ) self.light.turn_on(**turn_on_kwargs) From f0e7cb5794f5d8bb7b5f1bcb4465c44ed7094531 Mon Sep 17 00:00:00 2001 From: epenet 
<6771947+epenet@users.noreply.github.com> Date: Tue, 10 Dec 2024 10:09:20 +0100 Subject: [PATCH 0445/1198] Migrate tuya lights to use Kelvin (#132803) --- homeassistant/components/tuya/light.py | 28 +++++++++++++++++--------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/tuya/light.py b/homeassistant/components/tuya/light.py index 060b1f4b7ef..d7dffc16b58 100644 --- a/homeassistant/components/tuya/light.py +++ b/homeassistant/components/tuya/light.py @@ -10,7 +10,7 @@ from tuya_sharing import CustomerDevice, Manager from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ColorMode, LightEntity, @@ -21,6 +21,7 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import color as color_util from . import TuyaConfigEntry from .const import TUYA_DISCOVERY_NEW, DPCode, DPType, WorkMode @@ -49,6 +50,9 @@ DEFAULT_COLOR_TYPE_DATA_V2 = ColorTypeData( v_type=IntegerTypeData(DPCode.COLOUR_DATA_HSV, min=1, scale=0, max=1000, step=1), ) +MAX_MIREDS = 500 # 2000 K +MIN_MIREDS = 153 # 6500 K + @dataclass(frozen=True) class TuyaLightEntityDescription(LightEntityDescription): @@ -457,6 +461,8 @@ class TuyaLightEntity(TuyaEntity, LightEntity): _color_mode: DPCode | None = None _color_temp: IntegerTypeData | None = None _fixed_color_mode: ColorMode | None = None + _attr_min_color_temp_kelvin = 2000 # 500 Mireds + _attr_max_color_temp_kelvin = 6500 # 153 Mireds def __init__( self, @@ -532,7 +538,7 @@ class TuyaLightEntity(TuyaEntity, LightEntity): """Turn on or control the light.""" commands = [{"code": self.entity_description.key, "value": True}] - if self._color_temp and ATTR_COLOR_TEMP in kwargs: + if self._color_temp and ATTR_COLOR_TEMP_KELVIN in kwargs: if self._color_mode_dpcode: commands += [ { @@ -546,9 +552,11 @@ class TuyaLightEntity(TuyaEntity, LightEntity): "code": self._color_temp.dpcode, "value": round( self._color_temp.remap_value_from( - kwargs[ATTR_COLOR_TEMP], - self.min_mireds, - self.max_mireds, + color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ), + MIN_MIREDS, + MAX_MIREDS, reverse=True, ) ), @@ -560,7 +568,7 @@ class TuyaLightEntity(TuyaEntity, LightEntity): or ( ATTR_BRIGHTNESS in kwargs and self.color_mode == ColorMode.HS - and ATTR_COLOR_TEMP not in kwargs + and ATTR_COLOR_TEMP_KELVIN not in kwargs ) ): if self._color_mode_dpcode: @@ -688,8 +696,8 @@ class TuyaLightEntity(TuyaEntity, LightEntity): return round(brightness) @property - def color_temp(self) -> int | None: - """Return the color_temp of the light.""" + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" if not self._color_temp: return None @@ -697,9 +705,9 @@ class TuyaLightEntity(TuyaEntity, LightEntity): if temperature is None: return None - return round( + return color_util.color_temperature_mired_to_kelvin( self._color_temp.remap_value_to( - temperature, self.min_mireds, self.max_mireds, reverse=True + temperature, MIN_MIREDS, MAX_MIREDS, reverse=True ) ) From 36ce90177f32bfe07c7e3c4c47b3a95c566d6d95 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 10 Dec 2024 10:09:55 +0100 Subject: [PATCH 0446/1198] Migrate tradfri lights to use Kelvin (#132800) --- 
homeassistant/components/tradfri/light.py | 38 ++++++++++++++--------- 1 file changed, 23 insertions(+), 15 deletions(-) diff --git a/homeassistant/components/tradfri/light.py b/homeassistant/components/tradfri/light.py index b0bf6d24019..a71691e6e90 100644 --- a/homeassistant/components/tradfri/light.py +++ b/homeassistant/components/tradfri/light.py @@ -9,7 +9,7 @@ from pytradfri.command import Command from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ATTR_TRANSITION, ColorMode, @@ -87,8 +87,16 @@ class TradfriLight(TradfriBaseEntity, LightEntity): self._fixed_color_mode = next(iter(self._attr_supported_color_modes)) if self._device_control: - self._attr_min_mireds = self._device_control.min_mireds - self._attr_max_mireds = self._device_control.max_mireds + self._attr_max_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin( + self._device_control.min_mireds + ) + ) + self._attr_min_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin( + self._device_control.max_mireds + ) + ) def _refresh(self) -> None: """Refresh the device.""" @@ -118,11 +126,11 @@ class TradfriLight(TradfriBaseEntity, LightEntity): return cast(int, self._device_data.dimmer) @property - def color_temp(self) -> int | None: - """Return the color temp value in mireds.""" - if not self._device_data: + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" + if not self._device_data or not (color_temp := self._device_data.color_temp): return None - return cast(int, self._device_data.color_temp) + return color_util.color_temperature_mired_to_kelvin(color_temp) @property def hs_color(self) -> tuple[float, float] | None: @@ -191,18 +199,19 @@ class TradfriLight(TradfriBaseEntity, LightEntity): transition_time = None temp_command = None - if ATTR_COLOR_TEMP in kwargs and ( + if ATTR_COLOR_TEMP_KELVIN in kwargs and ( self._device_control.can_set_temp or self._device_control.can_set_color ): - temp = kwargs[ATTR_COLOR_TEMP] + temp_k = kwargs[ATTR_COLOR_TEMP_KELVIN] # White Spectrum bulb if self._device_control.can_set_temp: - if temp > self.max_mireds: - temp = self.max_mireds - elif temp < self.min_mireds: - temp = self.min_mireds + temp = color_util.color_temperature_kelvin_to_mired(temp_k) + if temp < (min_mireds := self._device_control.min_mireds): + temp = min_mireds + elif temp > (max_mireds := self._device_control.max_mireds): + temp = max_mireds temp_data = { - ATTR_COLOR_TEMP: temp, + "color_temp": temp, "transition_time": transition_time, } temp_command = self._device_control.set_color_temp(**temp_data) @@ -210,7 +219,6 @@ class TradfriLight(TradfriBaseEntity, LightEntity): # Color bulb (CWS) # color_temp needs to be set with hue/saturation elif self._device_control.can_set_color: - temp_k = color_util.color_temperature_mired_to_kelvin(temp) hs_color = color_util.color_temperature_to_hs(temp_k) hue = int(hs_color[0] * (self._device_control.max_hue / 360)) sat = int(hs_color[1] * (self._device_control.max_saturation / 100)) From 7b0a309fa7f67e5a1b39df24c94d42546186571f Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 10 Dec 2024 10:11:06 +0100 Subject: [PATCH 0447/1198] Migrate template lights to use Kelvin (#132799) --- homeassistant/components/template/light.py | 42 +++++++++++++--------- 1 file changed, 25 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/template/light.py 
b/homeassistant/components/template/light.py index cae6c0cebc1..9c7bc23022a 100644 --- a/homeassistant/components/template/light.py +++ b/homeassistant/components/template/light.py @@ -9,7 +9,7 @@ import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, ATTR_RGB_COLOR, @@ -39,6 +39,7 @@ from homeassistant.helpers.entity import async_generate_entity_id from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.script import Script from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from homeassistant.util import color as color_util from .const import DOMAIN from .template_entity import ( @@ -262,25 +263,27 @@ class LightTemplate(TemplateEntity, LightEntity): return self._brightness @property - def color_temp(self) -> int | None: - """Return the CT color value in mireds.""" - return self._temperature + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" + if self._temperature is None: + return None + return color_util.color_temperature_mired_to_kelvin(self._temperature) @property - def max_mireds(self) -> int: - """Return the max mireds value in mireds.""" + def min_color_temp_kelvin(self) -> int: + """Return the warmest color_temp_kelvin that this light supports.""" if self._max_mireds is not None: - return self._max_mireds + return color_util.color_temperature_mired_to_kelvin(self._max_mireds) - return super().max_mireds + return super().min_color_temp_kelvin @property - def min_mireds(self) -> int: - """Return the min mireds value in mireds.""" + def max_color_temp_kelvin(self) -> int: + """Return the coldest color_temp_kelvin that this light supports.""" if self._min_mireds is not None: - return self._min_mireds + return color_util.color_temperature_mired_to_kelvin(self._min_mireds) - return super().min_mireds + return super().max_color_temp_kelvin @property def hs_color(self) -> tuple[float, float] | None: @@ -447,13 +450,16 @@ class LightTemplate(TemplateEntity, LightEntity): self._brightness = kwargs[ATTR_BRIGHTNESS] optimistic_set = True - if self._temperature_template is None and ATTR_COLOR_TEMP in kwargs: + if self._temperature_template is None and ATTR_COLOR_TEMP_KELVIN in kwargs: + color_temp = color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ) _LOGGER.debug( "Optimistically setting color temperature to %s", - kwargs[ATTR_COLOR_TEMP], + color_temp, ) self._color_mode = ColorMode.COLOR_TEMP - self._temperature = kwargs[ATTR_COLOR_TEMP] + self._temperature = color_temp if self._hs_template is None and self._color_template is None: self._hs_color = None if self._rgb_template is None: @@ -544,8 +550,10 @@ class LightTemplate(TemplateEntity, LightEntity): if ATTR_TRANSITION in kwargs and self._supports_transition is True: common_params["transition"] = kwargs[ATTR_TRANSITION] - if ATTR_COLOR_TEMP in kwargs and self._temperature_script: - common_params["color_temp"] = kwargs[ATTR_COLOR_TEMP] + if ATTR_COLOR_TEMP_KELVIN in kwargs and self._temperature_script: + common_params["color_temp"] = color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ) await self.async_run_script( self._temperature_script, From 48808490742a6bbecb1a61124ef2f6efb65539c7 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 10 Dec 2024 10:17:40 +0100 Subject: [PATCH 0448/1198] Migrate homematic lights to use Kelvin 
(#132794) --- homeassistant/components/homematic/light.py | 26 ++++++++++++++------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/homematic/light.py b/homeassistant/components/homematic/light.py index b05cc6a46d6..838cdc9c3c3 100644 --- a/homeassistant/components/homematic/light.py +++ b/homeassistant/components/homematic/light.py @@ -6,7 +6,7 @@ from typing import Any from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, ATTR_TRANSITION, @@ -17,10 +17,14 @@ from homeassistant.components.light import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from homeassistant.util import color as color_util from .const import ATTR_DISCOVER_DEVICES from .entity import HMDevice +MAX_MIREDS = 500 # 2000 K +MIN_MIREDS = 153 # 6500 K + def setup_platform( hass: HomeAssistant, @@ -43,6 +47,9 @@ def setup_platform( class HMLight(HMDevice, LightEntity): """Representation of a Homematic light.""" + _attr_min_color_temp_kelvin = 2000 # 500 Mireds + _attr_max_color_temp_kelvin = 6500 # 153 Mireds + @property def brightness(self): """Return the brightness of this light between 0..255.""" @@ -99,12 +106,14 @@ class HMLight(HMDevice, LightEntity): return hue * 360.0, sat * 100.0 @property - def color_temp(self): - """Return the color temp in mireds [int].""" + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" if ColorMode.COLOR_TEMP not in self.supported_color_modes: return None hm_color_temp = self._hmdevice.get_color_temp(self._channel) - return self.max_mireds - (self.max_mireds - self.min_mireds) * hm_color_temp + return color_util.color_temperature_mired_to_kelvin( + MAX_MIREDS - (MAX_MIREDS - MIN_MIREDS) * hm_color_temp + ) @property def effect_list(self): @@ -130,7 +139,7 @@ class HMLight(HMDevice, LightEntity): self._hmdevice.set_level(percent_bright, self._channel) elif ( ATTR_HS_COLOR not in kwargs - and ATTR_COLOR_TEMP not in kwargs + and ATTR_COLOR_TEMP_KELVIN not in kwargs and ATTR_EFFECT not in kwargs ): self._hmdevice.on(self._channel) @@ -141,10 +150,11 @@ class HMLight(HMDevice, LightEntity): saturation=kwargs[ATTR_HS_COLOR][1] / 100.0, channel=self._channel, ) - if ATTR_COLOR_TEMP in kwargs: - hm_temp = (self.max_mireds - kwargs[ATTR_COLOR_TEMP]) / ( - self.max_mireds - self.min_mireds + if ATTR_COLOR_TEMP_KELVIN in kwargs: + mireds = color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] ) + hm_temp = (MAX_MIREDS - mireds) / (MAX_MIREDS - MIN_MIREDS) self._hmdevice.set_color_temp(hm_temp) if ATTR_EFFECT in kwargs: self._hmdevice.set_effect(kwargs[ATTR_EFFECT]) From 28d01d88a23060b6730c56f18f242455ea4ddc9a Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 10 Dec 2024 10:17:55 +0100 Subject: [PATCH 0449/1198] Migrate nanoleaf lights to use Kelvin (#132797) --- homeassistant/components/nanoleaf/light.py | 27 ++++++++-------------- 1 file changed, 9 insertions(+), 18 deletions(-) diff --git a/homeassistant/components/nanoleaf/light.py b/homeassistant/components/nanoleaf/light.py index 19d817b9999..681053fa573 100644 --- a/homeassistant/components/nanoleaf/light.py +++ b/homeassistant/components/nanoleaf/light.py @@ -2,12 +2,11 @@ from __future__ import annotations -import math from typing import Any from 
homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, ATTR_TRANSITION, @@ -17,10 +16,6 @@ from homeassistant.components.light import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util.color import ( - color_temperature_kelvin_to_mired as kelvin_to_mired, - color_temperature_mired_to_kelvin as mired_to_kelvin, -) from . import NanoleafConfigEntry from .coordinator import NanoleafCoordinator @@ -51,10 +46,8 @@ class NanoleafLight(NanoleafEntity, LightEntity): """Initialize the Nanoleaf light.""" super().__init__(coordinator) self._attr_unique_id = self._nanoleaf.serial_no - self._attr_min_mireds = math.ceil( - 1000000 / self._nanoleaf.color_temperature_max - ) - self._attr_max_mireds = kelvin_to_mired(self._nanoleaf.color_temperature_min) + self._attr_max_color_temp_kelvin = self._nanoleaf.color_temperature_max + self._attr_min_color_temp_kelvin = self._nanoleaf.color_temperature_min @property def brightness(self) -> int: @@ -62,9 +55,9 @@ class NanoleafLight(NanoleafEntity, LightEntity): return int(self._nanoleaf.brightness * 2.55) @property - def color_temp(self) -> int: - """Return the current color temperature.""" - return kelvin_to_mired(self._nanoleaf.color_temperature) + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" + return self._nanoleaf.color_temperature @property def effect(self) -> str | None: @@ -106,7 +99,7 @@ class NanoleafLight(NanoleafEntity, LightEntity): """Instruct the light to turn on.""" brightness = kwargs.get(ATTR_BRIGHTNESS) hs_color = kwargs.get(ATTR_HS_COLOR) - color_temp_mired = kwargs.get(ATTR_COLOR_TEMP) + color_temp_kelvin = kwargs.get(ATTR_COLOR_TEMP_KELVIN) effect = kwargs.get(ATTR_EFFECT) transition = kwargs.get(ATTR_TRANSITION) @@ -120,10 +113,8 @@ class NanoleafLight(NanoleafEntity, LightEntity): hue, saturation = hs_color await self._nanoleaf.set_hue(int(hue)) await self._nanoleaf.set_saturation(int(saturation)) - elif color_temp_mired: - await self._nanoleaf.set_color_temperature( - mired_to_kelvin(color_temp_mired) - ) + elif color_temp_kelvin: + await self._nanoleaf.set_color_temperature(color_temp_kelvin) if transition: if brightness: # tune to the required brightness in n seconds await self._nanoleaf.set_brightness( From be1c225c7091265606e8fd6ab9ef7c1a3bc10d17 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=85ke=20Strandberg?= Date: Tue, 10 Dec 2024 10:20:30 +0100 Subject: [PATCH 0450/1198] Address misc comments from myuplink quality scale review (#132802) --- homeassistant/components/myuplink/binary_sensor.py | 10 ++++------ homeassistant/components/myuplink/select.py | 7 ++----- 2 files changed, 6 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/myuplink/binary_sensor.py b/homeassistant/components/myuplink/binary_sensor.py index 953859986d0..d903c7cbfae 100644 --- a/homeassistant/components/myuplink/binary_sensor.py +++ b/homeassistant/components/myuplink/binary_sensor.py @@ -155,7 +155,7 @@ class MyUplinkDeviceBinarySensor(MyUplinkEntity, BinarySensorEntity): self, coordinator: MyUplinkDataCoordinator, device_id: str, - entity_description: BinarySensorEntityDescription | None, + entity_description: BinarySensorEntityDescription, unique_id_suffix: str, ) -> None: """Initialize the binary_sensor.""" @@ -165,8 +165,7 @@ class MyUplinkDeviceBinarySensor(MyUplinkEntity, BinarySensorEntity): 
unique_id_suffix=unique_id_suffix, ) - if entity_description is not None: - self.entity_description = entity_description + self.entity_description = entity_description @property def is_on(self) -> bool: @@ -185,7 +184,7 @@ class MyUplinkSystemBinarySensor(MyUplinkSystemEntity, BinarySensorEntity): coordinator: MyUplinkDataCoordinator, system_id: str, device_id: str, - entity_description: BinarySensorEntityDescription | None, + entity_description: BinarySensorEntityDescription, unique_id_suffix: str, ) -> None: """Initialize the binary_sensor.""" @@ -196,8 +195,7 @@ class MyUplinkSystemBinarySensor(MyUplinkSystemEntity, BinarySensorEntity): unique_id_suffix=unique_id_suffix, ) - if entity_description is not None: - self.entity_description = entity_description + self.entity_description = entity_description @property def is_on(self) -> bool | None: diff --git a/homeassistant/components/myuplink/select.py b/homeassistant/components/myuplink/select.py index c0fb66602de..96058b916b3 100644 --- a/homeassistant/components/myuplink/select.py +++ b/homeassistant/components/myuplink/select.py @@ -5,7 +5,7 @@ from typing import cast from aiohttp import ClientError from myuplink import DevicePoint -from homeassistant.components.select import SelectEntity, SelectEntityDescription +from homeassistant.components.select import SelectEntity from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -30,14 +30,12 @@ async def async_setup_entry( for point_id, device_point in point_data.items(): if skip_entity(device_point.category, device_point): continue - description = None - if find_matching_platform(device_point, description) == Platform.SELECT: + if find_matching_platform(device_point, None) == Platform.SELECT: entities.append( MyUplinkSelect( coordinator=coordinator, device_id=device_id, device_point=device_point, - entity_description=description, unique_id_suffix=point_id, ) ) @@ -53,7 +51,6 @@ class MyUplinkSelect(MyUplinkEntity, SelectEntity): coordinator: MyUplinkDataCoordinator, device_id: str, device_point: DevicePoint, - entity_description: SelectEntityDescription | None, unique_id_suffix: str, ) -> None: """Initialize the select.""" From d724488376b868a39670d53ef0a77ef03d41f448 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 10 Dec 2024 10:29:32 +0100 Subject: [PATCH 0451/1198] Migrate yeelight lights to use Kelvin (#132814) --- homeassistant/components/yeelight/light.py | 37 +++---- tests/components/yeelight/test_light.py | 111 ++++++++------------- 2 files changed, 58 insertions(+), 90 deletions(-) diff --git a/homeassistant/components/yeelight/light.py b/homeassistant/components/yeelight/light.py index 7f705da68d1..8cc3f2600e5 100644 --- a/homeassistant/components/yeelight/light.py +++ b/homeassistant/components/yeelight/light.py @@ -16,7 +16,7 @@ from yeelight.main import BulbException from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, @@ -39,10 +39,6 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_call_later from homeassistant.helpers.typing import VolDictType import homeassistant.util.color as color_util -from homeassistant.util.color import ( - color_temperature_kelvin_to_mired as kelvin_to_mired, - color_temperature_mired_to_kelvin as mired_to_kelvin, -) from . 
import YEELIGHT_FLOW_TRANSITION_SCHEMA from .const import ( @@ -440,8 +436,8 @@ class YeelightBaseLight(YeelightEntity, LightEntity): self._effect = None model_specs = self._bulb.get_model_specs() - self._attr_min_mireds = kelvin_to_mired(model_specs["color_temp"]["max"]) - self._attr_max_mireds = kelvin_to_mired(model_specs["color_temp"]["min"]) + self._attr_max_color_temp_kelvin = model_specs["color_temp"]["max"] + self._attr_min_color_temp_kelvin = model_specs["color_temp"]["min"] self._light_type = LightType.Main @@ -476,10 +472,10 @@ class YeelightBaseLight(YeelightEntity, LightEntity): return self._predefined_effects + self.custom_effects_names @property - def color_temp(self) -> int | None: - """Return the color temperature.""" + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" if temp_in_k := self._get_property("ct"): - self._color_temp = kelvin_to_mired(int(temp_in_k)) + self._color_temp = int(temp_in_k) return self._color_temp @property @@ -678,20 +674,19 @@ class YeelightBaseLight(YeelightEntity, LightEntity): ) @_async_cmd - async def async_set_colortemp(self, colortemp, duration) -> None: + async def async_set_colortemp(self, temp_in_k, duration) -> None: """Set bulb's color temperature.""" if ( - not colortemp + not temp_in_k or not self.supported_color_modes or ColorMode.COLOR_TEMP not in self.supported_color_modes ): return - temp_in_k = mired_to_kelvin(colortemp) if ( not self.device.is_color_flow_enabled and self.color_mode == ColorMode.COLOR_TEMP - and self.color_temp == colortemp + and self.color_temp_kelvin == temp_in_k ): _LOGGER.debug("Color temp already set to: %s", temp_in_k) # Already set, and since we get pushed updates @@ -779,7 +774,7 @@ class YeelightBaseLight(YeelightEntity, LightEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Turn the bulb on.""" brightness = kwargs.get(ATTR_BRIGHTNESS) - colortemp = kwargs.get(ATTR_COLOR_TEMP) + colortemp = kwargs.get(ATTR_COLOR_TEMP_KELVIN) hs_color = kwargs.get(ATTR_HS_COLOR) rgb = kwargs.get(ATTR_RGB_COLOR) flash = kwargs.get(ATTR_FLASH) @@ -933,12 +928,12 @@ class YeelightWithoutNightlightSwitchMixIn(YeelightBaseLight): return super()._brightness_property @property - def color_temp(self) -> int | None: - """Return the color temperature.""" + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" if self.device.is_nightlight_enabled: # Enabling the nightlight locks the colortemp to max - return self.max_mireds - return super().color_temp + return self.min_color_temp_kelvin + return super().color_temp_kelvin class YeelightColorLightWithoutNightlightSwitch( @@ -1081,8 +1076,8 @@ class YeelightAmbientLight(YeelightColorLightWithoutNightlightSwitch): def __init__(self, *args, **kwargs): """Initialize the Yeelight Ambient light.""" super().__init__(*args, **kwargs) - self._attr_min_mireds = kelvin_to_mired(6500) - self._attr_max_mireds = kelvin_to_mired(1700) + self._attr_max_color_temp_kelvin = 6500 + self._attr_min_color_temp_kelvin = 1700 self._light_type = LightType.Ambient diff --git a/tests/components/yeelight/test_light.py b/tests/components/yeelight/test_light.py index 518537262b2..f4ff82e7757 100644 --- a/tests/components/yeelight/test_light.py +++ b/tests/components/yeelight/test_light.py @@ -35,6 +35,7 @@ from homeassistant.components.light import ( FLASH_SHORT, SERVICE_TURN_OFF, SERVICE_TURN_ON, + ColorMode, LightEntityFeature, ) from homeassistant.components.yeelight.const import ( @@ -931,9 +932,7 @@ async 
def test_device_types( "effect": None, "supported_features": SUPPORT_YEELIGHT, "min_color_temp_kelvin": model_specs["color_temp"]["min"], - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["max"]) - ), + "max_color_temp_kelvin": model_specs["color_temp"]["max"], "min_mireds": color_temperature_kelvin_to_mired( model_specs["color_temp"]["max"] ), @@ -962,9 +961,7 @@ async def test_device_types( "rgb_color": (255, 121, 0), "xy_color": (0.62, 0.368), "min_color_temp_kelvin": model_specs["color_temp"]["min"], - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["max"]) - ), + "max_color_temp_kelvin": model_specs["color_temp"]["max"], "min_mireds": color_temperature_kelvin_to_mired( model_specs["color_temp"]["max"] ), @@ -992,9 +989,7 @@ async def test_device_types( "effect": None, "supported_features": SUPPORT_YEELIGHT, "min_color_temp_kelvin": model_specs["color_temp"]["min"], - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["max"]) - ), + "max_color_temp_kelvin": model_specs["color_temp"]["max"], "min_mireds": color_temperature_kelvin_to_mired( model_specs["color_temp"]["max"] ), @@ -1028,9 +1023,7 @@ async def test_device_types( "effect": None, "supported_features": SUPPORT_YEELIGHT, "min_color_temp_kelvin": model_specs["color_temp"]["min"], - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["max"]) - ), + "max_color_temp_kelvin": model_specs["color_temp"]["max"], "min_mireds": color_temperature_kelvin_to_mired( model_specs["color_temp"]["max"] ), @@ -1065,9 +1058,7 @@ async def test_device_types( "effect": None, "supported_features": SUPPORT_YEELIGHT, "min_color_temp_kelvin": model_specs["color_temp"]["min"], - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["max"]) - ), + "max_color_temp_kelvin": model_specs["color_temp"]["max"], "min_mireds": color_temperature_kelvin_to_mired( model_specs["color_temp"]["max"] ), @@ -1102,9 +1093,7 @@ async def test_device_types( "effect": None, "supported_features": SUPPORT_YEELIGHT, "min_color_temp_kelvin": model_specs["color_temp"]["min"], - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["max"]) - ), + "max_color_temp_kelvin": model_specs["color_temp"]["max"], "min_mireds": color_temperature_kelvin_to_mired( model_specs["color_temp"]["max"] ), @@ -1138,9 +1127,7 @@ async def test_device_types( "effect": None, "supported_features": SUPPORT_YEELIGHT, "min_color_temp_kelvin": model_specs["color_temp"]["min"], - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["max"]) - ), + "max_color_temp_kelvin": model_specs["color_temp"]["max"], "min_mireds": color_temperature_kelvin_to_mired( model_specs["color_temp"]["max"] ), @@ -1173,12 +1160,8 @@ async def test_device_types( "effect_list": YEELIGHT_TEMP_ONLY_EFFECT_LIST, "effect": None, "supported_features": SUPPORT_YEELIGHT, - "min_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["min"]) - ), - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["max"]) - ), + 
"min_color_temp_kelvin": model_specs["color_temp"]["min"], + "max_color_temp_kelvin": model_specs["color_temp"]["max"], "min_mireds": color_temperature_kelvin_to_mired( model_specs["color_temp"]["max"] ), @@ -1204,12 +1187,8 @@ async def test_device_types( "effect_list": YEELIGHT_TEMP_ONLY_EFFECT_LIST, "effect": None, "supported_features": SUPPORT_YEELIGHT, - "min_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["min"]) - ), - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["max"]) - ), + "min_color_temp_kelvin": model_specs["color_temp"]["min"], + "max_color_temp_kelvin": model_specs["color_temp"]["max"], "min_mireds": color_temperature_kelvin_to_mired( model_specs["color_temp"]["max"] ), @@ -1217,17 +1196,15 @@ async def test_device_types( model_specs["color_temp"]["min"] ), "brightness": nl_br, - "color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["min"]) - ), + "color_temp_kelvin": model_specs["color_temp"]["min"], "color_temp": color_temperature_kelvin_to_mired( model_specs["color_temp"]["min"] ), "color_mode": "color_temp", "supported_color_modes": ["color_temp"], - "hs_color": (28.391, 65.659), - "rgb_color": (255, 167, 88), - "xy_color": (0.524, 0.388), + "hs_color": (28.395, 65.723), + "rgb_color": (255, 167, 87), + "xy_color": (0.525, 0.388), }, ) @@ -1245,12 +1222,8 @@ async def test_device_types( "flowing": False, "night_light": True, "supported_features": SUPPORT_YEELIGHT, - "min_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["min"]) - ), - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["max"]) - ), + "min_color_temp_kelvin": model_specs["color_temp"]["min"], + "max_color_temp_kelvin": model_specs["color_temp"]["max"], "min_mireds": color_temperature_kelvin_to_mired( model_specs["color_temp"]["max"] ), @@ -1279,12 +1252,8 @@ async def test_device_types( "flowing": False, "night_light": True, "supported_features": SUPPORT_YEELIGHT, - "min_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["min"]) - ), - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["max"]) - ), + "min_color_temp_kelvin": model_specs["color_temp"]["min"], + "max_color_temp_kelvin": model_specs["color_temp"]["max"], "min_mireds": color_temperature_kelvin_to_mired( model_specs["color_temp"]["max"] ), @@ -1292,17 +1261,15 @@ async def test_device_types( model_specs["color_temp"]["min"] ), "brightness": nl_br, - "color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["min"]) - ), + "color_temp_kelvin": model_specs["color_temp"]["min"], "color_temp": color_temperature_kelvin_to_mired( model_specs["color_temp"]["min"] ), "color_mode": "color_temp", "supported_color_modes": ["color_temp"], - "hs_color": (28.391, 65.659), - "rgb_color": (255, 167, 88), - "xy_color": (0.524, 0.388), + "hs_color": (28.395, 65.723), + "rgb_color": (255, 167, 87), + "xy_color": (0.525, 0.388), }, ) # Background light - color mode CT @@ -1315,16 +1282,18 @@ async def test_device_types( "effect": None, "supported_features": SUPPORT_YEELIGHT, "min_color_temp_kelvin": 1700, - 
"max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(6500) - ), + "max_color_temp_kelvin": 6500, "min_mireds": color_temperature_kelvin_to_mired(6500), "max_mireds": color_temperature_kelvin_to_mired(1700), "brightness": bg_bright, "color_temp_kelvin": bg_ct, "color_temp": bg_ct_kelvin, "color_mode": "color_temp", - "supported_color_modes": ["color_temp", "hs", "rgb"], + "supported_color_modes": [ + ColorMode.COLOR_TEMP, + ColorMode.HS, + ColorMode.RGB, + ], "hs_color": (27.001, 19.243), "rgb_color": (255, 228, 206), "xy_color": (0.371, 0.349), @@ -1343,9 +1312,7 @@ async def test_device_types( "effect": None, "supported_features": SUPPORT_YEELIGHT, "min_color_temp_kelvin": 1700, - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(6500) - ), + "max_color_temp_kelvin": 6500, "min_mireds": color_temperature_kelvin_to_mired(6500), "max_mireds": color_temperature_kelvin_to_mired(1700), "brightness": bg_bright, @@ -1355,7 +1322,11 @@ async def test_device_types( "color_temp": None, "color_temp_kelvin": None, "color_mode": "hs", - "supported_color_modes": ["color_temp", "hs", "rgb"], + "supported_color_modes": [ + ColorMode.COLOR_TEMP, + ColorMode.HS, + ColorMode.RGB, + ], }, name=f"{UNIQUE_FRIENDLY_NAME} Ambilight", entity_id=f"{ENTITY_LIGHT}_ambilight", @@ -1371,9 +1342,7 @@ async def test_device_types( "effect": None, "supported_features": SUPPORT_YEELIGHT, "min_color_temp_kelvin": 1700, - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(6500) - ), + "max_color_temp_kelvin": 6500, "min_mireds": color_temperature_kelvin_to_mired(6500), "max_mireds": color_temperature_kelvin_to_mired(1700), "brightness": bg_bright, @@ -1383,7 +1352,11 @@ async def test_device_types( "color_temp": None, "color_temp_kelvin": None, "color_mode": "rgb", - "supported_color_modes": ["color_temp", "hs", "rgb"], + "supported_color_modes": [ + ColorMode.COLOR_TEMP, + ColorMode.HS, + ColorMode.RGB, + ], }, name=f"{UNIQUE_FRIENDLY_NAME} Ambilight", entity_id=f"{ENTITY_LIGHT}_ambilight", From 611cef5cd11eb98d09b0e1f542e8b920d88475a9 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 10 Dec 2024 10:41:38 +0100 Subject: [PATCH 0452/1198] Migrate xiaomi_miio lights to use Kelvin (#132811) --- homeassistant/components/xiaomi_miio/light.py | 99 +++++++++++++------ 1 file changed, 68 insertions(+), 31 deletions(-) diff --git a/homeassistant/components/xiaomi_miio/light.py b/homeassistant/components/xiaomi_miio/light.py index 8ccc798a2e1..3f1f8b926b3 100644 --- a/homeassistant/components/xiaomi_miio/light.py +++ b/homeassistant/components/xiaomi_miio/light.py @@ -28,7 +28,7 @@ import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ColorMode, LightEntity, @@ -45,7 +45,7 @@ from homeassistant.core import HomeAssistant, ServiceCall import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util import color, dt as dt_util +from homeassistant.util import color as color_util, dt as dt_util from .const import ( CONF_FLOW_TYPE, @@ -430,33 +430,54 @@ class XiaomiPhilipsBulb(XiaomiPhilipsGenericLight): self._color_temp = None @property - def color_temp(self): + def _current_mireds(self): """Return the color temperature.""" return 
self._color_temp @property - def min_mireds(self): + def _min_mireds(self): """Return the coldest color_temp that this light supports.""" return 175 @property - def max_mireds(self): + def _max_mireds(self): """Return the warmest color_temp that this light supports.""" return 333 + @property + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" + return ( + color_util.color_temperature_mired_to_kelvin(self._color_temp) + if self._color_temp + else None + ) + + @property + def min_color_temp_kelvin(self) -> int: + """Return the warmest color_temp_kelvin that this light supports.""" + return color_util.color_temperature_mired_to_kelvin(self._max_mireds) + + @property + def max_color_temp_kelvin(self) -> int: + """Return the coldest color_temp_kelvin that this light supports.""" + return color_util.color_temperature_mired_to_kelvin(self._min_mireds) + async def async_turn_on(self, **kwargs: Any) -> None: """Turn the light on.""" - if ATTR_COLOR_TEMP in kwargs: - color_temp = kwargs[ATTR_COLOR_TEMP] + if ATTR_COLOR_TEMP_KELVIN in kwargs: + color_temp = color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ) percent_color_temp = self.translate( - color_temp, self.max_mireds, self.min_mireds, CCT_MIN, CCT_MAX + color_temp, self._max_mireds, self._min_mireds, CCT_MIN, CCT_MAX ) if ATTR_BRIGHTNESS in kwargs: brightness = kwargs[ATTR_BRIGHTNESS] percent_brightness = ceil(100 * brightness / 255.0) - if ATTR_BRIGHTNESS in kwargs and ATTR_COLOR_TEMP in kwargs: + if ATTR_BRIGHTNESS in kwargs and ATTR_COLOR_TEMP_KELVIN in kwargs: _LOGGER.debug( "Setting brightness and color temperature: %s %s%%, %s mireds, %s%% cct", brightness, @@ -476,7 +497,7 @@ class XiaomiPhilipsBulb(XiaomiPhilipsGenericLight): self._color_temp = color_temp self._brightness = brightness - elif ATTR_COLOR_TEMP in kwargs: + elif ATTR_COLOR_TEMP_KELVIN in kwargs: _LOGGER.debug( "Setting color temperature: %s mireds, %s%% cct", color_temp, @@ -526,7 +547,11 @@ class XiaomiPhilipsBulb(XiaomiPhilipsGenericLight): self._state = state.is_on self._brightness = ceil((255 / 100.0) * state.brightness) self._color_temp = self.translate( - state.color_temperature, CCT_MIN, CCT_MAX, self.max_mireds, self.min_mireds + state.color_temperature, + CCT_MIN, + CCT_MAX, + self._max_mireds, + self._min_mireds, ) delayed_turn_off = self.delayed_turn_off_timestamp( @@ -560,12 +585,12 @@ class XiaomiPhilipsCeilingLamp(XiaomiPhilipsBulb): ) @property - def min_mireds(self): + def _min_mireds(self): """Return the coldest color_temp that this light supports.""" return 175 @property - def max_mireds(self): + def _max_mireds(self): """Return the warmest color_temp that this light supports.""" return 370 @@ -585,7 +610,11 @@ class XiaomiPhilipsCeilingLamp(XiaomiPhilipsBulb): self._state = state.is_on self._brightness = ceil((255 / 100.0) * state.brightness) self._color_temp = self.translate( - state.color_temperature, CCT_MIN, CCT_MAX, self.max_mireds, self.min_mireds + state.color_temperature, + CCT_MIN, + CCT_MAX, + self._max_mireds, + self._min_mireds, ) delayed_turn_off = self.delayed_turn_off_timestamp( @@ -797,12 +826,12 @@ class XiaomiPhilipsMoonlightLamp(XiaomiPhilipsBulb): ) @property - def min_mireds(self): + def _min_mireds(self): """Return the coldest color_temp that this light supports.""" return 153 @property - def max_mireds(self): + def _max_mireds(self): """Return the warmest color_temp that this light supports.""" return 588 @@ -820,10 +849,12 @@ class 
XiaomiPhilipsMoonlightLamp(XiaomiPhilipsBulb): async def async_turn_on(self, **kwargs: Any) -> None: """Turn the light on.""" - if ATTR_COLOR_TEMP in kwargs: - color_temp = kwargs[ATTR_COLOR_TEMP] + if ATTR_COLOR_TEMP_KELVIN in kwargs: + color_temp = color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ) percent_color_temp = self.translate( - color_temp, self.max_mireds, self.min_mireds, CCT_MIN, CCT_MAX + color_temp, self._max_mireds, self._min_mireds, CCT_MIN, CCT_MAX ) if ATTR_BRIGHTNESS in kwargs: @@ -832,7 +863,7 @@ class XiaomiPhilipsMoonlightLamp(XiaomiPhilipsBulb): if ATTR_HS_COLOR in kwargs: hs_color = kwargs[ATTR_HS_COLOR] - rgb = color.color_hs_to_RGB(*hs_color) + rgb = color_util.color_hs_to_RGB(*hs_color) if ATTR_BRIGHTNESS in kwargs and ATTR_HS_COLOR in kwargs: _LOGGER.debug( @@ -853,7 +884,7 @@ class XiaomiPhilipsMoonlightLamp(XiaomiPhilipsBulb): self._hs_color = hs_color self._brightness = brightness - elif ATTR_BRIGHTNESS in kwargs and ATTR_COLOR_TEMP in kwargs: + elif ATTR_BRIGHTNESS in kwargs and ATTR_COLOR_TEMP_KELVIN in kwargs: _LOGGER.debug( ( "Setting brightness and color temperature: " @@ -886,7 +917,7 @@ class XiaomiPhilipsMoonlightLamp(XiaomiPhilipsBulb): if result: self._hs_color = hs_color - elif ATTR_COLOR_TEMP in kwargs: + elif ATTR_COLOR_TEMP_KELVIN in kwargs: _LOGGER.debug( "Setting color temperature: %s mireds, %s%% cct", color_temp, @@ -936,9 +967,13 @@ class XiaomiPhilipsMoonlightLamp(XiaomiPhilipsBulb): self._state = state.is_on self._brightness = ceil((255 / 100.0) * state.brightness) self._color_temp = self.translate( - state.color_temperature, CCT_MIN, CCT_MAX, self.max_mireds, self.min_mireds + state.color_temperature, + CCT_MIN, + CCT_MAX, + self._max_mireds, + self._min_mireds, ) - self._hs_color = color.color_RGB_to_hs(*state.rgb) + self._hs_color = color_util.color_RGB_to_hs(*state.rgb) self._state_attrs.update( { @@ -1014,7 +1049,7 @@ class XiaomiGatewayLight(LightEntity): def turn_on(self, **kwargs: Any) -> None: """Turn the light on.""" if ATTR_HS_COLOR in kwargs: - rgb = color.color_hs_to_RGB(*kwargs[ATTR_HS_COLOR]) + rgb = color_util.color_hs_to_RGB(*kwargs[ATTR_HS_COLOR]) else: rgb = self._rgb @@ -1052,7 +1087,7 @@ class XiaomiGatewayLight(LightEntity): if self._is_on: self._brightness_pct = state_dict["brightness"] self._rgb = state_dict["rgb"] - self._hs = color.color_RGB_to_hs(*self._rgb) + self._hs = color_util.color_RGB_to_hs(*self._rgb) class XiaomiGatewayBulb(XiaomiGatewayDevice, LightEntity): @@ -1067,7 +1102,7 @@ class XiaomiGatewayBulb(XiaomiGatewayDevice, LightEntity): return round((self._sub_device.status["brightness"] * 255) / 100) @property - def color_temp(self): + def _current_mireds(self): """Return current color temperature.""" return self._sub_device.status["color_temp"] @@ -1077,12 +1112,12 @@ class XiaomiGatewayBulb(XiaomiGatewayDevice, LightEntity): return self._sub_device.status["status"] == "on" @property - def min_mireds(self): + def _min_mireds(self): """Return min cct.""" return self._sub_device.status["cct_min"] @property - def max_mireds(self): + def _max_mireds(self): """Return max cct.""" return self._sub_device.status["cct_max"] @@ -1090,8 +1125,10 @@ class XiaomiGatewayBulb(XiaomiGatewayDevice, LightEntity): """Instruct the light to turn on.""" await self.hass.async_add_executor_job(self._sub_device.on) - if ATTR_COLOR_TEMP in kwargs: - color_temp = kwargs[ATTR_COLOR_TEMP] + if ATTR_COLOR_TEMP_KELVIN in kwargs: + color_temp = color_util.color_temperature_kelvin_to_mired( + 
kwargs[ATTR_COLOR_TEMP_KELVIN] + ) await self.hass.async_add_executor_job( self._sub_device.set_color_temp, color_temp ) From 30e9c45c7f123608599ac4be047f6860c9289e0c Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 10 Dec 2024 10:55:39 +0100 Subject: [PATCH 0453/1198] Update pvo to v2.2.0 (#132812) --- homeassistant/components/pvoutput/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/pvoutput/manifest.json b/homeassistant/components/pvoutput/manifest.json index bc96bc5061d..9dbdad53bcb 100644 --- a/homeassistant/components/pvoutput/manifest.json +++ b/homeassistant/components/pvoutput/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/pvoutput", "integration_type": "device", "iot_class": "cloud_polling", - "requirements": ["pvo==2.1.1"] + "requirements": ["pvo==2.2.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 160e72e2b19..6fce6667da5 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1669,7 +1669,7 @@ pushbullet.py==0.11.0 pushover_complete==1.1.1 # homeassistant.components.pvoutput -pvo==2.1.1 +pvo==2.2.0 # homeassistant.components.aosmith py-aosmith==1.0.11 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 596998b1dd2..540ec433359 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1367,7 +1367,7 @@ pushbullet.py==0.11.0 pushover_complete==1.1.1 # homeassistant.components.pvoutput -pvo==2.1.1 +pvo==2.2.0 # homeassistant.components.aosmith py-aosmith==1.0.11 From 28aa9c2fa3d8207c97d6aef0d2e90e4f8f73dcd2 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 10 Dec 2024 10:56:17 +0100 Subject: [PATCH 0454/1198] Migrate vesync lights to use Kelvin (#132806) --- homeassistant/components/vesync/light.py | 33 +++++++++++-------- .../vesync/snapshots/test_light.ambr | 12 +++---- 2 files changed, 25 insertions(+), 20 deletions(-) diff --git a/homeassistant/components/vesync/light.py b/homeassistant/components/vesync/light.py index 6e449f63394..5b08b92f75a 100644 --- a/homeassistant/components/vesync/light.py +++ b/homeassistant/components/vesync/light.py @@ -5,7 +5,7 @@ from typing import Any from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ColorMode, LightEntity, ) @@ -13,11 +13,14 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import color as color_util from .const import DEV_TYPE_TO_HA, DOMAIN, VS_DISCOVERY, VS_LIGHTS from .entity import VeSyncDevice _LOGGER = logging.getLogger(__name__) +MAX_MIREDS = 370 # 1,000,000 divided by 2700 Kelvin = 370 Mireds +MIN_MIREDS = 153 # 1,000,000 divided by 6500 Kelvin = 153 Mireds async def async_setup_entry( @@ -84,15 +87,16 @@ class VeSyncBaseLight(VeSyncDevice, LightEntity): """Turn the device on.""" attribute_adjustment_only = False # set white temperature - if self.color_mode == ColorMode.COLOR_TEMP and ATTR_COLOR_TEMP in kwargs: + if self.color_mode == ColorMode.COLOR_TEMP and ATTR_COLOR_TEMP_KELVIN in kwargs: # get white temperature from HA data - color_temp = int(kwargs[ATTR_COLOR_TEMP]) + color_temp = color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ) 
# ensure value between min-max supported Mireds - color_temp = max(self.min_mireds, min(color_temp, self.max_mireds)) + color_temp = max(MIN_MIREDS, min(color_temp, MAX_MIREDS)) # convert Mireds to Percent value that api expects color_temp = round( - ((color_temp - self.min_mireds) / (self.max_mireds - self.min_mireds)) - * 100 + ((color_temp - MIN_MIREDS) / (MAX_MIREDS - MIN_MIREDS)) * 100 ) # flip cold/warm to what pyvesync api expects color_temp = 100 - color_temp @@ -138,13 +142,13 @@ class VeSyncTunableWhiteLightHA(VeSyncBaseLight, LightEntity): """Representation of a VeSync Tunable White Light device.""" _attr_color_mode = ColorMode.COLOR_TEMP - _attr_max_mireds = 370 # 1,000,000 divided by 2700 Kelvin = 370 Mireds - _attr_min_mireds = 154 # 1,000,000 divided by 6500 Kelvin = 154 Mireds + _attr_min_color_temp_kelvin = 2700 # 370 Mireds + _attr_max_color_temp_kelvin = 6500 # 153 Mireds _attr_supported_color_modes = {ColorMode.COLOR_TEMP} @property - def color_temp(self) -> int: - """Get device white temperature.""" + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" # get value from pyvesync library api, result = self.device.color_temp_pct try: @@ -159,15 +163,16 @@ class VeSyncTunableWhiteLightHA(VeSyncBaseLight, LightEntity): ), result, ) - return 0 + return None # flip cold/warm color_temp_value = 100 - color_temp_value # ensure value between 0-100 color_temp_value = max(0, min(color_temp_value, 100)) # convert percent value to Mireds color_temp_value = round( - self.min_mireds - + ((self.max_mireds - self.min_mireds) / 100 * color_temp_value) + MIN_MIREDS + ((MAX_MIREDS - MIN_MIREDS) / 100 * color_temp_value) ) # ensure value between minimum and maximum Mireds - return max(self.min_mireds, min(color_temp_value, self.max_mireds)) + return color_util.color_temperature_mired_to_kelvin( + max(MIN_MIREDS, min(color_temp_value, MAX_MIREDS)) + ) diff --git a/tests/components/vesync/snapshots/test_light.ambr b/tests/components/vesync/snapshots/test_light.ambr index 36694ae3ef6..2e7fe9ac1bb 100644 --- a/tests/components/vesync/snapshots/test_light.ambr +++ b/tests/components/vesync/snapshots/test_light.ambr @@ -428,10 +428,10 @@ }), 'area_id': None, 'capabilities': dict({ - 'max_color_temp_kelvin': 6493, + 'max_color_temp_kelvin': 6500, 'max_mireds': 370, - 'min_color_temp_kelvin': 2702, - 'min_mireds': 154, + 'min_color_temp_kelvin': 2700, + 'min_mireds': 153, 'supported_color_modes': list([ , ]), @@ -473,10 +473,10 @@ 'color_temp_kelvin': None, 'friendly_name': 'Temperature Light', 'hs_color': None, - 'max_color_temp_kelvin': 6493, + 'max_color_temp_kelvin': 6500, 'max_mireds': 370, - 'min_color_temp_kelvin': 2702, - 'min_mireds': 154, + 'min_color_temp_kelvin': 2700, + 'min_mireds': 153, 'rgb_color': None, 'supported_color_modes': list([ , From b7018deebc5e7a8069a45f5388a92c3639daee60 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Tue, 10 Dec 2024 10:57:56 +0100 Subject: [PATCH 0455/1198] Use "remove" in description of "Clear playlist" action (#132079) --- homeassistant/components/media_player/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/media_player/strings.json b/homeassistant/components/media_player/strings.json index ff246e420ce..1c9ba929b38 100644 --- a/homeassistant/components/media_player/strings.json +++ b/homeassistant/components/media_player/strings.json @@ -282,7 +282,7 @@ }, "clear_playlist": { "name": "Clear playlist", - "description": "Clears the playlist." 
+ "description": "Removes all items from the playlist." }, "shuffle_set": { "name": "Shuffle", From 13a37da91756858d84ec5aedf16a49060ed8a96c Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 10 Dec 2024 11:01:22 +0100 Subject: [PATCH 0456/1198] Migrate zwave_js lights to use Kelvin (#132818) --- homeassistant/components/zwave_js/light.py | 47 +++++++++------------- 1 file changed, 18 insertions(+), 29 deletions(-) diff --git a/homeassistant/components/zwave_js/light.py b/homeassistant/components/zwave_js/light.py index 4a044ca3f52..e6cfc6c8b29 100644 --- a/homeassistant/components/zwave_js/light.py +++ b/homeassistant/components/zwave_js/light.py @@ -29,7 +29,7 @@ from zwave_js_server.model.value import Value from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ATTR_RGBW_COLOR, ATTR_TRANSITION, @@ -60,6 +60,8 @@ MULTI_COLOR_MAP = { ColorComponent.CYAN: COLOR_SWITCH_COMBINED_CYAN, ColorComponent.PURPLE: COLOR_SWITCH_COMBINED_PURPLE, } +MIN_MIREDS = 153 # 6500K as a safe default +MAX_MIREDS = 370 # 2700K as a safe default async def async_setup_entry( @@ -103,6 +105,9 @@ def byte_to_zwave_brightness(value: int) -> int: class ZwaveLight(ZWaveBaseEntity, LightEntity): """Representation of a Z-Wave light.""" + _attr_min_color_temp_kelvin = 2700 # 370 mireds as a safe default + _attr_max_color_temp_kelvin = 6500 # 153 mireds as a safe default + def __init__( self, config_entry: ConfigEntry, driver: Driver, info: ZwaveDiscoveryInfo ) -> None: @@ -116,8 +121,6 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): self._hs_color: tuple[float, float] | None = None self._rgbw_color: tuple[int, int, int, int] | None = None self._color_temp: int | None = None - self._min_mireds = 153 # 6500K as a safe default - self._max_mireds = 370 # 2700K as a safe default self._warm_white = self.get_zwave_value( TARGET_COLOR_PROPERTY, CommandClass.SWITCH_COLOR, @@ -241,20 +244,10 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): return self._rgbw_color @property - def color_temp(self) -> int | None: - """Return the color temperature.""" + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" return self._color_temp - @property - def min_mireds(self) -> int: - """Return the coldest color_temp that this light supports.""" - return self._min_mireds - - @property - def max_mireds(self) -> int: - """Return the warmest color_temp that this light supports.""" - return self._max_mireds - @property def supported_color_modes(self) -> set[ColorMode] | None: """Flag supported features.""" @@ -267,10 +260,10 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): brightness = kwargs.get(ATTR_BRIGHTNESS) hs_color = kwargs.get(ATTR_HS_COLOR) - color_temp = kwargs.get(ATTR_COLOR_TEMP) + color_temp_k = kwargs.get(ATTR_COLOR_TEMP_KELVIN) rgbw = kwargs.get(ATTR_RGBW_COLOR) - new_colors = self._get_new_colors(hs_color, color_temp, rgbw) + new_colors = self._get_new_colors(hs_color, color_temp_k, rgbw) if new_colors is not None: await self._async_set_colors(new_colors, transition) @@ -284,7 +277,7 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): def _get_new_colors( self, hs_color: tuple[float, float] | None, - color_temp: int | None, + color_temp_k: int | None, rgbw: tuple[int, int, int, int] | None, brightness_scale: float | None = None, ) -> dict[ColorComponent, int] | None: @@ -309,17 +302,14 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): return colors # Color temperature - if 
color_temp is not None and self._supports_color_temp: + if color_temp_k is not None and self._supports_color_temp: # Limit color temp to min/max values + color_temp = color_util.color_temperature_kelvin_to_mired(color_temp_k) cold = max( 0, min( 255, - round( - (self._max_mireds - color_temp) - / (self._max_mireds - self._min_mireds) - * 255 - ), + round((MAX_MIREDS - color_temp) / (MAX_MIREDS - MIN_MIREDS) * 255), ), ) warm = 255 - cold @@ -505,9 +495,8 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): cold_white = multi_color.get(COLOR_SWITCH_COMBINED_COLD_WHITE, cw_val.value) # Calculate color temps based on whites if cold_white or warm_white: - self._color_temp = round( - self._max_mireds - - ((cold_white / 255) * (self._max_mireds - self._min_mireds)) + self._color_temp = color_util.color_temperature_mired_to_kelvin( + MAX_MIREDS - ((cold_white / 255) * (MAX_MIREDS - MIN_MIREDS)) ) # White channels turned on, set color mode to color_temp self._color_mode = ColorMode.COLOR_TEMP @@ -568,7 +557,7 @@ class ZwaveColorOnOffLight(ZwaveLight): if ( kwargs.get(ATTR_RGBW_COLOR) is not None - or kwargs.get(ATTR_COLOR_TEMP) is not None + or kwargs.get(ATTR_COLOR_TEMP_KELVIN) is not None ): # RGBW and color temp are not supported in this mode, # delegate to the parent class @@ -629,7 +618,7 @@ class ZwaveColorOnOffLight(ZwaveLight): if new_colors is None: new_colors = self._get_new_colors( - hs_color=hs_color, color_temp=None, rgbw=None, brightness_scale=scale + hs_color=hs_color, color_temp_k=None, rgbw=None, brightness_scale=scale ) if new_colors is not None: From ea12a7c9a77d1c1762bab8b2649c71cabd3f1edc Mon Sep 17 00:00:00 2001 From: Guido Schmitz Date: Tue, 10 Dec 2024 11:27:58 +0100 Subject: [PATCH 0457/1198] Remove config flow option to set mydevolo URL (#132821) --- homeassistant/components/devolo_home_control/__init__.py | 3 +-- .../components/devolo_home_control/config_flow.py | 8 +------- homeassistant/components/devolo_home_control/const.py | 2 -- tests/components/devolo_home_control/__init__.py | 1 - .../devolo_home_control/snapshots/test_diagnostics.ambr | 1 - tests/components/devolo_home_control/test_config_flow.py | 8 +------- 6 files changed, 3 insertions(+), 20 deletions(-) diff --git a/homeassistant/components/devolo_home_control/__init__.py b/homeassistant/components/devolo_home_control/__init__.py index 7755e0f22b4..e86b7b753c8 100644 --- a/homeassistant/components/devolo_home_control/__init__.py +++ b/homeassistant/components/devolo_home_control/__init__.py @@ -18,7 +18,7 @@ from homeassistant.core import Event, HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers.device_registry import DeviceEntry -from .const import CONF_MYDEVOLO, DEFAULT_MYDEVOLO, GATEWAY_SERIAL_PATTERN, PLATFORMS +from .const import GATEWAY_SERIAL_PATTERN, PLATFORMS type DevoloHomeControlConfigEntry = ConfigEntry[list[HomeControl]] @@ -102,5 +102,4 @@ def configure_mydevolo(conf: dict[str, Any] | MappingProxyType[str, Any]) -> Myd mydevolo = Mydevolo() mydevolo.user = conf[CONF_USERNAME] mydevolo.password = conf[CONF_PASSWORD] - mydevolo.url = conf.get(CONF_MYDEVOLO, DEFAULT_MYDEVOLO) return mydevolo diff --git a/homeassistant/components/devolo_home_control/config_flow.py b/homeassistant/components/devolo_home_control/config_flow.py index bfb083e0c44..e15204af7c2 100644 --- a/homeassistant/components/devolo_home_control/config_flow.py +++ b/homeassistant/components/devolo_home_control/config_flow.py @@ -18,7 +18,7 @@ from 
homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import callback from . import configure_mydevolo -from .const import CONF_MYDEVOLO, DEFAULT_MYDEVOLO, DOMAIN, SUPPORTED_MODEL_TYPES +from .const import DOMAIN, SUPPORTED_MODEL_TYPES from .exceptions import CredentialsInvalid, UuidChanged @@ -35,14 +35,11 @@ class DevoloHomeControlFlowHandler(ConfigFlow, domain=DOMAIN): vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str, } - self._url = DEFAULT_MYDEVOLO async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" - if self.show_advanced_options: - self.data_schema[vol.Required(CONF_MYDEVOLO, default=self._url)] = str if user_input is None: return self._show_form(step_id="user") try: @@ -78,7 +75,6 @@ class DevoloHomeControlFlowHandler(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle reauthentication.""" self._reauth_entry = self._get_reauth_entry() - self._url = entry_data[CONF_MYDEVOLO] self.data_schema = { vol.Required(CONF_USERNAME, default=entry_data[CONF_USERNAME]): str, vol.Required(CONF_PASSWORD): str, @@ -104,7 +100,6 @@ class DevoloHomeControlFlowHandler(ConfigFlow, domain=DOMAIN): async def _connect_mydevolo(self, user_input: dict[str, Any]) -> ConfigFlowResult: """Connect to mydevolo.""" - user_input[CONF_MYDEVOLO] = user_input.get(CONF_MYDEVOLO, self._url) mydevolo = configure_mydevolo(conf=user_input) credentials_valid = await self.hass.async_add_executor_job( mydevolo.credentials_valid @@ -121,7 +116,6 @@ class DevoloHomeControlFlowHandler(ConfigFlow, domain=DOMAIN): data={ CONF_PASSWORD: mydevolo.password, CONF_USERNAME: mydevolo.user, - CONF_MYDEVOLO: mydevolo.url, }, ) diff --git a/homeassistant/components/devolo_home_control/const.py b/homeassistant/components/devolo_home_control/const.py index eb48a6d269e..bd2282ad99f 100644 --- a/homeassistant/components/devolo_home_control/const.py +++ b/homeassistant/components/devolo_home_control/const.py @@ -5,7 +5,6 @@ import re from homeassistant.const import Platform DOMAIN = "devolo_home_control" -DEFAULT_MYDEVOLO = "https://www.mydevolo.com" PLATFORMS = [ Platform.BINARY_SENSOR, Platform.CLIMATE, @@ -15,6 +14,5 @@ PLATFORMS = [ Platform.SIREN, Platform.SWITCH, ] -CONF_MYDEVOLO = "mydevolo_url" GATEWAY_SERIAL_PATTERN = re.compile(r"\d{16}") SUPPORTED_MODEL_TYPES = ["2600", "2601"] diff --git a/tests/components/devolo_home_control/__init__.py b/tests/components/devolo_home_control/__init__.py index f0e18eaf1a2..a1bf9d56aac 100644 --- a/tests/components/devolo_home_control/__init__.py +++ b/tests/components/devolo_home_control/__init__.py @@ -11,7 +11,6 @@ def configure_integration(hass: HomeAssistant) -> MockConfigEntry: config = { "username": "test-username", "password": "test-password", - "mydevolo_url": "https://test_mydevolo_url.test", } entry = MockConfigEntry( domain=DOMAIN, data=config, entry_id="123456", unique_id="123456" diff --git a/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr b/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr index 6a7ef1fc6d3..abedc128756 100644 --- a/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr +++ b/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr @@ -33,7 +33,6 @@ ]), 'entry': dict({ 'data': dict({ - 'mydevolo_url': 'https://test_mydevolo_url.test', 'password': '**REDACTED**', 'username': '**REDACTED**', }), diff --git a/tests/components/devolo_home_control/test_config_flow.py 
b/tests/components/devolo_home_control/test_config_flow.py index 7c9bfdeff63..aab3e69b38f 100644 --- a/tests/components/devolo_home_control/test_config_flow.py +++ b/tests/components/devolo_home_control/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import patch import pytest from homeassistant import config_entries -from homeassistant.components.devolo_home_control.const import DEFAULT_MYDEVOLO, DOMAIN +from homeassistant.components.devolo_home_control.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResult, FlowResultType @@ -90,7 +90,6 @@ async def test_form_advanced_options(hass: HomeAssistant) -> None: { "username": "test-username", "password": "test-password", - "mydevolo_url": "https://test_mydevolo_url.test", }, ) await hass.async_block_till_done() @@ -100,7 +99,6 @@ async def test_form_advanced_options(hass: HomeAssistant) -> None: assert result2["data"] == { "username": "test-username", "password": "test-password", - "mydevolo_url": "https://test_mydevolo_url.test", } assert len(mock_setup_entry.mock_calls) == 1 @@ -170,7 +168,6 @@ async def test_form_reauth(hass: HomeAssistant) -> None: data={ "username": "test-username", "password": "test-password", - "mydevolo_url": "https://test_mydevolo_url.test", }, ) mock_config.add_to_hass(hass) @@ -207,7 +204,6 @@ async def test_form_invalid_credentials_reauth(hass: HomeAssistant) -> None: data={ "username": "test-username", "password": "test-password", - "mydevolo_url": "https://test_mydevolo_url.test", }, ) mock_config.add_to_hass(hass) @@ -229,7 +225,6 @@ async def test_form_uuid_change_reauth(hass: HomeAssistant) -> None: data={ "username": "test-username", "password": "test-password", - "mydevolo_url": "https://test_mydevolo_url.test", }, ) mock_config.add_to_hass(hass) @@ -281,7 +276,6 @@ async def _setup(hass: HomeAssistant, result: FlowResult) -> None: assert result2["data"] == { "username": "test-username", "password": "test-password", - "mydevolo_url": DEFAULT_MYDEVOLO, } assert len(mock_setup_entry.mock_calls) == 1 From e343b695571ffe9899d122744633e1b22b21274a Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 10 Dec 2024 11:35:00 +0100 Subject: [PATCH 0458/1198] Update gotailwind to v0.3.0 (#132817) --- homeassistant/components/tailwind/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/tailwind/manifest.json b/homeassistant/components/tailwind/manifest.json index 705f591785f..7ad43c929a7 100644 --- a/homeassistant/components/tailwind/manifest.json +++ b/homeassistant/components/tailwind/manifest.json @@ -11,7 +11,7 @@ "documentation": "https://www.home-assistant.io/integrations/tailwind", "integration_type": "device", "iot_class": "local_polling", - "requirements": ["gotailwind==0.2.4"], + "requirements": ["gotailwind==0.3.0"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/requirements_all.txt b/requirements_all.txt index 6fce6667da5..97a3cf368dc 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1030,7 +1030,7 @@ googlemaps==2.5.1 goslide-api==0.7.0 # homeassistant.components.tailwind -gotailwind==0.2.4 +gotailwind==0.3.0 # homeassistant.components.govee_ble govee-ble==0.40.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 540ec433359..5738016cefc 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -877,7 +877,7 @@ google-photos-library-api==0.12.1 
googlemaps==2.5.1 # homeassistant.components.tailwind -gotailwind==0.2.4 +gotailwind==0.3.0 # homeassistant.components.govee_ble govee-ble==0.40.0 From 03c6dab1431e98f179c0e0ee286f60ff4d7ae97f Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 10 Dec 2024 11:47:08 +0100 Subject: [PATCH 0459/1198] Add missing Kelvin attributes to mqtt ignore list (#132820) --- homeassistant/components/mqtt/light/schema_basic.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/homeassistant/components/mqtt/light/schema_basic.py b/homeassistant/components/mqtt/light/schema_basic.py index de6a9d4c126..8a1b7a2a76a 100644 --- a/homeassistant/components/mqtt/light/schema_basic.py +++ b/homeassistant/components/mqtt/light/schema_basic.py @@ -12,10 +12,13 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_EFFECT_LIST, ATTR_HS_COLOR, + ATTR_MAX_COLOR_TEMP_KELVIN, ATTR_MAX_MIREDS, + ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_MIN_MIREDS, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, @@ -113,10 +116,13 @@ MQTT_LIGHT_ATTRIBUTES_BLOCKED = frozenset( ATTR_COLOR_MODE, ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_EFFECT_LIST, ATTR_HS_COLOR, + ATTR_MAX_COLOR_TEMP_KELVIN, ATTR_MAX_MIREDS, + ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_MIN_MIREDS, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, From f6621023c2fbea79fea486c70189ccde75b4b3d3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=85ke=20Strandberg?= Date: Tue, 10 Dec 2024 12:20:21 +0100 Subject: [PATCH 0460/1198] Improve myuplink tests to reach full coverage for all modules (#131937) --- .../fixtures/device_points_nibe_f730.json | 51 + .../snapshots/test_binary_sensor.ambr | 326 ++ .../myuplink/snapshots/test_diagnostics.ambr | 102 + .../myuplink/snapshots/test_sensor.ambr | 4767 +++++++++++++++++ .../components/myuplink/test_binary_sensor.py | 57 +- tests/components/myuplink/test_config_flow.py | 46 +- tests/components/myuplink/test_init.py | 84 + tests/components/myuplink/test_sensor.py | 26 +- 8 files changed, 5390 insertions(+), 69 deletions(-) create mode 100644 tests/components/myuplink/snapshots/test_binary_sensor.ambr create mode 100644 tests/components/myuplink/snapshots/test_sensor.ambr diff --git a/tests/components/myuplink/fixtures/device_points_nibe_f730.json b/tests/components/myuplink/fixtures/device_points_nibe_f730.json index 99dd9c857e6..aaccdec530a 100644 --- a/tests/components/myuplink/fixtures/device_points_nibe_f730.json +++ b/tests/components/myuplink/fixtures/device_points_nibe_f730.json @@ -1024,6 +1024,23 @@ "scaleValue": "1", "zoneId": null }, + { + "category": "F730 CU 3x400V", + "parameterId": "148072r", + "parameterName": "r start diff additional heat", + "parameterUnit": "DM", + "writable": false, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": 700, + "strVal": "700DM", + "smartHomeCategories": [], + "minValue": 100, + "maxValue": 2000, + "stepValue": 10, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, { "category": "F730 CU 3x400V", "parameterId": "47011", @@ -1040,5 +1057,39 @@ "enumValues": [], "scaleValue": "1", "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "47007", + "parameterName": "Excluded", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": 1, + "strVal": "1", + "smartHomeCategories": ["sh-indoorSpOffsHeat"], + "minValue": -10, + "maxValue": 10, + "stepValue": 1, + "enumValues": [], + "scaleValue": "1", + "zoneId": 
null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "99000", + "parameterName": "Excluded 2", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": "Hello", + "strVal": "Hello", + "smartHomeCategories": [], + "minValue": "", + "maxValue": "", + "stepValue": 1, + "enumValues": [], + "scaleValue": "1", + "zoneId": null } ] diff --git a/tests/components/myuplink/snapshots/test_binary_sensor.ambr b/tests/components/myuplink/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..755cae3c623 --- /dev/null +++ b/tests/components/myuplink/snapshots/test_binary_sensor.ambr @@ -0,0 +1,326 @@ +# serializer version: 1 +# name: test_binary_sensor_states[binary_sensor.gotham_city_alarm-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.gotham_city_alarm', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Alarm', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'alarm', + 'unique_id': '123456-7890-1234-has_alarm', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_alarm-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Gotham City Alarm', + }), + 'context': , + 'entity_id': 'binary_sensor.gotham_city_alarm', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_connectivity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.gotham_city_connectivity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Connectivity', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-connection_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_connectivity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Gotham City Connectivity', + }), + 'context': , + 'entity_id': 'binary_sensor.gotham_city_connectivity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_connectivity_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.gotham_city_connectivity_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, 
+ 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Connectivity', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-connection_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_connectivity_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Gotham City Connectivity', + }), + 'context': , + 'entity_id': 'binary_sensor.gotham_city_connectivity_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_extern_adjustment_climate_system_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.gotham_city_extern_adjustment_climate_system_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Extern. adjust\xadment climate system 1', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'elect_add', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43161', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_extern_adjustment_climate_system_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Extern. adjust\xadment climate system 1', + }), + 'context': , + 'entity_id': 'binary_sensor.gotham_city_extern_adjustment_climate_system_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_extern_adjustment_climate_system_1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.gotham_city_extern_adjustment_climate_system_1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Extern. adjust\xadment climate system 1', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'elect_add', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43161', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_extern_adjustment_climate_system_1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Extern. 
adjust\xadment climate system 1', + }), + 'context': , + 'entity_id': 'binary_sensor.gotham_city_extern_adjustment_climate_system_1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_pump_heating_medium_gp1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.gotham_city_pump_heating_medium_gp1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pump: Heating medium (GP1)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-49995', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_pump_heating_medium_gp1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Pump: Heating medium (GP1)', + }), + 'context': , + 'entity_id': 'binary_sensor.gotham_city_pump_heating_medium_gp1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_pump_heating_medium_gp1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.gotham_city_pump_heating_medium_gp1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pump: Heating medium (GP1)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-49995', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_pump_heating_medium_gp1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Pump: Heating medium (GP1)', + }), + 'context': , + 'entity_id': 'binary_sensor.gotham_city_pump_heating_medium_gp1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/myuplink/snapshots/test_diagnostics.ambr b/tests/components/myuplink/snapshots/test_diagnostics.ambr index 1b3502c1f04..71b33c58a87 100644 --- a/tests/components/myuplink/snapshots/test_diagnostics.ambr +++ b/tests/components/myuplink/snapshots/test_diagnostics.ambr @@ -1085,6 +1085,23 @@ "scaleValue": "1", "zoneId": null }, + { + "category": "F730 CU 3x400V", + "parameterId": "148072r", + "parameterName": "r start diff additional heat", + "parameterUnit": "DM", + "writable": false, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": 700, + "strVal": "700DM", + "smartHomeCategories": [], + "minValue": 100, + "maxValue": 2000, + "stepValue": 10, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, { "category": "F730 CU 3x400V", "parameterId": "47011", @@ 
-1101,6 +1118,40 @@ "enumValues": [], "scaleValue": "1", "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "47007", + "parameterName": "Excluded", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": 1, + "strVal": "1", + "smartHomeCategories": ["sh-indoorSpOffsHeat"], + "minValue": -10, + "maxValue": 10, + "stepValue": 1, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "99000", + "parameterName": "Excluded 2", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": "Hello", + "strVal": "Hello", + "smartHomeCategories": [], + "minValue": "", + "maxValue": "", + "stepValue": 1, + "enumValues": [], + "scaleValue": "1", + "zoneId": null } ] @@ -2179,6 +2230,23 @@ "scaleValue": "1", "zoneId": null }, + { + "category": "F730 CU 3x400V", + "parameterId": "148072r", + "parameterName": "r start diff additional heat", + "parameterUnit": "DM", + "writable": false, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": 700, + "strVal": "700DM", + "smartHomeCategories": [], + "minValue": 100, + "maxValue": 2000, + "stepValue": 10, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, { "category": "F730 CU 3x400V", "parameterId": "47011", @@ -2195,6 +2263,40 @@ "enumValues": [], "scaleValue": "1", "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "47007", + "parameterName": "Excluded", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": 1, + "strVal": "1", + "smartHomeCategories": ["sh-indoorSpOffsHeat"], + "minValue": -10, + "maxValue": 10, + "stepValue": 1, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "99000", + "parameterName": "Excluded 2", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": "Hello", + "strVal": "Hello", + "smartHomeCategories": [], + "minValue": "", + "maxValue": "", + "stepValue": 1, + "enumValues": [], + "scaleValue": "1", + "zoneId": null } ] diff --git a/tests/components/myuplink/snapshots/test_sensor.ambr b/tests/components/myuplink/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..a5469dc9a77 --- /dev/null +++ b/tests/components/myuplink/snapshots/test_sensor.ambr @@ -0,0 +1,4767 @@ +# serializer version: 1 +# name: test_sensor_states[sensor.gotham_city_average_outdoor_temp_bt1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_average_outdoor_temp_bt1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Average outdoor temp (BT1)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40067', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_average_outdoor_temp_bt1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Average outdoor temp (BT1)', + 'state_class': 
, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_average_outdoor_temp_bt1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-12.2', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_average_outdoor_temp_bt1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_average_outdoor_temp_bt1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Average outdoor temp (BT1)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40067', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_average_outdoor_temp_bt1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Average outdoor temp (BT1)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_average_outdoor_temp_bt1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-12.2', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_calculated_supply_climate_system_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_calculated_supply_climate_system_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Calculated supply climate system 1', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43009', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_calculated_supply_climate_system_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Calculated supply climate system 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_calculated_supply_climate_system_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '37.9', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_calculated_supply_climate_system_1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_calculated_supply_climate_system_1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Calculated supply 
climate system 1', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43009', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_calculated_supply_climate_system_1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Calculated supply climate system 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_calculated_supply_climate_system_1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '37.9', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_condenser_bt12-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_condenser_bt12', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Condenser (BT12)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40017', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_condenser_bt12-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Condenser (BT12)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_condenser_bt12', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '37.7', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_condenser_bt12_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_condenser_bt12_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Condenser (BT12)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40017', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_condenser_bt12_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Condenser (BT12)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_condenser_bt12_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '37.7', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_be1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 
'entity_id': 'sensor.gotham_city_current_be1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current (BE1)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40079', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_be1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Gotham City Current (BE1)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_be1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.1', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_be1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_be1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current (BE1)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40079', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_be1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Gotham City Current (BE1)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_be1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.1', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_be2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_be2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current (BE2)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40081', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_be2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Gotham City Current (BE2)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_be2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_be2_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_be2_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current (BE2)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40081', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_be2_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Gotham City Current (BE2)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_be2_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_be3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_be3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current (BE3)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40083', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_be3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Gotham City Current (BE3)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_be3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.7', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_be3_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_be3_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current (BE3)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40083', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_be3_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Gotham City Current (BE3)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_be3_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.7', + }) +# --- +# name: 
test_sensor_states[sensor.gotham_city_current_compressor_frequency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_compressor_frequency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current compressor frequency', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-41778', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_compressor_frequency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Gotham City Current compressor frequency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_compressor_frequency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '57', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_compressor_frequency_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_compressor_frequency_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current compressor frequency', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-41778', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_compressor_frequency_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Gotham City Current compressor frequency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_compressor_frequency_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '57', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_fan_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_fan_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Current fan mode', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'fan_mode', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43108', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_fan_mode-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Current fan mode', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_fan_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_fan_mode_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_fan_mode_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Current fan mode', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'fan_mode', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43108', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_fan_mode_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Current fan mode', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_fan_mode_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_hot_water_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_hot_water_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Current hot water mode', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43109', + 'unit_of_measurement': '', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_hot_water_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Current hot water mode', + 'unit_of_measurement': '', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_hot_water_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_hot_water_mode_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_hot_water_mode_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Current hot water mode', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43109', + 'unit_of_measurement': '', + }) +# --- +# name: 
test_sensor_states[sensor.gotham_city_current_hot_water_mode_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Current hot water mode', + 'unit_of_measurement': '', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_hot_water_mode_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_outd_temp_bt1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_outd_temp_bt1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current outd temp (BT1)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40004', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_outd_temp_bt1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Current outd temp (BT1)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_outd_temp_bt1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-9.3', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_outd_temp_bt1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_outd_temp_bt1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current outd temp (BT1)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40004', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_outd_temp_bt1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Current outd temp (BT1)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_outd_temp_bt1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-9.3', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_decrease_from_reference_value-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_decrease_from_reference_value', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 
'original_name': 'Decrease from reference value', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43125', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_decrease_from_reference_value-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Decrease from reference value', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_decrease_from_reference_value', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-1.1', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_decrease_from_reference_value_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_decrease_from_reference_value_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Decrease from reference value', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43125', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_decrease_from_reference_value_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Decrease from reference value', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_decrease_from_reference_value_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-1.1', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_defrosting_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_defrosting_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Defrosting time', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43066', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_defrosting_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Gotham City Defrosting time', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_defrosting_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_defrosting_time_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_defrosting_time_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Defrosting time', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43066', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_defrosting_time_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Gotham City Defrosting time', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_defrosting_time_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_degree_minutes-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_degree_minutes', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Degree minutes', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40940', + 'unit_of_measurement': '', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_degree_minutes-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Degree minutes', + 'unit_of_measurement': '', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_degree_minutes', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-875', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_degree_minutes_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_degree_minutes_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Degree minutes', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40940', + 'unit_of_measurement': '', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_degree_minutes_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Degree minutes', + 'unit_of_measurement': '', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_degree_minutes_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-875', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_desired_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': 
None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_desired_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Desired humidity', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-42770', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_desired_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Desired humidity', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_desired_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_desired_humidity_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_desired_humidity_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Desired humidity', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-49633', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_desired_humidity_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Desired humidity', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_desired_humidity_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_desired_humidity_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_desired_humidity_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Desired humidity', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-42770', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_desired_humidity_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Desired humidity', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_desired_humidity_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_desired_humidity_4-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': 
None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_desired_humidity_4', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Desired humidity', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-49633', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_desired_humidity_4-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Desired humidity', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_desired_humidity_4', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_discharge_bt14-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_discharge_bt14', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Discharge (BT14)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40018', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_discharge_bt14-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Discharge (BT14)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_discharge_bt14', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '89.1', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_discharge_bt14_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_discharge_bt14_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Discharge (BT14)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40018', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_discharge_bt14_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Discharge (BT14)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_discharge_bt14_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '89.1', + }) 
+# --- +# name: test_sensor_states[sensor.gotham_city_dt_inverter_exh_air_bt20-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_dt_inverter_exh_air_bt20', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'dT Inverter - exh air (BT20)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43146', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_dt_inverter_exh_air_bt20-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City dT Inverter - exh air (BT20)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_dt_inverter_exh_air_bt20', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '14.9', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_dt_inverter_exh_air_bt20_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_dt_inverter_exh_air_bt20_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'dT Inverter - exh air (BT20)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43146', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_dt_inverter_exh_air_bt20_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City dT Inverter - exh air (BT20)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_dt_inverter_exh_air_bt20_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '14.9', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_evaporator_bt16-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_evaporator_bt16', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Evaporator (BT16)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40020', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_evaporator_bt16-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Evaporator (BT16)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_evaporator_bt16', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-14.7', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_evaporator_bt16_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_evaporator_bt16_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Evaporator (BT16)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40020', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_evaporator_bt16_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Evaporator (BT16)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_evaporator_bt16_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-14.7', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_exhaust_air_bt20-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_exhaust_air_bt20', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Exhaust air (BT20)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40025', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_exhaust_air_bt20-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Exhaust air (BT20)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_exhaust_air_bt20', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '22.5', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_exhaust_air_bt20_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_exhaust_air_bt20_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Exhaust air (BT20)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40025', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_exhaust_air_bt20_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Exhaust air (BT20)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_exhaust_air_bt20_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '22.5', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_extract_air_bt21-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_extract_air_bt21', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Extract air (BT21)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40026', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_extract_air_bt21-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Extract air (BT21)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_extract_air_bt21', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-12.1', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_extract_air_bt21_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_extract_air_bt21_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Extract air (BT21)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40026', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_extract_air_bt21_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Extract air (BT21)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_extract_air_bt21_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-12.1', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_heating_medium_pump_speed_gp1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_heating_medium_pump_speed_gp1', + 'has_entity_name': True, + 'hidden_by': None, 
+ 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating medium pump speed (GP1)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43437', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_heating_medium_pump_speed_gp1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Heating medium pump speed (GP1)', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_heating_medium_pump_speed_gp1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '79', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_heating_medium_pump_speed_gp1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_heating_medium_pump_speed_gp1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating medium pump speed (GP1)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43437', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_heating_medium_pump_speed_gp1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Heating medium pump speed (GP1)', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_heating_medium_pump_speed_gp1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '79', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_charge_current_value_bt12_bt63-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_hot_water_charge_current_value_bt12_bt63', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Hot water: charge current value ((BT12 | BT63))', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43116', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_charge_current_value_bt12_bt63-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Hot water: charge current value ((BT12 | BT63))', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_hot_water_charge_current_value_bt12_bt63', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: 
test_sensor_states[sensor.gotham_city_hot_water_charge_current_value_bt12_bt63_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_hot_water_charge_current_value_bt12_bt63_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Hot water: charge current value ((BT12 | BT63))', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43116', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_charge_current_value_bt12_bt63_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Hot water: charge current value ((BT12 | BT63))', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_hot_water_charge_current_value_bt12_bt63_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_charge_set_point_value-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_hot_water_charge_set_point_value', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Hot water: charge set point value', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43115', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_charge_set_point_value-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Hot water: charge set point value', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_hot_water_charge_set_point_value', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_charge_set_point_value_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_hot_water_charge_set_point_value_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Hot water: charge set point value', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 
'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43115', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_charge_set_point_value_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Hot water: charge set point value', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_hot_water_charge_set_point_value_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_charging_bt6-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_hot_water_charging_bt6', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Hot water charging (BT6)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40014', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_charging_bt6-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Hot water charging (BT6)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_hot_water_charging_bt6', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '44.4', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_charging_bt6_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_hot_water_charging_bt6_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Hot water charging (BT6)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40014', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_charging_bt6_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Hot water charging (BT6)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_hot_water_charging_bt6_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '44.4', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_top_bt7-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.gotham_city_hot_water_top_bt7', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Hot water top (BT7)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40013', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_top_bt7-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Hot water top (BT7)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_hot_water_top_bt7', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '46', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_top_bt7_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_hot_water_top_bt7_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Hot water top (BT7)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40013', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_top_bt7_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Hot water top (BT7)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_hot_water_top_bt7_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '46', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_int_elec_add_heat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Alarm', + 'Alarm', + 'Active', + 'Off', + 'Blocked', + 'Off', + 'Active', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_int_elec_add_heat', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Int elec add heat', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'elect_add', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-49993', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_int_elec_add_heat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gotham City Int elec add heat', + 'options': list([ + 'Alarm', + 'Alarm', + 'Active', + 'Off', + 'Blocked', + 'Off', + 'Active', + ]), + }), + 'context': , + 'entity_id': 'sensor.gotham_city_int_elec_add_heat', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': 'Active', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_int_elec_add_heat_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Alarm', + 'Alarm', + 'Active', + 'Off', + 'Blocked', + 'Off', + 'Active', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_int_elec_add_heat_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Int elec add heat', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'elect_add', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-49993', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_int_elec_add_heat_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gotham City Int elec add heat', + 'options': list([ + 'Alarm', + 'Alarm', + 'Active', + 'Off', + 'Blocked', + 'Off', + 'Active', + ]), + }), + 'context': , + 'entity_id': 'sensor.gotham_city_int_elec_add_heat_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Active', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_int_elec_add_heat_raw-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_int_elec_add_heat_raw', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Int elec add heat raw', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'elect_add', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-49993-raw', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_int_elec_add_heat_raw-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Int elec add heat raw', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_int_elec_add_heat_raw', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_int_elec_add_heat_raw_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_int_elec_add_heat_raw_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Int elec add heat raw', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'elect_add', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-49993-raw', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor_states[sensor.gotham_city_int_elec_add_heat_raw_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Int elec add heat raw', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_int_elec_add_heat_raw_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_inverter_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_inverter_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Inverter temperature', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43140', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_inverter_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Inverter temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_inverter_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '37.2', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_inverter_temperature_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_inverter_temperature_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Inverter temperature', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43140', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_inverter_temperature_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Inverter temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_inverter_temperature_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '37.2', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_liquid_line_bt15-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_liquid_line_bt15', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Liquid line (BT15)', + 'platform': 
'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40019', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_liquid_line_bt15-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Liquid line (BT15)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_liquid_line_bt15', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '34.4', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_liquid_line_bt15_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_liquid_line_bt15_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Liquid line (BT15)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40019', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_liquid_line_bt15_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Liquid line (BT15)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_liquid_line_bt15_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '34.4', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_max_compressor_frequency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_max_compressor_frequency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Max compressor frequency', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43123', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_max_compressor_frequency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Gotham City Max compressor frequency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_max_compressor_frequency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '57', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_max_compressor_frequency_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.gotham_city_max_compressor_frequency_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Max compressor frequency', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43123', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_max_compressor_frequency_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Gotham City Max compressor frequency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_max_compressor_frequency_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '57', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_min_compressor_frequency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_min_compressor_frequency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Min compressor frequency', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43122', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_min_compressor_frequency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Gotham City Min compressor frequency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_min_compressor_frequency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_min_compressor_frequency_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_min_compressor_frequency_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Min compressor frequency', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43122', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_min_compressor_frequency_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Gotham City Min compressor frequency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_min_compressor_frequency_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + 
}) +# --- +# name: test_sensor_states[sensor.gotham_city_oil_temperature_bt29-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_oil_temperature_bt29', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Oil temperature (BT29)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40146', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_oil_temperature_bt29-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Oil temperature (BT29)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_oil_temperature_bt29', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_oil_temperature_bt29_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_oil_temperature_bt29_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Oil temperature (BT29)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40146', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_oil_temperature_bt29_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Oil temperature (BT29)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_oil_temperature_bt29_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_oil_temperature_ep15_bt29-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_oil_temperature_ep15_bt29', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Oil temperature (EP15-BT29)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40145', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_oil_temperature_ep15_bt29-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Oil temperature (EP15-BT29)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_oil_temperature_ep15_bt29', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_oil_temperature_ep15_bt29_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_oil_temperature_ep15_bt29_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Oil temperature (EP15-BT29)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40145', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_oil_temperature_ep15_bt29_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Oil temperature (EP15-BT29)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_oil_temperature_ep15_bt29_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_priority-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Off', + 'Hot water', + 'Heating', + 'Pool', + 'Pool 2', + 'Trans\xadfer', + 'Cooling', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_priority', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Priority', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'priority', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-49994', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_priority-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gotham City Priority', + 'options': list([ + 'Off', + 'Hot water', + 'Heating', + 'Pool', + 'Pool 2', + 'Trans\xadfer', + 'Cooling', + ]), + }), + 'context': , + 'entity_id': 'sensor.gotham_city_priority', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Heating', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_priority_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Off', + 'Hot water', + 'Heating', + 'Pool', + 'Pool 2', + 'Trans\xadfer', + 'Cooling', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_priority_2', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Priority', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'priority', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-49994', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_priority_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gotham City Priority', + 'options': list([ + 'Off', + 'Hot water', + 'Heating', + 'Pool', + 'Pool 2', + 'Trans\xadfer', + 'Cooling', + ]), + }), + 'context': , + 'entity_id': 'sensor.gotham_city_priority_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Heating', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_priority_raw-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_priority_raw', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Prior\xadity raw', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'priority', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-49994-raw', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_priority_raw-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Prior\xadity raw', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_priority_raw', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_priority_raw_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_priority_raw_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Prior\xadity raw', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'priority', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-49994-raw', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_priority_raw_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Prior\xadity raw', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_priority_raw_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_r_start_diff_additional_heat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.gotham_city_r_start_diff_additional_heat', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'r start diff additional heat', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-148072r', + 'unit_of_measurement': 'DM', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_r_start_diff_additional_heat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City r start diff additional heat', + 'unit_of_measurement': 'DM', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_r_start_diff_additional_heat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '700', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_r_start_diff_additional_heat_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_r_start_diff_additional_heat_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'r start diff additional heat', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-148072r', + 'unit_of_measurement': 'DM', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_r_start_diff_additional_heat_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City r start diff additional heat', + 'unit_of_measurement': 'DM', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_r_start_diff_additional_heat_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '700', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_reference_air_speed_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_reference_air_speed_sensor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reference, air speed sensor', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'airflow', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43124', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_reference_air_speed_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'volume_flow_rate', + 'friendly_name': 'Gotham City Reference, air speed sensor', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_reference_air_speed_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '127.6', + }) +# --- +# name: 
test_sensor_states[sensor.gotham_city_reference_air_speed_sensor_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_reference_air_speed_sensor_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reference, air speed sensor', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'airflow', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43124', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_reference_air_speed_sensor_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'volume_flow_rate', + 'friendly_name': 'Gotham City Reference, air speed sensor', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_reference_air_speed_sensor_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '127.6', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_return_line_bt3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_return_line_bt3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Return line (BT3)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40012', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_return_line_bt3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Return line (BT3)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_return_line_bt3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '34.4', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_return_line_bt3_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_return_line_bt3_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Return line (BT3)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40012', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_return_line_bt3_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'device_class': 'temperature', + 'friendly_name': 'Gotham City Return line (BT3)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_return_line_bt3_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '34.4', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_return_line_bt62-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_return_line_bt62', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Return line (BT62)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40048', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_return_line_bt62-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Return line (BT62)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_return_line_bt62', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_return_line_bt62_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_return_line_bt62_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Return line (BT62)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40048', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_return_line_bt62_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Return line (BT62)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_return_line_bt62_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_room_temperature_bt50-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_room_temperature_bt50', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Room temperature (BT50)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, 
+ 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40033', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_room_temperature_bt50-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Room temperature (BT50)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_room_temperature_bt50', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '21.2', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_room_temperature_bt50_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_room_temperature_bt50_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Room temperature (BT50)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40033', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_room_temperature_bt50_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Room temperature (BT50)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_room_temperature_bt50_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '21.2', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_status_compressor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Off', + 'Starts', + 'Runs', + 'Stops', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_status_compressor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Status compressor', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_compressor', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43427', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_status_compressor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gotham City Status compressor', + 'options': list([ + 'Off', + 'Starts', + 'Runs', + 'Stops', + ]), + }), + 'context': , + 'entity_id': 'sensor.gotham_city_status_compressor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Runs', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_status_compressor_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Off', + 'Starts', + 'Runs', + 'Stops', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_status_compressor_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Status compressor', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_compressor', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43427', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_status_compressor_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gotham City Status compressor', + 'options': list([ + 'Off', + 'Starts', + 'Runs', + 'Stops', + ]), + }), + 'context': , + 'entity_id': 'sensor.gotham_city_status_compressor_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Runs', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_status_compressor_raw-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_status_compressor_raw', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Status com\xadpressor raw', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_compressor', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43427-raw', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_status_compressor_raw-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Status com\xadpressor raw', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_status_compressor_raw', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_status_compressor_raw_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_status_compressor_raw_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Status com\xadpressor raw', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_compressor', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43427-raw', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_status_compressor_raw_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Status com\xadpressor raw', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_status_compressor_raw_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_suction_gas_bt17-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_suction_gas_bt17', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Suction gas (BT17)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40022', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_suction_gas_bt17-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Suction gas (BT17)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_suction_gas_bt17', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-1.1', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_suction_gas_bt17_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_suction_gas_bt17_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Suction gas (BT17)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40022', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_suction_gas_bt17_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Suction gas (BT17)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_suction_gas_bt17_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-1.1', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_supply_line_bt2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_supply_line_bt2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Supply line (BT2)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40008', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_supply_line_bt2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Supply line (BT2)', + 'state_class': , + 'unit_of_measurement': , + }), + 
'context': , + 'entity_id': 'sensor.gotham_city_supply_line_bt2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '39.7', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_supply_line_bt2_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_supply_line_bt2_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Supply line (BT2)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40008', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_supply_line_bt2_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Supply line (BT2)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_supply_line_bt2_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '39.7', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_supply_line_bt61-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_supply_line_bt61', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Supply line (BT61)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40047', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_supply_line_bt61-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Supply line (BT61)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_supply_line_bt61', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_supply_line_bt61_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_supply_line_bt61_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Supply line (BT61)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40047', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensor_states[sensor.gotham_city_supply_line_bt61_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Supply line (BT61)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_supply_line_bt61_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_time_factor_add_heat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_time_factor_add_heat', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Time factor add heat', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43081', + 'unit_of_measurement': '', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_time_factor_add_heat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Time factor add heat', + 'unit_of_measurement': '', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_time_factor_add_heat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1686.9', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_time_factor_add_heat_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_time_factor_add_heat_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Time factor add heat', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43081', + 'unit_of_measurement': '', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_time_factor_add_heat_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Time factor add heat', + 'unit_of_measurement': '', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_time_factor_add_heat_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1686.9', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_value_air_velocity_sensor_bs1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_value_air_velocity_sensor_bs1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Value, air velocity sensor (BS1)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40050', + 'unit_of_measurement': '', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_value_air_velocity_sensor_bs1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Value, air velocity sensor (BS1)', + 'unit_of_measurement': '', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_value_air_velocity_sensor_bs1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '101.5', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_value_air_velocity_sensor_bs1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_value_air_velocity_sensor_bs1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Value, air velocity sensor (BS1)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40050', + 'unit_of_measurement': '', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_value_air_velocity_sensor_bs1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Value, air velocity sensor (BS1)', + 'unit_of_measurement': '', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_value_air_velocity_sensor_bs1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '101.5', + }) +# --- diff --git a/tests/components/myuplink/test_binary_sensor.py b/tests/components/myuplink/test_binary_sensor.py index 128a4ebdde9..160530bcdab 100644 --- a/tests/components/myuplink/test_binary_sensor.py +++ b/tests/components/myuplink/test_binary_sensor.py @@ -1,57 +1,28 @@ -"""Tests for myuplink sensor module.""" +"""Tests for myuplink binary sensor module.""" -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch -import pytest +from syrupy import SnapshotAssertion -from homeassistant.const import STATE_OFF, STATE_ON +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from . import setup_integration -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, snapshot_platform -# Test one entity from each of binary_sensor classes. 
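
A minimal sketch of the snapshot-test pattern this patch migrates to, for orientation only: it is not part of the patch itself. It assumes the `setup_integration` helper and the `mock_myuplink_client` / `mock_config_entry` fixtures used in the diff; the test name and the platform chosen here are placeholders.

# Sketch only: load a single platform and snapshot every entity it creates.
from unittest.mock import MagicMock, patch

from syrupy import SnapshotAssertion

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from . import setup_integration

from tests.common import MockConfigEntry, snapshot_platform


async def test_platform_snapshot(
    hass: HomeAssistant,
    mock_myuplink_client: MagicMock,
    mock_config_entry: MockConfigEntry,
    snapshot: SnapshotAssertion,
    entity_registry: er.EntityRegistry,
) -> None:
    """Snapshot all entities of one platform."""
    # Limit setup to a single platform so the snapshot stays small and stable.
    with patch("homeassistant.components.myuplink.PLATFORMS", [Platform.SENSOR]):
        await setup_integration(hass, mock_config_entry)

    # Compares every registry entry and state against the stored .ambr snapshot.
    await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)
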
-@pytest.mark.parametrize( - ("entity_id", "friendly_name", "test_attributes", "expected_state"), - [ - ( - "binary_sensor.gotham_city_pump_heating_medium_gp1", - "Gotham City Pump: Heating medium (GP1)", - True, - STATE_ON, - ), - ( - "binary_sensor.gotham_city_connectivity", - "Gotham City Connectivity", - False, - STATE_ON, - ), - ( - "binary_sensor.gotham_city_alarm", - "Gotham City Pump: Alarm", - False, - STATE_OFF, - ), - ], -) -async def test_sensor_states( +async def test_binary_sensor_states( hass: HomeAssistant, mock_myuplink_client: MagicMock, mock_config_entry: MockConfigEntry, - entity_id: str, - friendly_name: str, - test_attributes: bool, - expected_state: str, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, ) -> None: - """Test sensor state.""" - await setup_integration(hass, mock_config_entry) + """Test binary sensor state.""" - state = hass.states.get(entity_id) - assert state is not None - assert state.state == expected_state - if test_attributes: - assert state.attributes == { - "friendly_name": friendly_name, - } + with patch("homeassistant.components.myuplink.PLATFORMS", [Platform.BINARY_SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/myuplink/test_config_flow.py b/tests/components/myuplink/test_config_flow.py index 509af19db8c..6bcc8468617 100644 --- a/tests/components/myuplink/test_config_flow.py +++ b/tests/components/myuplink/test_config_flow.py @@ -14,7 +14,7 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow -from .const import CLIENT_ID +from .const import CLIENT_ID, UNIQUE_ID from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker @@ -76,7 +76,28 @@ async def test_full_flow( @pytest.mark.usefixtures("current_request_with_host") -async def test_flow_reauth( +@pytest.mark.parametrize( + ("unique_id", "scope", "expected_reason"), + [ + ( + UNIQUE_ID, + CURRENT_SCOPE, + "reauth_successful", + ), + ( + "wrong_uid", + CURRENT_SCOPE, + "account_mismatch", + ), + ( + UNIQUE_ID, + "READSYSTEM offline_access", + "reauth_successful", + ), + ], + ids=["reauth_only", "account_mismatch", "wrong_scope"], +) +async def test_flow_reauth_abort( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, @@ -84,27 +105,26 @@ async def test_flow_reauth( mock_config_entry: MockConfigEntry, access_token: str, expires_at: float, + unique_id: str, + scope: str, + expected_reason: str, ) -> None: - """Test reauth step.""" + """Test reauth step with correct params and mismatches.""" - OLD_SCOPE = "READSYSTEM offline_access" - OLD_SCOPE_TOKEN = { + CURRENT_TOKEN = { "auth_implementation": DOMAIN, "token": { "access_token": access_token, - "scope": OLD_SCOPE, + "scope": scope, "expires_in": 86399, "refresh_token": "3012bc9f-7a65-4240-b817-9154ffdcc30f", "token_type": "Bearer", "expires_at": expires_at, }, } - assert mock_config_entry.data["token"]["scope"] == CURRENT_SCOPE assert hass.config_entries.async_update_entry( - mock_config_entry, data=OLD_SCOPE_TOKEN + mock_config_entry, data=CURRENT_TOKEN, unique_id=unique_id ) - assert mock_config_entry.data["token"]["scope"] == OLD_SCOPE - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 result = await mock_config_entry.start_reauth_flow(hass) @@ -148,13 +168,11 @@ async def 
test_flow_reauth( with patch( f"homeassistant.components.{DOMAIN}.async_setup_entry", return_value=True - ) as mock_setup: + ): result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "reauth_successful" + assert result.get("reason") == expected_reason assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - assert len(mock_setup.mock_calls) == 1 - assert mock_config_entry.data["token"]["scope"] == CURRENT_SCOPE diff --git a/tests/components/myuplink/test_init.py b/tests/components/myuplink/test_init.py index 440002311e9..fda0d3526f9 100644 --- a/tests/components/myuplink/test_init.py +++ b/tests/components/myuplink/test_init.py @@ -4,18 +4,21 @@ import http import time from unittest.mock import MagicMock +from aiohttp import ClientConnectionError import pytest from homeassistant.components.myuplink.const import DOMAIN, OAUTH2_TOKEN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr +from homeassistant.setup import async_setup_component from . import setup_integration from .const import UNIQUE_ID from tests.common import MockConfigEntry, load_fixture from tests.test_util.aiohttp import AiohttpClientMocker +from tests.typing import WebSocketGenerator async def test_load_unload_entry( @@ -71,6 +74,37 @@ async def test_expired_token_refresh_failure( assert mock_config_entry.state is expected_state +@pytest.mark.parametrize( + ("expires_at", "expected_state"), + [ + ( + time.time() - 3600, + ConfigEntryState.SETUP_RETRY, + ), + ], + ids=[ + "client_connection_error", + ], +) +async def test_expired_token_refresh_connection_failure( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, + expected_state: ConfigEntryState, +) -> None: + """Test failure while refreshing token with a ClientError.""" + + aioclient_mock.clear_requests() + aioclient_mock.post( + OAUTH2_TOKEN, + exc=ClientConnectionError(), + ) + + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is expected_state + + @pytest.mark.parametrize( "load_systems_file", [load_fixture("systems.json", DOMAIN)], @@ -130,3 +164,53 @@ async def test_migrate_config_entry( assert mock_entry_v1_1.version == 1 assert mock_entry_v1_1.minor_version == 2 assert mock_entry_v1_1.unique_id == UNIQUE_ID + + +async def test_oaut2_scope_failure( + hass: HomeAssistant, + mock_myuplink_client: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that an incorrect OAuth2 scope fails.""" + + mock_config_entry.data["token"]["scope"] = "wrong_scope" + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + +async def test_device_remove_devices( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + mock_config_entry: MockConfigEntry, + mock_myuplink_client: MagicMock, + device_registry: dr.DeviceRegistry, +) -> None: + """Test we can only remove a device that no longer exists.""" + assert await async_setup_component(hass, "config", {}) + + mock_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + device_entry = device_registry.async_get_device( + identifiers={ + ( + DOMAIN, + "batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff", + ) + }, + ) + client = await 
hass_ws_client(hass) + response = await client.remove_device(device_entry.id, mock_config_entry.entry_id) + assert not response["success"] + + old_device_entry = device_registry.async_get_or_create( + config_entry_id=mock_config_entry.entry_id, + identifiers={(DOMAIN, "OLD-DEVICE-UUID")}, + ) + response = await client.remove_device( + old_device_entry.id, mock_config_entry.entry_id + ) + assert response["success"] diff --git a/tests/components/myuplink/test_sensor.py b/tests/components/myuplink/test_sensor.py index 8fecb787122..98cdfc322da 100644 --- a/tests/components/myuplink/test_sensor.py +++ b/tests/components/myuplink/test_sensor.py @@ -1,28 +1,30 @@ """Tests for myuplink sensor module.""" -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from . import setup_integration -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, snapshot_platform +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_sensor_states( hass: HomeAssistant, mock_myuplink_client: MagicMock, mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, ) -> None: """Test sensor state.""" - await setup_integration(hass, mock_config_entry) - state = hass.states.get("sensor.gotham_city_average_outdoor_temp_bt1") - assert state is not None - assert state.state == "-12.2" - assert state.attributes == { - "friendly_name": "Gotham City Average outdoor temp (BT1)", - "device_class": "temperature", - "state_class": "measurement", - "unit_of_measurement": "°C", - } + with patch("homeassistant.components.myuplink.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) From 71d7e14032d00bfed4194c5ad568a78de63c909d Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 10 Dec 2024 12:46:56 +0100 Subject: [PATCH 0461/1198] Update wled to v0.21.0 (#132822) --- homeassistant/components/wled/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/wled/manifest.json b/homeassistant/components/wled/manifest.json index c731f8181af..326008ae1af 100644 --- a/homeassistant/components/wled/manifest.json +++ b/homeassistant/components/wled/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/wled", "integration_type": "device", "iot_class": "local_push", - "requirements": ["wled==0.20.2"], + "requirements": ["wled==0.21.0"], "zeroconf": ["_wled._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index 97a3cf368dc..360f6a159dd 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3011,7 +3011,7 @@ wiffi==1.1.2 wirelesstagpy==0.8.1 # homeassistant.components.wled -wled==0.20.2 +wled==0.21.0 # homeassistant.components.wolflink wolf-comm==0.0.15 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5738016cefc..185fdae7bd5 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2406,7 +2406,7 @@ whois==0.9.27 wiffi==1.1.2 # homeassistant.components.wled -wled==0.20.2 +wled==0.21.0 # homeassistant.components.wolflink wolf-comm==0.0.15 From 46d4081ec678d2533f1217dfd4c733e218e1dd79 Mon Sep 
17 00:00:00 2001 From: =?UTF-8?q?=C3=85ke=20Strandberg?= Date: Tue, 10 Dec 2024 12:58:42 +0100 Subject: [PATCH 0462/1198] Address review comment on myuplink tests (#132819) --- tests/components/myuplink/test_config_flow.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/components/myuplink/test_config_flow.py b/tests/components/myuplink/test_config_flow.py index 6bcc8468617..e823402bda6 100644 --- a/tests/components/myuplink/test_config_flow.py +++ b/tests/components/myuplink/test_config_flow.py @@ -69,11 +69,16 @@ async def test_full_flow( with patch( f"homeassistant.components.{DOMAIN}.async_setup_entry", return_value=True ) as mock_setup: - await hass.config_entries.flow.async_configure(result["flow_id"]) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done() assert len(hass.config_entries.async_entries(DOMAIN)) == 1 assert len(mock_setup.mock_calls) == 1 + assert result["data"]["auth_implementation"] == DOMAIN + assert result["data"]["token"]["refresh_token"] == "mock-refresh-token" + assert result["result"].unique_id == UNIQUE_ID + @pytest.mark.usefixtures("current_request_with_host") @pytest.mark.parametrize( From 95107cf6708d11891b92572c4d4e01a5833e079f Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 10 Dec 2024 13:07:08 +0100 Subject: [PATCH 0463/1198] Add check for typed ConfigEntry in quality scale validation (#132028) --- script/hassfest/quality_scale.py | 11 ++- .../quality_scale_validation/__init__.py | 4 +- .../config_entry_unloading.py | 2 +- .../quality_scale_validation/config_flow.py | 2 +- .../quality_scale_validation/diagnostics.py | 2 +- .../quality_scale_validation/discovery.py | 2 +- .../parallel_updates.py | 2 +- .../reauthentication_flow.py | 2 +- .../reconfiguration_flow.py | 2 +- .../quality_scale_validation/runtime_data.py | 90 +++++++++++++++++-- .../quality_scale_validation/strict_typing.py | 2 +- .../unique_config_entry.py | 2 +- 12 files changed, 101 insertions(+), 22 deletions(-) diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index ff67bbbe416..9f6d1e0b783 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -1348,16 +1348,19 @@ def validate_iqs_file(config: Config, integration: Integration) -> None: "quality_scale", f"Invalid {name}: {humanize_error(data, err)}" ) + rules_done = set[str]() rules_met = set[str]() for rule_name, rule_value in data.get("rules", {}).items(): status = rule_value["status"] if isinstance(rule_value, dict) else rule_value if status not in {"done", "exempt"}: continue rules_met.add(rule_name) - if ( - status == "done" - and (validator := VALIDATORS.get(rule_name)) - and (errors := validator.validate(integration)) + if status == "done": + rules_done.add(rule_name) + + for rule_name in rules_done: + if (validator := VALIDATORS.get(rule_name)) and ( + errors := validator.validate(integration, rules_done=rules_done) ): for error in errors: integration.add_error("quality_scale", f"[{rule_name}] {error}") diff --git a/script/hassfest/quality_scale_validation/__init__.py b/script/hassfest/quality_scale_validation/__init__.py index 836c1082763..892bb70fabd 100644 --- a/script/hassfest/quality_scale_validation/__init__.py +++ b/script/hassfest/quality_scale_validation/__init__.py @@ -8,7 +8,9 @@ from script.hassfest.model import Integration class RuleValidationProtocol(Protocol): """Protocol for rule validation.""" - def validate(self, 
integration: Integration) -> list[str] | None: + def validate( + self, integration: Integration, *, rules_done: set[str] + ) -> list[str] | None: """Validate a quality scale rule. Returns error (if any). diff --git a/script/hassfest/quality_scale_validation/config_entry_unloading.py b/script/hassfest/quality_scale_validation/config_entry_unloading.py index b25a72e427f..fb636a7f2ed 100644 --- a/script/hassfest/quality_scale_validation/config_entry_unloading.py +++ b/script/hassfest/quality_scale_validation/config_entry_unloading.py @@ -17,7 +17,7 @@ def _has_unload_entry_function(module: ast.Module) -> bool: ) -def validate(integration: Integration) -> list[str] | None: +def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: """Validate that the integration has a config flow.""" init_file = integration.path / "__init__.py" diff --git a/script/hassfest/quality_scale_validation/config_flow.py b/script/hassfest/quality_scale_validation/config_flow.py index e1361d6550f..6e88aa462f4 100644 --- a/script/hassfest/quality_scale_validation/config_flow.py +++ b/script/hassfest/quality_scale_validation/config_flow.py @@ -6,7 +6,7 @@ https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/c from script.hassfest.model import Integration -def validate(integration: Integration) -> list[str] | None: +def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: """Validate that the integration implements config flow.""" if not integration.config_flow: diff --git a/script/hassfest/quality_scale_validation/diagnostics.py b/script/hassfest/quality_scale_validation/diagnostics.py index d3ef38474f8..44012208bcb 100644 --- a/script/hassfest/quality_scale_validation/diagnostics.py +++ b/script/hassfest/quality_scale_validation/diagnostics.py @@ -22,7 +22,7 @@ def _has_diagnostics_function(module: ast.Module) -> bool: ) -def validate(integration: Integration) -> list[str] | None: +def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: """Validate that the integration implements diagnostics.""" diagnostics_file = integration.path / "diagnostics.py" diff --git a/script/hassfest/quality_scale_validation/discovery.py b/script/hassfest/quality_scale_validation/discovery.py index 66a08456314..db50cdba55a 100644 --- a/script/hassfest/quality_scale_validation/discovery.py +++ b/script/hassfest/quality_scale_validation/discovery.py @@ -38,7 +38,7 @@ def _has_discovery_function(module: ast.Module) -> bool: ) -def validate(integration: Integration) -> list[str] | None: +def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: """Validate that the integration implements diagnostics.""" config_flow_file = integration.path / "config_flow.py" diff --git a/script/hassfest/quality_scale_validation/parallel_updates.py b/script/hassfest/quality_scale_validation/parallel_updates.py index 74ec55991f9..3483a44f504 100644 --- a/script/hassfest/quality_scale_validation/parallel_updates.py +++ b/script/hassfest/quality_scale_validation/parallel_updates.py @@ -18,7 +18,7 @@ def _has_parallel_updates_defined(module: ast.Module) -> bool: ) -def validate(integration: Integration) -> list[str] | None: +def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: """Validate that the integration sets PARALLEL_UPDATES constant.""" errors = [] diff --git a/script/hassfest/quality_scale_validation/reauthentication_flow.py b/script/hassfest/quality_scale_validation/reauthentication_flow.py 
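
A small, self-contained sketch of the AST technique the runtime_data validator in this patch relies on: parse a module and check whether a given parameter carries a *ConfigEntry-style annotation. The sample source string and the printed check are illustrative only; the real validator works on integration files via ast_parse_module.

# Sketch only: check a function parameter's annotation with the ast module.
import ast
import re

_ANNOTATION_MATCH = re.compile(r"^[A-Za-z]+ConfigEntry$")

SAMPLE = '''
async def async_setup_entry(hass: HomeAssistant, entry: MyConfigEntry) -> bool:
    entry.runtime_data = object()
    return True
'''

module = ast.parse(SAMPLE)
setup_entry = next(
    node
    for node in module.body
    if isinstance(node, ast.AsyncFunctionDef) and node.name == "async_setup_entry"
)
second_arg = setup_entry.args.args[1]  # the config entry parameter
annotation = second_arg.annotation
print(
    isinstance(annotation, ast.Name) and bool(_ANNOTATION_MATCH.match(annotation.id))
)  # True: "MyConfigEntry" matches the typed-ConfigEntry pattern
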
index 4ae8fed5696..81d34ec4f7f 100644 --- a/script/hassfest/quality_scale_validation/reauthentication_flow.py +++ b/script/hassfest/quality_scale_validation/reauthentication_flow.py @@ -17,7 +17,7 @@ def _has_step_reauth_function(module: ast.Module) -> bool: ) -def validate(integration: Integration) -> list[str] | None: +def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: """Validate that the integration has a reauthentication flow.""" config_flow_file = integration.path / "config_flow.py" diff --git a/script/hassfest/quality_scale_validation/reconfiguration_flow.py b/script/hassfest/quality_scale_validation/reconfiguration_flow.py index 19192cb28d0..b27475e8c70 100644 --- a/script/hassfest/quality_scale_validation/reconfiguration_flow.py +++ b/script/hassfest/quality_scale_validation/reconfiguration_flow.py @@ -17,7 +17,7 @@ def _has_step_reconfigure_function(module: ast.Module) -> bool: ) -def validate(integration: Integration) -> list[str] | None: +def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: """Validate that the integration has a reconfiguration flow.""" config_flow_file = integration.path / "config_flow.py" diff --git a/script/hassfest/quality_scale_validation/runtime_data.py b/script/hassfest/quality_scale_validation/runtime_data.py index c426496636b..8ad721a218c 100644 --- a/script/hassfest/quality_scale_validation/runtime_data.py +++ b/script/hassfest/quality_scale_validation/runtime_data.py @@ -4,10 +4,31 @@ https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/r """ import ast +import re +from homeassistant.const import Platform from script.hassfest import ast_parse_module from script.hassfest.model import Integration +_ANNOTATION_MATCH = re.compile(r"^[A-Za-z]+ConfigEntry$") +_FUNCTIONS: dict[str, dict[str, int]] = { + "__init__": { # based on ComponentProtocol + "async_migrate_entry": 2, + "async_remove_config_entry_device": 2, + "async_remove_entry": 2, + "async_setup_entry": 2, + "async_unload_entry": 2, + }, + "diagnostics": { # based on DiagnosticsProtocol + "async_get_config_entry_diagnostics": 2, + "async_get_device_diagnostics": 2, + }, +} +for platform in Platform: # based on EntityPlatformModule + _FUNCTIONS[platform.value] = { + "async_setup_entry": 2, + } + def _sets_runtime_data( async_setup_entry_function: ast.AsyncFunctionDef, config_entry_argument: ast.arg @@ -25,30 +46,83 @@ def _sets_runtime_data( return False -def _get_setup_entry_function(module: ast.Module) -> ast.AsyncFunctionDef | None: - """Get async_setup_entry function.""" +def _get_async_function(module: ast.Module, name: str) -> ast.AsyncFunctionDef | None: + """Get async function.""" for item in module.body: - if isinstance(item, ast.AsyncFunctionDef) and item.name == "async_setup_entry": + if isinstance(item, ast.AsyncFunctionDef) and item.name == name: return item return None -def validate(integration: Integration) -> list[str] | None: +def _check_function_annotation( + function: ast.AsyncFunctionDef, position: int +) -> str | None: + """Ensure function uses CustomConfigEntry type annotation.""" + if len(function.args.args) < position: + return f"{function.name} has incorrect signature" + argument = function.args.args[position - 1] + if not ( + (annotation := argument.annotation) + and isinstance(annotation, ast.Name) + and _ANNOTATION_MATCH.match(annotation.id) + ): + return f"([+ strict-typing]) {function.name} does not use typed ConfigEntry" + return None + + +def 
_check_typed_config_entry(integration: Integration) -> list[str]:
+    """Ensure integration uses CustomConfigEntry type annotation."""
+    errors: list[str] = []
+    # Check body level function annotations
+    for file, functions in _FUNCTIONS.items():
+        module_file = integration.path / f"{file}.py"
+        if not module_file.exists():
+            continue
+        module = ast_parse_module(module_file)
+        for function, position in functions.items():
+            if not (async_function := _get_async_function(module, function)):
+                continue
+            if error := _check_function_annotation(async_function, position):
+                errors.append(f"{error} in {module_file}")
+
+    # Check config_flow annotations
+    config_flow_file = integration.path / "config_flow.py"
+    config_flow = ast_parse_module(config_flow_file)
+    for node in config_flow.body:
+        if not isinstance(node, ast.ClassDef):
+            continue
+        if any(
+            isinstance(async_function, ast.FunctionDef)
+            and async_function.name == "async_get_options_flow"
+            and (error := _check_function_annotation(async_function, 1))
+            for async_function in node.body
+        ):
+            errors.append(f"{error} in {config_flow_file}")
+
+    return errors
+
+
+def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None:
     """Validate correct use of ConfigEntry.runtime_data."""
     init_file = integration.path / "__init__.py"
     init = ast_parse_module(init_file)
 
     # Should not happen, but better to be safe
-    if not (async_setup_entry := _get_setup_entry_function(init)):
+    if not (async_setup_entry := _get_async_function(init, "async_setup_entry")):
         return [f"Could not find `async_setup_entry` in {init_file}"]
     if len(async_setup_entry.args.args) != 2:
         return [f"async_setup_entry has incorrect signature in {init_file}"]
     config_entry_argument = async_setup_entry.args.args[1]
 
+    errors: list[str] = []
     if not _sets_runtime_data(async_setup_entry, config_entry_argument):
-        return [
+        errors.append(
             "Integration does not set entry.runtime_data in async_setup_entry"
             f"({init_file})"
-        ]
+        )
 
-    return None
+    # Extra checks, if strict-typing is marked as done
+    if "strict-typing" in rules_done:
+        errors.extend(_check_typed_config_entry(integration))
+
+    return errors
diff --git a/script/hassfest/quality_scale_validation/strict_typing.py b/script/hassfest/quality_scale_validation/strict_typing.py
index 285746a9eb6..a7755b6bb40 100644
--- a/script/hassfest/quality_scale_validation/strict_typing.py
+++ b/script/hassfest/quality_scale_validation/strict_typing.py
@@ -24,7 +24,7 @@ def _strict_typing_components() -> set[str]:
     )
 
 
-def validate(integration: Integration) -> list[str] | None:
+def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None:
     """Validate that the integration has strict typing enabled."""
 
     if integration.domain not in _strict_typing_components():
diff --git a/script/hassfest/quality_scale_validation/unique_config_entry.py b/script/hassfest/quality_scale_validation/unique_config_entry.py
index bf9991d5635..8c38923e584 100644
--- a/script/hassfest/quality_scale_validation/unique_config_entry.py
+++ b/script/hassfest/quality_scale_validation/unique_config_entry.py
@@ -30,7 +30,7 @@ def _has_abort_unique_id_configured(module: ast.Module) -> bool:
     )
 
 
-def validate(integration: Integration) -> list[str] | None:
+def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None:
     """Validate that the integration prevents duplicate devices."""
 
     if integration.manifest.get("single_config_entry"):
From 25d092c8eb1c4fe115ccf55f7fa3adcba4631d01 Mon Sep 17 00:00:00 2001
From: Robert Resch
Date: 
Tue, 10 Dec 2024 13:31:22 +0100 Subject: [PATCH 0464/1198] Bump deebot-client to 9.3.0 (#132834) --- homeassistant/components/ecovacs/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index ad154b8f284..b9315e0c1c6 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.10", "deebot-client==9.2.0"] + "requirements": ["py-sucks==0.9.10", "deebot-client==9.3.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 360f6a159dd..6397d3673c7 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -735,7 +735,7 @@ debugpy==1.8.8 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==9.2.0 +deebot-client==9.3.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 185fdae7bd5..fbc7462ac03 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -625,7 +625,7 @@ dbus-fast==2.24.3 debugpy==1.8.8 # homeassistant.components.ecovacs -deebot-client==9.2.0 +deebot-client==9.3.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns From 6f3a2305249ca5562c4d1e612f706e5f8777c99a Mon Sep 17 00:00:00 2001 From: Xiretza Date: Tue, 10 Dec 2024 12:47:20 +0000 Subject: [PATCH 0465/1198] spaceapi: fix sensor values (#132099) --- homeassistant/components/spaceapi/__init__.py | 13 ++++++- tests/components/spaceapi/test_init.py | 36 +++++++++++++++---- 2 files changed, 42 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/spaceapi/__init__.py b/homeassistant/components/spaceapi/__init__.py index 93d448bd17f..90281fe311c 100644 --- a/homeassistant/components/spaceapi/__init__.py +++ b/homeassistant/components/spaceapi/__init__.py @@ -1,6 +1,7 @@ """Support for the SpaceAPI.""" from contextlib import suppress +import math import voluptuous as vol @@ -254,7 +255,17 @@ class APISpaceApiView(HomeAssistantView): """Get data from a sensor.""" if not (sensor_state := hass.states.get(sensor)): return None - sensor_data = {ATTR_NAME: sensor_state.name, ATTR_VALUE: sensor_state.state} + + # SpaceAPI sensor values must be numbers + try: + state = float(sensor_state.state) + except ValueError: + state = math.nan + sensor_data = { + ATTR_NAME: sensor_state.name, + ATTR_VALUE: state, + } + if ATTR_SENSOR_LOCATION in sensor_state.attributes: sensor_data[ATTR_LOCATION] = sensor_state.attributes[ATTR_SENSOR_LOCATION] else: diff --git a/tests/components/spaceapi/test_init.py b/tests/components/spaceapi/test_init.py index 0de96d05605..8c0e897947a 100644 --- a/tests/components/spaceapi/test_init.py +++ b/tests/components/spaceapi/test_init.py @@ -6,7 +6,12 @@ from unittest.mock import patch from aiohttp.test_utils import TestClient import pytest -from homeassistant.components.spaceapi import DOMAIN, SPACEAPI_VERSION, URL_API_SPACEAPI +from homeassistant.components.spaceapi import ( + ATTR_SENSOR_LOCATION, + DOMAIN, + SPACEAPI_VERSION, + URL_API_SPACEAPI, +) from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, PERCENTAGE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.setup import 
async_setup_component @@ -27,7 +32,7 @@ CONFIG = { "icon_closed": "https://home-assistant.io/close.png", }, "sensors": { - "temperature": ["test.temp1", "test.temp2"], + "temperature": ["test.temp1", "test.temp2", "test.temp3"], "humidity": ["test.hum1"], }, "spacefed": {"spacenet": True, "spacesaml": False, "spacephone": True}, @@ -67,17 +72,23 @@ SENSOR_OUTPUT = { "location": "Home", "name": "temp1", "unit": UnitOfTemperature.CELSIUS, - "value": "25", + "value": 25.0, + }, + { + "location": "outside", + "name": "temp2", + "unit": UnitOfTemperature.CELSIUS, + "value": 23.0, }, { "location": "Home", - "name": "temp2", + "name": "temp3", "unit": UnitOfTemperature.CELSIUS, - "value": "23", + "value": None, }, ], "humidity": [ - {"location": "Home", "name": "hum1", "unit": PERCENTAGE, "value": "88"} + {"location": "Home", "name": "hum1", "unit": PERCENTAGE, "value": 88.0} ], } @@ -96,6 +107,19 @@ def mock_client(hass: HomeAssistant, hass_client: ClientSessionGenerator) -> Tes hass.states.async_set( "test.temp2", 23, + attributes={ + ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS, + ATTR_SENSOR_LOCATION: "outside", + }, + ) + hass.states.async_set( + "test.temp3", + "foo", + attributes={ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, + ) + hass.states.async_set( + "test.temp3", + "foo", attributes={ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, ) hass.states.async_set( From 416a4c02b42345a7b41a56e72c908f27c84cc07c Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 10 Dec 2024 13:55:28 +0100 Subject: [PATCH 0466/1198] Migrate hue lights to use Kelvin (#132835) --- homeassistant/components/hue/v1/light.py | 50 ++++++++++++---------- homeassistant/components/hue/v2/group.py | 23 +++++++--- homeassistant/components/hue/v2/helpers.py | 15 ++++--- homeassistant/components/hue/v2/light.py | 39 +++++++++-------- 4 files changed, 76 insertions(+), 51 deletions(-) diff --git a/homeassistant/components/hue/v1/light.py b/homeassistant/components/hue/v1/light.py index 76dd0fce12b..78a06784b8d 100644 --- a/homeassistant/components/hue/v1/light.py +++ b/homeassistant/components/hue/v1/light.py @@ -12,7 +12,7 @@ import aiohue from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, @@ -35,7 +35,7 @@ from homeassistant.helpers.update_coordinator import ( DataUpdateCoordinator, UpdateFailed, ) -from homeassistant.util import color +from homeassistant.util import color as color_util from ..bridge import HueBridge from ..const import ( @@ -362,7 +362,7 @@ class HueLight(CoordinatorEntity, LightEntity): "bulb in the Philips Hue App." ) LOGGER.warning(err, self.name) - if self.gamut and not color.check_valid_gamut(self.gamut): + if self.gamut and not color_util.check_valid_gamut(self.gamut): err = "Color gamut of %s: %s, not valid, setting gamut to None." 
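
The mired-to-Kelvin relationship behind this Hue migration, as a short sketch. The two helpers below are illustrative stand-ins for homeassistant.util.color's color_temperature_mired_to_kelvin / color_temperature_kelvin_to_mired; exact rounding in the real helpers may differ.

# Sketch only: mired and Kelvin are reciprocals scaled by one million.
def mired_to_kelvin(mired: float) -> int:
    return round(1_000_000 / mired)


def kelvin_to_mired(kelvin: float) -> int:
    return round(1_000_000 / kelvin)


print(mired_to_kelvin(153))   # 6536 (coldest value Hue accepts)
print(mired_to_kelvin(500))   # 2000 (warmest value Hue accepts)
print(kelvin_to_mired(6500))  # 154
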
LOGGER.debug(err, self.name, str(self.gamut))
            self.gamut_typ = GAMUT_TYPE_UNAVAILABLE
@@ -427,49 +427,50 @@ class HueLight(CoordinatorEntity, LightEntity):
         source = self.light.action if self.is_group else self.light.state
         if mode in ("xy", "hs") and "xy" in source:
-            return color.color_xy_to_hs(*source["xy"], self.gamut)
+            return color_util.color_xy_to_hs(*source["xy"], self.gamut)
         return None
 
     @property
-    def color_temp(self):
-        """Return the CT color value."""
+    def color_temp_kelvin(self) -> int | None:
+        """Return the color temperature value in Kelvin."""
         # Don't return color temperature unless in color temperature mode
         if self._color_mode != "ct":
             return None
-        if self.is_group:
-            return self.light.action.get("ct")
-        return self.light.state.get("ct")
+        ct = (
+            self.light.action.get("ct") if self.is_group else self.light.state.get("ct")
+        )
+        return color_util.color_temperature_mired_to_kelvin(ct) if ct else None
 
     @property
-    def min_mireds(self):
-        """Return the coldest color_temp that this light supports."""
+    def max_color_temp_kelvin(self) -> int:
+        """Return the coldest color_temp_kelvin that this light supports."""
         if self.is_group:
-            return super().min_mireds
+            return super().max_color_temp_kelvin
 
         min_mireds = self.light.controlcapabilities.get("ct", {}).get("min")
 
         # We filter out '0' too, which can be incorrectly reported by 3rd party buls
         if not min_mireds:
-            return super().min_mireds
+            return super().max_color_temp_kelvin
 
-        return min_mireds
+        return color_util.color_temperature_mired_to_kelvin(min_mireds)
 
     @property
-    def max_mireds(self):
-        """Return the warmest color_temp that this light supports."""
+    def min_color_temp_kelvin(self) -> int:
+        """Return the warmest color_temp_kelvin that this light supports."""
         if self.is_group:
-            return super().max_mireds
+            return super().min_color_temp_kelvin
 
         if self.is_livarno:
             return 500
 
         max_mireds = self.light.controlcapabilities.get("ct", {}).get("max")
         if not max_mireds:
-            return super().max_mireds
+            return super().min_color_temp_kelvin
 
-        return max_mireds
+        return color_util.color_temperature_mired_to_kelvin(max_mireds)
 
     @property
     def is_on(self):
@@ -541,11 +542,14 @@ class HueLight(CoordinatorEntity, LightEntity):
                 # Philips hue bulb models respond differently to hue/sat
                 # requests, so we convert to XY first to ensure a consistent
                # color.
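
A sketch of how a Kelvin request from Home Assistant is clamped into the bridge's accepted mired window before it is sent in the "ct" command. It mirrors the normalize_hue_colortemp() change further down in this patch; kelvin_to_mired is the same illustrative stand-in as in the sketch above, not the real util function.

# Sketch only: convert a requested Kelvin value and clamp it to Hue's 153..500 mired window.
def kelvin_to_mired(kelvin: float) -> int:
    return round(1_000_000 / kelvin)


def normalize_hue_colortemp(colortemp_k: int | None) -> int | None:
    if colortemp_k is None:
        return None
    colortemp = kelvin_to_mired(colortemp_k)
    # Hue only accepts a range between 153..500
    colortemp = min(colortemp, 500)
    return max(colortemp, 153)


print(normalize_hue_colortemp(6500))   # 154
print(normalize_hue_colortemp(10000))  # 153, clamped at the cold end
print(normalize_hue_colortemp(1000))   # 500, clamped at the warm end
print(normalize_hue_colortemp(None))   # None, no color temperature requested
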
- xy_color = color.color_hs_to_xy(*kwargs[ATTR_HS_COLOR], self.gamut) + xy_color = color_util.color_hs_to_xy(*kwargs[ATTR_HS_COLOR], self.gamut) command["xy"] = xy_color - elif ATTR_COLOR_TEMP in kwargs: - temp = kwargs[ATTR_COLOR_TEMP] - command["ct"] = max(self.min_mireds, min(temp, self.max_mireds)) + elif ATTR_COLOR_TEMP_KELVIN in kwargs: + temp_k = max( + self.min_color_temp_kelvin, + min(self.max_color_temp_kelvin, kwargs[ATTR_COLOR_TEMP_KELVIN]), + ) + command["ct"] = color_util.color_temperature_kelvin_to_mired(temp_k) if ATTR_BRIGHTNESS in kwargs: command["bri"] = hass_to_hue_brightness(kwargs[ATTR_BRIGHTNESS]) diff --git a/homeassistant/components/hue/v2/group.py b/homeassistant/components/hue/v2/group.py index 97ff6feffa5..c7f966ce9f2 100644 --- a/homeassistant/components/hue/v2/group.py +++ b/homeassistant/components/hue/v2/group.py @@ -12,7 +12,7 @@ from aiohue.v2.models.feature import DynamicStatus from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_FLASH, ATTR_TRANSITION, ATTR_XY_COLOR, @@ -27,6 +27,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.helpers.entity_registry as er +from homeassistant.util import color as color_util from ..bridge import HueBridge from ..const import DOMAIN @@ -157,7 +158,7 @@ class GroupedHueLight(HueBaseEntity, LightEntity): """Turn the grouped_light on.""" transition = normalize_hue_transition(kwargs.get(ATTR_TRANSITION)) xy_color = kwargs.get(ATTR_XY_COLOR) - color_temp = normalize_hue_colortemp(kwargs.get(ATTR_COLOR_TEMP)) + color_temp = normalize_hue_colortemp(kwargs.get(ATTR_COLOR_TEMP_KELVIN)) brightness = normalize_hue_brightness(kwargs.get(ATTR_BRIGHTNESS)) flash = kwargs.get(ATTR_FLASH) @@ -235,9 +236,21 @@ class GroupedHueLight(HueBaseEntity, LightEntity): if color_temp := light.color_temperature: lights_with_color_temp_support += 1 # we assume mired values from the first capable light - self._attr_color_temp = color_temp.mirek - self._attr_max_mireds = color_temp.mirek_schema.mirek_maximum - self._attr_min_mireds = color_temp.mirek_schema.mirek_minimum + self._attr_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(color_temp.mirek) + if color_temp.mirek + else None + ) + self._attr_min_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin( + color_temp.mirek_schema.mirek_maximum + ) + ) + self._attr_max_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin( + color_temp.mirek_schema.mirek_minimum + ) + ) if color_temp.mirek is not None and color_temp.mirek_valid: lights_in_colortemp_mode += 1 if color := light.color: diff --git a/homeassistant/components/hue/v2/helpers.py b/homeassistant/components/hue/v2/helpers.py index 480296760e7..384d2a30596 100644 --- a/homeassistant/components/hue/v2/helpers.py +++ b/homeassistant/components/hue/v2/helpers.py @@ -2,6 +2,8 @@ from __future__ import annotations +from homeassistant.util import color as color_util + def normalize_hue_brightness(brightness: float | None) -> float | None: """Return calculated brightness values.""" @@ -21,10 +23,11 @@ def normalize_hue_transition(transition: float | None) -> float | None: return transition -def normalize_hue_colortemp(colortemp: int | None) -> int | None: +def normalize_hue_colortemp(colortemp_k: int | None) -> int | None: """Return color temperature within Hue's ranges.""" - if 
colortemp is not None:
-        # Hue only accepts a range between 153..500
-        colortemp = min(colortemp, 500)
-        colortemp = max(colortemp, 153)
-    return colortemp
+    if colortemp_k is None:
+        return None
+    colortemp = color_util.color_temperature_kelvin_to_mired(colortemp_k)
+    # Hue only accepts a range between 153..500
+    colortemp = min(colortemp, 500)
+    return max(colortemp, 153)
diff --git a/homeassistant/components/hue/v2/light.py b/homeassistant/components/hue/v2/light.py
index 053b3c19c2d..86d8cc93e54 100644
--- a/homeassistant/components/hue/v2/light.py
+++ b/homeassistant/components/hue/v2/light.py
@@ -13,7 +13,7 @@ from aiohue.v2.models.light import Light
 from homeassistant.components.light import (
     ATTR_BRIGHTNESS,
-    ATTR_COLOR_TEMP,
+    ATTR_COLOR_TEMP_KELVIN,
     ATTR_EFFECT,
     ATTR_FLASH,
     ATTR_TRANSITION,
@@ -28,6 +28,7 @@ from homeassistant.components.light import (
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.util import color as color_util
 
 from ..bridge import HueBridge
 from ..const import DOMAIN
@@ -39,9 +40,9 @@ from .helpers import (
 )
 
 EFFECT_NONE = "None"
-FALLBACK_MIN_MIREDS = 153  # 6500 K
-FALLBACK_MAX_MIREDS = 500  # 2000 K
-FALLBACK_MIREDS = 173  # halfway
+FALLBACK_MIN_KELVIN = 6500
+FALLBACK_MAX_KELVIN = 2000
+FALLBACK_KELVIN = 5800  # halfway
 
 
 async def async_setup_entry(
@@ -164,28 +165,32 @@ class HueLight(HueBaseEntity, LightEntity):
         return None
 
     @property
-    def color_temp(self) -> int:
-        """Return the color temperature."""
+    def color_temp_kelvin(self) -> int | None:
+        """Return the color temperature value in Kelvin."""
         if color_temp := self.resource.color_temperature:
-            return color_temp.mirek
+            return color_util.color_temperature_mired_to_kelvin(color_temp.mirek)
         # return a fallback value to prevent issues with mired->kelvin conversions
-        return FALLBACK_MIREDS
+        return FALLBACK_KELVIN
 
     @property
-    def min_mireds(self) -> int:
-        """Return the coldest color_temp that this light supports."""
+    def max_color_temp_kelvin(self) -> int:
+        """Return the coldest color_temp_kelvin that this light supports."""
         if color_temp := self.resource.color_temperature:
-            return color_temp.mirek_schema.mirek_minimum
+            return color_util.color_temperature_mired_to_kelvin(
+                color_temp.mirek_schema.mirek_minimum
+            )
         # return a fallback value to prevent issues with mired->kelvin conversions
-        return FALLBACK_MIN_MIREDS
+        return FALLBACK_MAX_KELVIN
 
     @property
-    def max_mireds(self) -> int:
-        """Return the warmest color_temp that this light supports."""
+    def min_color_temp_kelvin(self) -> int:
+        """Return the warmest color_temp_kelvin that this light supports."""
         if color_temp := self.resource.color_temperature:
-            return color_temp.mirek_schema.mirek_maximum
+            return color_util.color_temperature_mired_to_kelvin(
+                color_temp.mirek_schema.mirek_maximum
+            )
         # return a fallback value to prevent issues with mired->kelvin conversions
-        return FALLBACK_MAX_MIREDS
+        return FALLBACK_MIN_KELVIN
 
     @property
     def extra_state_attributes(self) -> dict[str, str] | None:
@@ -210,7 +215,7 @@ class HueLight(HueBaseEntity, LightEntity):
         """Turn the device on."""
         transition = normalize_hue_transition(kwargs.get(ATTR_TRANSITION))
         xy_color = kwargs.get(ATTR_XY_COLOR)
-        color_temp = normalize_hue_colortemp(kwargs.get(ATTR_COLOR_TEMP))
+        color_temp = normalize_hue_colortemp(kwargs.get(ATTR_COLOR_TEMP_KELVIN))
         brightness = normalize_hue_brightness(kwargs.get(ATTR_BRIGHTNESS))
 
         if 
self._last_brightness and brightness is None: # The Hue bridge sets the brightness to 1% when turning on a bulb From 9551a12c9cf2ee73c54d86b2b253a8a3b752b362 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Tue, 10 Dec 2024 13:58:02 +0100 Subject: [PATCH 0467/1198] Add exception translations for Fronius (#132830) * Add exception translations for Fronius * Update sensor.py --- homeassistant/components/fronius/__init__.py | 9 ++++++++- homeassistant/components/fronius/config_flow.py | 5 ++++- homeassistant/components/fronius/coordinator.py | 7 ++++++- homeassistant/components/fronius/sensor.py | 3 +++ homeassistant/components/fronius/strings.json | 11 +++++++++++ 5 files changed, 32 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/fronius/__init__.py b/homeassistant/components/fronius/__init__.py index e30f8e85fa0..03d80e3b2d9 100644 --- a/homeassistant/components/fronius/__init__.py +++ b/homeassistant/components/fronius/__init__.py @@ -226,7 +226,14 @@ class FroniusSolarNet: _LOGGER.debug("Re-scan failed for %s", self.host) return inverter_infos - raise ConfigEntryNotReady from err + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="entry_cannot_connect", + translation_placeholders={ + "host": self.host, + "fronius_error": str(err), + }, + ) from err for inverter in _inverter_info["inverters"]: solar_net_id = inverter["device_id"]["value"] diff --git a/homeassistant/components/fronius/config_flow.py b/homeassistant/components/fronius/config_flow.py index 2adbf2ae2f3..1d5a26984fa 100644 --- a/homeassistant/components/fronius/config_flow.py +++ b/homeassistant/components/fronius/config_flow.py @@ -56,7 +56,10 @@ async def validate_host( _LOGGER.debug(err) raise CannotConnect from err except StopIteration as err: - raise CannotConnect("No supported Fronius SolarNet device found.") from err + raise CannotConnect( + translation_domain=DOMAIN, + translation_key="no_supported_device_found", + ) from err first_inverter_uid: str = first_inverter["unique_id"]["value"] return first_inverter_uid, FroniusConfigEntryData( host=host, diff --git a/homeassistant/components/fronius/coordinator.py b/homeassistant/components/fronius/coordinator.py index c3dea123a77..d4f1fc6c230 100644 --- a/homeassistant/components/fronius/coordinator.py +++ b/homeassistant/components/fronius/coordinator.py @@ -13,6 +13,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import ( + DOMAIN, SOLAR_NET_ID_POWER_FLOW, SOLAR_NET_ID_SYSTEM, FroniusDeviceInfo, @@ -67,7 +68,11 @@ class FroniusCoordinatorBase( self._failed_update_count += 1 if self._failed_update_count == self.MAX_FAILED_UPDATES: self.update_interval = self.error_interval - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_failed", + translation_placeholders={"fronius_error": str(err)}, + ) from err if self._failed_update_count != 0: self._failed_update_count = 0 diff --git a/homeassistant/components/fronius/sensor.py b/homeassistant/components/fronius/sensor.py index c8a840b1c2c..95c5df269e4 100644 --- a/homeassistant/components/fronius/sensor.py +++ b/homeassistant/components/fronius/sensor.py @@ -54,6 +54,9 @@ if TYPE_CHECKING: FroniusStorageUpdateCoordinator, ) + +PARALLEL_UPDATES = 0 + ENERGY_VOLT_AMPERE_REACTIVE_HOUR: Final = "varh" diff --git a/homeassistant/components/fronius/strings.json 
b/homeassistant/components/fronius/strings.json index dfdcfc0ddb2..86348a0e2d7 100644 --- a/homeassistant/components/fronius/strings.json +++ b/homeassistant/components/fronius/strings.json @@ -294,5 +294,16 @@ "name": "[%key:component::sensor::entity_component::temperature::name%]" } } + }, + "exceptions": { + "no_supported_device_found": { + "message": "No supported Fronius SolarNet device found." + }, + "entry_cannot_connect": { + "message": "Failed to connect to Fronius device at {host}: {fronius_error}" + }, + "update_failed": { + "message": "An error occurred while attempting to fetch data: {fronius_error}" + } } } From 1a60f0e668285308f81202a1fbc410221eecb9ec Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Tue, 10 Dec 2024 14:22:49 +0100 Subject: [PATCH 0468/1198] Bump aioacaia to 0.1.11 (#132838) --- homeassistant/components/acaia/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/acaia/manifest.json b/homeassistant/components/acaia/manifest.json index 3f3e1c14d58..c1f1fdd7a81 100644 --- a/homeassistant/components/acaia/manifest.json +++ b/homeassistant/components/acaia/manifest.json @@ -25,5 +25,5 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["aioacaia"], - "requirements": ["aioacaia==0.1.10"] + "requirements": ["aioacaia==0.1.11"] } diff --git a/requirements_all.txt b/requirements_all.txt index 6397d3673c7..0ef33d06220 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -173,7 +173,7 @@ aio-geojson-usgs-earthquakes==0.3 aio-georss-gdacs==0.10 # homeassistant.components.acaia -aioacaia==0.1.10 +aioacaia==0.1.11 # homeassistant.components.airq aioairq==0.4.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index fbc7462ac03..3a57a4e2a19 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -161,7 +161,7 @@ aio-geojson-usgs-earthquakes==0.3 aio-georss-gdacs==0.10 # homeassistant.components.acaia -aioacaia==0.1.10 +aioacaia==0.1.11 # homeassistant.components.airq aioairq==0.4.3 From 9614a8d1ca7dbcb9b265141b3cb52d6d35344bdf Mon Sep 17 00:00:00 2001 From: David Knowles Date: Tue, 10 Dec 2024 08:23:14 -0500 Subject: [PATCH 0469/1198] Pass an application identifier to the Hydrawise API (#132779) --- homeassistant/components/hydrawise/__init__.py | 5 +++-- homeassistant/components/hydrawise/config_flow.py | 4 ++-- homeassistant/components/hydrawise/const.py | 4 ++++ 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/hydrawise/__init__.py b/homeassistant/components/hydrawise/__init__.py index 9e402cd4932..ea5a5801e69 100644 --- a/homeassistant/components/hydrawise/__init__.py +++ b/homeassistant/components/hydrawise/__init__.py @@ -7,7 +7,7 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed -from .const import DOMAIN +from .const import APP_ID, DOMAIN from .coordinator import ( HydrawiseMainDataUpdateCoordinator, HydrawiseUpdateCoordinators, @@ -30,7 +30,8 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b raise ConfigEntryAuthFailed hydrawise = client.Hydrawise( - auth.Auth(config_entry.data[CONF_USERNAME], config_entry.data[CONF_PASSWORD]) + auth.Auth(config_entry.data[CONF_USERNAME], config_entry.data[CONF_PASSWORD]), + app_id=APP_ID, ) main_coordinator = HydrawiseMainDataUpdateCoordinator(hass, 
hydrawise) diff --git a/homeassistant/components/hydrawise/config_flow.py b/homeassistant/components/hydrawise/config_flow.py index 419927d6d42..5af32af3951 100644 --- a/homeassistant/components/hydrawise/config_flow.py +++ b/homeassistant/components/hydrawise/config_flow.py @@ -13,7 +13,7 @@ import voluptuous as vol from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME -from .const import DOMAIN, LOGGER +from .const import APP_ID, DOMAIN, LOGGER class HydrawiseConfigFlow(ConfigFlow, domain=DOMAIN): @@ -39,7 +39,7 @@ class HydrawiseConfigFlow(ConfigFlow, domain=DOMAIN): return on_failure("timeout_connect") try: - api = client.Hydrawise(auth) + api = client.Hydrawise(auth, app_id=APP_ID) # Don't fetch zones because we don't need them yet. user = await api.get_user(fetch_zones=False) except TimeoutError: diff --git a/homeassistant/components/hydrawise/const.py b/homeassistant/components/hydrawise/const.py index 6d846dd6127..beaf450a586 100644 --- a/homeassistant/components/hydrawise/const.py +++ b/homeassistant/components/hydrawise/const.py @@ -3,8 +3,12 @@ from datetime import timedelta import logging +from homeassistant.const import __version__ as HA_VERSION + LOGGER = logging.getLogger(__package__) +APP_ID = f"homeassistant-{HA_VERSION}" + DOMAIN = "hydrawise" DEFAULT_WATERING_TIME = timedelta(minutes=15) From 0a786394f52a93ed0608ee71e43e9b260bbf9b83 Mon Sep 17 00:00:00 2001 From: Guido Schmitz Date: Tue, 10 Dec 2024 15:15:57 +0100 Subject: [PATCH 0470/1198] Add data descriptions to devolo Home Control (#132703) --- .../components/devolo_home_control/strings.json | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/devolo_home_control/strings.json b/homeassistant/components/devolo_home_control/strings.json index eeae9aa2e2f..1eaf64564c2 100644 --- a/homeassistant/components/devolo_home_control/strings.json +++ b/homeassistant/components/devolo_home_control/strings.json @@ -12,15 +12,21 @@ "user": { "data": { "username": "Email / devolo ID", - "password": "[%key:common::config_flow::data::password%]", - "mydevolo_url": "mydevolo URL" + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "username": "Email address you used to register the central unit at mydevolo.", + "password": "Password of your mydevolo account." 
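
A short sketch of the app-identifier pattern from the Hydrawise patch above: derive an identifier from the running Home Assistant version and hand it to the API client so the remote service can attribute the traffic. DemoClient is a made-up stand-in for pydrawise's client.Hydrawise(auth, app_id=...); only the APP_ID construction mirrors the patch.

# Sketch only: build a version-derived app id and pass it to an API client.
from homeassistant.const import __version__ as HA_VERSION

APP_ID = f"homeassistant-{HA_VERSION}"


class DemoClient:
    """Stand-in for pydrawise's client.Hydrawise(auth, app_id=...)."""

    def __init__(self, auth: object, *, app_id: str | None = None) -> None:
        self.auth = auth
        self.app_id = app_id


client = DemoClient(object(), app_id=APP_ID)
print(client.app_id)  # e.g. "homeassistant-2024.12.1"
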
} }, "zeroconf_confirm": { "data": { "username": "[%key:component::devolo_home_control::config::step::user::data::username%]", - "password": "[%key:common::config_flow::data::password%]", - "mydevolo_url": "[%key:component::devolo_home_control::config::step::user::data::mydevolo_url%]" + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "username": "[%key:component::devolo_home_control::config::step::user::data_description::username%]", + "password": "[%key:component::devolo_home_control::config::step::user::data_description::password%]" } } } From 7014317e9e4859bed0113e712afd4c8df15e3405 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 10 Dec 2024 15:29:33 +0100 Subject: [PATCH 0471/1198] Cleanup unnecessary mired attributes in esphome (#132833) * Cleanup unnecessary mired attributes in esphome * Adjust --- homeassistant/components/esphome/light.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/esphome/light.py b/homeassistant/components/esphome/light.py index 52f999afe4f..8fecf34862b 100644 --- a/homeassistant/components/esphome/light.py +++ b/homeassistant/components/esphome/light.py @@ -414,11 +414,8 @@ class EsphomeLight(EsphomeEntity[LightInfo, LightState], LightEntity): self._attr_supported_color_modes = supported self._attr_effect_list = static_info.effects - self._attr_min_mireds = round(static_info.min_mireds) - self._attr_max_mireds = round(static_info.max_mireds) - if ColorMode.COLOR_TEMP in supported: - self._attr_min_color_temp_kelvin = _mired_to_kelvin(static_info.max_mireds) - self._attr_max_color_temp_kelvin = _mired_to_kelvin(static_info.min_mireds) + self._attr_min_color_temp_kelvin = _mired_to_kelvin(static_info.max_mireds) + self._attr_max_color_temp_kelvin = _mired_to_kelvin(static_info.min_mireds) async_setup_entry = partial( From 6a323a1d3cc4988fb0c0de7661f21814cb073db8 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 10 Dec 2024 15:32:08 +0100 Subject: [PATCH 0472/1198] Fix wrong name attribute in mqtt ignore list (#132831) --- homeassistant/components/mqtt/entity.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/mqtt/entity.py b/homeassistant/components/mqtt/entity.py index c73e1975a68..fb047cc8d5e 100644 --- a/homeassistant/components/mqtt/entity.py +++ b/homeassistant/components/mqtt/entity.py @@ -137,7 +137,7 @@ MQTT_ATTRIBUTES_BLOCKED = { "extra_state_attributes", "force_update", "icon", - "name", + "friendly_name", "should_poll", "state", "supported_features", From 2b17037edcefdbc1c5835385fbe2b7f1a9dc6d18 Mon Sep 17 00:00:00 2001 From: Tom Date: Tue, 10 Dec 2024 16:43:08 +0100 Subject: [PATCH 0473/1198] Plugwise improve platform tests (#132748) --- homeassistant/components/plugwise/entity.py | 5 - .../components/plugwise/quality_scale.yaml | 8 +- .../fixtures/m_adam_cooling/all_data.json | 2 +- tests/components/plugwise/test_climate.py | 189 ++++++++++-------- tests/components/plugwise/test_init.py | 5 +- tests/components/plugwise/test_number.py | 19 ++ tests/components/plugwise/test_select.py | 24 ++- tests/components/plugwise/test_sensor.py | 25 ++- tests/components/plugwise/test_switch.py | 17 +- 9 files changed, 178 insertions(+), 116 deletions(-) diff --git a/homeassistant/components/plugwise/entity.py b/homeassistant/components/plugwise/entity.py index 7b28bf78342..3f63abaff43 100644 --- a/homeassistant/components/plugwise/entity.py +++ 
b/homeassistant/components/plugwise/entity.py @@ -77,8 +77,3 @@ class PlugwiseEntity(CoordinatorEntity[PlugwiseDataUpdateCoordinator]): def device(self) -> GwEntityData: """Return data for this device.""" return self.coordinator.data.devices[self._dev_id] - - async def async_added_to_hass(self) -> None: - """Subscribe to updates.""" - self._handle_coordinator_update() - await super().async_added_to_hass() diff --git a/homeassistant/components/plugwise/quality_scale.yaml b/homeassistant/components/plugwise/quality_scale.yaml index 4bbafc09004..a6b364cf381 100644 --- a/homeassistant/components/plugwise/quality_scale.yaml +++ b/homeassistant/components/plugwise/quality_scale.yaml @@ -14,9 +14,7 @@ rules: action-setup: status: exempt comment: Plugwise integration has no custom actions - common-modules: - status: todo - comment: Verify entity for async_added_to_hass usage (discard?) + common-modules: done docs-high-level-description: status: todo comment: Rewrite top section, docs PR prepared waiting for 36087 merge @@ -37,9 +35,7 @@ rules: parallel-updates: status: todo comment: Using coordinator, but required due to mutable platform - test-coverage: - status: todo - comment: Consider using snapshots + consistency in setup calls + add numerical tests + use fixtures + test-coverage: done integration-owner: done docs-installation-parameters: status: todo diff --git a/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json b/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json index 9c40e50278b..c5afd68bed5 100644 --- a/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json @@ -177,7 +177,7 @@ "off" ], "climate_mode": "cool", - "control_state": "auto", + "control_state": "idle", "dev_class": "climate", "model": "ThermoZone", "name": "Bathroom", diff --git a/tests/components/plugwise/test_climate.py b/tests/components/plugwise/test_climate.py index 6320ab1f96b..8368af8e5cc 100644 --- a/tests/components/plugwise/test_climate.py +++ b/tests/components/plugwise/test_climate.py @@ -8,12 +8,31 @@ from plugwise.exceptions import PlugwiseError import pytest from homeassistant.components.climate import ( + ATTR_CURRENT_TEMPERATURE, + ATTR_HVAC_ACTION, + ATTR_HVAC_MODE, + ATTR_HVAC_MODES, + ATTR_MAX_TEMP, + ATTR_MIN_TEMP, + ATTR_PRESET_MODE, + ATTR_PRESET_MODES, + ATTR_TARGET_TEMP_HIGH, + ATTR_TARGET_TEMP_LOW, + ATTR_TARGET_TEMP_STEP, DOMAIN as CLIMATE_DOMAIN, + PRESET_AWAY, + PRESET_HOME, SERVICE_SET_HVAC_MODE, SERVICE_SET_PRESET_MODE, SERVICE_SET_TEMPERATURE, + HVACAction, HVACMode, ) +from homeassistant.const import ( + ATTR_ENTITY_ID, + ATTR_SUPPORTED_FEATURES, + ATTR_TEMPERATURE, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError @@ -31,33 +50,33 @@ async def test_adam_climate_entity_attributes( state = hass.states.get("climate.woonkamer") assert state assert state.state == HVACMode.AUTO - assert state.attributes["hvac_action"] == "heating" - assert state.attributes["hvac_modes"] == [HVACMode.AUTO, HVACMode.HEAT] - assert "preset_modes" in state.attributes - assert "no_frost" in state.attributes["preset_modes"] - assert "home" in state.attributes["preset_modes"] - assert state.attributes["preset_mode"] == "home" - assert state.attributes["current_temperature"] == 20.9 - assert state.attributes["supported_features"] == 17 - assert state.attributes["temperature"] == 21.5 - assert state.attributes["min_temp"] == 0.0 - assert 
state.attributes["max_temp"] == 35.0 - assert state.attributes["target_temp_step"] == 0.1 + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING + assert state.attributes[ATTR_HVAC_MODES] == [HVACMode.AUTO, HVACMode.HEAT] + assert ATTR_PRESET_MODES in state.attributes + assert "no_frost" in state.attributes[ATTR_PRESET_MODES] + assert PRESET_HOME in state.attributes[ATTR_PRESET_MODES] + assert state.attributes[ATTR_PRESET_MODE] == PRESET_HOME + assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 20.9 + assert state.attributes[ATTR_SUPPORTED_FEATURES] == 17 + assert state.attributes[ATTR_TEMPERATURE] == 21.5 + assert state.attributes[ATTR_MIN_TEMP] == 0.0 + assert state.attributes[ATTR_MAX_TEMP] == 35.0 + assert state.attributes[ATTR_TARGET_TEMP_STEP] == 0.1 state = hass.states.get("climate.jessie") assert state assert state.state == HVACMode.AUTO - assert state.attributes["hvac_action"] == "idle" - assert state.attributes["hvac_modes"] == [HVACMode.AUTO, HVACMode.HEAT] - assert "preset_modes" in state.attributes - assert "no_frost" in state.attributes["preset_modes"] - assert "home" in state.attributes["preset_modes"] - assert state.attributes["preset_mode"] == "asleep" - assert state.attributes["current_temperature"] == 17.2 - assert state.attributes["temperature"] == 15.0 - assert state.attributes["min_temp"] == 0.0 - assert state.attributes["max_temp"] == 35.0 - assert state.attributes["target_temp_step"] == 0.1 + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE + assert state.attributes[ATTR_HVAC_MODES] == [HVACMode.AUTO, HVACMode.HEAT] + assert ATTR_PRESET_MODES in state.attributes + assert "no_frost" in state.attributes[ATTR_PRESET_MODES] + assert PRESET_HOME in state.attributes[ATTR_PRESET_MODES] + assert state.attributes[ATTR_PRESET_MODE] == "asleep" + assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 17.2 + assert state.attributes[ATTR_TEMPERATURE] == 15.0 + assert state.attributes[ATTR_MIN_TEMP] == 0.0 + assert state.attributes[ATTR_MAX_TEMP] == 35.0 + assert state.attributes[ATTR_TARGET_TEMP_STEP] == 0.1 async def test_adam_2_climate_entity_attributes( @@ -67,8 +86,8 @@ async def test_adam_2_climate_entity_attributes( state = hass.states.get("climate.living_room") assert state assert state.state == HVACMode.HEAT - assert state.attributes["hvac_action"] == "preheating" - assert state.attributes["hvac_modes"] == [ + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.PREHEATING + assert state.attributes[ATTR_HVAC_MODES] == [ HVACMode.OFF, HVACMode.AUTO, HVACMode.HEAT, @@ -77,8 +96,8 @@ async def test_adam_2_climate_entity_attributes( state = hass.states.get("climate.bathroom") assert state assert state.state == HVACMode.AUTO - assert state.attributes["hvac_action"] == "idle" - assert state.attributes["hvac_modes"] == [ + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE + assert state.attributes[ATTR_HVAC_MODES] == [ HVACMode.OFF, HVACMode.AUTO, HVACMode.HEAT, @@ -95,8 +114,8 @@ async def test_adam_3_climate_entity_attributes( state = hass.states.get("climate.living_room") assert state assert state.state == HVACMode.COOL - assert state.attributes["hvac_action"] == "cooling" - assert state.attributes["hvac_modes"] == [ + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.COOLING + assert state.attributes[ATTR_HVAC_MODES] == [ HVACMode.OFF, HVACMode.AUTO, HVACMode.COOL, @@ -105,7 +124,9 @@ async def test_adam_3_climate_entity_attributes( data.devices["da224107914542988a88561b4452b0f6"]["select_regulation_mode"] = ( "heating" ) - 
data.devices["f2bf9048bef64cc5b6d5110154e33c81"]["control_state"] = "heating" + data.devices["f2bf9048bef64cc5b6d5110154e33c81"]["control_state"] = ( + HVACAction.HEATING + ) data.devices["056ee145a816487eaa69243c3280f8bf"]["binary_sensors"][ "cooling_state" ] = False @@ -120,8 +141,8 @@ async def test_adam_3_climate_entity_attributes( state = hass.states.get("climate.living_room") assert state assert state.state == HVACMode.HEAT - assert state.attributes["hvac_action"] == "heating" - assert state.attributes["hvac_modes"] == [ + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING + assert state.attributes[ATTR_HVAC_MODES] == [ HVACMode.OFF, HVACMode.AUTO, HVACMode.HEAT, @@ -131,7 +152,9 @@ async def test_adam_3_climate_entity_attributes( data.devices["da224107914542988a88561b4452b0f6"]["select_regulation_mode"] = ( "cooling" ) - data.devices["f2bf9048bef64cc5b6d5110154e33c81"]["control_state"] = "cooling" + data.devices["f2bf9048bef64cc5b6d5110154e33c81"]["control_state"] = ( + HVACAction.COOLING + ) data.devices["056ee145a816487eaa69243c3280f8bf"]["binary_sensors"][ "cooling_state" ] = True @@ -146,8 +169,8 @@ async def test_adam_3_climate_entity_attributes( state = hass.states.get("climate.living_room") assert state assert state.state == HVACMode.COOL - assert state.attributes["hvac_action"] == "cooling" - assert state.attributes["hvac_modes"] == [ + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.COOLING + assert state.attributes[ATTR_HVAC_MODES] == [ HVACMode.OFF, HVACMode.AUTO, HVACMode.COOL, @@ -164,7 +187,7 @@ async def test_adam_climate_adjust_negative_testing( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {"entity_id": "climate.woonkamer", "temperature": 25}, + {ATTR_ENTITY_ID: "climate.woonkamer", ATTR_TEMPERATURE: 25}, blocking=True, ) @@ -176,7 +199,7 @@ async def test_adam_climate_entity_climate_changes( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {"entity_id": "climate.woonkamer", "temperature": 25}, + {ATTR_ENTITY_ID: "climate.woonkamer", ATTR_TEMPERATURE: 25}, blocking=True, ) assert mock_smile_adam.set_temperature.call_count == 1 @@ -188,9 +211,9 @@ async def test_adam_climate_entity_climate_changes( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { - "entity_id": "climate.woonkamer", - "hvac_mode": "heat", - "temperature": 25, + ATTR_ENTITY_ID: "climate.woonkamer", + ATTR_HVAC_MODE: HVACMode.HEAT, + ATTR_TEMPERATURE: 25, }, blocking=True, ) @@ -199,43 +222,43 @@ async def test_adam_climate_entity_climate_changes( "c50f167537524366a5af7aa3942feb1e", {"setpoint": 25.0} ) - with pytest.raises(ServiceValidationError): + with pytest.raises(ServiceValidationError, match="Accepted range"): await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {"entity_id": "climate.woonkamer", "temperature": 150}, + {ATTR_ENTITY_ID: "climate.woonkamer", ATTR_TEMPERATURE: 150}, blocking=True, ) await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_PRESET_MODE, - {"entity_id": "climate.woonkamer", "preset_mode": "away"}, + {ATTR_ENTITY_ID: "climate.woonkamer", ATTR_PRESET_MODE: PRESET_AWAY}, blocking=True, ) assert mock_smile_adam.set_preset.call_count == 1 mock_smile_adam.set_preset.assert_called_with( - "c50f167537524366a5af7aa3942feb1e", "away" + "c50f167537524366a5af7aa3942feb1e", PRESET_AWAY ) await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, - {"entity_id": "climate.woonkamer", "hvac_mode": "heat"}, + {ATTR_ENTITY_ID: "climate.woonkamer", ATTR_HVAC_MODE: HVACMode.HEAT}, 
blocking=True, ) assert mock_smile_adam.set_schedule_state.call_count == 2 mock_smile_adam.set_schedule_state.assert_called_with( - "c50f167537524366a5af7aa3942feb1e", "off" + "c50f167537524366a5af7aa3942feb1e", HVACMode.OFF ) - with pytest.raises(ServiceValidationError): + with pytest.raises(ServiceValidationError, match="valid modes are"): await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, { - "entity_id": "climate.jessie", - "hvac_mode": "dry", + ATTR_ENTITY_ID: "climate.jessie", + ATTR_HVAC_MODE: HVACMode.DRY, }, blocking=True, ) @@ -254,8 +277,8 @@ async def test_adam_climate_off_mode_change( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, { - "entity_id": "climate.slaapkamer", - "hvac_mode": "heat", + ATTR_ENTITY_ID: "climate.slaapkamer", + ATTR_HVAC_MODE: HVACMode.HEAT, }, blocking=True, ) @@ -270,8 +293,8 @@ async def test_adam_climate_off_mode_change( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, { - "entity_id": "climate.kinderkamer", - "hvac_mode": "off", + ATTR_ENTITY_ID: "climate.kinderkamer", + ATTR_HVAC_MODE: HVACMode.OFF, }, blocking=True, ) @@ -286,8 +309,8 @@ async def test_adam_climate_off_mode_change( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, { - "entity_id": "climate.logeerkamer", - "hvac_mode": "heat", + ATTR_ENTITY_ID: "climate.logeerkamer", + ATTR_HVAC_MODE: HVACMode.HEAT, }, blocking=True, ) @@ -304,20 +327,20 @@ async def test_anna_climate_entity_attributes( state = hass.states.get("climate.anna") assert state assert state.state == HVACMode.AUTO - assert state.attributes["hvac_action"] == "heating" - assert state.attributes["hvac_modes"] == [HVACMode.AUTO, HVACMode.HEAT_COOL] + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING + assert state.attributes[ATTR_HVAC_MODES] == [HVACMode.AUTO, HVACMode.HEAT_COOL] - assert "no_frost" in state.attributes["preset_modes"] - assert "home" in state.attributes["preset_modes"] + assert "no_frost" in state.attributes[ATTR_PRESET_MODES] + assert PRESET_HOME in state.attributes[ATTR_PRESET_MODES] - assert state.attributes["current_temperature"] == 19.3 - assert state.attributes["preset_mode"] == "home" - assert state.attributes["supported_features"] == 18 - assert state.attributes["target_temp_high"] == 30 - assert state.attributes["target_temp_low"] == 20.5 - assert state.attributes["min_temp"] == 4 - assert state.attributes["max_temp"] == 30 - assert state.attributes["target_temp_step"] == 0.1 + assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 19.3 + assert state.attributes[ATTR_PRESET_MODE] == PRESET_HOME + assert state.attributes[ATTR_SUPPORTED_FEATURES] == 18 + assert state.attributes[ATTR_TARGET_TEMP_HIGH] == 30 + assert state.attributes[ATTR_TARGET_TEMP_LOW] == 20.5 + assert state.attributes[ATTR_MIN_TEMP] == 4 + assert state.attributes[ATTR_MAX_TEMP] == 30 + assert state.attributes[ATTR_TARGET_TEMP_STEP] == 0.1 async def test_anna_2_climate_entity_attributes( @@ -329,14 +352,14 @@ async def test_anna_2_climate_entity_attributes( state = hass.states.get("climate.anna") assert state assert state.state == HVACMode.AUTO - assert state.attributes["hvac_action"] == "cooling" - assert state.attributes["hvac_modes"] == [ + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.COOLING + assert state.attributes[ATTR_HVAC_MODES] == [ HVACMode.AUTO, HVACMode.HEAT_COOL, ] - assert state.attributes["supported_features"] == 18 - assert state.attributes["target_temp_high"] == 30 - assert state.attributes["target_temp_low"] == 20.5 + assert state.attributes[ATTR_SUPPORTED_FEATURES] == 18 + assert 
state.attributes[ATTR_TARGET_TEMP_HIGH] == 30 + assert state.attributes[ATTR_TARGET_TEMP_LOW] == 20.5 async def test_anna_3_climate_entity_attributes( @@ -348,8 +371,8 @@ async def test_anna_3_climate_entity_attributes( state = hass.states.get("climate.anna") assert state assert state.state == HVACMode.AUTO - assert state.attributes["hvac_action"] == "idle" - assert state.attributes["hvac_modes"] == [ + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE + assert state.attributes[ATTR_HVAC_MODES] == [ HVACMode.AUTO, HVACMode.HEAT_COOL, ] @@ -365,7 +388,11 @@ async def test_anna_climate_entity_climate_changes( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {"entity_id": "climate.anna", "target_temp_high": 30, "target_temp_low": 20}, + { + ATTR_ENTITY_ID: "climate.anna", + ATTR_TARGET_TEMP_HIGH: 30, + ATTR_TARGET_TEMP_LOW: 20, + }, blocking=True, ) assert mock_smile_anna.set_temperature.call_count == 1 @@ -377,18 +404,18 @@ async def test_anna_climate_entity_climate_changes( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_PRESET_MODE, - {"entity_id": "climate.anna", "preset_mode": "away"}, + {ATTR_ENTITY_ID: "climate.anna", ATTR_PRESET_MODE: PRESET_AWAY}, blocking=True, ) assert mock_smile_anna.set_preset.call_count == 1 mock_smile_anna.set_preset.assert_called_with( - "c784ee9fdab44e1395b8dee7d7a497d5", "away" + "c784ee9fdab44e1395b8dee7d7a497d5", PRESET_AWAY ) await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, - {"entity_id": "climate.anna", "hvac_mode": "auto"}, + {ATTR_ENTITY_ID: "climate.anna", ATTR_HVAC_MODE: HVACMode.AUTO}, blocking=True, ) # hvac_mode is already auto so not called. @@ -397,12 +424,12 @@ async def test_anna_climate_entity_climate_changes( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, - {"entity_id": "climate.anna", "hvac_mode": "heat_cool"}, + {ATTR_ENTITY_ID: "climate.anna", ATTR_HVAC_MODE: HVACMode.HEAT_COOL}, blocking=True, ) assert mock_smile_anna.set_schedule_state.call_count == 1 mock_smile_anna.set_schedule_state.assert_called_with( - "c784ee9fdab44e1395b8dee7d7a497d5", "off" + "c784ee9fdab44e1395b8dee7d7a497d5", HVACMode.OFF ) data = mock_smile_anna.async_update.return_value @@ -414,4 +441,4 @@ async def test_anna_climate_entity_climate_changes( state = hass.states.get("climate.anna") assert state.state == HVACMode.HEAT - assert state.attributes["hvac_modes"] == [HVACMode.HEAT_COOL] + assert state.attributes[ATTR_HVAC_MODES] == [HVACMode.HEAT_COOL] diff --git a/tests/components/plugwise/test_init.py b/tests/components/plugwise/test_init.py index 99ff79263b6..014003d29d0 100644 --- a/tests/components/plugwise/test_init.py +++ b/tests/components/plugwise/test_init.py @@ -19,7 +19,6 @@ from homeassistant.config_entries import ConfigEntryState from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, async_fire_time_changed @@ -118,7 +117,7 @@ async def test_device_in_dr( ) -> None: """Test Gateway device registry data.""" mock_config_entry.add_to_hass(hass) - assert await async_setup_component(hass, DOMAIN, {}) + await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() device_entry = device_registry.async_get_device( @@ -237,7 +236,7 @@ async def test_update_device( data = mock_smile_adam_2.async_update.return_value 
mock_config_entry.add_to_hass(hass) - assert await async_setup_component(hass, DOMAIN, {}) + await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() assert ( diff --git a/tests/components/plugwise/test_number.py b/tests/components/plugwise/test_number.py index e10a7caa9e9..fdceb042669 100644 --- a/tests/components/plugwise/test_number.py +++ b/tests/components/plugwise/test_number.py @@ -2,6 +2,8 @@ from unittest.mock import MagicMock +import pytest + from homeassistant.components.number import ( ATTR_VALUE, DOMAIN as NUMBER_DOMAIN, @@ -9,6 +11,7 @@ from homeassistant.components.number import ( ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError from tests.common import MockConfigEntry @@ -101,3 +104,19 @@ async def test_adam_temperature_offset_change( mock_smile_adam.set_number.assert_called_with( "6a3bf693d05e48e0b460c815a4fdd09d", "temperature_offset", 1.0 ) + + +async def test_adam_temperature_offset_out_of_bounds_change( + hass: HomeAssistant, mock_smile_adam: MagicMock, init_integration: MockConfigEntry +) -> None: + """Test changing of the temperature_offset number beyond limits.""" + with pytest.raises(ServiceValidationError, match="valid range"): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: "number.zone_thermostat_jessie_temperature_offset", + ATTR_VALUE: 3.0, + }, + blocking=True, + ) diff --git a/tests/components/plugwise/test_select.py b/tests/components/plugwise/test_select.py index 0fab41cdbae..8891a88bb91 100644 --- a/tests/components/plugwise/test_select.py +++ b/tests/components/plugwise/test_select.py @@ -2,6 +2,8 @@ from unittest.mock import MagicMock +import pytest + from homeassistant.components.select import ( ATTR_OPTION, DOMAIN as SELECT_DOMAIN, @@ -9,6 +11,7 @@ from homeassistant.components.select import ( ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError from tests.common import MockConfigEntry @@ -65,8 +68,8 @@ async def test_adam_select_regulation_mode( SELECT_DOMAIN, SERVICE_SELECT_OPTION, { - "entity_id": "select.adam_regulation_mode", - "option": "heating", + ATTR_ENTITY_ID: "select.adam_regulation_mode", + ATTR_OPTION: "heating", }, blocking=True, ) @@ -86,3 +89,20 @@ async def test_legacy_anna_select_entities( ) -> None: """Test not creating a select-entity for a legacy Anna without a thermostat-schedule.""" assert not hass.states.get("select.anna_thermostat_schedule") + + +async def test_adam_select_unavailable_regulation_mode( + hass: HomeAssistant, mock_smile_anna: MagicMock, init_integration: MockConfigEntry +) -> None: + """Test a regulation_mode non-available preset.""" + + with pytest.raises(ServiceValidationError, match="valid options"): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: "select.anna_thermostat_schedule", + ATTR_OPTION: "freezing", + }, + blocking=True, + ) diff --git a/tests/components/plugwise/test_sensor.py b/tests/components/plugwise/test_sensor.py index 0745adb786a..f10f3f00933 100644 --- a/tests/components/plugwise/test_sensor.py +++ b/tests/components/plugwise/test_sensor.py @@ -2,6 +2,8 @@ from unittest.mock import MagicMock +import pytest + from homeassistant.components.plugwise.const import DOMAIN from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from 
homeassistant.core import HomeAssistant @@ -135,6 +137,7 @@ async def test_p1_dsmr_sensor_entities( assert not state +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_p1_3ph_dsmr_sensor_entities( hass: HomeAssistant, entity_registry: er.EntityRegistry, @@ -154,21 +157,23 @@ async def test_p1_3ph_dsmr_sensor_entities( assert state assert int(state.state) == 2080 - entity_id = "sensor.p1_voltage_phase_one" - state = hass.states.get(entity_id) - assert not state - - entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) - await hass.async_block_till_done() - - await hass.config_entries.async_reload(init_integration.entry_id) - await hass.async_block_till_done() - + # Default disabled sensor test state = hass.states.get("sensor.p1_voltage_phase_one") assert state assert float(state.state) == 233.2 +async def test_p1_3ph_dsmr_sensor_disabled_entities( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_smile_p1_2: MagicMock, + init_integration: MockConfigEntry, +) -> None: + """Test disabled power related sensor entities intent.""" + state = hass.states.get("sensor.p1_voltage_phase_one") + assert not state + + async def test_stretch_sensor_entities( hass: HomeAssistant, mock_stretch: MagicMock, init_integration: MockConfigEntry ) -> None: diff --git a/tests/components/plugwise/test_switch.py b/tests/components/plugwise/test_switch.py index d9a4792ddb1..fa8a8a434e7 100644 --- a/tests/components/plugwise/test_switch.py +++ b/tests/components/plugwise/test_switch.py @@ -8,6 +8,7 @@ import pytest from homeassistant.components.plugwise.const import DOMAIN from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( + ATTR_ENTITY_ID, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, @@ -44,7 +45,7 @@ async def test_adam_climate_switch_negative_testing( await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, - {"entity_id": "switch.cv_pomp_relay"}, + {ATTR_ENTITY_ID: "switch.cv_pomp_relay"}, blocking=True, ) @@ -57,7 +58,7 @@ async def test_adam_climate_switch_negative_testing( await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_ON, - {"entity_id": "switch.fibaro_hc2_relay"}, + {ATTR_ENTITY_ID: "switch.fibaro_hc2_relay"}, blocking=True, ) @@ -74,7 +75,7 @@ async def test_adam_climate_switch_changes( await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, - {"entity_id": "switch.cv_pomp_relay"}, + {ATTR_ENTITY_ID: "switch.cv_pomp_relay"}, blocking=True, ) @@ -86,7 +87,7 @@ async def test_adam_climate_switch_changes( await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TOGGLE, - {"entity_id": "switch.fibaro_hc2_relay"}, + {ATTR_ENTITY_ID: "switch.fibaro_hc2_relay"}, blocking=True, ) @@ -98,7 +99,7 @@ async def test_adam_climate_switch_changes( await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_ON, - {"entity_id": "switch.fibaro_hc2_relay"}, + {ATTR_ENTITY_ID: "switch.fibaro_hc2_relay"}, blocking=True, ) @@ -128,7 +129,7 @@ async def test_stretch_switch_changes( await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, - {"entity_id": "switch.koelkast_92c4a_relay"}, + {ATTR_ENTITY_ID: "switch.koelkast_92c4a_relay"}, blocking=True, ) assert mock_stretch.set_switch_state.call_count == 1 @@ -139,7 +140,7 @@ async def test_stretch_switch_changes( await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TOGGLE, - {"entity_id": "switch.droger_52559_relay"}, + {ATTR_ENTITY_ID: "switch.droger_52559_relay"}, blocking=True, ) assert 
mock_stretch.set_switch_state.call_count == 2 @@ -150,7 +151,7 @@ async def test_stretch_switch_changes( await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_ON, - {"entity_id": "switch.droger_52559_relay"}, + {ATTR_ENTITY_ID: "switch.droger_52559_relay"}, blocking=True, ) assert mock_stretch.set_switch_state.call_count == 3 From 8fd64d2ca4bc130580d71fc4832bf6b308abb110 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Tue, 10 Dec 2024 08:04:00 -0800 Subject: [PATCH 0474/1198] Add a quality scale for fitbit integration (#131326) Co-authored-by: Joost Lekkerkerker --- .../components/fitbit/quality_scale.yaml | 70 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 70 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/fitbit/quality_scale.yaml diff --git a/homeassistant/components/fitbit/quality_scale.yaml b/homeassistant/components/fitbit/quality_scale.yaml new file mode 100644 index 00000000000..abf127cdb98 --- /dev/null +++ b/homeassistant/components/fitbit/quality_scale.yaml @@ -0,0 +1,70 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: The integration has no actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow: done + config-flow-test-coverage: done + dependency-transparency: todo + docs-actions: + status: exempt + comment: There are no actions in Fitbit integration. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: Fitbit is a polling integration that does use async events. + entity-unique-id: done + has-entity-name: done + runtime-data: + status: todo + comment: | + The integration uses `hass.data` for data associated with a configuration + entry and needs to be updated to use `runtime_data`. 
+ test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: todo + config-entry-unloading: todo + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: todo + integration-owner: todo + log-when-unavailable: todo + parallel-updates: todo + reauthentication-flow: todo + test-coverage: todo + + # Gold + devices: todo + diagnostics: todo + discovery: todo + discovery-update-info: todo + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: todo + entity-category: todo + entity-device-class: todo + entity-disabled-by-default: todo + entity-translations: todo + exception-translations: todo + icon-translations: todo + reconfiguration-flow: todo + repair-issues: todo + stale-devices: todo + + # Platinum + async-dependency: todo + inject-websession: todo + strict-typing: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 9f6d1e0b783..119a66408b6 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -389,7 +389,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "fints", "fireservicerota", "firmata", - "fitbit", "fivem", "fixer", "fjaraskupan", From d4546c94b05cd4401987e390f6eb83ab12ff9b03 Mon Sep 17 00:00:00 2001 From: Markus Jacobsen Date: Tue, 10 Dec 2024 18:01:12 +0100 Subject: [PATCH 0475/1198] Add beolink_join source_id parameter to Bang & Olufsen (#132377) * Add source as parameter to beolink join service * Add beolink join source and responses * Improve comment Add translation * Remove result from beolink join custom action * Cleanup * Use options selector instead of string for source ID Fix test docstring * Update options * Use translation dict for source ids Add input validation Add tests for invalid sources Improve source id description * Use list instead of translation dict Remove platform prefixes Add test for Beolink Converter source * Fix source_id naming and order --- .../components/bang_olufsen/const.py | 17 ++ .../components/bang_olufsen/media_player.py | 22 +- .../components/bang_olufsen/services.yaml | 17 ++ .../components/bang_olufsen/strings.json | 20 ++ .../snapshots/test_media_player.ambr | 236 +++++++++++++++++- .../bang_olufsen/test_media_player.py | 85 ++++++- 6 files changed, 387 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/bang_olufsen/const.py b/homeassistant/components/bang_olufsen/const.py index 209311d3e8a..9f0649e610b 100644 --- a/homeassistant/components/bang_olufsen/const.py +++ b/homeassistant/components/bang_olufsen/const.py @@ -210,3 +210,20 @@ BANG_OLUFSEN_WEBSOCKET_EVENT: Final[str] = f"{DOMAIN}_websocket_event" CONNECTION_STATUS: Final[str] = "CONNECTION_STATUS" + +# Beolink Converter NL/ML sources need to be transformed to upper case +BEOLINK_JOIN_SOURCES_TO_UPPER = ( + "aux_a", + "cd", + "ph", + "radio", + "tp1", + "tp2", +) +BEOLINK_JOIN_SOURCES = ( + *BEOLINK_JOIN_SOURCES_TO_UPPER, + "beoradio", + "deezer", + "spotify", + "tidal", +) diff --git a/homeassistant/components/bang_olufsen/media_player.py b/homeassistant/components/bang_olufsen/media_player.py index 96e7cca0175..282ecdd2ae5 100644 --- a/homeassistant/components/bang_olufsen/media_player.py +++ b/homeassistant/components/bang_olufsen/media_player.py @@ -74,6 +74,8 @@ from .const import ( BANG_OLUFSEN_REPEAT_FROM_HA, BANG_OLUFSEN_REPEAT_TO_HA, 
BANG_OLUFSEN_STATES, + BEOLINK_JOIN_SOURCES, + BEOLINK_JOIN_SOURCES_TO_UPPER, CONF_BEOLINK_JID, CONNECTION_STATUS, DOMAIN, @@ -135,7 +137,10 @@ async def async_setup_entry( platform.async_register_entity_service( name="beolink_join", - schema={vol.Optional("beolink_jid"): jid_regex}, + schema={ + vol.Optional("beolink_jid"): jid_regex, + vol.Optional("source_id"): vol.In(BEOLINK_JOIN_SOURCES), + }, func="async_beolink_join", ) @@ -985,12 +990,23 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): await self.async_beolink_leave() # Custom actions: - async def async_beolink_join(self, beolink_jid: str | None = None) -> None: + async def async_beolink_join( + self, beolink_jid: str | None = None, source_id: str | None = None + ) -> None: """Join a Beolink multi-room experience.""" + # Touch to join if beolink_jid is None: await self._client.join_latest_beolink_experience() - else: + # Join a peer + elif beolink_jid and source_id is None: await self._client.join_beolink_peer(jid=beolink_jid) + # Join a peer and select specific source + elif beolink_jid and source_id: + # Beolink Converter NL/ML sources need to be in upper case + if source_id in BEOLINK_JOIN_SOURCES_TO_UPPER: + source_id = source_id.upper() + + await self._client.join_beolink_peer(jid=beolink_jid, source=source_id) async def async_beolink_expand( self, beolink_jids: list[str] | None = None, all_discovered: bool = False diff --git a/homeassistant/components/bang_olufsen/services.yaml b/homeassistant/components/bang_olufsen/services.yaml index e5d61420dff..7c3a2d659bd 100644 --- a/homeassistant/components/bang_olufsen/services.yaml +++ b/homeassistant/components/bang_olufsen/services.yaml @@ -48,6 +48,23 @@ beolink_join: example: 1111.2222222.33333333@products.bang-olufsen.com selector: text: + source_id: + required: false + example: tidal + selector: + select: + translation_key: "source_ids" + options: + - beoradio + - deezer + - spotify + - tidal + - radio + - tp1 + - tp2 + - cd + - aux_a + - ph beolink_leave: target: diff --git a/homeassistant/components/bang_olufsen/strings.json b/homeassistant/components/bang_olufsen/strings.json index 6e75d2f26c8..b4aac78756c 100644 --- a/homeassistant/components/bang_olufsen/strings.json +++ b/homeassistant/components/bang_olufsen/strings.json @@ -29,6 +29,22 @@ } } }, + "selector": { + "source_ids": { + "options": { + "beoradio": "ASE Beoradio", + "deezer": "ASE / Mozart Deezer", + "spotify": "ASE / Mozart Spotify", + "tidal": "Mozart Tidal", + "aux_a": "Beolink Converter NL/ML AUX_A", + "cd": "Beolink Converter NL/ML CD", + "ph": "Beolink Converter NL/ML PH", + "radio": "Beolink Converter NL/ML RADIO", + "tp1": "Beolink Converter NL/ML TP1", + "tp2": "Beolink Converter NL/ML TP2" + } + } + }, "services": { "beolink_allstandby": { "name": "Beolink all standby", @@ -61,6 +77,10 @@ "beolink_jid": { "name": "Beolink JID", "description": "Manually specify Beolink JID to join." + }, + "source_id": { + "name": "Source", + "description": "Specify which source to join, behavior varies between hardware platforms. Source names prefaced by a platform name can only be used when connecting to that platform. For example \"ASE Beoradio\" can only be used when joining an ASE device, while ”ASE / Mozart Deezer” can be used with ASE or Mozart devices. A defined Beolink JID is required." 
} }, "sections": { diff --git a/tests/components/bang_olufsen/snapshots/test_media_player.ambr b/tests/components/bang_olufsen/snapshots/test_media_player.ambr index 36fcc72aa22..327b7ecfacf 100644 --- a/tests/components/bang_olufsen/snapshots/test_media_player.ambr +++ b/tests/components/bang_olufsen/snapshots/test_media_player.ambr @@ -243,7 +243,7 @@ 'state': 'playing', }) # --- -# name: test_async_beolink_join +# name: test_async_beolink_join[service_parameters0-method_parameters0] StateSnapshot({ 'attributes': ReadOnlyDict({ 'beolink': dict({ @@ -291,6 +291,240 @@ 'state': 'playing', }) # --- +# name: test_async_beolink_join[service_parameters1-method_parameters1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_join[service_parameters2-method_parameters2] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_join_invalid[service_parameters0-expected_result0] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_join_invalid[service_parameters1-expected_result1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_join_invalid[service_parameters2-expected_result2] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- # name: test_async_beolink_unexpand StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/bang_olufsen/test_media_player.py b/tests/components/bang_olufsen/test_media_player.py index aa35b0265dc..695b086b0a7 100644 --- a/tests/components/bang_olufsen/test_media_player.py +++ b/tests/components/bang_olufsen/test_media_player.py @@ -18,6 +18,7 @@ from mozart_api.models import ( import pytest from syrupy.assertion import SnapshotAssertion from syrupy.filters import props +from voluptuous import Invalid, MultipleInvalid from homeassistant.components.bang_olufsen.const import ( BANG_OLUFSEN_REPEAT_FROM_HA, @@ -1523,13 +1524,38 @@ async def test_async_unjoin_player( assert states == snapshot(exclude=props("media_position_updated_at")) +@pytest.mark.parametrize( + ( + "service_parameters", + "method_parameters", + ), + [ + # Defined JID + ( + {"beolink_jid": TEST_JID_2}, + {"jid": TEST_JID_2}, + ), + # Defined JID and source + ( + {"beolink_jid": TEST_JID_2, "source_id": TEST_SOURCE.id}, + {"jid": TEST_JID_2, "source": TEST_SOURCE.id}, + ), + # Defined JID and Beolink Converter NL/ML source + ( + {"beolink_jid": TEST_JID_2, "source_id": "cd"}, + {"jid": TEST_JID_2, "source": "CD"}, + ), + ], +) async def test_async_beolink_join( hass: HomeAssistant, snapshot: SnapshotAssertion, mock_mozart_client: AsyncMock, mock_config_entry: MockConfigEntry, + service_parameters: dict[str, str], + method_parameters: dict[str, str], ) -> None: - """Test async_beolink_join with defined JID.""" + """Test async_beolink_join with defined JID and JID and source.""" mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) @@ -1537,14 +1563,61 @@ async def test_async_beolink_join( await hass.services.async_call( DOMAIN, "beolink_join", - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - "beolink_jid": TEST_JID_2, - }, + {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, **service_parameters}, blocking=True, ) - mock_mozart_client.join_beolink_peer.assert_called_once_with(jid=TEST_JID_2) + mock_mozart_client.join_beolink_peer.assert_called_once_with(**method_parameters) + + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + +@pytest.mark.parametrize( + ( + "service_parameters", + "expected_result", + ), + [ + # Defined invalid JID + ( + {"beolink_jid": "not_a_jid"}, + pytest.raises(Invalid), + ), + # Defined invalid source + ( + {"source_id": "invalid_source"}, + pytest.raises(MultipleInvalid), + ), + # Defined invalid JID and invalid source + ( + {"beolink_jid": "not_a_jid", "source_id": "invalid_source"}, + pytest.raises(MultipleInvalid), + ), + ], +) +async def test_async_beolink_join_invalid( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, + service_parameters: dict[str, str], + expected_result: AbstractContextManager, +) -> None: + """Test invalid 
async_beolink_join calls with defined JID or source ID.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + with expected_result: + await hass.services.async_call( + DOMAIN, + "beolink_join", + {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, **service_parameters}, + blocking=True, + ) + + mock_mozart_client.join_beolink_peer.assert_not_called() assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert states == snapshot(exclude=props("media_position_updated_at")) From dba405dd885f658f300528ee58bbde5ca0f97956 Mon Sep 17 00:00:00 2001 From: Markus Jacobsen Date: Tue, 10 Dec 2024 18:21:59 +0100 Subject: [PATCH 0476/1198] Bump mozart-api to 4.1.1.116.4 (#132859) Bump API --- homeassistant/components/bang_olufsen/__init__.py | 2 ++ homeassistant/components/bang_olufsen/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 5 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/bang_olufsen/__init__.py b/homeassistant/components/bang_olufsen/__init__.py index c8ba1f1c3dc..be99f8b5b7d 100644 --- a/homeassistant/components/bang_olufsen/__init__.py +++ b/homeassistant/components/bang_olufsen/__init__.py @@ -8,6 +8,7 @@ from aiohttp.client_exceptions import ( ClientConnectorError, ClientOSError, ServerTimeoutError, + WSMessageTypeError, ) from mozart_api.exceptions import ApiException from mozart_api.mozart_client import MozartClient @@ -62,6 +63,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: BangOlufsenConfigEntry) ServerTimeoutError, ApiException, TimeoutError, + WSMessageTypeError, ) as error: await client.close_api_client() raise ConfigEntryNotReady(f"Unable to connect to {entry.title}") from error diff --git a/homeassistant/components/bang_olufsen/manifest.json b/homeassistant/components/bang_olufsen/manifest.json index 1565c98e979..b29fe9731de 100644 --- a/homeassistant/components/bang_olufsen/manifest.json +++ b/homeassistant/components/bang_olufsen/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/bang_olufsen", "integration_type": "device", "iot_class": "local_push", - "requirements": ["mozart-api==4.1.1.116.3"], + "requirements": ["mozart-api==4.1.1.116.4"], "zeroconf": ["_bangolufsen._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index 0ef33d06220..3f619ac2e0e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1403,7 +1403,7 @@ motionblindsble==0.1.3 motioneye-client==0.3.14 # homeassistant.components.bang_olufsen -mozart-api==4.1.1.116.3 +mozart-api==4.1.1.116.4 # homeassistant.components.mullvad mullvad-api==1.0.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3a57a4e2a19..bfddb35b041 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1172,7 +1172,7 @@ motionblindsble==0.1.3 motioneye-client==0.3.14 # homeassistant.components.bang_olufsen -mozart-api==4.1.1.116.3 +mozart-api==4.1.1.116.4 # homeassistant.components.mullvad mullvad-api==1.0.0 From f99239538c5bc5582227410e55b34dfe7aa8205b Mon Sep 17 00:00:00 2001 From: G Johansson Date: Tue, 10 Dec 2024 18:26:49 +0100 Subject: [PATCH 0477/1198] Add retry to api calls in Nord Pool (#132768) --- .../components/nordpool/coordinator.py | 30 ++++++++++++------- tests/components/nordpool/conftest.py | 8 +++++ tests/components/nordpool/test_coordinator.py | 8 ++--- 3 files changed, 31 insertions(+), 15 deletions(-) diff --git 
a/homeassistant/components/nordpool/coordinator.py b/homeassistant/components/nordpool/coordinator.py index fa4e9ca2548..e6b36f7deee 100644 --- a/homeassistant/components/nordpool/coordinator.py +++ b/homeassistant/components/nordpool/coordinator.py @@ -2,6 +2,7 @@ from __future__ import annotations +import asyncio from collections.abc import Callable from datetime import datetime, timedelta from typing import TYPE_CHECKING @@ -69,23 +70,30 @@ class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodData]): self.unsub = async_track_point_in_utc_time( self.hass, self.fetch_data, self.get_next_interval(dt_util.utcnow()) ) + data = await self.api_call() + if data: + self.async_set_updated_data(data) + + async def api_call(self, retry: int = 3) -> DeliveryPeriodData | None: + """Make api call to retrieve data with retry if failure.""" + data = None try: data = await self.client.async_get_delivery_period( dt_util.now(), Currency(self.config_entry.data[CONF_CURRENCY]), self.config_entry.data[CONF_AREAS], ) - except NordPoolEmptyResponseError as error: - LOGGER.debug("Empty response error: %s", error) - self.async_set_update_error(error) - return - except NordPoolResponseError as error: - LOGGER.debug("Response error: %s", error) - self.async_set_update_error(error) - return - except NordPoolError as error: + except ( + NordPoolEmptyResponseError, + NordPoolResponseError, + NordPoolError, + ) as error: LOGGER.debug("Connection error: %s", error) + if retry > 0: + next_run = (4 - retry) * 15 + LOGGER.debug("Wait %d seconds for next try", next_run) + await asyncio.sleep(next_run) + return await self.api_call(retry - 1) self.async_set_update_error(error) - return - self.async_set_updated_data(data) + return data diff --git a/tests/components/nordpool/conftest.py b/tests/components/nordpool/conftest.py index d1c1972c568..9b7ab4b2afa 100644 --- a/tests/components/nordpool/conftest.py +++ b/tests/components/nordpool/conftest.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import AsyncGenerator from datetime import datetime import json from typing import Any @@ -23,6 +24,13 @@ from tests.common import MockConfigEntry, load_fixture from tests.test_util.aiohttp import AiohttpClientMocker +@pytest.fixture(autouse=True) +async def no_sleep() -> AsyncGenerator[None]: + """No sleeping.""" + with patch("homeassistant.components.nordpool.coordinator.asyncio.sleep"): + yield + + @pytest.fixture async def load_int( hass: HomeAssistant, get_data: DeliveryPeriodData diff --git a/tests/components/nordpool/test_coordinator.py b/tests/components/nordpool/test_coordinator.py index d2d912b1b99..68534237dee 100644 --- a/tests/components/nordpool/test_coordinator.py +++ b/tests/components/nordpool/test_coordinator.py @@ -58,7 +58,7 @@ async def test_coordinator( freezer.tick(timedelta(hours=1)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - mock_data.assert_called_once() + assert mock_data.call_count == 4 state = hass.states.get("sensor.nord_pool_se3_current_price") assert state.state == STATE_UNAVAILABLE mock_data.reset_mock() @@ -68,7 +68,7 @@ async def test_coordinator( freezer.tick(timedelta(hours=1)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - mock_data.assert_called_once() + assert mock_data.call_count == 4 state = hass.states.get("sensor.nord_pool_se3_current_price") assert state.state == STATE_UNAVAILABLE assert "Authentication error" in caplog.text @@ -79,7 +79,7 @@ async def 
test_coordinator( freezer.tick(timedelta(hours=1)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - mock_data.assert_called_once() + assert mock_data.call_count == 4 state = hass.states.get("sensor.nord_pool_se3_current_price") assert state.state == STATE_UNAVAILABLE assert "Empty response" in caplog.text @@ -90,7 +90,7 @@ async def test_coordinator( freezer.tick(timedelta(hours=1)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - mock_data.assert_called_once() + assert mock_data.call_count == 4 state = hass.states.get("sensor.nord_pool_se3_current_price") assert state.state == STATE_UNAVAILABLE assert "Response error" in caplog.text From d2303eb83fab7ded60659b1951191b11078ff400 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 10 Dec 2024 18:27:40 +0100 Subject: [PATCH 0478/1198] Bump pydantic to 2.10.3 and update required deps (#131963) --- .github/workflows/wheels.yml | 27 ------------------- .../components/aussie_broadband/manifest.json | 2 +- .../components/bang_olufsen/const.py | 16 +++++------ .../components/bang_olufsen/entity.py | 2 +- .../components/bang_olufsen/media_player.py | 14 +++++----- homeassistant/components/google/__init__.py | 4 +-- homeassistant/components/google/calendar.py | 10 +++---- .../components/google/coordinator.py | 4 +-- .../components/purpleair/diagnostics.py | 2 +- .../components/purpleair/manifest.json | 2 +- .../components/unifiprotect/services.py | 2 +- homeassistant/components/xbox/manifest.json | 2 +- .../components/zwave_js/triggers/event.py | 2 +- homeassistant/package_constraints.txt | 5 ++-- requirements_all.txt | 6 ++--- requirements_test.txt | 2 +- requirements_test_all.txt | 6 ++--- script/gen_requirements_all.py | 5 ++-- .../lacrosse_view/test_config_flow.py | 6 ++--- tests/components/peco/test_sensor.py | 4 +-- .../youtube/snapshots/test_sensor.ambr | 2 +- 21 files changed, 48 insertions(+), 77 deletions(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 749f95fa922..a36b3073aab 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -197,33 +197,6 @@ jobs: split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt - - name: Create requirements for cython<3 - if: matrix.abi == 'cp312' - run: | - # Some dependencies still require 'cython<3' - # and don't yet use isolated build environments. - # Build these first. 
- # pydantic: https://github.com/pydantic/pydantic/issues/7689 - - touch requirements_old-cython.txt - cat homeassistant/package_constraints.txt | grep 'pydantic==' >> requirements_old-cython.txt - - - name: Build wheels (old cython) - uses: home-assistant/wheels@2024.11.0 - if: matrix.abi == 'cp312' - with: - abi: ${{ matrix.abi }} - tag: musllinux_1_2 - arch: ${{ matrix.arch }} - wheels-key: ${{ secrets.WHEELS_KEY }} - env-file: true - apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev" - skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl - constraints: "homeassistant/package_constraints.txt" - requirements-diff: "requirements_diff.txt" - requirements: "requirements_old-cython.txt" - pip: "'cython<3'" - - name: Build wheels (part 1) uses: home-assistant/wheels@2024.11.0 with: diff --git a/homeassistant/components/aussie_broadband/manifest.json b/homeassistant/components/aussie_broadband/manifest.json index 877a46a3650..456b8962461 100644 --- a/homeassistant/components/aussie_broadband/manifest.json +++ b/homeassistant/components/aussie_broadband/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/aussie_broadband", "iot_class": "cloud_polling", "loggers": ["aussiebb"], - "requirements": ["pyaussiebb==0.0.15"] + "requirements": ["pyaussiebb==0.1.4"] } diff --git a/homeassistant/components/bang_olufsen/const.py b/homeassistant/components/bang_olufsen/const.py index 9f0649e610b..7f87ce11097 100644 --- a/homeassistant/components/bang_olufsen/const.py +++ b/homeassistant/components/bang_olufsen/const.py @@ -137,7 +137,7 @@ VALID_MEDIA_TYPES: Final[tuple] = ( # Fallback sources to use in case of API failure. 
FALLBACK_SOURCES: Final[SourceArray] = SourceArray( items=[ - Source( + Source( # type: ignore[call-arg] id="uriStreamer", is_enabled=True, is_playable=True, @@ -145,7 +145,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( type=SourceTypeEnum(value="uriStreamer"), is_seekable=False, ), - Source( + Source( # type: ignore[call-arg] id="bluetooth", is_enabled=True, is_playable=True, @@ -153,7 +153,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( type=SourceTypeEnum(value="bluetooth"), is_seekable=False, ), - Source( + Source( # type: ignore[call-arg] id="spotify", is_enabled=True, is_playable=True, @@ -161,7 +161,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( type=SourceTypeEnum(value="spotify"), is_seekable=True, ), - Source( + Source( # type: ignore[call-arg] id="lineIn", is_enabled=True, is_playable=True, @@ -169,7 +169,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( type=SourceTypeEnum(value="lineIn"), is_seekable=False, ), - Source( + Source( # type: ignore[call-arg] id="spdif", is_enabled=True, is_playable=True, @@ -177,7 +177,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( type=SourceTypeEnum(value="spdif"), is_seekable=False, ), - Source( + Source( # type: ignore[call-arg] id="netRadio", is_enabled=True, is_playable=True, @@ -185,7 +185,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( type=SourceTypeEnum(value="netRadio"), is_seekable=False, ), - Source( + Source( # type: ignore[call-arg] id="deezer", is_enabled=True, is_playable=True, @@ -193,7 +193,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( type=SourceTypeEnum(value="deezer"), is_seekable=True, ), - Source( + Source( # type: ignore[call-arg] id="tidalConnect", is_enabled=True, is_playable=True, diff --git a/homeassistant/components/bang_olufsen/entity.py b/homeassistant/components/bang_olufsen/entity.py index 8ed68da1678..77fe7c6a1ff 100644 --- a/homeassistant/components/bang_olufsen/entity.py +++ b/homeassistant/components/bang_olufsen/entity.py @@ -42,7 +42,7 @@ class BangOlufsenBase: # Objects that get directly updated by notifications. self._playback_metadata: PlaybackContentMetadata = PlaybackContentMetadata() - self._playback_progress: PlaybackProgress = PlaybackProgress(total_duration=0) + self._playback_progress: PlaybackProgress = PlaybackProgress(total_duration=0) # type: ignore[call-arg] self._playback_source: Source = Source() self._playback_state: RenderingState = RenderingState() self._source_change: Source = Source() diff --git a/homeassistant/components/bang_olufsen/media_player.py b/homeassistant/components/bang_olufsen/media_player.py index 282ecdd2ae5..d8b7a1bf940 100644 --- a/homeassistant/components/bang_olufsen/media_player.py +++ b/homeassistant/components/bang_olufsen/media_player.py @@ -210,9 +210,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): # Misc. variables. 
self._audio_sources: dict[str, str] = {} self._media_image: Art = Art() - self._software_status: SoftwareUpdateStatus = SoftwareUpdateStatus( + self._software_status: SoftwareUpdateStatus = SoftwareUpdateStatus( # type: ignore[call-arg] software_version="", - state=SoftwareUpdateState(seconds_remaining=0, value="idle"), + state=SoftwareUpdateState(seconds_remaining=0, value="idle"), # type: ignore[call-arg] ) self._sources: dict[str, str] = {} self._state: str = MediaPlayerState.IDLE @@ -896,9 +896,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): elif media_type == BangOlufsenMediaType.RADIO: await self._client.run_provided_scene( - scene_properties=SceneProperties( + scene_properties=SceneProperties( # type: ignore[call-arg] action_list=[ - Action( + Action( # type: ignore[call-arg] type="radio", radio_station_id=media_id, ) @@ -919,7 +919,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): deezer_id = kwargs[ATTR_MEDIA_EXTRA]["id"] await self._client.start_deezer_flow( - user_flow=UserFlow(user_id=deezer_id) + user_flow=UserFlow(user_id=deezer_id) # type: ignore[call-arg] ) # Play a playlist or album. @@ -929,7 +929,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): start_from = kwargs[ATTR_MEDIA_EXTRA]["start_from"] await self._client.add_to_queue( - play_queue_item=PlayQueueItem( + play_queue_item=PlayQueueItem( # type: ignore[call-arg] provider=PlayQueueItemType(value=media_type), start_now_from_position=start_from, type="playlist", @@ -940,7 +940,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): # Play a track. else: await self._client.add_to_queue( - play_queue_item=PlayQueueItem( + play_queue_item=PlayQueueItem( # type: ignore[call-arg] provider=PlayQueueItemType(value=media_type), start_now_from_position=0, type="track", diff --git a/homeassistant/components/google/__init__.py b/homeassistant/components/google/__init__.py index 2ad400aabab..1d204883579 100644 --- a/homeassistant/components/google/__init__.py +++ b/homeassistant/components/google/__init__.py @@ -277,10 +277,10 @@ async def async_setup_add_event_service( elif EVENT_START_DATETIME in call.data and EVENT_END_DATETIME in call.data: start_dt = call.data[EVENT_START_DATETIME] end_dt = call.data[EVENT_END_DATETIME] - start = DateOrDatetime( + start = DateOrDatetime( # type: ignore[call-arg] date_time=start_dt, timezone=str(hass.config.time_zone) ) - end = DateOrDatetime(date_time=end_dt, timezone=str(hass.config.time_zone)) + end = DateOrDatetime(date_time=end_dt, timezone=str(hass.config.time_zone)) # type: ignore[call-arg] if start is None or end is None: raise ValueError( diff --git a/homeassistant/components/google/calendar.py b/homeassistant/components/google/calendar.py index 5ac5dae616c..045e0e31b46 100644 --- a/homeassistant/components/google/calendar.py +++ b/homeassistant/components/google/calendar.py @@ -272,7 +272,7 @@ async def async_setup_entry( entity_description.search, ) else: - request_template = SyncEventsRequest( + request_template = SyncEventsRequest( # type: ignore[call-arg] calendar_id=calendar_id, start_time=dt_util.now() + SYNC_EVENT_MIN_TIME, ) @@ -437,11 +437,11 @@ class GoogleCalendarEntity( start: DateOrDatetime end: DateOrDatetime if isinstance(dtstart, datetime): - start = DateOrDatetime( + start = DateOrDatetime( # type: ignore[call-arg] date_time=dt_util.as_local(dtstart), timezone=str(dt_util.get_default_time_zone()), ) - end = DateOrDatetime( + end = DateOrDatetime( # type: ignore[call-arg] 
date_time=dt_util.as_local(dtend), timezone=str(dt_util.get_default_time_zone()), ) @@ -543,8 +543,8 @@ async def async_create_event(entity: GoogleCalendarEntity, call: ServiceCall) -> elif EVENT_START_DATETIME in call.data and EVENT_END_DATETIME in call.data: start_dt = call.data[EVENT_START_DATETIME] end_dt = call.data[EVENT_END_DATETIME] - start = DateOrDatetime(date_time=start_dt, timezone=str(hass.config.time_zone)) - end = DateOrDatetime(date_time=end_dt, timezone=str(hass.config.time_zone)) + start = DateOrDatetime(date_time=start_dt, timezone=str(hass.config.time_zone)) # type: ignore[call-arg] + end = DateOrDatetime(date_time=end_dt, timezone=str(hass.config.time_zone)) # type: ignore[call-arg] if start is None or end is None: raise ValueError("Missing required fields to set start or end date/datetime") diff --git a/homeassistant/components/google/coordinator.py b/homeassistant/components/google/coordinator.py index 19198041c05..06f33782479 100644 --- a/homeassistant/components/google/coordinator.py +++ b/homeassistant/components/google/coordinator.py @@ -131,7 +131,7 @@ class CalendarQueryUpdateCoordinator(DataUpdateCoordinator[list[Event]]): self, start_date: datetime, end_date: datetime ) -> Iterable[Event]: """Get all events in a specific time frame.""" - request = ListEventsRequest( + request = ListEventsRequest( # type: ignore[call-arg] calendar_id=self.calendar_id, start_time=start_date, end_time=end_date, @@ -149,7 +149,7 @@ class CalendarQueryUpdateCoordinator(DataUpdateCoordinator[list[Event]]): async def _async_update_data(self) -> list[Event]: """Fetch data from API endpoint.""" - request = ListEventsRequest(calendar_id=self.calendar_id, search=self._search) + request = ListEventsRequest(calendar_id=self.calendar_id, search=self._search) # type: ignore[call-arg] try: result = await self.calendar_service.async_list_events(request) except ApiException as err: diff --git a/homeassistant/components/purpleair/diagnostics.py b/homeassistant/components/purpleair/diagnostics.py index a3b3af857fb..30f1deeb368 100644 --- a/homeassistant/components/purpleair/diagnostics.py +++ b/homeassistant/components/purpleair/diagnostics.py @@ -37,7 +37,7 @@ async def async_get_config_entry_diagnostics( return async_redact_data( { "entry": entry.as_dict(), - "data": coordinator.data.dict(), + "data": coordinator.data.dict(), # type: ignore[deprecated] }, TO_REDACT, ) diff --git a/homeassistant/components/purpleair/manifest.json b/homeassistant/components/purpleair/manifest.json index cf74365d6d8..87cb375c347 100644 --- a/homeassistant/components/purpleair/manifest.json +++ b/homeassistant/components/purpleair/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/purpleair", "iot_class": "cloud_polling", - "requirements": ["aiopurpleair==2022.12.1"] + "requirements": ["aiopurpleair==2023.12.0"] } diff --git a/homeassistant/components/unifiprotect/services.py b/homeassistant/components/unifiprotect/services.py index 119fe52756c..9c045164d6d 100644 --- a/homeassistant/components/unifiprotect/services.py +++ b/homeassistant/components/unifiprotect/services.py @@ -6,7 +6,7 @@ import asyncio import functools from typing import Any, cast -from pydantic import ValidationError +from pydantic.v1 import ValidationError from uiprotect.api import ProtectApiClient from uiprotect.data import Camera, Chime from uiprotect.exceptions import ClientError diff --git a/homeassistant/components/xbox/manifest.json b/homeassistant/components/xbox/manifest.json 
index 30a6c3bc700..3fc2071e66b 100644 --- a/homeassistant/components/xbox/manifest.json +++ b/homeassistant/components/xbox/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["auth", "application_credentials"], "documentation": "https://www.home-assistant.io/integrations/xbox", "iot_class": "cloud_polling", - "requirements": ["xbox-webapi==2.0.11"] + "requirements": ["xbox-webapi==2.1.0"] } diff --git a/homeassistant/components/zwave_js/triggers/event.py b/homeassistant/components/zwave_js/triggers/event.py index 9938d08408c..db52683c173 100644 --- a/homeassistant/components/zwave_js/triggers/event.py +++ b/homeassistant/components/zwave_js/triggers/event.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable import functools -from pydantic import ValidationError +from pydantic.v1 import ValidationError import voluptuous as vol from zwave_js_server.client import Client from zwave_js_server.model.controller import CONTROLLER_EVENT_MODEL_MAP diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index cd45f15fe7c..932c7439336 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -125,9 +125,8 @@ multidict>=6.0.2 # Version 2.0 added typing, prevent accidental fallbacks backoff>=2.0 -# Required to avoid breaking (#101042). -# v2 has breaking changes (#99218). -pydantic==1.10.19 +# ensure pydantic version does not float since it might have breaking changes +pydantic==2.10.3 # Required for Python 3.12.4 compatibility (#119223). mashumaro>=3.13.1 diff --git a/requirements_all.txt b/requirements_all.txt index 3f619ac2e0e..85431a1ec9e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -328,7 +328,7 @@ aiopegelonline==0.1.0 aiopulse==0.4.6 # homeassistant.components.purpleair -aiopurpleair==2022.12.1 +aiopurpleair==2023.12.0 # homeassistant.components.hunterdouglas_powerview aiopvapi==3.1.1 @@ -1781,7 +1781,7 @@ pyatmo==8.1.0 pyatv==0.16.0 # homeassistant.components.aussie_broadband -pyaussiebb==0.0.15 +pyaussiebb==0.1.4 # homeassistant.components.balboa pybalboa==1.0.2 @@ -3020,7 +3020,7 @@ wolf-comm==0.0.15 wyoming==1.5.4 # homeassistant.components.xbox -xbox-webapi==2.0.11 +xbox-webapi==2.1.0 # homeassistant.components.xiaomi_ble xiaomi-ble==0.33.0 diff --git a/requirements_test.txt b/requirements_test.txt index 06a0fd035d3..50e5957bf96 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -14,7 +14,7 @@ license-expression==30.4.0 mock-open==1.4.0 mypy-dev==1.14.0a6 pre-commit==4.0.0 -pydantic==1.10.19 +pydantic==2.10.3 pylint==3.3.2 pylint-per-file-ignores==1.3.2 pipdeptree==2.23.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index bfddb35b041..5cf2a1f3e34 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -310,7 +310,7 @@ aiopegelonline==0.1.0 aiopulse==0.4.6 # homeassistant.components.purpleair -aiopurpleair==2022.12.1 +aiopurpleair==2023.12.0 # homeassistant.components.hunterdouglas_powerview aiopvapi==3.1.1 @@ -1455,7 +1455,7 @@ pyatmo==8.1.0 pyatv==0.16.0 # homeassistant.components.aussie_broadband -pyaussiebb==0.0.15 +pyaussiebb==0.1.4 # homeassistant.components.balboa pybalboa==1.0.2 @@ -2415,7 +2415,7 @@ wolf-comm==0.0.15 wyoming==1.5.4 # homeassistant.components.xbox -xbox-webapi==2.0.11 +xbox-webapi==2.1.0 # homeassistant.components.xiaomi_ble xiaomi-ble==0.33.0 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index 97ffcac79a4..648798f79c8 100755 --- 
a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -158,9 +158,8 @@ multidict>=6.0.2 # Version 2.0 added typing, prevent accidental fallbacks backoff>=2.0 -# Required to avoid breaking (#101042). -# v2 has breaking changes (#99218). -pydantic==1.10.19 +# ensure pydantic version does not float since it might have breaking changes +pydantic==2.10.3 # Required for Python 3.12.4 compatibility (#119223). mashumaro>=3.13.1 diff --git a/tests/components/lacrosse_view/test_config_flow.py b/tests/components/lacrosse_view/test_config_flow.py index 9ca7fb78bdd..f953d9a3841 100644 --- a/tests/components/lacrosse_view/test_config_flow.py +++ b/tests/components/lacrosse_view/test_config_flow.py @@ -30,7 +30,7 @@ async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: ), patch( "lacrosse_view.LaCrosse.get_locations", - return_value=[Location(id=1, name="Test")], + return_value=[Location(id="1", name="Test")], ), ): result2 = await hass.config_entries.flow.async_configure( @@ -206,7 +206,7 @@ async def test_already_configured_device( ), patch( "lacrosse_view.LaCrosse.get_locations", - return_value=[Location(id=1, name="Test")], + return_value=[Location(id="1", name="Test")], ), ): result2 = await hass.config_entries.flow.async_configure( @@ -262,7 +262,7 @@ async def test_reauth(hass: HomeAssistant) -> None: patch("lacrosse_view.LaCrosse.login", return_value=True), patch( "lacrosse_view.LaCrosse.get_locations", - return_value=[Location(id=1, name="Test")], + return_value=[Location(id="1", name="Test")], ), ): result2 = await hass.config_entries.flow.async_configure( diff --git a/tests/components/peco/test_sensor.py b/tests/components/peco/test_sensor.py index 9cbef9fa1e6..4c9a3fca104 100644 --- a/tests/components/peco/test_sensor.py +++ b/tests/components/peco/test_sensor.py @@ -39,7 +39,7 @@ async def test_sensor_available( "peco.PecoOutageApi.get_outage_totals", return_value=OutageResults( customers_out=123, - percent_customers_out=15.589, + percent_customers_out=15, outage_count=456, customers_served=789, ), @@ -74,7 +74,7 @@ async def test_sensor_available( "peco.PecoOutageApi.get_outage_count", return_value=OutageResults( customers_out=123, - percent_customers_out=15.589, + percent_customers_out=15, outage_count=456, customers_served=789, ), diff --git a/tests/components/youtube/snapshots/test_sensor.ambr b/tests/components/youtube/snapshots/test_sensor.ambr index dce546b4803..f4549e89c8c 100644 --- a/tests/components/youtube/snapshots/test_sensor.ambr +++ b/tests/components/youtube/snapshots/test_sensor.ambr @@ -4,7 +4,7 @@ 'attributes': ReadOnlyDict({ 'entity_picture': 'https://i.ytimg.com/vi/wysukDrMdqU/maxresdefault.jpg', 'friendly_name': 'Google for Developers Latest upload', - 'published_at': datetime.datetime(2023, 5, 11, 0, 20, 46, tzinfo=datetime.timezone.utc), + 'published_at': datetime.datetime(2023, 5, 11, 0, 20, 46, tzinfo=TzInfo(UTC)), 'video_id': 'wysukDrMdqU', }), 'context': , From 7fb5b17ac5fd098898a70f95d9422e340617718c Mon Sep 17 00:00:00 2001 From: Stefano Angeleri Date: Tue, 10 Dec 2024 18:29:28 +0100 Subject: [PATCH 0479/1198] Bump pydaikin to 2.13.8 (#132759) --- homeassistant/components/daikin/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/daikin/manifest.json b/homeassistant/components/daikin/manifest.json index f6e9cb78efb..f794d97a9ba 100644 --- a/homeassistant/components/daikin/manifest.json +++ 
b/homeassistant/components/daikin/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/daikin", "iot_class": "local_polling", "loggers": ["pydaikin"], - "requirements": ["pydaikin==2.13.7"], + "requirements": ["pydaikin==2.13.8"], "zeroconf": ["_dkapi._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index 85431a1ec9e..ff8950eb65c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1835,7 +1835,7 @@ pycsspeechtts==1.0.8 # pycups==2.0.4 # homeassistant.components.daikin -pydaikin==2.13.7 +pydaikin==2.13.8 # homeassistant.components.danfoss_air pydanfossair==0.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5cf2a1f3e34..536b67e393b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1488,7 +1488,7 @@ pycountry==24.6.1 pycsspeechtts==1.0.8 # homeassistant.components.daikin -pydaikin==2.13.7 +pydaikin==2.13.8 # homeassistant.components.deako pydeako==0.6.0 From 76b73fa9b1b2ec36c57f83847abf2be8581cf7c1 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Tue, 10 Dec 2024 19:03:43 +0100 Subject: [PATCH 0480/1198] Use floats instead of datetime in statistics (#132746) * Use floats instead of datetime in statistics * check if debug log --- homeassistant/components/statistics/sensor.py | 236 +++++++++--------- 1 file changed, 120 insertions(+), 116 deletions(-) diff --git a/homeassistant/components/statistics/sensor.py b/homeassistant/components/statistics/sensor.py index 8988e0cdd63..5252c23fd3d 100644 --- a/homeassistant/components/statistics/sensor.py +++ b/homeassistant/components/statistics/sensor.py @@ -9,6 +9,7 @@ from datetime import datetime, timedelta import logging import math import statistics +import time from typing import Any, cast import voluptuous as vol @@ -100,9 +101,7 @@ STAT_VARIANCE = "variance" def _callable_characteristic_fn( characteristic: str, binary: bool -) -> Callable[ - [deque[bool | float], deque[datetime], int], float | int | datetime | None -]: +) -> Callable[[deque[bool | float], deque[float], int], float | int | datetime | None]: """Return the function callable of one characteristic function.""" Callable[[deque[bool | float], deque[datetime], int], datetime | int | float | None] if binary: @@ -114,45 +113,41 @@ def _callable_characteristic_fn( def _stat_average_linear( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> float | None: if len(states) == 1: return states[0] if len(states) >= 2: area: float = 0 for i in range(1, len(states)): - area += ( - 0.5 - * (states[i] + states[i - 1]) - * (ages[i] - ages[i - 1]).total_seconds() - ) - age_range_seconds = (ages[-1] - ages[0]).total_seconds() + area += 0.5 * (states[i] + states[i - 1]) * (ages[i] - ages[i - 1]) + age_range_seconds = ages[-1] - ages[0] return area / age_range_seconds return None def _stat_average_step( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> float | None: if len(states) == 1: return states[0] if len(states) >= 2: area: float = 0 for i in range(1, len(states)): - area += states[i - 1] * (ages[i] - ages[i - 1]).total_seconds() - age_range_seconds = (ages[-1] - ages[0]).total_seconds() + area += states[i - 1] * (ages[i] - ages[i - 1]) + age_range_seconds = ages[-1] - ages[0] return area / age_range_seconds return None def _stat_average_timeless( - states: deque[bool | float], ages: 
deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> float | None: return _stat_mean(states, ages, percentile) def _stat_change( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> float | None: if len(states) > 0: return states[-1] - states[0] @@ -160,7 +155,7 @@ def _stat_change( def _stat_change_sample( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> float | None: if len(states) > 1: return (states[-1] - states[0]) / (len(states) - 1) @@ -168,55 +163,55 @@ def _stat_change_sample( def _stat_change_second( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> float | None: if len(states) > 1: - age_range_seconds = (ages[-1] - ages[0]).total_seconds() + age_range_seconds = ages[-1] - ages[0] if age_range_seconds > 0: return (states[-1] - states[0]) / age_range_seconds return None def _stat_count( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> int | None: return len(states) def _stat_datetime_newest( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> datetime | None: if len(states) > 0: - return ages[-1] + return dt_util.utc_from_timestamp(ages[-1]) return None def _stat_datetime_oldest( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> datetime | None: if len(states) > 0: - return ages[0] + return dt_util.utc_from_timestamp(ages[0]) return None def _stat_datetime_value_max( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> datetime | None: if len(states) > 0: - return ages[states.index(max(states))] + return dt_util.utc_from_timestamp(ages[states.index(max(states))]) return None def _stat_datetime_value_min( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> datetime | None: if len(states) > 0: - return ages[states.index(min(states))] + return dt_util.utc_from_timestamp(ages[states.index(min(states))]) return None def _stat_distance_95_percent_of_values( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> float | None: if len(states) >= 1: return ( @@ -226,7 +221,7 @@ def _stat_distance_95_percent_of_values( def _stat_distance_99_percent_of_values( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> float | None: if len(states) >= 1: return ( @@ -236,7 +231,7 @@ def _stat_distance_99_percent_of_values( def _stat_distance_absolute( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> float | None: if len(states) > 0: return max(states) - min(states) @@ -244,7 +239,7 @@ def _stat_distance_absolute( def _stat_mean( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> 
float | None: if len(states) > 0: return statistics.mean(states) @@ -252,7 +247,7 @@ def _stat_mean( def _stat_mean_circular( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> float | None: if len(states) > 0: sin_sum = sum(math.sin(math.radians(x)) for x in states) @@ -262,7 +257,7 @@ def _stat_mean_circular( def _stat_median( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> float | None: if len(states) > 0: return statistics.median(states) @@ -270,7 +265,7 @@ def _stat_median( def _stat_noisiness( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> float | None: if len(states) == 1: return 0.0 @@ -282,7 +277,7 @@ def _stat_noisiness( def _stat_percentile( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> float | None: if len(states) == 1: return states[0] @@ -293,7 +288,7 @@ def _stat_percentile( def _stat_standard_deviation( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> float | None: if len(states) == 1: return 0.0 @@ -303,7 +298,7 @@ def _stat_standard_deviation( def _stat_sum( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> float | None: if len(states) > 0: return sum(states) @@ -311,7 +306,7 @@ def _stat_sum( def _stat_sum_differences( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> float | None: if len(states) == 1: return 0.0 @@ -323,7 +318,7 @@ def _stat_sum_differences( def _stat_sum_differences_nonnegative( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> float | None: if len(states) == 1: return 0.0 @@ -336,13 +331,13 @@ def _stat_sum_differences_nonnegative( def _stat_total( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> float | None: return _stat_sum(states, ages, percentile) def _stat_value_max( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> float | None: if len(states) > 0: return max(states) @@ -350,7 +345,7 @@ def _stat_value_max( def _stat_value_min( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> float | None: if len(states) > 0: return min(states) @@ -358,7 +353,7 @@ def _stat_value_min( def _stat_variance( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> float | None: if len(states) == 1: return 0.0 @@ -371,7 +366,7 @@ def _stat_variance( def _stat_binary_average_step( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> float | None: if len(states) == 1: return 100.0 * int(states[0] is True) @@ -379,50 +374,50 @@ def _stat_binary_average_step( on_seconds: float = 0 for i in 
range(1, len(states)): if states[i - 1] is True: - on_seconds += (ages[i] - ages[i - 1]).total_seconds() - age_range_seconds = (ages[-1] - ages[0]).total_seconds() + on_seconds += ages[i] - ages[i - 1] + age_range_seconds = ages[-1] - ages[0] return 100 / age_range_seconds * on_seconds return None def _stat_binary_average_timeless( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> float | None: return _stat_binary_mean(states, ages, percentile) def _stat_binary_count( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> int | None: return len(states) def _stat_binary_count_on( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> int | None: return states.count(True) def _stat_binary_count_off( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> int | None: return states.count(False) def _stat_binary_datetime_newest( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> datetime | None: return _stat_datetime_newest(states, ages, percentile) def _stat_binary_datetime_oldest( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> datetime | None: return _stat_datetime_oldest(states, ages, percentile) def _stat_binary_mean( - states: deque[bool | float], ages: deque[datetime], percentile: int + states: deque[bool | float], ages: deque[float], percentile: int ) -> float | None: if len(states) > 0: return 100.0 / len(states) * states.count(True) @@ -630,12 +625,8 @@ async def async_setup_entry( sampling_size = int(sampling_size) max_age = None - if max_age_input := entry.options.get(CONF_MAX_AGE): - max_age = timedelta( - hours=max_age_input["hours"], - minutes=max_age_input["minutes"], - seconds=max_age_input["seconds"], - ) + if max_age := entry.options.get(CONF_MAX_AGE): + max_age = timedelta(**max_age) async_add_entities( [ @@ -688,20 +679,22 @@ class StatisticsSensor(SensorEntity): ) self._state_characteristic: str = state_characteristic self._samples_max_buffer_size: int | None = samples_max_buffer_size - self._samples_max_age: timedelta | None = samples_max_age + self._samples_max_age: float | None = ( + samples_max_age.total_seconds() if samples_max_age else None + ) self.samples_keep_last: bool = samples_keep_last self._precision: int = precision self._percentile: int = percentile self._attr_available: bool = False - self.states: deque[float | bool] = deque(maxlen=self._samples_max_buffer_size) - self.ages: deque[datetime] = deque(maxlen=self._samples_max_buffer_size) + self.states: deque[float | bool] = deque(maxlen=samples_max_buffer_size) + self.ages: deque[float] = deque(maxlen=samples_max_buffer_size) self._attr_extra_state_attributes = {} self._state_characteristic_fn: Callable[ - [deque[bool | float], deque[datetime], int], + [deque[bool | float], deque[float], int], float | int | datetime | None, - ] = _callable_characteristic_fn(self._state_characteristic, self.is_binary) + ] = _callable_characteristic_fn(state_characteristic, self.is_binary) self._update_listener: CALLBACK_TYPE | None = None self._preview_callback: Callable[[str, Mapping[str, Any]], 
None] | None = None @@ -807,7 +800,7 @@ class StatisticsSensor(SensorEntity): self.states.append(new_state.state == "on") else: self.states.append(float(new_state.state)) - self.ages.append(new_state.last_reported) + self.ages.append(new_state.last_reported_timestamp) self._attr_extra_state_attributes[STAT_SOURCE_VALUE_VALID] = True except ValueError: self._attr_extra_state_attributes[STAT_SOURCE_VALUE_VALID] = False @@ -840,27 +833,24 @@ class StatisticsSensor(SensorEntity): base_unit: str | None = new_state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) unit: str | None = None - if self.is_binary and self._state_characteristic in STATS_BINARY_PERCENTAGE: + stat_type = self._state_characteristic + if self.is_binary and stat_type in STATS_BINARY_PERCENTAGE: unit = PERCENTAGE elif not base_unit: unit = None - elif self._state_characteristic in STATS_NUMERIC_RETAIN_UNIT: + elif stat_type in STATS_NUMERIC_RETAIN_UNIT: unit = base_unit - elif ( - self._state_characteristic in STATS_NOT_A_NUMBER - or self._state_characteristic - in ( - STAT_COUNT, - STAT_COUNT_BINARY_ON, - STAT_COUNT_BINARY_OFF, - ) + elif stat_type in STATS_NOT_A_NUMBER or stat_type in ( + STAT_COUNT, + STAT_COUNT_BINARY_ON, + STAT_COUNT_BINARY_OFF, ): unit = None - elif self._state_characteristic == STAT_VARIANCE: + elif stat_type == STAT_VARIANCE: unit = base_unit + "²" - elif self._state_characteristic == STAT_CHANGE_SAMPLE: + elif stat_type == STAT_CHANGE_SAMPLE: unit = base_unit + "/sample" - elif self._state_characteristic == STAT_CHANGE_SECOND: + elif stat_type == STAT_CHANGE_SECOND: unit = base_unit + "/s" return unit @@ -876,9 +866,10 @@ class StatisticsSensor(SensorEntity): """ device_class: SensorDeviceClass | None = None - if self._state_characteristic in STATS_DATETIME: + stat_type = self._state_characteristic + if stat_type in STATS_DATETIME: return SensorDeviceClass.TIMESTAMP - if self._state_characteristic in STATS_NUMERIC_RETAIN_UNIT: + if stat_type in STATS_NUMERIC_RETAIN_UNIT: device_class = new_state.attributes.get(ATTR_DEVICE_CLASS) if device_class is None: return None @@ -917,55 +908,60 @@ class StatisticsSensor(SensorEntity): return None return SensorStateClass.MEASUREMENT - def _purge_old_states(self, max_age: timedelta) -> None: + def _purge_old_states(self, max_age: float) -> None: """Remove states which are older than a given age.""" - now = dt_util.utcnow() + now_timestamp = time.time() + debug = _LOGGER.isEnabledFor(logging.DEBUG) - _LOGGER.debug( - "%s: purging records older then %s(%s)(keep_last_sample: %s)", - self.entity_id, - dt_util.as_local(now - max_age), - self._samples_max_age, - self.samples_keep_last, - ) + if debug: + _LOGGER.debug( + "%s: purging records older then %s(%s)(keep_last_sample: %s)", + self.entity_id, + dt_util.as_local(dt_util.utc_from_timestamp(now_timestamp - max_age)), + self._samples_max_age, + self.samples_keep_last, + ) - while self.ages and (now - self.ages[0]) > max_age: + while self.ages and (now_timestamp - self.ages[0]) > max_age: if self.samples_keep_last and len(self.ages) == 1: # Under normal circumstance this will not be executed, as a purge will not # be scheduled for the last value if samples_keep_last is enabled. # If this happens to be called outside normal scheduling logic or a # source sensor update, this ensures the last value is preserved. 
- _LOGGER.debug( - "%s: preserving expired record with datetime %s(%s)", - self.entity_id, - dt_util.as_local(self.ages[0]), - (now - self.ages[0]), - ) + if debug: + _LOGGER.debug( + "%s: preserving expired record with datetime %s(%s)", + self.entity_id, + dt_util.as_local(dt_util.utc_from_timestamp(self.ages[0])), + dt_util.utc_from_timestamp(now_timestamp - self.ages[0]), + ) break - _LOGGER.debug( - "%s: purging record with datetime %s(%s)", - self.entity_id, - dt_util.as_local(self.ages[0]), - (now - self.ages[0]), - ) + if debug: + _LOGGER.debug( + "%s: purging record with datetime %s(%s)", + self.entity_id, + dt_util.as_local(dt_util.utc_from_timestamp(self.ages[0])), + dt_util.utc_from_timestamp(now_timestamp - self.ages[0]), + ) self.ages.popleft() self.states.popleft() @callback - def _async_next_to_purge_timestamp(self) -> datetime | None: + def _async_next_to_purge_timestamp(self) -> float | None: """Find the timestamp when the next purge would occur.""" if self.ages and self._samples_max_age: if self.samples_keep_last and len(self.ages) == 1: # Preserve the most recent entry if it is the only value. # Do not schedule another purge. When a new source # value is inserted it will restart purge cycle. - _LOGGER.debug( - "%s: skipping purge cycle for last record with datetime %s(%s)", - self.entity_id, - dt_util.as_local(self.ages[0]), - (dt_util.utcnow() - self.ages[0]), - ) + if _LOGGER.isEnabledFor(logging.DEBUG): + _LOGGER.debug( + "%s: skipping purge cycle for last record with datetime %s(%s)", + self.entity_id, + dt_util.as_local(dt_util.utc_from_timestamp(self.ages[0])), + (dt_util.utcnow() - dt_util.utc_from_timestamp(self.ages[0])), + ) return None # Take the oldest entry from the ages list and add the configured max_age. # If executed after purging old states, the result is the next timestamp @@ -990,10 +986,17 @@ class StatisticsSensor(SensorEntity): # By basing updates off the timestamps of sampled data we avoid updating # when none of the observed entities change. 
if timestamp := self._async_next_to_purge_timestamp(): - _LOGGER.debug("%s: scheduling update at %s", self.entity_id, timestamp) + if _LOGGER.isEnabledFor(logging.DEBUG): + _LOGGER.debug( + "%s: scheduling update at %s", + self.entity_id, + dt_util.utc_from_timestamp(timestamp), + ) self._async_cancel_update_listener() self._update_listener = async_track_point_in_utc_time( - self.hass, self._async_scheduled_update, timestamp + self.hass, + self._async_scheduled_update, + dt_util.utc_from_timestamp(timestamp), ) @callback @@ -1017,9 +1020,11 @@ class StatisticsSensor(SensorEntity): """Fetch the states from the database.""" _LOGGER.debug("%s: initializing values from the database", self.entity_id) lower_entity_id = self._source_entity_id.lower() - if self._samples_max_age is not None: + if (max_age := self._samples_max_age) is not None: start_date = ( - dt_util.utcnow() - self._samples_max_age - timedelta(microseconds=1) + dt_util.utcnow() + - timedelta(seconds=max_age) + - timedelta(microseconds=1) ) _LOGGER.debug( "%s: retrieve records not older then %s", @@ -1071,11 +1076,10 @@ class StatisticsSensor(SensorEntity): len(self.states) / self._samples_max_buffer_size, 2 ) - if self._samples_max_age is not None: + if (max_age := self._samples_max_age) is not None: if len(self.states) >= 1: self._attr_extra_state_attributes[STAT_AGE_COVERAGE_RATIO] = round( - (self.ages[-1] - self.ages[0]).total_seconds() - / self._samples_max_age.total_seconds(), + (self.ages[-1] - self.ages[0]) / max_age, 2, ) else: From 5dc27573243379042299c7174010ff1ebca4f578 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Tue, 10 Dec 2024 19:35:21 +0100 Subject: [PATCH 0481/1198] Add quality scale to Nord Pool (#132415) * Add quality scale to Nord Pool * Update * a * fix --- .../components/nordpool/quality_scale.yaml | 95 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 95 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/nordpool/quality_scale.yaml diff --git a/homeassistant/components/nordpool/quality_scale.yaml b/homeassistant/components/nordpool/quality_scale.yaml new file mode 100644 index 00000000000..2cb0b655b17 --- /dev/null +++ b/homeassistant/components/nordpool/quality_scale.yaml @@ -0,0 +1,95 @@ +rules: + # Bronze + config-flow: done + test-before-configure: done + unique-config-entry: done + config-flow-test-coverage: done + runtime-data: done + test-before-setup: done + appropriate-polling: done + entity-unique-id: done + has-entity-name: done + entity-event-setup: + status: exempt + comment: | + Entities doesn't subscribe to events. + dependency-transparency: done + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + common-modules: done + docs-high-level-description: done + docs-installation-instructions: todo + docs-removal-instructions: todo + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. + brands: done + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: + status: exempt + comment: | + No actions. + reauthentication-flow: + status: exempt + comment: | + This integration does not require authentication. + parallel-updates: todo + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: + status: exempt + comment: | + This integration has no options flow. 
+ + # Gold + entity-translations: done + entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: done + discovery: + status: exempt + comment: | + No discovery, cloud service + stale-devices: + status: exempt + comment: | + This integration devices (services) will be removed with config entry if needed. + diagnostics: done + exception-translations: done + icon-translations: done + reconfiguration-flow: done + dynamic-devices: + status: exempt + comment: | + This integration has fixed devices. + discovery-update-info: + status: exempt + comment: | + No discovery + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. + docs-use-cases: todo + docs-supported-devices: + status: exempt + comment: | + Only service, no device + docs-supported-functions: done + docs-data-update: todo + docs-known-limitations: todo + docs-troubleshooting: todo + docs-examples: todo + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 119a66408b6..72f01f3d1d1 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -732,7 +732,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "no_ip", "noaa_tides", "nobo_hub", - "nordpool", "norway_air", "notify_events", "notion", From 1b300a438931cd080c2d8bbf40d0bef74fd5e933 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Tue, 10 Dec 2024 20:52:39 +0100 Subject: [PATCH 0482/1198] Set config-flow rule in IQS to todo in Bring integration (#132855) Set config-flow rule in IQS to todo --- homeassistant/components/bring/quality_scale.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/bring/quality_scale.yaml b/homeassistant/components/bring/quality_scale.yaml index 922306930f2..1fdb3f13f1b 100644 --- a/homeassistant/components/bring/quality_scale.yaml +++ b/homeassistant/components/bring/quality_scale.yaml @@ -7,7 +7,7 @@ rules: brands: done common-modules: done config-flow-test-coverage: done - config-flow: done + config-flow: todo dependency-transparency: done docs-actions: done docs-high-level-description: todo From fb3ffaf18ded9c80a7e3e32d19c030788b745dcd Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 10 Dec 2024 20:59:12 +0100 Subject: [PATCH 0483/1198] Migrate demo lights to use Kelvin (#132837) * Migrate demo lights to use Kelvin * Adjust google_assistant tests --- homeassistant/components/demo/light.py | 12 ++++++------ tests/components/google_assistant/test_smart_home.py | 4 ++-- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/demo/light.py b/homeassistant/components/demo/light.py index c859fef3b76..8bb4e403c3d 100644 --- a/homeassistant/components/demo/light.py +++ b/homeassistant/components/demo/light.py @@ -7,7 +7,7 @@ from typing import Any from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, ATTR_RGBW_COLOR, @@ -28,7 +28,7 @@ LIGHT_COLORS = [(56, 86), (345, 75)] LIGHT_EFFECT_LIST = ["rainbow", "none"] -LIGHT_TEMPS = [240, 380] +LIGHT_TEMPS = [4166, 2631] SUPPORT_DEMO = {ColorMode.HS, ColorMode.COLOR_TEMP} SUPPORT_DEMO_HS_WHITE = {ColorMode.HS, ColorMode.WHITE} @@ -185,8 +185,8 @@ class DemoLight(LightEntity): return self._rgbww_color @property - def color_temp(self) -> int: - 
"""Return the CT color temperature.""" + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" return self._ct @property @@ -216,9 +216,9 @@ class DemoLight(LightEntity): if ATTR_BRIGHTNESS in kwargs: self._brightness = kwargs[ATTR_BRIGHTNESS] - if ATTR_COLOR_TEMP in kwargs: + if ATTR_COLOR_TEMP_KELVIN in kwargs: self._color_mode = ColorMode.COLOR_TEMP - self._ct = kwargs[ATTR_COLOR_TEMP] + self._ct = kwargs[ATTR_COLOR_TEMP_KELVIN] if ATTR_EFFECT in kwargs: self._effect = kwargs[ATTR_EFFECT] diff --git a/tests/components/google_assistant/test_smart_home.py b/tests/components/google_assistant/test_smart_home.py index f1b7108c348..c5e17155067 100644 --- a/tests/components/google_assistant/test_smart_home.py +++ b/tests/components/google_assistant/test_smart_home.py @@ -402,7 +402,7 @@ async def test_query_message(hass: HomeAssistant) -> None: light.async_write_ha_state() light2 = DemoLight( - None, "Another Light", state=True, hs_color=(180, 75), ct=400, brightness=78 + None, "Another Light", state=True, hs_color=(180, 75), ct=2500, brightness=78 ) light2.hass = hass light2.entity_id = "light.another_light" @@ -410,7 +410,7 @@ async def test_query_message(hass: HomeAssistant) -> None: light2._attr_name = "Another Light" light2.async_write_ha_state() - light3 = DemoLight(None, "Color temp Light", state=True, ct=400, brightness=200) + light3 = DemoLight(None, "Color temp Light", state=True, ct=2500, brightness=200) light3.hass = hass light3.entity_id = "light.color_temp_light" light3._attr_device_info = None From b46392041f36cc932d0a12eb43af20ecfb7f25db Mon Sep 17 00:00:00 2001 From: Jonas Fors Lellky Date: Tue, 10 Dec 2024 21:44:00 +0100 Subject: [PATCH 0484/1198] Add model_id to flexit (bacnet) entity (#132875) * Add model_id to flexit (bacnet) entity * Add model to mock --- homeassistant/components/flexit_bacnet/entity.py | 1 + tests/components/flexit_bacnet/conftest.py | 1 + 2 files changed, 2 insertions(+) diff --git a/homeassistant/components/flexit_bacnet/entity.py b/homeassistant/components/flexit_bacnet/entity.py index bd92550db19..38efa838c93 100644 --- a/homeassistant/components/flexit_bacnet/entity.py +++ b/homeassistant/components/flexit_bacnet/entity.py @@ -26,6 +26,7 @@ class FlexitEntity(CoordinatorEntity[FlexitCoordinator]): name=coordinator.device.device_name, manufacturer="Flexit", model="Nordic", + model_id=coordinator.device.model, serial_number=coordinator.device.serial_number, ) diff --git a/tests/components/flexit_bacnet/conftest.py b/tests/components/flexit_bacnet/conftest.py index cc7c9fa0570..a6205bac506 100644 --- a/tests/components/flexit_bacnet/conftest.py +++ b/tests/components/flexit_bacnet/conftest.py @@ -44,6 +44,7 @@ def mock_flexit_bacnet() -> Generator[AsyncMock]: ): flexit_bacnet.serial_number = "0000-0001" flexit_bacnet.device_name = "Device Name" + flexit_bacnet.model = "S4 RER" flexit_bacnet.room_temperature = 19.0 flexit_bacnet.air_temp_setpoint_away = 18.0 flexit_bacnet.air_temp_setpoint_home = 22.0 From 77debcbe8b2c46c85d147ce21274159a2a44803c Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Tue, 10 Dec 2024 22:28:30 +0100 Subject: [PATCH 0485/1198] Update numpy to 2.2.0 (#132874) --- homeassistant/components/compensation/manifest.json | 2 +- homeassistant/components/iqvia/manifest.json | 2 +- homeassistant/components/stream/manifest.json | 2 +- homeassistant/components/tensorflow/manifest.json | 2 +- homeassistant/components/trend/manifest.json | 2 +- 
homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- script/gen_requirements_all.py | 2 +- 9 files changed, 9 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/compensation/manifest.json b/homeassistant/components/compensation/manifest.json index 5b3cc5ac2ac..ac82938b97b 100644 --- a/homeassistant/components/compensation/manifest.json +++ b/homeassistant/components/compensation/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/compensation", "iot_class": "calculated", "quality_scale": "legacy", - "requirements": ["numpy==2.1.3"] + "requirements": ["numpy==2.2.0"] } diff --git a/homeassistant/components/iqvia/manifest.json b/homeassistant/components/iqvia/manifest.json index 11c99a7428f..0236b72c89d 100644 --- a/homeassistant/components/iqvia/manifest.json +++ b/homeassistant/components/iqvia/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["pyiqvia"], - "requirements": ["numpy==2.1.3", "pyiqvia==2022.04.0"] + "requirements": ["numpy==2.2.0", "pyiqvia==2022.04.0"] } diff --git a/homeassistant/components/stream/manifest.json b/homeassistant/components/stream/manifest.json index fdf81d99e65..b9368565e2f 100644 --- a/homeassistant/components/stream/manifest.json +++ b/homeassistant/components/stream/manifest.json @@ -7,5 +7,5 @@ "integration_type": "system", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["PyTurboJPEG==1.7.5", "av==13.1.0", "numpy==2.1.3"] + "requirements": ["PyTurboJPEG==1.7.5", "av==13.1.0", "numpy==2.2.0"] } diff --git a/homeassistant/components/tensorflow/manifest.json b/homeassistant/components/tensorflow/manifest.json index 1ddfa188c0a..16de386b15d 100644 --- a/homeassistant/components/tensorflow/manifest.json +++ b/homeassistant/components/tensorflow/manifest.json @@ -10,7 +10,7 @@ "tensorflow==2.5.0", "tf-models-official==2.5.0", "pycocotools==2.0.6", - "numpy==2.1.3", + "numpy==2.2.0", "Pillow==11.0.0" ] } diff --git a/homeassistant/components/trend/manifest.json b/homeassistant/components/trend/manifest.json index d7981105fd2..85012939fc1 100644 --- a/homeassistant/components/trend/manifest.json +++ b/homeassistant/components/trend/manifest.json @@ -7,5 +7,5 @@ "integration_type": "helper", "iot_class": "calculated", "quality_scale": "internal", - "requirements": ["numpy==2.1.3"] + "requirements": ["numpy==2.2.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 932c7439336..726dad56ccb 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -115,7 +115,7 @@ httpcore==1.0.5 hyperframe>=5.2.0 # Ensure we run compatible with musllinux build env -numpy==2.1.3 +numpy==2.2.0 pandas~=2.2.3 # Constrain multidict to avoid typing issues diff --git a/requirements_all.txt b/requirements_all.txt index ff8950eb65c..872a2123a9c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1494,7 +1494,7 @@ numato-gpio==0.13.0 # homeassistant.components.stream # homeassistant.components.tensorflow # homeassistant.components.trend -numpy==2.1.3 +numpy==2.2.0 # homeassistant.components.nyt_games nyt_games==0.4.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 536b67e393b..5b428194aa2 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1245,7 +1245,7 @@ numato-gpio==0.13.0 # homeassistant.components.stream # homeassistant.components.tensorflow # 
homeassistant.components.trend -numpy==2.1.3 +numpy==2.2.0 # homeassistant.components.nyt_games nyt_games==0.4.4 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index 648798f79c8..fa46710d100 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -148,7 +148,7 @@ httpcore==1.0.5 hyperframe>=5.2.0 # Ensure we run compatible with musllinux build env -numpy==2.1.3 +numpy==2.2.0 pandas~=2.2.3 # Constrain multidict to avoid typing issues From 355e80aa56cf087f7b5b545e4209b2cb718eea87 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Tue, 10 Dec 2024 19:01:50 -0800 Subject: [PATCH 0486/1198] Test the google tasks api connection in setup (#132657) Improve google tasks setup --- .../components/google_tasks/__init__.py | 25 +++++--- homeassistant/components/google_tasks/todo.py | 14 ++--- .../components/google_tasks/types.py | 19 ++++++ tests/components/google_tasks/conftest.py | 40 +++++++++++- tests/components/google_tasks/test_init.py | 28 +++++++++ tests/components/google_tasks/test_todo.py | 62 ++----------------- 6 files changed, 115 insertions(+), 73 deletions(-) create mode 100644 homeassistant/components/google_tasks/types.py diff --git a/homeassistant/components/google_tasks/__init__.py b/homeassistant/components/google_tasks/__init__.py index 29a1b20f2bc..2ff22068ca9 100644 --- a/homeassistant/components/google_tasks/__init__.py +++ b/homeassistant/components/google_tasks/__init__.py @@ -4,7 +4,6 @@ from __future__ import annotations from aiohttp import ClientError, ClientResponseError -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady @@ -12,11 +11,17 @@ from homeassistant.helpers import config_entry_oauth2_flow from . 
import api from .const import DOMAIN +from .exceptions import GoogleTasksApiError +from .types import GoogleTasksConfigEntry, GoogleTasksData + +__all__ = [ + "DOMAIN", +] PLATFORMS: list[Platform] = [Platform.TODO] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: GoogleTasksConfigEntry) -> bool: """Set up Google Tasks from a config entry.""" implementation = ( await config_entry_oauth2_flow.async_get_config_entry_implementation( @@ -36,16 +41,20 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except ClientError as err: raise ConfigEntryNotReady from err - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = auth + try: + task_lists = await auth.list_task_lists() + except GoogleTasksApiError as err: + raise ConfigEntryNotReady from err + + entry.runtime_data = GoogleTasksData(auth, task_lists) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: GoogleTasksConfigEntry +) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/google_tasks/todo.py b/homeassistant/components/google_tasks/todo.py index 86cb5e09300..d749adbfb2b 100644 --- a/homeassistant/components/google_tasks/todo.py +++ b/homeassistant/components/google_tasks/todo.py @@ -11,15 +11,13 @@ from homeassistant.components.todo import ( TodoListEntity, TodoListEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util import dt as dt_util -from .api import AsyncConfigEntryAuth -from .const import DOMAIN from .coordinator import TaskUpdateCoordinator +from .types import GoogleTasksConfigEntry SCAN_INTERVAL = timedelta(minutes=15) @@ -69,20 +67,20 @@ def _convert_api_item(item: dict[str, str]) -> TodoItem: async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: GoogleTasksConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Google Tasks todo platform.""" - api: AsyncConfigEntryAuth = hass.data[DOMAIN][entry.entry_id] - task_lists = await api.list_task_lists() async_add_entities( ( GoogleTaskTodoListEntity( - TaskUpdateCoordinator(hass, api, task_list["id"]), + TaskUpdateCoordinator(hass, entry.runtime_data.api, task_list["id"]), task_list["title"], entry.entry_id, task_list["id"], ) - for task_list in task_lists + for task_list in entry.runtime_data.task_lists ), True, ) diff --git a/homeassistant/components/google_tasks/types.py b/homeassistant/components/google_tasks/types.py new file mode 100644 index 00000000000..eaaec23ddf5 --- /dev/null +++ b/homeassistant/components/google_tasks/types.py @@ -0,0 +1,19 @@ +"""Types for the Google Tasks integration.""" + +from dataclasses import dataclass +from typing import Any + +from homeassistant.config_entries import ConfigEntry + +from .api import AsyncConfigEntryAuth + + +@dataclass +class GoogleTasksData: + """Class to hold Google 
Tasks data.""" + + api: AsyncConfigEntryAuth + task_lists: list[dict[str, Any]] + + +type GoogleTasksConfigEntry = ConfigEntry[GoogleTasksData] diff --git a/tests/components/google_tasks/conftest.py b/tests/components/google_tasks/conftest.py index 7db78af6232..e519cac9bdc 100644 --- a/tests/components/google_tasks/conftest.py +++ b/tests/components/google_tasks/conftest.py @@ -1,10 +1,12 @@ """Test fixtures for Google Tasks.""" from collections.abc import Awaitable, Callable +import json import time from typing import Any -from unittest.mock import patch +from unittest.mock import Mock, patch +from httplib2 import Response import pytest from homeassistant.components.application_credentials import ( @@ -24,6 +26,14 @@ FAKE_ACCESS_TOKEN = "some-access-token" FAKE_REFRESH_TOKEN = "some-refresh-token" FAKE_AUTH_IMPL = "conftest-imported-cred" +TASK_LIST = { + "id": "task-list-id-1", + "title": "My tasks", +} +LIST_TASK_LIST_RESPONSE = { + "items": [TASK_LIST], +} + @pytest.fixture def platforms() -> list[Platform]: @@ -89,3 +99,31 @@ async def mock_integration_setup( return result return run + + +@pytest.fixture(name="api_responses") +def mock_api_responses() -> list[dict | list]: + """Fixture forcreate_response_object API responses to return during test.""" + return [] + + +def create_response_object(api_response: dict | list) -> tuple[Response, bytes]: + """Create an http response.""" + return ( + Response({"Content-Type": "application/json"}), + json.dumps(api_response).encode(), + ) + + +@pytest.fixture(name="response_handler") +def mock_response_handler(api_responses: list[dict | list]) -> list: + """Create a mock http2lib response handler.""" + return [create_response_object(api_response) for api_response in api_responses] + + +@pytest.fixture +def mock_http_response(response_handler: list | Callable) -> Mock: + """Fixture to fake out http2lib responses.""" + + with patch("httplib2.Http.request", side_effect=response_handler) as mock_response: + yield mock_response diff --git a/tests/components/google_tasks/test_init.py b/tests/components/google_tasks/test_init.py index 1fe0e4a0c36..4bb2bd1eed7 100644 --- a/tests/components/google_tasks/test_init.py +++ b/tests/components/google_tasks/test_init.py @@ -2,8 +2,11 @@ from collections.abc import Awaitable, Callable import http +from http import HTTPStatus import time +from unittest.mock import Mock +from httplib2 import Response import pytest from homeassistant.components.google_tasks import DOMAIN @@ -11,15 +14,19 @@ from homeassistant.components.google_tasks.const import OAUTH2_TOKEN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant +from .conftest import LIST_TASK_LIST_RESPONSE + from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker +@pytest.mark.parametrize("api_responses", [[LIST_TASK_LIST_RESPONSE]]) async def test_setup( hass: HomeAssistant, integration_setup: Callable[[], Awaitable[bool]], config_entry: MockConfigEntry, setup_credentials: None, + mock_http_response: Mock, ) -> None: """Test successful setup and unload.""" assert config_entry.state is ConfigEntryState.NOT_LOADED @@ -35,12 +42,14 @@ async def test_setup( @pytest.mark.parametrize("expires_at", [time.time() - 3600], ids=["expired"]) +@pytest.mark.parametrize("api_responses", [[LIST_TASK_LIST_RESPONSE]]) async def test_expired_token_refresh_success( hass: HomeAssistant, integration_setup: Callable[[], Awaitable[bool]], aioclient_mock: AiohttpClientMocker, config_entry: 
MockConfigEntry, setup_credentials: None, + mock_http_response: Mock, ) -> None: """Test expired token is refreshed.""" @@ -98,3 +107,22 @@ async def test_expired_token_refresh_failure( await integration_setup() assert config_entry.state is expected_state + + +@pytest.mark.parametrize( + "response_handler", + [ + ([(Response({"status": HTTPStatus.INTERNAL_SERVER_ERROR}), b"")]), + ], +) +async def test_setup_error( + hass: HomeAssistant, + setup_credentials: None, + integration_setup: Callable[[], Awaitable[bool]], + mock_http_response: Mock, + config_entry: MockConfigEntry, +) -> None: + """Test an error returned by the server when setting up the platform.""" + + assert not await integration_setup() + assert config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/google_tasks/test_todo.py b/tests/components/google_tasks/test_todo.py index c5ecc0ca2cf..c713b9fd44f 100644 --- a/tests/components/google_tasks/test_todo.py +++ b/tests/components/google_tasks/test_todo.py @@ -4,7 +4,7 @@ from collections.abc import Awaitable, Callable from http import HTTPStatus import json from typing import Any -from unittest.mock import Mock, patch +from unittest.mock import Mock from httplib2 import Response import pytest @@ -23,16 +23,11 @@ from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from .conftest import LIST_TASK_LIST_RESPONSE, create_response_object + from tests.typing import WebSocketGenerator ENTITY_ID = "todo.my_tasks" -ITEM = { - "id": "task-list-id-1", - "title": "My tasks", -} -LIST_TASK_LIST_RESPONSE = { - "items": [ITEM], -} EMPTY_RESPONSE = {} LIST_TASKS_RESPONSE = { "items": [], @@ -149,20 +144,6 @@ async def ws_get_items( return get -@pytest.fixture(name="api_responses") -def mock_api_responses() -> list[dict | list]: - """Fixture for API responses to return during test.""" - return [] - - -def create_response_object(api_response: dict | list) -> tuple[Response, bytes]: - """Create an http response.""" - return ( - Response({"Content-Type": "application/json"}), - json.dumps(api_response).encode(), - ) - - def create_batch_response_object( content_ids: list[str], api_responses: list[dict | list | Response | None] ) -> tuple[Response, bytes]: @@ -225,18 +206,10 @@ def create_batch_response_handler( return _handler -@pytest.fixture(name="response_handler") -def mock_response_handler(api_responses: list[dict | list]) -> list: - """Create a mock http2lib response handler.""" - return [create_response_object(api_response) for api_response in api_responses] - - @pytest.fixture(autouse=True) -def mock_http_response(response_handler: list | Callable) -> Mock: - """Fixture to fake out http2lib responses.""" - - with patch("httplib2.Http.request", side_effect=response_handler) as mock_response: - yield mock_response +def setup_http_response(mock_http_response: Mock) -> None: + """Fixture to load the http response mock.""" + return @pytest.mark.parametrize("timezone", ["America/Regina", "UTC", "Asia/Tokyo"]) @@ -303,29 +276,6 @@ async def test_get_items( assert state.state == "1" -@pytest.mark.parametrize( - "response_handler", - [ - ([(Response({"status": HTTPStatus.INTERNAL_SERVER_ERROR}), b"")]), - ], -) -async def test_list_items_server_error( - hass: HomeAssistant, - setup_credentials: None, - integration_setup: Callable[[], Awaitable[bool]], - hass_ws_client: WebSocketGenerator, - ws_get_items: Callable[[], Awaitable[dict[str, str]]], -) -> None: - """Test an 
error returned by the server when setting up the platform.""" - - assert await integration_setup() - - await hass_ws_client(hass) - - state = hass.states.get("todo.my_tasks") - assert state is None - - @pytest.mark.parametrize( "api_responses", [ From 73feeacc396021d05b6611dad93bb442dfa55cc0 Mon Sep 17 00:00:00 2001 From: Chris Talkington Date: Tue, 10 Dec 2024 23:55:58 -0600 Subject: [PATCH 0487/1198] Use runtime_data for roku (#132781) * use runtime_data for roku * unload cleanup * tweaks * tweaks * fix tests * fix tests * Update config_flow.py * Update config_flow.py --- homeassistant/components/roku/__init__.py | 16 ++++++++-------- homeassistant/components/roku/binary_sensor.py | 9 +++------ homeassistant/components/roku/config_flow.py | 10 +++------- homeassistant/components/roku/diagnostics.py | 14 +++++--------- homeassistant/components/roku/media_player.py | 9 +++------ homeassistant/components/roku/remote.py | 10 +++------- homeassistant/components/roku/select.py | 13 +++++-------- homeassistant/components/roku/sensor.py | 10 +++------- tests/components/roku/test_init.py | 9 +-------- 9 files changed, 34 insertions(+), 66 deletions(-) diff --git a/homeassistant/components/roku/__init__.py b/homeassistant/components/roku/__init__.py index b318a91e4c7..e6b92d91335 100644 --- a/homeassistant/components/roku/__init__.py +++ b/homeassistant/components/roku/__init__.py @@ -6,7 +6,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant -from .const import CONF_PLAY_MEDIA_APP_ID, DEFAULT_PLAY_MEDIA_APP_ID, DOMAIN +from .const import CONF_PLAY_MEDIA_APP_ID, DEFAULT_PLAY_MEDIA_APP_ID from .coordinator import RokuDataUpdateCoordinator PLATFORMS = [ @@ -17,8 +17,10 @@ PLATFORMS = [ Platform.SENSOR, ] +type RokuConfigEntry = ConfigEntry[RokuDataUpdateCoordinator] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + +async def async_setup_entry(hass: HomeAssistant, entry: RokuConfigEntry) -> bool: """Set up Roku from a config entry.""" if (device_id := entry.unique_id) is None: device_id = entry.entry_id @@ -33,7 +35,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -42,13 +44,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: RokuConfigEntry) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) -async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def async_reload_entry(hass: HomeAssistant, entry: RokuConfigEntry) -> None: """Reload the config entry when it changed.""" await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/roku/binary_sensor.py b/homeassistant/components/roku/binary_sensor.py index 0f5f29f63f6..cd51c30c250 100644 --- a/homeassistant/components/roku/binary_sensor.py +++ b/homeassistant/components/roku/binary_sensor.py @@ -11,12 +11,11 @@ from 
homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import RokuConfigEntry from .entity import RokuEntity @@ -56,15 +55,13 @@ BINARY_SENSORS: tuple[RokuBinarySensorEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: RokuConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a Roku binary sensors based on a config entry.""" - coordinator = hass.data[DOMAIN][entry.entry_id] - async_add_entities( RokuBinarySensorEntity( - coordinator=coordinator, + coordinator=entry.runtime_data, description=description, ) for description in BINARY_SENSORS diff --git a/homeassistant/components/roku/config_flow.py b/homeassistant/components/roku/config_flow.py index 18e3b3ed68a..b92ff819701 100644 --- a/homeassistant/components/roku/config_flow.py +++ b/homeassistant/components/roku/config_flow.py @@ -10,16 +10,12 @@ from rokuecp import Roku, RokuError import voluptuous as vol from homeassistant.components import ssdp, zeroconf -from homeassistant.config_entries import ( - ConfigEntry, - ConfigFlow, - ConfigFlowResult, - OptionsFlow, -) +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow from homeassistant.const import CONF_HOST, CONF_NAME from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.aiohttp_client import async_get_clientsession +from . import RokuConfigEntry from .const import CONF_PLAY_MEDIA_APP_ID, DEFAULT_PLAY_MEDIA_APP_ID, DOMAIN DATA_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str}) @@ -164,7 +160,7 @@ class RokuConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: RokuConfigEntry, ) -> RokuOptionsFlowHandler: """Create the options flow.""" return RokuOptionsFlowHandler() diff --git a/homeassistant/components/roku/diagnostics.py b/homeassistant/components/roku/diagnostics.py index 6c6809ee33a..e98837ca442 100644 --- a/homeassistant/components/roku/diagnostics.py +++ b/homeassistant/components/roku/diagnostics.py @@ -4,25 +4,21 @@ from __future__ import annotations from typing import Any -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .coordinator import RokuDataUpdateCoordinator +from . 
import RokuConfigEntry async def async_get_config_entry_diagnostics( - hass: HomeAssistant, config_entry: ConfigEntry + hass: HomeAssistant, entry: RokuConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: RokuDataUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id] - return { "entry": { "data": { - **config_entry.data, + **entry.data, }, - "unique_id": config_entry.unique_id, + "unique_id": entry.unique_id, }, - "data": coordinator.data.as_dict(), + "data": entry.runtime_data.data.as_dict(), } diff --git a/homeassistant/components/roku/media_player.py b/homeassistant/components/roku/media_player.py index 35f01553cdd..d43d62c9438 100644 --- a/homeassistant/components/roku/media_player.py +++ b/homeassistant/components/roku/media_player.py @@ -23,13 +23,13 @@ from homeassistant.components.media_player import ( async_process_play_media_url, ) from homeassistant.components.stream import FORMAT_CONTENT_TYPE, HLS_PROVIDER -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_NAME from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import VolDictType +from . import RokuConfigEntry from .browse_media import async_browse_media from .const import ( ATTR_ARTIST_NAME, @@ -38,7 +38,6 @@ from .const import ( ATTR_KEYWORD, ATTR_MEDIA_TYPE, ATTR_THUMBNAIL, - DOMAIN, SERVICE_SEARCH, ) from .coordinator import RokuDataUpdateCoordinator @@ -83,15 +82,13 @@ SEARCH_SCHEMA: VolDictType = {vol.Required(ATTR_KEYWORD): str} async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, entry: RokuConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the Roku config entry.""" - coordinator: RokuDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] - async_add_entities( [ RokuMediaPlayer( - coordinator=coordinator, + coordinator=entry.runtime_data, ) ], True, diff --git a/homeassistant/components/roku/remote.py b/homeassistant/components/roku/remote.py index fa351e021e8..9a31f9fd7a0 100644 --- a/homeassistant/components/roku/remote.py +++ b/homeassistant/components/roku/remote.py @@ -6,28 +6,24 @@ from collections.abc import Iterable from typing import Any from homeassistant.components.remote import ATTR_NUM_REPEATS, RemoteEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .coordinator import RokuDataUpdateCoordinator +from . 
import RokuConfigEntry from .entity import RokuEntity from .helpers import roku_exception_handler async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: RokuConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Load Roku remote based on a config entry.""" - coordinator: RokuDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] - async_add_entities( [ RokuRemote( - coordinator=coordinator, + coordinator=entry.runtime_data, ) ], True, diff --git a/homeassistant/components/roku/select.py b/homeassistant/components/roku/select.py index 5f3b9d4049b..6977f8c0d24 100644 --- a/homeassistant/components/roku/select.py +++ b/homeassistant/components/roku/select.py @@ -9,12 +9,10 @@ from rokuecp import Roku from rokuecp.models import Device as RokuDevice from homeassistant.components.select import SelectEntity, SelectEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .coordinator import RokuDataUpdateCoordinator +from . import RokuConfigEntry from .entity import RokuEntity from .helpers import format_channel_name, roku_exception_handler @@ -108,16 +106,15 @@ CHANNEL_ENTITY = RokuSelectEntityDescription( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: RokuConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Roku select based on a config entry.""" - coordinator: RokuDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] - device: RokuDevice = coordinator.data + device: RokuDevice = entry.runtime_data.data entities: list[RokuSelectEntity] = [ RokuSelectEntity( - coordinator=coordinator, + coordinator=entry.runtime_data, description=description, ) for description in ENTITIES @@ -126,7 +123,7 @@ async def async_setup_entry( if len(device.channels) > 0: entities.append( RokuSelectEntity( - coordinator=coordinator, + coordinator=entry.runtime_data, description=CHANNEL_ENTITY, ) ) diff --git a/homeassistant/components/roku/sensor.py b/homeassistant/components/roku/sensor.py index ed134cc4c2a..56a84ead402 100644 --- a/homeassistant/components/roku/sensor.py +++ b/homeassistant/components/roku/sensor.py @@ -8,13 +8,11 @@ from dataclasses import dataclass from rokuecp.models import Device as RokuDevice from homeassistant.components.sensor import SensorEntity, SensorEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .coordinator import RokuDataUpdateCoordinator +from . import RokuConfigEntry from .entity import RokuEntity @@ -43,15 +41,13 @@ SENSORS: tuple[RokuSensorEntityDescription, ...] 
= ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: RokuConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Roku sensor based on a config entry.""" - coordinator: RokuDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] - async_add_entities( RokuSensorEntity( - coordinator=coordinator, + coordinator=entry.runtime_data, description=description, ) for description in SENSORS diff --git a/tests/components/roku/test_init.py b/tests/components/roku/test_init.py index a4fc8477ac3..9c414bcf62a 100644 --- a/tests/components/roku/test_init.py +++ b/tests/components/roku/test_init.py @@ -4,7 +4,6 @@ from unittest.mock import AsyncMock, MagicMock, patch from rokuecp import RokuConnectionError -from homeassistant.components.roku.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -38,12 +37,7 @@ async def test_config_entry_no_unique_id( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - assert mock_config_entry.entry_id in hass.data[DOMAIN] assert mock_config_entry.state is ConfigEntryState.LOADED - assert ( - hass.data[DOMAIN][mock_config_entry.entry_id].device_id - == mock_config_entry.entry_id - ) async def test_load_unload_config_entry( @@ -56,10 +50,9 @@ async def test_load_unload_config_entry( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - assert mock_config_entry.entry_id in hass.data[DOMAIN] assert mock_config_entry.state is ConfigEntryState.LOADED await hass.config_entries.async_unload(mock_config_entry.entry_id) await hass.async_block_till_done() - assert mock_config_entry.entry_id not in hass.data[DOMAIN] + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED From 9f40074d6635d8917b2c87d4037e9ec4b686cc73 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ludovic=20BOU=C3=89?= Date: Wed, 11 Dec 2024 07:36:09 +0100 Subject: [PATCH 0488/1198] Fix typo in water heater integration (#132891) Fix typo in water heater componant --- homeassistant/components/water_heater/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/water_heater/__init__.py b/homeassistant/components/water_heater/__init__.py index 43a9364e59d..67ce3a97fd1 100644 --- a/homeassistant/components/water_heater/__init__.py +++ b/homeassistant/components/water_heater/__init__.py @@ -56,7 +56,7 @@ STATE_GAS = "gas" class WaterHeaterEntityFeature(IntFlag): - """Supported features of the fan entity.""" + """Supported features of the water heater entity.""" TARGET_TEMPERATURE = 1 OPERATION_MODE = 2 From f0f0b4b8fa2f1bb04385c9a076adb61d5cae32e4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 11 Dec 2024 08:24:25 +0100 Subject: [PATCH 0489/1198] Bump github/codeql-action from 3.27.6 to 3.27.7 (#132900) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/codeql.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 5b8ac94e570..8f6e393f853 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -24,11 +24,11 @@ jobs: uses: actions/checkout@v4.2.2 - name: Initialize CodeQL - uses: github/codeql-action/init@v3.27.6 + uses: github/codeql-action/init@v3.27.7 with: languages: python - name: Perform CodeQL Analysis - uses: 
github/codeql-action/analyze@v3.27.6 + uses: github/codeql-action/analyze@v3.27.7 with: category: "/language:python" From 4ff41ed2f800e1f04922278f04e498791c972eda Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 11 Dec 2024 08:42:48 +0100 Subject: [PATCH 0490/1198] Refactor light significant change to use kelvin attribute (#132853) --- homeassistant/components/light/significant_change.py | 10 +++++----- tests/components/light/test_significant_change.py | 6 +++--- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/light/significant_change.py b/homeassistant/components/light/significant_change.py index 1877c925622..773b7a6b898 100644 --- a/homeassistant/components/light/significant_change.py +++ b/homeassistant/components/light/significant_change.py @@ -7,7 +7,7 @@ from typing import Any from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.significant_change import check_absolute_change -from . import ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_HS_COLOR +from . import ATTR_BRIGHTNESS, ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR @callback @@ -44,10 +44,10 @@ def async_check_significant_change( return True if check_absolute_change( - # Default range 153..500 - old_attrs.get(ATTR_COLOR_TEMP), - new_attrs.get(ATTR_COLOR_TEMP), - 5, + # Default range 2000..6500 + old_attrs.get(ATTR_COLOR_TEMP_KELVIN), + new_attrs.get(ATTR_COLOR_TEMP_KELVIN), + 50, ): return True diff --git a/tests/components/light/test_significant_change.py b/tests/components/light/test_significant_change.py index 87a60b58325..cf03f37228e 100644 --- a/tests/components/light/test_significant_change.py +++ b/tests/components/light/test_significant_change.py @@ -2,7 +2,7 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, ) @@ -26,10 +26,10 @@ async def test_significant_change() -> None: # Color temp assert not async_check_significant_change( - None, "on", {ATTR_COLOR_TEMP: 60}, "on", {ATTR_COLOR_TEMP: 64} + None, "on", {ATTR_COLOR_TEMP_KELVIN: 2000}, "on", {ATTR_COLOR_TEMP_KELVIN: 2049} ) assert async_check_significant_change( - None, "on", {ATTR_COLOR_TEMP: 60}, "on", {ATTR_COLOR_TEMP: 65} + None, "on", {ATTR_COLOR_TEMP_KELVIN: 2000}, "on", {ATTR_COLOR_TEMP_KELVIN: 2050} ) # Effect From 5e1772156856c8c1114acdb3b1a1064a3925672f Mon Sep 17 00:00:00 2001 From: shapournemati-iotty <130070037+shapournemati-iotty@users.noreply.github.com> Date: Wed, 11 Dec 2024 08:53:19 +0100 Subject: [PATCH 0491/1198] Remove old codeowner no longer working on the integration (#132807) --- CODEOWNERS | 4 ++-- homeassistant/components/iotty/manifest.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CODEOWNERS b/CODEOWNERS index 3a407308275..03b0e7b893b 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -727,8 +727,8 @@ build.json @home-assistant/supervisor /tests/components/ios/ @robbiet480 /homeassistant/components/iotawatt/ @gtdiehl @jyavenard /tests/components/iotawatt/ @gtdiehl @jyavenard -/homeassistant/components/iotty/ @pburgio @shapournemati-iotty -/tests/components/iotty/ @pburgio @shapournemati-iotty +/homeassistant/components/iotty/ @shapournemati-iotty +/tests/components/iotty/ @shapournemati-iotty /homeassistant/components/iperf3/ @rohankapoorcom /homeassistant/components/ipma/ @dgomes /tests/components/ipma/ @dgomes diff --git a/homeassistant/components/iotty/manifest.json b/homeassistant/components/iotty/manifest.json 
index 1c0d5cc3df2..db81f7c5839 100644 --- a/homeassistant/components/iotty/manifest.json +++ b/homeassistant/components/iotty/manifest.json @@ -1,7 +1,7 @@ { "domain": "iotty", "name": "iotty", - "codeowners": ["@pburgio", "@shapournemati-iotty"], + "codeowners": ["@shapournemati-iotty"], "config_flow": true, "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/iotty", From af838077ccad92ba77a9ecff0f3e6b1dcf180c5e Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Wed, 11 Dec 2024 08:55:00 +0100 Subject: [PATCH 0492/1198] Fix docker hassfest (#132823) --- .github/workflows/builder.yml | 2 +- script/gen_requirements_all.py | 1 - script/hassfest/__main__.py | 14 +++++----- script/hassfest/docker.py | 6 ++--- script/hassfest/docker/entrypoint.sh | 26 ++++++++++++++----- script/hassfest/model.py | 6 ++++- script/hassfest/quality_scale.py | 2 +- .../quality_scale_validation/__init__.py | 4 +-- .../config_entry_unloading.py | 6 +++-- .../quality_scale_validation/config_flow.py | 6 +++-- .../quality_scale_validation/diagnostics.py | 6 +++-- .../quality_scale_validation/discovery.py | 6 +++-- .../parallel_updates.py | 6 +++-- .../reauthentication_flow.py | 6 +++-- .../reconfiguration_flow.py | 6 +++-- .../quality_scale_validation/runtime_data.py | 6 +++-- .../quality_scale_validation/strict_typing.py | 13 ++++++---- .../unique_config_entry.py | 6 +++-- tests/hassfest/test_requirements.py | 3 +-- tests/hassfest/test_version.py | 3 +-- 20 files changed, 85 insertions(+), 49 deletions(-) diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index 9d3ab18f7c1..8f419cca1da 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -517,7 +517,7 @@ jobs: tags: ${{ env.HASSFEST_IMAGE_TAG }} - name: Run hassfest against core - run: docker run --rm -v ${{ github.workspace }}/homeassistant:/github/workspace/homeassistant ${{ env.HASSFEST_IMAGE_TAG }} --core-integrations-path=/github/workspace/homeassistant/components + run: docker run --rm -v ${{ github.workspace }}:/github/workspace ${{ env.HASSFEST_IMAGE_TAG }} --core-path=/github/workspace - name: Push Docker image if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true' diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index fa46710d100..5cc609eec2a 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -628,7 +628,6 @@ def _get_hassfest_config() -> Config: specific_integrations=None, action="validate", requirements=True, - core_integrations_path=Path("homeassistant/components"), ) diff --git a/script/hassfest/__main__.py b/script/hassfest/__main__.py index 81670de5afd..c93d8fd4499 100644 --- a/script/hassfest/__main__.py +++ b/script/hassfest/__main__.py @@ -110,10 +110,10 @@ def get_config() -> Config: help="Comma-separate list of plugins to run. 
Valid plugin names: %(default)s", ) parser.add_argument( - "--core-integrations-path", + "--core-path", type=Path, - default=Path("homeassistant/components"), - help="Path to core integrations", + default=Path(), + help="Path to core", ) parsed = parser.parse_args() @@ -125,16 +125,18 @@ def get_config() -> Config: "Generate is not allowed when limiting to specific integrations" ) - if not parsed.integration_path and not Path("requirements_all.txt").is_file(): + if ( + not parsed.integration_path + and not (parsed.core_path / "requirements_all.txt").is_file() + ): raise RuntimeError("Run from Home Assistant root") return Config( - root=Path().absolute(), + root=parsed.core_path.absolute(), specific_integrations=parsed.integration_path, action=parsed.action, requirements=parsed.requirements, plugins=set(parsed.plugins), - core_integrations_path=parsed.core_integrations_path, ) diff --git a/script/hassfest/docker.py b/script/hassfest/docker.py index 57d86bc4def..022caee30cd 100644 --- a/script/hassfest/docker.py +++ b/script/hassfest/docker.py @@ -185,12 +185,12 @@ def _generate_files(config: Config) -> list[File]: + 10 ) * 1000 - package_versions = _get_package_versions(Path("requirements.txt"), {"uv"}) + package_versions = _get_package_versions(config.root / "requirements.txt", {"uv"}) package_versions |= _get_package_versions( - Path("requirements_test.txt"), {"pipdeptree", "tqdm"} + config.root / "requirements_test.txt", {"pipdeptree", "tqdm"} ) package_versions |= _get_package_versions( - Path("requirements_test_pre_commit.txt"), {"ruff"} + config.root / "requirements_test_pre_commit.txt", {"ruff"} ) return [ diff --git a/script/hassfest/docker/entrypoint.sh b/script/hassfest/docker/entrypoint.sh index 7b75eb186d2..eabc08a9499 100755 --- a/script/hassfest/docker/entrypoint.sh +++ b/script/hassfest/docker/entrypoint.sh @@ -2,16 +2,28 @@ integrations="" integration_path="" +core_path_provided=false -# Enable recursive globbing using find -for manifest in $(find . -name "manifest.json"); do - manifest_path=$(realpath "${manifest}") - integrations="$integrations --integration-path ${manifest_path%/*}" +for arg in "$@"; do + case "$arg" in + --core-path=*) + core_path_provided=true + break + ;; + esac done -if [ -z "$integrations" ]; then - echo "Error: No integrations found!" - exit 1 +if [ "$core_path_provided" = false ]; then + # Enable recursive globbing using find + for manifest in $(find . -name "manifest.json"); do + manifest_path=$(realpath "${manifest}") + integrations="$integrations --integration-path ${manifest_path%/*}" + done + + if [ -z "$integrations" ]; then + echo "Error: No integrations found!" 
+ exit 1 + fi fi cd /usr/src/homeassistant || exit 1 diff --git a/script/hassfest/model.py b/script/hassfest/model.py index 377f82b0d5c..08ded687096 100644 --- a/script/hassfest/model.py +++ b/script/hassfest/model.py @@ -30,11 +30,15 @@ class Config: root: pathlib.Path action: Literal["validate", "generate"] requirements: bool - core_integrations_path: pathlib.Path + core_integrations_path: pathlib.Path = field(init=False) errors: list[Error] = field(default_factory=list) cache: dict[str, Any] = field(default_factory=dict) plugins: set[str] = field(default_factory=set) + def __post_init__(self) -> None: + """Post init.""" + self.core_integrations_path = self.root / "homeassistant/components" + def add_error(self, *args: Any, **kwargs: Any) -> None: """Add an error.""" self.errors.append(Error(*args, **kwargs)) diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 72f01f3d1d1..5a09f8c7bd8 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -1358,7 +1358,7 @@ def validate_iqs_file(config: Config, integration: Integration) -> None: for rule_name in rules_done: if (validator := VALIDATORS.get(rule_name)) and ( - errors := validator.validate(integration, rules_done=rules_done) + errors := validator.validate(config, integration, rules_done=rules_done) ): for error in errors: integration.add_error("quality_scale", f"[{rule_name}] {error}") diff --git a/script/hassfest/quality_scale_validation/__init__.py b/script/hassfest/quality_scale_validation/__init__.py index 892bb70fabd..7c41a58b601 100644 --- a/script/hassfest/quality_scale_validation/__init__.py +++ b/script/hassfest/quality_scale_validation/__init__.py @@ -2,14 +2,14 @@ from typing import Protocol -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration class RuleValidationProtocol(Protocol): """Protocol for rule validation.""" def validate( - self, integration: Integration, *, rules_done: set[str] + self, config: Config, integration: Integration, *, rules_done: set[str] ) -> list[str] | None: """Validate a quality scale rule. 
diff --git a/script/hassfest/quality_scale_validation/config_entry_unloading.py b/script/hassfest/quality_scale_validation/config_entry_unloading.py index fb636a7f2ed..4874ddc4625 100644 --- a/script/hassfest/quality_scale_validation/config_entry_unloading.py +++ b/script/hassfest/quality_scale_validation/config_entry_unloading.py @@ -6,7 +6,7 @@ https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/c import ast from script.hassfest import ast_parse_module -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration def _has_unload_entry_function(module: ast.Module) -> bool: @@ -17,7 +17,9 @@ def _has_unload_entry_function(module: ast.Module) -> bool: ) -def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: """Validate that the integration has a config flow.""" init_file = integration.path / "__init__.py" diff --git a/script/hassfest/quality_scale_validation/config_flow.py b/script/hassfest/quality_scale_validation/config_flow.py index 6e88aa462f4..d1ac70ab469 100644 --- a/script/hassfest/quality_scale_validation/config_flow.py +++ b/script/hassfest/quality_scale_validation/config_flow.py @@ -3,10 +3,12 @@ https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/config-flow/ """ -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration -def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: """Validate that the integration implements config flow.""" if not integration.config_flow: diff --git a/script/hassfest/quality_scale_validation/diagnostics.py b/script/hassfest/quality_scale_validation/diagnostics.py index 44012208bcb..ea143002b09 100644 --- a/script/hassfest/quality_scale_validation/diagnostics.py +++ b/script/hassfest/quality_scale_validation/diagnostics.py @@ -6,7 +6,7 @@ https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/d import ast from script.hassfest import ast_parse_module -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration DIAGNOSTICS_FUNCTIONS = { "async_get_config_entry_diagnostics", @@ -22,7 +22,9 @@ def _has_diagnostics_function(module: ast.Module) -> bool: ) -def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: """Validate that the integration implements diagnostics.""" diagnostics_file = integration.path / "diagnostics.py" diff --git a/script/hassfest/quality_scale_validation/discovery.py b/script/hassfest/quality_scale_validation/discovery.py index db50cdba55a..d11bcaf2cec 100644 --- a/script/hassfest/quality_scale_validation/discovery.py +++ b/script/hassfest/quality_scale_validation/discovery.py @@ -6,7 +6,7 @@ https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/d import ast from script.hassfest import ast_parse_module -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration MANIFEST_KEYS = [ "bluetooth", @@ -38,7 +38,9 @@ def _has_discovery_function(module: ast.Module) -> bool: ) -def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: +def validate( + 
config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: """Validate that the integration implements diagnostics.""" config_flow_file = integration.path / "config_flow.py" diff --git a/script/hassfest/quality_scale_validation/parallel_updates.py b/script/hassfest/quality_scale_validation/parallel_updates.py index 3483a44f504..00ad891774d 100644 --- a/script/hassfest/quality_scale_validation/parallel_updates.py +++ b/script/hassfest/quality_scale_validation/parallel_updates.py @@ -7,7 +7,7 @@ import ast from homeassistant.const import Platform from script.hassfest import ast_parse_module -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration def _has_parallel_updates_defined(module: ast.Module) -> bool: @@ -18,7 +18,9 @@ def _has_parallel_updates_defined(module: ast.Module) -> bool: ) -def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: """Validate that the integration sets PARALLEL_UPDATES constant.""" errors = [] diff --git a/script/hassfest/quality_scale_validation/reauthentication_flow.py b/script/hassfest/quality_scale_validation/reauthentication_flow.py index 81d34ec4f7f..3db9700af98 100644 --- a/script/hassfest/quality_scale_validation/reauthentication_flow.py +++ b/script/hassfest/quality_scale_validation/reauthentication_flow.py @@ -6,7 +6,7 @@ https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/r import ast from script.hassfest import ast_parse_module -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration def _has_step_reauth_function(module: ast.Module) -> bool: @@ -17,7 +17,9 @@ def _has_step_reauth_function(module: ast.Module) -> bool: ) -def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: """Validate that the integration has a reauthentication flow.""" config_flow_file = integration.path / "config_flow.py" diff --git a/script/hassfest/quality_scale_validation/reconfiguration_flow.py b/script/hassfest/quality_scale_validation/reconfiguration_flow.py index b27475e8c70..28cc0ef6d43 100644 --- a/script/hassfest/quality_scale_validation/reconfiguration_flow.py +++ b/script/hassfest/quality_scale_validation/reconfiguration_flow.py @@ -6,7 +6,7 @@ https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/r import ast from script.hassfest import ast_parse_module -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration def _has_step_reconfigure_function(module: ast.Module) -> bool: @@ -17,7 +17,9 @@ def _has_step_reconfigure_function(module: ast.Module) -> bool: ) -def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: """Validate that the integration has a reconfiguration flow.""" config_flow_file = integration.path / "config_flow.py" diff --git a/script/hassfest/quality_scale_validation/runtime_data.py b/script/hassfest/quality_scale_validation/runtime_data.py index 8ad721a218c..cfc4c5224de 100644 --- a/script/hassfest/quality_scale_validation/runtime_data.py +++ b/script/hassfest/quality_scale_validation/runtime_data.py @@ -8,7 +8,7 @@ import re from 
homeassistant.const import Platform from script.hassfest import ast_parse_module -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration _ANNOTATION_MATCH = re.compile(r"^[A-Za-z]+ConfigEntry$") _FUNCTIONS: dict[str, dict[str, int]] = { @@ -102,7 +102,9 @@ def _check_typed_config_entry(integration: Integration) -> list[str]: return errors -def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: """Validate correct use of ConfigEntry.runtime_data.""" init_file = integration.path / "__init__.py" init = ast_parse_module(init_file) diff --git a/script/hassfest/quality_scale_validation/strict_typing.py b/script/hassfest/quality_scale_validation/strict_typing.py index a7755b6bb40..a27ab752cf0 100644 --- a/script/hassfest/quality_scale_validation/strict_typing.py +++ b/script/hassfest/quality_scale_validation/strict_typing.py @@ -7,27 +7,30 @@ from functools import lru_cache from pathlib import Path import re -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration _STRICT_TYPING_FILE = Path(".strict-typing") _COMPONENT_REGEX = r"homeassistant.components.([^.]+).*" @lru_cache -def _strict_typing_components() -> set[str]: +def _strict_typing_components(strict_typing_file: Path) -> set[str]: return set( { match.group(1) - for line in _STRICT_TYPING_FILE.read_text(encoding="utf-8").splitlines() + for line in strict_typing_file.read_text(encoding="utf-8").splitlines() if (match := re.match(_COMPONENT_REGEX, line)) is not None } ) -def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: """Validate that the integration has strict typing enabled.""" + strict_typing_file = config.root / _STRICT_TYPING_FILE - if integration.domain not in _strict_typing_components(): + if integration.domain not in _strict_typing_components(strict_typing_file): return [ "Integration does not have strict typing enabled " "(is missing from .strict-typing)" diff --git a/script/hassfest/quality_scale_validation/unique_config_entry.py b/script/hassfest/quality_scale_validation/unique_config_entry.py index 8c38923e584..83b3d20bd80 100644 --- a/script/hassfest/quality_scale_validation/unique_config_entry.py +++ b/script/hassfest/quality_scale_validation/unique_config_entry.py @@ -6,7 +6,7 @@ https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/u import ast from script.hassfest import ast_parse_module -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration def _has_method_call(module: ast.Module, name: str) -> bool: @@ -30,7 +30,9 @@ def _has_abort_unique_id_configured(module: ast.Module) -> bool: ) -def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: """Validate that the integration prevents duplicate devices.""" if integration.manifest.get("single_config_entry"): diff --git a/tests/hassfest/test_requirements.py b/tests/hassfest/test_requirements.py index e70bee104c9..b9259596c65 100644 --- a/tests/hassfest/test_requirements.py +++ b/tests/hassfest/test_requirements.py @@ -12,13 +12,12 @@ from script.hassfest.requirements import validate_requirements_format def 
integration(): """Fixture for hassfest integration model.""" return Integration( - path=Path("homeassistant/components/test"), + path=Path("homeassistant/components/test").absolute(), _config=Config( root=Path(".").absolute(), specific_integrations=None, action="validate", requirements=True, - core_integrations_path=Path("homeassistant/components"), ), _manifest={ "domain": "test", diff --git a/tests/hassfest/test_version.py b/tests/hassfest/test_version.py index 30677356101..20c3d93bda5 100644 --- a/tests/hassfest/test_version.py +++ b/tests/hassfest/test_version.py @@ -16,13 +16,12 @@ from script.hassfest.model import Config, Integration def integration(): """Fixture for hassfest integration model.""" integration = Integration( - "", + Path(), _config=Config( root=Path(".").absolute(), specific_integrations=None, action="validate", requirements=True, - core_integrations_path=Path("homeassistant/components"), ), ) integration._manifest = { From b780f31e63abbde7224bec6b2ab2cacc156516d0 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 11 Dec 2024 08:55:23 +0100 Subject: [PATCH 0493/1198] Migrate flux to use Kelvin over Mireds (#132839) --- homeassistant/components/flux/switch.py | 17 +++++++---------- tests/components/flux/test_switch.py | 4 ++-- 2 files changed, 9 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/flux/switch.py b/homeassistant/components/flux/switch.py index 8a3d7ec7260..f7cf5b2c03a 100644 --- a/homeassistant/components/flux/switch.py +++ b/homeassistant/components/flux/switch.py @@ -13,7 +13,7 @@ import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_TRANSITION, ATTR_XY_COLOR, @@ -43,7 +43,6 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util import slugify from homeassistant.util.color import ( color_RGB_to_xy_brightness, - color_temperature_kelvin_to_mired, color_temperature_to_rgb, ) from homeassistant.util.dt import as_local, utcnow as dt_utcnow @@ -109,13 +108,13 @@ async def async_set_lights_xy(hass, lights, x_val, y_val, brightness, transition await hass.services.async_call(LIGHT_DOMAIN, SERVICE_TURN_ON, service_data) -async def async_set_lights_temp(hass, lights, mired, brightness, transition): +async def async_set_lights_temp(hass, lights, kelvin, brightness, transition): """Set color of array of lights.""" for light in lights: if is_on(hass, light): service_data = {ATTR_ENTITY_ID: light} - if mired is not None: - service_data[ATTR_COLOR_TEMP] = int(mired) + if kelvin is not None: + service_data[ATTR_COLOR_TEMP_KELVIN] = kelvin if brightness is not None: service_data[ATTR_BRIGHTNESS] = brightness if transition is not None: @@ -350,17 +349,15 @@ class FluxSwitch(SwitchEntity, RestoreEntity): now, ) else: - # Convert to mired and clamp to allowed values - mired = color_temperature_kelvin_to_mired(temp) await async_set_lights_temp( - self.hass, self._lights, mired, brightness, self._transition + self.hass, self._lights, int(temp), brightness, self._transition ) _LOGGER.debug( ( - "Lights updated to mired:%s brightness:%s, %s%% " + "Lights updated to kelvin:%s brightness:%s, %s%% " "of %s cycle complete at %s" ), - mired, + temp, brightness, round(percentage_complete * 100), time_state, diff --git a/tests/components/flux/test_switch.py b/tests/components/flux/test_switch.py index ab0e8a556c4..f7dc30db240 100644 --- a/tests/components/flux/test_switch.py +++ 
b/tests/components/flux/test_switch.py @@ -1164,7 +1164,7 @@ async def test_flux_with_multiple_lights( assert call.data[light.ATTR_XY_COLOR] == [0.46, 0.376] -async def test_flux_with_mired( +async def test_flux_with_temp( hass: HomeAssistant, mock_light_entities: list[MockLight], ) -> None: @@ -1224,7 +1224,7 @@ async def test_flux_with_mired( async_fire_time_changed(hass, test_time) await hass.async_block_till_done() call = turn_on_calls[-1] - assert call.data[light.ATTR_COLOR_TEMP] == 269 + assert call.data[light.ATTR_COLOR_TEMP_KELVIN] == 3708 async def test_flux_with_rgb( From 2bb05296b8fa46b8b67967d8186ee9c50977f9f9 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Wed, 11 Dec 2024 09:46:53 +0100 Subject: [PATCH 0494/1198] Add remaining test coverage to yale_smart_alarm (#132869) --- .../test_alarm_control_panel.py | 123 +++++++++++++++++- 1 file changed, 121 insertions(+), 2 deletions(-) diff --git a/tests/components/yale_smart_alarm/test_alarm_control_panel.py b/tests/components/yale_smart_alarm/test_alarm_control_panel.py index 4e8330df071..0280223b72a 100644 --- a/tests/components/yale_smart_alarm/test_alarm_control_panel.py +++ b/tests/components/yale_smart_alarm/test_alarm_control_panel.py @@ -2,16 +2,27 @@ from __future__ import annotations +from copy import deepcopy from unittest.mock import Mock +from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion +from yalesmartalarmclient import YaleSmartAlarmData -from homeassistant.const import Platform +from homeassistant.components.alarm_control_panel import ( + DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_ARM_AWAY, + SERVICE_ALARM_ARM_HOME, + SERVICE_ALARM_DISARM, + AlarmControlPanelState, +) +from homeassistant.const import ATTR_CODE, ATTR_ENTITY_ID, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from tests.common import MockConfigEntry, snapshot_platform +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform @pytest.mark.parametrize( @@ -27,3 +38,111 @@ async def test_alarm_control_panel( """Test the Yale Smart Alarm alarm_control_panel.""" entry = load_config_entry[0] await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + + +@pytest.mark.parametrize( + "load_platforms", + [[Platform.ALARM_CONTROL_PANEL]], +) +async def test_alarm_control_panel_service_calls( + hass: HomeAssistant, + get_data: YaleSmartAlarmData, + load_config_entry: tuple[MockConfigEntry, Mock], +) -> None: + """Test the Yale Smart Alarm alarm_control_panel action calls.""" + + client = load_config_entry[1] + + data = deepcopy(get_data.cycle) + data["data"] = data["data"].pop("device_status") + + client.auth.get_authenticated = Mock(return_value=data) + client.disarm = Mock(return_value=True) + client.arm_partial = Mock(return_value=True) + client.arm_full = Mock(return_value=True) + + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_DISARM, + {ATTR_ENTITY_ID: "alarm_control_panel.test_username", ATTR_CODE: "123456"}, + blocking=True, + ) + client.disarm.assert_called_once() + state = hass.states.get("alarm_control_panel.test_username") + assert state.state == AlarmControlPanelState.DISARMED + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_ARM_HOME, + {ATTR_ENTITY_ID: "alarm_control_panel.test_username", ATTR_CODE: "123456"}, + blocking=True, + ) + 
client.arm_partial.assert_called_once() + state = hass.states.get("alarm_control_panel.test_username") + assert state.state == AlarmControlPanelState.ARMED_HOME + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_ARM_AWAY, + {ATTR_ENTITY_ID: "alarm_control_panel.test_username", ATTR_CODE: "123456"}, + blocking=True, + ) + client.arm_full.assert_called_once() + state = hass.states.get("alarm_control_panel.test_username") + assert state.state == AlarmControlPanelState.ARMED_AWAY + + client.disarm = Mock(side_effect=ConnectionError("no connection")) + + with pytest.raises( + HomeAssistantError, + match="Could not set alarm for test-username: no connection", + ): + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_DISARM, + {ATTR_ENTITY_ID: "alarm_control_panel.test_username", ATTR_CODE: "123456"}, + blocking=True, + ) + + state = hass.states.get("alarm_control_panel.test_username") + assert state.state == AlarmControlPanelState.ARMED_AWAY + + client.disarm = Mock(return_value=False) + + with pytest.raises( + HomeAssistantError, + match="Could not change alarm, check system ready for arming", + ): + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_DISARM, + {ATTR_ENTITY_ID: "alarm_control_panel.test_username", ATTR_CODE: "123456"}, + blocking=True, + ) + + state = hass.states.get("alarm_control_panel.test_username") + assert state.state == AlarmControlPanelState.ARMED_AWAY + + +@pytest.mark.parametrize( + "load_platforms", + [[Platform.ALARM_CONTROL_PANEL]], +) +async def test_alarm_control_panel_not_available( + hass: HomeAssistant, + get_data: YaleSmartAlarmData, + load_config_entry: tuple[MockConfigEntry, Mock], + freezer: FrozenDateTimeFactory, +) -> None: + """Test the Yale Smart Alarm alarm_control_panel not being available.""" + + client = load_config_entry[1] + client.get_armed_status = Mock(return_value=None) + + state = hass.states.get("alarm_control_panel.test_username") + assert state.state == AlarmControlPanelState.ARMED_AWAY + + freezer.tick(3600) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("alarm_control_panel.test_username") + assert state.state == STATE_UNAVAILABLE From 7ef3e92e2d4568ab07855ab8a2134733773ae69a Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 11 Dec 2024 09:57:29 +0100 Subject: [PATCH 0495/1198] Migrate tasmota lights to use Kelvin (#132798) --- homeassistant/components/tasmota/light.py | 38 ++++++++++++++++------- 1 file changed, 26 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/tasmota/light.py b/homeassistant/components/tasmota/light.py index 9b69ee60524..a06e77eceb1 100644 --- a/homeassistant/components/tasmota/light.py +++ b/homeassistant/components/tasmota/light.py @@ -18,7 +18,7 @@ from hatasmota.models import DiscoveryHashType from homeassistant.components import light from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, ATTR_TRANSITION, @@ -32,6 +32,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import color as color_util from .const import DATA_REMOVE_DISCOVER_COMPONENT from .discovery import TASMOTA_DISCOVERY_ENTITY_NEW 
@@ -199,19 +200,27 @@ class TasmotaLight( return self._color_mode @property - def color_temp(self) -> int | None: - """Return the color temperature in mired.""" - return self._color_temp + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" + return ( + color_util.color_temperature_mired_to_kelvin(self._color_temp) + if self._color_temp + else None + ) @property - def min_mireds(self) -> int: - """Return the coldest color_temp that this light supports.""" - return self._tasmota_entity.min_mireds + def max_color_temp_kelvin(self) -> int: + """Return the coldest color_temp_kelvin that this light supports.""" + return color_util.color_temperature_mired_to_kelvin( + self._tasmota_entity.min_mireds + ) @property - def max_mireds(self) -> int: - """Return the warmest color_temp that this light supports.""" - return self._tasmota_entity.max_mireds + def min_color_temp_kelvin(self) -> int: + """Return the warmest color_temp_kelvin that this light supports.""" + return color_util.color_temperature_mired_to_kelvin( + self._tasmota_entity.max_mireds + ) @property def effect(self) -> str | None: @@ -255,8 +264,13 @@ class TasmotaLight( if ATTR_BRIGHTNESS in kwargs and brightness_supported(supported_color_modes): attributes["brightness"] = scale_brightness(kwargs[ATTR_BRIGHTNESS]) - if ATTR_COLOR_TEMP in kwargs and ColorMode.COLOR_TEMP in supported_color_modes: - attributes["color_temp"] = int(kwargs[ATTR_COLOR_TEMP]) + if ( + ATTR_COLOR_TEMP_KELVIN in kwargs + and ColorMode.COLOR_TEMP in supported_color_modes + ): + attributes["color_temp"] = color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ) if ATTR_EFFECT in kwargs: attributes["effect"] = kwargs[ATTR_EFFECT] From 9c9e82a93e052431954e1908ca8ddc0268b470d8 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 11 Dec 2024 09:58:08 +0100 Subject: [PATCH 0496/1198] Migrate zha lights to use Kelvin (#132816) --- homeassistant/components/zha/light.py | 43 +++++++++++++++++++-------- 1 file changed, 31 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/zha/light.py b/homeassistant/components/zha/light.py index 9a22dfb02e9..2f5d9e9e4c9 100644 --- a/homeassistant/components/zha/light.py +++ b/homeassistant/components/zha/light.py @@ -15,7 +15,7 @@ from zha.application.platforms.light.const import ( from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_TRANSITION, @@ -29,6 +29,7 @@ from homeassistant.const import STATE_ON, Platform from homeassistant.core import HomeAssistant, State, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import color as color_util from .entity import ZHAEntity from .helpers import ( @@ -128,14 +129,18 @@ class Light(LightEntity, ZHAEntity): return self.entity_data.entity.brightness @property - def min_mireds(self) -> int: - """Return the coldest color_temp that this light supports.""" - return self.entity_data.entity.min_mireds + def max_color_temp_kelvin(self) -> int: + """Return the coldest color_temp_kelvin that this light supports.""" + return color_util.color_temperature_mired_to_kelvin( + self.entity_data.entity.min_mireds + ) @property - def max_mireds(self) -> int: - """Return the warmest color_temp that this light supports.""" - return self.entity_data.entity.max_mireds + 
def min_color_temp_kelvin(self) -> int: + """Return the warmest color_temp_kelvin that this light supports.""" + return color_util.color_temperature_mired_to_kelvin( + self.entity_data.entity.max_mireds + ) @property def xy_color(self) -> tuple[float, float] | None: @@ -143,9 +148,13 @@ class Light(LightEntity, ZHAEntity): return self.entity_data.entity.xy_color @property - def color_temp(self) -> int | None: - """Return the CT color value in mireds.""" - return self.entity_data.entity.color_temp + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" + return ( + color_util.color_temperature_mired_to_kelvin(mireds) + if (mireds := self.entity_data.entity.color_temp) + else None + ) @property def color_mode(self) -> ColorMode | None: @@ -167,12 +176,17 @@ class Light(LightEntity, ZHAEntity): @convert_zha_error_to_ha_error async def async_turn_on(self, **kwargs: Any) -> None: """Turn the entity on.""" + color_temp = ( + color_util.color_temperature_kelvin_to_mired(color_temp_k) + if (color_temp_k := kwargs.get(ATTR_COLOR_TEMP_KELVIN)) + else None + ) await self.entity_data.entity.async_turn_on( transition=kwargs.get(ATTR_TRANSITION), brightness=kwargs.get(ATTR_BRIGHTNESS), effect=kwargs.get(ATTR_EFFECT), flash=kwargs.get(ATTR_FLASH), - color_temp=kwargs.get(ATTR_COLOR_TEMP), + color_temp=color_temp, xy_color=kwargs.get(ATTR_XY_COLOR), ) self.async_write_ha_state() @@ -188,12 +202,17 @@ class Light(LightEntity, ZHAEntity): @callback def restore_external_state_attributes(self, state: State) -> None: """Restore entity state.""" + color_temp = ( + color_util.color_temperature_kelvin_to_mired(color_temp_k) + if (color_temp_k := state.attributes.get(ATTR_COLOR_TEMP_KELVIN)) + else None + ) self.entity_data.entity.restore_external_state_attributes( state=(state.state == STATE_ON), off_with_transition=state.attributes.get(OFF_WITH_TRANSITION), off_brightness=state.attributes.get(OFF_BRIGHTNESS), brightness=state.attributes.get(ATTR_BRIGHTNESS), - color_temp=state.attributes.get(ATTR_COLOR_TEMP), + color_temp=color_temp, xy_color=state.attributes.get(ATTR_XY_COLOR), color_mode=( HA_TO_ZHA_COLOR_MODE[ColorMode(state.attributes[ATTR_COLOR_MODE])] From 0e8961276fed60a7892945625e1e10b66820d459 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Wed, 11 Dec 2024 10:50:42 +0100 Subject: [PATCH 0497/1198] Enable pydantic.v1 mypy plugin (#132907) --- homeassistant/components/bang_olufsen/const.py | 16 ++++++++-------- homeassistant/components/bang_olufsen/entity.py | 2 +- .../components/bang_olufsen/media_player.py | 14 +++++++------- homeassistant/components/google/__init__.py | 4 ++-- homeassistant/components/google/calendar.py | 10 +++++----- homeassistant/components/google/coordinator.py | 4 ++-- mypy.ini | 2 +- script/hassfest/mypy_config.py | 7 ++++++- 8 files changed, 32 insertions(+), 27 deletions(-) diff --git a/homeassistant/components/bang_olufsen/const.py b/homeassistant/components/bang_olufsen/const.py index 7f87ce11097..9f0649e610b 100644 --- a/homeassistant/components/bang_olufsen/const.py +++ b/homeassistant/components/bang_olufsen/const.py @@ -137,7 +137,7 @@ VALID_MEDIA_TYPES: Final[tuple] = ( # Fallback sources to use in case of API failure. 
FALLBACK_SOURCES: Final[SourceArray] = SourceArray( items=[ - Source( # type: ignore[call-arg] + Source( id="uriStreamer", is_enabled=True, is_playable=True, @@ -145,7 +145,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( type=SourceTypeEnum(value="uriStreamer"), is_seekable=False, ), - Source( # type: ignore[call-arg] + Source( id="bluetooth", is_enabled=True, is_playable=True, @@ -153,7 +153,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( type=SourceTypeEnum(value="bluetooth"), is_seekable=False, ), - Source( # type: ignore[call-arg] + Source( id="spotify", is_enabled=True, is_playable=True, @@ -161,7 +161,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( type=SourceTypeEnum(value="spotify"), is_seekable=True, ), - Source( # type: ignore[call-arg] + Source( id="lineIn", is_enabled=True, is_playable=True, @@ -169,7 +169,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( type=SourceTypeEnum(value="lineIn"), is_seekable=False, ), - Source( # type: ignore[call-arg] + Source( id="spdif", is_enabled=True, is_playable=True, @@ -177,7 +177,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( type=SourceTypeEnum(value="spdif"), is_seekable=False, ), - Source( # type: ignore[call-arg] + Source( id="netRadio", is_enabled=True, is_playable=True, @@ -185,7 +185,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( type=SourceTypeEnum(value="netRadio"), is_seekable=False, ), - Source( # type: ignore[call-arg] + Source( id="deezer", is_enabled=True, is_playable=True, @@ -193,7 +193,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( type=SourceTypeEnum(value="deezer"), is_seekable=True, ), - Source( # type: ignore[call-arg] + Source( id="tidalConnect", is_enabled=True, is_playable=True, diff --git a/homeassistant/components/bang_olufsen/entity.py b/homeassistant/components/bang_olufsen/entity.py index 77fe7c6a1ff..8ed68da1678 100644 --- a/homeassistant/components/bang_olufsen/entity.py +++ b/homeassistant/components/bang_olufsen/entity.py @@ -42,7 +42,7 @@ class BangOlufsenBase: # Objects that get directly updated by notifications. self._playback_metadata: PlaybackContentMetadata = PlaybackContentMetadata() - self._playback_progress: PlaybackProgress = PlaybackProgress(total_duration=0) # type: ignore[call-arg] + self._playback_progress: PlaybackProgress = PlaybackProgress(total_duration=0) self._playback_source: Source = Source() self._playback_state: RenderingState = RenderingState() self._source_change: Source = Source() diff --git a/homeassistant/components/bang_olufsen/media_player.py b/homeassistant/components/bang_olufsen/media_player.py index d8b7a1bf940..282ecdd2ae5 100644 --- a/homeassistant/components/bang_olufsen/media_player.py +++ b/homeassistant/components/bang_olufsen/media_player.py @@ -210,9 +210,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): # Misc. variables. 
self._audio_sources: dict[str, str] = {} self._media_image: Art = Art() - self._software_status: SoftwareUpdateStatus = SoftwareUpdateStatus( # type: ignore[call-arg] + self._software_status: SoftwareUpdateStatus = SoftwareUpdateStatus( software_version="", - state=SoftwareUpdateState(seconds_remaining=0, value="idle"), # type: ignore[call-arg] + state=SoftwareUpdateState(seconds_remaining=0, value="idle"), ) self._sources: dict[str, str] = {} self._state: str = MediaPlayerState.IDLE @@ -896,9 +896,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): elif media_type == BangOlufsenMediaType.RADIO: await self._client.run_provided_scene( - scene_properties=SceneProperties( # type: ignore[call-arg] + scene_properties=SceneProperties( action_list=[ - Action( # type: ignore[call-arg] + Action( type="radio", radio_station_id=media_id, ) @@ -919,7 +919,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): deezer_id = kwargs[ATTR_MEDIA_EXTRA]["id"] await self._client.start_deezer_flow( - user_flow=UserFlow(user_id=deezer_id) # type: ignore[call-arg] + user_flow=UserFlow(user_id=deezer_id) ) # Play a playlist or album. @@ -929,7 +929,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): start_from = kwargs[ATTR_MEDIA_EXTRA]["start_from"] await self._client.add_to_queue( - play_queue_item=PlayQueueItem( # type: ignore[call-arg] + play_queue_item=PlayQueueItem( provider=PlayQueueItemType(value=media_type), start_now_from_position=start_from, type="playlist", @@ -940,7 +940,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): # Play a track. else: await self._client.add_to_queue( - play_queue_item=PlayQueueItem( # type: ignore[call-arg] + play_queue_item=PlayQueueItem( provider=PlayQueueItemType(value=media_type), start_now_from_position=0, type="track", diff --git a/homeassistant/components/google/__init__.py b/homeassistant/components/google/__init__.py index 1d204883579..2ad400aabab 100644 --- a/homeassistant/components/google/__init__.py +++ b/homeassistant/components/google/__init__.py @@ -277,10 +277,10 @@ async def async_setup_add_event_service( elif EVENT_START_DATETIME in call.data and EVENT_END_DATETIME in call.data: start_dt = call.data[EVENT_START_DATETIME] end_dt = call.data[EVENT_END_DATETIME] - start = DateOrDatetime( # type: ignore[call-arg] + start = DateOrDatetime( date_time=start_dt, timezone=str(hass.config.time_zone) ) - end = DateOrDatetime(date_time=end_dt, timezone=str(hass.config.time_zone)) # type: ignore[call-arg] + end = DateOrDatetime(date_time=end_dt, timezone=str(hass.config.time_zone)) if start is None or end is None: raise ValueError( diff --git a/homeassistant/components/google/calendar.py b/homeassistant/components/google/calendar.py index 045e0e31b46..5ac5dae616c 100644 --- a/homeassistant/components/google/calendar.py +++ b/homeassistant/components/google/calendar.py @@ -272,7 +272,7 @@ async def async_setup_entry( entity_description.search, ) else: - request_template = SyncEventsRequest( # type: ignore[call-arg] + request_template = SyncEventsRequest( calendar_id=calendar_id, start_time=dt_util.now() + SYNC_EVENT_MIN_TIME, ) @@ -437,11 +437,11 @@ class GoogleCalendarEntity( start: DateOrDatetime end: DateOrDatetime if isinstance(dtstart, datetime): - start = DateOrDatetime( # type: ignore[call-arg] + start = DateOrDatetime( date_time=dt_util.as_local(dtstart), timezone=str(dt_util.get_default_time_zone()), ) - end = DateOrDatetime( # type: ignore[call-arg] + end = DateOrDatetime( 
date_time=dt_util.as_local(dtend), timezone=str(dt_util.get_default_time_zone()), ) @@ -543,8 +543,8 @@ async def async_create_event(entity: GoogleCalendarEntity, call: ServiceCall) -> elif EVENT_START_DATETIME in call.data and EVENT_END_DATETIME in call.data: start_dt = call.data[EVENT_START_DATETIME] end_dt = call.data[EVENT_END_DATETIME] - start = DateOrDatetime(date_time=start_dt, timezone=str(hass.config.time_zone)) # type: ignore[call-arg] - end = DateOrDatetime(date_time=end_dt, timezone=str(hass.config.time_zone)) # type: ignore[call-arg] + start = DateOrDatetime(date_time=start_dt, timezone=str(hass.config.time_zone)) + end = DateOrDatetime(date_time=end_dt, timezone=str(hass.config.time_zone)) if start is None or end is None: raise ValueError("Missing required fields to set start or end date/datetime") diff --git a/homeassistant/components/google/coordinator.py b/homeassistant/components/google/coordinator.py index 06f33782479..19198041c05 100644 --- a/homeassistant/components/google/coordinator.py +++ b/homeassistant/components/google/coordinator.py @@ -131,7 +131,7 @@ class CalendarQueryUpdateCoordinator(DataUpdateCoordinator[list[Event]]): self, start_date: datetime, end_date: datetime ) -> Iterable[Event]: """Get all events in a specific time frame.""" - request = ListEventsRequest( # type: ignore[call-arg] + request = ListEventsRequest( calendar_id=self.calendar_id, start_time=start_date, end_time=end_date, @@ -149,7 +149,7 @@ class CalendarQueryUpdateCoordinator(DataUpdateCoordinator[list[Event]]): async def _async_update_data(self) -> list[Event]: """Fetch data from API endpoint.""" - request = ListEventsRequest(calendar_id=self.calendar_id, search=self._search) # type: ignore[call-arg] + request = ListEventsRequest(calendar_id=self.calendar_id, search=self._search) try: result = await self.calendar_service.async_list_events(request) except ApiException as err: diff --git a/mypy.ini b/mypy.ini index fb58810515b..4e5d4212ee9 100644 --- a/mypy.ini +++ b/mypy.ini @@ -5,7 +5,7 @@ [mypy] python_version = 3.12 platform = linux -plugins = pydantic.mypy +plugins = pydantic.mypy, pydantic.v1.mypy show_error_codes = true follow_imports = normal local_partial_types = true diff --git a/script/hassfest/mypy_config.py b/script/hassfest/mypy_config.py index ec4d4b3d3a9..5767066c943 100644 --- a/script/hassfest/mypy_config.py +++ b/script/hassfest/mypy_config.py @@ -33,7 +33,12 @@ HEADER: Final = """ GENERAL_SETTINGS: Final[dict[str, str]] = { "python_version": ".".join(str(x) for x in REQUIRED_PYTHON_VER[:2]), "platform": "linux", - "plugins": "pydantic.mypy", + "plugins": ", ".join( # noqa: FLY002 + [ + "pydantic.mypy", + "pydantic.v1.mypy", + ] + ), "show_error_codes": "true", "follow_imports": "normal", # "enable_incomplete_feature": ", ".join( # noqa: FLY002 From beda2737212bc8ac365eaeaf28e24e83565b4978 Mon Sep 17 00:00:00 2001 From: shapournemati-iotty <130070037+shapournemati-iotty@users.noreply.github.com> Date: Wed, 11 Dec 2024 10:52:47 +0100 Subject: [PATCH 0498/1198] upgrade iottycloud lib to 0.3.0 (#132836) --- homeassistant/components/iotty/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/iotty/manifest.json b/homeassistant/components/iotty/manifest.json index db81f7c5839..5425ce3b480 100644 --- a/homeassistant/components/iotty/manifest.json +++ b/homeassistant/components/iotty/manifest.json @@ -7,5 +7,5 @@ "documentation": 
"https://www.home-assistant.io/integrations/iotty", "integration_type": "device", "iot_class": "cloud_polling", - "requirements": ["iottycloud==0.2.1"] + "requirements": ["iottycloud==0.3.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 872a2123a9c..bf6b5bbaeec 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1207,7 +1207,7 @@ insteon-frontend-home-assistant==0.5.0 intellifire4py==4.1.9 # homeassistant.components.iotty -iottycloud==0.2.1 +iottycloud==0.3.0 # homeassistant.components.iperf3 iperf3==0.1.11 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5b428194aa2..5d8a15bc202 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1018,7 +1018,7 @@ insteon-frontend-home-assistant==0.5.0 intellifire4py==4.1.9 # homeassistant.components.iotty -iottycloud==0.2.1 +iottycloud==0.3.0 # homeassistant.components.isal isal==1.7.1 From b26583b0bf501bc229403a2cc7b7de08cb9c6b96 Mon Sep 17 00:00:00 2001 From: Simon Lamon <32477463+silamon@users.noreply.github.com> Date: Wed, 11 Dec 2024 11:12:05 +0100 Subject: [PATCH 0499/1198] Bump python-linkplay to v0.1.1 (#132091) --- homeassistant/components/linkplay/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/linkplay/test_diagnostics.py | 6 ++++-- 4 files changed, 7 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/linkplay/manifest.json b/homeassistant/components/linkplay/manifest.json index e74d22b8207..cc124ceb611 100644 --- a/homeassistant/components/linkplay/manifest.json +++ b/homeassistant/components/linkplay/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["linkplay"], - "requirements": ["python-linkplay==0.0.20"], + "requirements": ["python-linkplay==0.1.1"], "zeroconf": ["_linkplay._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index bf6b5bbaeec..b263779e67f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2368,7 +2368,7 @@ python-juicenet==1.1.0 python-kasa[speedups]==0.8.1 # homeassistant.components.linkplay -python-linkplay==0.0.20 +python-linkplay==0.1.1 # homeassistant.components.lirc # python-lirc==1.2.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5d8a15bc202..d641a0fa4e2 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1898,7 +1898,7 @@ python-juicenet==1.1.0 python-kasa[speedups]==0.8.1 # homeassistant.components.linkplay -python-linkplay==0.0.20 +python-linkplay==0.1.1 # homeassistant.components.matter python-matter-server==6.6.0 diff --git a/tests/components/linkplay/test_diagnostics.py b/tests/components/linkplay/test_diagnostics.py index 369142978a3..de60b7ecb3a 100644 --- a/tests/components/linkplay/test_diagnostics.py +++ b/tests/components/linkplay/test_diagnostics.py @@ -31,8 +31,10 @@ async def test_diagnostics( patch.object(LinkPlayMultiroom, "update_status", return_value=None), ): endpoints = [ - LinkPlayApiEndpoint(protocol="https", endpoint=HOST, session=None), - LinkPlayApiEndpoint(protocol="http", endpoint=HOST, session=None), + LinkPlayApiEndpoint( + protocol="https", port=443, endpoint=HOST, session=None + ), + LinkPlayApiEndpoint(protocol="http", port=80, endpoint=HOST, session=None), ] for endpoint in endpoints: mock_session.get( From dc8b7cfede78891d44c86c16a454582116cea9ed Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Wed, 11 Dec 2024 11:51:16 +0100 Subject: [PATCH 0500/1198] Allow 
bytearray for mqtt payload type (#132906) --- homeassistant/components/mqtt/client.py | 2 +- homeassistant/components/mqtt/switch.py | 2 +- homeassistant/helpers/service_info/mqtt.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/mqtt/client.py b/homeassistant/components/mqtt/client.py index d8bc0862d29..0091d2370a4 100644 --- a/homeassistant/components/mqtt/client.py +++ b/homeassistant/components/mqtt/client.py @@ -119,7 +119,7 @@ MAX_PACKETS_TO_READ = 500 type SocketType = socket.socket | ssl.SSLSocket | mqtt.WebsocketWrapper | Any -type SubscribePayloadType = str | bytes # Only bytes if encoding is None +type SubscribePayloadType = str | bytes | bytearray # Only bytes if encoding is None def publish( diff --git a/homeassistant/components/mqtt/switch.py b/homeassistant/components/mqtt/switch.py index c90174e8a01..0a54bcdb378 100644 --- a/homeassistant/components/mqtt/switch.py +++ b/homeassistant/components/mqtt/switch.py @@ -91,7 +91,7 @@ class MqttSwitch(MqttEntity, SwitchEntity, RestoreEntity): _entity_id_format = switch.ENTITY_ID_FORMAT _optimistic: bool - _is_on_map: dict[str | bytes, bool | None] + _is_on_map: dict[str | bytes | bytearray, bool | None] _command_template: Callable[[PublishPayloadType], PublishPayloadType] _value_template: Callable[[ReceivePayloadType], ReceivePayloadType] diff --git a/homeassistant/helpers/service_info/mqtt.py b/homeassistant/helpers/service_info/mqtt.py index 6ffc981ced1..a5284807617 100644 --- a/homeassistant/helpers/service_info/mqtt.py +++ b/homeassistant/helpers/service_info/mqtt.py @@ -4,7 +4,7 @@ from dataclasses import dataclass from homeassistant.data_entry_flow import BaseServiceInfo -type ReceivePayloadType = str | bytes +type ReceivePayloadType = str | bytes | bytearray @dataclass(slots=True) From 7103b7fd8098bbc4d0a71403a47d45a3eab86de0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=85ke=20Strandberg?= Date: Wed, 11 Dec 2024 13:01:02 +0100 Subject: [PATCH 0501/1198] Use snapshot tests for remaining myuplink platforms (#132915) Co-authored-by: Joost Lekkerkerker --- .../components/myuplink/quality_scale.yaml | 6 +- .../myuplink/snapshots/test_number.ambr | 335 ++++++++++++++++++ .../myuplink/snapshots/test_select.ambr | 119 +++++++ .../myuplink/snapshots/test_switch.ambr | 185 ++++++++++ tests/components/myuplink/test_number.py | 34 +- tests/components/myuplink/test_select.py | 37 +- tests/components/myuplink/test_switch.py | 31 +- 7 files changed, 689 insertions(+), 58 deletions(-) create mode 100644 tests/components/myuplink/snapshots/test_number.ambr create mode 100644 tests/components/myuplink/snapshots/test_select.ambr create mode 100644 tests/components/myuplink/snapshots/test_switch.ambr diff --git a/homeassistant/components/myuplink/quality_scale.yaml b/homeassistant/components/myuplink/quality_scale.yaml index b876f4c329c..661986a2f71 100644 --- a/homeassistant/components/myuplink/quality_scale.yaml +++ b/homeassistant/components/myuplink/quality_scale.yaml @@ -7,7 +7,7 @@ rules: appropriate-polling: done brands: done common-modules: done - config-flow-test-coverage: todo + config-flow-test-coverage: done config-flow: done dependency-transparency: done docs-actions: @@ -47,9 +47,7 @@ rules: status: exempt comment: Handled by coordinator reauthentication-flow: done - test-coverage: - status: todo - comment: PR is pending review + test-coverage: done # Gold devices: done diff --git a/tests/components/myuplink/snapshots/test_number.ambr 
b/tests/components/myuplink/snapshots/test_number.ambr new file mode 100644 index 00000000000..db1a8e0949f --- /dev/null +++ b/tests/components/myuplink/snapshots/test_number.ambr @@ -0,0 +1,335 @@ +# serializer version: 1 +# name: test_number_states[platforms0][number.gotham_city_degree_minutes-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 3000.0, + 'min': -3000.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.gotham_city_degree_minutes', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Degree minutes', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'degree_minutes', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40940', + 'unit_of_measurement': 'DM', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_degree_minutes-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Degree minutes', + 'max': 3000.0, + 'min': -3000.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': 'DM', + }), + 'context': , + 'entity_id': 'number.gotham_city_degree_minutes', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-875.0', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_degree_minutes_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 3000.0, + 'min': -3000.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.gotham_city_degree_minutes_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Degree minutes', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'degree_minutes', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40940', + 'unit_of_measurement': 'DM', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_degree_minutes_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Degree minutes', + 'max': 3000.0, + 'min': -3000.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': 'DM', + }), + 'context': , + 'entity_id': 'number.gotham_city_degree_minutes_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-875.0', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_heating_offset_climate_system_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 10.0, + 'min': -10.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.gotham_city_heating_offset_climate_system_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating offset climate system 1', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47011', + 'unit_of_measurement': None, + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_heating_offset_climate_system_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Heating offset climate system 1', + 'max': 10.0, + 'min': -10.0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.gotham_city_heating_offset_climate_system_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.0', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_heating_offset_climate_system_1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 10.0, + 'min': -10.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.gotham_city_heating_offset_climate_system_1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating offset climate system 1', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47011', + 'unit_of_measurement': None, + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_heating_offset_climate_system_1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Heating offset climate system 1', + 'max': 10.0, + 'min': -10.0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.gotham_city_heating_offset_climate_system_1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.0', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_start_diff_additional_heat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 2000.0, + 'min': 100.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.gotham_city_start_diff_additional_heat', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'start diff additional heat', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'degree_minutes', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-148072', + 'unit_of_measurement': 'DM', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_start_diff_additional_heat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City start diff additional heat', + 'max': 2000.0, + 'min': 100.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': 'DM', + }), + 'context': , + 'entity_id': 'number.gotham_city_start_diff_additional_heat', + 'last_changed': , + 'last_reported': , 
+ 'last_updated': , + 'state': '700.0', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_start_diff_additional_heat_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 2000.0, + 'min': 100.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.gotham_city_start_diff_additional_heat_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'start diff additional heat', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'degree_minutes', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-148072', + 'unit_of_measurement': 'DM', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_start_diff_additional_heat_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City start diff additional heat', + 'max': 2000.0, + 'min': 100.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': 'DM', + }), + 'context': , + 'entity_id': 'number.gotham_city_start_diff_additional_heat_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '700.0', + }) +# --- diff --git a/tests/components/myuplink/snapshots/test_select.ambr b/tests/components/myuplink/snapshots/test_select.ambr new file mode 100644 index 00000000000..eff06bc7f2d --- /dev/null +++ b/tests/components/myuplink/snapshots/test_select.ambr @@ -0,0 +1,119 @@ +# serializer version: 1 +# name: test_select_states[platforms0][select.gotham_city_comfort_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Smart control', + 'Economy', + 'Normal', + 'Luxury', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.gotham_city_comfort_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'comfort mode', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47041', + 'unit_of_measurement': None, + }) +# --- +# name: test_select_states[platforms0][select.gotham_city_comfort_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City comfort mode', + 'options': list([ + 'Smart control', + 'Economy', + 'Normal', + 'Luxury', + ]), + }), + 'context': , + 'entity_id': 'select.gotham_city_comfort_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Economy', + }) +# --- +# name: test_select_states[platforms0][select.gotham_city_comfort_mode_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Smart control', + 'Economy', + 'Normal', + 'Luxury', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 
'select.gotham_city_comfort_mode_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'comfort mode', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47041', + 'unit_of_measurement': None, + }) +# --- +# name: test_select_states[platforms0][select.gotham_city_comfort_mode_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City comfort mode', + 'options': list([ + 'Smart control', + 'Economy', + 'Normal', + 'Luxury', + ]), + }), + 'context': , + 'entity_id': 'select.gotham_city_comfort_mode_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Economy', + }) +# --- diff --git a/tests/components/myuplink/snapshots/test_switch.ambr b/tests/components/myuplink/snapshots/test_switch.ambr new file mode 100644 index 00000000000..5d621e661ee --- /dev/null +++ b/tests/components/myuplink/snapshots/test_switch.ambr @@ -0,0 +1,185 @@ +# serializer version: 1 +# name: test_switch_states[platforms0][switch.gotham_city_increased_ventilation-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.gotham_city_increased_ventilation', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'In\xadcreased venti\xadlation', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'boost_ventilation', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-50005', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_states[platforms0][switch.gotham_city_increased_ventilation-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City In\xadcreased venti\xadlation', + }), + 'context': , + 'entity_id': 'switch.gotham_city_increased_ventilation', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch_states[platforms0][switch.gotham_city_increased_ventilation_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.gotham_city_increased_ventilation_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'In\xadcreased venti\xadlation', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'boost_ventilation', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-50005', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_states[platforms0][switch.gotham_city_increased_ventilation_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City In\xadcreased venti\xadlation', + }), + 'context': 
, + 'entity_id': 'switch.gotham_city_increased_ventilation_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch_states[platforms0][switch.gotham_city_temporary_lux-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.gotham_city_temporary_lux', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tempo\xadrary lux', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temporary_lux', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-50004', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_states[platforms0][switch.gotham_city_temporary_lux-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Tempo\xadrary lux', + }), + 'context': , + 'entity_id': 'switch.gotham_city_temporary_lux', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch_states[platforms0][switch.gotham_city_temporary_lux_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.gotham_city_temporary_lux_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tempo\xadrary lux', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temporary_lux', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-50004', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_states[platforms0][switch.gotham_city_temporary_lux_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Tempo\xadrary lux', + }), + 'context': , + 'entity_id': 'switch.gotham_city_temporary_lux_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/myuplink/test_number.py b/tests/components/myuplink/test_number.py index 4106af1b5b9..ef7b1749782 100644 --- a/tests/components/myuplink/test_number.py +++ b/tests/components/myuplink/test_number.py @@ -4,6 +4,7 @@ from unittest.mock import MagicMock from aiohttp import ClientError import pytest +from syrupy import SnapshotAssertion from homeassistant.components.number import SERVICE_SET_VALUE from homeassistant.const import ATTR_ENTITY_ID, Platform @@ -11,6 +12,8 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er +from tests.common import MockConfigEntry, snapshot_platform + TEST_PLATFORM = Platform.NUMBER pytestmark = pytest.mark.parametrize("platforms", [(TEST_PLATFORM,)]) @@ -31,24 +34,6 @@ async def test_entity_registry( assert entry.unique_id == ENTITY_UID -async def test_attributes( - hass: HomeAssistant, - mock_myuplink_client: MagicMock, - setup_platform: None, 
-) -> None: - """Test the entity attributes are correct.""" - - state = hass.states.get(ENTITY_ID) - assert state.state == "1.0" - assert state.attributes == { - "friendly_name": ENTITY_FRIENDLY_NAME, - "min": -10.0, - "max": 10.0, - "mode": "auto", - "step": 1.0, - } - - async def test_set_value( hass: HomeAssistant, mock_myuplink_client: MagicMock, @@ -98,3 +83,16 @@ async def test_entity_registry_smo20( entry = entity_registry.async_get("number.gotham_city_change_in_curve") assert entry.unique_id == "robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47028" + + +async def test_number_states( + hass: HomeAssistant, + mock_myuplink_client: MagicMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + setup_platform: None, +) -> None: + """Test number entity state.""" + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/myuplink/test_select.py b/tests/components/myuplink/test_select.py index 7ad2d17cb5d..f1797ebe5ad 100644 --- a/tests/components/myuplink/test_select.py +++ b/tests/components/myuplink/test_select.py @@ -4,6 +4,7 @@ from unittest.mock import MagicMock from aiohttp import ClientError import pytest +from syrupy import SnapshotAssertion from homeassistant.const import ( ATTR_ENTITY_ID, @@ -15,6 +16,8 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er +from tests.common import MockConfigEntry, snapshot_platform + TEST_PLATFORM = Platform.SELECT pytestmark = pytest.mark.parametrize("platforms", [(TEST_PLATFORM,)]) @@ -23,27 +26,6 @@ ENTITY_FRIENDLY_NAME = "Gotham City comfort mode" ENTITY_UID = "robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47041" -async def test_select_entity( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_myuplink_client: MagicMock, - setup_platform: None, -) -> None: - """Test that the entities are registered in the entity registry.""" - - entry = entity_registry.async_get(ENTITY_ID) - assert entry.unique_id == ENTITY_UID - - # Test the select attributes are correct. 
- - state = hass.states.get(ENTITY_ID) - assert state.state == "Economy" - assert state.attributes == { - "options": ["Smart control", "Economy", "Normal", "Luxury"], - "friendly_name": ENTITY_FRIENDLY_NAME, - } - - async def test_selecting( hass: HomeAssistant, mock_myuplink_client: MagicMock, @@ -87,3 +69,16 @@ async def test_entity_registry_smo20( entry = entity_registry.async_get("select.gotham_city_all") assert entry.unique_id == "robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47660" + + +async def test_select_states( + hass: HomeAssistant, + mock_myuplink_client: MagicMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + setup_platform: None, +) -> None: + """Test select entity state.""" + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/myuplink/test_switch.py b/tests/components/myuplink/test_switch.py index 5e309e7152e..82d381df7fc 100644 --- a/tests/components/myuplink/test_switch.py +++ b/tests/components/myuplink/test_switch.py @@ -4,18 +4,20 @@ from unittest.mock import MagicMock from aiohttp import ClientError import pytest +from syrupy import SnapshotAssertion from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON, - STATE_OFF, Platform, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er +from tests.common import MockConfigEntry, snapshot_platform + TEST_PLATFORM = Platform.SWITCH pytestmark = pytest.mark.parametrize("platforms", [(TEST_PLATFORM,)]) @@ -36,20 +38,6 @@ async def test_entity_registry( assert entry.unique_id == ENTITY_UID -async def test_attributes( - hass: HomeAssistant, - mock_myuplink_client: MagicMock, - setup_platform: None, -) -> None: - """Test the switch attributes are correct.""" - - state = hass.states.get(ENTITY_ID) - assert state.state == STATE_OFF - assert state.attributes == { - "friendly_name": ENTITY_FRIENDLY_NAME, - } - - @pytest.mark.parametrize( ("service"), [ @@ -109,3 +97,16 @@ async def test_entity_registry_smo20( entry = entity_registry.async_get(ENTITY_ID) assert entry.unique_id == ENTITY_UID + + +async def test_switch_states( + hass: HomeAssistant, + mock_myuplink_client: MagicMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + setup_platform: None, +) -> None: + """Test switch entity state.""" + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) From ecfa88891868bd3ca0685d8dc9edc0ec87c1eec8 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Wed, 11 Dec 2024 13:52:53 +0100 Subject: [PATCH 0502/1198] Create quality_scale.yaml from integration scaffold script (#132199) Co-authored-by: Josef Zweck <24647999+zweckj@users.noreply.github.com> --- script/scaffold/__main__.py | 2 +- script/scaffold/generate.py | 2 +- .../config_flow/integration/config_flow.py | 2 +- .../integration/config_flow.py | 2 +- .../integration/config_flow.py | 2 +- .../integration/application_credentials.py | 6 +- .../integration/quality_scale.yaml | 60 +++++++++++++++++++ 7 files changed, 67 insertions(+), 9 deletions(-) create mode 100644 script/scaffold/templates/integration/integration/quality_scale.yaml diff --git a/script/scaffold/__main__.py b/script/scaffold/__main__.py index 45dbed790e6..93c787df50f 100644 --- a/script/scaffold/__main__.py +++ b/script/scaffold/__main__.py @@ -28,7 +28,7 @@ def 
get_arguments() -> argparse.Namespace: return parser.parse_args() -def main(): +def main() -> int: """Scaffold an integration.""" if not Path("requirements_all.txt").is_file(): print("Run from project root") diff --git a/script/scaffold/generate.py b/script/scaffold/generate.py index 0bee69b93f8..9ca5ead5719 100644 --- a/script/scaffold/generate.py +++ b/script/scaffold/generate.py @@ -19,7 +19,7 @@ def generate(template: str, info: Info) -> None: print() -def _generate(src_dir, target_dir, info: Info) -> None: +def _generate(src_dir: Path, target_dir: Path, info: Info) -> None: """Generate an integration.""" replaces = {"NEW_DOMAIN": info.domain, "NEW_NAME": info.name} diff --git a/script/scaffold/templates/config_flow/integration/config_flow.py b/script/scaffold/templates/config_flow/integration/config_flow.py index 0bff976f288..06db7592840 100644 --- a/script/scaffold/templates/config_flow/integration/config_flow.py +++ b/script/scaffold/templates/config_flow/integration/config_flow.py @@ -1,4 +1,4 @@ -"""Config flow for NEW_NAME integration.""" +"""Config flow for the NEW_NAME integration.""" from __future__ import annotations diff --git a/script/scaffold/templates/config_flow_discovery/integration/config_flow.py b/script/scaffold/templates/config_flow_discovery/integration/config_flow.py index e2cfed40e1d..570b70b85aa 100644 --- a/script/scaffold/templates/config_flow_discovery/integration/config_flow.py +++ b/script/scaffold/templates/config_flow_discovery/integration/config_flow.py @@ -1,4 +1,4 @@ -"""Config flow for NEW_NAME.""" +"""Config flow for the NEW_NAME integration.""" import my_pypi_dependency diff --git a/script/scaffold/templates/config_flow_helper/integration/config_flow.py b/script/scaffold/templates/config_flow_helper/integration/config_flow.py index 5d89fec2da2..c2ab7a205da 100644 --- a/script/scaffold/templates/config_flow_helper/integration/config_flow.py +++ b/script/scaffold/templates/config_flow_helper/integration/config_flow.py @@ -1,4 +1,4 @@ -"""Config flow for NEW_NAME integration.""" +"""Config flow for the NEW_NAME integration.""" from __future__ import annotations diff --git a/script/scaffold/templates/config_flow_oauth2/integration/application_credentials.py b/script/scaffold/templates/config_flow_oauth2/integration/application_credentials.py index 51ef70b1885..0f01c8402df 100644 --- a/script/scaffold/templates/config_flow_oauth2/integration/application_credentials.py +++ b/script/scaffold/templates/config_flow_oauth2/integration/application_credentials.py @@ -1,11 +1,9 @@ -"""application_credentials platform the NEW_NAME integration.""" +"""Application credentials platform for the NEW_NAME integration.""" from homeassistant.components.application_credentials import AuthorizationServer from homeassistant.core import HomeAssistant -# TODO Update with your own urls -OAUTH2_AUTHORIZE = "https://www.example.com/auth/authorize" -OAUTH2_TOKEN = "https://www.example.com/auth/token" +from .const import OAUTH2_AUTHORIZE, OAUTH2_TOKEN async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer: diff --git a/script/scaffold/templates/integration/integration/quality_scale.yaml b/script/scaffold/templates/integration/integration/quality_scale.yaml new file mode 100644 index 00000000000..201a91652e5 --- /dev/null +++ b/script/scaffold/templates/integration/integration/quality_scale.yaml @@ -0,0 +1,60 @@ +rules: + # Bronze + action-setup: todo + appropriate-polling: todo + brands: todo + common-modules: todo + config-flow-test-coverage: todo + 
config-flow: todo + dependency-transparency: todo + docs-actions: todo + docs-high-level-description: todo + docs-installation-instructions: todo + docs-removal-instructions: todo + entity-event-setup: todo + entity-unique-id: todo + has-entity-name: todo + runtime-data: todo + test-before-configure: todo + test-before-setup: todo + unique-config-entry: todo + + # Silver + action-exceptions: todo + config-entry-unloading: todo + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: todo + integration-owner: todo + log-when-unavailable: todo + parallel-updates: todo + reauthentication-flow: todo + test-coverage: todo + + # Gold + devices: todo + diagnostics: todo + discovery-update-info: todo + discovery: todo + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: todo + entity-category: todo + entity-device-class: todo + entity-disabled-by-default: todo + entity-translations: todo + exception-translations: todo + icon-translations: todo + reconfiguration-flow: todo + repair-issues: todo + stale-devices: todo + + # Platinum + async-dependency: todo + inject-websession: todo + strict-typing: todo From f9744799704ce91abb7988d09bcae924a4bdae2e Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Wed, 11 Dec 2024 13:53:14 +0100 Subject: [PATCH 0503/1198] Velbus add quality_scale.yaml (#131377) Co-authored-by: Allen Porter Co-authored-by: Joost Lekkerkerker --- .../components/velbus/quality_scale.yaml | 82 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 82 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/velbus/quality_scale.yaml diff --git a/homeassistant/components/velbus/quality_scale.yaml b/homeassistant/components/velbus/quality_scale.yaml new file mode 100644 index 00000000000..f3ab8f607b6 --- /dev/null +++ b/homeassistant/components/velbus/quality_scale.yaml @@ -0,0 +1,82 @@ +rules: + # Bronze + action-setup: todo + appropriate-polling: + status: exempt + comment: | + This integration does not poll. + brands: done + common-modules: done + config-flow-test-coverage: + status: todo + comment: | + Split test_flow_usb from the test that tests already_configured, test_flow_usb should also assert the unique_id of the entry + config-flow: + status: todo + comment: | + Dynamically build up the port parameter based on inputs provided by the user, do not fill-in a name parameter, build it up in the config flow + dependency-transparency: done + docs-actions: todo + docs-high-level-description: todo + docs-installation-instructions: todo + docs-removal-instructions: todo + entity-event-setup: todo + entity-unique-id: done + has-entity-name: todo + runtime-data: todo + test-before-configure: done + test-before-setup: todo + unique-config-entry: + status: todo + comment: | + Manual step does not generate an unique-id + + # Silver + action-exceptions: todo + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: todo + integration-owner: done + log-when-unavailable: done + parallel-updates: todo + reauthentication-flow: + status: exempt + comment: | + This integration does not require authentication. 
+ test-coverage: todo + # Gold + devices: done + diagnostics: done + discovery-update-info: done + discovery: done + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: todo + comment: | + Dynamic devices are discovered, but no entities are created for them + entity-category: done + entity-device-class: todo + entity-disabled-by-default: done + entity-translations: todo + exception-translations: todo + icon-translations: todo + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. + stale-devices: todo + # Platinum + async-dependency: done + inject-websession: + status: exempt + comment: | + This integration communicates via serial/usb/tcp and does not require a web session. + strict-typing: done diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 5a09f8c7bd8..aa62b5a5120 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -1105,7 +1105,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "v2c", "vallox", "vasttrafik", - "velbus", "velux", "venstar", "vera", From 05b23d081b023a26adde0ad836cbec2212ac5f6e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=85ke=20Strandberg?= Date: Wed, 11 Dec 2024 14:09:33 +0100 Subject: [PATCH 0504/1198] Set quality_scale for myUplink to Silver (#132923) --- homeassistant/components/myuplink/manifest.json | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/myuplink/manifest.json b/homeassistant/components/myuplink/manifest.json index 0e638a72715..8438d24194c 100644 --- a/homeassistant/components/myuplink/manifest.json +++ b/homeassistant/components/myuplink/manifest.json @@ -6,5 +6,6 @@ "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/myuplink", "iot_class": "cloud_polling", + "quality_scale": "silver", "requirements": ["myuplink==0.6.0"] } From 17533823075d68068ca9cf69c90b12088a0a2eb8 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 11 Dec 2024 15:11:29 +0100 Subject: [PATCH 0505/1198] Adjust lifx to use local _ATTR_COLOR_TEMP constant (#132840) --- homeassistant/components/lifx/const.py | 3 +++ homeassistant/components/lifx/manager.py | 6 +++--- homeassistant/components/lifx/util.py | 7 +++++-- 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/lifx/const.py b/homeassistant/components/lifx/const.py index 9b213cc9f6d..667afe1125d 100644 --- a/homeassistant/components/lifx/const.py +++ b/homeassistant/components/lifx/const.py @@ -64,3 +64,6 @@ DATA_LIFX_MANAGER = "lifx_manager" LIFX_CEILING_PRODUCT_IDS = {176, 177} _LOGGER = logging.getLogger(__package__) + +# _ATTR_COLOR_TEMP deprecated - to be removed in 2026.1 +_ATTR_COLOR_TEMP = "color_temp" diff --git a/homeassistant/components/lifx/manager.py b/homeassistant/components/lifx/manager.py index 759d08707cd..27e62717e96 100644 --- a/homeassistant/components/lifx/manager.py +++ b/homeassistant/components/lifx/manager.py @@ -15,7 +15,6 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_BRIGHTNESS_PCT, ATTR_COLOR_NAME, - ATTR_COLOR_TEMP, ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ATTR_RGB_COLOR, @@ -30,7 +29,7 @@ from homeassistant.core import HomeAssistant, ServiceCall, callback import 
homeassistant.helpers.config_validation as cv from homeassistant.helpers.service import async_extract_referenced_entity_ids -from .const import ATTR_THEME, DATA_LIFX_MANAGER, DOMAIN +from .const import _ATTR_COLOR_TEMP, ATTR_THEME, DATA_LIFX_MANAGER, DOMAIN from .coordinator import LIFXUpdateCoordinator, Light from .util import convert_8_to_16, find_hsbk @@ -126,7 +125,8 @@ LIFX_EFFECT_PULSE_SCHEMA = cv.make_entity_service_schema( vol.Exclusive(ATTR_COLOR_TEMP_KELVIN, COLOR_GROUP): vol.All( vol.Coerce(int), vol.Range(min=1500, max=9000) ), - vol.Exclusive(ATTR_COLOR_TEMP, COLOR_GROUP): cv.positive_int, + # _ATTR_COLOR_TEMP deprecated - to be removed in 2026.1 + vol.Exclusive(_ATTR_COLOR_TEMP, COLOR_GROUP): cv.positive_int, ATTR_PERIOD: vol.All(vol.Coerce(float), vol.Range(min=0.05)), ATTR_CYCLES: vol.All(vol.Coerce(float), vol.Range(min=1)), ATTR_MODE: vol.In(PULSE_MODES), diff --git a/homeassistant/components/lifx/util.py b/homeassistant/components/lifx/util.py index 62d0ea66f81..ffffe7a4856 100644 --- a/homeassistant/components/lifx/util.py +++ b/homeassistant/components/lifx/util.py @@ -27,6 +27,7 @@ from homeassistant.helpers import device_registry as dr import homeassistant.util.color as color_util from .const import ( + _ATTR_COLOR_TEMP, _LOGGER, DEFAULT_ATTEMPTS, DOMAIN, @@ -112,13 +113,15 @@ def find_hsbk(hass: HomeAssistant, **kwargs: Any) -> list[float | int | None] | saturation = int(saturation / 100 * 65535) kelvin = 3500 - if "color_temp" in kwargs: # old ATTR_COLOR_TEMP + if _ATTR_COLOR_TEMP in kwargs: # added in 2025.1, can be removed in 2026.1 _LOGGER.warning( "The 'color_temp' parameter is deprecated. Please use 'color_temp_kelvin' for" " all service calls" ) - kelvin = color_util.color_temperature_mired_to_kelvin(kwargs.pop("color_temp")) + kelvin = color_util.color_temperature_mired_to_kelvin( + kwargs.pop(_ATTR_COLOR_TEMP) + ) saturation = 0 if ATTR_COLOR_TEMP_KELVIN in kwargs: From 555d7f1ea420acb969194ab00d91e85626a368d9 Mon Sep 17 00:00:00 2001 From: Simone Chemelli Date: Wed, 11 Dec 2024 09:40:18 -0500 Subject: [PATCH 0506/1198] Guard Vodafone Station updates against bad data (#132921) guard Vodafone Station updates against bad data --- homeassistant/components/vodafone_station/coordinator.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/homeassistant/components/vodafone_station/coordinator.py b/homeassistant/components/vodafone_station/coordinator.py index d2f408e355b..e95ca2b5976 100644 --- a/homeassistant/components/vodafone_station/coordinator.py +++ b/homeassistant/components/vodafone_station/coordinator.py @@ -2,6 +2,7 @@ from dataclasses import dataclass from datetime import datetime, timedelta +from json.decoder import JSONDecodeError from typing import Any from aiovodafone import VodafoneStationDevice, VodafoneStationSercommApi, exceptions @@ -107,6 +108,7 @@ class VodafoneStationRouter(DataUpdateCoordinator[UpdateCoordinatorDataType]): exceptions.CannotConnect, exceptions.AlreadyLogged, exceptions.GenericLoginError, + JSONDecodeError, ) as err: raise UpdateFailed(f"Error fetching data: {err!r}") from err except (ConfigEntryAuthFailed, UpdateFailed): From ee4db13c2aa64044ba5524d17881c97f694b6ab9 Mon Sep 17 00:00:00 2001 From: jb101010-2 <168106462+jb101010-2@users.noreply.github.com> Date: Wed, 11 Dec 2024 15:52:43 +0100 Subject: [PATCH 0507/1198] Add data description to suez_water config flow (#132466) * Suez_water: config flow data_descriptions * Rename counter by meter * Use placeholders --- homeassistant/components/suez_water/config_flow.py | 5 
++++- .../components/suez_water/quality_scale.yaml | 4 ++-- homeassistant/components/suez_water/strings.json | 12 +++++++++--- 3 files changed, 15 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/suez_water/config_flow.py b/homeassistant/components/suez_water/config_flow.py index 2a1edea35f1..b24dc1815ee 100644 --- a/homeassistant/components/suez_water/config_flow.py +++ b/homeassistant/components/suez_water/config_flow.py @@ -82,7 +82,10 @@ class SuezWaterConfigFlow(ConfigFlow, domain=DOMAIN): ) return self.async_show_form( - step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + step_id="user", + data_schema=STEP_USER_DATA_SCHEMA, + errors=errors, + description_placeholders={"tout_sur_mon_eau": "Tout sur mon Eau"}, ) diff --git a/homeassistant/components/suez_water/quality_scale.yaml b/homeassistant/components/suez_water/quality_scale.yaml index 0ca4c2e0f27..0980ee472eb 100644 --- a/homeassistant/components/suez_water/quality_scale.yaml +++ b/homeassistant/components/suez_water/quality_scale.yaml @@ -1,9 +1,9 @@ rules: # Bronze - config-flow: todo + config-flow: done test-before-configure: done unique-config-entry: done - config-flow-test-coverage: todo + config-flow-test-coverage: done runtime-data: status: todo comment: coordinator is created during setup, should be stored in runtime_data diff --git a/homeassistant/components/suez_water/strings.json b/homeassistant/components/suez_water/strings.json index 6be2affab97..be2d4849e76 100644 --- a/homeassistant/components/suez_water/strings.json +++ b/homeassistant/components/suez_water/strings.json @@ -5,15 +5,21 @@ "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", - "counter_id": "Counter id" - } + "counter_id": "Meter id" + }, + "data_description": { + "username": "Enter your login associated with your {tout_sur_mon_eau} account", + "password": "Enter your password associated with your {tout_sur_mon_eau} account", + "counter_id": "Enter your meter id (ex: 12345678). 
Should be found automatically during setup, if not see integration documentation for more information" + }, + "description": "Connect your suez water {tout_sur_mon_eau} account to retrieve your water consumption" } }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "unknown": "[%key:common::config_flow::error::unknown%]", - "counter_not_found": "Could not find counter id automatically" + "counter_not_found": "Could not find meter id automatically" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" From 0d71828defe04b03dda3fc5c8995a69452f65318 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 11 Dec 2024 16:11:14 +0100 Subject: [PATCH 0508/1198] Migrate mqtt lights to use Kelvin (#132828) * Migrate mqtt lights to use Kelvin * Adjust restore_cache tests * Adjust tests --- .../components/mqtt/light/schema_basic.py | 25 +++++++---- .../components/mqtt/light/schema_json.py | 42 +++++++++++++------ .../components/mqtt/light/schema_template.py | 38 +++++++++++------ tests/components/mqtt/test_light.py | 4 +- tests/components/mqtt/test_light_json.py | 6 +-- tests/components/mqtt/test_light_template.py | 4 +- 6 files changed, 80 insertions(+), 39 deletions(-) diff --git a/homeassistant/components/mqtt/light/schema_basic.py b/homeassistant/components/mqtt/light/schema_basic.py index 8a1b7a2a76a..d58d52377dd 100644 --- a/homeassistant/components/mqtt/light/schema_basic.py +++ b/homeassistant/components/mqtt/light/schema_basic.py @@ -246,7 +246,7 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): _optimistic: bool _optimistic_brightness: bool _optimistic_color_mode: bool - _optimistic_color_temp: bool + _optimistic_color_temp_kelvin: bool _optimistic_effect: bool _optimistic_hs_color: bool _optimistic_rgb_color: bool @@ -327,7 +327,7 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): and topic[CONF_RGB_STATE_TOPIC] is None ) ) - self._optimistic_color_temp = ( + self._optimistic_color_temp_kelvin = ( optimistic or topic[CONF_COLOR_TEMP_STATE_TOPIC] is None ) self._optimistic_effect = optimistic or topic[CONF_EFFECT_STATE_TOPIC] is None @@ -518,7 +518,9 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): if self._optimistic_color_mode: self._attr_color_mode = ColorMode.COLOR_TEMP - self._attr_color_temp = int(payload) + self._attr_color_temp_kelvin = color_util.color_temperature_mired_to_kelvin( + int(payload) + ) @callback def _effect_received(self, msg: ReceiveMessage) -> None: @@ -592,7 +594,7 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): self.add_subscription( CONF_COLOR_TEMP_STATE_TOPIC, self._color_temp_received, - {"_attr_color_mode", "_attr_color_temp"}, + {"_attr_color_mode", "_attr_color_temp_kelvin"}, ) self.add_subscription( CONF_EFFECT_STATE_TOPIC, self._effect_received, {"_attr_effect"} @@ -631,7 +633,7 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): restore_state(ATTR_RGBW_COLOR) restore_state(ATTR_RGBWW_COLOR) restore_state(ATTR_COLOR_MODE) - restore_state(ATTR_COLOR_TEMP) + restore_state(ATTR_COLOR_TEMP_KELVIN) restore_state(ATTR_EFFECT) restore_state(ATTR_HS_COLOR) restore_state(ATTR_XY_COLOR) @@ -803,14 +805,21 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): await publish(CONF_RGBWW_COMMAND_TOPIC, rgbww_s) should_update |= set_optimistic(ATTR_BRIGHTNESS, kwargs[ATTR_BRIGHTNESS]) if ( - ATTR_COLOR_TEMP in kwargs + 
ATTR_COLOR_TEMP_KELVIN in kwargs and self._topic[CONF_COLOR_TEMP_COMMAND_TOPIC] is not None ): ct_command_tpl = self._command_templates[CONF_COLOR_TEMP_COMMAND_TEMPLATE] - color_temp = ct_command_tpl(int(kwargs[ATTR_COLOR_TEMP]), None) + color_temp = ct_command_tpl( + color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ), + None, + ) await publish(CONF_COLOR_TEMP_COMMAND_TOPIC, color_temp) should_update |= set_optimistic( - ATTR_COLOR_TEMP, kwargs[ATTR_COLOR_TEMP], ColorMode.COLOR_TEMP + ATTR_COLOR_TEMP_KELVIN, + kwargs[ATTR_COLOR_TEMP_KELVIN], + ColorMode.COLOR_TEMP, ) if ( diff --git a/homeassistant/components/mqtt/light/schema_json.py b/homeassistant/components/mqtt/light/schema_json.py index 89f338f6bab..703117190eb 100644 --- a/homeassistant/components/mqtt/light/schema_json.py +++ b/homeassistant/components/mqtt/light/schema_json.py @@ -12,7 +12,7 @@ import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, @@ -273,8 +273,16 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity): def _setup_from_config(self, config: ConfigType) -> None: """(Re)Setup the entity.""" - self._attr_max_mireds = config.get(CONF_MAX_MIREDS, super().max_mireds) - self._attr_min_mireds = config.get(CONF_MIN_MIREDS, super().min_mireds) + self._attr_min_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(max_mireds) + if (max_mireds := config.get(CONF_MAX_MIREDS)) + else super().min_color_temp_kelvin + ) + self._attr_max_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(min_mireds) + if (min_mireds := config.get(CONF_MIN_MIREDS)) + else super().max_color_temp_kelvin + ) self._attr_effect_list = config.get(CONF_EFFECT_LIST) self._topic = { @@ -370,7 +378,11 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity): return try: if color_mode == ColorMode.COLOR_TEMP: - self._attr_color_temp = int(values["color_temp"]) + self._attr_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin( + values["color_temp"] + ) + ) self._attr_color_mode = ColorMode.COLOR_TEMP elif color_mode == ColorMode.HS: hue = float(values["color"]["h"]) @@ -469,9 +481,13 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity): # Deprecated color handling try: if values["color_temp"] is None: - self._attr_color_temp = None + self._attr_color_temp_kelvin = None else: - self._attr_color_temp = int(values["color_temp"]) # type: ignore[arg-type] + self._attr_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin( + values["color_temp"] # type: ignore[arg-type] + ) + ) except KeyError: pass except ValueError: @@ -496,7 +512,7 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity): self._state_received, { "_attr_brightness", - "_attr_color_temp", + "_attr_color_temp_kelvin", "_attr_effect", "_attr_hs_color", "_attr_is_on", @@ -522,8 +538,8 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity): self._attr_color_mode = last_attributes.get( ATTR_COLOR_MODE, self.color_mode ) - self._attr_color_temp = last_attributes.get( - ATTR_COLOR_TEMP, self.color_temp + self._attr_color_temp_kelvin = last_attributes.get( + ATTR_COLOR_TEMP_KELVIN, self.color_temp_kelvin ) self._attr_effect = last_attributes.get(ATTR_EFFECT, self.effect) self._attr_hs_color = last_attributes.get(ATTR_HS_COLOR, self.hs_color) @@ -690,12 +706,14 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity): 
self._attr_brightness = kwargs[ATTR_BRIGHTNESS] should_update = True - if ATTR_COLOR_TEMP in kwargs: - message["color_temp"] = int(kwargs[ATTR_COLOR_TEMP]) + if ATTR_COLOR_TEMP_KELVIN in kwargs: + message["color_temp"] = color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ) if self._optimistic: self._attr_color_mode = ColorMode.COLOR_TEMP - self._attr_color_temp = kwargs[ATTR_COLOR_TEMP] + self._attr_color_temp_kelvin = kwargs[ATTR_COLOR_TEMP_KELVIN] self._attr_hs_color = None should_update = True diff --git a/homeassistant/components/mqtt/light/schema_template.py b/homeassistant/components/mqtt/light/schema_template.py index c4f9cad44c5..7427d25533e 100644 --- a/homeassistant/components/mqtt/light/schema_template.py +++ b/homeassistant/components/mqtt/light/schema_template.py @@ -10,7 +10,7 @@ import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, @@ -126,8 +126,16 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity): def _setup_from_config(self, config: ConfigType) -> None: """(Re)Setup the entity.""" - self._attr_max_mireds = config.get(CONF_MAX_MIREDS, super().max_mireds) - self._attr_min_mireds = config.get(CONF_MIN_MIREDS, super().min_mireds) + self._attr_min_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(max_mireds) + if (max_mireds := config.get(CONF_MAX_MIREDS)) + else super().min_color_temp_kelvin + ) + self._attr_max_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(min_mireds) + if (min_mireds := config.get(CONF_MIN_MIREDS)) + else super().max_color_temp_kelvin + ) self._attr_effect_list = config.get(CONF_EFFECT_LIST) self._topics = { @@ -213,8 +221,10 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity): color_temp = self._value_templates[CONF_COLOR_TEMP_TEMPLATE]( msg.payload ) - self._attr_color_temp = ( - int(color_temp) if color_temp != "None" else None + self._attr_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(int(color_temp)) + if color_temp != "None" + else None ) except ValueError: _LOGGER.warning("Invalid color temperature value received") @@ -256,7 +266,7 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity): { "_attr_brightness", "_attr_color_mode", - "_attr_color_temp", + "_attr_color_temp_kelvin", "_attr_effect", "_attr_hs_color", "_attr_is_on", @@ -275,8 +285,10 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity): if last_state.attributes.get(ATTR_HS_COLOR): self._attr_hs_color = last_state.attributes.get(ATTR_HS_COLOR) self._update_color_mode() - if last_state.attributes.get(ATTR_COLOR_TEMP): - self._attr_color_temp = last_state.attributes.get(ATTR_COLOR_TEMP) + if last_state.attributes.get(ATTR_COLOR_TEMP_KELVIN): + self._attr_color_temp_kelvin = last_state.attributes.get( + ATTR_COLOR_TEMP_KELVIN + ) if last_state.attributes.get(ATTR_EFFECT): self._attr_effect = last_state.attributes.get(ATTR_EFFECT) @@ -295,11 +307,13 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity): if self._optimistic: self._attr_brightness = kwargs[ATTR_BRIGHTNESS] - if ATTR_COLOR_TEMP in kwargs: - values["color_temp"] = int(kwargs[ATTR_COLOR_TEMP]) + if ATTR_COLOR_TEMP_KELVIN in kwargs: + values["color_temp"] = color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ) if self._optimistic: - self._attr_color_temp = kwargs[ATTR_COLOR_TEMP] + self._attr_color_temp_kelvin = 
kwargs[ATTR_COLOR_TEMP_KELVIN] self._attr_hs_color = None self._update_color_mode() @@ -325,7 +339,7 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity): values["sat"] = hs_color[1] if self._optimistic: - self._attr_color_temp = None + self._attr_color_temp_kelvin = None self._attr_hs_color = kwargs[ATTR_HS_COLOR] self._update_color_mode() diff --git a/tests/components/mqtt/test_light.py b/tests/components/mqtt/test_light.py index b11484d55fb..8e9e2abb85a 100644 --- a/tests/components/mqtt/test_light.py +++ b/tests/components/mqtt/test_light.py @@ -1008,7 +1008,7 @@ async def test_sending_mqtt_commands_and_optimistic( "brightness": 95, "hs_color": [100, 100], "effect": "random", - "color_temp": 100, + "color_temp_kelvin": 100000, "color_mode": "hs", }, ) @@ -1021,7 +1021,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get("brightness") == 95 assert state.attributes.get("hs_color") == (100, 100) assert state.attributes.get("effect") == "random" - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get(light.ATTR_COLOR_MODE) == "hs" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes assert state.attributes.get(ATTR_ASSUMED_STATE) diff --git a/tests/components/mqtt/test_light_json.py b/tests/components/mqtt/test_light_json.py index f0da483e706..7d8ff241d3c 100644 --- a/tests/components/mqtt/test_light_json.py +++ b/tests/components/mqtt/test_light_json.py @@ -1053,7 +1053,7 @@ async def test_sending_mqtt_commands_and_optimistic( "brightness": 95, "hs_color": [100, 100], "effect": "random", - "color_temp": 100, + "color_temp_kelvin": 10000, }, ) mock_restore_cache(hass, (fake_state,)) @@ -1065,7 +1065,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get("brightness") == 95 assert state.attributes.get("hs_color") == (100, 100) assert state.attributes.get("effect") == "random" - assert state.attributes.get("color_temp") is None # hs_color has priority + assert state.attributes.get("color_temp_kelvin") is None # hs_color has priority color_modes = [light.ColorMode.COLOR_TEMP, light.ColorMode.HS] assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes expected_features = ( @@ -1205,7 +1205,7 @@ async def test_sending_mqtt_commands_and_optimistic2( "on", { "brightness": 95, - "color_temp": 100, + "color_temp_kelvin": 10000, "color_mode": "rgb", "effect": "random", "hs_color": [100, 100], diff --git a/tests/components/mqtt/test_light_template.py b/tests/components/mqtt/test_light_template.py index 59fd3eb88ed..64cdff370be 100644 --- a/tests/components/mqtt/test_light_template.py +++ b/tests/components/mqtt/test_light_template.py @@ -432,7 +432,7 @@ async def test_sending_mqtt_commands_and_optimistic( "brightness": 95, "hs_color": [100, 100], "effect": "random", - "color_temp": 100, + "color_temp_kelvin": 10000, }, ) mock_restore_cache(hass, (fake_state,)) @@ -443,7 +443,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.state == STATE_ON assert state.attributes.get("hs_color") == (100, 100) assert state.attributes.get("effect") == "random" - assert state.attributes.get("color_temp") is None # hs_color has priority + assert state.attributes.get("color_temp_kelvin") is None # hs_color has priority assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_off(hass, "light.test") From 00ab5db6612ff5b7cf541df2639738f3b7a42473 Mon Sep 17 00:00:00 2001 From: Maikel 
Punie Date: Wed, 11 Dec 2024 16:41:48 +0100 Subject: [PATCH 0509/1198] Split the velbus services code in its own file (#131375) --- homeassistant/components/velbus/__init__.py | 121 ++---------------- .../components/velbus/quality_scale.yaml | 2 +- homeassistant/components/velbus/services.py | 116 +++++++++++++++++ 3 files changed, 130 insertions(+), 109 deletions(-) create mode 100644 homeassistant/components/velbus/services.py diff --git a/homeassistant/components/velbus/__init__.py b/homeassistant/components/velbus/__init__.py index ca8cfb0f2a7..fec6395c890 100644 --- a/homeassistant/components/velbus/__init__.py +++ b/homeassistant/components/velbus/__init__.py @@ -2,30 +2,22 @@ from __future__ import annotations -from contextlib import suppress import logging import os import shutil from velbusaio.controller import Velbus -import voluptuous as vol from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_ADDRESS, CONF_PORT, Platform -from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.const import CONF_PORT, Platform +from homeassistant.core import HomeAssistant from homeassistant.exceptions import PlatformNotReady from homeassistant.helpers import config_validation as cv, device_registry as dr from homeassistant.helpers.storage import STORAGE_DIR +from homeassistant.helpers.typing import ConfigType -from .const import ( - CONF_INTERFACE, - CONF_MEMO_TEXT, - DOMAIN, - SERVICE_CLEAR_CACHE, - SERVICE_SCAN, - SERVICE_SET_MEMO_TEXT, - SERVICE_SYNC, -) +from .const import DOMAIN +from .services import setup_services _LOGGER = logging.getLogger(__name__) @@ -40,6 +32,8 @@ PLATFORMS = [ Platform.SWITCH, ] +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) + async def velbus_connect_task( controller: Velbus, hass: HomeAssistant, entry_id: str @@ -67,6 +61,12 @@ def _migrate_device_identifiers(hass: HomeAssistant, entry_id: str) -> None: dev_reg.async_update_device(device.id, new_identifiers=new_identifier) +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the actions for the Velbus component.""" + setup_services(hass) + return True + + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Establish connection with velbus.""" hass.data.setdefault(DOMAIN, {}) @@ -85,97 +85,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - if hass.services.has_service(DOMAIN, SERVICE_SCAN): - return True - - def check_entry_id(interface: str) -> str: - for config_entry in hass.config_entries.async_entries(DOMAIN): - if "port" in config_entry.data and config_entry.data["port"] == interface: - return config_entry.entry_id - raise vol.Invalid( - "The interface provided is not defined as a port in a Velbus integration" - ) - - async def scan(call: ServiceCall) -> None: - await hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"].scan() - - hass.services.async_register( - DOMAIN, - SERVICE_SCAN, - scan, - vol.Schema({vol.Required(CONF_INTERFACE): vol.All(cv.string, check_entry_id)}), - ) - - async def syn_clock(call: ServiceCall) -> None: - await hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"].sync_clock() - - hass.services.async_register( - DOMAIN, - SERVICE_SYNC, - syn_clock, - vol.Schema({vol.Required(CONF_INTERFACE): vol.All(cv.string, check_entry_id)}), - ) - - async def set_memo_text(call: ServiceCall) -> None: - """Handle Memo Text service call.""" - memo_text = 
call.data[CONF_MEMO_TEXT] - await ( - hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"] - .get_module(call.data[CONF_ADDRESS]) - .set_memo_text(memo_text) - ) - - hass.services.async_register( - DOMAIN, - SERVICE_SET_MEMO_TEXT, - set_memo_text, - vol.Schema( - { - vol.Required(CONF_INTERFACE): vol.All(cv.string, check_entry_id), - vol.Required(CONF_ADDRESS): vol.All( - vol.Coerce(int), vol.Range(min=0, max=255) - ), - vol.Optional(CONF_MEMO_TEXT, default=""): cv.string, - } - ), - ) - - async def clear_cache(call: ServiceCall) -> None: - """Handle a clear cache service call.""" - # clear the cache - with suppress(FileNotFoundError): - if call.data.get(CONF_ADDRESS): - await hass.async_add_executor_job( - os.unlink, - hass.config.path( - STORAGE_DIR, - f"velbuscache-{call.data[CONF_INTERFACE]}/{call.data[CONF_ADDRESS]}.p", - ), - ) - else: - await hass.async_add_executor_job( - shutil.rmtree, - hass.config.path( - STORAGE_DIR, f"velbuscache-{call.data[CONF_INTERFACE]}/" - ), - ) - # call a scan to repopulate - await scan(call) - - hass.services.async_register( - DOMAIN, - SERVICE_CLEAR_CACHE, - clear_cache, - vol.Schema( - { - vol.Required(CONF_INTERFACE): vol.All(cv.string, check_entry_id), - vol.Optional(CONF_ADDRESS): vol.All( - vol.Coerce(int), vol.Range(min=0, max=255) - ), - } - ), - ) - return True @@ -186,10 +95,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.data[DOMAIN].pop(entry.entry_id) if not hass.data[DOMAIN]: hass.data.pop(DOMAIN) - hass.services.async_remove(DOMAIN, SERVICE_SCAN) - hass.services.async_remove(DOMAIN, SERVICE_SYNC) - hass.services.async_remove(DOMAIN, SERVICE_SET_MEMO_TEXT) - hass.services.async_remove(DOMAIN, SERVICE_CLEAR_CACHE) return unload_ok diff --git a/homeassistant/components/velbus/quality_scale.yaml b/homeassistant/components/velbus/quality_scale.yaml index f3ab8f607b6..adea896a1c6 100644 --- a/homeassistant/components/velbus/quality_scale.yaml +++ b/homeassistant/components/velbus/quality_scale.yaml @@ -1,6 +1,6 @@ rules: # Bronze - action-setup: todo + action-setup: done appropriate-polling: status: exempt comment: | diff --git a/homeassistant/components/velbus/services.py b/homeassistant/components/velbus/services.py new file mode 100644 index 00000000000..83633eb66bc --- /dev/null +++ b/homeassistant/components/velbus/services.py @@ -0,0 +1,116 @@ +"""Support for Velbus devices.""" + +from __future__ import annotations + +from contextlib import suppress +import os +import shutil + +import voluptuous as vol + +from homeassistant.const import CONF_ADDRESS +from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.storage import STORAGE_DIR + +from .const import ( + CONF_INTERFACE, + CONF_MEMO_TEXT, + DOMAIN, + SERVICE_CLEAR_CACHE, + SERVICE_SCAN, + SERVICE_SET_MEMO_TEXT, + SERVICE_SYNC, +) + + +def setup_services(hass: HomeAssistant) -> None: + """Register the velbus services.""" + + def check_entry_id(interface: str) -> str: + for config_entry in hass.config_entries.async_entries(DOMAIN): + if "port" in config_entry.data and config_entry.data["port"] == interface: + return config_entry.entry_id + raise vol.Invalid( + "The interface provided is not defined as a port in a Velbus integration" + ) + + async def scan(call: ServiceCall) -> None: + await hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"].scan() + + async def syn_clock(call: ServiceCall) -> None: + await 
hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"].sync_clock() + + async def set_memo_text(call: ServiceCall) -> None: + """Handle Memo Text service call.""" + memo_text = call.data[CONF_MEMO_TEXT] + await ( + hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"] + .get_module(call.data[CONF_ADDRESS]) + .set_memo_text(memo_text.async_render()) + ) + + async def clear_cache(call: ServiceCall) -> None: + """Handle a clear cache service call.""" + # clear the cache + with suppress(FileNotFoundError): + if call.data.get(CONF_ADDRESS): + await hass.async_add_executor_job( + os.unlink, + hass.config.path( + STORAGE_DIR, + f"velbuscache-{call.data[CONF_INTERFACE]}/{call.data[CONF_ADDRESS]}.p", + ), + ) + else: + await hass.async_add_executor_job( + shutil.rmtree, + hass.config.path( + STORAGE_DIR, f"velbuscache-{call.data[CONF_INTERFACE]}/" + ), + ) + # call a scan to repopulate + await scan(call) + + hass.services.async_register( + DOMAIN, + SERVICE_SCAN, + scan, + vol.Schema({vol.Required(CONF_INTERFACE): vol.All(cv.string, check_entry_id)}), + ) + + hass.services.async_register( + DOMAIN, + SERVICE_SYNC, + syn_clock, + vol.Schema({vol.Required(CONF_INTERFACE): vol.All(cv.string, check_entry_id)}), + ) + + hass.services.async_register( + DOMAIN, + SERVICE_SET_MEMO_TEXT, + set_memo_text, + vol.Schema( + { + vol.Required(CONF_INTERFACE): vol.All(cv.string, check_entry_id), + vol.Required(CONF_ADDRESS): vol.All( + vol.Coerce(int), vol.Range(min=0, max=255) + ), + vol.Optional(CONF_MEMO_TEXT, default=""): cv.template, + } + ), + ) + + hass.services.async_register( + DOMAIN, + SERVICE_CLEAR_CACHE, + clear_cache, + vol.Schema( + { + vol.Required(CONF_INTERFACE): vol.All(cv.string, check_entry_id), + vol.Optional(CONF_ADDRESS): vol.All( + vol.Coerce(int), vol.Range(min=0, max=255) + ), + } + ), + ) From 39f8de015910ae6ef0b4d224802435d22b2b008e Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 11 Dec 2024 17:18:54 +0100 Subject: [PATCH 0510/1198] Fix mqtt light attributes (#132941) --- homeassistant/components/mqtt/light/schema_basic.py | 12 ++++++++++-- homeassistant/components/mqtt/light/schema_json.py | 2 +- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/mqtt/light/schema_basic.py b/homeassistant/components/mqtt/light/schema_basic.py index d58d52377dd..a4d3ecb5f21 100644 --- a/homeassistant/components/mqtt/light/schema_basic.py +++ b/homeassistant/components/mqtt/light/schema_basic.py @@ -261,8 +261,16 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): def _setup_from_config(self, config: ConfigType) -> None: """(Re)Setup the entity.""" - self._attr_min_mireds = config.get(CONF_MIN_MIREDS, super().min_mireds) - self._attr_max_mireds = config.get(CONF_MAX_MIREDS, super().max_mireds) + self._attr_min_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(max_mireds) + if (max_mireds := config.get(CONF_MAX_MIREDS)) + else super().min_color_temp_kelvin + ) + self._attr_max_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(min_mireds) + if (min_mireds := config.get(CONF_MIN_MIREDS)) + else super().max_color_temp_kelvin + ) self._attr_effect_list = config.get(CONF_EFFECT_LIST) topic: dict[str, str | None] = { diff --git a/homeassistant/components/mqtt/light/schema_json.py b/homeassistant/components/mqtt/light/schema_json.py index 703117190eb..5901967610a 100644 --- a/homeassistant/components/mqtt/light/schema_json.py +++ b/homeassistant/components/mqtt/light/schema_json.py @@ 
-639,7 +639,7 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity): message["color"]["s"] = hs_color[1] if self._optimistic: - self._attr_color_temp = None + self._attr_color_temp_kelvin = None self._attr_hs_color = kwargs[ATTR_HS_COLOR] should_update = True From 502a221feb345ce434e265be5dcfb44176828950 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Wed, 11 Dec 2024 17:20:49 +0100 Subject: [PATCH 0511/1198] Set go2rtc quality scale to internal (#132945) --- homeassistant/components/go2rtc/manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/go2rtc/manifest.json b/homeassistant/components/go2rtc/manifest.json index 1cd9e8c1107..07dbd3bd29b 100644 --- a/homeassistant/components/go2rtc/manifest.json +++ b/homeassistant/components/go2rtc/manifest.json @@ -7,7 +7,7 @@ "documentation": "https://www.home-assistant.io/integrations/go2rtc", "integration_type": "system", "iot_class": "local_polling", - "quality_scale": "legacy", + "quality_scale": "internal", "requirements": ["go2rtc-client==0.1.2"], "single_config_entry": true } From 94260147d757a7f70ce94f685b952cc66794dc99 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Wed, 11 Dec 2024 11:52:02 -0600 Subject: [PATCH 0512/1198] Fix pipeline conversation language (#132896) --- .../components/assist_pipeline/pipeline.py | 12 ++- .../assist_pipeline/snapshots/test_init.ambr | 55 +++++++++++++- tests/components/assist_pipeline/test_init.py | 75 +++++++++++++++++++ .../conversation/test_default_agent.py | 47 ++++++++++++ 4 files changed, 185 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/assist_pipeline/pipeline.py b/homeassistant/components/assist_pipeline/pipeline.py index 9e9e84fb5d6..f8f6be3a40f 100644 --- a/homeassistant/components/assist_pipeline/pipeline.py +++ b/homeassistant/components/assist_pipeline/pipeline.py @@ -29,6 +29,7 @@ from homeassistant.components import ( from homeassistant.components.tts import ( generate_media_source_id as tts_generate_media_source_id, ) +from homeassistant.const import MATCH_ALL from homeassistant.core import Context, HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import intent @@ -1009,12 +1010,19 @@ class PipelineRun: if self.intent_agent is None: raise RuntimeError("Recognize intent was not prepared") + if self.pipeline.conversation_language == MATCH_ALL: + # LLMs support all languages ('*') so use pipeline language for + # intent fallback. 
+ input_language = self.pipeline.language + else: + input_language = self.pipeline.conversation_language + self.process_event( PipelineEvent( PipelineEventType.INTENT_START, { "engine": self.intent_agent, - "language": self.pipeline.conversation_language, + "language": input_language, "intent_input": intent_input, "conversation_id": conversation_id, "device_id": device_id, @@ -1029,7 +1037,7 @@ class PipelineRun: context=self.context, conversation_id=conversation_id, device_id=device_id, - language=self.pipeline.language, + language=input_language, agent_id=self.intent_agent, ) processed_locally = self.intent_agent == conversation.HOME_ASSISTANT_AGENT diff --git a/tests/components/assist_pipeline/snapshots/test_init.ambr b/tests/components/assist_pipeline/snapshots/test_init.ambr index 3b829e0e14a..d3241b8ac1f 100644 --- a/tests/components/assist_pipeline/snapshots/test_init.ambr +++ b/tests/components/assist_pipeline/snapshots/test_init.ambr @@ -142,7 +142,7 @@ 'data': dict({ 'code': 'no_intent_match', }), - 'language': 'en', + 'language': 'en-US', 'response_type': 'error', 'speech': dict({ 'plain': dict({ @@ -233,7 +233,7 @@ 'data': dict({ 'code': 'no_intent_match', }), - 'language': 'en', + 'language': 'en-US', 'response_type': 'error', 'speech': dict({ 'plain': dict({ @@ -387,6 +387,57 @@ }), ]) # --- +# name: test_pipeline_language_used_instead_of_conversation_language + list([ + dict({ + 'data': dict({ + 'language': 'en', + 'pipeline': , + }), + 'type': , + }), + dict({ + 'data': dict({ + 'conversation_id': None, + 'device_id': None, + 'engine': 'conversation.home_assistant', + 'intent_input': 'test input', + 'language': 'en', + 'prefer_local_intents': False, + }), + 'type': , + }), + dict({ + 'data': dict({ + 'intent_output': dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + }), + }), + }), + 'processed_locally': True, + }), + 'type': , + }), + dict({ + 'data': None, + 'type': , + }), + ]) +# --- # name: test_wake_word_detection_aborted list([ dict({ diff --git a/tests/components/assist_pipeline/test_init.py b/tests/components/assist_pipeline/test_init.py index b177530219e..a3e65766c34 100644 --- a/tests/components/assist_pipeline/test_init.py +++ b/tests/components/assist_pipeline/test_init.py @@ -23,6 +23,7 @@ from homeassistant.components.assist_pipeline.const import ( CONF_DEBUG_RECORDING_DIR, DOMAIN, ) +from homeassistant.const import MATCH_ALL from homeassistant.core import Context, HomeAssistant from homeassistant.helpers import intent from homeassistant.setup import async_setup_component @@ -1098,3 +1099,77 @@ async def test_prefer_local_intents( ] == "Order confirmed" ) + + +async def test_pipeline_language_used_instead_of_conversation_language( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + init_components, + snapshot: SnapshotAssertion, +) -> None: + """Test that the pipeline language is used when the conversation language is '*' (all languages).""" + client = await hass_ws_client(hass) + + events: list[assist_pipeline.PipelineEvent] = [] + + await client.send_json_auto_id( + { + "type": "assist_pipeline/pipeline/create", + "conversation_engine": "homeassistant", + "conversation_language": MATCH_ALL, + "language": "en", + "name": "test_name", + "stt_engine": "test", + "stt_language": "en-US", + "tts_engine": "test", + "tts_language": "en-US", + "tts_voice": 
"Arnold Schwarzenegger", + "wake_word_entity": None, + "wake_word_id": None, + } + ) + msg = await client.receive_json() + assert msg["success"] + pipeline_id = msg["result"]["id"] + pipeline = assist_pipeline.async_get_pipeline(hass, pipeline_id) + + pipeline_input = assist_pipeline.pipeline.PipelineInput( + intent_input="test input", + run=assist_pipeline.pipeline.PipelineRun( + hass, + context=Context(), + pipeline=pipeline, + start_stage=assist_pipeline.PipelineStage.INTENT, + end_stage=assist_pipeline.PipelineStage.INTENT, + event_callback=events.append, + ), + ) + await pipeline_input.validate() + + with patch( + "homeassistant.components.assist_pipeline.pipeline.conversation.async_converse", + return_value=conversation.ConversationResult( + intent.IntentResponse(pipeline.language) + ), + ) as mock_async_converse: + await pipeline_input.execute() + + # Check intent start event + assert process_events(events) == snapshot + intent_start: assist_pipeline.PipelineEvent | None = None + for event in events: + if event.type == assist_pipeline.PipelineEventType.INTENT_START: + intent_start = event + break + + assert intent_start is not None + + # Pipeline language (en) should be used instead of '*' + assert intent_start.data.get("language") == pipeline.language + + # Check input to async_converse + mock_async_converse.assert_called_once() + assert ( + mock_async_converse.call_args_list[0].kwargs.get("language") + == pipeline.language + ) diff --git a/tests/components/conversation/test_default_agent.py b/tests/components/conversation/test_default_agent.py index 58d2b0d48bf..8df1647d18c 100644 --- a/tests/components/conversation/test_default_agent.py +++ b/tests/components/conversation/test_default_agent.py @@ -30,6 +30,7 @@ from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_FRIENDLY_NAME, STATE_CLOSED, + STATE_OFF, STATE_ON, STATE_UNKNOWN, EntityCategory, @@ -3049,3 +3050,49 @@ async def test_entities_names_are_not_templates(hass: HomeAssistant) -> None: assert result is not None assert result.response.response_type == intent.IntentResponseType.ERROR + + +@pytest.mark.parametrize( + ("language", "light_name", "on_sentence", "off_sentence"), + [ + ("en", "test light", "turn on test light", "turn off test light"), + ("zh-cn", "卧室灯", "打开卧室灯", "关闭卧室灯"), + ("zh-hk", "睡房燈", "打開睡房燈", "關閉睡房燈"), + ("zh-tw", "臥室檯燈", "打開臥室檯燈", "關臥室檯燈"), + ], +) +@pytest.mark.usefixtures("init_components") +async def test_turn_on_off( + hass: HomeAssistant, + language: str, + light_name: str, + on_sentence: str, + off_sentence: str, +) -> None: + """Test turn on/off in multiple languages.""" + entity_id = "light.light1234" + hass.states.async_set( + entity_id, STATE_OFF, attributes={ATTR_FRIENDLY_NAME: light_name} + ) + + on_calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + await conversation.async_converse( + hass, + on_sentence, + None, + Context(), + language=language, + ) + assert len(on_calls) == 1 + assert on_calls[0].data.get("entity_id") == [entity_id] + + off_calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_off") + await conversation.async_converse( + hass, + off_sentence, + None, + Context(), + language=language, + ) + assert len(off_calls) == 1 + assert off_calls[0].data.get("entity_id") == [entity_id] From 233d927c01656956a868b483de0183c7c3761f66 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Wed, 11 Dec 2024 18:56:21 +0100 Subject: [PATCH 0513/1198] Update xknx to 3.4.0 (#132943) --- homeassistant/components/knx/manifest.json | 2 +- requirements_all.txt | 2 +- 
requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/knx/manifest.json b/homeassistant/components/knx/manifest.json index aed7f3ed455..55c19443aa0 100644 --- a/homeassistant/components/knx/manifest.json +++ b/homeassistant/components/knx/manifest.json @@ -10,7 +10,7 @@ "iot_class": "local_push", "loggers": ["xknx", "xknxproject"], "requirements": [ - "xknx==3.3.0", + "xknx==3.4.0", "xknxproject==3.8.1", "knx-frontend==2024.11.16.205004" ], diff --git a/requirements_all.txt b/requirements_all.txt index b263779e67f..e039a6b486b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3026,7 +3026,7 @@ xbox-webapi==2.1.0 xiaomi-ble==0.33.0 # homeassistant.components.knx -xknx==3.3.0 +xknx==3.4.0 # homeassistant.components.knx xknxproject==3.8.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d641a0fa4e2..f67bee3f32f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2421,7 +2421,7 @@ xbox-webapi==2.1.0 xiaomi-ble==0.33.0 # homeassistant.components.knx -xknx==3.3.0 +xknx==3.4.0 # homeassistant.components.knx xknxproject==3.8.1 From 3a7fc15656f85d1a6577976482a9e45c0c61a2a2 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Wed, 11 Dec 2024 19:01:20 +0100 Subject: [PATCH 0514/1198] Add Dutch locale on supported Alexa interfaces (#132936) --- .../components/alexa/capabilities.py | 19 +++++++++++++++++++ homeassistant/components/alexa/const.py | 1 + homeassistant/components/alexa/handlers.py | 1 + 3 files changed, 21 insertions(+) diff --git a/homeassistant/components/alexa/capabilities.py b/homeassistant/components/alexa/capabilities.py index 8672512acde..c5b4ad15904 100644 --- a/homeassistant/components/alexa/capabilities.py +++ b/homeassistant/components/alexa/capabilities.py @@ -317,6 +317,7 @@ class Alexa(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -403,6 +404,7 @@ class AlexaPowerController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -469,6 +471,7 @@ class AlexaLockController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -523,6 +526,7 @@ class AlexaSceneController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -562,6 +566,7 @@ class AlexaBrightnessController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -611,6 +616,7 @@ class AlexaColorController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -669,6 +675,7 @@ class AlexaColorTemperatureController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -715,6 +722,7 @@ class AlexaSpeaker(AlexaCapability): "fr-FR", # Not documented as of 2021-12-04, see PR #60489 "it-IT", "ja-JP", + "nl-NL", } def name(self) -> str: @@ -772,6 +780,7 @@ class AlexaStepSpeaker(AlexaCapability): "es-ES", "fr-FR", # Not documented as of 2021-12-04, see PR #60489 "it-IT", + "nl-NL", } def name(self) -> str: @@ -801,6 +810,7 @@ class AlexaPlaybackController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -859,6 +869,7 @@ class AlexaInputController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -1104,6 +1115,7 @@ class AlexaThermostatController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -1245,6 +1257,7 @@ class AlexaPowerLevelController(AlexaCapability): "fr-CA", "fr-FR", "it-IT", + "nl-NL", "ja-JP", } @@ -1723,6 +1736,7 @@ class AlexaRangeController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ 
-2066,6 +2080,7 @@ class AlexaToggleController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -2212,6 +2227,7 @@ class AlexaPlaybackStateReporter(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -2267,6 +2283,7 @@ class AlexaSeekController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -2360,6 +2377,7 @@ class AlexaEqualizerController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -2470,6 +2488,7 @@ class AlexaCameraStreamController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } diff --git a/homeassistant/components/alexa/const.py b/homeassistant/components/alexa/const.py index 4862e4d8a8c..27e9bbd5b67 100644 --- a/homeassistant/components/alexa/const.py +++ b/homeassistant/components/alexa/const.py @@ -59,6 +59,7 @@ CONF_SUPPORTED_LOCALES = ( "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", ) diff --git a/homeassistant/components/alexa/handlers.py b/homeassistant/components/alexa/handlers.py index 21365076def..9b857ff4dfd 100644 --- a/homeassistant/components/alexa/handlers.py +++ b/homeassistant/components/alexa/handlers.py @@ -527,6 +527,7 @@ async def async_api_unlock( "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", }: msg = ( From 096d653059b2c38ed4c90452c4ecf9b61daf2023 Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Wed, 11 Dec 2024 13:03:43 -0500 Subject: [PATCH 0515/1198] Record current IQS state for Russound RIO (#131219) --- .../russound_rio/quality_scale.yaml | 88 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 88 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/russound_rio/quality_scale.yaml diff --git a/homeassistant/components/russound_rio/quality_scale.yaml b/homeassistant/components/russound_rio/quality_scale.yaml new file mode 100644 index 00000000000..603485705a3 --- /dev/null +++ b/homeassistant/components/russound_rio/quality_scale.yaml @@ -0,0 +1,88 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + appropriate-polling: + status: exempt + comment: | + This integration uses a push API. No polling required. + brands: done + common-modules: done + config-flow-test-coverage: + status: todo + comment: | + Missing unique_id check in test_form() and test_import(). Test for adding same device twice missing. + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. + docs-high-level-description: done + docs-installation-instructions: todo + docs-removal-instructions: todo + entity-event-setup: done + entity-unique-id: done + has-entity-name: done + runtime-data: + status: todo + comment: Can use RussoundConfigEntry in async_unload_entry + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: done + reauthentication-flow: + status: exempt + comment: | + This integration does not require authentication. + parallel-updates: todo + test-coverage: todo + integration-owner: done + docs-installation-parameters: todo + docs-configuration-parameters: + status: exempt + comment: | + This integration does not have an options flow. + # Gold + entity-translations: + status: exempt + comment: | + There are no entities to translate. 
+ entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: + status: exempt + comment: | + This integration doesn't have enough / noisy entities that warrant being disabled by default. + discovery: todo + stale-devices: todo + diagnostics: done + exception-translations: done + icon-translations: todo + reconfiguration-flow: todo + dynamic-devices: todo + discovery-update-info: todo + repair-issues: done + docs-use-cases: todo + docs-supported-devices: done + docs-supported-functions: todo + docs-data-update: todo + docs-known-limitations: todo + docs-troubleshooting: todo + docs-examples: todo + + # Platinum + async-dependency: done + inject-websession: + status: exempt + comment: | + This integration uses telnet exclusively and does not make http calls. + strict-typing: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index aa62b5a5120..a69311672da 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -890,7 +890,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "rtorrent", "rtsp_to_webrtc", "ruckus_unleashed", - "russound_rio", "russound_rnet", "ruuvi_gateway", "ruuvitag_ble", From fa05cc5e70df31f20d9a46a7c398b0b01db1b2de Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Wed, 11 Dec 2024 10:04:16 -0800 Subject: [PATCH 0516/1198] Add quality scale for nest integration (#131330) Co-authored-by: Joost Lekkerkerker Co-authored-by: Franck Nijhof --- .../components/nest/quality_scale.yaml | 86 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 86 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/nest/quality_scale.yaml diff --git a/homeassistant/components/nest/quality_scale.yaml b/homeassistant/components/nest/quality_scale.yaml new file mode 100644 index 00000000000..969ee66059d --- /dev/null +++ b/homeassistant/components/nest/quality_scale.yaml @@ -0,0 +1,86 @@ +rules: + # Bronze + config-flow: + status: todo + comment: Some fields are missing a data_description + brands: done + dependency-transparency: done + common-modules: + status: exempt + comment: The integration does not have a base entity or coordinator. + has-entity-name: done + action-setup: + status: exempt + comment: The integration does not register actions. + appropriate-polling: + status: exempt + comment: The integration does not poll. + test-before-configure: + status: todo + comment: | + The integration does a connection test in the configuration flow, however + it does not fail if the user has ipv6 misconfigured. + entity-event-setup: done + unique-config-entry: done + entity-unique-id: done + docs-installation-instructions: done + docs-removal-instructions: todo + test-before-setup: + status: todo + comment: | + The integration does tests on setup, however the most common issues + observed are related to ipv6 misconfigurations and the error messages + are not self explanatory and can be improved. + docs-high-level-description: done + config-flow-test-coverage: + status: todo + comment: | + The integration has full test coverage however it does not yet assert the specific contents of the + unique id of the created entry. Additional tests coverage for combinations of features like + `test_dhcp_discovery_with_creds` would also be useful. + Tests can be improved so that all end in either CREATE_ENTRY or ABORT. 
+ docs-actions: done + runtime-data: done + + # Silver + log-when-unavailable: todo + config-entry-unloading: todo + reauthentication-flow: + status: todo + comment: | + Supports reauthentication, however can be improved to ensure the user does not change accounts + action-exceptions: todo + docs-installation-parameters: todo + integration-owner: todo + parallel-updates: todo + test-coverage: todo + docs-configuration-parameters: todo + entity-unavailable: todo + + # Gold + docs-examples: todo + discovery-update-info: todo + entity-device-class: todo + entity-translations: todo + docs-data-update: todo + entity-disabled-by-default: todo + discovery: todo + exception-translations: todo + devices: todo + docs-supported-devices: todo + icon-translations: todo + docs-known-limitations: todo + stale-devices: todo + docs-supported-functions: todo + repair-issues: todo + reconfiguration-flow: todo + entity-category: todo + dynamic-devices: todo + docs-troubleshooting: todo + diagnostics: todo + docs-use-cases: todo + + # Platinum + async-dependency: todo + strict-typing: todo + inject-websession: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index a69311672da..49f05b78a16 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -707,7 +707,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "neato", "nederlandse_spoorwegen", "ness_alarm", - "nest", "netatmo", "netdata", "netgear", From 0e8fe1eb41252b0241d9cc16e0bc8247bb842c3c Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 11 Dec 2024 19:15:36 +0100 Subject: [PATCH 0517/1198] Improve coverage in light reproduce state (#132929) --- .../components/light/test_reproduce_state.py | 48 +++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/tests/components/light/test_reproduce_state.py b/tests/components/light/test_reproduce_state.py index aa698129915..30a5e3f6842 100644 --- a/tests/components/light/test_reproduce_state.py +++ b/tests/components/light/test_reproduce_state.py @@ -193,6 +193,54 @@ async def test_filter_color_modes( assert len(turn_on_calls) == 1 +async def test_filter_color_modes_missing_attributes( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test warning on missing attribute when filtering for color mode.""" + color_mode = light.ColorMode.COLOR_TEMP + hass.states.async_set("light.entity", "off", {}) + expected_log = ( + "Color mode color_temp specified " + "but attribute color_temp missing for: light.entity" + ) + + turn_on_calls = async_mock_service(hass, "light", "turn_on") + + all_colors = { + **VALID_COLOR_TEMP, + **VALID_HS_COLOR, + **VALID_RGB_COLOR, + **VALID_RGBW_COLOR, + **VALID_RGBWW_COLOR, + **VALID_XY_COLOR, + **VALID_BRIGHTNESS, + } + + # Test missing `color_temp` attribute + stored_attributes = {**all_colors} + stored_attributes.pop("color_temp") + caplog.clear() + await async_reproduce_state( + hass, + [State("light.entity", "on", {**stored_attributes, "color_mode": color_mode})], + ) + assert len(turn_on_calls) == 0 + assert expected_log in caplog.text + + # Test with correct `color_temp` attribute + stored_attributes["color_temp"] = 240 + expected = {"brightness": 180, "color_temp": 240} + caplog.clear() + await async_reproduce_state( + hass, + [State("light.entity", "on", {**all_colors, "color_mode": color_mode})], + ) + assert len(turn_on_calls) == 1 + assert turn_on_calls[0].domain == "light" + assert dict(turn_on_calls[0].data) == {"entity_id": "light.entity", 
**expected} + assert expected_log not in caplog.text + + @pytest.mark.parametrize( "saved_state", [ From 833557fad5a136dc83b49e350b7999891eccb043 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Wed, 11 Dec 2024 19:16:49 +0100 Subject: [PATCH 0518/1198] Trigger full ci run on global mypy config change (#132909) --- .core_files.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.core_files.yaml b/.core_files.yaml index 6fd3a74df92..cc99487f68d 100644 --- a/.core_files.yaml +++ b/.core_files.yaml @@ -6,6 +6,7 @@ core: &core - homeassistant/helpers/** - homeassistant/package_constraints.txt - homeassistant/util/** + - mypy.ini - pyproject.toml - requirements.txt - setup.cfg From 73e68971e80a07d2a5b11a5540486228037d5148 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Wed, 11 Dec 2024 20:48:55 +0100 Subject: [PATCH 0519/1198] Remove port from Elgato configuration flow (#132961) --- homeassistant/components/elgato/config_flow.py | 9 ++------- homeassistant/components/elgato/coordinator.py | 3 +-- homeassistant/components/elgato/quality_scale.yaml | 5 +---- homeassistant/components/elgato/strings.json | 3 +-- tests/components/elgato/conftest.py | 3 +-- tests/components/elgato/snapshots/test_config_flow.ambr | 6 ------ tests/components/elgato/test_config_flow.py | 8 ++++---- 7 files changed, 10 insertions(+), 27 deletions(-) diff --git a/homeassistant/components/elgato/config_flow.py b/homeassistant/components/elgato/config_flow.py index 5329fcee90a..e20afc73a2d 100644 --- a/homeassistant/components/elgato/config_flow.py +++ b/homeassistant/components/elgato/config_flow.py @@ -9,7 +9,7 @@ import voluptuous as vol from homeassistant.components import onboarding, zeroconf from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PORT +from homeassistant.const import CONF_HOST, CONF_MAC from homeassistant.core import callback from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -34,7 +34,6 @@ class ElgatoFlowHandler(ConfigFlow, domain=DOMAIN): return self._async_show_setup_form() self.host = user_input[CONF_HOST] - self.port = user_input[CONF_PORT] try: await self._get_elgato_serial_number(raise_on_progress=False) @@ -49,7 +48,6 @@ class ElgatoFlowHandler(ConfigFlow, domain=DOMAIN): """Handle zeroconf discovery.""" self.host = discovery_info.host self.mac = discovery_info.properties.get("id") - self.port = discovery_info.port or 9123 try: await self._get_elgato_serial_number() @@ -81,7 +79,6 @@ class ElgatoFlowHandler(ConfigFlow, domain=DOMAIN): data_schema=vol.Schema( { vol.Required(CONF_HOST): str, - vol.Optional(CONF_PORT, default=9123): int, } ), errors=errors or {}, @@ -93,7 +90,6 @@ class ElgatoFlowHandler(ConfigFlow, domain=DOMAIN): title=self.serial_number, data={ CONF_HOST: self.host, - CONF_PORT: self.port, CONF_MAC: self.mac, }, ) @@ -103,7 +99,6 @@ class ElgatoFlowHandler(ConfigFlow, domain=DOMAIN): session = async_get_clientsession(self.hass) elgato = Elgato( host=self.host, - port=self.port, session=session, ) info = await elgato.info() @@ -113,7 +108,7 @@ class ElgatoFlowHandler(ConfigFlow, domain=DOMAIN): info.serial_number, raise_on_progress=raise_on_progress ) self._abort_if_unique_id_configured( - updates={CONF_HOST: self.host, CONF_PORT: self.port, CONF_MAC: self.mac} + updates={CONF_HOST: self.host, CONF_MAC: self.mac} ) self.serial_number = info.serial_number diff --git a/homeassistant/components/elgato/coordinator.py 
b/homeassistant/components/elgato/coordinator.py index c2bc79491a1..f3cf9216374 100644 --- a/homeassistant/components/elgato/coordinator.py +++ b/homeassistant/components/elgato/coordinator.py @@ -5,7 +5,7 @@ from dataclasses import dataclass from elgato import BatteryInfo, Elgato, ElgatoConnectionError, Info, Settings, State from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -34,7 +34,6 @@ class ElgatoDataUpdateCoordinator(DataUpdateCoordinator[ElgatoData]): self.config_entry = entry self.client = Elgato( entry.data[CONF_HOST], - port=entry.data[CONF_PORT], session=async_get_clientsession(hass), ) super().__init__( diff --git a/homeassistant/components/elgato/quality_scale.yaml b/homeassistant/components/elgato/quality_scale.yaml index 301d00931d2..513940e2438 100644 --- a/homeassistant/components/elgato/quality_scale.yaml +++ b/homeassistant/components/elgato/quality_scale.yaml @@ -5,10 +5,7 @@ rules: brands: done common-modules: done config-flow-test-coverage: done - config-flow: - status: todo - comment: | - The data_description for port is missing. + config-flow: done dependency-transparency: done docs-actions: done docs-high-level-description: done diff --git a/homeassistant/components/elgato/strings.json b/homeassistant/components/elgato/strings.json index 6e1031c8ddf..727b8ee7024 100644 --- a/homeassistant/components/elgato/strings.json +++ b/homeassistant/components/elgato/strings.json @@ -5,8 +5,7 @@ "user": { "description": "Set up your Elgato Light to integrate with Home Assistant.", "data": { - "host": "[%key:common::config_flow::data::host%]", - "port": "[%key:common::config_flow::data::port%]" + "host": "[%key:common::config_flow::data::host%]" }, "data_description": { "host": "The hostname or IP address of your Elgato device." 
diff --git a/tests/components/elgato/conftest.py b/tests/components/elgato/conftest.py index 73b09421576..afa89f8eb27 100644 --- a/tests/components/elgato/conftest.py +++ b/tests/components/elgato/conftest.py @@ -7,7 +7,7 @@ from elgato import BatteryInfo, ElgatoNoBatteryError, Info, Settings, State import pytest from homeassistant.components.elgato.const import DOMAIN -from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PORT +from homeassistant.const import CONF_HOST, CONF_MAC from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, get_fixture_path, load_fixture @@ -35,7 +35,6 @@ def mock_config_entry() -> MockConfigEntry: data={ CONF_HOST: "127.0.0.1", CONF_MAC: "AA:BB:CC:DD:EE:FF", - CONF_PORT: 9123, }, unique_id="CN11A1A00001", ) diff --git a/tests/components/elgato/snapshots/test_config_flow.ambr b/tests/components/elgato/snapshots/test_config_flow.ambr index d5d005cff9c..522482ab602 100644 --- a/tests/components/elgato/snapshots/test_config_flow.ambr +++ b/tests/components/elgato/snapshots/test_config_flow.ambr @@ -8,7 +8,6 @@ 'data': dict({ 'host': '127.0.0.1', 'mac': None, - 'port': 9123, }), 'description': None, 'description_placeholders': None, @@ -21,7 +20,6 @@ 'data': dict({ 'host': '127.0.0.1', 'mac': None, - 'port': 9123, }), 'disabled_by': None, 'discovery_keys': dict({ @@ -53,7 +51,6 @@ 'data': dict({ 'host': '127.0.0.1', 'mac': 'AA:BB:CC:DD:EE:FF', - 'port': 9123, }), 'description': None, 'description_placeholders': None, @@ -66,7 +63,6 @@ 'data': dict({ 'host': '127.0.0.1', 'mac': 'AA:BB:CC:DD:EE:FF', - 'port': 9123, }), 'disabled_by': None, 'discovery_keys': dict({ @@ -97,7 +93,6 @@ 'data': dict({ 'host': '127.0.0.1', 'mac': 'AA:BB:CC:DD:EE:FF', - 'port': 9123, }), 'description': None, 'description_placeholders': None, @@ -110,7 +105,6 @@ 'data': dict({ 'host': '127.0.0.1', 'mac': 'AA:BB:CC:DD:EE:FF', - 'port': 9123, }), 'disabled_by': None, 'discovery_keys': dict({ diff --git a/tests/components/elgato/test_config_flow.py b/tests/components/elgato/test_config_flow.py index 6da99241b64..42abc0cde63 100644 --- a/tests/components/elgato/test_config_flow.py +++ b/tests/components/elgato/test_config_flow.py @@ -10,7 +10,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components import zeroconf from homeassistant.components.elgato.const import DOMAIN from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF -from homeassistant.const import CONF_HOST, CONF_PORT, CONF_SOURCE +from homeassistant.const import CONF_HOST, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -33,7 +33,7 @@ async def test_full_user_flow_implementation( assert result.get("step_id") == "user" result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={CONF_HOST: "127.0.0.1", CONF_PORT: 9123} + result["flow_id"], user_input={CONF_HOST: "127.0.0.1"} ) assert result2.get("type") is FlowResultType.CREATE_ENTRY @@ -94,7 +94,7 @@ async def test_connection_error( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, - data={CONF_HOST: "127.0.0.1", CONF_PORT: 9123}, + data={CONF_HOST: "127.0.0.1"}, ) assert result.get("type") is FlowResultType.FORM @@ -135,7 +135,7 @@ async def test_user_device_exists_abort( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, - data={CONF_HOST: "127.0.0.1", CONF_PORT: 9123}, + data={CONF_HOST: "127.0.0.1"}, ) assert result.get("type") 
is FlowResultType.ABORT From 525614b7cda1440e94f8794a84d6f4fd5a6a410f Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Wed, 11 Dec 2024 20:52:20 +0100 Subject: [PATCH 0520/1198] Bump pylamarzocco to 1.4.0 (#132917) * Bump pylamarzocco to 1.4.0 * update device snapshot --- homeassistant/components/lamarzocco/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/lamarzocco/snapshots/test_diagnostics.ambr | 2 ++ 4 files changed, 5 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lamarzocco/manifest.json b/homeassistant/components/lamarzocco/manifest.json index 00e76096e7f..0d2111a2026 100644 --- a/homeassistant/components/lamarzocco/manifest.json +++ b/homeassistant/components/lamarzocco/manifest.json @@ -36,5 +36,5 @@ "integration_type": "device", "iot_class": "cloud_polling", "loggers": ["pylamarzocco"], - "requirements": ["pylamarzocco==1.3.3"] + "requirements": ["pylamarzocco==1.4.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index e039a6b486b..c6ab1e2dfae 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2030,7 +2030,7 @@ pykwb==0.0.8 pylacrosse==0.4 # homeassistant.components.lamarzocco -pylamarzocco==1.3.3 +pylamarzocco==1.4.0 # homeassistant.components.lastfm pylast==5.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f67bee3f32f..f9ed2bebf99 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1638,7 +1638,7 @@ pykrakenapi==0.1.8 pykulersky==0.5.2 # homeassistant.components.lamarzocco -pylamarzocco==1.3.3 +pylamarzocco==1.4.0 # homeassistant.components.lastfm pylast==5.1.0 diff --git a/tests/components/lamarzocco/snapshots/test_diagnostics.ambr b/tests/components/lamarzocco/snapshots/test_diagnostics.ambr index b185557bd08..b1d8140b2ce 100644 --- a/tests/components/lamarzocco/snapshots/test_diagnostics.ambr +++ b/tests/components/lamarzocco/snapshots/test_diagnostics.ambr @@ -3,6 +3,7 @@ dict({ 'config': dict({ 'backflush_enabled': False, + 'bbw_settings': None, 'boilers': dict({ 'CoffeeBoiler1': dict({ 'current_temperature': 96.5, @@ -44,6 +45,7 @@ }), }), 'prebrew_mode': 'TypeB', + 'scale': None, 'smart_standby': dict({ 'enabled': True, 'minutes': 10, From d43d84a67fa1a97ee7eb4bd60168ee81eceaaeb4 Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Wed, 11 Dec 2024 15:07:29 -0500 Subject: [PATCH 0521/1198] Add parallel updates & use typed config entry for Russound RIO (#132958) --- homeassistant/components/russound_rio/__init__.py | 2 +- homeassistant/components/russound_rio/media_player.py | 2 ++ homeassistant/components/russound_rio/quality_scale.yaml | 6 ++---- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/russound_rio/__init__.py b/homeassistant/components/russound_rio/__init__.py index 784629ea0bc..b068fbd1892 100644 --- a/homeassistant/components/russound_rio/__init__.py +++ b/homeassistant/components/russound_rio/__init__.py @@ -58,7 +58,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: RussoundConfigEntry) -> return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: RussoundConfigEntry) -> bool: """Unload a config entry.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): await entry.runtime_data.disconnect() diff --git a/homeassistant/components/russound_rio/media_player.py 
b/homeassistant/components/russound_rio/media_player.py index 45818d3e25b..12b41485167 100644 --- a/homeassistant/components/russound_rio/media_player.py +++ b/homeassistant/components/russound_rio/media_player.py @@ -28,6 +28,8 @@ from .entity import RussoundBaseEntity, command _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + async def async_setup_platform( hass: HomeAssistant, diff --git a/homeassistant/components/russound_rio/quality_scale.yaml b/homeassistant/components/russound_rio/quality_scale.yaml index 603485705a3..4c7214cfd8b 100644 --- a/homeassistant/components/russound_rio/quality_scale.yaml +++ b/homeassistant/components/russound_rio/quality_scale.yaml @@ -26,9 +26,7 @@ rules: entity-event-setup: done entity-unique-id: done has-entity-name: done - runtime-data: - status: todo - comment: Can use RussoundConfigEntry in async_unload_entry + runtime-data: done test-before-configure: done test-before-setup: done unique-config-entry: done @@ -42,7 +40,7 @@ rules: status: exempt comment: | This integration does not require authentication. - parallel-updates: todo + parallel-updates: done test-coverage: todo integration-owner: done docs-installation-parameters: todo From a1e4b3b0af1191b02bad30f281960a31b53e949b Mon Sep 17 00:00:00 2001 From: G Johansson Date: Wed, 11 Dec 2024 21:23:26 +0100 Subject: [PATCH 0522/1198] Update quality scale for nordpool (#132964) * Update quality scale for nordpool * more --- .../components/nordpool/quality_scale.yaml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/nordpool/quality_scale.yaml b/homeassistant/components/nordpool/quality_scale.yaml index 2cb0b655b17..79d5ac0ecea 100644 --- a/homeassistant/components/nordpool/quality_scale.yaml +++ b/homeassistant/components/nordpool/quality_scale.yaml @@ -20,8 +20,8 @@ rules: This integration does not provide additional actions. common-modules: done docs-high-level-description: done - docs-installation-instructions: todo - docs-removal-instructions: todo + docs-installation-instructions: done + docs-removal-instructions: done docs-actions: status: exempt comment: | @@ -39,7 +39,7 @@ rules: status: exempt comment: | This integration does not require authentication. - parallel-updates: todo + parallel-updates: done test-coverage: done integration-owner: done docs-installation-parameters: done @@ -78,16 +78,16 @@ rules: status: exempt comment: | This integration doesn't have any cases where raising an issue is needed. - docs-use-cases: todo + docs-use-cases: done docs-supported-devices: status: exempt comment: | Only service, no device docs-supported-functions: done - docs-data-update: todo - docs-known-limitations: todo + docs-data-update: done + docs-known-limitations: done docs-troubleshooting: todo - docs-examples: todo + docs-examples: done # Platinum async-dependency: done From 8e991fc92fe095079f74c46b3bf1be897bd881ef Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 11 Dec 2024 21:49:34 +0100 Subject: [PATCH 0523/1198] Merge feature branch with backup changes to dev (#132954) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Reapply "Make WS command backup/generate send events" (#131530) This reverts commit 9b8316df3f78d136ae73c096168bd73ffebc4465. 
* MVP implementation of Backup sync agents (#126122) * init sync agent * add syncing * root import * rename list to info and add sync state * Add base backup class * Revert unneded change * adjust tests * move to kitchen_sink * split * move * Adjustments * Adjustment * update * Tests * Test unknown agent * adjust * Adjust for different test environments * Change /info WS to contain a dictinary * reorder * Add websocket command to trigger sync from the supervisor * cleanup * Make mypy happier --------- Co-authored-by: Erik * Make BackupSyncMetadata model a dataclass (#130555) Make backup BackupSyncMetadata model a dataclass * Rename backup sync agent to backup agent (#130575) * Rename sync agent module to agent * Rename BackupSyncAgent to BackupAgent * Fix test typo * Rename async_get_backup_sync_agents to async_get_backup_agents * Rename and clean up remaining sync things * Update kitchen sink * Apply suggestions from code review * Update test_manager.py --------- Co-authored-by: Erik Montnemery * Add additional options to WS command backup/generate (#130530) * Add additional options to WS command backup/generate * Improve test * Improve test * Align parameter names in backup/agents/* WS commands (#130590) * Allow setting password for backups (#110630) * Allow setting password for backups * use is_hassio from helpers * move it * Fix getting psw * Fix restoring with psw * Address review comments * Improve docstring * Adjust kitchen sink * Adjust --------- Co-authored-by: Erik * Export relevant names from backup integration (#130596) * Tweak backup agent interface (#130613) * Tweak backup agent interface * Adjust kitchen_sink * Test kitchen sink backup (#130609) * Test agents_list_backups * Test agents_info * Test agents_download * Export Backup from manager * Test agents_upload * Update tests after rebase * Use backup domain * Remove WS command backup/upload (#130588) * Remove WS command backup/upload * Disable failing kitchen_sink test * Make local backup a backup agent (#130623) * Make local backup a backup agent * Adjust * Adjust * Adjust * Adjust tests * Adjust * Adjust * Adjust docstring * Adjust * Protect members of CoreLocalBackupAgent * Remove redundant check for file * Make the backup.create service use the first local agent * Add BackupAgent.async_get_backup * Fix some TODOs * Add support for downloading backup from a remote agent * Fix restore * Fix test * Adjust kitchen_sink test * Remove unused method BackupManager.async_get_backup_path * Re-enable kitchen sink test * Remove BaseBackupManager.async_upload_backup * Support restore from remote agent * Fix review comments * Include backup agent error in response to WS command backup/info (#130884) * Adjust code related to WS command backup/info (#130890) * Include backup agent error in response to WS command backup/details (#130892) * Remove LOCAL_AGENT_ID constant from backup manager (#130895) * Add backup config storage (#130871) * Add base for backup config * Allow updating backup config * Test loading backup config * Add backup config update method * Add temporary check for BackupAgent.async_remove_backup (#130893) * Rename backup slug to backup_id (#130902) * Improve backup websocket API tests (#130912) * Improve backup websocket API tests * Add missing snapshot * Fix tests leaving files behind * Improve backup manager backup creation tests (#130916) * Remove class backup.backup.LocalBackup (#130919) * Add agent delete backup (#130921) * Add backup agent delete backup * Remove agents delete websocket command * Update 
docstring Co-authored-by: Erik Montnemery --------- Co-authored-by: Erik Montnemery * Disable core local backup agent in hassio (#130933) * Rename remove backup to delete backup (#130940) * Rename remove backup to delete backup * Revert "backup/delete" * Refactor BackupManager (#130947) * Refactor BackupManager * Adjust * Adjust backup creation * Copy in executor * Fix BackupManager.async_get_backup (#130975) * Fix typo in backup tests (#130978) * Adjust backup NewBackup class (#130976) * Remove class backup.BackupUploadMetadata (#130977) Remove class backup.BackupMetadata * Report backup size in bytes instead of MB (#131028) Co-authored-by: Robert Resch * Speed up CI for feature branch (#131030) * Speed up CI for feature branch * adjust * fix * fix * fix * fix * Rename remove to delete in backup websocket type (#131023) * Revert "Speed up CI for feature branch" (#131074) Revert "Speed up CI for feature branch (#131030)" This reverts commit 791280506d1859b1a722f5064d75bcbe48acc1c3. * Rename class BaseBackup to AgentBackup (#131083) * Rename class BaseBackup to AgentBackup * Update tests * Speed up CI for backup feature branch (#131079) * Add backup platform to the hassio integration (#130991) * Add backup platform to the hassio integration * Add hassio to after_dependencies of backup * Address review comments * Remove redundant hassio parametrization of tests * Add tests * Address review comments * Bump CI cache version * Revert "Bump CI cache version" This reverts commit 2ab4d2b1795c953ccfc9b17c47f9df3faac83749. * Extend backup info class AgentBackup (#131110) * Extend backup info class AgentBackup * Update kitchen sink * Update kitchen sink test * Update kitchen sink test * Exclude cloud and hassio from core files (#131117) * Remove unnecessary **kwargs from backup API (#131124) * Fix backup tests (#131128) * Freeze backup dataclasses (#131122) * Protect CoreLocalBackupAgent.load_backups (#131126) * Use backup metadata v2 in core/container backups (#131125) * Extend backup creation API (#131121) * Extend backup creation API * Add tests * Fix merge * Fix merge * Return agent errors when deleting a backup (#131142) * Return agent errors when deleting a backup * Remove redundant calls to dict.keys() * Add enum type for backup folder (#131158) * Add method AgentBackup.from_dict (#131164) * Remove WS command backup/agents/list_backups (#131163) * Handle backup schedule (#131127) * Add backup schedule handling * Fix unrelated incorrect type annotation in test * Clarify delay save * Make the backup time compatible with the recorder nightly job * Update create backup parameters * Use typed dict for create backup parameters * Simplify schedule state * Group create backup parameters * Move parameter * Fix typo * Use Folder model * Handle deserialization of folders better * Fail on attempt to include addons or folders in core backup (#131204) * Fix AgentBackup test (#131201) * Add options to WS command backup/restore (#131194) * Add options to WS command backup/restore * Add tests * Fix test * Teach core backup to restore only database or only settings (#131225) * Exclude tmp_backups/*.tar from backups (#131243) * Add WS command backup/subscribe_events (#131250) * Clean up temporary directory after restoring backup (#131263) * Improve hassio backup agent list (#131268) * Include `last_automatic_backup` in reply to backup/info (#131293) Include last_automatic_backup in reply to backup/info * Handle backup delete after config (#131259) * Handle delete after copies * Handle delete after days * Add 
some test examples * Test config_delete_after_logic * Test config_delete_after_copies_logic * Test more delete after days * Add debug logs * Always delete the oldest backup first * Never remove the last backup * Clean up words Co-authored-by: Erik Montnemery * Fix after cleaning words * Use utcnow * Remove duplicate guard * Simplify sorting * Delete backups even if there are agent errors on get backups --------- Co-authored-by: Erik Montnemery * Rename backup delete after to backup retention (#131364) * Rename backup delete after to backup retention * Tweak * Remove length limit on `agent_ids` when configuring backup (#132057) Remove length limit on agent_ids when configuring backup * Rename backup retention_config to retention (#132068) * Modify backup agent API to be stream oriented (#132090) * Modify backup agent API to be stream oriented * Fix tests * Adjust after code review * Remove no longer needed pylint override * Improve test coverage * Change BackupAgent API to work with AsyncIterator objects * Don't close files in the event loop * Don't close files in the event loop * Fix backup manager create backup log (#132174) * Fix debug log level (#132186) * Add cloud backup agent (#129621) * Init cloud backup sync * Add more metadata * Fix typo * Adjust to base changes * Don't raise on list if more than one backup is available * Adjust to base branch * Fetch always and verify on download * Update homeassistant/components/cloud/backup.py Co-authored-by: Martin Hjelmare * Adjust to base branch changes * Not required anymore * Workaround * Fix blocking event loop * Fix * Add some tests * some tests * Add cloud backup delete functionality * Enable check * Fix ruff * Use fixture * Use iter_chunks instead * Remove read * Remove explicit export of read_backup * Align with BackupAgent API changes * Improve test coverage * Improve error handling * Adjust docstrings * Catch aiohttp.ClientError bubbling up from hass_nabucasa * Improve iteration --------- Co-authored-by: Erik Co-authored-by: Robert Resch Co-authored-by: Martin Hjelmare Co-authored-by: Krisjanis Lejejs * Extract file receiver from `BackupManager.async_receive_backup` to util (#132271) * Extract file receiver from BackupManager.async_receive_backup to util * Apply suggestions from code review Co-authored-by: Martin Hjelmare --------- Co-authored-by: Martin Hjelmare * Make sure backup directory exists (#132269) * Make sure backup directory exists * Hand off directory creation to executor * Use mkdir's exist_ok feeature * Organize BackupManager instance attributes (#132277) * Don't store received backups in a TempDir (#132272) * Don't store received backups in a TempDir * Fix tests * Make sure backup directory exists * Address review comments * Fix tests * Rewrite backup manager state handling (#132375) * Rewrite backup manager state handling * Address review comments * Modify backup reader/writer API to be stream oriented (#132464) * Internalize backup tasks (#132482) * Internalize backup tasks * Update test after rebase * Handle backup error during automatic backup (#132511) * Improve backup manager state logging (#132549) * Fix backup manager state when restore completes (#132548) * Remove WS command backup/agents/download (#132664) * Add WS command backup/generate_with_stored_settings (#132671) * Add WS command backup/generate_with_stored_settings * Register the new command, add tests * Refactor local agent backup tests (#132683) * Refactor test_load_backups * Refactor test loading agents * Refactor test_delete_backup * Refactor 
test_upload * Clean up duplicate tests * Refactor backup manager receive tests (#132701) * Refactor backup manager receive tests * Clean up * Refactor pre and post platform tests (#132708) * Refactor backup pre platform test * Refactor backup post platform test * Bump aiohasupervisor to version 0.2.2b0 (#132704) * Bump aiohasupervisor to version 0.2.2b0 * Adjust tests * Publish event when manager is idle after creating backup (#132724) * Handle busy backup manager when uploading backup (#132736) * Adjust hassio backup agent to supervisor changes (#132732) * Adjust hassio backup agent to supervisor changes * Fix typo * Refactor test for create backup with wrong parameters (#132763) * Refactor test not loading bad backup platforms (#132769) * Improve receive backup coverage (#132758) * Refactor initiate backup test (#132829) * Rename Backup to ManagerBackup (#132841) * Refactor backup config (#132845) * Refactor backup config * Remove unnecessary condition * Adjust tests * Improve initiate backup test (#132858) * Store the time of automatic backup attempts (#132860) * Store the time of automatic backup attempts * Address review comments * Update test * Update cloud test * Save agent failures when creating backups (#132850) * Save agent failures when creating backups * Update tests * Store KnownBackups * Add test * Only clear known_backups on no error, add tests * Address review comments * Store known backups as a list * Update tests * Track all backups created with backup strategy settings (#132916) * Track all backups created with saved settings * Rename * Add explicit call to save the store * Don't register service backup.create in HassOS installations (#132932) * Revert changes to action service backup.create (#132938) * Fix logic for cleaning up temporary backup file (#132934) * Fix logic for cleaning up temporary backup file * Reduce scope of patch * Fix with_strategy_settings info not sent over websocket (#132939) * Fix with_strategy_settings info not sent over websocket * Fix kitchen sink tests * Fix cloud and hassio tests * Revert backup ci changes (#132955) Revert changes speeding up CI * Fix revert of CI changes (#132960) --------- Co-authored-by: Joakim Sørensen Co-authored-by: Martin Hjelmare Co-authored-by: Robert Resch Co-authored-by: Paul Bottein Co-authored-by: Krisjanis Lejejs --- homeassistant/backup_restore.py | 101 +- homeassistant/components/backup/__init__.py | 75 +- homeassistant/components/backup/agent.py | 100 + homeassistant/components/backup/backup.py | 124 + homeassistant/components/backup/config.py | 444 +++ homeassistant/components/backup/const.py | 7 + homeassistant/components/backup/http.py | 55 +- homeassistant/components/backup/manager.py | 1262 ++++++-- homeassistant/components/backup/manifest.json | 3 +- homeassistant/components/backup/models.py | 61 + homeassistant/components/backup/store.py | 52 + homeassistant/components/backup/util.py | 111 + homeassistant/components/backup/websocket.py | 220 +- homeassistant/components/cloud/backup.py | 196 ++ homeassistant/components/cloud/manifest.json | 7 +- homeassistant/components/hassio/backup.py | 365 +++ homeassistant/components/hassio/manifest.json | 2 +- .../components/kitchen_sink/backup.py | 92 + homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- requirements_all.txt | 3 +- requirements_test_all.txt | 3 +- tests/components/backup/common.py | 153 +- tests/components/backup/conftest.py | 97 + .../backup/snapshots/test_backup.ambr | 206 ++ 
.../backup/snapshots/test_websocket.ambr | 2748 ++++++++++++++++- tests/components/backup/test_backup.py | 129 + tests/components/backup/test_http.py | 42 +- tests/components/backup/test_init.py | 22 +- tests/components/backup/test_manager.py | 1074 +++++-- tests/components/backup/test_models.py | 11 + tests/components/backup/test_websocket.py | 1600 +++++++++- tests/components/cloud/test_backup.py | 568 ++++ tests/components/conftest.py | 4 + tests/components/hassio/test_backup.py | 403 +++ tests/components/kitchen_sink/test_backup.py | 194 ++ tests/test_backup_restore.py | 210 +- 38 files changed, 9977 insertions(+), 773 deletions(-) create mode 100644 homeassistant/components/backup/agent.py create mode 100644 homeassistant/components/backup/backup.py create mode 100644 homeassistant/components/backup/config.py create mode 100644 homeassistant/components/backup/models.py create mode 100644 homeassistant/components/backup/store.py create mode 100644 homeassistant/components/backup/util.py create mode 100644 homeassistant/components/cloud/backup.py create mode 100644 homeassistant/components/hassio/backup.py create mode 100644 homeassistant/components/kitchen_sink/backup.py create mode 100644 tests/components/backup/conftest.py create mode 100644 tests/components/backup/snapshots/test_backup.ambr create mode 100644 tests/components/backup/test_backup.py create mode 100644 tests/components/backup/test_models.py create mode 100644 tests/components/cloud/test_backup.py create mode 100644 tests/components/hassio/test_backup.py create mode 100644 tests/components/kitchen_sink/test_backup.py diff --git a/homeassistant/backup_restore.py b/homeassistant/backup_restore.py index 32991dfb2d3..f9250e3129e 100644 --- a/homeassistant/backup_restore.py +++ b/homeassistant/backup_restore.py @@ -1,6 +1,10 @@ """Home Assistant module to handle restoring backups.""" +from __future__ import annotations + +from collections.abc import Iterable from dataclasses import dataclass +import hashlib import json import logging from pathlib import Path @@ -14,7 +18,12 @@ import securetar from .const import __version__ as HA_VERSION RESTORE_BACKUP_FILE = ".HA_RESTORE" -KEEP_PATHS = ("backups",) +KEEP_BACKUPS = ("backups",) +KEEP_DATABASE = ( + "home-assistant_v2.db", + "home-assistant_v2.db-wal", +) + _LOGGER = logging.getLogger(__name__) @@ -24,6 +33,21 @@ class RestoreBackupFileContent: """Definition for restore backup file content.""" backup_file_path: Path + password: str | None + remove_after_restore: bool + restore_database: bool + restore_homeassistant: bool + + +def password_to_key(password: str) -> bytes: + """Generate a AES Key from password. + + Matches the implementation in supervisor.backups.utils.password_to_key. 
+ """ + key: bytes = password.encode() + for _ in range(100): + key = hashlib.sha256(key).digest() + return key[:16] def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent | None: @@ -32,20 +56,24 @@ def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent | try: instruction_content = json.loads(instruction_path.read_text(encoding="utf-8")) return RestoreBackupFileContent( - backup_file_path=Path(instruction_content["path"]) + backup_file_path=Path(instruction_content["path"]), + password=instruction_content["password"], + remove_after_restore=instruction_content["remove_after_restore"], + restore_database=instruction_content["restore_database"], + restore_homeassistant=instruction_content["restore_homeassistant"], ) - except (FileNotFoundError, json.JSONDecodeError): + except (FileNotFoundError, KeyError, json.JSONDecodeError): return None -def _clear_configuration_directory(config_dir: Path) -> None: - """Delete all files and directories in the config directory except for the backups directory.""" - keep_paths = [config_dir.joinpath(path) for path in KEEP_PATHS] - config_contents = sorted( - [entry for entry in config_dir.iterdir() if entry not in keep_paths] +def _clear_configuration_directory(config_dir: Path, keep: Iterable[str]) -> None: + """Delete all files and directories in the config directory except entries in the keep list.""" + keep_paths = [config_dir.joinpath(path) for path in keep] + entries_to_remove = sorted( + entry for entry in config_dir.iterdir() if entry not in keep_paths ) - for entry in config_contents: + for entry in entries_to_remove: entrypath = config_dir.joinpath(entry) if entrypath.is_file(): @@ -54,12 +82,15 @@ def _clear_configuration_directory(config_dir: Path) -> None: shutil.rmtree(entrypath) -def _extract_backup(config_dir: Path, backup_file_path: Path) -> None: +def _extract_backup( + config_dir: Path, + restore_content: RestoreBackupFileContent, +) -> None: """Extract the backup file to the config directory.""" with ( TemporaryDirectory() as tempdir, securetar.SecureTarFile( - backup_file_path, + restore_content.backup_file_path, gzip=False, mode="r", ) as ostf, @@ -88,22 +119,41 @@ def _extract_backup(config_dir: Path, backup_file_path: Path) -> None: f"homeassistant.tar{'.gz' if backup_meta["compressed"] else ''}", ), gzip=backup_meta["compressed"], + key=password_to_key(restore_content.password) + if restore_content.password is not None + else None, mode="r", ) as istf: - for member in istf.getmembers(): - if member.name == "data": - continue - member.name = member.name.replace("data/", "") - _clear_configuration_directory(config_dir) istf.extractall( - path=config_dir, - members=[ - member - for member in securetar.secure_path(istf) - if member.name != "data" - ], + path=Path(tempdir, "homeassistant"), + members=securetar.secure_path(istf), filter="fully_trusted", ) + if restore_content.restore_homeassistant: + keep = list(KEEP_BACKUPS) + if not restore_content.restore_database: + keep.extend(KEEP_DATABASE) + _clear_configuration_directory(config_dir, keep) + shutil.copytree( + Path(tempdir, "homeassistant", "data"), + config_dir, + dirs_exist_ok=True, + ignore=shutil.ignore_patterns(*(keep)), + ) + elif restore_content.restore_database: + for entry in KEEP_DATABASE: + entrypath = config_dir / entry + + if entrypath.is_file(): + entrypath.unlink() + elif entrypath.is_dir(): + shutil.rmtree(entrypath) + + for entry in KEEP_DATABASE: + shutil.copy( + Path(tempdir, "homeassistant", "data", entry), + 
config_dir, + ) def restore_backup(config_dir_path: str) -> bool: @@ -119,8 +169,13 @@ def restore_backup(config_dir_path: str) -> bool: backup_file_path = restore_content.backup_file_path _LOGGER.info("Restoring %s", backup_file_path) try: - _extract_backup(config_dir, backup_file_path) + _extract_backup( + config_dir=config_dir, + restore_content=restore_content, + ) except FileNotFoundError as err: raise ValueError(f"Backup file {backup_file_path} does not exist") from err + if restore_content.remove_after_restore: + backup_file_path.unlink(missing_ok=True) _LOGGER.info("Restore complete, restarting") return True diff --git a/homeassistant/components/backup/__init__.py b/homeassistant/components/backup/__init__.py index 200cb4a3f65..f1a6f3be196 100644 --- a/homeassistant/components/backup/__init__.py +++ b/homeassistant/components/backup/__init__.py @@ -5,36 +5,81 @@ from homeassistant.helpers import config_validation as cv from homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.typing import ConfigType -from .const import DATA_MANAGER, DOMAIN, LOGGER +from .agent import ( + BackupAgent, + BackupAgentError, + BackupAgentPlatformProtocol, + LocalBackupAgent, +) +from .const import DATA_MANAGER, DOMAIN from .http import async_register_http_views -from .manager import BackupManager +from .manager import ( + BackupManager, + BackupPlatformProtocol, + BackupReaderWriter, + CoreBackupReaderWriter, + CreateBackupEvent, + ManagerBackup, + NewBackup, + WrittenBackup, +) +from .models import AddonInfo, AgentBackup, Folder from .websocket import async_register_websocket_handlers +__all__ = [ + "AddonInfo", + "AgentBackup", + "ManagerBackup", + "BackupAgent", + "BackupAgentError", + "BackupAgentPlatformProtocol", + "BackupPlatformProtocol", + "BackupReaderWriter", + "CreateBackupEvent", + "Folder", + "LocalBackupAgent", + "NewBackup", + "WrittenBackup", +] + CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Backup integration.""" - backup_manager = BackupManager(hass) - hass.data[DATA_MANAGER] = backup_manager - with_hassio = is_hassio(hass) + reader_writer: BackupReaderWriter + if not with_hassio: + reader_writer = CoreBackupReaderWriter(hass) + else: + # pylint: disable-next=import-outside-toplevel, hass-component-root-import + from homeassistant.components.hassio.backup import SupervisorBackupReaderWriter + + reader_writer = SupervisorBackupReaderWriter(hass) + + backup_manager = BackupManager(hass, reader_writer) + hass.data[DATA_MANAGER] = backup_manager + await backup_manager.async_setup() + async_register_websocket_handlers(hass, with_hassio) - if with_hassio: - if DOMAIN in config: - LOGGER.error( - "The backup integration is not supported on this installation method, " - "please remove it from your configuration" - ) - return True - async def async_handle_create_service(call: ServiceCall) -> None: """Service handler for creating backups.""" - await backup_manager.async_create_backup() + agent_id = list(backup_manager.local_backup_agents)[0] + await backup_manager.async_create_backup( + agent_ids=[agent_id], + include_addons=None, + include_all_addons=False, + include_database=True, + include_folders=None, + include_homeassistant=True, + name=None, + password=None, + ) - hass.services.async_register(DOMAIN, "create", async_handle_create_service) + if not with_hassio: + hass.services.async_register(DOMAIN, "create", async_handle_create_service) async_register_http_views(hass) diff --git 
a/homeassistant/components/backup/agent.py b/homeassistant/components/backup/agent.py new file mode 100644 index 00000000000..36f2e7ee34e --- /dev/null +++ b/homeassistant/components/backup/agent.py @@ -0,0 +1,100 @@ +"""Backup agents for the Backup integration.""" + +from __future__ import annotations + +import abc +from collections.abc import AsyncIterator, Callable, Coroutine +from pathlib import Path +from typing import Any, Protocol + +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from .models import AgentBackup + + +class BackupAgentError(HomeAssistantError): + """Base class for backup agent errors.""" + + +class BackupAgentUnreachableError(BackupAgentError): + """Raised when the agent can't reach its API.""" + + _message = "The backup agent is unreachable." + + +class BackupAgent(abc.ABC): + """Backup agent interface.""" + + name: str + + @abc.abstractmethod + async def async_download_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AsyncIterator[bytes]: + """Download a backup file. + + :param backup_id: The ID of the backup that was returned in async_list_backups. + :return: An async iterator that yields bytes. + """ + + @abc.abstractmethod + async def async_upload_backup( + self, + *, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + backup: AgentBackup, + **kwargs: Any, + ) -> None: + """Upload a backup. + + :param open_stream: A function returning an async iterator that yields bytes. + :param backup: Metadata about the backup that should be uploaded. + """ + + @abc.abstractmethod + async def async_delete_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> None: + """Delete a backup file. + + :param backup_id: The ID of the backup that was returned in async_list_backups. + """ + + @abc.abstractmethod + async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: + """List backups.""" + + @abc.abstractmethod + async def async_get_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AgentBackup | None: + """Return a backup.""" + + +class LocalBackupAgent(BackupAgent): + """Local backup agent.""" + + @abc.abstractmethod + def get_backup_path(self, backup_id: str) -> Path: + """Return the local path to a backup. + + The method should return the path to the backup file with the specified id. 
+ """ + + +class BackupAgentPlatformProtocol(Protocol): + """Define the format of backup platforms which implement backup agents.""" + + async def async_get_backup_agents( + self, + hass: HomeAssistant, + **kwargs: Any, + ) -> list[BackupAgent]: + """Return a list of backup agents.""" diff --git a/homeassistant/components/backup/backup.py b/homeassistant/components/backup/backup.py new file mode 100644 index 00000000000..b9aad89c7f3 --- /dev/null +++ b/homeassistant/components/backup/backup.py @@ -0,0 +1,124 @@ +"""Local backup support for Core and Container installations.""" + +from __future__ import annotations + +from collections.abc import AsyncIterator, Callable, Coroutine +import json +from pathlib import Path +from tarfile import TarError +from typing import Any + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.hassio import is_hassio + +from .agent import BackupAgent, LocalBackupAgent +from .const import LOGGER +from .models import AgentBackup +from .util import read_backup + + +async def async_get_backup_agents( + hass: HomeAssistant, + **kwargs: Any, +) -> list[BackupAgent]: + """Return the local backup agent.""" + if is_hassio(hass): + return [] + return [CoreLocalBackupAgent(hass)] + + +class CoreLocalBackupAgent(LocalBackupAgent): + """Local backup agent for Core and Container installations.""" + + name = "local" + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize the backup agent.""" + super().__init__() + self._hass = hass + self._backup_dir = Path(hass.config.path("backups")) + self._backups: dict[str, AgentBackup] = {} + self._loaded_backups = False + + async def _load_backups(self) -> None: + """Load data of stored backup files.""" + backups = await self._hass.async_add_executor_job(self._read_backups) + LOGGER.debug("Loaded %s local backups", len(backups)) + self._backups = backups + self._loaded_backups = True + + def _read_backups(self) -> dict[str, AgentBackup]: + """Read backups from disk.""" + backups: dict[str, AgentBackup] = {} + for backup_path in self._backup_dir.glob("*.tar"): + try: + backup = read_backup(backup_path) + backups[backup.backup_id] = backup + except (OSError, TarError, json.JSONDecodeError, KeyError) as err: + LOGGER.warning("Unable to read backup %s: %s", backup_path, err) + return backups + + async def async_download_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AsyncIterator[bytes]: + """Download a backup file.""" + raise NotImplementedError + + async def async_upload_backup( + self, + *, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + backup: AgentBackup, + **kwargs: Any, + ) -> None: + """Upload a backup.""" + self._backups[backup.backup_id] = backup + + async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: + """List backups.""" + if not self._loaded_backups: + await self._load_backups() + return list(self._backups.values()) + + async def async_get_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AgentBackup | None: + """Return a backup.""" + if not self._loaded_backups: + await self._load_backups() + + if not (backup := self._backups.get(backup_id)): + return None + + backup_path = self.get_backup_path(backup_id) + if not await self._hass.async_add_executor_job(backup_path.exists): + LOGGER.debug( + ( + "Removing tracked backup (%s) that does not exists on the expected" + " path %s" + ), + backup.backup_id, + backup_path, + ) + self._backups.pop(backup_id) + return None + + return backup + + def get_backup_path(self, backup_id: str) 
-> Path: + """Return the local path to a backup.""" + return self._backup_dir / f"{backup_id}.tar" + + async def async_delete_backup(self, backup_id: str, **kwargs: Any) -> None: + """Delete a backup file.""" + if await self.async_get_backup(backup_id) is None: + return + + backup_path = self.get_backup_path(backup_id) + await self._hass.async_add_executor_job(backup_path.unlink, True) + LOGGER.debug("Deleted backup located at %s", backup_path) + self._backups.pop(backup_id) diff --git a/homeassistant/components/backup/config.py b/homeassistant/components/backup/config.py new file mode 100644 index 00000000000..6304d0aa90b --- /dev/null +++ b/homeassistant/components/backup/config.py @@ -0,0 +1,444 @@ +"""Provide persistent configuration for the backup integration.""" + +from __future__ import annotations + +import asyncio +from collections.abc import Callable +from dataclasses import dataclass, field, replace +from datetime import datetime, timedelta +from enum import StrEnum +from typing import TYPE_CHECKING, Self, TypedDict + +from cronsim import CronSim + +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.event import async_call_later, async_track_point_in_time +from homeassistant.helpers.typing import UNDEFINED, UndefinedType +from homeassistant.util import dt as dt_util + +from .const import LOGGER +from .models import Folder + +if TYPE_CHECKING: + from .manager import BackupManager, ManagerBackup + +# The time of the automatic backup event should be compatible with +# the time of the recorder's nightly job which runs at 04:12. +# Run the backup at 04:45. +CRON_PATTERN_DAILY = "45 4 * * *" +CRON_PATTERN_WEEKLY = "45 4 * * {}" + + +class StoredBackupConfig(TypedDict): + """Represent the stored backup config.""" + + create_backup: StoredCreateBackupConfig + last_attempted_strategy_backup: datetime | None + last_completed_strategy_backup: datetime | None + retention: StoredRetentionConfig + schedule: StoredBackupSchedule + + +@dataclass(kw_only=True) +class BackupConfigData: + """Represent loaded backup config data.""" + + create_backup: CreateBackupConfig + last_attempted_strategy_backup: datetime | None = None + last_completed_strategy_backup: datetime | None = None + retention: RetentionConfig + schedule: BackupSchedule + + @classmethod + def from_dict(cls, data: StoredBackupConfig) -> Self: + """Initialize backup config data from a dict.""" + include_folders_data = data["create_backup"]["include_folders"] + if include_folders_data: + include_folders = [Folder(folder) for folder in include_folders_data] + else: + include_folders = None + retention = data["retention"] + + return cls( + create_backup=CreateBackupConfig( + agent_ids=data["create_backup"]["agent_ids"], + include_addons=data["create_backup"]["include_addons"], + include_all_addons=data["create_backup"]["include_all_addons"], + include_database=data["create_backup"]["include_database"], + include_folders=include_folders, + name=data["create_backup"]["name"], + password=data["create_backup"]["password"], + ), + last_attempted_strategy_backup=data["last_attempted_strategy_backup"], + last_completed_strategy_backup=data["last_completed_strategy_backup"], + retention=RetentionConfig( + copies=retention["copies"], + days=retention["days"], + ), + schedule=BackupSchedule(state=ScheduleState(data["schedule"]["state"])), + ) + + def to_dict(self) -> StoredBackupConfig: + """Convert backup config data to a dict.""" + return StoredBackupConfig( + create_backup=self.create_backup.to_dict(), + 
last_attempted_strategy_backup=self.last_attempted_strategy_backup, + last_completed_strategy_backup=self.last_completed_strategy_backup, + retention=self.retention.to_dict(), + schedule=self.schedule.to_dict(), + ) + + +class BackupConfig: + """Handle backup config.""" + + def __init__(self, hass: HomeAssistant, manager: BackupManager) -> None: + """Initialize backup config.""" + self.data = BackupConfigData( + create_backup=CreateBackupConfig(), + retention=RetentionConfig(), + schedule=BackupSchedule(), + ) + self._manager = manager + + def load(self, stored_config: StoredBackupConfig) -> None: + """Load config.""" + self.data = BackupConfigData.from_dict(stored_config) + self.data.schedule.apply(self._manager) + + async def update( + self, + *, + create_backup: CreateBackupParametersDict | UndefinedType = UNDEFINED, + retention: RetentionParametersDict | UndefinedType = UNDEFINED, + schedule: ScheduleState | UndefinedType = UNDEFINED, + ) -> None: + """Update config.""" + if create_backup is not UNDEFINED: + self.data.create_backup = replace(self.data.create_backup, **create_backup) + if retention is not UNDEFINED: + new_retention = RetentionConfig(**retention) + if new_retention != self.data.retention: + self.data.retention = new_retention + self.data.retention.apply(self._manager) + if schedule is not UNDEFINED: + new_schedule = BackupSchedule(state=schedule) + if new_schedule.to_dict() != self.data.schedule.to_dict(): + self.data.schedule = new_schedule + self.data.schedule.apply(self._manager) + + self._manager.store.save() + + +@dataclass(kw_only=True) +class RetentionConfig: + """Represent the backup retention configuration.""" + + copies: int | None = None + days: int | None = None + + def apply(self, manager: BackupManager) -> None: + """Apply backup retention configuration.""" + if self.days is not None: + self._schedule_next(manager) + else: + self._unschedule_next(manager) + + def to_dict(self) -> StoredRetentionConfig: + """Convert backup retention configuration to a dict.""" + return StoredRetentionConfig( + copies=self.copies, + days=self.days, + ) + + @callback + def _schedule_next( + self, + manager: BackupManager, + ) -> None: + """Schedule the next delete after days.""" + self._unschedule_next(manager) + + async def _delete_backups(now: datetime) -> None: + """Delete backups older than days.""" + self._schedule_next(manager) + + def _backups_filter( + backups: dict[str, ManagerBackup], + ) -> dict[str, ManagerBackup]: + """Return backups older than days to delete.""" + # we need to check here since we await before + # this filter is applied + if self.days is None: + return {} + now = dt_util.utcnow() + return { + backup_id: backup + for backup_id, backup in backups.items() + if dt_util.parse_datetime(backup.date, raise_on_error=True) + + timedelta(days=self.days) + < now + } + + await _delete_filtered_backups(manager, _backups_filter) + + manager.remove_next_delete_event = async_call_later( + manager.hass, timedelta(days=1), _delete_backups + ) + + @callback + def _unschedule_next(self, manager: BackupManager) -> None: + """Unschedule the next delete after days.""" + if (remove_next_event := manager.remove_next_delete_event) is not None: + remove_next_event() + manager.remove_next_delete_event = None + + +class StoredRetentionConfig(TypedDict): + """Represent the stored backup retention configuration.""" + + copies: int | None + days: int | None + + +class RetentionParametersDict(TypedDict, total=False): + """Represent the parameters for retention.""" + + copies: int 
| None + days: int | None + + +class StoredBackupSchedule(TypedDict): + """Represent the stored backup schedule configuration.""" + + state: ScheduleState + + +class ScheduleState(StrEnum): + """Represent the schedule state.""" + + NEVER = "never" + DAILY = "daily" + MONDAY = "mon" + TUESDAY = "tue" + WEDNESDAY = "wed" + THURSDAY = "thu" + FRIDAY = "fri" + SATURDAY = "sat" + SUNDAY = "sun" + + +@dataclass(kw_only=True) +class BackupSchedule: + """Represent the backup schedule.""" + + state: ScheduleState = ScheduleState.NEVER + cron_event: CronSim | None = field(init=False, default=None) + + @callback + def apply( + self, + manager: BackupManager, + ) -> None: + """Apply a new schedule. + + There are only three possible state types: never, daily, or weekly. + """ + if self.state is ScheduleState.NEVER: + self._unschedule_next(manager) + return + + if self.state is ScheduleState.DAILY: + self._schedule_next(CRON_PATTERN_DAILY, manager) + else: + self._schedule_next( + CRON_PATTERN_WEEKLY.format(self.state.value), + manager, + ) + + @callback + def _schedule_next( + self, + cron_pattern: str, + manager: BackupManager, + ) -> None: + """Schedule the next backup.""" + self._unschedule_next(manager) + now = dt_util.now() + if (cron_event := self.cron_event) is None: + seed_time = manager.config.data.last_completed_strategy_backup or now + cron_event = self.cron_event = CronSim(cron_pattern, seed_time) + next_time = next(cron_event) + + if next_time < now: + # schedule a backup at next daily time once + # if we missed the last scheduled backup + cron_event = CronSim(CRON_PATTERN_DAILY, now) + next_time = next(cron_event) + # reseed the cron event attribute + # add a day to the next time to avoid scheduling at the same time again + self.cron_event = CronSim(cron_pattern, now + timedelta(days=1)) + + async def _create_backup(now: datetime) -> None: + """Create backup.""" + manager.remove_next_backup_event = None + config_data = manager.config.data + self._schedule_next(cron_pattern, manager) + + # create the backup + try: + await manager.async_create_backup( + agent_ids=config_data.create_backup.agent_ids, + include_addons=config_data.create_backup.include_addons, + include_all_addons=config_data.create_backup.include_all_addons, + include_database=config_data.create_backup.include_database, + include_folders=config_data.create_backup.include_folders, + include_homeassistant=True, # always include HA + name=config_data.create_backup.name, + password=config_data.create_backup.password, + with_strategy_settings=True, + ) + except Exception: # noqa: BLE001 + # another more specific exception will be added + # and handled in the future + LOGGER.exception("Unexpected error creating automatic backup") + + # delete old backups more numerous than copies + + def _backups_filter( + backups: dict[str, ManagerBackup], + ) -> dict[str, ManagerBackup]: + """Return oldest backups more numerous than copies to delete.""" + # we need to check here since we await before + # this filter is applied + if config_data.retention.copies is None: + return {} + return dict( + sorted( + backups.items(), + key=lambda backup_item: backup_item[1].date, + )[: len(backups) - config_data.retention.copies] + ) + + await _delete_filtered_backups(manager, _backups_filter) + + manager.remove_next_backup_event = async_track_point_in_time( + manager.hass, _create_backup, next_time + ) + + def to_dict(self) -> StoredBackupSchedule: + """Convert backup schedule to a dict.""" + return StoredBackupSchedule(state=self.state) + + @callback + 
def _unschedule_next(self, manager: BackupManager) -> None: + """Unschedule the next backup.""" + if (remove_next_event := manager.remove_next_backup_event) is not None: + remove_next_event() + manager.remove_next_backup_event = None + + +@dataclass(kw_only=True) +class CreateBackupConfig: + """Represent the config for async_create_backup.""" + + agent_ids: list[str] = field(default_factory=list) + include_addons: list[str] | None = None + include_all_addons: bool = False + include_database: bool = True + include_folders: list[Folder] | None = None + name: str | None = None + password: str | None = None + + def to_dict(self) -> StoredCreateBackupConfig: + """Convert create backup config to a dict.""" + return { + "agent_ids": self.agent_ids, + "include_addons": self.include_addons, + "include_all_addons": self.include_all_addons, + "include_database": self.include_database, + "include_folders": self.include_folders, + "name": self.name, + "password": self.password, + } + + +class StoredCreateBackupConfig(TypedDict): + """Represent the stored config for async_create_backup.""" + + agent_ids: list[str] + include_addons: list[str] | None + include_all_addons: bool + include_database: bool + include_folders: list[Folder] | None + name: str | None + password: str | None + + +class CreateBackupParametersDict(TypedDict, total=False): + """Represent the parameters for async_create_backup.""" + + agent_ids: list[str] + include_addons: list[str] | None + include_all_addons: bool + include_database: bool + include_folders: list[Folder] | None + name: str | None + password: str | None + + +async def _delete_filtered_backups( + manager: BackupManager, + backup_filter: Callable[[dict[str, ManagerBackup]], dict[str, ManagerBackup]], +) -> None: + """Delete backups parsed with a filter. + + :param manager: The backup manager. + :param backup_filter: A filter that should return the backups to delete. + """ + backups, get_agent_errors = await manager.async_get_backups() + if get_agent_errors: + LOGGER.debug( + "Error getting backups; continuing anyway: %s", + get_agent_errors, + ) + + LOGGER.debug("Total backups: %s", backups) + + filtered_backups = backup_filter(backups) + + if not filtered_backups: + return + + # always delete oldest backup first + filtered_backups = dict( + sorted( + filtered_backups.items(), + key=lambda backup_item: backup_item[1].date, + ) + ) + + if len(filtered_backups) >= len(backups): + # Never delete the last backup. 
+ last_backup = filtered_backups.popitem() + LOGGER.debug("Keeping the last backup: %s", last_backup) + + LOGGER.debug("Backups to delete: %s", filtered_backups) + + if not filtered_backups: + return + + backup_ids = list(filtered_backups) + delete_results = await asyncio.gather( + *(manager.async_delete_backup(backup_id) for backup_id in filtered_backups) + ) + agent_errors = { + backup_id: error + for backup_id, error in zip(backup_ids, delete_results, strict=True) + if error + } + if agent_errors: + LOGGER.error( + "Error deleting old copies: %s", + agent_errors, + ) diff --git a/homeassistant/components/backup/const.py b/homeassistant/components/backup/const.py index f613f7cc352..c2070a37b2d 100644 --- a/homeassistant/components/backup/const.py +++ b/homeassistant/components/backup/const.py @@ -10,6 +10,7 @@ from homeassistant.util.hass_dict import HassKey if TYPE_CHECKING: from .manager import BackupManager +BUF_SIZE = 2**20 * 4 # 4MB DOMAIN = "backup" DATA_MANAGER: HassKey[BackupManager] = HassKey(DOMAIN) LOGGER = getLogger(__package__) @@ -22,6 +23,12 @@ EXCLUDE_FROM_BACKUP = [ "*.log.*", "*.log", "backups/*.tar", + "tmp_backups/*.tar", "OZW_Log.txt", "tts/*", ] + +EXCLUDE_DATABASE_FROM_BACKUP = [ + "home-assistant_v2.db", + "home-assistant_v2.db-wal", +] diff --git a/homeassistant/components/backup/http.py b/homeassistant/components/backup/http.py index 42693035bd3..73a8c8eb602 100644 --- a/homeassistant/components/backup/http.py +++ b/homeassistant/components/backup/http.py @@ -8,10 +8,11 @@ from typing import cast from aiohttp import BodyPartReader from aiohttp.hdrs import CONTENT_DISPOSITION -from aiohttp.web import FileResponse, Request, Response +from aiohttp.web import FileResponse, Request, Response, StreamResponse from homeassistant.components.http import KEY_HASS, HomeAssistantView, require_admin from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError from homeassistant.util import slugify from .const import DATA_MANAGER @@ -27,30 +28,47 @@ def async_register_http_views(hass: HomeAssistant) -> None: class DownloadBackupView(HomeAssistantView): """Generate backup view.""" - url = "/api/backup/download/{slug}" + url = "/api/backup/download/{backup_id}" name = "api:backup:download" async def get( self, request: Request, - slug: str, - ) -> FileResponse | Response: + backup_id: str, + ) -> StreamResponse | FileResponse | Response: """Download a backup file.""" if not request["hass_user"].is_admin: return Response(status=HTTPStatus.UNAUTHORIZED) + try: + agent_id = request.query.getone("agent_id") + except KeyError: + return Response(status=HTTPStatus.BAD_REQUEST) manager = request.app[KEY_HASS].data[DATA_MANAGER] - backup = await manager.async_get_backup(slug=slug) + if agent_id not in manager.backup_agents: + return Response(status=HTTPStatus.BAD_REQUEST) + agent = manager.backup_agents[agent_id] + backup = await agent.async_get_backup(backup_id) - if backup is None or not backup.path.exists(): + # We don't need to check if the path exists, aiohttp.FileResponse will handle + # that + if backup is None: return Response(status=HTTPStatus.NOT_FOUND) - return FileResponse( - path=backup.path.as_posix(), - headers={ - CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar" - }, - ) + headers = { + CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar" + } + if agent_id in manager.local_backup_agents: + local_agent = manager.local_backup_agents[agent_id] + path = 
local_agent.get_backup_path(backup_id) + return FileResponse(path=path.as_posix(), headers=headers) + + stream = await agent.async_download_backup(backup_id) + response = StreamResponse(status=HTTPStatus.OK, headers=headers) + await response.prepare(request) + async for chunk in stream: + await response.write(chunk) + return response class UploadBackupView(HomeAssistantView): @@ -62,15 +80,24 @@ class UploadBackupView(HomeAssistantView): @require_admin async def post(self, request: Request) -> Response: """Upload a backup file.""" + try: + agent_ids = request.query.getall("agent_id") + except KeyError: + return Response(status=HTTPStatus.BAD_REQUEST) manager = request.app[KEY_HASS].data[DATA_MANAGER] reader = await request.multipart() contents = cast(BodyPartReader, await reader.next()) try: - await manager.async_receive_backup(contents=contents) + await manager.async_receive_backup(contents=contents, agent_ids=agent_ids) except OSError as err: return Response( - body=f"Can't write backup file {err}", + body=f"Can't write backup file: {err}", + status=HTTPStatus.INTERNAL_SERVER_ERROR, + ) + except HomeAssistantError as err: + return Response( + body=f"Can't upload backup file: {err}", status=HTTPStatus.INTERNAL_SERVER_ERROR, ) except asyncio.CancelledError: diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index 4300f75eed0..1defbd350fb 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -4,49 +4,181 @@ from __future__ import annotations import abc import asyncio -from dataclasses import asdict, dataclass +from collections.abc import AsyncIterator, Callable, Coroutine +from dataclasses import dataclass +from enum import StrEnum import hashlib import io import json from pathlib import Path -from queue import SimpleQueue import shutil import tarfile -from tarfile import TarError -from tempfile import TemporaryDirectory import time -from typing import Any, Protocol, cast +from typing import TYPE_CHECKING, Any, Protocol, TypedDict import aiohttp from securetar import SecureTarFile, atomic_contents_add -from homeassistant.backup_restore import RESTORE_BACKUP_FILE +from homeassistant.backup_restore import RESTORE_BACKUP_FILE, password_to_key from homeassistant.const import __version__ as HAVERSION from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import integration_platform from homeassistant.helpers.json import json_bytes from homeassistant.util import dt as dt_util -from homeassistant.util.json import json_loads_object -from .const import DOMAIN, EXCLUDE_FROM_BACKUP, LOGGER - -BUF_SIZE = 2**20 * 4 # 4MB +from .agent import ( + BackupAgent, + BackupAgentError, + BackupAgentPlatformProtocol, + LocalBackupAgent, +) +from .config import BackupConfig +from .const import ( + BUF_SIZE, + DATA_MANAGER, + DOMAIN, + EXCLUDE_DATABASE_FROM_BACKUP, + EXCLUDE_FROM_BACKUP, + LOGGER, +) +from .models import AgentBackup, Folder +from .store import BackupStore +from .util import make_backup_dir, read_backup -@dataclass(slots=True) -class Backup: +@dataclass(frozen=True, kw_only=True, slots=True) +class NewBackup: + """New backup class.""" + + backup_job_id: str + + +@dataclass(frozen=True, kw_only=True, slots=True) +class ManagerBackup(AgentBackup): """Backup class.""" - slug: str - name: str - date: str - path: Path - size: float + agent_ids: list[str] + failed_agent_ids: list[str] + with_strategy_settings: bool - def 
as_dict(self) -> dict: - """Return a dict representation of this backup.""" - return {**asdict(self), "path": self.path.as_posix()} + +@dataclass(frozen=True, kw_only=True, slots=True) +class WrittenBackup: + """Written backup class.""" + + backup: AgentBackup + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]] + release_stream: Callable[[], Coroutine[Any, Any, None]] + + +class BackupManagerState(StrEnum): + """Backup state type.""" + + IDLE = "idle" + CREATE_BACKUP = "create_backup" + RECEIVE_BACKUP = "receive_backup" + RESTORE_BACKUP = "restore_backup" + + +class CreateBackupStage(StrEnum): + """Create backup stage enum.""" + + ADDON_REPOSITORIES = "addon_repositories" + ADDONS = "addons" + AWAIT_ADDON_RESTARTS = "await_addon_restarts" + DOCKER_CONFIG = "docker_config" + FINISHING_FILE = "finishing_file" + FOLDERS = "folders" + HOME_ASSISTANT = "home_assistant" + UPLOAD_TO_AGENTS = "upload_to_agents" + + +class CreateBackupState(StrEnum): + """Create backup state enum.""" + + COMPLETED = "completed" + FAILED = "failed" + IN_PROGRESS = "in_progress" + + +class ReceiveBackupStage(StrEnum): + """Receive backup stage enum.""" + + RECEIVE_FILE = "receive_file" + UPLOAD_TO_AGENTS = "upload_to_agents" + + +class ReceiveBackupState(StrEnum): + """Receive backup state enum.""" + + COMPLETED = "completed" + FAILED = "failed" + IN_PROGRESS = "in_progress" + + +class RestoreBackupStage(StrEnum): + """Restore backup stage enum.""" + + ADDON_REPOSITORIES = "addon_repositories" + ADDONS = "addons" + AWAIT_ADDON_RESTARTS = "await_addon_restarts" + AWAIT_HOME_ASSISTANT_RESTART = "await_home_assistant_restart" + CHECK_HOME_ASSISTANT = "check_home_assistant" + DOCKER_CONFIG = "docker_config" + DOWNLOAD_FROM_AGENT = "download_from_agent" + FOLDERS = "folders" + HOME_ASSISTANT = "home_assistant" + REMOVE_DELTA_ADDONS = "remove_delta_addons" + + +class RestoreBackupState(StrEnum): + """Receive backup state enum.""" + + COMPLETED = "completed" + FAILED = "failed" + IN_PROGRESS = "in_progress" + + +@dataclass(frozen=True, kw_only=True, slots=True) +class ManagerStateEvent: + """Backup state class.""" + + manager_state: BackupManagerState + + +@dataclass(frozen=True, kw_only=True, slots=True) +class IdleEvent(ManagerStateEvent): + """Backup manager idle.""" + + manager_state: BackupManagerState = BackupManagerState.IDLE + + +@dataclass(frozen=True, kw_only=True, slots=True) +class CreateBackupEvent(ManagerStateEvent): + """Backup in progress.""" + + manager_state: BackupManagerState = BackupManagerState.CREATE_BACKUP + stage: CreateBackupStage | None + state: CreateBackupState + + +@dataclass(frozen=True, kw_only=True, slots=True) +class ReceiveBackupEvent(ManagerStateEvent): + """Backup receive.""" + + manager_state: BackupManagerState = BackupManagerState.RECEIVE_BACKUP + stage: ReceiveBackupStage | None + state: ReceiveBackupState + + +@dataclass(frozen=True, kw_only=True, slots=True) +class RestoreBackupEvent(ManagerStateEvent): + """Backup restore.""" + + manager_state: BackupManagerState = BackupManagerState.RESTORE_BACKUP + stage: RestoreBackupStage | None + state: RestoreBackupState class BackupPlatformProtocol(Protocol): @@ -59,40 +191,143 @@ class BackupPlatformProtocol(Protocol): """Perform operations after a backup finishes.""" -class BaseBackupManager(abc.ABC): +class BackupReaderWriter(abc.ABC): + """Abstract class for reading and writing backups.""" + + @abc.abstractmethod + async def async_create_backup( + self, + *, + agent_ids: list[str], + backup_name: str, + 
include_addons: list[str] | None, + include_all_addons: bool, + include_database: bool, + include_folders: list[Folder] | None, + include_homeassistant: bool, + on_progress: Callable[[ManagerStateEvent], None], + password: str | None, + ) -> tuple[NewBackup, asyncio.Task[WrittenBackup]]: + """Create a backup.""" + + @abc.abstractmethod + async def async_receive_backup( + self, + *, + agent_ids: list[str], + stream: AsyncIterator[bytes], + suggested_filename: str, + ) -> WrittenBackup: + """Receive a backup.""" + + @abc.abstractmethod + async def async_restore_backup( + self, + backup_id: str, + *, + agent_id: str, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + password: str | None, + restore_addons: list[str] | None, + restore_database: bool, + restore_folders: list[Folder] | None, + restore_homeassistant: bool, + ) -> None: + """Restore a backup.""" + + +class BackupManager: """Define the format that backup managers can have.""" - def __init__(self, hass: HomeAssistant) -> None: + def __init__(self, hass: HomeAssistant, reader_writer: BackupReaderWriter) -> None: """Initialize the backup manager.""" self.hass = hass - self.backing_up = False - self.backups: dict[str, Backup] = {} - self.loaded_platforms = False self.platforms: dict[str, BackupPlatformProtocol] = {} + self.backup_agents: dict[str, BackupAgent] = {} + self.local_backup_agents: dict[str, LocalBackupAgent] = {} + + self.config = BackupConfig(hass, self) + self._reader_writer = reader_writer + self.known_backups = KnownBackups(self) + self.store = BackupStore(hass, self) + + # Tasks and flags tracking backup and restore progress + self._backup_task: asyncio.Task[WrittenBackup] | None = None + self._backup_finish_task: asyncio.Task[None] | None = None + + # Backup schedule and retention listeners + self.remove_next_backup_event: Callable[[], None] | None = None + self.remove_next_delete_event: Callable[[], None] | None = None + + # Latest backup event and backup event subscribers + self.last_event: ManagerStateEvent = IdleEvent() + self._backup_event_subscriptions: list[Callable[[ManagerStateEvent], None]] = [] + + async def async_setup(self) -> None: + """Set up the backup manager.""" + stored = await self.store.load() + if stored: + self.config.load(stored["config"]) + self.known_backups.load(stored["backups"]) + + await self.load_platforms() + + @property + def state(self) -> BackupManagerState: + """Return the state of the backup manager.""" + return self.last_event.manager_state @callback - def _add_platform( + def _add_platform_pre_post_handler( self, - hass: HomeAssistant, integration_domain: str, platform: BackupPlatformProtocol, ) -> None: - """Add a platform to the backup manager.""" + """Add a backup platform.""" if not hasattr(platform, "async_pre_backup") or not hasattr( platform, "async_post_backup" ): - LOGGER.warning( - "%s does not implement required functions for the backup platform", - integration_domain, - ) return + self.platforms[integration_domain] = platform - async def async_pre_backup_actions(self, **kwargs: Any) -> None: - """Perform pre backup actions.""" - if not self.loaded_platforms: - await self.load_platforms() + async def _async_add_platform_agents( + self, + integration_domain: str, + platform: BackupAgentPlatformProtocol, + ) -> None: + """Add a platform to the backup manager.""" + if not hasattr(platform, "async_get_backup_agents"): + return + agents = await platform.async_get_backup_agents(self.hass) + self.backup_agents.update( + 
{f"{integration_domain}.{agent.name}": agent for agent in agents} + ) + self.local_backup_agents.update( + { + f"{integration_domain}.{agent.name}": agent + for agent in agents + if isinstance(agent, LocalBackupAgent) + } + ) + + async def _add_platform( + self, + hass: HomeAssistant, + integration_domain: str, + platform: Any, + ) -> None: + """Add a backup platform manager.""" + self._add_platform_pre_post_handler(integration_domain, platform) + await self._async_add_platform_agents(integration_domain, platform) + LOGGER.debug("Backup platform %s loaded", integration_domain) + LOGGER.debug("%s platforms loaded in total", len(self.platforms)) + LOGGER.debug("%s agents loaded in total", len(self.backup_agents)) + LOGGER.debug("%s local agents loaded in total", len(self.local_backup_agents)) + + async def async_pre_backup_actions(self) -> None: + """Perform pre backup actions.""" pre_backup_results = await asyncio.gather( *( platform.async_pre_backup(self.hass) @@ -104,11 +339,8 @@ class BaseBackupManager(abc.ABC): if isinstance(result, Exception): raise result - async def async_post_backup_actions(self, **kwargs: Any) -> None: + async def async_post_backup_actions(self) -> None: """Perform post backup actions.""" - if not self.loaded_platforms: - await self.load_platforms() - post_backup_results = await asyncio.gather( *( platform.async_post_backup(self.hass) @@ -123,226 +355,703 @@ class BaseBackupManager(abc.ABC): async def load_platforms(self) -> None: """Load backup platforms.""" await integration_platform.async_process_integration_platforms( - self.hass, DOMAIN, self._add_platform, wait_for_platforms=True + self.hass, + DOMAIN, + self._add_platform, + wait_for_platforms=True, ) LOGGER.debug("Loaded %s platforms", len(self.platforms)) - self.loaded_platforms = True + LOGGER.debug("Loaded %s agents", len(self.backup_agents)) - @abc.abstractmethod - async def async_restore_backup(self, slug: str, **kwargs: Any) -> None: - """Restore a backup.""" + async def _async_upload_backup( + self, + *, + backup: AgentBackup, + agent_ids: list[str], + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + ) -> dict[str, Exception]: + """Upload a backup to selected agents.""" + agent_errors: dict[str, Exception] = {} - @abc.abstractmethod - async def async_create_backup(self, **kwargs: Any) -> Backup: - """Generate a backup.""" + LOGGER.debug("Uploading backup %s to agents %s", backup.backup_id, agent_ids) - @abc.abstractmethod - async def async_get_backups(self, **kwargs: Any) -> dict[str, Backup]: + sync_backup_results = await asyncio.gather( + *( + self.backup_agents[agent_id].async_upload_backup( + open_stream=open_stream, + backup=backup, + ) + for agent_id in agent_ids + ), + return_exceptions=True, + ) + for idx, result in enumerate(sync_backup_results): + if isinstance(result, Exception): + agent_errors[agent_ids[idx]] = result + LOGGER.exception( + "Error during backup upload - %s", result, exc_info=result + ) + return agent_errors + + async def async_get_backups( + self, + ) -> tuple[dict[str, ManagerBackup], dict[str, Exception]]: """Get backups. - Return a dictionary of Backup instances keyed by their slug. + Return a dictionary of Backup instances keyed by their ID. 
""" + backups: dict[str, ManagerBackup] = {} + agent_errors: dict[str, Exception] = {} + agent_ids = list(self.backup_agents) - @abc.abstractmethod - async def async_get_backup(self, *, slug: str, **kwargs: Any) -> Backup | None: + list_backups_results = await asyncio.gather( + *(agent.async_list_backups() for agent in self.backup_agents.values()), + return_exceptions=True, + ) + for idx, result in enumerate(list_backups_results): + if isinstance(result, BackupAgentError): + agent_errors[agent_ids[idx]] = result + continue + if isinstance(result, BaseException): + raise result + for agent_backup in result: + if (backup_id := agent_backup.backup_id) not in backups: + if known_backup := self.known_backups.get(backup_id): + failed_agent_ids = known_backup.failed_agent_ids + with_strategy_settings = known_backup.with_strategy_settings + else: + failed_agent_ids = [] + with_strategy_settings = False + backups[backup_id] = ManagerBackup( + agent_ids=[], + addons=agent_backup.addons, + backup_id=backup_id, + date=agent_backup.date, + database_included=agent_backup.database_included, + failed_agent_ids=failed_agent_ids, + folders=agent_backup.folders, + homeassistant_included=agent_backup.homeassistant_included, + homeassistant_version=agent_backup.homeassistant_version, + name=agent_backup.name, + protected=agent_backup.protected, + size=agent_backup.size, + with_strategy_settings=with_strategy_settings, + ) + backups[backup_id].agent_ids.append(agent_ids[idx]) + + return (backups, agent_errors) + + async def async_get_backup( + self, backup_id: str + ) -> tuple[ManagerBackup | None, dict[str, Exception]]: """Get a backup.""" + backup: ManagerBackup | None = None + agent_errors: dict[str, Exception] = {} + agent_ids = list(self.backup_agents) - @abc.abstractmethod - async def async_remove_backup(self, *, slug: str, **kwargs: Any) -> None: - """Remove a backup.""" + get_backup_results = await asyncio.gather( + *( + agent.async_get_backup(backup_id) + for agent in self.backup_agents.values() + ), + return_exceptions=True, + ) + for idx, result in enumerate(get_backup_results): + if isinstance(result, BackupAgentError): + agent_errors[agent_ids[idx]] = result + continue + if isinstance(result, BaseException): + raise result + if not result: + continue + if backup is None: + if known_backup := self.known_backups.get(backup_id): + failed_agent_ids = known_backup.failed_agent_ids + with_strategy_settings = known_backup.with_strategy_settings + else: + failed_agent_ids = [] + with_strategy_settings = False + backup = ManagerBackup( + agent_ids=[], + addons=result.addons, + backup_id=result.backup_id, + date=result.date, + database_included=result.database_included, + failed_agent_ids=failed_agent_ids, + folders=result.folders, + homeassistant_included=result.homeassistant_included, + homeassistant_version=result.homeassistant_version, + name=result.name, + protected=result.protected, + size=result.size, + with_strategy_settings=with_strategy_settings, + ) + backup.agent_ids.append(agent_ids[idx]) + + return (backup, agent_errors) + + async def async_delete_backup(self, backup_id: str) -> dict[str, Exception]: + """Delete a backup.""" + agent_errors: dict[str, Exception] = {} + agent_ids = list(self.backup_agents) + + delete_backup_results = await asyncio.gather( + *( + agent.async_delete_backup(backup_id) + for agent in self.backup_agents.values() + ), + return_exceptions=True, + ) + for idx, result in enumerate(delete_backup_results): + if isinstance(result, BackupAgentError): + 
agent_errors[agent_ids[idx]] = result + continue + if isinstance(result, BaseException): + raise result + + if not agent_errors: + self.known_backups.remove(backup_id) + + return agent_errors - @abc.abstractmethod async def async_receive_backup( self, *, + agent_ids: list[str], contents: aiohttp.BodyPartReader, - **kwargs: Any, ) -> None: """Receive and store a backup file from upload.""" + if self.state is not BackupManagerState.IDLE: + raise HomeAssistantError(f"Backup manager busy: {self.state}") + self.async_on_backup_event( + ReceiveBackupEvent(stage=None, state=ReceiveBackupState.IN_PROGRESS) + ) + try: + await self._async_receive_backup(agent_ids=agent_ids, contents=contents) + except Exception: + self.async_on_backup_event( + ReceiveBackupEvent(stage=None, state=ReceiveBackupState.FAILED) + ) + raise + else: + self.async_on_backup_event( + ReceiveBackupEvent(stage=None, state=ReceiveBackupState.COMPLETED) + ) + finally: + self.async_on_backup_event(IdleEvent()) + + async def _async_receive_backup( + self, + *, + agent_ids: list[str], + contents: aiohttp.BodyPartReader, + ) -> None: + """Receive and store a backup file from upload.""" + contents.chunk_size = BUF_SIZE + self.async_on_backup_event( + ReceiveBackupEvent( + stage=ReceiveBackupStage.RECEIVE_FILE, + state=ReceiveBackupState.IN_PROGRESS, + ) + ) + written_backup = await self._reader_writer.async_receive_backup( + agent_ids=agent_ids, + stream=contents, + suggested_filename=contents.filename or "backup.tar", + ) + self.async_on_backup_event( + ReceiveBackupEvent( + stage=ReceiveBackupStage.UPLOAD_TO_AGENTS, + state=ReceiveBackupState.IN_PROGRESS, + ) + ) + agent_errors = await self._async_upload_backup( + backup=written_backup.backup, + agent_ids=agent_ids, + open_stream=written_backup.open_stream, + ) + await written_backup.release_stream() + self.known_backups.add(written_backup.backup, agent_errors, False) + + async def async_create_backup( + self, + *, + agent_ids: list[str], + include_addons: list[str] | None, + include_all_addons: bool, + include_database: bool, + include_folders: list[Folder] | None, + include_homeassistant: bool, + name: str | None, + password: str | None, + with_strategy_settings: bool = False, + ) -> NewBackup: + """Create a backup.""" + new_backup = await self.async_initiate_backup( + agent_ids=agent_ids, + include_addons=include_addons, + include_all_addons=include_all_addons, + include_database=include_database, + include_folders=include_folders, + include_homeassistant=include_homeassistant, + name=name, + password=password, + with_strategy_settings=with_strategy_settings, + ) + assert self._backup_finish_task + await self._backup_finish_task + return new_backup + + async def async_initiate_backup( + self, + *, + agent_ids: list[str], + include_addons: list[str] | None, + include_all_addons: bool, + include_database: bool, + include_folders: list[Folder] | None, + include_homeassistant: bool, + name: str | None, + password: str | None, + with_strategy_settings: bool = False, + ) -> NewBackup: + """Initiate generating a backup.""" + if self.state is not BackupManagerState.IDLE: + raise HomeAssistantError(f"Backup manager busy: {self.state}") + + if with_strategy_settings: + self.config.data.last_attempted_strategy_backup = dt_util.now() + self.store.save() + + self.async_on_backup_event( + CreateBackupEvent(stage=None, state=CreateBackupState.IN_PROGRESS) + ) + try: + return await self._async_create_backup( + agent_ids=agent_ids, + include_addons=include_addons, + 
include_all_addons=include_all_addons, + include_database=include_database, + include_folders=include_folders, + include_homeassistant=include_homeassistant, + name=name, + password=password, + with_strategy_settings=with_strategy_settings, + ) + except Exception: + self.async_on_backup_event( + CreateBackupEvent(stage=None, state=CreateBackupState.FAILED) + ) + self.async_on_backup_event(IdleEvent()) + raise + + async def _async_create_backup( + self, + *, + agent_ids: list[str], + include_addons: list[str] | None, + include_all_addons: bool, + include_database: bool, + include_folders: list[Folder] | None, + include_homeassistant: bool, + name: str | None, + password: str | None, + with_strategy_settings: bool, + ) -> NewBackup: + """Initiate generating a backup.""" + if not agent_ids: + raise HomeAssistantError("At least one agent must be selected") + if any(agent_id not in self.backup_agents for agent_id in agent_ids): + raise HomeAssistantError("Invalid agent selected") + if include_all_addons and include_addons: + raise HomeAssistantError( + "Cannot include all addons and specify specific addons" + ) + + backup_name = name or f"Core {HAVERSION}" + new_backup, self._backup_task = await self._reader_writer.async_create_backup( + agent_ids=agent_ids, + backup_name=backup_name, + include_addons=include_addons, + include_all_addons=include_all_addons, + include_database=include_database, + include_folders=include_folders, + include_homeassistant=include_homeassistant, + on_progress=self.async_on_backup_event, + password=password, + ) + self._backup_finish_task = self.hass.async_create_task( + self._async_finish_backup(agent_ids, with_strategy_settings), + name="backup_manager_finish_backup", + ) + return new_backup + + async def _async_finish_backup( + self, agent_ids: list[str], with_strategy_settings: bool + ) -> None: + if TYPE_CHECKING: + assert self._backup_task is not None + try: + written_backup = await self._backup_task + except Exception as err: # noqa: BLE001 + LOGGER.debug("Generating backup failed", exc_info=err) + self.async_on_backup_event( + CreateBackupEvent(stage=None, state=CreateBackupState.FAILED) + ) + else: + LOGGER.debug( + "Generated new backup with backup_id %s, uploading to agents %s", + written_backup.backup.backup_id, + agent_ids, + ) + self.async_on_backup_event( + CreateBackupEvent( + stage=CreateBackupStage.UPLOAD_TO_AGENTS, + state=CreateBackupState.IN_PROGRESS, + ) + ) + agent_errors = await self._async_upload_backup( + backup=written_backup.backup, + agent_ids=agent_ids, + open_stream=written_backup.open_stream, + ) + await written_backup.release_stream() + if with_strategy_settings: + # create backup was successful, update last_completed_strategy_backup + self.config.data.last_completed_strategy_backup = dt_util.now() + self.store.save() + self.known_backups.add( + written_backup.backup, agent_errors, with_strategy_settings + ) + self.async_on_backup_event( + CreateBackupEvent(stage=None, state=CreateBackupState.COMPLETED) + ) + finally: + self._backup_task = None + self._backup_finish_task = None + self.async_on_backup_event(IdleEvent()) + + async def async_restore_backup( + self, + backup_id: str, + *, + agent_id: str, + password: str | None, + restore_addons: list[str] | None, + restore_database: bool, + restore_folders: list[Folder] | None, + restore_homeassistant: bool, + ) -> None: + """Initiate restoring a backup.""" + if self.state is not BackupManagerState.IDLE: + raise HomeAssistantError(f"Backup manager busy: {self.state}") + + 
self.async_on_backup_event( + RestoreBackupEvent(stage=None, state=RestoreBackupState.IN_PROGRESS) + ) + try: + await self._async_restore_backup( + backup_id=backup_id, + agent_id=agent_id, + password=password, + restore_addons=restore_addons, + restore_database=restore_database, + restore_folders=restore_folders, + restore_homeassistant=restore_homeassistant, + ) + except Exception: + self.async_on_backup_event( + RestoreBackupEvent(stage=None, state=RestoreBackupState.FAILED) + ) + raise + finally: + self.async_on_backup_event(IdleEvent()) + + async def _async_restore_backup( + self, + backup_id: str, + *, + agent_id: str, + password: str | None, + restore_addons: list[str] | None, + restore_database: bool, + restore_folders: list[Folder] | None, + restore_homeassistant: bool, + ) -> None: + """Initiate restoring a backup.""" + agent = self.backup_agents[agent_id] + if not await agent.async_get_backup(backup_id): + raise HomeAssistantError( + f"Backup {backup_id} not found in agent {agent_id}" + ) + + async def open_backup() -> AsyncIterator[bytes]: + return await agent.async_download_backup(backup_id) + + await self._reader_writer.async_restore_backup( + backup_id=backup_id, + open_stream=open_backup, + agent_id=agent_id, + password=password, + restore_addons=restore_addons, + restore_database=restore_database, + restore_folders=restore_folders, + restore_homeassistant=restore_homeassistant, + ) + + @callback + def async_on_backup_event( + self, + event: ManagerStateEvent, + ) -> None: + """Forward event to subscribers.""" + if (current_state := self.state) != (new_state := event.manager_state): + LOGGER.debug("Backup state: %s -> %s", current_state, new_state) + self.last_event = event + for subscription in self._backup_event_subscriptions: + subscription(event) + + @callback + def async_subscribe_events( + self, + on_event: Callable[[ManagerStateEvent], None], + ) -> Callable[[], None]: + """Subscribe events.""" + + def remove_subscription() -> None: + self._backup_event_subscriptions.remove(on_event) + + self._backup_event_subscriptions.append(on_event) + return remove_subscription -class BackupManager(BaseBackupManager): - """Backup manager for the Backup integration.""" +class KnownBackups: + """Track known backups.""" + + def __init__(self, manager: BackupManager) -> None: + """Initialize.""" + self._backups: dict[str, KnownBackup] = {} + self._manager = manager + + def load(self, stored_backups: list[StoredKnownBackup]) -> None: + """Load backups.""" + self._backups = { + backup["backup_id"]: KnownBackup( + backup_id=backup["backup_id"], + failed_agent_ids=backup["failed_agent_ids"], + with_strategy_settings=backup["with_strategy_settings"], + ) + for backup in stored_backups + } + + def to_list(self) -> list[StoredKnownBackup]: + """Convert known backups to a dict.""" + return [backup.to_dict() for backup in self._backups.values()] + + def add( + self, + backup: AgentBackup, + agent_errors: dict[str, Exception], + with_strategy_settings: bool, + ) -> None: + """Add a backup.""" + self._backups[backup.backup_id] = KnownBackup( + backup_id=backup.backup_id, + failed_agent_ids=list(agent_errors), + with_strategy_settings=with_strategy_settings, + ) + self._manager.store.save() + + def get(self, backup_id: str) -> KnownBackup | None: + """Get a backup.""" + return self._backups.get(backup_id) + + def remove(self, backup_id: str) -> None: + """Remove a backup.""" + if backup_id not in self._backups: + return + self._backups.pop(backup_id) + self._manager.store.save() + + 
+@dataclass(kw_only=True) +class KnownBackup: + """Persistent backup data.""" + + backup_id: str + failed_agent_ids: list[str] + with_strategy_settings: bool + + def to_dict(self) -> StoredKnownBackup: + """Convert known backup to a dict.""" + return { + "backup_id": self.backup_id, + "failed_agent_ids": self.failed_agent_ids, + "with_strategy_settings": self.with_strategy_settings, + } + + +class StoredKnownBackup(TypedDict): + """Stored persistent backup data.""" + + backup_id: str + failed_agent_ids: list[str] + with_strategy_settings: bool + + +class CoreBackupReaderWriter(BackupReaderWriter): + """Class for reading and writing backups in core and container installations.""" + + _local_agent_id = f"{DOMAIN}.local" def __init__(self, hass: HomeAssistant) -> None: - """Initialize the backup manager.""" - super().__init__(hass=hass) - self.backup_dir = Path(hass.config.path("backups")) - self.loaded_backups = False + """Initialize the backup reader/writer.""" + self._hass = hass + self.temp_backup_dir = Path(hass.config.path("tmp_backups")) - async def load_backups(self) -> None: - """Load data of stored backup files.""" - backups = await self.hass.async_add_executor_job(self._read_backups) - LOGGER.debug("Loaded %s backups", len(backups)) - self.backups = backups - self.loaded_backups = True - - def _read_backups(self) -> dict[str, Backup]: - """Read backups from disk.""" - backups: dict[str, Backup] = {} - for backup_path in self.backup_dir.glob("*.tar"): - try: - with tarfile.open(backup_path, "r:", bufsize=BUF_SIZE) as backup_file: - if data_file := backup_file.extractfile("./backup.json"): - data = json_loads_object(data_file.read()) - backup = Backup( - slug=cast(str, data["slug"]), - name=cast(str, data["name"]), - date=cast(str, data["date"]), - path=backup_path, - size=round(backup_path.stat().st_size / 1_048_576, 2), - ) - backups[backup.slug] = backup - except (OSError, TarError, json.JSONDecodeError, KeyError) as err: - LOGGER.warning("Unable to read backup %s: %s", backup_path, err) - return backups - - async def async_get_backups(self, **kwargs: Any) -> dict[str, Backup]: - """Return backups.""" - if not self.loaded_backups: - await self.load_backups() - - return self.backups - - async def async_get_backup(self, *, slug: str, **kwargs: Any) -> Backup | None: - """Return a backup.""" - if not self.loaded_backups: - await self.load_backups() - - if not (backup := self.backups.get(slug)): - return None - - if not backup.path.exists(): - LOGGER.debug( - ( - "Removing tracked backup (%s) that does not exists on the expected" - " path %s" - ), - backup.slug, - backup.path, - ) - self.backups.pop(slug) - return None - - return backup - - async def async_remove_backup(self, *, slug: str, **kwargs: Any) -> None: - """Remove a backup.""" - if (backup := await self.async_get_backup(slug=slug)) is None: - return - - await self.hass.async_add_executor_job(backup.path.unlink, True) - LOGGER.debug("Removed backup located at %s", backup.path) - self.backups.pop(slug) - - async def async_receive_backup( + async def async_create_backup( self, *, - contents: aiohttp.BodyPartReader, - **kwargs: Any, - ) -> None: - """Receive and store a backup file from upload.""" - queue: SimpleQueue[tuple[bytes, asyncio.Future[None] | None] | None] = ( - SimpleQueue() - ) - temp_dir_handler = await self.hass.async_add_executor_job(TemporaryDirectory) - target_temp_file = Path( - temp_dir_handler.name, contents.filename or "backup.tar" + agent_ids: list[str], + backup_name: str, + include_addons: list[str] | 
None, + include_all_addons: bool, + include_database: bool, + include_folders: list[Folder] | None, + include_homeassistant: bool, + on_progress: Callable[[ManagerStateEvent], None], + password: str | None, + ) -> tuple[NewBackup, asyncio.Task[WrittenBackup]]: + """Initiate generating a backup.""" + date_str = dt_util.now().isoformat() + backup_id = _generate_backup_id(date_str, backup_name) + + if include_addons or include_all_addons or include_folders: + raise HomeAssistantError( + "Addons and folders are not supported by core backup" + ) + if not include_homeassistant: + raise HomeAssistantError("Home Assistant must be included in backup") + + backup_task = self._hass.async_create_task( + self._async_create_backup( + agent_ids=agent_ids, + backup_id=backup_id, + backup_name=backup_name, + include_database=include_database, + date_str=date_str, + on_progress=on_progress, + password=password, + ), + name="backup_manager_create_backup", + eager_start=False, # To ensure the task is not started before we return ) - def _sync_queue_consumer() -> None: - with target_temp_file.open("wb") as file_handle: - while True: - if (_chunk_future := queue.get()) is None: - break - _chunk, _future = _chunk_future - if _future is not None: - self.hass.loop.call_soon_threadsafe(_future.set_result, None) - file_handle.write(_chunk) + return (NewBackup(backup_job_id=backup_id), backup_task) - fut: asyncio.Future[None] | None = None - try: - fut = self.hass.async_add_executor_job(_sync_queue_consumer) - megabytes_sending = 0 - while chunk := await contents.read_chunk(BUF_SIZE): - megabytes_sending += 1 - if megabytes_sending % 5 != 0: - queue.put_nowait((chunk, None)) - continue - - chunk_future = self.hass.loop.create_future() - queue.put_nowait((chunk, chunk_future)) - await asyncio.wait( - (fut, chunk_future), - return_when=asyncio.FIRST_COMPLETED, - ) - if fut.done(): - # The executor job failed - break - - queue.put_nowait(None) # terminate queue consumer - finally: - if fut is not None: - await fut - - def _move_and_cleanup() -> None: - shutil.move(target_temp_file, self.backup_dir / target_temp_file.name) - temp_dir_handler.cleanup() - - await self.hass.async_add_executor_job(_move_and_cleanup) - await self.load_backups() - - async def async_create_backup(self, **kwargs: Any) -> Backup: + async def _async_create_backup( + self, + *, + agent_ids: list[str], + backup_id: str, + backup_name: str, + date_str: str, + include_database: bool, + on_progress: Callable[[ManagerStateEvent], None], + password: str | None, + ) -> WrittenBackup: """Generate a backup.""" - if self.backing_up: - raise HomeAssistantError("Backup already in progress") + manager = self._hass.data[DATA_MANAGER] + local_agent_tar_file_path = None + if self._local_agent_id in agent_ids: + local_agent = manager.local_backup_agents[self._local_agent_id] + local_agent_tar_file_path = local_agent.get_backup_path(backup_id) + + on_progress( + CreateBackupEvent( + stage=CreateBackupStage.HOME_ASSISTANT, + state=CreateBackupState.IN_PROGRESS, + ) + ) try: - self.backing_up = True - await self.async_pre_backup_actions() - backup_name = f"Core {HAVERSION}" - date_str = dt_util.now().isoformat() - slug = _generate_slug(date_str, backup_name) + # Inform integrations a backup is about to be made + await manager.async_pre_backup_actions() backup_data = { - "slug": slug, - "name": backup_name, - "date": date_str, - "type": "partial", - "folders": ["homeassistant"], - "homeassistant": {"version": HAVERSION}, "compressed": True, + "date": date_str, + 
"homeassistant": { + "exclude_database": not include_database, + "version": HAVERSION, + }, + "name": backup_name, + "protected": password is not None, + "slug": backup_id, + "type": "partial", + "version": 2, } - tar_file_path = Path(self.backup_dir, f"{backup_data['slug']}.tar") - size_in_bytes = await self.hass.async_add_executor_job( + + tar_file_path, size_in_bytes = await self._hass.async_add_executor_job( self._mkdir_and_generate_backup_contents, - tar_file_path, backup_data, + include_database, + password, + local_agent_tar_file_path, ) - backup = Backup( - slug=slug, - name=backup_name, + backup = AgentBackup( + addons=[], + backup_id=backup_id, + database_included=include_database, date=date_str, - path=tar_file_path, - size=round(size_in_bytes / 1_048_576, 2), + folders=[], + homeassistant_included=True, + homeassistant_version=HAVERSION, + name=backup_name, + protected=password is not None, + size=size_in_bytes, + ) + + async_add_executor_job = self._hass.async_add_executor_job + + async def send_backup() -> AsyncIterator[bytes]: + f = await async_add_executor_job(tar_file_path.open, "rb") + try: + while chunk := await async_add_executor_job(f.read, 2**20): + yield chunk + finally: + await async_add_executor_job(f.close) + + async def open_backup() -> AsyncIterator[bytes]: + return send_backup() + + async def remove_backup() -> None: + if local_agent_tar_file_path: + return + await async_add_executor_job(tar_file_path.unlink, True) + + return WrittenBackup( + backup=backup, open_stream=open_backup, release_stream=remove_backup ) - if self.loaded_backups: - self.backups[slug] = backup - LOGGER.debug("Generated new backup with slug %s", slug) - return backup finally: - self.backing_up = False - await self.async_post_backup_actions() + # Inform integrations the backup is done + await manager.async_post_backup_actions() def _mkdir_and_generate_backup_contents( self, - tar_file_path: Path, backup_data: dict[str, Any], - ) -> int: + database_included: bool, + password: str | None, + tar_file_path: Path | None, + ) -> tuple[Path, int]: """Generate backup contents and return the size.""" - if not self.backup_dir.exists(): - LOGGER.debug("Creating backup directory") - self.backup_dir.mkdir() + if not tar_file_path: + tar_file_path = self.temp_backup_dir / f"{backup_data['slug']}.tar" + make_backup_dir(tar_file_path.parent) + + excludes = EXCLUDE_FROM_BACKUP + if not database_included: + excludes = excludes + EXCLUDE_DATABASE_FROM_BACKUP outer_secure_tarfile = SecureTarFile( tar_file_path, "w", gzip=False, bufsize=BUF_SIZE @@ -355,37 +1064,136 @@ class BackupManager(BaseBackupManager): tar_info.mtime = int(time.time()) outer_secure_tarfile_tarfile.addfile(tar_info, fileobj=fileobj) with outer_secure_tarfile.create_inner_tar( - "./homeassistant.tar.gz", gzip=True + "./homeassistant.tar.gz", + gzip=True, + key=password_to_key(password) if password is not None else None, ) as core_tar: atomic_contents_add( tar_file=core_tar, - origin_path=Path(self.hass.config.path()), - excludes=EXCLUDE_FROM_BACKUP, + origin_path=Path(self._hass.config.path()), + excludes=excludes, arcname="data", ) + return (tar_file_path, tar_file_path.stat().st_size) - return tar_file_path.stat().st_size + async def async_receive_backup( + self, + *, + agent_ids: list[str], + stream: AsyncIterator[bytes], + suggested_filename: str, + ) -> WrittenBackup: + """Receive a backup.""" + temp_file = Path(self.temp_backup_dir, suggested_filename) - async def async_restore_backup(self, slug: str, **kwargs: Any) -> None: + 
async_add_executor_job = self._hass.async_add_executor_job + await async_add_executor_job(make_backup_dir, self.temp_backup_dir) + f = await async_add_executor_job(temp_file.open, "wb") + try: + async for chunk in stream: + await async_add_executor_job(f.write, chunk) + finally: + await async_add_executor_job(f.close) + + try: + backup = await async_add_executor_job(read_backup, temp_file) + except (OSError, tarfile.TarError, json.JSONDecodeError, KeyError) as err: + LOGGER.warning("Unable to parse backup %s: %s", temp_file, err) + raise + + manager = self._hass.data[DATA_MANAGER] + if self._local_agent_id in agent_ids: + local_agent = manager.local_backup_agents[self._local_agent_id] + tar_file_path = local_agent.get_backup_path(backup.backup_id) + await async_add_executor_job(shutil.move, temp_file, tar_file_path) + else: + tar_file_path = temp_file + + async def send_backup() -> AsyncIterator[bytes]: + f = await async_add_executor_job(tar_file_path.open, "rb") + try: + while chunk := await async_add_executor_job(f.read, 2**20): + yield chunk + finally: + await async_add_executor_job(f.close) + + async def open_backup() -> AsyncIterator[bytes]: + return send_backup() + + async def remove_backup() -> None: + if self._local_agent_id in agent_ids: + return + await async_add_executor_job(temp_file.unlink, True) + + return WrittenBackup( + backup=backup, open_stream=open_backup, release_stream=remove_backup + ) + + async def async_restore_backup( + self, + backup_id: str, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + *, + agent_id: str, + password: str | None, + restore_addons: list[str] | None, + restore_database: bool, + restore_folders: list[Folder] | None, + restore_homeassistant: bool, + ) -> None: """Restore a backup. This will write the restore information to .HA_RESTORE which will be handled during startup by the restore_backup module. 
""" - if (backup := await self.async_get_backup(slug=slug)) is None: - raise HomeAssistantError(f"Backup {slug} not found") + + if restore_addons or restore_folders: + raise HomeAssistantError( + "Addons and folders are not supported in core restore" + ) + if not restore_homeassistant and not restore_database: + raise HomeAssistantError( + "Home Assistant or database must be included in restore" + ) + + manager = self._hass.data[DATA_MANAGER] + if agent_id in manager.local_backup_agents: + local_agent = manager.local_backup_agents[agent_id] + path = local_agent.get_backup_path(backup_id) + remove_after_restore = False + else: + async_add_executor_job = self._hass.async_add_executor_job + path = self.temp_backup_dir / f"{backup_id}.tar" + stream = await open_stream() + await async_add_executor_job(make_backup_dir, self.temp_backup_dir) + f = await async_add_executor_job(path.open, "wb") + try: + async for chunk in stream: + await async_add_executor_job(f.write, chunk) + finally: + await async_add_executor_job(f.close) + + remove_after_restore = True def _write_restore_file() -> None: """Write the restore file.""" - Path(self.hass.config.path(RESTORE_BACKUP_FILE)).write_text( - json.dumps({"path": backup.path.as_posix()}), + Path(self._hass.config.path(RESTORE_BACKUP_FILE)).write_text( + json.dumps( + { + "path": path.as_posix(), + "password": password, + "remove_after_restore": remove_after_restore, + "restore_database": restore_database, + "restore_homeassistant": restore_homeassistant, + } + ), encoding="utf-8", ) - await self.hass.async_add_executor_job(_write_restore_file) - await self.hass.services.async_call("homeassistant", "restart", {}) + await self._hass.async_add_executor_job(_write_restore_file) + await self._hass.services.async_call("homeassistant", "restart", {}) -def _generate_slug(date: str, name: str) -> str: - """Generate a backup slug.""" +def _generate_backup_id(date: str, name: str) -> str: + """Generate a backup ID.""" return hashlib.sha1(f"{date} - {name}".lower().encode()).hexdigest()[:8] diff --git a/homeassistant/components/backup/manifest.json b/homeassistant/components/backup/manifest.json index 0a906bb6dfa..b399043e013 100644 --- a/homeassistant/components/backup/manifest.json +++ b/homeassistant/components/backup/manifest.json @@ -1,11 +1,12 @@ { "domain": "backup", "name": "Backup", + "after_dependencies": ["hassio"], "codeowners": ["@home-assistant/core"], "dependencies": ["http", "websocket_api"], "documentation": "https://www.home-assistant.io/integrations/backup", "integration_type": "system", "iot_class": "calculated", "quality_scale": "internal", - "requirements": ["securetar==2024.11.0"] + "requirements": ["cronsim==2.6", "securetar==2024.11.0"] } diff --git a/homeassistant/components/backup/models.py b/homeassistant/components/backup/models.py new file mode 100644 index 00000000000..6306d9f1fec --- /dev/null +++ b/homeassistant/components/backup/models.py @@ -0,0 +1,61 @@ +"""Models for the backup integration.""" + +from __future__ import annotations + +from dataclasses import asdict, dataclass +from enum import StrEnum +from typing import Any, Self + + +@dataclass(frozen=True, kw_only=True) +class AddonInfo: + """Addon information.""" + + name: str + slug: str + version: str + + +class Folder(StrEnum): + """Folder type.""" + + SHARE = "share" + ADDONS = "addons/local" + SSL = "ssl" + MEDIA = "media" + + +@dataclass(frozen=True, kw_only=True) +class AgentBackup: + """Base backup class.""" + + addons: list[AddonInfo] + backup_id: str + date: str + 
database_included: bool + folders: list[Folder] + homeassistant_included: bool + homeassistant_version: str | None # None if homeassistant_included is False + name: str + protected: bool + size: int + + def as_dict(self) -> dict: + """Return a dict representation of this backup.""" + return asdict(self) + + @classmethod + def from_dict(cls, data: dict[str, Any]) -> Self: + """Create an instance from a JSON serialization.""" + return cls( + addons=[AddonInfo(**addon) for addon in data["addons"]], + backup_id=data["backup_id"], + date=data["date"], + database_included=data["database_included"], + folders=[Folder(folder) for folder in data["folders"]], + homeassistant_included=data["homeassistant_included"], + homeassistant_version=data["homeassistant_version"], + name=data["name"], + protected=data["protected"], + size=data["size"], + ) diff --git a/homeassistant/components/backup/store.py b/homeassistant/components/backup/store.py new file mode 100644 index 00000000000..ddabead24f9 --- /dev/null +++ b/homeassistant/components/backup/store.py @@ -0,0 +1,52 @@ +"""Store backup configuration.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, TypedDict + +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.storage import Store + +from .const import DOMAIN + +if TYPE_CHECKING: + from .config import StoredBackupConfig + from .manager import BackupManager, StoredKnownBackup + +STORE_DELAY_SAVE = 30 +STORAGE_KEY = DOMAIN +STORAGE_VERSION = 1 + + +class StoredBackupData(TypedDict): + """Represent the stored backup config.""" + + backups: list[StoredKnownBackup] + config: StoredBackupConfig + + +class BackupStore: + """Store backup config.""" + + def __init__(self, hass: HomeAssistant, manager: BackupManager) -> None: + """Initialize the backup manager.""" + self._hass = hass + self._manager = manager + self._store: Store[StoredBackupData] = Store(hass, STORAGE_VERSION, STORAGE_KEY) + + async def load(self) -> StoredBackupData | None: + """Load the store.""" + return await self._store.async_load() + + @callback + def save(self) -> None: + """Save config.""" + self._store.async_delay_save(self._data_to_save, STORE_DELAY_SAVE) + + @callback + def _data_to_save(self) -> StoredBackupData: + """Return data to save.""" + return { + "backups": self._manager.known_backups.to_list(), + "config": self._manager.config.data.to_dict(), + } diff --git a/homeassistant/components/backup/util.py b/homeassistant/components/backup/util.py new file mode 100644 index 00000000000..1d8252cc30b --- /dev/null +++ b/homeassistant/components/backup/util.py @@ -0,0 +1,111 @@ +"""Local backup support for Core and Container installations.""" + +from __future__ import annotations + +import asyncio +from pathlib import Path +from queue import SimpleQueue +import tarfile +from typing import cast + +import aiohttp + +from homeassistant.core import HomeAssistant +from homeassistant.util.json import JsonObjectType, json_loads_object + +from .const import BUF_SIZE +from .models import AddonInfo, AgentBackup, Folder + + +def make_backup_dir(path: Path) -> None: + """Create a backup directory if it does not exist.""" + path.mkdir(exist_ok=True) + + +def read_backup(backup_path: Path) -> AgentBackup: + """Read a backup from disk.""" + + with tarfile.open(backup_path, "r:", bufsize=BUF_SIZE) as backup_file: + if not (data_file := backup_file.extractfile("./backup.json")): + raise KeyError("backup.json not found in tar file") + data = json_loads_object(data_file.read()) + addons = [ 
+            AddonInfo(
+                name=cast(str, addon["name"]),
+                slug=cast(str, addon["slug"]),
+                version=cast(str, addon["version"]),
+            )
+            for addon in cast(list[JsonObjectType], data.get("addons", []))
+        ]
+
+        folders = [
+            Folder(folder)
+            for folder in cast(list[str], data.get("folders", []))
+            if folder != "homeassistant"
+        ]
+
+        homeassistant_included = False
+        homeassistant_version: str | None = None
+        database_included = False
+        if (
+            homeassistant := cast(JsonObjectType, data.get("homeassistant"))
+        ) and "version" in homeassistant:
+            homeassistant_included = True
+            homeassistant_version = cast(str, homeassistant["version"])
+            database_included = not cast(
+                bool, homeassistant.get("exclude_database", False)
+            )
+
+        return AgentBackup(
+            addons=addons,
+            backup_id=cast(str, data["slug"]),
+            database_included=database_included,
+            date=cast(str, data["date"]),
+            folders=folders,
+            homeassistant_included=homeassistant_included,
+            homeassistant_version=homeassistant_version,
+            name=cast(str, data["name"]),
+            protected=cast(bool, data.get("protected", False)),
+            size=backup_path.stat().st_size,
+        )
+
+
+async def receive_file(
+    hass: HomeAssistant, contents: aiohttp.BodyPartReader, path: Path
+) -> None:
+    """Receive a file from a stream and write it to a file."""
+    queue: SimpleQueue[tuple[bytes, asyncio.Future[None] | None] | None] = SimpleQueue()
+
+    def _sync_queue_consumer() -> None:
+        with path.open("wb") as file_handle:
+            while True:
+                if (_chunk_future := queue.get()) is None:
+                    break
+                _chunk, _future = _chunk_future
+                if _future is not None:
+                    hass.loop.call_soon_threadsafe(_future.set_result, None)
+                file_handle.write(_chunk)
+
+    fut: asyncio.Future[None] | None = None
+    try:
+        fut = hass.async_add_executor_job(_sync_queue_consumer)
+        megabytes_sending = 0
+        while chunk := await contents.read_chunk(BUF_SIZE):
+            megabytes_sending += 1
+            if megabytes_sending % 5 != 0:
+                queue.put_nowait((chunk, None))
+                continue
+
+            chunk_future = hass.loop.create_future()
+            queue.put_nowait((chunk, chunk_future))
+            await asyncio.wait(
+                (fut, chunk_future),
+                return_when=asyncio.FIRST_COMPLETED,
+            )
+            if fut.done():
+                # The executor job failed
+                break
+
+        queue.put_nowait(None)  # terminate queue consumer
+    finally:
+        if fut is not None:
+            await fut
diff --git a/homeassistant/components/backup/websocket.py b/homeassistant/components/backup/websocket.py
index 3ac8a7ace3e..7dacc39f9ba 100644
--- a/homeassistant/components/backup/websocket.py
+++ b/homeassistant/components/backup/websocket.py
@@ -7,22 +7,31 @@ import voluptuous as vol
 from homeassistant.components import websocket_api
 from homeassistant.core import HomeAssistant, callback
 
+from .config import ScheduleState
 from .const import DATA_MANAGER, LOGGER
+from .manager import ManagerStateEvent
+from .models import Folder
 
 
 @callback
 def async_register_websocket_handlers(hass: HomeAssistant, with_hassio: bool) -> None:
     """Register websocket commands."""
+    websocket_api.async_register_command(hass, backup_agents_info)
+
     if with_hassio:
         websocket_api.async_register_command(hass, handle_backup_end)
         websocket_api.async_register_command(hass, handle_backup_start)
-        return
 
     websocket_api.async_register_command(hass, handle_details)
     websocket_api.async_register_command(hass, handle_info)
     websocket_api.async_register_command(hass, handle_create)
-    websocket_api.async_register_command(hass, handle_remove)
+    websocket_api.async_register_command(hass, handle_create_with_strategy_settings)
+    websocket_api.async_register_command(hass, handle_delete)
     websocket_api.async_register_command(hass, 
handle_restore) + websocket_api.async_register_command(hass, handle_subscribe_events) + + websocket_api.async_register_command(hass, handle_config_info) + websocket_api.async_register_command(hass, handle_config_update) @websocket_api.require_admin @@ -35,12 +44,16 @@ async def handle_info( ) -> None: """List all stored backups.""" manager = hass.data[DATA_MANAGER] - backups = await manager.async_get_backups() + backups, agent_errors = await manager.async_get_backups() connection.send_result( msg["id"], { + "agent_errors": { + agent_id: str(err) for agent_id, err in agent_errors.items() + }, "backups": list(backups.values()), - "backing_up": manager.backing_up, + "last_attempted_strategy_backup": manager.config.data.last_attempted_strategy_backup, + "last_completed_strategy_backup": manager.config.data.last_completed_strategy_backup, }, ) @@ -49,7 +62,7 @@ async def handle_info( @websocket_api.websocket_command( { vol.Required("type"): "backup/details", - vol.Required("slug"): str, + vol.Required("backup_id"): str, } ) @websocket_api.async_response @@ -58,11 +71,16 @@ async def handle_details( connection: websocket_api.ActiveConnection, msg: dict[str, Any], ) -> None: - """Get backup details for a specific slug.""" - backup = await hass.data[DATA_MANAGER].async_get_backup(slug=msg["slug"]) + """Get backup details for a specific backup.""" + backup, agent_errors = await hass.data[DATA_MANAGER].async_get_backup( + msg["backup_id"] + ) connection.send_result( msg["id"], { + "agent_errors": { + agent_id: str(err) for agent_id, err in agent_errors.items() + }, "backup": backup, }, ) @@ -71,26 +89,39 @@ async def handle_details( @websocket_api.require_admin @websocket_api.websocket_command( { - vol.Required("type"): "backup/remove", - vol.Required("slug"): str, + vol.Required("type"): "backup/delete", + vol.Required("backup_id"): str, } ) @websocket_api.async_response -async def handle_remove( +async def handle_delete( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any], ) -> None: - """Remove a backup.""" - await hass.data[DATA_MANAGER].async_remove_backup(slug=msg["slug"]) - connection.send_result(msg["id"]) + """Delete a backup.""" + agent_errors = await hass.data[DATA_MANAGER].async_delete_backup(msg["backup_id"]) + connection.send_result( + msg["id"], + { + "agent_errors": { + agent_id: str(err) for agent_id, err in agent_errors.items() + } + }, + ) @websocket_api.require_admin @websocket_api.websocket_command( { vol.Required("type"): "backup/restore", - vol.Required("slug"): str, + vol.Required("backup_id"): str, + vol.Required("agent_id"): str, + vol.Optional("password"): str, + vol.Optional("restore_addons"): [str], + vol.Optional("restore_database", default=True): bool, + vol.Optional("restore_folders"): [vol.Coerce(Folder)], + vol.Optional("restore_homeassistant", default=True): bool, } ) @websocket_api.async_response @@ -100,12 +131,32 @@ async def handle_restore( msg: dict[str, Any], ) -> None: """Restore a backup.""" - await hass.data[DATA_MANAGER].async_restore_backup(msg["slug"]) + await hass.data[DATA_MANAGER].async_restore_backup( + msg["backup_id"], + agent_id=msg["agent_id"], + password=msg.get("password"), + restore_addons=msg.get("restore_addons"), + restore_database=msg["restore_database"], + restore_folders=msg.get("restore_folders"), + restore_homeassistant=msg["restore_homeassistant"], + ) connection.send_result(msg["id"]) @websocket_api.require_admin -@websocket_api.websocket_command({vol.Required("type"): "backup/generate"}) 
+@websocket_api.websocket_command( + { + vol.Required("type"): "backup/generate", + vol.Required("agent_ids"): [str], + vol.Optional("include_addons"): [str], + vol.Optional("include_all_addons", default=False): bool, + vol.Optional("include_database", default=True): bool, + vol.Optional("include_folders"): [vol.Coerce(Folder)], + vol.Optional("include_homeassistant", default=True): bool, + vol.Optional("name"): str, + vol.Optional("password"): str, + } +) @websocket_api.async_response async def handle_create( hass: HomeAssistant, @@ -113,7 +164,46 @@ async def handle_create( msg: dict[str, Any], ) -> None: """Generate a backup.""" - backup = await hass.data[DATA_MANAGER].async_create_backup() + + backup = await hass.data[DATA_MANAGER].async_initiate_backup( + agent_ids=msg["agent_ids"], + include_addons=msg.get("include_addons"), + include_all_addons=msg["include_all_addons"], + include_database=msg["include_database"], + include_folders=msg.get("include_folders"), + include_homeassistant=msg["include_homeassistant"], + name=msg.get("name"), + password=msg.get("password"), + ) + connection.send_result(msg["id"], backup) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "backup/generate_with_strategy_settings", + } +) +@websocket_api.async_response +async def handle_create_with_strategy_settings( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Generate a backup with stored settings.""" + + config_data = hass.data[DATA_MANAGER].config.data + backup = await hass.data[DATA_MANAGER].async_initiate_backup( + agent_ids=config_data.create_backup.agent_ids, + include_addons=config_data.create_backup.include_addons, + include_all_addons=config_data.create_backup.include_all_addons, + include_database=config_data.create_backup.include_database, + include_folders=config_data.create_backup.include_folders, + include_homeassistant=True, # always include HA + name=config_data.create_backup.name, + password=config_data.create_backup.password, + with_strategy_settings=True, + ) connection.send_result(msg["id"], backup) @@ -127,7 +217,6 @@ async def handle_backup_start( ) -> None: """Backup start notification.""" manager = hass.data[DATA_MANAGER] - manager.backing_up = True LOGGER.debug("Backup start notification") try: @@ -149,7 +238,6 @@ async def handle_backup_end( ) -> None: """Backup end notification.""" manager = hass.data[DATA_MANAGER] - manager.backing_up = False LOGGER.debug("Backup end notification") try: @@ -159,3 +247,97 @@ async def handle_backup_end( return connection.send_result(msg["id"]) + + +@websocket_api.require_admin +@websocket_api.websocket_command({vol.Required("type"): "backup/agents/info"}) +@websocket_api.async_response +async def backup_agents_info( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Return backup agents info.""" + manager = hass.data[DATA_MANAGER] + connection.send_result( + msg["id"], + { + "agents": [{"agent_id": agent_id} for agent_id in manager.backup_agents], + }, + ) + + +@websocket_api.require_admin +@websocket_api.websocket_command({vol.Required("type"): "backup/config/info"}) +@websocket_api.async_response +async def handle_config_info( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Send the stored backup config.""" + manager = hass.data[DATA_MANAGER] + connection.send_result( + msg["id"], + { + "config": 
manager.config.data.to_dict(), + }, + ) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "backup/config/update", + vol.Optional("create_backup"): vol.Schema( + { + vol.Optional("agent_ids"): vol.All(list[str]), + vol.Optional("include_addons"): vol.Any(list[str], None), + vol.Optional("include_all_addons"): bool, + vol.Optional("include_database"): bool, + vol.Optional("include_folders"): vol.Any([vol.Coerce(Folder)], None), + vol.Optional("name"): vol.Any(str, None), + vol.Optional("password"): vol.Any(str, None), + }, + ), + vol.Optional("retention"): vol.Schema( + { + vol.Optional("copies"): vol.Any(int, None), + vol.Optional("days"): vol.Any(int, None), + }, + ), + vol.Optional("schedule"): vol.All(str, vol.Coerce(ScheduleState)), + } +) +@websocket_api.async_response +async def handle_config_update( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Update the stored backup config.""" + manager = hass.data[DATA_MANAGER] + changes = dict(msg) + changes.pop("id") + changes.pop("type") + await manager.config.update(**changes) + connection.send_result(msg["id"]) + + +@websocket_api.require_admin +@websocket_api.websocket_command({vol.Required("type"): "backup/subscribe_events"}) +@websocket_api.async_response +async def handle_subscribe_events( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Subscribe to backup events.""" + + def on_event(event: ManagerStateEvent) -> None: + connection.send_message(websocket_api.event_message(msg["id"], event)) + + manager = hass.data[DATA_MANAGER] + on_event(manager.last_event) + connection.subscriptions[msg["id"]] = manager.async_subscribe_events(on_event) + connection.send_result(msg["id"]) diff --git a/homeassistant/components/cloud/backup.py b/homeassistant/components/cloud/backup.py new file mode 100644 index 00000000000..58ecc7a78fd --- /dev/null +++ b/homeassistant/components/cloud/backup.py @@ -0,0 +1,196 @@ +"""Backup platform for the cloud integration.""" + +from __future__ import annotations + +import base64 +from collections.abc import AsyncIterator, Callable, Coroutine +import hashlib +from typing import Any, Self + +from aiohttp import ClientError, StreamReader +from hass_nabucasa import Cloud, CloudError +from hass_nabucasa.cloud_api import ( + async_files_delete_file, + async_files_download_details, + async_files_list, + async_files_upload_details, +) + +from homeassistant.components.backup import AgentBackup, BackupAgent, BackupAgentError +from homeassistant.core import HomeAssistant, callback + +from .client import CloudClient +from .const import DATA_CLOUD, DOMAIN + +_STORAGE_BACKUP = "backup" + + +async def _b64md5(stream: AsyncIterator[bytes]) -> str: + """Calculate the MD5 hash of a file.""" + file_hash = hashlib.md5() + async for chunk in stream: + file_hash.update(chunk) + return base64.b64encode(file_hash.digest()).decode() + + +async def async_get_backup_agents( + hass: HomeAssistant, + **kwargs: Any, +) -> list[BackupAgent]: + """Return the cloud backup agent.""" + return [CloudBackupAgent(hass=hass, cloud=hass.data[DATA_CLOUD])] + + +class ChunkAsyncStreamIterator: + """Async iterator for chunked streams. + + Based on aiohttp.streams.ChunkTupleAsyncStreamIterator, but yields + bytes instead of tuple[bytes, bool]. 
+ """ + + __slots__ = ("_stream",) + + def __init__(self, stream: StreamReader) -> None: + """Initialize.""" + self._stream = stream + + def __aiter__(self) -> Self: + """Iterate.""" + return self + + async def __anext__(self) -> bytes: + """Yield next chunk.""" + rv = await self._stream.readchunk() + if rv == (b"", False): + raise StopAsyncIteration + return rv[0] + + +class CloudBackupAgent(BackupAgent): + """Cloud backup agent.""" + + name = DOMAIN + + def __init__(self, hass: HomeAssistant, cloud: Cloud[CloudClient]) -> None: + """Initialize the cloud backup sync agent.""" + super().__init__() + self._cloud = cloud + self._hass = hass + + @callback + def _get_backup_filename(self) -> str: + """Return the backup filename.""" + return f"{self._cloud.client.prefs.instance_id}.tar" + + async def async_download_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AsyncIterator[bytes]: + """Download a backup file. + + :param backup_id: The ID of the backup that was returned in async_list_backups. + :return: An async iterator that yields bytes. + """ + if not await self.async_get_backup(backup_id): + raise BackupAgentError("Backup not found") + + try: + details = await async_files_download_details( + self._cloud, + storage_type=_STORAGE_BACKUP, + filename=self._get_backup_filename(), + ) + except (ClientError, CloudError) as err: + raise BackupAgentError("Failed to get download details") from err + + try: + resp = await self._cloud.websession.get(details["url"]) + resp.raise_for_status() + except ClientError as err: + raise BackupAgentError("Failed to download backup") from err + + return ChunkAsyncStreamIterator(resp.content) + + async def async_upload_backup( + self, + *, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + backup: AgentBackup, + **kwargs: Any, + ) -> None: + """Upload a backup. + + :param open_stream: A function returning an async iterator that yields bytes. + :param backup: Metadata about the backup that should be uploaded. + """ + if not backup.protected: + raise BackupAgentError("Cloud backups must be protected") + + base64md5hash = await _b64md5(await open_stream()) + + try: + details = await async_files_upload_details( + self._cloud, + storage_type=_STORAGE_BACKUP, + filename=self._get_backup_filename(), + metadata=backup.as_dict(), + size=backup.size, + base64md5hash=base64md5hash, + ) + except (ClientError, CloudError) as err: + raise BackupAgentError("Failed to get upload details") from err + + try: + upload_status = await self._cloud.websession.put( + details["url"], + data=await open_stream(), + headers=details["headers"] | {"content-length": str(backup.size)}, + ) + upload_status.raise_for_status() + except ClientError as err: + raise BackupAgentError("Failed to upload backup") from err + + async def async_delete_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> None: + """Delete a backup file. + + :param backup_id: The ID of the backup that was returned in async_list_backups. 
+ """ + if not await self.async_get_backup(backup_id): + raise BackupAgentError("Backup not found") + + try: + await async_files_delete_file( + self._cloud, + storage_type=_STORAGE_BACKUP, + filename=self._get_backup_filename(), + ) + except (ClientError, CloudError) as err: + raise BackupAgentError("Failed to delete backup") from err + + async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: + """List backups.""" + try: + backups = await async_files_list(self._cloud, storage_type=_STORAGE_BACKUP) + except (ClientError, CloudError) as err: + raise BackupAgentError("Failed to list backups") from err + + return [AgentBackup.from_dict(backup["Metadata"]) for backup in backups] + + async def async_get_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AgentBackup | None: + """Return a backup.""" + backups = await self.async_list_backups() + + for backup in backups: + if backup.backup_id == backup_id: + return backup + + return None diff --git a/homeassistant/components/cloud/manifest.json b/homeassistant/components/cloud/manifest.json index 661edb67762..48f2153e86f 100644 --- a/homeassistant/components/cloud/manifest.json +++ b/homeassistant/components/cloud/manifest.json @@ -1,7 +1,12 @@ { "domain": "cloud", "name": "Home Assistant Cloud", - "after_dependencies": ["assist_pipeline", "google_assistant", "alexa"], + "after_dependencies": [ + "alexa", + "assist_pipeline", + "backup", + "google_assistant" + ], "codeowners": ["@home-assistant/cloud"], "dependencies": ["auth", "http", "repairs", "webhook"], "documentation": "https://www.home-assistant.io/integrations/cloud", diff --git a/homeassistant/components/hassio/backup.py b/homeassistant/components/hassio/backup.py new file mode 100644 index 00000000000..f7f66f6cecc --- /dev/null +++ b/homeassistant/components/hassio/backup.py @@ -0,0 +1,365 @@ +"""Backup functionality for supervised installations.""" + +from __future__ import annotations + +import asyncio +from collections.abc import AsyncIterator, Callable, Coroutine, Mapping +from pathlib import Path +from typing import Any, cast + +from aiohasupervisor.exceptions import SupervisorBadRequestError +from aiohasupervisor.models import ( + backups as supervisor_backups, + mounts as supervisor_mounts, +) + +from homeassistant.components.backup import ( + DATA_MANAGER, + AddonInfo, + AgentBackup, + BackupAgent, + BackupReaderWriter, + CreateBackupEvent, + Folder, + NewBackup, + WrittenBackup, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.dispatcher import async_dispatcher_connect + +from .const import DOMAIN, EVENT_SUPERVISOR_EVENT +from .handler import get_supervisor_client + +LOCATION_CLOUD_BACKUP = ".cloud_backup" + + +async def async_get_backup_agents( + hass: HomeAssistant, + **kwargs: Any, +) -> list[BackupAgent]: + """Return the hassio backup agents.""" + client = get_supervisor_client(hass) + mounts = await client.mounts.info() + agents: list[BackupAgent] = [SupervisorBackupAgent(hass, "local", None)] + for mount in mounts.mounts: + if mount.usage is not supervisor_mounts.MountUsage.BACKUP: + continue + agents.append(SupervisorBackupAgent(hass, mount.name, mount.name)) + return agents + + +def _backup_details_to_agent_backup( + details: supervisor_backups.BackupComplete, +) -> AgentBackup: + """Convert a supervisor backup details object to an agent backup.""" + homeassistant_included = details.homeassistant is not None + if not homeassistant_included: + 
database_included = False + else: + database_included = details.homeassistant_exclude_database is False + addons = [ + AddonInfo(name=addon.name, slug=addon.slug, version=addon.version) + for addon in details.addons + ] + return AgentBackup( + addons=addons, + backup_id=details.slug, + database_included=database_included, + date=details.date.isoformat(), + folders=[Folder(folder) for folder in details.folders], + homeassistant_included=homeassistant_included, + homeassistant_version=details.homeassistant, + name=details.name, + protected=details.protected, + size=details.size_bytes, + ) + + +class SupervisorBackupAgent(BackupAgent): + """Backup agent for supervised installations.""" + + def __init__(self, hass: HomeAssistant, name: str, location: str | None) -> None: + """Initialize the backup agent.""" + super().__init__() + self._hass = hass + self._backup_dir = Path("/backups") + self._client = get_supervisor_client(hass) + self.name = name + self.location = location + + async def async_download_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AsyncIterator[bytes]: + """Download a backup file.""" + return await self._client.backups.download_backup(backup_id) + + async def async_upload_backup( + self, + *, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + backup: AgentBackup, + **kwargs: Any, + ) -> None: + """Upload a backup. + + Not required for supervisor, the SupervisorBackupReaderWriter stores files. + """ + + async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: + """List backups.""" + backup_list = await self._client.backups.list() + result = [] + for backup in backup_list: + if not backup.locations or self.location not in backup.locations: + continue + details = await self._client.backups.backup_info(backup.slug) + result.append(_backup_details_to_agent_backup(details)) + return result + + async def async_get_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AgentBackup | None: + """Return a backup.""" + details = await self._client.backups.backup_info(backup_id) + if self.location not in details.locations: + return None + return _backup_details_to_agent_backup(details) + + async def async_delete_backup(self, backup_id: str, **kwargs: Any) -> None: + """Remove a backup.""" + try: + await self._client.backups.remove_backup(backup_id) + except SupervisorBadRequestError as err: + if err.args[0] != "Backup does not exist": + raise + + +class SupervisorBackupReaderWriter(BackupReaderWriter): + """Class for reading and writing backups in supervised installations.""" + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize the backup reader/writer.""" + self._hass = hass + self._client = get_supervisor_client(hass) + + async def async_create_backup( + self, + *, + agent_ids: list[str], + backup_name: str, + include_addons: list[str] | None, + include_all_addons: bool, + include_database: bool, + include_folders: list[Folder] | None, + include_homeassistant: bool, + on_progress: Callable[[CreateBackupEvent], None], + password: str | None, + ) -> tuple[NewBackup, asyncio.Task[WrittenBackup]]: + """Create a backup.""" + manager = self._hass.data[DATA_MANAGER] + + include_addons_set = set(include_addons) if include_addons else None + include_folders_set = ( + {supervisor_backups.Folder(folder) for folder in include_folders} + if include_folders + else None + ) + + hassio_agents: list[SupervisorBackupAgent] = [ + cast(SupervisorBackupAgent, manager.backup_agents[agent_id]) + for agent_id in agent_ids + if 
agent_id.startswith(DOMAIN) + ] + locations = {agent.location for agent in hassio_agents} + + backup = await self._client.backups.partial_backup( + supervisor_backups.PartialBackupOptions( + addons=include_addons_set, + folders=include_folders_set, + homeassistant=include_homeassistant, + name=backup_name, + password=password, + compressed=True, + location=locations or LOCATION_CLOUD_BACKUP, + homeassistant_exclude_database=not include_database, + background=True, + ) + ) + backup_task = self._hass.async_create_task( + self._async_wait_for_backup( + backup, remove_after_upload=not bool(locations) + ), + name="backup_manager_create_backup", + eager_start=False, # To ensure the task is not started before we return + ) + + return (NewBackup(backup_job_id=backup.job_id), backup_task) + + async def _async_wait_for_backup( + self, backup: supervisor_backups.NewBackup, *, remove_after_upload: bool + ) -> WrittenBackup: + """Wait for a backup to complete.""" + backup_complete = asyncio.Event() + backup_id: str | None = None + + @callback + def on_progress(data: Mapping[str, Any]) -> None: + """Handle backup progress.""" + nonlocal backup_id + if data.get("done") is True: + backup_id = data.get("reference") + backup_complete.set() + + try: + unsub = self._async_listen_job_events(backup.job_id, on_progress) + await backup_complete.wait() + finally: + unsub() + if not backup_id: + raise HomeAssistantError("Backup failed") + + async def open_backup() -> AsyncIterator[bytes]: + return await self._client.backups.download_backup(backup_id) + + async def remove_backup() -> None: + if not remove_after_upload: + return + await self._client.backups.remove_backup(backup_id) + + details = await self._client.backups.backup_info(backup_id) + + return WrittenBackup( + backup=_backup_details_to_agent_backup(details), + open_stream=open_backup, + release_stream=remove_backup, + ) + + async def async_receive_backup( + self, + *, + agent_ids: list[str], + stream: AsyncIterator[bytes], + suggested_filename: str, + ) -> WrittenBackup: + """Receive a backup.""" + manager = self._hass.data[DATA_MANAGER] + + hassio_agents: list[SupervisorBackupAgent] = [ + cast(SupervisorBackupAgent, manager.backup_agents[agent_id]) + for agent_id in agent_ids + if agent_id.startswith(DOMAIN) + ] + locations = {agent.location for agent in hassio_agents} + + backup_id = await self._client.backups.upload_backup( + stream, + supervisor_backups.UploadBackupOptions( + location=locations or {LOCATION_CLOUD_BACKUP} + ), + ) + + async def open_backup() -> AsyncIterator[bytes]: + return await self._client.backups.download_backup(backup_id) + + async def remove_backup() -> None: + if locations: + return + await self._client.backups.remove_backup(backup_id) + + details = await self._client.backups.backup_info(backup_id) + + return WrittenBackup( + backup=_backup_details_to_agent_backup(details), + open_stream=open_backup, + release_stream=remove_backup, + ) + + async def async_restore_backup( + self, + backup_id: str, + *, + agent_id: str, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + password: str | None, + restore_addons: list[str] | None, + restore_database: bool, + restore_folders: list[Folder] | None, + restore_homeassistant: bool, + ) -> None: + """Restore a backup.""" + if restore_homeassistant and not restore_database: + raise HomeAssistantError("Cannot restore Home Assistant without database") + if not restore_homeassistant and restore_database: + raise HomeAssistantError("Cannot restore database without Home 
Assistant") + restore_addons_set = set(restore_addons) if restore_addons else None + restore_folders_set = ( + {supervisor_backups.Folder(folder) for folder in restore_folders} + if restore_folders + else None + ) + + if not agent_id.startswith(DOMAIN): + # Download the backup to the supervisor. Supervisor will clean up the backup + # two days after the restore is done. + await self.async_receive_backup( + agent_ids=[], + stream=await open_stream(), + suggested_filename=f"{backup_id}.tar", + ) + + job = await self._client.backups.partial_restore( + backup_id, + supervisor_backups.PartialRestoreOptions( + addons=restore_addons_set, + folders=restore_folders_set, + homeassistant=restore_homeassistant, + password=password, + background=True, + ), + ) + + restore_complete = asyncio.Event() + + @callback + def on_progress(data: Mapping[str, Any]) -> None: + """Handle backup progress.""" + if data.get("done") is True: + restore_complete.set() + + try: + unsub = self._async_listen_job_events(job.job_id, on_progress) + await restore_complete.wait() + finally: + unsub() + + @callback + def _async_listen_job_events( + self, job_id: str, on_event: Callable[[Mapping[str, Any]], None] + ) -> Callable[[], None]: + """Listen for job events.""" + + @callback + def unsub() -> None: + """Unsubscribe from job events.""" + unsub_signal() + + @callback + def handle_signal(data: Mapping[str, Any]) -> None: + """Handle a job signal.""" + if ( + data.get("event") != "job" + or not (event_data := data.get("data")) + or event_data.get("uuid") != job_id + ): + return + on_event(event_data) + + unsub_signal = async_dispatcher_connect( + self._hass, EVENT_SUPERVISOR_EVENT, handle_signal + ) + return unsub diff --git a/homeassistant/components/hassio/manifest.json b/homeassistant/components/hassio/manifest.json index 31fa27a92c4..8fe124e763c 100644 --- a/homeassistant/components/hassio/manifest.json +++ b/homeassistant/components/hassio/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/hassio", "iot_class": "local_polling", "quality_scale": "internal", - "requirements": ["aiohasupervisor==0.2.1"], + "requirements": ["aiohasupervisor==0.2.2b0"], "single_config_entry": true } diff --git a/homeassistant/components/kitchen_sink/backup.py b/homeassistant/components/kitchen_sink/backup.py new file mode 100644 index 00000000000..02c61ff4de6 --- /dev/null +++ b/homeassistant/components/kitchen_sink/backup.py @@ -0,0 +1,92 @@ +"""Backup platform for the kitchen_sink integration.""" + +from __future__ import annotations + +import asyncio +from collections.abc import AsyncIterator, Callable, Coroutine +import logging +from typing import Any + +from homeassistant.components.backup import AddonInfo, AgentBackup, BackupAgent, Folder +from homeassistant.core import HomeAssistant + +LOGGER = logging.getLogger(__name__) + + +async def async_get_backup_agents( + hass: HomeAssistant, +) -> list[BackupAgent]: + """Register the backup agents.""" + return [KitchenSinkBackupAgent("syncer")] + + +class KitchenSinkBackupAgent(BackupAgent): + """Kitchen sink backup agent.""" + + def __init__(self, name: str) -> None: + """Initialize the kitchen sink backup sync agent.""" + super().__init__() + self.name = name + self._uploads = [ + AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id="abc123", + database_included=False, + date="1970-01-01T00:00:00Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + 
name="Kitchen sink syncer", + protected=False, + size=1234, + ) + ] + + async def async_download_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AsyncIterator[bytes]: + """Download a backup file.""" + LOGGER.info("Downloading backup %s", backup_id) + reader = asyncio.StreamReader() + reader.feed_data(b"backup data") + reader.feed_eof() + return reader + + async def async_upload_backup( + self, + *, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + backup: AgentBackup, + **kwargs: Any, + ) -> None: + """Upload a backup.""" + LOGGER.info("Uploading backup %s %s", backup.backup_id, backup) + self._uploads.append(backup) + + async def async_delete_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> None: + """Delete a backup file.""" + self._uploads = [ + upload for upload in self._uploads if upload.backup_id != backup_id + ] + LOGGER.info("Deleted backup %s", backup_id) + + async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: + """List synced backups.""" + return self._uploads + + async def async_get_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AgentBackup | None: + """Return a backup.""" + for backup in self._uploads: + if backup.backup_id == backup_id: + return backup + return None diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 726dad56ccb..e4abf3ab678 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -3,7 +3,7 @@ aiodhcpwatcher==1.0.2 aiodiscover==2.1.0 aiodns==3.2.0 -aiohasupervisor==0.2.1 +aiohasupervisor==0.2.2b0 aiohttp-fast-zlib==0.2.0 aiohttp==3.11.10 aiohttp_cors==0.7.0 diff --git a/pyproject.toml b/pyproject.toml index 5239874e2f6..c40f8bd0d01 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ dependencies = [ # Integrations may depend on hassio integration without listing it to # change behavior based on presence of supervisor. 
Deprecated with #127228 # Lib can be removed with 2025.11 - "aiohasupervisor==0.2.1", + "aiohasupervisor==0.2.2b0", "aiohttp==3.11.10", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.2.0", diff --git a/requirements.txt b/requirements.txt index 7ed445c6b65..9ef9f0e44f2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ # Home Assistant Core aiodns==3.2.0 -aiohasupervisor==0.2.1 +aiohasupervisor==0.2.2b0 aiohttp==3.11.10 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index c6ab1e2dfae..661ce5876a9 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -262,7 +262,7 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.hassio -aiohasupervisor==0.2.1 +aiohasupervisor==0.2.2b0 # homeassistant.components.homekit_controller aiohomekit==3.2.7 @@ -704,6 +704,7 @@ connect-box==0.3.1 # homeassistant.components.xiaomi_miio construct==2.10.68 +# homeassistant.components.backup # homeassistant.components.utility_meter cronsim==2.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f9ed2bebf99..c959d83723c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -247,7 +247,7 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.hassio -aiohasupervisor==0.2.1 +aiohasupervisor==0.2.2b0 # homeassistant.components.homekit_controller aiohomekit==3.2.7 @@ -600,6 +600,7 @@ colorthief==0.2.1 # homeassistant.components.xiaomi_miio construct==2.10.68 +# homeassistant.components.backup # homeassistant.components.utility_meter cronsim==2.6 diff --git a/tests/components/backup/common.py b/tests/components/backup/common.py index 70b33d2de3f..133a2602192 100644 --- a/tests/components/backup/common.py +++ b/tests/components/backup/common.py @@ -2,29 +2,162 @@ from __future__ import annotations +from collections.abc import AsyncIterator, Callable, Coroutine from pathlib import Path -from unittest.mock import patch +from typing import Any +from unittest.mock import AsyncMock, Mock, patch -from homeassistant.components.backup import DOMAIN -from homeassistant.components.backup.manager import Backup +from homeassistant.components.backup import ( + DOMAIN, + AddonInfo, + AgentBackup, + BackupAgent, + BackupAgentPlatformProtocol, + Folder, +) +from homeassistant.components.backup.const import DATA_MANAGER from homeassistant.core import HomeAssistant from homeassistant.helpers.typing import ConfigType from homeassistant.setup import async_setup_component -TEST_BACKUP = Backup( - slug="abc123", - name="Test", +from tests.common import MockPlatform, mock_platform + +LOCAL_AGENT_ID = f"{DOMAIN}.local" + +TEST_BACKUP_ABC123 = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id="abc123", + database_included=True, date="1970-01-01T00:00:00.000Z", - path=Path("abc123.tar"), - size=0.0, + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=False, + size=0, ) +TEST_BACKUP_PATH_ABC123 = Path("abc123.tar") + +TEST_BACKUP_DEF456 = AgentBackup( + addons=[], + backup_id="def456", + database_included=False, + date="1980-01-01T00:00:00.000Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test 2", + protected=False, + size=1, +) + +TEST_DOMAIN = "test" + + +class BackupAgentTest(BackupAgent): + """Test backup agent.""" + + def __init__(self, name: str, backups: list[AgentBackup] | None = None) -> None: + 
"""Initialize the backup agent.""" + self.name = name + if backups is None: + backups = [ + AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id="abc123", + database_included=True, + date="1970-01-01T00:00:00Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=False, + size=13, + ) + ] + + self._backup_data: bytearray | None = None + self._backups = {backup.backup_id: backup for backup in backups} + + async def async_download_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AsyncIterator[bytes]: + """Download a backup file.""" + return AsyncMock(spec_set=["__aiter__"]) + + async def async_upload_backup( + self, + *, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + backup: AgentBackup, + **kwargs: Any, + ) -> None: + """Upload a backup.""" + self._backups[backup.backup_id] = backup + backup_stream = await open_stream() + self._backup_data = bytearray() + async for chunk in backup_stream: + self._backup_data += chunk + + async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: + """List backups.""" + return list(self._backups.values()) + + async def async_get_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AgentBackup | None: + """Return a backup.""" + return self._backups.get(backup_id) + + async def async_delete_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> None: + """Delete a backup file.""" async def setup_backup_integration( hass: HomeAssistant, with_hassio: bool = False, configuration: ConfigType | None = None, + *, + backups: dict[str, list[AgentBackup]] | None = None, + remote_agents: list[str] | None = None, ) -> bool: """Set up the Backup integration.""" - with patch("homeassistant.components.backup.is_hassio", return_value=with_hassio): - return await async_setup_component(hass, DOMAIN, configuration or {}) + with ( + patch("homeassistant.components.backup.is_hassio", return_value=with_hassio), + patch( + "homeassistant.components.backup.backup.is_hassio", return_value=with_hassio + ), + ): + remote_agents = remote_agents or [] + platform = Mock( + async_get_backup_agents=AsyncMock( + return_value=[BackupAgentTest(agent, []) for agent in remote_agents] + ), + spec_set=BackupAgentPlatformProtocol, + ) + + mock_platform(hass, f"{TEST_DOMAIN}.backup", platform or MockPlatform()) + assert await async_setup_component(hass, TEST_DOMAIN, {}) + + result = await async_setup_component(hass, DOMAIN, configuration or {}) + await hass.async_block_till_done() + if not backups: + return result + + for agent_id, agent_backups in backups.items(): + if with_hassio and agent_id == LOCAL_AGENT_ID: + continue + agent = hass.data[DATA_MANAGER].backup_agents[agent_id] + agent._backups = {backups.backup_id: backups for backups in agent_backups} + if agent_id == LOCAL_AGENT_ID: + agent._loaded_backups = True + + return result diff --git a/tests/components/backup/conftest.py b/tests/components/backup/conftest.py new file mode 100644 index 00000000000..7ccfcc4e0f0 --- /dev/null +++ b/tests/components/backup/conftest.py @@ -0,0 +1,97 @@ +"""Test fixtures for the Backup integration.""" + +from __future__ import annotations + +from collections.abc import Generator +from pathlib import Path +from unittest.mock import MagicMock, Mock, patch + +import pytest + +from homeassistant.core import HomeAssistant + +from .common import TEST_BACKUP_PATH_ABC123 + + +@pytest.fixture(name="mocked_json_bytes") +def 
mocked_json_bytes_fixture() -> Generator[Mock]: + """Mock json_bytes.""" + with patch( + "homeassistant.components.backup.manager.json_bytes", + return_value=b"{}", # Empty JSON + ) as mocked_json_bytes: + yield mocked_json_bytes + + +@pytest.fixture(name="mocked_tarfile") +def mocked_tarfile_fixture() -> Generator[Mock]: + """Mock tarfile.""" + with patch( + "homeassistant.components.backup.manager.SecureTarFile" + ) as mocked_tarfile: + yield mocked_tarfile + + +@pytest.fixture(name="path_glob") +def path_glob_fixture() -> Generator[MagicMock]: + """Mock path glob.""" + with patch( + "pathlib.Path.glob", return_value=[TEST_BACKUP_PATH_ABC123] + ) as path_glob: + yield path_glob + + +CONFIG_DIR = { + "testing_config": [ + Path("test.txt"), + Path(".DS_Store"), + Path(".storage"), + Path("backups"), + Path("tmp_backups"), + Path("home-assistant_v2.db"), + ], + "backups": [ + Path("backups/backup.tar"), + Path("backups/not_backup"), + ], + "tmp_backups": [ + Path("tmp_backups/forgotten_backup.tar"), + Path("tmp_backups/not_backup"), + ], +} +CONFIG_DIR_DIRS = {Path(".storage"), Path("backups"), Path("tmp_backups")} + + +@pytest.fixture(name="mock_backup_generation") +def mock_backup_generation_fixture( + hass: HomeAssistant, mocked_json_bytes: Mock, mocked_tarfile: Mock +) -> Generator[None]: + """Mock backup generator.""" + + with ( + patch("pathlib.Path.iterdir", lambda x: CONFIG_DIR.get(x.name, [])), + patch("pathlib.Path.stat", return_value=MagicMock(st_size=123)), + patch("pathlib.Path.is_file", lambda x: x not in CONFIG_DIR_DIRS), + patch("pathlib.Path.is_dir", lambda x: x in CONFIG_DIR_DIRS), + patch( + "pathlib.Path.exists", + lambda x: x + not in ( + Path(hass.config.path("backups")), + Path(hass.config.path("tmp_backups")), + ), + ), + patch( + "pathlib.Path.is_symlink", + lambda _: False, + ), + patch( + "pathlib.Path.mkdir", + MagicMock(), + ), + patch( + "homeassistant.components.backup.manager.HAVERSION", + "2025.1.0", + ), + ): + yield diff --git a/tests/components/backup/snapshots/test_backup.ambr b/tests/components/backup/snapshots/test_backup.ambr new file mode 100644 index 00000000000..b350ff680ee --- /dev/null +++ b/tests/components/backup/snapshots/test_backup.ambr @@ -0,0 +1,206 @@ +# serializer version: 1 +# name: test_delete_backup[found_backups0-True-1] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_backup[found_backups1-False-0] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_backup[found_backups2-True-0] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[None] + dict({ + 'id': 1, + 'result': dict({ + 'agents': list([ + dict({ + 'agent_id': 'backup.local', + }), + ]), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[None].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 
'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[side_effect1] + dict({ + 'id': 1, + 'result': dict({ + 'agents': list([ + dict({ + 'agent_id': 'backup.local', + }), + ]), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[side_effect1].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[side_effect2] + dict({ + 'id': 1, + 'result': dict({ + 'agents': list([ + dict({ + 'agent_id': 'backup.local', + }), + ]), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[side_effect2].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[side_effect3] + dict({ + 'id': 1, + 'result': dict({ + 'agents': list([ + dict({ + 'agent_id': 'backup.local', + }), + ]), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[side_effect3].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[side_effect4] + dict({ + 'id': 1, + 'result': dict({ + 'agents': list([ + dict({ + 'agent_id': 'backup.local', + }), + ]), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[side_effect4].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- diff --git a/tests/components/backup/snapshots/test_websocket.ambr b/tests/components/backup/snapshots/test_websocket.ambr index 096df37d704..8bd4e2817b2 100644 --- a/tests/components/backup/snapshots/test_websocket.ambr +++ b/tests/components/backup/snapshots/test_websocket.ambr @@ -1,4 +1,32 @@ # serializer version: 1 +# name: test_agent_delete_backup + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_agents_info + dict({ + 'id': 1, + 'result': dict({ + 'agents': list([ + dict({ + 'agent_id': 'backup.local', + }), + dict({ + 'agent_id': 'domain.test', + }), + ]), + }), + 'success': True, + 'type': 'result', + }) +# --- # name: test_backup_end[with_hassio-hass_access_token] dict({ 'error': dict({ @@ -40,7 +68,7 @@ 'type': 'result', }) # --- -# name: test_backup_end_excepion[exception0] +# name: test_backup_end_exception[exception0] dict({ 'error': dict({ 'code': 'post_backup_actions_failed', @@ -51,7 +79,7 @@ 'type': 'result', }) # --- -# name: test_backup_end_excepion[exception1] +# name: test_backup_end_exception[exception1] dict({ 'error': dict({ 'code': 'post_backup_actions_failed', @@ -62,7 +90,7 @@ 'type': 'result', }) # --- -# name: test_backup_end_excepion[exception2] +# name: test_backup_end_exception[exception2] dict({ 'error': dict({ 'code': 
'post_backup_actions_failed', @@ -114,7 +142,7 @@ 'type': 'result', }) # --- -# name: test_backup_start_excepion[exception0] +# name: test_backup_start_exception[exception0] dict({ 'error': dict({ 'code': 'pre_backup_actions_failed', @@ -125,7 +153,7 @@ 'type': 'result', }) # --- -# name: test_backup_start_excepion[exception1] +# name: test_backup_start_exception[exception1] dict({ 'error': dict({ 'code': 'pre_backup_actions_failed', @@ -136,7 +164,7 @@ 'type': 'result', }) # --- -# name: test_backup_start_excepion[exception2] +# name: test_backup_start_exception[exception2] dict({ 'error': dict({ 'code': 'pre_backup_actions_failed', @@ -147,121 +175,2666 @@ 'type': 'result', }) # --- -# name: test_details[with_hassio-with_backup_content] - dict({ - 'error': dict({ - 'code': 'unknown_command', - 'message': 'Unknown command.', - }), - 'id': 1, - 'success': False, - 'type': 'result', - }) -# --- -# name: test_details[with_hassio-without_backup_content] - dict({ - 'error': dict({ - 'code': 'unknown_command', - 'message': 'Unknown command.', - }), - 'id': 1, - 'success': False, - 'type': 'result', - }) -# --- -# name: test_details[without_hassio-with_backup_content] +# name: test_config_info[None] dict({ 'id': 1, 'result': dict({ - 'backup': dict({ - 'date': '1970-01-01T00:00:00.000Z', - 'name': 'Test', - 'path': 'abc123.tar', - 'size': 0.0, - 'slug': 'abc123', + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), }), }), 'success': True, 'type': 'result', }) # --- -# name: test_details[without_hassio-without_backup_content] +# name: test_config_info[storage_data1] dict({ 'id': 1, 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': list([ + 'test-addon', + ]), + 'include_all_addons': True, + 'include_database': True, + 'include_folders': list([ + 'media', + ]), + 'name': 'test-name', + 'password': 'test-password', + }), + 'last_attempted_strategy_backup': '2024-10-26T04:45:00+01:00', + 'last_completed_strategy_backup': '2024-10-26T04:45:00+01:00', + 'retention': dict({ + 'copies': 3, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_info[storage_data2] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': False, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': 3, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_info[storage_data3] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': False, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': '2024-10-27T04:45:00+01:00', + 
'last_completed_strategy_backup': '2024-10-26T04:45:00+01:00', + 'retention': dict({ + 'copies': None, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_info[storage_data4] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': False, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'mon', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_info[storage_data5] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': False, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'sat', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command0] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command0].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command0].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command10] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': 
None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command10].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command10].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command1] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command1].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command1].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command2] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 
'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command2].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'mon', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command2].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'mon', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command3] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command3].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command3].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command4] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 
'result', + }) +# --- +# name: test_config_update[command4].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': list([ + 'test-addon', + ]), + 'include_all_addons': False, + 'include_database': True, + 'include_folders': list([ + 'media', + ]), + 'name': 'test-name', + 'password': 'test-password', + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command4].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': list([ + 'test-addon', + ]), + 'include_all_addons': False, + 'include_database': True, + 'include_folders': list([ + 'media', + ]), + 'name': 'test-name', + 'password': 'test-password', + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command5] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command5].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': 3, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command5].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': 3, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command6] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + 
}), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command6].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command6].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command7] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command7].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': 3, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command7].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': 3, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command8] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- 
+# name: test_config_update[command8].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command8].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command9] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command9].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': 3, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command9].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': 3, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update_errors[command0] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update_errors[command0].1 + dict({ + 'id': 
3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents0-backups0] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents0-backups0].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents0-backups0].2 + dict({ + 'id': 3, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents1-backups1] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents1-backups1].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents1-backups1].2 + dict({ + 'id': 3, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents2-backups2] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'test.remote', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents2-backups2].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents2-backups2].2 + dict({ + 'id': 3, + 'result': dict({ + 'agent_errors': dict({ + 
}), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'test.remote', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents3-backups3] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + ]), + 'agent_ids': list([ + 'test.remote', + ]), + 'backup_id': 'def456', + 'database_included': False, + 'date': '1980-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test 2', + 'protected': False, + 'size': 1, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents3-backups3].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents3-backups3].2 + dict({ + 'id': 3, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + ]), + 'agent_ids': list([ + 'test.remote', + ]), + 'backup_id': 'def456', + 'database_included': False, + 'date': '1980-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test 2', + 'protected': False, + 'size': 1, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents4-backups4] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'test.remote', + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents4-backups4].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents4-backups4].2 + dict({ + 'id': 3, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'test.remote', + ]), + 'backup_id': 'abc123', + 
'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[BackupAgentUnreachableError-storage_data0] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + 'domain.test': 'The backup agent is unreachable.', + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[BackupAgentUnreachableError-storage_data0].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'domain.test', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 13, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[BackupAgentUnreachableError-storage_data1] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + 'domain.test': 'The backup agent is unreachable.', + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[BackupAgentUnreachableError-storage_data1].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'domain.test', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00Z', + 'failed_agent_ids': list([ + 'test.remote', + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 13, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[None-storage_data0] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[None-storage_data0].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'domain.test', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 13, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 
'result', + }) +# --- +# name: test_delete_with_errors[None-storage_data1] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[None-storage_data1].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'domain.test', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 13, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[side_effect1-storage_data0] + dict({ + 'error': dict({ + 'code': 'home_assistant_error', + 'message': 'Boom!', + }), + 'id': 1, + 'success': False, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[side_effect1-storage_data0].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'domain.test', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 13, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[side_effect1-storage_data1] + dict({ + 'error': dict({ + 'code': 'home_assistant_error', + 'message': 'Boom!', + }), + 'id': 1, + 'success': False, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[side_effect1-storage_data1].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'domain.test', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00Z', + 'failed_agent_ids': list([ + 'test.remote', + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 13, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_details[remote_agents0-backups0] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), 'backup': None, }), 'success': True, 'type': 'result', }) # --- -# name: test_generate[with_hassio] - dict({ - 'error': dict({ - 'code': 'unknown_command', - 'message': 'Unknown command.', - }), - 'id': 1, - 'success': False, - 'type': 'result', - }) -# --- -# name: test_generate[without_hassio] +# name: test_details[remote_agents1-backups1] dict({ 'id': 1, 'result': dict({ - 'date': '1970-01-01T00:00:00.000Z', - 'name': 
'Test', - 'path': 'abc123.tar', - 'size': 0.0, - 'slug': 'abc123', + 'agent_errors': dict({ + }), + 'backup': dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), }), 'success': True, 'type': 'result', }) # --- -# name: test_info[with_hassio] +# name: test_details[remote_agents2-backups2] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backup': dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'test.remote', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_details[remote_agents3-backups3] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_details[remote_agents4-backups4] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backup': dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'test.remote', + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_details_with_errors[BackupAgentUnreachableError] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + 'domain.test': 'The backup agent is unreachable.', + }), + 'backup': dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_details_with_errors[side_effect0] dict({ 'error': dict({ - 'code': 'unknown_command', - 'message': 'Unknown command.', + 'code': 'home_assistant_error', + 'message': 'Boom!', }), 'id': 1, 'success': False, 'type': 'result', }) # --- -# name: test_info[without_hassio] +# name: test_generate[None] + dict({ + 'event': dict({ + 'manager_state': 'idle', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[None].1 + dict({ + 'id': 1, + 'result': None, + 'success': True, + 'type': 'result', + }) +# --- +# name: 
test_generate[None].2 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': None, + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[None].3 + dict({ + 'id': 2, + 'result': dict({ + 'backup_job_id': '27f5c632', + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_generate[None].4 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': 'home_assistant', + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[None].5 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': 'upload_to_agents', + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[None].6 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': None, + 'state': 'completed', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data1] + dict({ + 'event': dict({ + 'manager_state': 'idle', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data1].1 + dict({ + 'id': 1, + 'result': None, + 'success': True, + 'type': 'result', + }) +# --- +# name: test_generate[data1].2 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': None, + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data1].3 + dict({ + 'id': 2, + 'result': dict({ + 'backup_job_id': '27f5c632', + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_generate[data1].4 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': 'home_assistant', + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data1].5 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': 'upload_to_agents', + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data1].6 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': None, + 'state': 'completed', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data2] + dict({ + 'event': dict({ + 'manager_state': 'idle', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data2].1 + dict({ + 'id': 1, + 'result': None, + 'success': True, + 'type': 'result', + }) +# --- +# name: test_generate[data2].2 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': None, + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data2].3 + dict({ + 'id': 2, + 'result': dict({ + 'backup_job_id': '27f5c632', + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_generate[data2].4 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': 'home_assistant', + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data2].5 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': 'upload_to_agents', + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data2].6 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': None, + 'state': 'completed', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_info[remote_agents0-remote_backups0] dict({ 'id': 1, 'result': dict({ - 'backing_up': False, + 'agent_errors': dict({ + }), 'backups': list([ dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': 
list([ + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', 'name': 'Test', - 'path': 'abc123.tar', - 'size': 0.0, - 'slug': 'abc123', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, }), ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, }), 'success': True, 'type': 'result', }) # --- -# name: test_remove[with_hassio] +# name: test_info[remote_agents1-remote_backups1] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_info[remote_agents2-remote_backups2] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'test.remote', + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_info[remote_agents3-remote_backups3] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + ]), + 'agent_ids': list([ + 'test.remote', + ]), + 'backup_id': 'def456', + 'database_included': False, + 'date': '1980-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test 2', + 'protected': False, + 'size': 1, + 'with_strategy_settings': False, + }), + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_info_with_errors[BackupAgentUnreachableError] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + 'domain.test': 'The backup agent is 
unreachable.', + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_info_with_errors[side_effect0] dict({ 'error': dict({ - 'code': 'unknown_command', - 'message': 'Unknown command.', + 'code': 'home_assistant_error', + 'message': 'Boom!', }), 'id': 1, 'success': False, 'type': 'result', }) # --- -# name: test_remove[without_hassio] +# name: test_restore_local_agent[backups0] + dict({ + 'error': dict({ + 'code': 'home_assistant_error', + 'message': 'Backup abc123 not found in agent backup.local', + }), + 'id': 1, + 'success': False, + 'type': 'result', + }) +# --- +# name: test_restore_local_agent[backups0].1 + 0 +# --- +# name: test_restore_local_agent[backups1] dict({ 'id': 1, 'result': None, @@ -269,18 +2842,24 @@ 'type': 'result', }) # --- -# name: test_restore[with_hassio] +# name: test_restore_local_agent[backups1].1 + 1 +# --- +# name: test_restore_remote_agent[remote_agents0-backups0] dict({ 'error': dict({ - 'code': 'unknown_command', - 'message': 'Unknown command.', + 'code': 'home_assistant_error', + 'message': 'Backup abc123 not found in agent test.remote', }), 'id': 1, 'success': False, 'type': 'result', }) # --- -# name: test_restore[without_hassio] +# name: test_restore_remote_agent[remote_agents0-backups0].1 + 0 +# --- +# name: test_restore_remote_agent[remote_agents1-backups1] dict({ 'id': 1, 'result': None, @@ -288,3 +2867,34 @@ 'type': 'result', }) # --- +# name: test_restore_remote_agent[remote_agents1-backups1].1 + 1 +# --- +# name: test_subscribe_event + dict({ + 'event': dict({ + 'manager_state': 'idle', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_subscribe_event.1 + dict({ + 'id': 1, + 'result': None, + 'success': True, + 'type': 'result', + }) +# --- +# name: test_subscribe_event.2 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': None, + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- diff --git a/tests/components/backup/test_backup.py b/tests/components/backup/test_backup.py new file mode 100644 index 00000000000..02252ef6fa5 --- /dev/null +++ b/tests/components/backup/test_backup.py @@ -0,0 +1,129 @@ +"""Test the builtin backup platform.""" + +from __future__ import annotations + +from collections.abc import Generator +from io import StringIO +import json +from pathlib import Path +from tarfile import TarError +from unittest.mock import MagicMock, mock_open, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.backup import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from .common import TEST_BACKUP_ABC123, TEST_BACKUP_PATH_ABC123 + +from tests.typing import ClientSessionGenerator, WebSocketGenerator + + +@pytest.fixture(name="read_backup") +def read_backup_fixture(path_glob: MagicMock) -> Generator[MagicMock]: + """Mock read backup.""" + with patch( + 
"homeassistant.components.backup.backup.read_backup", + return_value=TEST_BACKUP_ABC123, + ) as read_backup: + yield read_backup + + +@pytest.mark.parametrize( + "side_effect", + [ + None, + OSError("Boom"), + TarError("Boom"), + json.JSONDecodeError("Boom", "test", 1), + KeyError("Boom"), + ], +) +async def test_load_backups( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, + read_backup: MagicMock, + side_effect: Exception | None, +) -> None: + """Test load backups.""" + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + client = await hass_ws_client(hass) + read_backup.side_effect = side_effect + + # list agents + await client.send_json_auto_id({"type": "backup/agents/info"}) + assert await client.receive_json() == snapshot + + # load and list backups + await client.send_json_auto_id({"type": "backup/info"}) + assert await client.receive_json() == snapshot + + +async def test_upload( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, +) -> None: + """Test upload backup.""" + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + client = await hass_client() + open_mock = mock_open() + + with ( + patch("pathlib.Path.open", open_mock), + patch("shutil.move") as move_mock, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=TEST_BACKUP_ABC123, + ), + ): + resp = await client.post( + "/api/backup/upload?agent_id=backup.local", + data={"file": StringIO("test")}, + ) + + assert resp.status == 201 + assert open_mock.call_count == 1 + assert move_mock.call_count == 1 + assert move_mock.mock_calls[0].args[1].name == "abc123.tar" + + +@pytest.mark.usefixtures("read_backup") +@pytest.mark.parametrize( + ("found_backups", "backup_exists", "unlink_calls"), + [ + ([TEST_BACKUP_PATH_ABC123], True, 1), + ([TEST_BACKUP_PATH_ABC123], False, 0), + (([], True, 0)), + ], +) +async def test_delete_backup( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, + path_glob: MagicMock, + found_backups: list[Path], + backup_exists: bool, + unlink_calls: int, +) -> None: + """Test delete backup.""" + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + client = await hass_ws_client(hass) + path_glob.return_value = found_backups + + with ( + patch("pathlib.Path.exists", return_value=backup_exists), + patch("pathlib.Path.unlink") as unlink, + ): + await client.send_json_auto_id( + {"type": "backup/delete", "backup_id": TEST_BACKUP_ABC123.backup_id} + ) + assert await client.receive_json() == snapshot + + assert unlink.call_count == unlink_calls diff --git a/tests/components/backup/test_http.py b/tests/components/backup/test_http.py index 76b1f76b55b..c071a0d8386 100644 --- a/tests/components/backup/test_http.py +++ b/tests/components/backup/test_http.py @@ -7,27 +7,28 @@ from unittest.mock import patch from aiohttp import web import pytest +from homeassistant.components.backup.const import DATA_MANAGER from homeassistant.core import HomeAssistant -from .common import TEST_BACKUP, setup_backup_integration +from .common import TEST_BACKUP_ABC123, BackupAgentTest, setup_backup_integration from tests.common import MockUser from tests.typing import ClientSessionGenerator -async def test_downloading_backup( +async def test_downloading_local_backup( hass: HomeAssistant, hass_client: ClientSessionGenerator, ) -> None: - """Test downloading a backup 
file.""" + """Test downloading a local backup file.""" await setup_backup_integration(hass) client = await hass_client() with ( patch( - "homeassistant.components.backup.manager.BackupManager.async_get_backup", - return_value=TEST_BACKUP, + "homeassistant.components.backup.backup.CoreLocalBackupAgent.async_get_backup", + return_value=TEST_BACKUP_ABC123, ), patch("pathlib.Path.exists", return_value=True), patch( @@ -35,10 +36,29 @@ async def test_downloading_backup( return_value=web.Response(text=""), ), ): - resp = await client.get("/api/backup/download/abc123") + resp = await client.get("/api/backup/download/abc123?agent_id=backup.local") assert resp.status == 200 +async def test_downloading_remote_backup( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, +) -> None: + """Test downloading a remote backup.""" + await setup_backup_integration(hass) + hass.data[DATA_MANAGER].backup_agents["domain.test"] = BackupAgentTest("test") + + client = await hass_client() + + with ( + patch.object(BackupAgentTest, "async_download_backup") as download_mock, + ): + download_mock.return_value.__aiter__.return_value = iter((b"backup data",)) + resp = await client.get("/api/backup/download/abc123?agent_id=domain.test") + assert resp.status == 200 + assert await resp.content.read() == b"backup data" + + async def test_downloading_backup_not_found( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -48,7 +68,7 @@ async def test_downloading_backup_not_found( client = await hass_client() - resp = await client.get("/api/backup/download/abc123") + resp = await client.get("/api/backup/download/abc123?agent_id=backup.local") assert resp.status == 404 @@ -63,7 +83,7 @@ async def test_downloading_as_non_admin( client = await hass_client() - resp = await client.get("/api/backup/download/abc123") + resp = await client.get("/api/backup/download/abc123?agent_id=backup.local") assert resp.status == 401 @@ -80,7 +100,7 @@ async def test_uploading_a_backup_file( "homeassistant.components.backup.manager.BackupManager.async_receive_backup", ) as async_receive_backup_mock: resp = await client.post( - "/api/backup/upload", + "/api/backup/upload?agent_id=backup.local", data={"file": StringIO("test")}, ) assert resp.status == 201 @@ -90,7 +110,7 @@ async def test_uploading_a_backup_file( @pytest.mark.parametrize( ("error", "message"), [ - (OSError("Boom!"), "Can't write backup file Boom!"), + (OSError("Boom!"), "Can't write backup file: Boom!"), (asyncio.CancelledError("Boom!"), ""), ], ) @@ -110,7 +130,7 @@ async def test_error_handling_uploading_a_backup_file( side_effect=error, ): resp = await client.post( - "/api/backup/upload", + "/api/backup/upload?agent_id=backup.local", data={"file": StringIO("test")}, ) assert resp.status == 500 diff --git a/tests/components/backup/test_init.py b/tests/components/backup/test_init.py index e064939d618..16a49af9647 100644 --- a/tests/components/backup/test_init.py +++ b/tests/components/backup/test_init.py @@ -1,15 +1,18 @@ """Tests for the Backup integration.""" +from typing import Any from unittest.mock import patch import pytest -from homeassistant.components.backup.const import DOMAIN +from homeassistant.components.backup.const import DATA_MANAGER, DOMAIN from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceNotFound from .common import setup_backup_integration +@pytest.mark.usefixtures("supervisor_client") async def test_setup_with_hassio( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -20,14 +23,14 @@ async def 
test_setup_with_hassio( with_hassio=True, configuration={DOMAIN: {}}, ) - assert ( - "The backup integration is not supported on this installation method, please" - " remove it from your configuration" - ) in caplog.text + manager = hass.data[DATA_MANAGER] + assert not manager.backup_agents +@pytest.mark.parametrize("service_data", [None, {}]) async def test_create_service( hass: HomeAssistant, + service_data: dict[str, Any] | None, ) -> None: """Test generate backup.""" await setup_backup_integration(hass) @@ -39,6 +42,15 @@ async def test_create_service( DOMAIN, "create", blocking=True, + service_data=service_data, ) assert generate_backup.called + + +async def test_create_service_with_hassio(hass: HomeAssistant) -> None: + """Test action backup.create does not exist with hassio.""" + await setup_backup_integration(hass, with_hassio=True) + + with pytest.raises(ServiceNotFound): + await hass.services.async_call(DOMAIN, "create", blocking=True) diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index a3f70267643..f335ea5c0ee 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -2,199 +2,527 @@ from __future__ import annotations -from pathlib import Path -from unittest.mock import AsyncMock, MagicMock, Mock, mock_open, patch +import asyncio +from collections.abc import Generator +from io import StringIO +import json +from typing import Any +from unittest.mock import ANY, AsyncMock, MagicMock, Mock, call, mock_open, patch -import aiohttp -from multidict import CIMultiDict, CIMultiDictProxy import pytest -from homeassistant.components.backup import BackupManager -from homeassistant.components.backup.manager import BackupPlatformProtocol +from homeassistant.components.backup import ( + DOMAIN, + AgentBackup, + BackupAgentPlatformProtocol, + BackupManager, + BackupPlatformProtocol, + Folder, + backup as local_backup_platform, +) +from homeassistant.components.backup.const import DATA_MANAGER +from homeassistant.components.backup.manager import ( + BackupManagerState, + CoreBackupReaderWriter, + CreateBackupEvent, + CreateBackupStage, + CreateBackupState, + NewBackup, + WrittenBackup, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component -from .common import TEST_BACKUP +from .common import ( + LOCAL_AGENT_ID, + TEST_BACKUP_ABC123, + TEST_BACKUP_DEF456, + BackupAgentTest, +) from tests.common import MockPlatform, mock_platform +from tests.typing import ClientSessionGenerator, WebSocketGenerator + +_EXPECTED_FILES = [ + "test.txt", + ".storage", + "backups", + "backups/not_backup", + "tmp_backups", + "tmp_backups/not_backup", +] +_EXPECTED_FILES_WITH_DATABASE = { + True: [*_EXPECTED_FILES, "home-assistant_v2.db"], + False: _EXPECTED_FILES, +} -async def _mock_backup_generation(manager: BackupManager): - """Mock backup generator.""" - - def _mock_iterdir(path: Path) -> list[Path]: - if not path.name.endswith("testing_config"): - return [] - return [ - Path("test.txt"), - Path(".DS_Store"), - Path(".storage"), - ] - - with ( - patch( - "homeassistant.components.backup.manager.SecureTarFile" - ) as mocked_tarfile, - patch("pathlib.Path.iterdir", _mock_iterdir), - patch("pathlib.Path.stat", MagicMock(st_size=123)), - patch("pathlib.Path.is_file", lambda x: x.name != ".storage"), - patch( - "pathlib.Path.is_dir", - lambda x: x.name == ".storage", - ), - patch( - "pathlib.Path.exists", - lambda x: x != 
manager.backup_dir, - ), - patch( - "pathlib.Path.is_symlink", - lambda _: False, - ), - patch( - "pathlib.Path.mkdir", - MagicMock(), - ), - patch( - "homeassistant.components.backup.manager.json_bytes", - return_value=b"{}", # Empty JSON - ) as mocked_json_bytes, - patch( - "homeassistant.components.backup.manager.HAVERSION", - "2025.1.0", - ), - ): - await manager.async_create_backup() - - assert mocked_json_bytes.call_count == 1 - backup_json_dict = mocked_json_bytes.call_args[0][0] - assert isinstance(backup_json_dict, dict) - assert backup_json_dict["homeassistant"] == {"version": "2025.1.0"} - assert manager.backup_dir.as_posix() in str( - mocked_tarfile.call_args_list[0][0][0] - ) - - -async def _setup_mock_domain( +async def _setup_backup_platform( hass: HomeAssistant, - platform: BackupPlatformProtocol | None = None, + *, + domain: str = "some_domain", + platform: BackupPlatformProtocol | BackupAgentPlatformProtocol | None = None, ) -> None: """Set up a mock domain.""" - mock_platform(hass, "some_domain.backup", platform or MockPlatform()) - assert await async_setup_component(hass, "some_domain", {}) + mock_platform(hass, f"{domain}.backup", platform or MockPlatform()) + assert await async_setup_component(hass, domain, {}) + await hass.async_block_till_done() -async def test_constructor(hass: HomeAssistant) -> None: - """Test BackupManager constructor.""" - manager = BackupManager(hass) - assert manager.backup_dir.as_posix() == hass.config.path("backups") +@pytest.fixture(autouse=True) +def mock_delay_save() -> Generator[None]: + """Mock the delay save constant.""" + with patch("homeassistant.components.backup.store.STORE_DELAY_SAVE", 0): + yield -async def test_load_backups(hass: HomeAssistant) -> None: - """Test loading backups.""" - manager = BackupManager(hass) - with ( - patch("pathlib.Path.glob", return_value=[TEST_BACKUP.path]), - patch("tarfile.open", return_value=MagicMock()), - patch( - "homeassistant.components.backup.manager.json_loads_object", - return_value={ - "slug": TEST_BACKUP.slug, - "name": TEST_BACKUP.name, - "date": TEST_BACKUP.date, - }, +@pytest.fixture(name="generate_backup_id") +def generate_backup_id_fixture() -> Generator[MagicMock]: + """Mock generate backup id.""" + with patch("homeassistant.components.backup.manager._generate_backup_id") as mock: + mock.return_value = "abc123" + yield mock + + +@pytest.mark.usefixtures("mock_backup_generation") +async def test_async_create_backup( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + mocked_json_bytes: Mock, + mocked_tarfile: Mock, +) -> None: + """Test create backup.""" + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + + new_backup = NewBackup(backup_job_id="time-123") + backup_task = AsyncMock( + return_value=WrittenBackup( + backup=TEST_BACKUP_ABC123, + open_stream=AsyncMock(), + release_stream=AsyncMock(), ), - patch( - "pathlib.Path.stat", - return_value=MagicMock(st_size=TEST_BACKUP.size), - ), - ): - await manager.load_backups() - backups = await manager.async_get_backups() - assert backups == {TEST_BACKUP.slug: TEST_BACKUP} + )() # call it so that it can be awaited + with patch( + "homeassistant.components.backup.manager.CoreBackupReaderWriter.async_create_backup", + return_value=(new_backup, backup_task), + ) as create_backup: + await hass.services.async_call( + DOMAIN, + "create", + blocking=True, + ) -async def test_load_backups_with_exception( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test loading 
backups with exception.""" - manager = BackupManager(hass) - with ( - patch("pathlib.Path.glob", return_value=[TEST_BACKUP.path]), - patch("tarfile.open", side_effect=OSError("Test exception")), - ): - await manager.load_backups() - backups = await manager.async_get_backups() - assert f"Unable to read backup {TEST_BACKUP.path}: Test exception" in caplog.text - assert backups == {} - - -async def test_removing_backup( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test removing backup.""" - manager = BackupManager(hass) - manager.backups = {TEST_BACKUP.slug: TEST_BACKUP} - manager.loaded_backups = True - - with patch("pathlib.Path.exists", return_value=True): - await manager.async_remove_backup(slug=TEST_BACKUP.slug) - assert "Removed backup located at" in caplog.text - - -async def test_removing_non_existing_backup( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test removing not existing backup.""" - manager = BackupManager(hass) - - await manager.async_remove_backup(slug="non_existing") - assert "Removed backup located at" not in caplog.text - - -async def test_getting_backup_that_does_not_exist( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test getting backup that does not exist.""" - manager = BackupManager(hass) - manager.backups = {TEST_BACKUP.slug: TEST_BACKUP} - manager.loaded_backups = True - - with patch("pathlib.Path.exists", return_value=False): - backup = await manager.async_get_backup(slug=TEST_BACKUP.slug) - assert backup is None - - assert ( - f"Removing tracked backup ({TEST_BACKUP.slug}) that " - f"does not exists on the expected path {TEST_BACKUP.path}" - ) in caplog.text + assert create_backup.called + assert create_backup.call_args == call( + agent_ids=["backup.local"], + backup_name="Core 2025.1.0", + include_addons=None, + include_all_addons=False, + include_database=True, + include_folders=None, + include_homeassistant=True, + on_progress=ANY, + password=None, + ) async def test_async_create_backup_when_backing_up(hass: HomeAssistant) -> None: """Test generate backup.""" - manager = BackupManager(hass) - manager.backing_up = True - with pytest.raises(HomeAssistantError, match="Backup already in progress"): - await manager.async_create_backup() + manager = BackupManager(hass, CoreBackupReaderWriter(hass)) + manager.last_event = CreateBackupEvent( + stage=None, state=CreateBackupState.IN_PROGRESS + ) + with pytest.raises(HomeAssistantError, match="Backup manager busy"): + await manager.async_create_backup( + agent_ids=[LOCAL_AGENT_ID], + include_addons=[], + include_all_addons=False, + include_database=True, + include_folders=[], + include_homeassistant=True, + name=None, + password=None, + ) -async def test_async_create_backup( +@pytest.mark.parametrize( + ("parameters", "expected_error"), + [ + ({"agent_ids": []}, "At least one agent must be selected"), + ({"agent_ids": ["non_existing"]}, "Invalid agent selected"), + ( + {"include_addons": ["ssl"], "include_all_addons": True}, + "Cannot include all addons and specify specific addons", + ), + ({"include_homeassistant": False}, "Home Assistant must be included in backup"), + ], +) +async def test_create_backup_wrong_parameters( hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + parameters: dict[str, Any], + expected_error: str, +) -> None: + """Test create backup with wrong parameters.""" + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + + ws_client = await hass_ws_client(hass) 
+ + default_parameters = { + "agent_ids": [LOCAL_AGENT_ID], + "include_addons": [], + "include_all_addons": False, + "include_database": True, + "include_folders": [], + "include_homeassistant": True, + } + + await ws_client.send_json_auto_id( + {"type": "backup/generate"} | default_parameters | parameters + ) + result = await ws_client.receive_json() + + assert result["success"] is False + assert result["error"]["code"] == "home_assistant_error" + assert result["error"]["message"] == expected_error + + +@pytest.mark.usefixtures("mock_backup_generation") +@pytest.mark.parametrize( + ("agent_ids", "backup_directory", "temp_file_unlink_call_count"), + [ + ([LOCAL_AGENT_ID], "backups", 0), + (["test.remote"], "tmp_backups", 1), + ([LOCAL_AGENT_ID, "test.remote"], "backups", 0), + ], +) +@pytest.mark.parametrize( + "params", + [ + {}, + {"include_database": True, "name": "abc123"}, + {"include_database": False}, + {"password": "pass123"}, + ], +) +async def test_async_initiate_backup( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, caplog: pytest.LogCaptureFixture, + mocked_json_bytes: Mock, + mocked_tarfile: Mock, + generate_backup_id: MagicMock, + path_glob: MagicMock, + params: dict[str, Any], + agent_ids: list[str], + backup_directory: str, + temp_file_unlink_call_count: int, ) -> None: """Test generate backup.""" - manager = BackupManager(hass) - manager.loaded_backups = True + local_agent = local_backup_platform.CoreLocalBackupAgent(hass) + remote_agent = BackupAgentTest("remote", backups=[]) + agents = { + f"backup.{local_agent.name}": local_agent, + f"test.{remote_agent.name}": remote_agent, + } + with patch( + "homeassistant.components.backup.backup.async_get_backup_agents" + ) as core_get_backup_agents: + core_get_backup_agents.return_value = [local_agent] + await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) - await _mock_backup_generation(manager) + ws_client = await hass_ws_client(hass) - assert "Generated new backup with slug " in caplog.text - assert "Creating backup directory" in caplog.text - assert "Loaded 0 platforms" in caplog.text + include_database = params.get("include_database", True) + name = params.get("name", "Core 2025.1.0") + password = params.get("password") + path_glob.return_value = [] + + await ws_client.send_json_auto_id({"type": "backup/info"}) + result = await ws_client.receive_json() + + assert result["success"] is True + assert result["result"] == { + "backups": [], + "agent_errors": {}, + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + } + + await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) + + result = await ws_client.receive_json() + assert result["event"] == {"manager_state": BackupManagerState.IDLE} + + result = await ws_client.receive_json() + assert result["success"] is True + + with ( + patch("pathlib.Path.open", mock_open(read_data=b"test")), + patch("pathlib.Path.unlink") as unlink_mock, + ): + await ws_client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": agent_ids} | params + ) + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": None, + "state": CreateBackupState.IN_PROGRESS, + } + result = await ws_client.receive_json() + assert result["success"] is True + + 
backup_id = result["result"]["backup_job_id"] + assert backup_id == generate_backup_id.return_value + + await hass.async_block_till_done() + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": CreateBackupStage.HOME_ASSISTANT, + "state": CreateBackupState.IN_PROGRESS, + } + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": CreateBackupStage.UPLOAD_TO_AGENTS, + "state": CreateBackupState.IN_PROGRESS, + } + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": None, + "state": CreateBackupState.COMPLETED, + } + + result = await ws_client.receive_json() + assert result["event"] == {"manager_state": BackupManagerState.IDLE} + + assert unlink_mock.call_count == temp_file_unlink_call_count + + assert mocked_json_bytes.call_count == 1 + backup_json_dict = mocked_json_bytes.call_args[0][0] + assert isinstance(backup_json_dict, dict) + assert backup_json_dict == { + "compressed": True, + "date": ANY, + "homeassistant": { + "exclude_database": not include_database, + "version": "2025.1.0", + }, + "name": name, + "protected": bool(password), + "slug": ANY, + "type": "partial", + "version": 2, + } + + await ws_client.send_json_auto_id( + {"type": "backup/details", "backup_id": backup_id} + ) + result = await ws_client.receive_json() + + backup_data = result["result"]["backup"] + backup_agent_ids = backup_data.pop("agent_ids") + + assert backup_agent_ids == agent_ids + + backup = AgentBackup.from_dict(backup_data) + + assert backup == AgentBackup( + addons=[], + backup_id=ANY, + database_included=include_database, + date=ANY, + folders=[], + homeassistant_included=True, + homeassistant_version="2025.1.0", + name=name, + protected=bool(password), + size=ANY, + ) + for agent_id in agent_ids: + agent = agents[agent_id] + assert len(agent._backups) == 1 + agent_backup = agent._backups[backup.backup_id] + assert agent_backup.backup_id == backup.backup_id + assert agent_backup.date == backup.date + assert agent_backup.name == backup.name + assert agent_backup.protected == backup.protected + assert agent_backup.size == backup.size + + outer_tar = mocked_tarfile.return_value + core_tar = outer_tar.create_inner_tar.return_value.__enter__.return_value + expected_files = [call(hass.config.path(), arcname="data", recursive=False)] + [ + call(file, arcname=f"data/{file}", recursive=False) + for file in _EXPECTED_FILES_WITH_DATABASE[include_database] + ] + assert core_tar.add.call_args_list == expected_files + + tar_file_path = str(mocked_tarfile.call_args_list[0][0][0]) + backup_directory = hass.config.path(backup_directory) + assert tar_file_path == f"{backup_directory}/{backup.backup_id}.tar" + + +@pytest.mark.usefixtures("mock_backup_generation") +async def test_async_initiate_backup_with_agent_error( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + mocked_json_bytes: Mock, + mocked_tarfile: Mock, + generate_backup_id: MagicMock, + path_glob: MagicMock, + hass_storage: dict[str, Any], +) -> None: + """Test generate backup.""" + agent_ids = [LOCAL_AGENT_ID, "test.remote"] + local_agent = local_backup_platform.CoreLocalBackupAgent(hass) + remote_agent = BackupAgentTest("remote", backups=[]) + + with patch( + "homeassistant.components.backup.backup.async_get_backup_agents" + ) as core_get_backup_agents: + core_get_backup_agents.return_value = 
[local_agent] + await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) + + ws_client = await hass_ws_client(hass) + + path_glob.return_value = [] + + await ws_client.send_json_auto_id({"type": "backup/info"}) + result = await ws_client.receive_json() + + assert result["success"] is True + assert result["result"] == { + "backups": [], + "agent_errors": {}, + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + } + + await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) + + result = await ws_client.receive_json() + assert result["event"] == {"manager_state": BackupManagerState.IDLE} + + result = await ws_client.receive_json() + assert result["success"] is True + + with ( + patch("pathlib.Path.open", mock_open(read_data=b"test")), + patch.object( + remote_agent, "async_upload_backup", side_effect=Exception("Test exception") + ), + ): + await ws_client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": agent_ids} + ) + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": None, + "state": CreateBackupState.IN_PROGRESS, + } + result = await ws_client.receive_json() + assert result["success"] is True + + backup_id = result["result"]["backup_job_id"] + assert backup_id == generate_backup_id.return_value + + await hass.async_block_till_done() + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": CreateBackupStage.HOME_ASSISTANT, + "state": CreateBackupState.IN_PROGRESS, + } + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": CreateBackupStage.UPLOAD_TO_AGENTS, + "state": CreateBackupState.IN_PROGRESS, + } + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": None, + "state": CreateBackupState.COMPLETED, + } + + result = await ws_client.receive_json() + assert result["event"] == {"manager_state": BackupManagerState.IDLE} + + expected_backup_data = { + "addons": [], + "agent_ids": ["backup.local"], + "backup_id": "abc123", + "database_included": True, + "date": ANY, + "failed_agent_ids": ["test.remote"], + "folders": [], + "homeassistant_included": True, + "homeassistant_version": "2025.1.0", + "name": "Core 2025.1.0", + "protected": False, + "size": 123, + "with_strategy_settings": False, + } + + await ws_client.send_json_auto_id( + {"type": "backup/details", "backup_id": backup_id} + ) + result = await ws_client.receive_json() + assert result["result"] == { + "agent_errors": {}, + "backup": expected_backup_data, + } + + await ws_client.send_json_auto_id({"type": "backup/info"}) + result = await ws_client.receive_json() + assert result["result"] == { + "agent_errors": {}, + "backups": [expected_backup_data], + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + } + + await hass.async_block_till_done() + assert hass_storage[DOMAIN]["data"]["backups"] == [ + { + "backup_id": "abc123", + "failed_agent_ids": ["test.remote"], + "with_strategy_settings": False, + } + ] async def test_loading_platforms( @@ -202,198 +530,384 @@ async def test_loading_platforms( caplog: 
pytest.LogCaptureFixture, ) -> None: """Test loading backup platforms.""" - manager = BackupManager(hass) + manager = BackupManager(hass, CoreBackupReaderWriter(hass)) - assert not manager.loaded_platforms assert not manager.platforms - await _setup_mock_domain( + await _setup_backup_platform( hass, - Mock( + platform=Mock( async_pre_backup=AsyncMock(), async_post_backup=AsyncMock(), + async_get_backup_agents=AsyncMock(), ), ) await manager.load_platforms() await hass.async_block_till_done() - assert manager.loaded_platforms assert len(manager.platforms) == 1 assert "Loaded 1 platforms" in caplog.text +@pytest.mark.parametrize( + "platform_mock", + [ + Mock(async_pre_backup=AsyncMock(), spec=["async_pre_backup"]), + Mock(async_post_backup=AsyncMock(), spec=["async_post_backup"]), + Mock(spec=[]), + ], +) async def test_not_loading_bad_platforms( hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, + platform_mock: Mock, ) -> None: - """Test loading backup platforms.""" - manager = BackupManager(hass) - - assert not manager.loaded_platforms - assert not manager.platforms - - await _setup_mock_domain(hass) - await manager.load_platforms() + """Test not loading bad backup platforms.""" + await _setup_backup_platform( + hass, + domain="test", + platform=platform_mock, + ) + assert await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() - assert manager.loaded_platforms - assert len(manager.platforms) == 0 - - assert "Loaded 0 platforms" in caplog.text - assert ( - "some_domain does not implement required functions for the backup platform" - in caplog.text - ) + assert platform_mock.mock_calls == [] -async def test_exception_plaform_pre(hass: HomeAssistant) -> None: +async def test_exception_platform_pre( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: """Test exception in pre step.""" - manager = BackupManager(hass) - manager.loaded_backups = True async def _mock_step(hass: HomeAssistant) -> None: raise HomeAssistantError("Test exception") - await _setup_mock_domain( + remote_agent = BackupAgentTest("remote", backups=[]) + await _setup_backup_platform( hass, - Mock( + domain="test", + platform=Mock( async_pre_backup=_mock_step, async_post_backup=AsyncMock(), + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), ), ) + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() - with pytest.raises(HomeAssistantError): - await _mock_backup_generation(manager) + await hass.services.async_call( + DOMAIN, + "create", + blocking=True, + ) + + assert "Generating backup failed" in caplog.text + assert "Test exception" in caplog.text -async def test_exception_plaform_post(hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("mock_backup_generation") +async def test_exception_platform_post( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: """Test exception in post step.""" - manager = BackupManager(hass) - manager.loaded_backups = True async def _mock_step(hass: HomeAssistant) -> None: raise HomeAssistantError("Test exception") - await _setup_mock_domain( + remote_agent = BackupAgentTest("remote", backups=[]) + await _setup_backup_platform( hass, - Mock( + domain="test", + platform=Mock( async_pre_backup=AsyncMock(), async_post_backup=_mock_step, + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), ), ) + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() - with pytest.raises(HomeAssistantError): - await _mock_backup_generation(manager) 
+ await hass.services.async_call( + DOMAIN, + "create", + blocking=True, + ) + + assert "Generating backup failed" in caplog.text + assert "Test exception" in caplog.text -async def test_loading_platforms_when_running_async_pre_backup_actions( +@pytest.mark.parametrize( + ( + "agent_id_params", + "open_call_count", + "move_call_count", + "move_path_names", + "remote_agent_backups", + "remote_agent_backup_data", + "temp_file_unlink_call_count", + ), + [ + ( + "agent_id=backup.local&agent_id=test.remote", + 2, + 1, + ["abc123.tar"], + {TEST_BACKUP_ABC123.backup_id: TEST_BACKUP_ABC123}, + b"test", + 0, + ), + ( + "agent_id=backup.local", + 1, + 1, + ["abc123.tar"], + {}, + None, + 0, + ), + ( + "agent_id=test.remote", + 2, + 0, + [], + {TEST_BACKUP_ABC123.backup_id: TEST_BACKUP_ABC123}, + b"test", + 1, + ), + ], +) +async def test_receive_backup( hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, + hass_client: ClientSessionGenerator, + agent_id_params: str, + open_call_count: int, + move_call_count: int, + move_path_names: list[str], + remote_agent_backups: dict[str, AgentBackup], + remote_agent_backup_data: bytes | None, + temp_file_unlink_call_count: int, ) -> None: - """Test loading backup platforms when running post backup actions.""" - manager = BackupManager(hass) - - assert not manager.loaded_platforms - assert not manager.platforms - - await _setup_mock_domain( + """Test receive backup and upload to the local and a remote agent.""" + remote_agent = BackupAgentTest("remote", backups=[]) + await _setup_backup_platform( hass, - Mock( - async_pre_backup=AsyncMock(), - async_post_backup=AsyncMock(), + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, ), ) - await manager.async_pre_backup_actions() + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + client = await hass_client() - assert manager.loaded_platforms - assert len(manager.platforms) == 1 + upload_data = "test" + open_mock = mock_open(read_data=upload_data.encode(encoding="utf-8")) - assert "Loaded 1 platforms" in caplog.text - - -async def test_loading_platforms_when_running_async_post_backup_actions( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test loading backup platforms when running post backup actions.""" - manager = BackupManager(hass) - - assert not manager.loaded_platforms - assert not manager.platforms - - await _setup_mock_domain( - hass, - Mock( - async_pre_backup=AsyncMock(), - async_post_backup=AsyncMock(), + with ( + patch("pathlib.Path.open", open_mock), + patch("shutil.move") as move_mock, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=TEST_BACKUP_ABC123, ), - ) - await manager.async_post_backup_actions() - - assert manager.loaded_platforms - assert len(manager.platforms) == 1 - - assert "Loaded 1 platforms" in caplog.text - - -async def test_async_receive_backup( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test receiving a backup file.""" - manager = BackupManager(hass) - - size = 2 * 2**16 - protocol = Mock(_reading_paused=False) - stream = aiohttp.StreamReader(protocol, 2**16) - stream.feed_data(b"0" * size + b"\r\n--:--") - stream.feed_eof() - - open_mock = mock_open() - - with patch("pathlib.Path.open", open_mock), patch("shutil.move") as mover_mock: - await manager.async_receive_backup( - contents=aiohttp.BodyPartReader( - b"--:", - CIMultiDictProxy( - CIMultiDict( - { - 
aiohttp.hdrs.CONTENT_DISPOSITION: "attachment; filename=abc123.tar" - } - ) - ), - stream, - ) + patch("pathlib.Path.unlink") as unlink_mock, + ): + resp = await client.post( + f"/api/backup/upload?{agent_id_params}", + data={"file": StringIO(upload_data)}, ) - assert open_mock.call_count == 1 - assert mover_mock.call_count == 1 - assert mover_mock.mock_calls[0].args[1].name == "abc123.tar" + await hass.async_block_till_done() + + assert resp.status == 201 + assert open_mock.call_count == open_call_count + assert move_mock.call_count == move_call_count + for index, name in enumerate(move_path_names): + assert move_mock.call_args_list[index].args[1].name == name + assert remote_agent._backups == remote_agent_backups + assert remote_agent._backup_data == remote_agent_backup_data + assert unlink_mock.call_count == temp_file_unlink_call_count +@pytest.mark.usefixtures("mock_backup_generation") +async def test_receive_backup_busy_manager( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test receive backup with a busy manager.""" + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + client = await hass_client() + ws_client = await hass_ws_client(hass) + + upload_data = "test" + + await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) + result = await ws_client.receive_json() + assert result["event"] == {"manager_state": "idle"} + + result = await ws_client.receive_json() + assert result["success"] is True + + new_backup = NewBackup(backup_job_id="time-123") + backup_task: asyncio.Future[WrittenBackup] = asyncio.Future() + with patch( + "homeassistant.components.backup.manager.CoreBackupReaderWriter.async_create_backup", + return_value=(new_backup, backup_task), + ) as create_backup: + await ws_client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": ["backup.local"]} + ) + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "in_progress", + } + result = await ws_client.receive_json() + assert result["success"] is True + assert result["result"] == {"backup_job_id": "time-123"} + + assert create_backup.call_count == 1 + + resp = await client.post( + "/api/backup/upload?agent_id=backup.local", + data={"file": StringIO(upload_data)}, + ) + + assert resp.status == 500 + assert ( + await resp.text() + == "Can't upload backup file: Backup manager busy: create_backup" + ) + + # finish the backup + backup_task.set_result( + WrittenBackup( + backup=TEST_BACKUP_ABC123, + open_stream=AsyncMock(), + release_stream=AsyncMock(), + ) + ) + await hass.async_block_till_done() + + +@pytest.mark.parametrize( + ("agent_id", "password", "restore_database", "restore_homeassistant", "dir"), + [ + (LOCAL_AGENT_ID, None, True, False, "backups"), + (LOCAL_AGENT_ID, "abc123", False, True, "backups"), + ("test.remote", None, True, True, "tmp_backups"), + ], +) async def test_async_trigger_restore( hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, + agent_id: str, + password: str | None, + restore_database: bool, + restore_homeassistant: bool, + dir: str, ) -> None: """Test trigger restore.""" - manager = BackupManager(hass) - manager.loaded_backups = True - manager.backups = {TEST_BACKUP.slug: TEST_BACKUP} + manager = BackupManager(hass, CoreBackupReaderWriter(hass)) + hass.data[DATA_MANAGER] = manager + + await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform) + await 
_setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock( + return_value=[BackupAgentTest("remote", backups=[TEST_BACKUP_ABC123])] + ), + spec_set=BackupAgentPlatformProtocol, + ), + ) + await manager.load_platforms() + + local_agent = manager.backup_agents[LOCAL_AGENT_ID] + local_agent._backups = {TEST_BACKUP_ABC123.backup_id: TEST_BACKUP_ABC123} + local_agent._loaded_backups = True with ( patch("pathlib.Path.exists", return_value=True), + patch("pathlib.Path.open"), patch("pathlib.Path.write_text") as mocked_write_text, patch("homeassistant.core.ServiceRegistry.async_call") as mocked_service_call, + patch.object(BackupAgentTest, "async_download_backup") as download_mock, ): - await manager.async_restore_backup(TEST_BACKUP.slug) - assert mocked_write_text.call_args[0][0] == '{"path": "abc123.tar"}' + download_mock.return_value.__aiter__.return_value = iter((b"backup data",)) + await manager.async_restore_backup( + TEST_BACKUP_ABC123.backup_id, + agent_id=agent_id, + password=password, + restore_addons=None, + restore_database=restore_database, + restore_folders=None, + restore_homeassistant=restore_homeassistant, + ) + expected_restore_file = json.dumps( + { + "path": f"{hass.config.path()}/{dir}/abc123.tar", + "password": password, + "remove_after_restore": agent_id != LOCAL_AGENT_ID, + "restore_database": restore_database, + "restore_homeassistant": restore_homeassistant, + } + ) + assert mocked_write_text.call_args[0][0] == expected_restore_file assert mocked_service_call.called -async def test_async_trigger_restore_missing_backup(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("parameters", "expected_error"), + [ + ( + {"backup_id": TEST_BACKUP_DEF456.backup_id}, + "Backup def456 not found", + ), + ( + {"restore_addons": ["blah"]}, + "Addons and folders are not supported in core restore", + ), + ( + {"restore_folders": [Folder.ADDONS]}, + "Addons and folders are not supported in core restore", + ), + ( + {"restore_database": False, "restore_homeassistant": False}, + "Home Assistant or database must be included in restore", + ), + ], +) +async def test_async_trigger_restore_wrong_parameters( + hass: HomeAssistant, parameters: dict[str, Any], expected_error: str +) -> None: """Test trigger restore.""" - manager = BackupManager(hass) - manager.loaded_backups = True + manager = BackupManager(hass, CoreBackupReaderWriter(hass)) - with pytest.raises(HomeAssistantError, match="Backup abc123 not found"): - await manager.async_restore_backup(TEST_BACKUP.slug) + await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform) + await manager.load_platforms() + + local_agent = manager.backup_agents[LOCAL_AGENT_ID] + local_agent._backups = {TEST_BACKUP_ABC123.backup_id: TEST_BACKUP_ABC123} + local_agent._loaded_backups = True + + default_parameters = { + "agent_id": LOCAL_AGENT_ID, + "backup_id": TEST_BACKUP_ABC123.backup_id, + "password": None, + "restore_addons": None, + "restore_database": True, + "restore_folders": None, + "restore_homeassistant": True, + } + + with ( + patch("pathlib.Path.exists", return_value=True), + pytest.raises(HomeAssistantError, match=expected_error), + ): + await manager.async_restore_backup(**(default_parameters | parameters)) diff --git a/tests/components/backup/test_models.py b/tests/components/backup/test_models.py new file mode 100644 index 00000000000..6a547f40dc3 --- /dev/null +++ b/tests/components/backup/test_models.py @@ -0,0 +1,11 @@ +"""Tests for the Backup integration.""" + +from 
homeassistant.components.backup import AgentBackup + +from .common import TEST_BACKUP_ABC123 + + +async def test_agent_backup_serialization() -> None: + """Test AgentBackup serialization.""" + + assert AgentBackup.from_dict(TEST_BACKUP_ABC123.as_dict()) == TEST_BACKUP_ABC123 diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index 125ba8adaad..9df93ee9c46 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -1,18 +1,74 @@ """Tests for the Backup integration.""" -from unittest.mock import patch +from asyncio import Future +from collections.abc import Generator +from datetime import datetime +from typing import Any +from unittest.mock import ANY, AsyncMock, MagicMock, call, patch +from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion -from homeassistant.components.backup.manager import Backup +from homeassistant.components.backup import AgentBackup, BackupAgentError +from homeassistant.components.backup.agent import BackupAgentUnreachableError +from homeassistant.components.backup.const import DATA_MANAGER, DOMAIN +from homeassistant.components.backup.manager import ( + CreateBackupEvent, + CreateBackupState, + NewBackup, + WrittenBackup, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from .common import TEST_BACKUP, setup_backup_integration +from .common import ( + LOCAL_AGENT_ID, + TEST_BACKUP_ABC123, + TEST_BACKUP_DEF456, + BackupAgentTest, + setup_backup_integration, +) +from tests.common import async_fire_time_changed, async_mock_service from tests.typing import WebSocketGenerator +BACKUP_CALL = call( + agent_ids=["test.test-agent"], + backup_name="test-name", + include_addons=["test-addon"], + include_all_addons=False, + include_database=True, + include_folders=["media"], + include_homeassistant=True, + password="test-password", + on_progress=ANY, +) + +DEFAULT_STORAGE_DATA = { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": [], + "include_addons": None, + "include_all_addons": False, + "include_database": True, + "include_folders": None, + "name": None, + "password": None, + }, + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + "retention": { + "copies": None, + "days": None, + }, + "schedule": { + "state": "never", + }, + }, +} + @pytest.fixture def sync_access_token_proxy( @@ -26,145 +82,558 @@ def sync_access_token_proxy( return request.getfixturevalue(access_token_fixture_name) +@pytest.fixture(autouse=True) +def mock_delay_save() -> Generator[None]: + """Mock the delay save constant.""" + with patch("homeassistant.components.backup.store.STORE_DELAY_SAVE", 0): + yield + + +@pytest.fixture(name="create_backup") +def mock_create_backup() -> Generator[AsyncMock]: + """Mock manager create backup.""" + mock_written_backup = MagicMock(spec_set=WrittenBackup) + mock_written_backup.backup.backup_id = "abc123" + mock_written_backup.open_stream = AsyncMock() + mock_written_backup.release_stream = AsyncMock() + fut = Future() + fut.set_result(mock_written_backup) + with patch( + "homeassistant.components.backup.CoreBackupReaderWriter.async_create_backup" + ) as mock_create_backup: + mock_create_backup.return_value = (MagicMock(), fut) + yield mock_create_backup + + +@pytest.fixture(name="delete_backup") +def mock_delete_backup() -> Generator[AsyncMock]: + """Mock manager delete backup.""" + with patch( + 
"homeassistant.components.backup.BackupManager.async_delete_backup" + ) as mock_delete_backup: + yield mock_delete_backup + + +@pytest.fixture(name="get_backups") +def mock_get_backups() -> Generator[AsyncMock]: + """Mock manager get backups.""" + with patch( + "homeassistant.components.backup.BackupManager.async_get_backups" + ) as mock_get_backups: + yield mock_get_backups + + @pytest.mark.parametrize( - "with_hassio", + ("remote_agents", "remote_backups"), [ - pytest.param(True, id="with_hassio"), - pytest.param(False, id="without_hassio"), + ([], {}), + (["remote"], {}), + (["remote"], {"test.remote": [TEST_BACKUP_ABC123]}), + (["remote"], {"test.remote": [TEST_BACKUP_DEF456]}), ], ) async def test_info( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, + remote_agents: list[str], + remote_backups: dict[str, list[AgentBackup]], snapshot: SnapshotAssertion, - with_hassio: bool, ) -> None: """Test getting backup info.""" - await setup_backup_integration(hass, with_hassio=with_hassio) + await setup_backup_integration( + hass, + with_hassio=False, + backups={LOCAL_AGENT_ID: [TEST_BACKUP_ABC123]} | remote_backups, + remote_agents=remote_agents, + ) client = await hass_ws_client(hass) await hass.async_block_till_done() - with patch( - "homeassistant.components.backup.manager.BackupManager.async_get_backups", - return_value={TEST_BACKUP.slug: TEST_BACKUP}, - ): - await client.send_json_auto_id({"type": "backup/info"}) - assert snapshot == await client.receive_json() + await client.send_json_auto_id({"type": "backup/info"}) + assert await client.receive_json() == snapshot @pytest.mark.parametrize( - "backup_content", - [ - pytest.param(TEST_BACKUP, id="with_backup_content"), - pytest.param(None, id="without_backup_content"), - ], + "side_effect", [HomeAssistantError("Boom!"), BackupAgentUnreachableError] ) +async def test_info_with_errors( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + side_effect: Exception, + snapshot: SnapshotAssertion, +) -> None: + """Test getting backup info with one unavailable agent.""" + await setup_backup_integration( + hass, with_hassio=False, backups={LOCAL_AGENT_ID: [TEST_BACKUP_ABC123]} + ) + hass.data[DATA_MANAGER].backup_agents["domain.test"] = BackupAgentTest("test") + + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + with patch.object(BackupAgentTest, "async_list_backups", side_effect=side_effect): + await client.send_json_auto_id({"type": "backup/info"}) + assert await client.receive_json() == snapshot + + @pytest.mark.parametrize( - "with_hassio", + ("remote_agents", "backups"), [ - pytest.param(True, id="with_hassio"), - pytest.param(False, id="without_hassio"), + ([], {}), + (["remote"], {LOCAL_AGENT_ID: [TEST_BACKUP_ABC123]}), + (["remote"], {"test.remote": [TEST_BACKUP_ABC123]}), + (["remote"], {"test.remote": [TEST_BACKUP_DEF456]}), + ( + ["remote"], + { + LOCAL_AGENT_ID: [TEST_BACKUP_ABC123], + "test.remote": [TEST_BACKUP_ABC123], + }, + ), ], ) async def test_details( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, + remote_agents: list[str], + backups: dict[str, list[AgentBackup]], snapshot: SnapshotAssertion, - with_hassio: bool, - backup_content: Backup | None, ) -> None: """Test getting backup info.""" - await setup_backup_integration(hass, with_hassio=with_hassio) + await setup_backup_integration( + hass, with_hassio=False, backups=backups, remote_agents=remote_agents + ) client = await hass_ws_client(hass) await hass.async_block_till_done() - with patch( - 
"homeassistant.components.backup.manager.BackupManager.async_get_backup", - return_value=backup_content, - ): - await client.send_json_auto_id({"type": "backup/details", "slug": "abc123"}) + with patch("pathlib.Path.exists", return_value=True): + await client.send_json_auto_id( + {"type": "backup/details", "backup_id": "abc123"} + ) assert await client.receive_json() == snapshot @pytest.mark.parametrize( - "with_hassio", - [ - pytest.param(True, id="with_hassio"), - pytest.param(False, id="without_hassio"), - ], + "side_effect", [HomeAssistantError("Boom!"), BackupAgentUnreachableError] ) -async def test_remove( +async def test_details_with_errors( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, + side_effect: Exception, snapshot: SnapshotAssertion, - with_hassio: bool, ) -> None: - """Test removing a backup file.""" - await setup_backup_integration(hass, with_hassio=with_hassio) + """Test getting backup info with one unavailable agent.""" + await setup_backup_integration( + hass, with_hassio=False, backups={LOCAL_AGENT_ID: [TEST_BACKUP_ABC123]} + ) + hass.data[DATA_MANAGER].backup_agents["domain.test"] = BackupAgentTest("test") client = await hass_ws_client(hass) await hass.async_block_till_done() - with patch( - "homeassistant.components.backup.manager.BackupManager.async_remove_backup", + with ( + patch("pathlib.Path.exists", return_value=True), + patch.object(BackupAgentTest, "async_get_backup", side_effect=side_effect), ): - await client.send_json_auto_id({"type": "backup/remove", "slug": "abc123"}) - assert snapshot == await client.receive_json() + await client.send_json_auto_id( + {"type": "backup/details", "backup_id": "abc123"} + ) + assert await client.receive_json() == snapshot @pytest.mark.parametrize( - "with_hassio", + ("remote_agents", "backups"), [ - pytest.param(True, id="with_hassio"), - pytest.param(False, id="without_hassio"), + ([], {}), + (["remote"], {LOCAL_AGENT_ID: [TEST_BACKUP_ABC123]}), + (["remote"], {"test.remote": [TEST_BACKUP_ABC123]}), + (["remote"], {"test.remote": [TEST_BACKUP_DEF456]}), + ( + ["remote"], + { + LOCAL_AGENT_ID: [TEST_BACKUP_ABC123], + "test.remote": [TEST_BACKUP_ABC123], + }, + ), ], ) +async def test_delete( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + remote_agents: list[str], + backups: dict[str, list[AgentBackup]], + snapshot: SnapshotAssertion, +) -> None: + """Test deleting a backup file.""" + await setup_backup_integration( + hass, with_hassio=False, backups=backups, remote_agents=remote_agents + ) + + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + await client.send_json_auto_id({"type": "backup/info"}) + assert await client.receive_json() == snapshot + + await client.send_json_auto_id({"type": "backup/delete", "backup_id": "abc123"}) + assert await client.receive_json() == snapshot + + await client.send_json_auto_id({"type": "backup/info"}) + assert await client.receive_json() == snapshot + + +@pytest.mark.parametrize( + "storage_data", + [ + DEFAULT_STORAGE_DATA, + DEFAULT_STORAGE_DATA + | { + "backups": [ + { + "backup_id": "abc123", + "failed_agent_ids": ["test.remote"], + "with_strategy_settings": False, + } + ] + }, + ], +) +@pytest.mark.parametrize( + "side_effect", [None, HomeAssistantError("Boom!"), BackupAgentUnreachableError] +) +async def test_delete_with_errors( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + hass_storage: dict[str, Any], + side_effect: Exception, + storage_data: dict[str, Any] | None, + snapshot: SnapshotAssertion, +) -> None: + 
"""Test deleting a backup with one unavailable agent.""" + hass_storage[DOMAIN] = { + "data": storage_data, + "key": DOMAIN, + "version": 1, + } + await setup_backup_integration( + hass, with_hassio=False, backups={LOCAL_AGENT_ID: [TEST_BACKUP_ABC123]} + ) + hass.data[DATA_MANAGER].backup_agents["domain.test"] = BackupAgentTest("test") + + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + with patch.object(BackupAgentTest, "async_delete_backup", side_effect=side_effect): + await client.send_json_auto_id({"type": "backup/delete", "backup_id": "abc123"}) + assert await client.receive_json() == snapshot + + await client.send_json_auto_id({"type": "backup/info"}) + assert await client.receive_json() == snapshot + + +async def test_agent_delete_backup( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test deleting a backup file with a mock agent.""" + await setup_backup_integration(hass) + hass.data[DATA_MANAGER].backup_agents = {"domain.test": BackupAgentTest("test")} + + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + with patch.object(BackupAgentTest, "async_delete_backup") as delete_mock: + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": "abc123", + } + ) + assert await client.receive_json() == snapshot + + assert delete_mock.call_args == call("abc123") + + +@pytest.mark.parametrize( + "data", + [ + None, + {}, + {"password": "abc123"}, + ], +) +@pytest.mark.usefixtures("mock_backup_generation") async def test_generate( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, + data: dict[str, Any] | None, + freezer: FrozenDateTimeFactory, snapshot: SnapshotAssertion, - with_hassio: bool, ) -> None: """Test generating a backup.""" - await setup_backup_integration(hass, with_hassio=with_hassio) + await setup_backup_integration(hass, with_hassio=False) client = await hass_ws_client(hass) + freezer.move_to("2024-11-13 12:01:00+01:00") await hass.async_block_till_done() - with patch( - "homeassistant.components.backup.manager.BackupManager.async_create_backup", - return_value=TEST_BACKUP, - ): - await client.send_json_auto_id({"type": "backup/generate"}) - assert snapshot == await client.receive_json() + await client.send_json_auto_id({"type": "backup/subscribe_events"}) + assert await client.receive_json() == snapshot + await client.send_json_auto_id( + {"type": "backup/generate", **{"agent_ids": ["backup.local"]} | (data or {})} + ) + for _ in range(6): + assert await client.receive_json() == snapshot @pytest.mark.parametrize( - "with_hassio", + ("parameters", "expected_error"), [ - pytest.param(True, id="with_hassio"), - pytest.param(False, id="without_hassio"), + ( + {"include_homeassistant": False}, + "Home Assistant must be included in backup", + ), + ( + {"include_addons": ["blah"]}, + "Addons and folders are not supported by core backup", + ), + ( + {"include_all_addons": True}, + "Addons and folders are not supported by core backup", + ), + ( + {"include_folders": ["ssl"]}, + "Addons and folders are not supported by core backup", + ), ], ) -async def test_restore( +async def test_generate_wrong_parameters( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, + parameters: dict[str, Any], + expected_error: str, +) -> None: + """Test generating a backup.""" + await setup_backup_integration(hass, with_hassio=False) + + client = await hass_ws_client(hass) + + default_parameters = {"type": "backup/generate", "agent_ids": ["backup.local"]} + + 
await client.send_json_auto_id(default_parameters | parameters) + response = await client.receive_json() + assert not response["success"] + assert response["error"] == { + "code": "home_assistant_error", + "message": expected_error, + } + + +@pytest.mark.usefixtures("mock_backup_generation") +@pytest.mark.parametrize( + ("params", "expected_extra_call_params"), + [ + ({"agent_ids": ["backup.local"]}, {"agent_ids": ["backup.local"]}), + ( + { + "agent_ids": ["backup.local"], + "include_database": False, + "name": "abc123", + }, + { + "agent_ids": ["backup.local"], + "include_addons": None, + "include_database": False, + "include_folders": None, + "name": "abc123", + }, + ), + ], +) +async def test_generate_calls_create( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, + snapshot: SnapshotAssertion, + params: dict[str, Any], + expected_extra_call_params: dict[str, Any], +) -> None: + """Test translation of WS parameter to backup/generate to async_initiate_backup.""" + await setup_backup_integration(hass, with_hassio=False) + + client = await hass_ws_client(hass) + freezer.move_to("2024-11-13 12:01:00+01:00") + await hass.async_block_till_done() + + with patch( + "homeassistant.components.backup.manager.BackupManager.async_initiate_backup", + return_value=NewBackup(backup_job_id="abc123"), + ) as generate_backup: + await client.send_json_auto_id({"type": "backup/generate"} | params) + result = await client.receive_json() + assert result["success"] + assert result["result"] == {"backup_job_id": "abc123"} + generate_backup.assert_called_once_with( + **{ + "include_all_addons": False, + "include_homeassistant": True, + "include_addons": None, + "include_database": True, + "include_folders": None, + "name": None, + "password": None, + } + | expected_extra_call_params + ) + + +@pytest.mark.usefixtures("mock_backup_generation") +@pytest.mark.parametrize( + ("create_backup_settings", "expected_call_params"), + [ + ( + {}, + { + "agent_ids": [], + "include_addons": None, + "include_all_addons": False, + "include_database": True, + "include_folders": None, + "include_homeassistant": True, + "name": None, + "password": None, + "with_strategy_settings": True, + }, + ), + ( + { + "agent_ids": ["test-agent"], + "include_addons": ["test-addon"], + "include_all_addons": False, + "include_database": True, + "include_folders": ["media"], + "name": "test-name", + "password": "test-password", + }, + { + "agent_ids": ["test-agent"], + "include_addons": ["test-addon"], + "include_all_addons": False, + "include_database": True, + "include_folders": ["media"], + "include_homeassistant": True, + "name": "test-name", + "password": "test-password", + "with_strategy_settings": True, + }, + ), + ], +) +async def test_generate_with_default_settings_calls_create( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, + snapshot: SnapshotAssertion, + create_backup_settings: dict[str, Any], + expected_call_params: dict[str, Any], +) -> None: + """Test backup/generate_with_strategy_settings calls async_initiate_backup.""" + await setup_backup_integration(hass, with_hassio=False) + + client = await hass_ws_client(hass) + freezer.move_to("2024-11-13 12:01:00+01:00") + await hass.async_block_till_done() + + await client.send_json_auto_id( + {"type": "backup/config/update", "create_backup": create_backup_settings} + ) + result = await client.receive_json() + assert result["success"] + + with patch( + 
"homeassistant.components.backup.manager.BackupManager.async_initiate_backup", + return_value=NewBackup(backup_job_id="abc123"), + ) as generate_backup: + await client.send_json_auto_id( + {"type": "backup/generate_with_strategy_settings"} + ) + result = await client.receive_json() + assert result["success"] + assert result["result"] == {"backup_job_id": "abc123"} + generate_backup.assert_called_once_with(**expected_call_params) + + +@pytest.mark.parametrize( + "backups", + [ + {}, + {LOCAL_AGENT_ID: [TEST_BACKUP_ABC123]}, + ], +) +async def test_restore_local_agent( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + backups: dict[str, list[AgentBackup]], snapshot: SnapshotAssertion, - with_hassio: bool, ) -> None: """Test calling the restore command.""" - await setup_backup_integration(hass, with_hassio=with_hassio) + await setup_backup_integration(hass, with_hassio=False, backups=backups) + restart_calls = async_mock_service(hass, "homeassistant", "restart") client = await hass_ws_client(hass) await hass.async_block_till_done() - with patch( - "homeassistant.components.backup.manager.BackupManager.async_restore_backup", + with ( + patch("pathlib.Path.exists", return_value=True), + patch("pathlib.Path.write_text"), ): - await client.send_json_auto_id({"type": "backup/restore", "slug": "abc123"}) + await client.send_json_auto_id( + { + "type": "backup/restore", + "backup_id": "abc123", + "agent_id": "backup.local", + } + ) assert await client.receive_json() == snapshot + assert len(restart_calls) == snapshot + + +@pytest.mark.parametrize( + ("remote_agents", "backups"), + [ + (["remote"], {}), + (["remote"], {"test.remote": [TEST_BACKUP_ABC123]}), + ], +) +async def test_restore_remote_agent( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + remote_agents: list[str], + backups: dict[str, list[AgentBackup]], + snapshot: SnapshotAssertion, +) -> None: + """Test calling the restore command.""" + await setup_backup_integration( + hass, with_hassio=False, backups=backups, remote_agents=remote_agents + ) + restart_calls = async_mock_service(hass, "homeassistant", "restart") + + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + with patch("pathlib.Path.write_text"), patch("pathlib.Path.open"): + await client.send_json_auto_id( + { + "type": "backup/restore", + "backup_id": "abc123", + "agent_id": "test.remote", + } + ) + assert await client.receive_json() == snapshot + assert len(restart_calls) == snapshot @pytest.mark.parametrize( @@ -178,6 +647,7 @@ async def test_restore( pytest.param(False, id="without_hassio"), ], ) +@pytest.mark.usefixtures("supervisor_client") async def test_backup_end( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, @@ -197,7 +667,7 @@ async def test_backup_end( "homeassistant.components.backup.manager.BackupManager.async_post_backup_actions", ): await client.send_json_auto_id({"type": "backup/end"}) - assert snapshot == await client.receive_json() + assert await client.receive_json() == snapshot @pytest.mark.parametrize( @@ -211,6 +681,7 @@ async def test_backup_end( pytest.param(False, id="without_hassio"), ], ) +@pytest.mark.usefixtures("supervisor_client") async def test_backup_start( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, @@ -230,7 +701,7 @@ async def test_backup_start( "homeassistant.components.backup.manager.BackupManager.async_pre_backup_actions", ): await client.send_json_auto_id({"type": "backup/start"}) - assert snapshot == await client.receive_json() + assert await 
client.receive_json() == snapshot @pytest.mark.parametrize( @@ -241,7 +712,8 @@ async def test_backup_start( Exception("Boom"), ], ) -async def test_backup_end_excepion( +@pytest.mark.usefixtures("supervisor_client") +async def test_backup_end_exception( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, snapshot: SnapshotAssertion, @@ -259,7 +731,7 @@ async def test_backup_end_excepion( side_effect=exception, ): await client.send_json_auto_id({"type": "backup/end"}) - assert snapshot == await client.receive_json() + assert await client.receive_json() == snapshot @pytest.mark.parametrize( @@ -270,7 +742,8 @@ async def test_backup_end_excepion( Exception("Boom"), ], ) -async def test_backup_start_excepion( +@pytest.mark.usefixtures("supervisor_client") +async def test_backup_start_exception( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, snapshot: SnapshotAssertion, @@ -288,4 +761,993 @@ async def test_backup_start_excepion( side_effect=exception, ): await client.send_json_auto_id({"type": "backup/start"}) - assert snapshot == await client.receive_json() + assert await client.receive_json() == snapshot + + +async def test_agents_info( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test getting backup agents info.""" + await setup_backup_integration(hass, with_hassio=False) + hass.data[DATA_MANAGER].backup_agents["domain.test"] = BackupAgentTest("test") + + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + await client.send_json_auto_id({"type": "backup/agents/info"}) + assert await client.receive_json() == snapshot + + +@pytest.mark.usefixtures("create_backup", "delete_backup", "get_backups") +@pytest.mark.parametrize( + "storage_data", + [ + None, + { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": ["test-addon"], + "include_all_addons": True, + "include_database": True, + "include_folders": ["media"], + "name": "test-name", + "password": "test-password", + }, + "retention": {"copies": 3, "days": 7}, + "last_attempted_strategy_backup": datetime.fromisoformat( + "2024-10-26T04:45:00+01:00" + ), + "last_completed_strategy_backup": datetime.fromisoformat( + "2024-10-26T04:45:00+01:00" + ), + "schedule": {"state": "daily"}, + }, + }, + { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": None, + "include_all_addons": False, + "include_database": False, + "include_folders": None, + "name": None, + "password": None, + }, + "retention": {"copies": 3, "days": None}, + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + "schedule": {"state": "never"}, + }, + }, + { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": None, + "include_all_addons": False, + "include_database": False, + "include_folders": None, + "name": None, + "password": None, + }, + "retention": {"copies": None, "days": 7}, + "last_attempted_strategy_backup": datetime.fromisoformat( + "2024-10-27T04:45:00+01:00" + ), + "last_completed_strategy_backup": datetime.fromisoformat( + "2024-10-26T04:45:00+01:00" + ), + "schedule": {"state": "never"}, + }, + }, + { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": None, + "include_all_addons": False, + "include_database": False, + "include_folders": None, + "name": None, + "password": None, + }, + "retention": {"copies": None, "days": None}, + 
"last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + "schedule": {"state": "mon"}, + }, + }, + { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": None, + "include_all_addons": False, + "include_database": False, + "include_folders": None, + "name": None, + "password": None, + }, + "retention": {"copies": None, "days": None}, + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + "schedule": {"state": "sat"}, + }, + }, + ], +) +async def test_config_info( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, + hass_storage: dict[str, Any], + storage_data: dict[str, Any] | None, +) -> None: + """Test getting backup config info.""" + hass_storage[DOMAIN] = { + "data": storage_data, + "key": DOMAIN, + "version": 1, + } + + await setup_backup_integration(hass) + await hass.async_block_till_done() + + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/config/info"}) + assert await client.receive_json() == snapshot + + +@pytest.mark.usefixtures("create_backup", "delete_backup", "get_backups") +@pytest.mark.parametrize( + "command", + [ + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 7}, + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "schedule": "daily", + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "schedule": "mon", + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "schedule": "never", + }, + { + "type": "backup/config/update", + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": ["test-addon"], + "include_folders": ["media"], + "name": "test-name", + "password": "test-password", + }, + "schedule": "daily", + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": 3, "days": 7}, + "schedule": "daily", + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": None}, + "schedule": "daily", + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": 3, "days": None}, + "schedule": "daily", + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 7}, + "schedule": "daily", + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": 3}, + "schedule": "daily", + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"days": 7}, + "schedule": "daily", + }, + ], +) +async def test_config_update( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, + command: dict[str, Any], + hass_storage: dict[str, Any], +) -> None: + """Test updating the backup config.""" + await setup_backup_integration(hass) + await hass.async_block_till_done() + + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/config/info"}) + assert await client.receive_json() == snapshot + + await client.send_json_auto_id(command) + result = await client.receive_json() + + assert result["success"] + + await client.send_json_auto_id({"type": "backup/config/info"}) + 
assert await client.receive_json() == snapshot + await hass.async_block_till_done() + + assert hass_storage[DOMAIN] == snapshot + + +@pytest.mark.usefixtures("create_backup", "delete_backup", "get_backups") +@pytest.mark.parametrize( + "command", + [ + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "schedule": "someday", + }, + ], +) +async def test_config_update_errors( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, + command: dict[str, Any], +) -> None: + """Test errors when updating the backup config.""" + await setup_backup_integration(hass) + await hass.async_block_till_done() + + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/config/info"}) + assert await client.receive_json() == snapshot + + await client.send_json_auto_id(command) + result = await client.receive_json() + + assert not result["success"] + + await client.send_json_auto_id({"type": "backup/config/info"}) + assert await client.receive_json() == snapshot + await hass.async_block_till_done() + + +@pytest.mark.parametrize( + ( + "command", + "last_completed_strategy_backup", + "time_1", + "time_2", + "attempted_backup_time", + "completed_backup_time", + "backup_calls_1", + "backup_calls_2", + "call_args", + "create_backup_side_effect", + ), + [ + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "daily", + }, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-13T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 2, + BACKUP_CALL, + None, + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "mon", + }, + "2024-11-11T04:45:00+01:00", + "2024-11-18T04:45:00+01:00", + "2024-11-25T04:45:00+01:00", + "2024-11-18T04:45:00+01:00", + "2024-11-18T04:45:00+01:00", + 1, + 2, + BACKUP_CALL, + None, + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "never", + }, + "2024-11-11T04:45:00+01:00", + "2034-11-11T12:00:00+01:00", # ten years later and still no backups + "2034-11-11T13:00:00+01:00", + "2024-11-11T04:45:00+01:00", + "2024-11-11T04:45:00+01:00", + 0, + 0, + None, + None, + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "daily", + }, + "2024-10-26T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-13T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 2, + BACKUP_CALL, + None, + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "mon", + }, + "2024-10-26T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-13T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", # missed event uses daily schedule once + "2024-11-12T04:45:00+01:00", # missed event uses daily schedule once + 1, + 1, + BACKUP_CALL, + None, + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "never", + }, + "2024-10-26T04:45:00+01:00", + "2034-11-11T12:00:00+01:00", # ten years later and still no backups + "2034-11-12T12:00:00+01:00", + "2024-10-26T04:45:00+01:00", + "2024-10-26T04:45:00+01:00", + 0, + 0, + None, + None, + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "daily", + }, + 
"2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-13T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", # attempted to create backup but failed + "2024-11-11T04:45:00+01:00", + 1, + 2, + BACKUP_CALL, + [Exception("Boom"), None], + ), + ], +) +async def test_config_schedule_logic( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, + hass_storage: dict[str, Any], + create_backup: AsyncMock, + command: dict[str, Any], + last_completed_strategy_backup: str, + time_1: str, + time_2: str, + attempted_backup_time: str, + completed_backup_time: str, + backup_calls_1: int, + backup_calls_2: int, + call_args: Any, + create_backup_side_effect: list[Exception | None] | None, +) -> None: + """Test config schedule logic.""" + client = await hass_ws_client(hass) + storage_data = { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": ["test-addon"], + "include_all_addons": False, + "include_database": True, + "include_folders": ["media"], + "name": "test-name", + "password": "test-password", + }, + "retention": {"copies": None, "days": None}, + "last_attempted_strategy_backup": datetime.fromisoformat( + last_completed_strategy_backup + ), + "last_completed_strategy_backup": datetime.fromisoformat( + last_completed_strategy_backup + ), + "schedule": {"state": "daily"}, + }, + } + hass_storage[DOMAIN] = { + "data": storage_data, + "key": DOMAIN, + "version": 1, + } + create_backup.side_effect = create_backup_side_effect + await hass.config.async_set_time_zone("Europe/Amsterdam") + freezer.move_to("2024-11-11 12:00:00+01:00") + + await setup_backup_integration(hass, remote_agents=["test-agent"]) + await hass.async_block_till_done() + + await client.send_json_auto_id(command) + result = await client.receive_json() + + assert result["success"] + + freezer.move_to(time_1) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert create_backup.call_count == backup_calls_1 + assert create_backup.call_args == call_args + async_fire_time_changed(hass, fire_all=True) # flush out storage save + await hass.async_block_till_done() + assert ( + hass_storage[DOMAIN]["data"]["config"]["last_attempted_strategy_backup"] + == attempted_backup_time + ) + assert ( + hass_storage[DOMAIN]["data"]["config"]["last_completed_strategy_backup"] + == completed_backup_time + ) + + freezer.move_to(time_2) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert create_backup.call_count == backup_calls_2 + assert create_backup.call_args == call_args + + +@pytest.mark.parametrize( + ( + "command", + "backups", + "get_backups_agent_errors", + "delete_backup_agent_errors", + "last_backup_time", + "next_time", + "backup_time", + "backup_calls", + "get_backups_calls", + "delete_calls", + "delete_args_list", + ), + [ + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": None, "days": None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), + "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-3": MagicMock(date="2024-11-12T04:45:00+01:00"), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, # we get backups even if backup retention copies is None + 0, + [], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 3, "days": 
None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), + "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-3": MagicMock(date="2024-11-12T04:45:00+01:00"), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, + 0, + [], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 3, "days": None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock(date="2024-11-09T04:45:00+01:00"), + "backup-2": MagicMock(date="2024-11-10T04:45:00+01:00"), + "backup-3": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-4": MagicMock(date="2024-11-12T04:45:00+01:00"), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, + 1, + [call("backup-1")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 2, "days": None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock(date="2024-11-09T04:45:00+01:00"), + "backup-2": MagicMock(date="2024-11-10T04:45:00+01:00"), + "backup-3": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-4": MagicMock(date="2024-11-12T04:45:00+01:00"), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, + 2, + [call("backup-1"), call("backup-2")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 2, "days": None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), + "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-3": MagicMock(date="2024-11-12T04:45:00+01:00"), + }, + {"test-agent": BackupAgentError("Boom!")}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, + 1, + [call("backup-1")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 2, "days": None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), + "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-3": MagicMock(date="2024-11-12T04:45:00+01:00"), + }, + {}, + {"test-agent": BackupAgentError("Boom!")}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, + 1, + [call("backup-1")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 0, "days": None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock(date="2024-11-09T04:45:00+01:00"), + "backup-2": MagicMock(date="2024-11-10T04:45:00+01:00"), + "backup-3": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-4": MagicMock(date="2024-11-12T04:45:00+01:00"), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, + 3, + [call("backup-1"), call("backup-2"), call("backup-3")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 0, "days": None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock(date="2024-11-12T04:45:00+01:00"), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, + 0, + [], + ), + ], +) +async 
def test_config_retention_copies_logic( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, + hass_storage: dict[str, Any], + create_backup: AsyncMock, + delete_backup: AsyncMock, + get_backups: AsyncMock, + command: dict[str, Any], + backups: dict[str, Any], + get_backups_agent_errors: dict[str, Exception], + delete_backup_agent_errors: dict[str, Exception], + last_backup_time: str, + next_time: str, + backup_time: str, + backup_calls: int, + get_backups_calls: int, + delete_calls: int, + delete_args_list: Any, +) -> None: + """Test config backup retention copies logic.""" + client = await hass_ws_client(hass) + storage_data = { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": ["test-addon"], + "include_all_addons": False, + "include_database": True, + "include_folders": ["media"], + "name": "test-name", + "password": "test-password", + }, + "retention": {"copies": None, "days": None}, + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": datetime.fromisoformat(last_backup_time), + "schedule": {"state": "daily"}, + }, + } + hass_storage[DOMAIN] = { + "data": storage_data, + "key": DOMAIN, + "version": 1, + } + get_backups.return_value = (backups, get_backups_agent_errors) + delete_backup.return_value = delete_backup_agent_errors + await hass.config.async_set_time_zone("Europe/Amsterdam") + freezer.move_to("2024-11-11 12:00:00+01:00") + + await setup_backup_integration(hass, remote_agents=["test-agent"]) + await hass.async_block_till_done() + + await client.send_json_auto_id(command) + result = await client.receive_json() + + assert result["success"] + + freezer.move_to(next_time) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert create_backup.call_count == backup_calls + assert get_backups.call_count == get_backups_calls + assert delete_backup.call_count == delete_calls + assert delete_backup.call_args_list == delete_args_list + async_fire_time_changed(hass, fire_all=True) # flush out storage save + await hass.async_block_till_done() + assert ( + hass_storage[DOMAIN]["data"]["config"]["last_attempted_strategy_backup"] + == backup_time + ) + assert ( + hass_storage[DOMAIN]["data"]["config"]["last_completed_strategy_backup"] + == backup_time + ) + + +@pytest.mark.parametrize( + ( + "command", + "backups", + "get_backups_agent_errors", + "delete_backup_agent_errors", + "last_backup_time", + "start_time", + "next_time", + "get_backups_calls", + "delete_calls", + "delete_args_list", + ), + [ + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 2}, + "schedule": "never", + }, + { + "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), + "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-11T12:00:00+01:00", + "2024-11-12T12:00:00+01:00", + 1, + 1, + [call("backup-1")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 3}, + "schedule": "never", + }, + { + "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), + "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-11T12:00:00+01:00", + "2024-11-12T12:00:00+01:00", + 1, + 0, + [], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, 
"days": 2}, + "schedule": "never", + }, + { + "backup-1": MagicMock(date="2024-11-09T04:45:00+01:00"), + "backup-2": MagicMock(date="2024-11-10T04:45:00+01:00"), + "backup-3": MagicMock(date="2024-11-11T04:45:00+01:00"), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-11T12:00:00+01:00", + "2024-11-12T12:00:00+01:00", + 1, + 2, + [call("backup-1"), call("backup-2")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 2}, + "schedule": "never", + }, + { + "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), + "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), + }, + {"test-agent": BackupAgentError("Boom!")}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-11T12:00:00+01:00", + "2024-11-12T12:00:00+01:00", + 1, + 1, + [call("backup-1")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 2}, + "schedule": "never", + }, + { + "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), + "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), + }, + {}, + {"test-agent": BackupAgentError("Boom!")}, + "2024-11-11T04:45:00+01:00", + "2024-11-11T12:00:00+01:00", + "2024-11-12T12:00:00+01:00", + 1, + 1, + [call("backup-1")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 0}, + "schedule": "never", + }, + { + "backup-1": MagicMock(date="2024-11-09T04:45:00+01:00"), + "backup-2": MagicMock(date="2024-11-10T04:45:00+01:00"), + "backup-3": MagicMock(date="2024-11-11T04:45:00+01:00"), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-11T12:00:00+01:00", + "2024-11-12T12:00:00+01:00", + 1, + 2, + [call("backup-1"), call("backup-2")], + ), + ], +) +async def test_config_retention_days_logic( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, + hass_storage: dict[str, Any], + delete_backup: AsyncMock, + get_backups: AsyncMock, + command: dict[str, Any], + backups: dict[str, Any], + get_backups_agent_errors: dict[str, Exception], + delete_backup_agent_errors: dict[str, Exception], + last_backup_time: str, + start_time: str, + next_time: str, + get_backups_calls: int, + delete_calls: int, + delete_args_list: list[Any], +) -> None: + """Test config backup retention logic.""" + client = await hass_ws_client(hass) + storage_data = { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": ["test-addon"], + "include_all_addons": False, + "include_database": True, + "include_folders": ["media"], + "name": "test-name", + "password": "test-password", + }, + "retention": {"copies": None, "days": None}, + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": datetime.fromisoformat(last_backup_time), + "schedule": {"state": "never"}, + }, + } + hass_storage[DOMAIN] = { + "data": storage_data, + "key": DOMAIN, + "version": 1, + } + get_backups.return_value = (backups, get_backups_agent_errors) + delete_backup.return_value = delete_backup_agent_errors + await hass.config.async_set_time_zone("Europe/Amsterdam") + freezer.move_to(start_time) + + await setup_backup_integration(hass) + await hass.async_block_till_done() + + await client.send_json_auto_id(command) + result = await client.receive_json() + + assert result["success"] + + freezer.move_to(next_time) + async_fire_time_changed(hass) + await hass.async_block_till_done() + 
assert get_backups.call_count == get_backups_calls + assert delete_backup.call_count == delete_calls + assert delete_backup.call_args_list == delete_args_list + async_fire_time_changed(hass, fire_all=True) # flush out storage save + await hass.async_block_till_done() + + +async def test_subscribe_event( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test generating a backup.""" + await setup_backup_integration(hass, with_hassio=False) + + manager = hass.data[DATA_MANAGER] + + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/subscribe_events"}) + assert await client.receive_json() == snapshot + assert await client.receive_json() == snapshot + + manager.async_on_backup_event( + CreateBackupEvent(stage=None, state=CreateBackupState.IN_PROGRESS) + ) + assert await client.receive_json() == snapshot diff --git a/tests/components/cloud/test_backup.py b/tests/components/cloud/test_backup.py new file mode 100644 index 00000000000..16b446c7a2b --- /dev/null +++ b/tests/components/cloud/test_backup.py @@ -0,0 +1,568 @@ +"""Test the cloud backup platform.""" + +from collections.abc import AsyncGenerator, AsyncIterator, Generator +from io import StringIO +from typing import Any +from unittest.mock import Mock, PropertyMock, patch + +from aiohttp import ClientError +from hass_nabucasa import CloudError +import pytest +from yarl import URL + +from homeassistant.components.backup import ( + DOMAIN as BACKUP_DOMAIN, + AddonInfo, + AgentBackup, + Folder, +) +from homeassistant.components.cloud import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.test_util.aiohttp import AiohttpClientMocker +from tests.typing import ClientSessionGenerator, MagicMock, WebSocketGenerator + + +@pytest.fixture(autouse=True) +async def setup_integration( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, cloud: MagicMock +) -> AsyncGenerator[None]: + """Set up cloud integration.""" + with patch("homeassistant.components.backup.is_hassio", return_value=False): + assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}}) + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) + await hass.async_block_till_done() + yield + + +@pytest.fixture +def mock_delete_file() -> Generator[MagicMock]: + """Mock list files.""" + with patch( + "homeassistant.components.cloud.backup.async_files_delete_file", + spec_set=True, + ) as delete_file: + yield delete_file + + +@pytest.fixture +def mock_get_download_details() -> Generator[MagicMock]: + """Mock list files.""" + with patch( + "homeassistant.components.cloud.backup.async_files_download_details", + spec_set=True, + ) as download_details: + download_details.return_value = { + "url": ( + "https://blabla.cloudflarestorage.com/blabla/backup/" + "462e16810d6841228828d9dd2f9e341e.tar?X-Amz-Algorithm=blah" + ), + } + yield download_details + + +@pytest.fixture +def mock_get_upload_details() -> Generator[MagicMock]: + """Mock list files.""" + with patch( + "homeassistant.components.cloud.backup.async_files_upload_details", + spec_set=True, + ) as download_details: + download_details.return_value = { + "url": ( + "https://blabla.cloudflarestorage.com/blabla/backup/" + "ea5c969e492c49df89d432a1483b8dc3.tar?X-Amz-Algorithm=blah" + ), + "headers": { + "content-md5": "HOhSM3WZkpHRYGiz4YRGIQ==", + "x-amz-meta-storage-type": "backup", + "x-amz-meta-b64json": ( + 
"eyJhZGRvbnMiOltdLCJiYWNrdXBfaWQiOiJjNDNiNWU2MCIsImRhdGUiOiIyMDI0LT" + "EyLTAzVDA0OjI1OjUwLjMyMDcwMy0wNTowMCIsImRhdGFiYXNlX2luY2x1ZGVkIjpm" + "YWxzZSwiZm9sZGVycyI6W10sImhvbWVhc3Npc3RhbnRfaW5jbHVkZWQiOnRydWUsIm" + "hvbWVhc3Npc3RhbnRfdmVyc2lvbiI6IjIwMjQuMTIuMC5kZXYwIiwibmFtZSI6ImVy" + "aWsiLCJwcm90ZWN0ZWQiOnRydWUsInNpemUiOjM1NjI0OTYwfQ==" + ), + }, + } + yield download_details + + +@pytest.fixture +def mock_list_files() -> Generator[MagicMock]: + """Mock list files.""" + with patch( + "homeassistant.components.cloud.backup.async_files_list", spec_set=True + ) as list_files: + list_files.return_value = [ + { + "Key": "462e16810d6841228828d9dd2f9e341e.tar", + "LastModified": "2024-11-22T10:49:01.182Z", + "Size": 34519040, + "Metadata": { + "addons": [], + "backup_id": "23e64aec", + "date": "2024-11-22T11:48:48.727189+01:00", + "database_included": True, + "folders": [], + "homeassistant_included": True, + "homeassistant_version": "2024.12.0.dev0", + "name": "Core 2024.12.0.dev0", + "protected": False, + "size": 34519040, + "storage-type": "backup", + }, + } + ] + yield list_files + + +@pytest.fixture +def cloud_logged_in(cloud: MagicMock): + """Mock cloud logged in.""" + type(cloud).is_logged_in = PropertyMock(return_value=True) + + +async def test_agents_info( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test backup agent info.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + "agents": [{"agent_id": "backup.local"}, {"agent_id": "cloud.cloud"}], + } + + +async def test_agents_list_backups( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + cloud: MagicMock, + mock_list_files: Mock, +) -> None: + """Test agent list backups.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id({"type": "backup/info"}) + response = await client.receive_json() + mock_list_files.assert_called_once_with(cloud, storage_type="backup") + + assert response["success"] + assert response["result"]["agent_errors"] == {} + assert response["result"]["backups"] == [ + { + "addons": [], + "backup_id": "23e64aec", + "date": "2024-11-22T11:48:48.727189+01:00", + "database_included": True, + "folders": [], + "homeassistant_included": True, + "homeassistant_version": "2024.12.0.dev0", + "name": "Core 2024.12.0.dev0", + "protected": False, + "size": 34519040, + "agent_ids": ["cloud.cloud"], + "failed_agent_ids": [], + "with_strategy_settings": False, + } + ] + + +@pytest.mark.parametrize("side_effect", [ClientError, CloudError]) +async def test_agents_list_backups_fail_cloud( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + cloud: MagicMock, + mock_list_files: Mock, + side_effect: Exception, +) -> None: + """Test agent list backups.""" + client = await hass_ws_client(hass) + mock_list_files.side_effect = side_effect + + await client.send_json_auto_id({"type": "backup/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + "agent_errors": {"cloud.cloud": "Failed to list backups"}, + "backups": [], + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + } + + +@pytest.mark.parametrize( + ("backup_id", "expected_result"), + [ + ( + "23e64aec", + { + "addons": [], + "backup_id": "23e64aec", + "date": "2024-11-22T11:48:48.727189+01:00", + "database_included": True, + "folders": [], + 
"homeassistant_included": True, + "homeassistant_version": "2024.12.0.dev0", + "name": "Core 2024.12.0.dev0", + "protected": False, + "size": 34519040, + "agent_ids": ["cloud.cloud"], + "failed_agent_ids": [], + "with_strategy_settings": False, + }, + ), + ( + "12345", + None, + ), + ], + ids=["found", "not_found"], +) +async def test_agents_get_backup( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + cloud: MagicMock, + backup_id: str, + expected_result: dict[str, Any] | None, + mock_list_files: Mock, +) -> None: + """Test agent get backup.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id({"type": "backup/details", "backup_id": backup_id}) + response = await client.receive_json() + mock_list_files.assert_called_once_with(cloud, storage_type="backup") + + assert response["success"] + assert response["result"]["agent_errors"] == {} + assert response["result"]["backup"] == expected_result + + +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_download( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + mock_get_download_details: Mock, +) -> None: + """Test agent download backup.""" + client = await hass_client() + backup_id = "23e64aec" + + aioclient_mock.get( + mock_get_download_details.return_value["url"], content=b"backup data" + ) + + resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=cloud.cloud") + assert resp.status == 200 + assert await resp.content.read() == b"backup data" + + +@pytest.mark.parametrize("side_effect", [ClientError, CloudError]) +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_download_fail_cloud( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_get_download_details: Mock, + side_effect: Exception, +) -> None: + """Test agent download backup, when cloud user is logged in.""" + client = await hass_client() + backup_id = "23e64aec" + mock_get_download_details.side_effect = side_effect + + resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=cloud.cloud") + assert resp.status == 500 + content = await resp.content.read() + assert "Failed to get download details" in content.decode() + + +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_download_fail_get( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + mock_get_download_details: Mock, +) -> None: + """Test agent download backup, when cloud user is logged in.""" + client = await hass_client() + backup_id = "23e64aec" + + aioclient_mock.get(mock_get_download_details.return_value["url"], status=500) + + resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=cloud.cloud") + assert resp.status == 500 + content = await resp.content.read() + assert "Failed to download backup" in content.decode() + + +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_download_not_found( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, +) -> None: + """Test agent download backup raises error if not found.""" + client = await hass_client() + backup_id = "1234" + + resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=cloud.cloud") + assert resp.status == 404 + assert await resp.content.read() == b"" + + +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_upload( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + caplog: 
pytest.LogCaptureFixture, + aioclient_mock: AiohttpClientMocker, + mock_get_upload_details: Mock, +) -> None: + """Test agent upload backup.""" + client = await hass_client() + backup_id = "test-backup" + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=True, + size=0.0, + ) + aioclient_mock.put(mock_get_upload_details.return_value["url"]) + + with ( + patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + ) as fetch_backup, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + patch("pathlib.Path.open") as mocked_open, + ): + mocked_open.return_value.read = Mock(side_effect=[b"test", b""]) + fetch_backup.return_value = test_backup + resp = await client.post( + "/api/backup/upload?agent_id=cloud.cloud", + data={"file": StringIO("test")}, + ) + + assert len(aioclient_mock.mock_calls) == 1 + assert aioclient_mock.mock_calls[-1][0] == "PUT" + assert aioclient_mock.mock_calls[-1][1] == URL( + mock_get_upload_details.return_value["url"] + ) + assert isinstance(aioclient_mock.mock_calls[-1][2], AsyncIterator) + + assert resp.status == 201 + assert f"Uploading backup {backup_id}" in caplog.text + + +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_upload_fail_put( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + caplog: pytest.LogCaptureFixture, + aioclient_mock: AiohttpClientMocker, + mock_get_upload_details: Mock, +) -> None: + """Test agent upload backup fails.""" + client = await hass_client() + backup_id = "test-backup" + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=True, + size=0.0, + ) + aioclient_mock.put(mock_get_upload_details.return_value["url"], status=500) + + with ( + patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + ) as fetch_backup, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + patch("pathlib.Path.open") as mocked_open, + ): + mocked_open.return_value.read = Mock(side_effect=[b"test", b""]) + fetch_backup.return_value = test_backup + resp = await client.post( + "/api/backup/upload?agent_id=cloud.cloud", + data={"file": StringIO("test")}, + ) + + assert resp.status == 201 + assert "Error during backup upload - Failed to upload backup" in caplog.text + + +@pytest.mark.parametrize("side_effect", [ClientError, CloudError]) +@pytest.mark.usefixtures("cloud_logged_in") +async def test_agents_upload_fail_cloud( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_get_upload_details: Mock, + side_effect: Exception, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test agent upload backup, when cloud user is logged in.""" + client = await hass_client() + backup_id = "test-backup" + mock_get_upload_details.side_effect = side_effect + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + folders=[Folder.MEDIA, 
Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=True, + size=0.0, + ) + with ( + patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + ) as fetch_backup, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + patch("pathlib.Path.open") as mocked_open, + ): + mocked_open.return_value.read = Mock(side_effect=[b"test", b""]) + fetch_backup.return_value = test_backup + resp = await client.post( + "/api/backup/upload?agent_id=cloud.cloud", + data={"file": StringIO("test")}, + ) + + assert resp.status == 201 + assert "Error during backup upload - Failed to get upload details" in caplog.text + + +async def test_agents_upload_not_protected( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test agent upload backup, when cloud user is logged in.""" + client = await hass_client() + backup_id = "test-backup" + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=False, + size=0.0, + ) + with ( + patch("pathlib.Path.open"), + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + ): + resp = await client.post( + "/api/backup/upload?agent_id=cloud.cloud", + data={"file": StringIO("test")}, + ) + + assert resp.status == 201 + assert "Error during backup upload - Cloud backups must be protected" in caplog.text + + +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_delete( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + mock_delete_file: Mock, +) -> None: + """Test agent delete backup.""" + client = await hass_ws_client(hass) + backup_id = "23e64aec" + + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": backup_id, + } + ) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == {"agent_errors": {}} + mock_delete_file.assert_called_once() + + +@pytest.mark.parametrize("side_effect", [ClientError, CloudError]) +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_delete_fail_cloud( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + mock_delete_file: Mock, + side_effect: Exception, +) -> None: + """Test agent delete backup.""" + client = await hass_ws_client(hass) + backup_id = "23e64aec" + mock_delete_file.side_effect = side_effect + + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": backup_id, + } + ) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + "agent_errors": {"cloud.cloud": "Failed to delete backup"} + } + + +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_delete_not_found( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test agent download backup raises error if not found.""" + client = await hass_ws_client(hass) + backup_id = "1234" + + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": backup_id, + } + ) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == {"agent_errors": {"cloud.cloud": "Backup not 
found"}} diff --git a/tests/components/conftest.py b/tests/components/conftest.py index 97b1d337e82..71c3b14050d 100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -533,6 +533,10 @@ def supervisor_client() -> Generator[AsyncMock]: "homeassistant.components.hassio.addon_manager.get_supervisor_client", return_value=supervisor_client, ), + patch( + "homeassistant.components.hassio.backup.get_supervisor_client", + return_value=supervisor_client, + ), patch( "homeassistant.components.hassio.discovery.get_supervisor_client", return_value=supervisor_client, diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py new file mode 100644 index 00000000000..660753bd815 --- /dev/null +++ b/tests/components/hassio/test_backup.py @@ -0,0 +1,403 @@ +"""Test supervisor backup functionality.""" + +from collections.abc import AsyncGenerator, Generator +from datetime import datetime +from io import StringIO +import os +from typing import Any +from unittest.mock import AsyncMock, patch + +from aiohasupervisor.models import backups as supervisor_backups +import pytest + +from homeassistant.components.backup import ( + DOMAIN as BACKUP_DOMAIN, + AddonInfo, + AgentBackup, + Folder, +) +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from .test_init import MOCK_ENVIRON + +from tests.typing import ClientSessionGenerator, WebSocketGenerator + +TEST_BACKUP = supervisor_backups.Backup( + compressed=False, + content=supervisor_backups.BackupContent( + addons=["ssl"], + folders=["share"], + homeassistant=True, + ), + date=datetime.fromisoformat("1970-01-01T00:00:00Z"), + location=None, + locations={None}, + name="Test", + protected=False, + size=1.0, + size_bytes=1048576, + slug="abc123", + type=supervisor_backups.BackupType.PARTIAL, +) +TEST_BACKUP_DETAILS = supervisor_backups.BackupComplete( + addons=[ + supervisor_backups.BackupAddon( + name="Terminal & SSH", + size=0.0, + slug="core_ssh", + version="9.14.0", + ) + ], + compressed=TEST_BACKUP.compressed, + date=TEST_BACKUP.date, + extra=None, + folders=["share"], + homeassistant_exclude_database=False, + homeassistant="2024.12.0", + location=TEST_BACKUP.location, + locations=TEST_BACKUP.locations, + name=TEST_BACKUP.name, + protected=TEST_BACKUP.protected, + repositories=[], + size=TEST_BACKUP.size, + size_bytes=TEST_BACKUP.size_bytes, + slug=TEST_BACKUP.slug, + supervisor_version="2024.11.2", + type=TEST_BACKUP.type, +) + + +@pytest.fixture(autouse=True) +def fixture_supervisor_environ() -> Generator[None]: + """Mock os environ for supervisor.""" + with patch.dict(os.environ, MOCK_ENVIRON): + yield + + +@pytest.fixture(autouse=True) +async def setup_integration( + hass: HomeAssistant, supervisor_client: AsyncMock +) -> AsyncGenerator[None]: + """Set up Backup integration.""" + with ( + patch("homeassistant.components.backup.is_hassio", return_value=True), + patch("homeassistant.components.backup.backup.is_hassio", return_value=True), + ): + assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}}) + await hass.async_block_till_done() + yield + + +@pytest.mark.usefixtures("hassio_client") +async def test_agent_info( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test backup agent info.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + 
"agents": [{"agent_id": "hassio.local"}], + } + + +@pytest.mark.usefixtures("hassio_client") +async def test_agent_list_backups( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test agent list backups.""" + client = await hass_ws_client(hass) + supervisor_client.backups.list.return_value = [TEST_BACKUP] + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + + await client.send_json_auto_id({"type": "backup/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"]["backups"] == [ + { + "addons": [ + {"name": "Terminal & SSH", "slug": "core_ssh", "version": "9.14.0"} + ], + "agent_ids": ["hassio.local"], + "backup_id": "abc123", + "database_included": True, + "date": "1970-01-01T00:00:00+00:00", + "failed_agent_ids": [], + "folders": ["share"], + "homeassistant_included": True, + "homeassistant_version": "2024.12.0", + "name": "Test", + "protected": False, + "size": 1048576, + "with_strategy_settings": False, + } + ] + + +@pytest.mark.usefixtures("hassio_client") +async def test_agent_download( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test agent download backup, when cloud user is logged in.""" + client = await hass_client() + backup_id = "abc123" + supervisor_client.backups.list.return_value = [TEST_BACKUP] + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + supervisor_client.backups.download_backup.return_value.__aiter__.return_value = ( + iter((b"backup data",)) + ) + + resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=hassio.local") + assert resp.status == 200 + assert await resp.content.read() == b"backup data" + + +@pytest.mark.usefixtures("hassio_client") +async def test_agent_upload( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test agent upload backup.""" + client = await hass_client() + backup_id = "test-backup" + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=False, + size=0.0, + ) + + supervisor_client.backups.reload.assert_not_called() + with ( + patch("pathlib.Path.mkdir"), + patch("pathlib.Path.open"), + patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + ) as fetch_backup, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + patch("shutil.copy"), + ): + fetch_backup.return_value = test_backup + resp = await client.post( + "/api/backup/upload?agent_id=hassio.local", + data={"file": StringIO("test")}, + ) + + assert resp.status == 201 + supervisor_client.backups.reload.assert_not_called() + + +@pytest.mark.usefixtures("hassio_client") +async def test_agent_delete_backup( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test agent delete backup.""" + client = await hass_ws_client(hass) + backup_id = "abc123" + + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": backup_id, + } + ) + response = await client.receive_json() + + assert response["success"] + assert 
response["result"] == {"agent_errors": {}} + supervisor_client.backups.remove_backup.assert_called_once_with(backup_id) + + +@pytest.mark.usefixtures("hassio_client") +async def test_reader_writer_create( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test generating a backup.""" + client = await hass_ws_client(hass) + supervisor_client.backups.partial_backup.return_value.job_id = "abc123" + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + + await client.send_json_auto_id({"type": "backup/subscribe_events"}) + response = await client.receive_json() + assert response["event"] == {"manager_state": "idle"} + response = await client.receive_json() + assert response["success"] + + await client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": ["hassio.local"], "name": "Test"} + ) + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "in_progress", + } + + response = await client.receive_json() + assert response["success"] + assert response["result"] == {"backup_job_id": "abc123"} + + supervisor_client.backups.partial_backup.assert_called_once_with( + supervisor_backups.PartialBackupOptions( + addons=None, + background=True, + compressed=True, + folders=None, + homeassistant_exclude_database=False, + homeassistant=True, + location={None}, + name="Test", + password=None, + ) + ) + + await client.send_json_auto_id( + { + "type": "supervisor/event", + "data": { + "event": "job", + "data": {"done": True, "uuid": "abc123", "reference": "test_slug"}, + }, + } + ) + response = await client.receive_json() + assert response["success"] + + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": "upload_to_agents", + "state": "in_progress", + } + + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "completed", + } + + +@pytest.mark.usefixtures("hassio_client") +async def test_reader_writer_restore( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test restoring a backup.""" + client = await hass_ws_client(hass) + supervisor_client.backups.partial_restore.return_value.job_id = "abc123" + supervisor_client.backups.list.return_value = [TEST_BACKUP] + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + + await client.send_json_auto_id({"type": "backup/subscribe_events"}) + response = await client.receive_json() + assert response["event"] == {"manager_state": "idle"} + response = await client.receive_json() + assert response["success"] + + await client.send_json_auto_id( + {"type": "backup/restore", "agent_id": "hassio.local", "backup_id": "abc123"} + ) + response = await client.receive_json() + assert response["event"] == { + "manager_state": "restore_backup", + "stage": None, + "state": "in_progress", + } + + supervisor_client.backups.partial_restore.assert_called_once_with( + "abc123", + supervisor_backups.PartialRestoreOptions( + addons=None, + background=True, + folders=None, + homeassistant=True, + password=None, + ), + ) + + await client.send_json_auto_id( + { + "type": "supervisor/event", + "data": { + "event": "job", + "data": {"done": True, "uuid": "abc123"}, + }, + } + ) + response = await client.receive_json() + assert response["success"] + + response = await client.receive_json() + assert 
response["event"] == {"manager_state": "idle"} + + response = await client.receive_json() + assert response["success"] + assert response["result"] is None + + +@pytest.mark.parametrize( + ("parameters", "expected_error"), + [ + ( + {"restore_database": False}, + "Cannot restore Home Assistant without database", + ), + ( + {"restore_homeassistant": False}, + "Cannot restore database without Home Assistant", + ), + ], +) +@pytest.mark.usefixtures("hassio_client") +async def test_reader_writer_restore_wrong_parameters( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, + parameters: dict[str, Any], + expected_error: str, +) -> None: + """Test trigger restore.""" + client = await hass_ws_client(hass) + supervisor_client.backups.list.return_value = [TEST_BACKUP] + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + + default_parameters = { + "type": "backup/restore", + "agent_id": "hassio.local", + "backup_id": "abc123", + } + + await client.send_json_auto_id(default_parameters | parameters) + response = await client.receive_json() + assert not response["success"] + assert response["error"] == { + "code": "home_assistant_error", + "message": expected_error, + } diff --git a/tests/components/kitchen_sink/test_backup.py b/tests/components/kitchen_sink/test_backup.py new file mode 100644 index 00000000000..7db03b7fa46 --- /dev/null +++ b/tests/components/kitchen_sink/test_backup.py @@ -0,0 +1,194 @@ +"""Test the Kitchen Sink backup platform.""" + +from collections.abc import AsyncGenerator +from io import StringIO +from unittest.mock import patch + +import pytest + +from homeassistant.components.backup import ( + DOMAIN as BACKUP_DOMAIN, + AddonInfo, + AgentBackup, + Folder, +) +from homeassistant.components.kitchen_sink import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.typing import ClientSessionGenerator, WebSocketGenerator + + +@pytest.fixture(autouse=True) +async def backup_only() -> AsyncGenerator[None]: + """Enable only the backup platform. + + The backup platform is not an entity platform. 
+ """ + with patch( + "homeassistant.components.kitchen_sink.COMPONENTS_WITH_DEMO_PLATFORM", + [], + ): + yield + + +@pytest.fixture(autouse=True) +async def setup_integration(hass: HomeAssistant) -> AsyncGenerator[None]: + """Set up Kitchen Sink integration.""" + with patch("homeassistant.components.backup.is_hassio", return_value=False): + assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}}) + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) + await hass.async_block_till_done() + yield + + +async def test_agents_info( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test backup agent info.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + "agents": [{"agent_id": "backup.local"}, {"agent_id": "kitchen_sink.syncer"}], + } + + +async def test_agents_list_backups( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test agent list backups.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"]["backups"] == [ + { + "addons": [{"name": "Test", "slug": "test", "version": "1.0.0"}], + "agent_ids": ["kitchen_sink.syncer"], + "backup_id": "abc123", + "database_included": False, + "date": "1970-01-01T00:00:00Z", + "failed_agent_ids": [], + "folders": ["media", "share"], + "homeassistant_included": True, + "homeassistant_version": "2024.12.0", + "name": "Kitchen sink syncer", + "protected": False, + "size": 1234, + "with_strategy_settings": False, + } + ] + + +async def test_agents_download( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, +) -> None: + """Test downloading a backup.""" + client = await hass_client() + + resp = await client.get("/api/backup/download/abc123?agent_id=kitchen_sink.syncer") + assert resp.status == 200 + assert await resp.content.read() == b"backup data" + + +async def test_agents_upload( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, + hass_supervisor_access_token: str, +) -> None: + """Test agent upload backup.""" + ws_client = await hass_ws_client(hass, hass_supervisor_access_token) + client = await hass_client() + backup_id = "test-backup" + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=False, + size=0.0, + ) + + with ( + patch("pathlib.Path.open"), + patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + ) as fetch_backup, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + ): + fetch_backup.return_value = test_backup + resp = await client.post( + "/api/backup/upload?agent_id=kitchen_sink.syncer", + data={"file": StringIO("test")}, + ) + + assert resp.status == 201 + assert f"Uploading backup {backup_id}" in caplog.text + + await ws_client.send_json_auto_id({"type": "backup/info"}) + response = await ws_client.receive_json() + + assert response["success"] + backup_list = response["result"]["backups"] + assert len(backup_list) == 2 + 
assert backup_list[1] == { + "addons": [{"name": "Test", "slug": "test", "version": "1.0.0"}], + "agent_ids": ["kitchen_sink.syncer"], + "backup_id": "test-backup", + "database_included": True, + "date": "1970-01-01T00:00:00.000Z", + "failed_agent_ids": [], + "folders": ["media", "share"], + "homeassistant_included": True, + "homeassistant_version": "2024.12.0", + "name": "Test", + "protected": False, + "size": 0.0, + "with_strategy_settings": False, + } + + +async def test_agent_delete_backup( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test agent delete backup.""" + client = await hass_ws_client(hass) + backup_id = "abc123" + + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": backup_id, + } + ) + response = await client.receive_json() + + assert response["success"] + assert f"Deleted backup {backup_id}" in caplog.text + + await client.send_json_auto_id({"type": "backup/info"}) + response = await client.receive_json() + + assert response["success"] + backup_list = response["result"]["backups"] + assert not backup_list diff --git a/tests/test_backup_restore.py b/tests/test_backup_restore.py index 44a05c0540e..bce5eca4292 100644 --- a/tests/test_backup_restore.py +++ b/tests/test_backup_restore.py @@ -19,7 +19,29 @@ from .common import get_test_config_dir ( None, '{"path": "test"}', - backup_restore.RestoreBackupFileContent(backup_file_path=Path("test")), + None, + ), + ( + None, + '{"path": "test", "password": "psw", "remove_after_restore": false, "restore_database": false, "restore_homeassistant": true}', + backup_restore.RestoreBackupFileContent( + backup_file_path=Path("test"), + password="psw", + remove_after_restore=False, + restore_database=False, + restore_homeassistant=True, + ), + ), + ( + None, + '{"path": "test", "password": null, "remove_after_restore": true, "restore_database": true, "restore_homeassistant": false}', + backup_restore.RestoreBackupFileContent( + backup_file_path=Path("test"), + password=None, + remove_after_restore=True, + restore_database=True, + restore_homeassistant=False, + ), ), ], ) @@ -49,7 +71,11 @@ def test_restoring_backup_that_does_not_exist() -> None: mock.patch( "homeassistant.backup_restore.restore_backup_file_content", return_value=backup_restore.RestoreBackupFileContent( - backup_file_path=backup_file_path + backup_file_path=backup_file_path, + password=None, + remove_after_restore=False, + restore_database=True, + restore_homeassistant=True, ), ), mock.patch("pathlib.Path.read_text", side_effect=FileNotFoundError), @@ -78,7 +104,11 @@ def test_restoring_backup_that_is_not_a_file() -> None: mock.patch( "homeassistant.backup_restore.restore_backup_file_content", return_value=backup_restore.RestoreBackupFileContent( - backup_file_path=backup_file_path + backup_file_path=backup_file_path, + password=None, + remove_after_restore=False, + restore_database=True, + restore_homeassistant=True, ), ), mock.patch("pathlib.Path.exists", return_value=True), @@ -102,7 +132,11 @@ def test_aborting_for_older_versions() -> None: mock.patch( "homeassistant.backup_restore.restore_backup_file_content", return_value=backup_restore.RestoreBackupFileContent( - backup_file_path=backup_file_path + backup_file_path=backup_file_path, + password=None, + remove_after_restore=False, + restore_database=True, + restore_homeassistant=True, ), ), mock.patch("securetar.SecureTarFile"), @@ -117,14 +151,78 @@ def test_aborting_for_older_versions() -> None: assert 
backup_restore.restore_backup(config_dir) is True -def test_removal_of_current_configuration_when_restoring() -> None: +@pytest.mark.parametrize( + ( + "restore_backup_content", + "expected_removed_files", + "expected_removed_directories", + "expected_copied_files", + "expected_copied_trees", + ), + [ + ( + backup_restore.RestoreBackupFileContent( + backup_file_path=None, + password=None, + remove_after_restore=False, + restore_database=True, + restore_homeassistant=True, + ), + ( + ".HA_RESTORE", + ".HA_VERSION", + "home-assistant_v2.db", + "home-assistant_v2.db-wal", + ), + ("tmp_backups", "www"), + (), + ("data",), + ), + ( + backup_restore.RestoreBackupFileContent( + backup_file_path=None, + password=None, + restore_database=False, + remove_after_restore=False, + restore_homeassistant=True, + ), + (".HA_RESTORE", ".HA_VERSION"), + ("tmp_backups", "www"), + (), + ("data",), + ), + ( + backup_restore.RestoreBackupFileContent( + backup_file_path=None, + password=None, + restore_database=True, + remove_after_restore=False, + restore_homeassistant=False, + ), + ("home-assistant_v2.db", "home-assistant_v2.db-wal"), + (), + ("home-assistant_v2.db", "home-assistant_v2.db-wal"), + (), + ), + ], +) +def test_removal_of_current_configuration_when_restoring( + restore_backup_content: backup_restore.RestoreBackupFileContent, + expected_removed_files: tuple[str, ...], + expected_removed_directories: tuple[str, ...], + expected_copied_files: tuple[str, ...], + expected_copied_trees: tuple[str, ...], +) -> None: """Test that we are removing the current configuration directory.""" config_dir = Path(get_test_config_dir()) - backup_file_path = Path(config_dir, "backups", "test.tar") + restore_backup_content.backup_file_path = Path(config_dir, "backups", "test.tar") mock_config_dir = [ {"path": Path(config_dir, ".HA_RESTORE"), "is_file": True}, {"path": Path(config_dir, ".HA_VERSION"), "is_file": True}, + {"path": Path(config_dir, "home-assistant_v2.db"), "is_file": True}, + {"path": Path(config_dir, "home-assistant_v2.db-wal"), "is_file": True}, {"path": Path(config_dir, "backups"), "is_file": False}, + {"path": Path(config_dir, "tmp_backups"), "is_file": False}, {"path": Path(config_dir, "www"), "is_file": False}, ] @@ -140,12 +238,10 @@ def test_removal_of_current_configuration_when_restoring() -> None: with ( mock.patch( "homeassistant.backup_restore.restore_backup_file_content", - return_value=backup_restore.RestoreBackupFileContent( - backup_file_path=backup_file_path - ), + return_value=restore_backup_content, ), mock.patch("securetar.SecureTarFile"), - mock.patch("homeassistant.backup_restore.TemporaryDirectory"), + mock.patch("homeassistant.backup_restore.TemporaryDirectory") as temp_dir_mock, mock.patch("homeassistant.backup_restore.HA_VERSION", "2013.09.17"), mock.patch("pathlib.Path.read_text", _patched_path_read_text), mock.patch("pathlib.Path.is_file", _patched_path_is_file), @@ -154,17 +250,33 @@ def test_removal_of_current_configuration_when_restoring() -> None: "pathlib.Path.iterdir", return_value=[x["path"] for x in mock_config_dir], ), - mock.patch("pathlib.Path.unlink") as unlink_mock, - mock.patch("shutil.rmtree") as rmtreemock, + mock.patch("pathlib.Path.unlink", autospec=True) as unlink_mock, + mock.patch("shutil.copy") as copy_mock, + mock.patch("shutil.copytree") as copytree_mock, + mock.patch("shutil.rmtree") as rmtree_mock, ): - assert backup_restore.restore_backup(config_dir) is True - assert unlink_mock.call_count == 2 - assert ( - rmtreemock.call_count == 1 - ) # We have 2 
directories in the config directory, but backups is kept + temp_dir_mock.return_value.__enter__.return_value = "tmp" - removed_directories = {Path(call.args[0]) for call in rmtreemock.mock_calls} - assert removed_directories == {Path(config_dir, "www")} + assert backup_restore.restore_backup(config_dir) is True + + tmp_ha = Path("tmp", "homeassistant") + assert copy_mock.call_count == len(expected_copied_files) + copied_files = {Path(call.args[0]) for call in copy_mock.mock_calls} + assert copied_files == {Path(tmp_ha, "data", f) for f in expected_copied_files} + + assert copytree_mock.call_count == len(expected_copied_trees) + copied_trees = {Path(call.args[0]) for call in copytree_mock.mock_calls} + assert copied_trees == {Path(tmp_ha, t) for t in expected_copied_trees} + + assert unlink_mock.call_count == len(expected_removed_files) + removed_files = {Path(call.args[0]) for call in unlink_mock.mock_calls} + assert removed_files == {Path(config_dir, f) for f in expected_removed_files} + + assert rmtree_mock.call_count == len(expected_removed_directories) + removed_directories = {Path(call.args[0]) for call in rmtree_mock.mock_calls} + assert removed_directories == { + Path(config_dir, d) for d in expected_removed_directories + } def test_extracting_the_contents_of_a_backup_file() -> None: @@ -177,8 +289,8 @@ def test_extracting_the_contents_of_a_backup_file() -> None: getmembers_mock = mock.MagicMock( return_value=[ + tarfile.TarInfo(name="../data/test"), tarfile.TarInfo(name="data"), - tarfile.TarInfo(name="data/../test"), tarfile.TarInfo(name="data/.HA_VERSION"), tarfile.TarInfo(name="data/.storage"), tarfile.TarInfo(name="data/www"), @@ -190,7 +302,11 @@ def test_extracting_the_contents_of_a_backup_file() -> None: mock.patch( "homeassistant.backup_restore.restore_backup_file_content", return_value=backup_restore.RestoreBackupFileContent( - backup_file_path=backup_file_path + backup_file_path=backup_file_path, + password=None, + remove_after_restore=False, + restore_database=True, + restore_homeassistant=True, ), ), mock.patch( @@ -205,11 +321,59 @@ def test_extracting_the_contents_of_a_backup_file() -> None: mock.patch("pathlib.Path.read_text", _patched_path_read_text), mock.patch("pathlib.Path.is_file", return_value=False), mock.patch("pathlib.Path.iterdir", return_value=[]), + mock.patch("shutil.copytree"), ): assert backup_restore.restore_backup(config_dir) is True - assert getmembers_mock.call_count == 1 assert extractall_mock.call_count == 2 assert { member.name for member in extractall_mock.mock_calls[-1].kwargs["members"] - } == {".HA_VERSION", ".storage", "www"} + } == {"data", "data/.HA_VERSION", "data/.storage", "data/www"} + + +@pytest.mark.parametrize( + ("remove_after_restore", "unlink_calls"), [(True, 1), (False, 0)] +) +def test_remove_backup_file_after_restore( + remove_after_restore: bool, unlink_calls: int +) -> None: + """Test removing a backup file after restore.""" + config_dir = Path(get_test_config_dir()) + backup_file_path = Path(config_dir, "backups", "test.tar") + + with ( + mock.patch( + "homeassistant.backup_restore.restore_backup_file_content", + return_value=backup_restore.RestoreBackupFileContent( + backup_file_path=backup_file_path, + password=None, + remove_after_restore=remove_after_restore, + restore_database=True, + restore_homeassistant=True, + ), + ), + mock.patch("homeassistant.backup_restore._extract_backup"), + mock.patch("pathlib.Path.unlink", autospec=True) as mock_unlink, + ): + assert backup_restore.restore_backup(config_dir) is True + 
assert mock_unlink.call_count == unlink_calls + for call in mock_unlink.mock_calls: + assert call.args[0] == backup_file_path + + +@pytest.mark.parametrize( + ("password", "expected"), + [ + ("test", b"\xf0\x9b\xb9\x1f\xdc,\xff\xd5x\xd6\xd6\x8fz\x19.\x0f"), + ("lorem ipsum...", b"#\xe0\xfc\xe0\xdb?_\x1f,$\rQ\xf4\xf5\xd8\xfb"), + ], +) +def test_pw_to_key(password: str | None, expected: bytes | None) -> None: + """Test password to key conversion.""" + assert backup_restore.password_to_key(password) == expected + + +def test_pw_to_key_none() -> None: + """Test password to key conversion.""" + with pytest.raises(AttributeError): + backup_restore.password_to_key(None) From 4c5965ffc9fb028000e176202b9f5d43510da6a6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=85ke=20Strandberg?= Date: Wed, 11 Dec 2024 22:47:14 +0100 Subject: [PATCH 0524/1198] Add reconfiguration flow to myuplink (#132970) * Add reconfiguration flow * Tick reconfiguration-flow rule --- .../components/myuplink/config_flow.py | 17 +++- .../components/myuplink/quality_scale.yaml | 2 +- .../components/myuplink/strings.json | 1 + tests/components/myuplink/test_config_flow.py | 93 +++++++++++++++++++ 4 files changed, 111 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/myuplink/config_flow.py b/homeassistant/components/myuplink/config_flow.py index 15bff643185..cf0428f59ce 100644 --- a/homeassistant/components/myuplink/config_flow.py +++ b/homeassistant/components/myuplink/config_flow.py @@ -6,7 +6,11 @@ from typing import Any import jwt -from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult +from homeassistant.config_entries import ( + SOURCE_REAUTH, + SOURCE_RECONFIGURE, + ConfigFlowResult, +) from homeassistant.helpers import config_entry_oauth2_flow from .const import DOMAIN, OAUTH2_SCOPES @@ -48,6 +52,12 @@ class OAuth2FlowHandler( return await self.async_step_user() + async def async_step_reconfigure( + self, user_input: Mapping[str, Any] | None = None + ) -> ConfigFlowResult: + """User initiated reconfiguration.""" + return await self.async_step_user() + async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult: """Create or update the config entry.""" @@ -62,5 +72,10 @@ class OAuth2FlowHandler( return self.async_update_reload_and_abort( self._get_reauth_entry(), data=data ) + if self.source == SOURCE_RECONFIGURE: + self._abort_if_unique_id_mismatch(reason="account_mismatch") + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), data=data + ) self._abort_if_unique_id_configured() return await super().async_oauth_create_entry(data) diff --git a/homeassistant/components/myuplink/quality_scale.yaml b/homeassistant/components/myuplink/quality_scale.yaml index 661986a2f71..463002b5519 100644 --- a/homeassistant/components/myuplink/quality_scale.yaml +++ b/homeassistant/components/myuplink/quality_scale.yaml @@ -82,7 +82,7 @@ rules: status: todo comment: PR pending review \#191937 icon-translations: done - reconfiguration-flow: todo + reconfiguration-flow: done repair-issues: status: exempt comment: | diff --git a/homeassistant/components/myuplink/strings.json b/homeassistant/components/myuplink/strings.json index bd60a3c7bb3..d3d2f198448 100644 --- a/homeassistant/components/myuplink/strings.json +++ b/homeassistant/components/myuplink/strings.json @@ -23,6 +23,7 @@ "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", "reauth_successful": 
"[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", "account_mismatch": "The used account does not match the original account", "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]" }, diff --git a/tests/components/myuplink/test_config_flow.py b/tests/components/myuplink/test_config_flow.py index e823402bda6..0b8d0dba17a 100644 --- a/tests/components/myuplink/test_config_flow.py +++ b/tests/components/myuplink/test_config_flow.py @@ -181,3 +181,96 @@ async def test_flow_reauth_abort( assert result.get("reason") == expected_reason assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + + +@pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.parametrize( + ("unique_id", "scope", "expected_reason"), + [ + ( + UNIQUE_ID, + CURRENT_SCOPE, + "reconfigure_successful", + ), + ( + "wrong_uid", + CURRENT_SCOPE, + "account_mismatch", + ), + ], + ids=["reauth_only", "account_mismatch"], +) +async def test_flow_reconfigure_abort( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + setup_credentials: None, + mock_config_entry: MockConfigEntry, + access_token: str, + expires_at: float, + unique_id: str, + scope: str, + expected_reason: str, +) -> None: + """Test reauth step with correct params and mismatches.""" + + CURRENT_TOKEN = { + "auth_implementation": DOMAIN, + "token": { + "access_token": access_token, + "scope": scope, + "expires_in": 86399, + "refresh_token": "3012bc9f-7a65-4240-b817-9154ffdcc30f", + "token_type": "Bearer", + "expires_at": expires_at, + }, + } + assert hass.config_entries.async_update_entry( + mock_config_entry, data=CURRENT_TOKEN, unique_id=unique_id + ) + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result["step_id"] == "auth" + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": REDIRECT_URL, + }, + ) + assert result["url"] == ( + f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" + f"&redirect_uri={REDIRECT_URL}" + f"&state={state}" + f"&scope={CURRENT_SCOPE.replace(' ', '+')}" + ) + + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "refresh_token": "updated-refresh-token", + "access_token": access_token, + "type": "Bearer", + "expires_in": "60", + "scope": CURRENT_SCOPE, + }, + ) + + with patch( + f"homeassistant.components.{DOMAIN}.async_setup_entry", return_value=True + ): + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done() + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == expected_reason + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 From 95f48963d4d63fdb1a5e7c10c87ff694b50a7525 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=85ke=20Strandberg?= Date: Wed, 11 Dec 2024 23:11:11 +0100 Subject: [PATCH 0525/1198] Set strict typing for myuplink (#132972) Set strict typing --- homeassistant/components/myuplink/__init__.py | 6 ++++-- homeassistant/components/myuplink/quality_scale.yaml | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git 
a/homeassistant/components/myuplink/__init__.py b/homeassistant/components/myuplink/__init__.py index c3ff8b6988b..e833c5fcd8e 100644 --- a/homeassistant/components/myuplink/__init__.py +++ b/homeassistant/components/myuplink/__init__.py @@ -77,14 +77,16 @@ async def async_setup_entry( return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: MyUplinkConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) @callback def create_devices( - hass: HomeAssistant, config_entry: ConfigEntry, coordinator: MyUplinkDataCoordinator + hass: HomeAssistant, + config_entry: MyUplinkConfigEntry, + coordinator: MyUplinkDataCoordinator, ) -> None: """Update all devices.""" device_registry = dr.async_get(hass) diff --git a/homeassistant/components/myuplink/quality_scale.yaml b/homeassistant/components/myuplink/quality_scale.yaml index 463002b5519..ef64ce757f5 100644 --- a/homeassistant/components/myuplink/quality_scale.yaml +++ b/homeassistant/components/myuplink/quality_scale.yaml @@ -95,4 +95,4 @@ rules: # Platinum async-dependency: done inject-websession: done - strict-typing: todo + strict-typing: done From eea781f34a50d1ddab6b84ae3f5383104e65285c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 12 Dec 2024 05:46:31 +0100 Subject: [PATCH 0526/1198] Bump led-ble to 1.1.1 (#132977) changelog: https://github.com/Bluetooth-Devices/led-ble/compare/v1.0.2...v1.1.1 --- homeassistant/components/led_ble/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/led_ble/manifest.json b/homeassistant/components/led_ble/manifest.json index 1d12e355a0d..4aaaebc0006 100644 --- a/homeassistant/components/led_ble/manifest.json +++ b/homeassistant/components/led_ble/manifest.json @@ -35,5 +35,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/led_ble", "iot_class": "local_polling", - "requirements": ["bluetooth-data-tools==1.20.0", "led-ble==1.0.2"] + "requirements": ["bluetooth-data-tools==1.20.0", "led-ble==1.1.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 661ce5876a9..10b8c650127 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1278,7 +1278,7 @@ ld2410-ble==0.1.1 leaone-ble==0.1.0 # homeassistant.components.led_ble -led-ble==1.0.2 +led-ble==1.1.1 # homeassistant.components.lektrico lektricowifi==0.0.43 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c959d83723c..194e29e35e8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1074,7 +1074,7 @@ ld2410-ble==0.1.1 leaone-ble==0.1.0 # homeassistant.components.led_ble -led-ble==1.0.2 +led-ble==1.1.1 # homeassistant.components.lektrico lektricowifi==0.0.43 From b02ccd0813c5eb731ca9b3dceae19c8f69ca08c5 Mon Sep 17 00:00:00 2001 From: Christopher Fenner <9592452+CFenner@users.noreply.github.com> Date: Thu, 12 Dec 2024 07:47:57 +0100 Subject: [PATCH 0527/1198] Add missing body height icon in Withings integration (#132991) Update icons.json --- homeassistant/components/withings/icons.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/homeassistant/components/withings/icons.json b/homeassistant/components/withings/icons.json index 79ff7489bf8..8123337dc82 100644 --- a/homeassistant/components/withings/icons.json +++ 
b/homeassistant/components/withings/icons.json @@ -16,6 +16,9 @@ "heart_pulse": { "default": "mdi:heart-pulse" }, + "height": { + "default": "mdi:human-male-height-variant" + }, "hydration": { "default": "mdi:water" }, From 7e071d1fc6b1fad0ebfbc28e58e039ceff93407a Mon Sep 17 00:00:00 2001 From: Tom Date: Thu, 12 Dec 2024 07:49:08 +0100 Subject: [PATCH 0528/1198] Introduce parallel updates for Plugwise (#132940) * Plugwise indicate parallel updates * Update homeassistant/components/plugwise/number.py Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --------- Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- homeassistant/components/plugwise/binary_sensor.py | 3 +++ homeassistant/components/plugwise/button.py | 2 ++ homeassistant/components/plugwise/climate.py | 2 ++ homeassistant/components/plugwise/number.py | 2 ++ homeassistant/components/plugwise/quality_scale.yaml | 4 +--- homeassistant/components/plugwise/select.py | 2 ++ homeassistant/components/plugwise/sensor.py | 3 +++ homeassistant/components/plugwise/switch.py | 2 ++ 8 files changed, 17 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/plugwise/binary_sensor.py b/homeassistant/components/plugwise/binary_sensor.py index f422d4facf3..539fa243d6c 100644 --- a/homeassistant/components/plugwise/binary_sensor.py +++ b/homeassistant/components/plugwise/binary_sensor.py @@ -23,6 +23,9 @@ from .entity import PlugwiseEntity SEVERITIES = ["other", "info", "warning", "error"] +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True) class PlugwiseBinarySensorEntityDescription(BinarySensorEntityDescription): diff --git a/homeassistant/components/plugwise/button.py b/homeassistant/components/plugwise/button.py index 078d31bea12..8a05ede3496 100644 --- a/homeassistant/components/plugwise/button.py +++ b/homeassistant/components/plugwise/button.py @@ -13,6 +13,8 @@ from .coordinator import PlugwiseDataUpdateCoordinator from .entity import PlugwiseEntity from .util import plugwise_command +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/plugwise/climate.py b/homeassistant/components/plugwise/climate.py index fb0124e144d..3cf536eb445 100644 --- a/homeassistant/components/plugwise/climate.py +++ b/homeassistant/components/plugwise/climate.py @@ -24,6 +24,8 @@ from .coordinator import PlugwiseDataUpdateCoordinator from .entity import PlugwiseEntity from .util import plugwise_command +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/plugwise/number.py b/homeassistant/components/plugwise/number.py index 833ea3ec761..1d0b1382c24 100644 --- a/homeassistant/components/plugwise/number.py +++ b/homeassistant/components/plugwise/number.py @@ -20,6 +20,8 @@ from .coordinator import PlugwiseDataUpdateCoordinator from .entity import PlugwiseEntity from .util import plugwise_command +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class PlugwiseNumberEntityDescription(NumberEntityDescription): diff --git a/homeassistant/components/plugwise/quality_scale.yaml b/homeassistant/components/plugwise/quality_scale.yaml index a6b364cf381..ce0788c44f7 100644 --- a/homeassistant/components/plugwise/quality_scale.yaml +++ b/homeassistant/components/plugwise/quality_scale.yaml @@ -32,9 +32,7 @@ rules: reauthentication-flow: status: exempt comment: The hubs have a hardcoded `Smile ID` printed on the sticker used as password, it can not be 
changed - parallel-updates: - status: todo - comment: Using coordinator, but required due to mutable platform + parallel-updates: done test-coverage: done integration-owner: done docs-installation-parameters: diff --git a/homeassistant/components/plugwise/select.py b/homeassistant/components/plugwise/select.py index 46b27ca6225..ff268d8eded 100644 --- a/homeassistant/components/plugwise/select.py +++ b/homeassistant/components/plugwise/select.py @@ -15,6 +15,8 @@ from .coordinator import PlugwiseDataUpdateCoordinator from .entity import PlugwiseEntity from .util import plugwise_command +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class PlugwiseSelectEntityDescription(SelectEntityDescription): diff --git a/homeassistant/components/plugwise/sensor.py b/homeassistant/components/plugwise/sensor.py index 41ca439451a..14b42682376 100644 --- a/homeassistant/components/plugwise/sensor.py +++ b/homeassistant/components/plugwise/sensor.py @@ -31,6 +31,9 @@ from . import PlugwiseConfigEntry from .coordinator import PlugwiseDataUpdateCoordinator from .entity import PlugwiseEntity +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True) class PlugwiseSensorEntityDescription(SensorEntityDescription): diff --git a/homeassistant/components/plugwise/switch.py b/homeassistant/components/plugwise/switch.py index 305518f4bef..ea6d6f18b7f 100644 --- a/homeassistant/components/plugwise/switch.py +++ b/homeassistant/components/plugwise/switch.py @@ -21,6 +21,8 @@ from .coordinator import PlugwiseDataUpdateCoordinator from .entity import PlugwiseEntity from .util import plugwise_command +PARALLEL_UPDATES = 0 + @dataclass(frozen=True) class PlugwiseSwitchEntityDescription(SwitchEntityDescription): From e39897ff9a024b4f163e27c6a357e427ea2c7047 Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Thu, 12 Dec 2024 01:55:29 -0500 Subject: [PATCH 0529/1198] Enforce strict typing for Russound RIO (#132982) --- .strict-typing | 1 + .../components/russound_rio/media_player.py | 14 +++++++------- .../components/russound_rio/quality_scale.yaml | 2 +- mypy.ini | 10 ++++++++++ 4 files changed, 19 insertions(+), 8 deletions(-) diff --git a/.strict-typing b/.strict-typing index a45be32c3c6..130ae6e9393 100644 --- a/.strict-typing +++ b/.strict-typing @@ -402,6 +402,7 @@ homeassistant.components.romy.* homeassistant.components.rpi_power.* homeassistant.components.rss_feed_template.* homeassistant.components.rtsp_to_webrtc.* +homeassistant.components.russound_rio.* homeassistant.components.ruuvi_gateway.* homeassistant.components.ruuvitag_ble.* homeassistant.components.samsungtv.* diff --git a/homeassistant/components/russound_rio/media_player.py b/homeassistant/components/russound_rio/media_player.py index 12b41485167..d0d8e02a282 100644 --- a/homeassistant/components/russound_rio/media_player.py +++ b/homeassistant/components/russound_rio/media_player.py @@ -148,37 +148,37 @@ class RussoundZoneDevice(RussoundBaseEntity, MediaPlayerEntity): return MediaPlayerState.ON @property - def source(self): + def source(self) -> str: """Get the currently selected source.""" return self._source.name @property - def source_list(self): + def source_list(self) -> list[str]: """Return a list of available input sources.""" return [x.name for x in self._sources.values()] @property - def media_title(self): + def media_title(self) -> str | None: """Title of current playing media.""" return self._source.song_name @property - def media_artist(self): 
+ def media_artist(self) -> str | None: """Artist of current playing media, music track only.""" return self._source.artist_name @property - def media_album_name(self): + def media_album_name(self) -> str | None: """Album name of current playing media, music track only.""" return self._source.album_name @property - def media_image_url(self): + def media_image_url(self) -> str | None: """Image url of current playing media.""" return self._source.cover_art_url @property - def volume_level(self): + def volume_level(self) -> float: """Volume level of the media player (0..1). Value is returned based on a range (0..50). diff --git a/homeassistant/components/russound_rio/quality_scale.yaml b/homeassistant/components/russound_rio/quality_scale.yaml index 4c7214cfd8b..aaa354b2b31 100644 --- a/homeassistant/components/russound_rio/quality_scale.yaml +++ b/homeassistant/components/russound_rio/quality_scale.yaml @@ -83,4 +83,4 @@ rules: status: exempt comment: | This integration uses telnet exclusively and does not make http calls. - strict-typing: todo + strict-typing: done diff --git a/mypy.ini b/mypy.ini index 4e5d4212ee9..a0c441c44f9 100644 --- a/mypy.ini +++ b/mypy.ini @@ -3775,6 +3775,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.russound_rio.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.ruuvi_gateway.*] check_untyped_defs = true disallow_incomplete_defs = true From 2d0c4e4a591737a18696fe74740027aa6dcce161 Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Thu, 12 Dec 2024 01:56:29 -0500 Subject: [PATCH 0530/1198] Improve config flow test coverage for Russound RIO (#132981) --- .../russound_rio/quality_scale.yaml | 5 +--- tests/components/russound_rio/__init__.py | 12 ++++++++ .../russound_rio/test_config_flow.py | 29 +++++++++++++++++++ tests/components/russound_rio/test_init.py | 26 +++++++++++++++-- .../russound_rio/test_media_player.py | 10 ++----- 5 files changed, 68 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/russound_rio/quality_scale.yaml b/homeassistant/components/russound_rio/quality_scale.yaml index aaa354b2b31..2d396892aa8 100644 --- a/homeassistant/components/russound_rio/quality_scale.yaml +++ b/homeassistant/components/russound_rio/quality_scale.yaml @@ -10,10 +10,7 @@ rules: This integration uses a push API. No polling required. brands: done common-modules: done - config-flow-test-coverage: - status: todo - comment: | - Missing unique_id check in test_form() and test_import(). Test for adding same device twice missing. 
+ config-flow-test-coverage: done config-flow: done dependency-transparency: done docs-actions: diff --git a/tests/components/russound_rio/__init__.py b/tests/components/russound_rio/__init__.py index d0e6d77f1ee..d8764285dd3 100644 --- a/tests/components/russound_rio/__init__.py +++ b/tests/components/russound_rio/__init__.py @@ -1,5 +1,9 @@ """Tests for the Russound RIO integration.""" +from unittest.mock import AsyncMock + +from aiorussound.models import CallbackType + from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -11,3 +15,11 @@ async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() + + +async def mock_state_update( + client: AsyncMock, callback_type: CallbackType = CallbackType.STATE +) -> None: + """Trigger a callback in the media player.""" + for callback in client.register_state_update_callbacks.call_args_list: + await callback[0][0](client, callback_type) diff --git a/tests/components/russound_rio/test_config_flow.py b/tests/components/russound_rio/test_config_flow.py index cf754852731..28cbf7eda5e 100644 --- a/tests/components/russound_rio/test_config_flow.py +++ b/tests/components/russound_rio/test_config_flow.py @@ -9,6 +9,8 @@ from homeassistant.data_entry_flow import FlowResultType from .const import MOCK_CONFIG, MODEL +from tests.common import MockConfigEntry + async def test_form( hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound_client: AsyncMock @@ -29,6 +31,7 @@ async def test_form( assert result["title"] == MODEL assert result["data"] == MOCK_CONFIG assert len(mock_setup_entry.mock_calls) == 1 + assert result["result"].unique_id == "00:11:22:33:44:55" async def test_form_cannot_connect( @@ -60,6 +63,31 @@ async def test_form_cannot_connect( assert len(mock_setup_entry.mock_calls) == 1 +async def test_duplicate( + hass: HomeAssistant, + mock_russound_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test duplicate flow.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_CONFIG, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + async def test_import( hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound_client: AsyncMock ) -> None: @@ -74,6 +102,7 @@ async def test_import( assert result["title"] == MODEL assert result["data"] == MOCK_CONFIG assert len(mock_setup_entry.mock_calls) == 1 + assert result["result"].unique_id == "00:11:22:33:44:55" async def test_import_cannot_connect( diff --git a/tests/components/russound_rio/test_init.py b/tests/components/russound_rio/test_init.py index 6787ee37c79..e7022fa6ac1 100644 --- a/tests/components/russound_rio/test_init.py +++ b/tests/components/russound_rio/test_init.py @@ -1,7 +1,9 @@ """Tests for the Russound RIO integration.""" -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, Mock +from aiorussound.models import CallbackType +import pytest from syrupy import SnapshotAssertion from homeassistant.components.russound_rio.const import DOMAIN @@ -9,7 +11,7 @@ from homeassistant.config_entries import ConfigEntryState from homeassistant.core import 
HomeAssistant from homeassistant.helpers import device_registry as dr -from . import setup_integration +from . import mock_state_update, setup_integration from tests.common import MockConfigEntry @@ -42,3 +44,23 @@ async def test_device_info( ) assert device_entry is not None assert device_entry == snapshot + + +async def test_disconnect_reconnect_log( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_russound_client: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test device registry integration.""" + await setup_integration(hass, mock_config_entry) + + mock_russound_client.is_connected = Mock(return_value=False) + await mock_state_update(mock_russound_client, CallbackType.CONNECTION) + assert "Disconnected from device at 127.0.0.1" in caplog.text + + mock_russound_client.is_connected = Mock(return_value=True) + await mock_state_update(mock_russound_client, CallbackType.CONNECTION) + assert "Reconnected to device at 127.0.0.1" in caplog.text diff --git a/tests/components/russound_rio/test_media_player.py b/tests/components/russound_rio/test_media_player.py index e720e2c7f65..c740ec4f39e 100644 --- a/tests/components/russound_rio/test_media_player.py +++ b/tests/components/russound_rio/test_media_player.py @@ -2,7 +2,7 @@ from unittest.mock import AsyncMock -from aiorussound.models import CallbackType, PlayStatus +from aiorussound.models import PlayStatus import pytest from homeassistant.const import ( @@ -15,18 +15,12 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant -from . import setup_integration +from . import mock_state_update, setup_integration from .const import ENTITY_ID_ZONE_1 from tests.common import MockConfigEntry -async def mock_state_update(client: AsyncMock) -> None: - """Trigger a callback in the media player.""" - for callback in client.register_state_update_callbacks.call_args_list: - await callback[0][0](client, CallbackType.STATE) - - @pytest.mark.parametrize( ("zone_status", "source_play_status", "media_player_state"), [ From 0d4780e91b0bb92c255983e19b144f3352aa4b1c Mon Sep 17 00:00:00 2001 From: Chris Talkington Date: Thu, 12 Dec 2024 01:00:24 -0600 Subject: [PATCH 0531/1198] Set parallel updates for roku (#132892) * Set parallel updates for roku * Update sensor.py * Update media_player.py * Update remote.py * Update select.py * Update media_player.py * Update remote.py * Update select.py * Update remote.py * Update media_player.py --- homeassistant/components/roku/binary_sensor.py | 3 +++ homeassistant/components/roku/media_player.py | 3 ++- homeassistant/components/roku/remote.py | 2 ++ homeassistant/components/roku/select.py | 2 ++ homeassistant/components/roku/sensor.py | 3 +++ 5 files changed, 12 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/roku/binary_sensor.py b/homeassistant/components/roku/binary_sensor.py index cd51c30c250..2e7fd12788c 100644 --- a/homeassistant/components/roku/binary_sensor.py +++ b/homeassistant/components/roku/binary_sensor.py @@ -18,6 +18,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import RokuConfigEntry from .entity import RokuEntity +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class RokuBinarySensorEntityDescription(BinarySensorEntityDescription): diff --git a/homeassistant/components/roku/media_player.py b/homeassistant/components/roku/media_player.py index d43d62c9438..0c1f92521af 100644 --- a/homeassistant/components/roku/media_player.py +++ b/homeassistant/components/roku/media_player.py @@ -46,7 +46,6 @@ from .helpers import format_channel_name, roku_exception_handler _LOGGER = logging.getLogger(__name__) - STREAM_FORMAT_TO_MEDIA_TYPE = { "dash": MediaType.VIDEO, "hls": MediaType.VIDEO, @@ -80,6 +79,8 @@ ATTRS_TO_PLAY_ON_ROKU_AUDIO_PARAMS = { SEARCH_SCHEMA: VolDictType = {vol.Required(ATTR_KEYWORD): str} +PARALLEL_UPDATES = 1 + async def async_setup_entry( hass: HomeAssistant, entry: RokuConfigEntry, async_add_entities: AddEntitiesCallback diff --git a/homeassistant/components/roku/remote.py b/homeassistant/components/roku/remote.py index 9a31f9fd7a0..f7916fb23a2 100644 --- a/homeassistant/components/roku/remote.py +++ b/homeassistant/components/roku/remote.py @@ -13,6 +13,8 @@ from . import RokuConfigEntry from .entity import RokuEntity from .helpers import roku_exception_handler +PARALLEL_UPDATES = 1 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/roku/select.py b/homeassistant/components/roku/select.py index 6977f8c0d24..360d4e25415 100644 --- a/homeassistant/components/roku/select.py +++ b/homeassistant/components/roku/select.py @@ -16,6 +16,8 @@ from . import RokuConfigEntry from .entity import RokuEntity from .helpers import format_channel_name, roku_exception_handler +PARALLEL_UPDATES = 1 + def _get_application_name(device: RokuDevice) -> str | None: if device.app is None or device.app.name is None: diff --git a/homeassistant/components/roku/sensor.py b/homeassistant/components/roku/sensor.py index 56a84ead402..870386945a6 100644 --- a/homeassistant/components/roku/sensor.py +++ b/homeassistant/components/roku/sensor.py @@ -15,6 +15,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import RokuConfigEntry from .entity import RokuEntity +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class RokuSensorEntityDescription(SensorEntityDescription): From 053f03ac58bc61b077910f13d486bef4a535be86 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Thu, 12 Dec 2024 02:03:05 -0600 Subject: [PATCH 0532/1198] Change warning to debug for VAD timeout (#132987) --- homeassistant/components/assist_pipeline/vad.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/assist_pipeline/vad.py b/homeassistant/components/assist_pipeline/vad.py index deae5b9b7b3..c7fe1bc10c7 100644 --- a/homeassistant/components/assist_pipeline/vad.py +++ b/homeassistant/components/assist_pipeline/vad.py @@ -140,7 +140,7 @@ class VoiceCommandSegmenter: self._timeout_seconds_left -= chunk_seconds if self._timeout_seconds_left <= 0: - _LOGGER.warning( + _LOGGER.debug( "VAD end of speech detection timed out after %s seconds", self.timeout_seconds, ) From 85d4c48d6f2120e4b99ae694407bdd77ee45d68c Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 09:53:26 +0100 Subject: [PATCH 0533/1198] Set parallel updates in Elgato (#132998) --- homeassistant/components/elgato/button.py | 2 ++ homeassistant/components/elgato/quality_scale.yaml | 5 +---- homeassistant/components/elgato/switch.py | 2 ++ 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/elgato/button.py b/homeassistant/components/elgato/button.py index aefff0b750b..6f9436b8e29 100644 --- a/homeassistant/components/elgato/button.py +++ b/homeassistant/components/elgato/button.py @@ -22,6 +22,8 @@ from . import ElgatorConfigEntry from .coordinator import ElgatoDataUpdateCoordinator from .entity import ElgatoEntity +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class ElgatoButtonEntityDescription(ButtonEntityDescription): diff --git a/homeassistant/components/elgato/quality_scale.yaml b/homeassistant/components/elgato/quality_scale.yaml index 513940e2438..531f0447f70 100644 --- a/homeassistant/components/elgato/quality_scale.yaml +++ b/homeassistant/components/elgato/quality_scale.yaml @@ -30,10 +30,7 @@ rules: entity-unavailable: done integration-owner: done log-when-unavailable: done - parallel-updates: - status: todo - comment: | - Does not set parallel-updates on button/switch action calls. + parallel-updates: done reauthentication-flow: status: exempt comment: | diff --git a/homeassistant/components/elgato/switch.py b/homeassistant/components/elgato/switch.py index fe177616034..643f148ec7d 100644 --- a/homeassistant/components/elgato/switch.py +++ b/homeassistant/components/elgato/switch.py @@ -18,6 +18,8 @@ from . 
import ElgatorConfigEntry from .coordinator import ElgatoData, ElgatoDataUpdateCoordinator from .entity import ElgatoEntity +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class ElgatoSwitchEntityDescription(SwitchEntityDescription): From bb610acb8614de586000d659ccc7bb3012858b04 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 09:53:55 +0100 Subject: [PATCH 0534/1198] Migrate elgato light tests to use Kelvin (#133004) --- tests/components/elgato/test_light.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/components/elgato/test_light.py b/tests/components/elgato/test_light.py index 40c0232c2b3..43fad1faa77 100644 --- a/tests/components/elgato/test_light.py +++ b/tests/components/elgato/test_light.py @@ -9,7 +9,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components.elgato.const import DOMAIN, SERVICE_IDENTIFY from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, DOMAIN as LIGHT_DOMAIN, ) @@ -74,7 +74,7 @@ async def test_light_change_state_temperature( { ATTR_ENTITY_ID: "light.frenck", ATTR_BRIGHTNESS: 255, - ATTR_COLOR_TEMP: 100, + ATTR_COLOR_TEMP_KELVIN: 10000, }, blocking=True, ) From 0377dc5b5a7c46b18aa817fa6c4ad336f86d6953 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 10:18:11 +0100 Subject: [PATCH 0535/1198] Move coordinator for TwenteMilieu into own module (#133000) --- .../components/twentemilieu/__init__.py | 32 +----------- .../components/twentemilieu/calendar.py | 2 +- .../components/twentemilieu/coordinator.py | 49 +++++++++++++++++++ .../components/twentemilieu/entity.py | 2 +- .../twentemilieu/quality_scale.yaml | 5 +- .../components/twentemilieu/sensor.py | 2 +- tests/components/twentemilieu/conftest.py | 3 +- tests/components/twentemilieu/test_init.py | 2 +- 8 files changed, 58 insertions(+), 39 deletions(-) create mode 100644 homeassistant/components/twentemilieu/coordinator.py diff --git a/homeassistant/components/twentemilieu/__init__.py b/homeassistant/components/twentemilieu/__init__.py index 2796e9916f1..1359e707601 100644 --- a/homeassistant/components/twentemilieu/__init__.py +++ b/homeassistant/components/twentemilieu/__init__.py @@ -2,53 +2,25 @@ from __future__ import annotations -from datetime import date, timedelta - -from twentemilieu import TwenteMilieu, WasteType import voluptuous as vol -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from .const import CONF_HOUSE_LETTER, CONF_HOUSE_NUMBER, CONF_POST_CODE, DOMAIN, LOGGER - -SCAN_INTERVAL = timedelta(seconds=3600) +from .coordinator import TwenteMilieuConfigEntry, TwenteMilieuDataUpdateCoordinator SERVICE_UPDATE = "update" SERVICE_SCHEMA = vol.Schema({vol.Optional(CONF_ID): cv.string}) PLATFORMS = [Platform.CALENDAR, Platform.SENSOR] -type TwenteMilieuDataUpdateCoordinator = DataUpdateCoordinator[ - dict[WasteType, list[date]] -] -type TwenteMilieuConfigEntry = ConfigEntry[TwenteMilieuDataUpdateCoordinator] - async def async_setup_entry( hass: HomeAssistant, entry: TwenteMilieuConfigEntry ) -> bool: """Set up Twente Milieu from a config entry.""" - session = async_get_clientsession(hass) - 
twentemilieu = TwenteMilieu( - post_code=entry.data[CONF_POST_CODE], - house_number=entry.data[CONF_HOUSE_NUMBER], - house_letter=entry.data[CONF_HOUSE_LETTER], - session=session, - ) - - coordinator: TwenteMilieuDataUpdateCoordinator = DataUpdateCoordinator( - hass, - LOGGER, - config_entry=entry, - name=DOMAIN, - update_interval=SCAN_INTERVAL, - update_method=twentemilieu.update, - ) + coordinator = TwenteMilieuDataUpdateCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() entry.runtime_data = coordinator diff --git a/homeassistant/components/twentemilieu/calendar.py b/homeassistant/components/twentemilieu/calendar.py index 8e7452823b7..d163ae4e564 100644 --- a/homeassistant/components/twentemilieu/calendar.py +++ b/homeassistant/components/twentemilieu/calendar.py @@ -10,8 +10,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.dt as dt_util -from . import TwenteMilieuConfigEntry from .const import WASTE_TYPE_TO_DESCRIPTION +from .coordinator import TwenteMilieuConfigEntry from .entity import TwenteMilieuEntity diff --git a/homeassistant/components/twentemilieu/coordinator.py b/homeassistant/components/twentemilieu/coordinator.py new file mode 100644 index 00000000000..d2cf5a887ef --- /dev/null +++ b/homeassistant/components/twentemilieu/coordinator.py @@ -0,0 +1,49 @@ +"""Data update coordinator for Twente Milieu.""" + +from __future__ import annotations + +from datetime import date + +from twentemilieu import TwenteMilieu, WasteType + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +from .const import ( + CONF_HOUSE_LETTER, + CONF_HOUSE_NUMBER, + CONF_POST_CODE, + DOMAIN, + LOGGER, + SCAN_INTERVAL, +) + +type TwenteMilieuConfigEntry = ConfigEntry[TwenteMilieuDataUpdateCoordinator] + + +class TwenteMilieuDataUpdateCoordinator( + DataUpdateCoordinator[dict[WasteType, list[date]]] +): + """Class to manage fetching Twente Milieu data.""" + + def __init__(self, hass: HomeAssistant, entry: TwenteMilieuConfigEntry) -> None: + """Initialize Twente Milieu data update coordinator.""" + self.twentemilieu = TwenteMilieu( + post_code=entry.data[CONF_POST_CODE], + house_number=entry.data[CONF_HOUSE_NUMBER], + house_letter=entry.data[CONF_HOUSE_LETTER], + session=async_get_clientsession(hass), + ) + super().__init__( + hass, + LOGGER, + name=DOMAIN, + update_interval=SCAN_INTERVAL, + config_entry=entry, + ) + + async def _async_update_data(self) -> dict[WasteType, list[date]]: + """Fetch Twente Milieu data.""" + return await self.twentemilieu.update() diff --git a/homeassistant/components/twentemilieu/entity.py b/homeassistant/components/twentemilieu/entity.py index 0a2473f4524..660dd16288c 100644 --- a/homeassistant/components/twentemilieu/entity.py +++ b/homeassistant/components/twentemilieu/entity.py @@ -7,8 +7,8 @@ from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity import Entity from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . 
import TwenteMilieuConfigEntry, TwenteMilieuDataUpdateCoordinator from .const import DOMAIN +from .coordinator import TwenteMilieuConfigEntry, TwenteMilieuDataUpdateCoordinator class TwenteMilieuEntity(CoordinatorEntity[TwenteMilieuDataUpdateCoordinator], Entity): diff --git a/homeassistant/components/twentemilieu/quality_scale.yaml b/homeassistant/components/twentemilieu/quality_scale.yaml index f8fd813b03d..210416e56c5 100644 --- a/homeassistant/components/twentemilieu/quality_scale.yaml +++ b/homeassistant/components/twentemilieu/quality_scale.yaml @@ -6,10 +6,7 @@ rules: This integration does not provide additional actions. appropriate-polling: done brands: done - common-modules: - status: todo - comment: | - The coordinator isn't in the common module yet. + common-modules: done config-flow-test-coverage: done config-flow: status: todo diff --git a/homeassistant/components/twentemilieu/sensor.py b/homeassistant/components/twentemilieu/sensor.py index f5f91ce7080..4605ede1f87 100644 --- a/homeassistant/components/twentemilieu/sensor.py +++ b/homeassistant/components/twentemilieu/sensor.py @@ -16,8 +16,8 @@ from homeassistant.const import CONF_ID from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import TwenteMilieuConfigEntry from .const import DOMAIN +from .coordinator import TwenteMilieuConfigEntry from .entity import TwenteMilieuEntity diff --git a/tests/components/twentemilieu/conftest.py b/tests/components/twentemilieu/conftest.py index 7ecf1657ce9..e3e3c97034c 100644 --- a/tests/components/twentemilieu/conftest.py +++ b/tests/components/twentemilieu/conftest.py @@ -51,7 +51,8 @@ def mock_twentemilieu() -> Generator[MagicMock]: """Return a mocked Twente Milieu client.""" with ( patch( - "homeassistant.components.twentemilieu.TwenteMilieu", autospec=True + "homeassistant.components.twentemilieu.coordinator.TwenteMilieu", + autospec=True, ) as twentemilieu_mock, patch( "homeassistant.components.twentemilieu.config_flow.TwenteMilieu", diff --git a/tests/components/twentemilieu/test_init.py b/tests/components/twentemilieu/test_init.py index 7e08b5f4938..5cc09e6875d 100644 --- a/tests/components/twentemilieu/test_init.py +++ b/tests/components/twentemilieu/test_init.py @@ -29,7 +29,7 @@ async def test_load_unload_config_entry( @patch( - "homeassistant.components.twentemilieu.TwenteMilieu.update", + "homeassistant.components.twentemilieu.coordinator.TwenteMilieu.update", side_effect=RuntimeError, ) async def test_config_entry_not_ready( From 4a7039f51d1521377410a9af45800d839f171072 Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Thu, 12 Dec 2024 10:25:21 +0100 Subject: [PATCH 0536/1198] Bump velbusaio to 2024.12.0 (#132989) --- homeassistant/components/velbus/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/velbus/manifest.json b/homeassistant/components/velbus/manifest.json index 84262ebd61c..5725a10b6f6 100644 --- a/homeassistant/components/velbus/manifest.json +++ b/homeassistant/components/velbus/manifest.json @@ -13,7 +13,7 @@ "velbus-packet", "velbus-protocol" ], - "requirements": ["velbus-aio==2024.11.1"], + "requirements": ["velbus-aio==2024.12.0"], "usb": [ { "vid": "10CF", diff --git a/requirements_all.txt b/requirements_all.txt index 10b8c650127..26acf53fa53 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2939,7 +2939,7 @@ vallox-websocket-api==5.3.0 vehicle==2.2.2 # 
homeassistant.components.velbus -velbus-aio==2024.11.1 +velbus-aio==2024.12.0 # homeassistant.components.venstar venstarcolortouch==0.19 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 194e29e35e8..afe7252f9f8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2349,7 +2349,7 @@ vallox-websocket-api==5.3.0 vehicle==2.2.2 # homeassistant.components.velbus -velbus-aio==2024.11.1 +velbus-aio==2024.12.0 # homeassistant.components.venstar venstarcolortouch==0.19 From d49b1b2d6b23a5e1730076b1bb8787cc8734ea3a Mon Sep 17 00:00:00 2001 From: Klaas Schoute Date: Thu, 12 Dec 2024 10:28:41 +0100 Subject: [PATCH 0537/1198] Use ConfigEntry runtime_data in EnergyZero (#132979) --- .../components/energyzero/__init__.py | 15 ++++----- .../components/energyzero/diagnostics.py | 32 ++++++++----------- homeassistant/components/energyzero/sensor.py | 8 +++-- .../components/energyzero/services.py | 2 +- tests/components/energyzero/test_init.py | 2 -- 5 files changed, 27 insertions(+), 32 deletions(-) diff --git a/homeassistant/components/energyzero/__init__.py b/homeassistant/components/energyzero/__init__.py index 3e1bb830cce..f7591056383 100644 --- a/homeassistant/components/energyzero/__init__.py +++ b/homeassistant/components/energyzero/__init__.py @@ -13,9 +13,11 @@ from .const import DOMAIN from .coordinator import EnergyZeroDataUpdateCoordinator from .services import async_setup_services -PLATFORMS = [Platform.SENSOR] +PLATFORMS: list[Platform] = [Platform.SENSOR] CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) +type EnergyZeroConfigEntry = ConfigEntry[EnergyZeroDataUpdateCoordinator] + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up EnergyZero services.""" @@ -25,7 +27,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: EnergyZeroConfigEntry) -> bool: """Set up EnergyZero from a config entry.""" coordinator = EnergyZeroDataUpdateCoordinator(hass) @@ -35,15 +37,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await coordinator.energyzero.close() raise - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: EnergyZeroConfigEntry) -> bool: """Unload EnergyZero config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/energyzero/diagnostics.py b/homeassistant/components/energyzero/diagnostics.py index 35d20fee929..ee1286598e6 100644 --- a/homeassistant/components/energyzero/diagnostics.py +++ b/homeassistant/components/energyzero/diagnostics.py @@ -5,11 +5,9 @@ from __future__ import annotations from datetime import timedelta from typing import Any -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from . import EnergyZeroDataUpdateCoordinator -from .const import DOMAIN +from . 
import EnergyZeroConfigEntry from .coordinator import EnergyZeroData @@ -32,30 +30,28 @@ def get_gas_price(data: EnergyZeroData, hours: int) -> float | None: async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: EnergyZeroConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: EnergyZeroDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] - return { "entry": { "title": entry.title, }, "energy": { - "current_hour_price": coordinator.data.energy_today.current_price, - "next_hour_price": coordinator.data.energy_today.price_at_time( - coordinator.data.energy_today.utcnow() + timedelta(hours=1) + "current_hour_price": entry.runtime_data.data.energy_today.current_price, + "next_hour_price": entry.runtime_data.data.energy_today.price_at_time( + entry.runtime_data.data.energy_today.utcnow() + timedelta(hours=1) ), - "average_price": coordinator.data.energy_today.average_price, - "max_price": coordinator.data.energy_today.extreme_prices[1], - "min_price": coordinator.data.energy_today.extreme_prices[0], - "highest_price_time": coordinator.data.energy_today.highest_price_time, - "lowest_price_time": coordinator.data.energy_today.lowest_price_time, - "percentage_of_max": coordinator.data.energy_today.pct_of_max_price, - "hours_priced_equal_or_lower": coordinator.data.energy_today.hours_priced_equal_or_lower, + "average_price": entry.runtime_data.data.energy_today.average_price, + "max_price": entry.runtime_data.data.energy_today.extreme_prices[1], + "min_price": entry.runtime_data.data.energy_today.extreme_prices[0], + "highest_price_time": entry.runtime_data.data.energy_today.highest_price_time, + "lowest_price_time": entry.runtime_data.data.energy_today.lowest_price_time, + "percentage_of_max": entry.runtime_data.data.energy_today.pct_of_max_price, + "hours_priced_equal_or_lower": entry.runtime_data.data.energy_today.hours_priced_equal_or_lower, }, "gas": { - "current_hour_price": get_gas_price(coordinator.data, 0), - "next_hour_price": get_gas_price(coordinator.data, 1), + "current_hour_price": get_gas_price(entry.runtime_data.data, 0), + "next_hour_price": get_gas_price(entry.runtime_data.data, 1), }, } diff --git a/homeassistant/components/energyzero/sensor.py b/homeassistant/components/energyzero/sensor.py index f65f7bd559c..d52da599966 100644 --- a/homeassistant/components/energyzero/sensor.py +++ b/homeassistant/components/energyzero/sensor.py @@ -13,7 +13,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CURRENCY_EURO, PERCENTAGE, @@ -26,6 +25,7 @@ from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity +from . 
import EnergyZeroConfigEntry from .const import DOMAIN, SERVICE_TYPE_DEVICE_NAMES from .coordinator import EnergyZeroData, EnergyZeroDataUpdateCoordinator @@ -142,10 +142,12 @@ def get_gas_price(data: EnergyZeroData, hours: int) -> float | None: async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: EnergyZeroConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up EnergyZero Sensors based on a config entry.""" - coordinator: EnergyZeroDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities( EnergyZeroSensorEntity( coordinator=coordinator, diff --git a/homeassistant/components/energyzero/services.py b/homeassistant/components/energyzero/services.py index d98699c5c08..b281274575e 100644 --- a/homeassistant/components/energyzero/services.py +++ b/homeassistant/components/energyzero/services.py @@ -107,7 +107,7 @@ def __get_coordinator( }, ) - coordinator: EnergyZeroDataUpdateCoordinator = hass.data[DOMAIN][entry_id] + coordinator: EnergyZeroDataUpdateCoordinator = entry.runtime_data return coordinator diff --git a/tests/components/energyzero/test_init.py b/tests/components/energyzero/test_init.py index 287157026f4..f8e7e75e902 100644 --- a/tests/components/energyzero/test_init.py +++ b/tests/components/energyzero/test_init.py @@ -5,7 +5,6 @@ from unittest.mock import MagicMock, patch from energyzero import EnergyZeroConnectionError import pytest -from homeassistant.components.energyzero.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -26,7 +25,6 @@ async def test_load_unload_config_entry( await hass.config_entries.async_unload(mock_config_entry.entry_id) await hass.async_block_till_done() - assert not hass.data.get(DOMAIN) assert mock_config_entry.state is ConfigEntryState.NOT_LOADED From a30c942fa7246d7781a74ef6ad1239274bf215af Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 12 Dec 2024 10:42:27 +0100 Subject: [PATCH 0538/1198] Don't use kitchen_sink integration in config entries tests (#133012) --- .../components/config/test_config_entries.py | 22 ++++++------------- 1 file changed, 7 insertions(+), 15 deletions(-) diff --git a/tests/components/config/test_config_entries.py b/tests/components/config/test_config_entries.py index b96aa9ae006..4a3bff47d89 100644 --- a/tests/components/config/test_config_entries.py +++ b/tests/components/config/test_config_entries.py @@ -255,9 +255,7 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: async def test_remove_entry(hass: HomeAssistant, client: TestClient) -> None: """Test removing an entry via the API.""" - entry = MockConfigEntry( - domain="kitchen_sink", state=core_ce.ConfigEntryState.LOADED - ) + entry = MockConfigEntry(domain="test", state=core_ce.ConfigEntryState.LOADED) entry.add_to_hass(hass) resp = await client.delete(f"/api/config/config_entries/entry/{entry.entry_id}") assert resp.status == HTTPStatus.OK @@ -268,11 +266,9 @@ async def test_remove_entry(hass: HomeAssistant, client: TestClient) -> None: async def test_reload_entry(hass: HomeAssistant, client: TestClient) -> None: """Test reloading an entry via the API.""" - entry = MockConfigEntry( - domain="kitchen_sink", state=core_ce.ConfigEntryState.LOADED - ) + entry = MockConfigEntry(domain="test", state=core_ce.ConfigEntryState.LOADED) entry.add_to_hass(hass) - hass.config.components.add("kitchen_sink") + 
hass.config.components.add("test") resp = await client.post( f"/api/config/config_entries/entry/{entry.entry_id}/reload" ) @@ -1157,11 +1153,9 @@ async def test_update_prefrences( assert await async_setup_component(hass, "config", {}) ws_client = await hass_ws_client(hass) - entry = MockConfigEntry( - domain="kitchen_sink", state=core_ce.ConfigEntryState.LOADED - ) + entry = MockConfigEntry(domain="test", state=core_ce.ConfigEntryState.LOADED) entry.add_to_hass(hass) - hass.config.components.add("kitchen_sink") + hass.config.components.add("test") assert entry.pref_disable_new_entities is False assert entry.pref_disable_polling is False @@ -1257,12 +1251,10 @@ async def test_disable_entry( assert await async_setup_component(hass, "config", {}) ws_client = await hass_ws_client(hass) - entry = MockConfigEntry( - domain="kitchen_sink", state=core_ce.ConfigEntryState.LOADED - ) + entry = MockConfigEntry(domain="test", state=core_ce.ConfigEntryState.LOADED) entry.add_to_hass(hass) assert entry.disabled_by is None - hass.config.components.add("kitchen_sink") + hass.config.components.add("test") # Disable await ws_client.send_json( From 7dc31dec3b05a28af46b36f830bacec426bdaebf Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 10:52:03 +0100 Subject: [PATCH 0539/1198] Fix config entry import in Twente Milieu diagnostic (#133017) --- homeassistant/components/twentemilieu/diagnostics.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/twentemilieu/diagnostics.py b/homeassistant/components/twentemilieu/diagnostics.py index 75775303eb6..cb3b411c530 100644 --- a/homeassistant/components/twentemilieu/diagnostics.py +++ b/homeassistant/components/twentemilieu/diagnostics.py @@ -6,7 +6,7 @@ from typing import Any from homeassistant.core import HomeAssistant -from . 
import TwenteMilieuConfigEntry +from .coordinator import TwenteMilieuConfigEntry async def async_get_config_entry_diagnostics( From 0e45ccb9566fd92529f5b27b38dc2ab869c57085 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 11:13:24 +0100 Subject: [PATCH 0540/1198] Migrate google_assistant color_temp handlers to use Kelvin (#132997) --- .../components/google_assistant/trait.py | 29 +++++++------------ .../google_assistant/test_google_assistant.py | 2 +- .../google_assistant/test_smart_home.py | 2 +- .../components/google_assistant/test_trait.py | 16 +++++----- 4 files changed, 21 insertions(+), 28 deletions(-) diff --git a/homeassistant/components/google_assistant/trait.py b/homeassistant/components/google_assistant/trait.py index 8025a291031..44251a3be04 100644 --- a/homeassistant/components/google_assistant/trait.py +++ b/homeassistant/components/google_assistant/trait.py @@ -553,15 +553,9 @@ class ColorSettingTrait(_Trait): response["colorModel"] = "hsv" if light.color_temp_supported(color_modes): - # Max Kelvin is Min Mireds K = 1000000 / mireds - # Min Kelvin is Max Mireds K = 1000000 / mireds response["colorTemperatureRange"] = { - "temperatureMaxK": color_util.color_temperature_mired_to_kelvin( - attrs.get(light.ATTR_MIN_MIREDS) - ), - "temperatureMinK": color_util.color_temperature_mired_to_kelvin( - attrs.get(light.ATTR_MAX_MIREDS) - ), + "temperatureMaxK": int(attrs.get(light.ATTR_MAX_COLOR_TEMP_KELVIN)), + "temperatureMinK": int(attrs.get(light.ATTR_MIN_COLOR_TEMP_KELVIN)), } return response @@ -583,7 +577,7 @@ class ColorSettingTrait(_Trait): } if light.color_temp_supported([color_mode]): - temp = self.state.attributes.get(light.ATTR_COLOR_TEMP) + temp = self.state.attributes.get(light.ATTR_COLOR_TEMP_KELVIN) # Some faulty integrations might put 0 in here, raising exception. 
if temp == 0: _LOGGER.warning( @@ -592,9 +586,7 @@ class ColorSettingTrait(_Trait): temp, ) elif temp is not None: - color["temperatureK"] = color_util.color_temperature_mired_to_kelvin( - temp - ) + color["temperatureK"] = temp response = {} @@ -606,11 +598,9 @@ class ColorSettingTrait(_Trait): async def execute(self, command, data, params, challenge): """Execute a color temperature command.""" if "temperature" in params["color"]: - temp = color_util.color_temperature_kelvin_to_mired( - params["color"]["temperature"] - ) - min_temp = self.state.attributes[light.ATTR_MIN_MIREDS] - max_temp = self.state.attributes[light.ATTR_MAX_MIREDS] + temp = params["color"]["temperature"] + max_temp = self.state.attributes[light.ATTR_MAX_COLOR_TEMP_KELVIN] + min_temp = self.state.attributes[light.ATTR_MIN_COLOR_TEMP_KELVIN] if temp < min_temp or temp > max_temp: raise SmartHomeError( @@ -621,7 +611,10 @@ class ColorSettingTrait(_Trait): await self.hass.services.async_call( light.DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: self.state.entity_id, light.ATTR_COLOR_TEMP: temp}, + { + ATTR_ENTITY_ID: self.state.entity_id, + light.ATTR_COLOR_TEMP_KELVIN: temp, + }, blocking=not self.config.should_report_state, context=data.context, ) diff --git a/tests/components/google_assistant/test_google_assistant.py b/tests/components/google_assistant/test_google_assistant.py index ea30f89e0ef..2b0bfd82908 100644 --- a/tests/components/google_assistant/test_google_assistant.py +++ b/tests/components/google_assistant/test_google_assistant.py @@ -491,7 +491,7 @@ async def test_execute_request(hass_fixture, assistant_client, auth_header) -> N assert kitchen.attributes.get(light.ATTR_RGB_COLOR) == (255, 0, 0) bed = hass_fixture.states.get("light.bed_light") - assert bed.attributes.get(light.ATTR_COLOR_TEMP) == 212 + assert bed.attributes.get(light.ATTR_COLOR_TEMP_KELVIN) == 4700 assert hass_fixture.states.get("switch.decorative_lights").state == "off" diff --git a/tests/components/google_assistant/test_smart_home.py b/tests/components/google_assistant/test_smart_home.py index c5e17155067..a1c2ba1b3d4 100644 --- a/tests/components/google_assistant/test_smart_home.py +++ b/tests/components/google_assistant/test_smart_home.py @@ -1450,7 +1450,7 @@ async def test_sync_message_recovery( "light.bad_light", "on", { - "min_mireds": "badvalue", + "max_color_temp_kelvin": "badvalue", "supported_color_modes": ["color_temp"], }, ) diff --git a/tests/components/google_assistant/test_trait.py b/tests/components/google_assistant/test_trait.py index 9e9c7015674..d269b5ff0d7 100644 --- a/tests/components/google_assistant/test_trait.py +++ b/tests/components/google_assistant/test_trait.py @@ -77,7 +77,7 @@ from homeassistant.const import ( ) from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State from homeassistant.core_config import async_process_ha_core_config -from homeassistant.util import color, dt as dt_util +from homeassistant.util import dt as dt_util from homeassistant.util.unit_conversion import TemperatureConverter from . 
import BASIC_CONFIG, MockConfig @@ -870,10 +870,10 @@ async def test_color_setting_temperature_light(hass: HomeAssistant) -> None: "light.bla", STATE_ON, { - light.ATTR_MIN_MIREDS: 200, + light.ATTR_MAX_COLOR_TEMP_KELVIN: 5000, light.ATTR_COLOR_MODE: "color_temp", - light.ATTR_COLOR_TEMP: 300, - light.ATTR_MAX_MIREDS: 500, + light.ATTR_COLOR_TEMP_KELVIN: 3333, + light.ATTR_MIN_COLOR_TEMP_KELVIN: 2000, "supported_color_modes": ["color_temp"], }, ), @@ -906,7 +906,7 @@ async def test_color_setting_temperature_light(hass: HomeAssistant) -> None: assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", - light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), + light.ATTR_COLOR_TEMP_KELVIN: 2857, } @@ -924,9 +924,9 @@ async def test_color_light_temperature_light_bad_temp(hass: HomeAssistant) -> No "light.bla", STATE_ON, { - light.ATTR_MIN_MIREDS: 200, - light.ATTR_COLOR_TEMP: 0, - light.ATTR_MAX_MIREDS: 500, + light.ATTR_MAX_COLOR_TEMP_KELVIN: 5000, + light.ATTR_COLOR_TEMP_KELVIN: 0, + light.ATTR_MIN_COLOR_TEMP_KELVIN: 2000, }, ), BASIC_CONFIG, From a9d71e0a5fb15b2f6750dbf9ec32e1e118eced8b Mon Sep 17 00:00:00 2001 From: Klaas Schoute Date: Thu, 12 Dec 2024 11:34:36 +0100 Subject: [PATCH 0541/1198] Add reconfigure flow for Powerfox integration (#132260) --- .../components/powerfox/config_flow.py | 33 ++++++ .../components/powerfox/quality_scale.yaml | 2 +- .../components/powerfox/strings.json | 15 ++- tests/components/powerfox/test_config_flow.py | 105 ++++++++++++++++++ 4 files changed, 153 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/powerfox/config_flow.py b/homeassistant/components/powerfox/config_flow.py index ca78b8eb874..dd17badf881 100644 --- a/homeassistant/components/powerfox/config_flow.py +++ b/homeassistant/components/powerfox/config_flow.py @@ -100,3 +100,36 @@ class PowerfoxConfigFlow(ConfigFlow, domain=DOMAIN): data_schema=STEP_REAUTH_SCHEMA, errors=errors, ) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Reconfigure Powerfox configuration.""" + errors = {} + + reconfigure_entry = self._get_reconfigure_entry() + if user_input is not None: + client = Powerfox( + username=user_input[CONF_EMAIL], + password=user_input[CONF_PASSWORD], + session=async_get_clientsession(self.hass), + ) + try: + await client.all_devices() + except PowerfoxAuthenticationError: + errors["base"] = "invalid_auth" + except PowerfoxConnectionError: + errors["base"] = "cannot_connect" + else: + if reconfigure_entry.data[CONF_EMAIL] != user_input[CONF_EMAIL]: + self._async_abort_entries_match( + {CONF_EMAIL: user_input[CONF_EMAIL]} + ) + return self.async_update_reload_and_abort( + reconfigure_entry, data_updates=user_input + ) + return self.async_show_form( + step_id="reconfigure", + data_schema=STEP_USER_DATA_SCHEMA, + errors=errors, + ) diff --git a/homeassistant/components/powerfox/quality_scale.yaml b/homeassistant/components/powerfox/quality_scale.yaml index 7e104b894ca..f72d25c3684 100644 --- a/homeassistant/components/powerfox/quality_scale.yaml +++ b/homeassistant/components/powerfox/quality_scale.yaml @@ -80,7 +80,7 @@ rules: status: exempt comment: | There is no need for icon translations. 
- reconfiguration-flow: todo + reconfiguration-flow: done repair-issues: status: exempt comment: | diff --git a/homeassistant/components/powerfox/strings.json b/homeassistant/components/powerfox/strings.json index 3eab77494d3..4a7c8e8fa4d 100644 --- a/homeassistant/components/powerfox/strings.json +++ b/homeassistant/components/powerfox/strings.json @@ -21,6 +21,18 @@ "data_description": { "password": "[%key:component::powerfox::config::step::user::data_description::password%]" } + }, + "reconfigure": { + "title": "Reconfigure your Powerfox account", + "description": "Powerfox is already configured. Would you like to reconfigure it?", + "data": { + "email": "[%key:common::config_flow::data::email%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "email": "[%key:component::powerfox::config::step::user::data_description::email%]", + "password": "[%key:component::powerfox::config::step::user::data_description::password%]" + } } }, "error": { @@ -29,7 +41,8 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" } }, "entity": { diff --git a/tests/components/powerfox/test_config_flow.py b/tests/components/powerfox/test_config_flow.py index 759092aee6e..a38f316faf3 100644 --- a/tests/components/powerfox/test_config_flow.py +++ b/tests/components/powerfox/test_config_flow.py @@ -110,6 +110,32 @@ async def test_duplicate_entry( assert result.get("reason") == "already_configured" +async def test_duplicate_entry_reconfiguration( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_powerfox_client: AsyncMock, +) -> None: + """Test abort when setting up duplicate entry on reconfiguration.""" + # Add two config entries + mock_config_entry.add_to_hass(hass) + mock_config_entry_2 = MockConfigEntry( + domain=DOMAIN, + data={CONF_EMAIL: "new@powerfox.test", CONF_PASSWORD: "new-password"}, + ) + mock_config_entry_2.add_to_hass(hass) + assert len(hass.config_entries.async_entries()) == 2 + + # Reconfigure the second entry + result = await mock_config_entry_2.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_EMAIL: "test@powerfox.test", CONF_PASSWORD: "test-password"}, + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "already_configured" + + @pytest.mark.parametrize( ("exception", "error"), [ @@ -216,3 +242,82 @@ async def test_step_reauth_exceptions( assert len(hass.config_entries.async_entries()) == 1 assert mock_config_entry.data[CONF_PASSWORD] == "new-password" + + +async def test_reconfigure( + hass: HomeAssistant, + mock_powerfox_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfiguration of existing entry.""" + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "reconfigure" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_EMAIL: "new-email@powerfox.test", + CONF_PASSWORD: "new-password", + }, + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "reconfigure_successful" + + 
assert len(hass.config_entries.async_entries()) == 1 + assert mock_config_entry.data[CONF_EMAIL] == "new-email@powerfox.test" + assert mock_config_entry.data[CONF_PASSWORD] == "new-password" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (PowerfoxConnectionError, "cannot_connect"), + (PowerfoxAuthenticationError, "invalid_auth"), + ], +) +async def test_reconfigure_exceptions( + hass: HomeAssistant, + mock_powerfox_client: AsyncMock, + mock_config_entry: MockConfigEntry, + exception: Exception, + error: str, +) -> None: + """Test exceptions during reconfiguration flow.""" + mock_powerfox_client.all_devices.side_effect = exception + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "reconfigure" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_EMAIL: "new-email@powerfox.test", + CONF_PASSWORD: "new-password", + }, + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("errors") == {"base": error} + + # Recover from error + mock_powerfox_client.all_devices.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_EMAIL: "new-email@powerfox.test", + CONF_PASSWORD: "new-password", + }, + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "reconfigure_successful" + + assert len(hass.config_entries.async_entries()) == 1 + assert mock_config_entry.data[CONF_EMAIL] == "new-email@powerfox.test" + assert mock_config_entry.data[CONF_PASSWORD] == "new-password" From 000667248987600bd552e14e85c48c610e3d1d1d Mon Sep 17 00:00:00 2001 From: Klaas Schoute Date: Thu, 12 Dec 2024 11:39:55 +0100 Subject: [PATCH 0542/1198] Improve diagnostics code of EnergyZero integration (#133019) --- .../components/energyzero/diagnostics.py | 27 ++++++++++--------- .../components/energyzero/services.py | 12 +++++---- 2 files changed, 22 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/energyzero/diagnostics.py b/homeassistant/components/energyzero/diagnostics.py index ee1286598e6..e6116eac259 100644 --- a/homeassistant/components/energyzero/diagnostics.py +++ b/homeassistant/components/energyzero/diagnostics.py @@ -33,25 +33,28 @@ async def async_get_config_entry_diagnostics( hass: HomeAssistant, entry: EnergyZeroConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" + coordinator_data = entry.runtime_data.data + energy_today = coordinator_data.energy_today + return { "entry": { "title": entry.title, }, "energy": { - "current_hour_price": entry.runtime_data.data.energy_today.current_price, - "next_hour_price": entry.runtime_data.data.energy_today.price_at_time( - entry.runtime_data.data.energy_today.utcnow() + timedelta(hours=1) + "current_hour_price": energy_today.current_price, + "next_hour_price": energy_today.price_at_time( + energy_today.utcnow() + timedelta(hours=1) ), - "average_price": entry.runtime_data.data.energy_today.average_price, - "max_price": entry.runtime_data.data.energy_today.extreme_prices[1], - "min_price": entry.runtime_data.data.energy_today.extreme_prices[0], - "highest_price_time": entry.runtime_data.data.energy_today.highest_price_time, - "lowest_price_time": entry.runtime_data.data.energy_today.lowest_price_time, - "percentage_of_max": entry.runtime_data.data.energy_today.pct_of_max_price, - "hours_priced_equal_or_lower": 
entry.runtime_data.data.energy_today.hours_priced_equal_or_lower, + "average_price": energy_today.average_price, + "max_price": energy_today.extreme_prices[1], + "min_price": energy_today.extreme_prices[0], + "highest_price_time": energy_today.highest_price_time, + "lowest_price_time": energy_today.lowest_price_time, + "percentage_of_max": energy_today.pct_of_max_price, + "hours_priced_equal_or_lower": energy_today.hours_priced_equal_or_lower, }, "gas": { - "current_hour_price": get_gas_price(entry.runtime_data.data, 0), - "next_hour_price": get_gas_price(entry.runtime_data.data, 1), + "current_hour_price": get_gas_price(coordinator_data, 0), + "next_hour_price": get_gas_price(coordinator_data, 1), }, } diff --git a/homeassistant/components/energyzero/services.py b/homeassistant/components/energyzero/services.py index b281274575e..ba2bbf0573f 100644 --- a/homeassistant/components/energyzero/services.py +++ b/homeassistant/components/energyzero/services.py @@ -5,12 +5,12 @@ from __future__ import annotations from datetime import date, datetime from enum import Enum from functools import partial -from typing import Final +from typing import TYPE_CHECKING, Final from energyzero import Electricity, Gas, VatOption import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import ( HomeAssistant, ServiceCall, @@ -22,6 +22,9 @@ from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import selector from homeassistant.util import dt as dt_util +if TYPE_CHECKING: + from . import EnergyZeroConfigEntry + from .const import DOMAIN from .coordinator import EnergyZeroDataUpdateCoordinator @@ -88,7 +91,7 @@ def __get_coordinator( ) -> EnergyZeroDataUpdateCoordinator: """Get the coordinator from the entry.""" entry_id: str = call.data[ATTR_CONFIG_ENTRY] - entry: ConfigEntry | None = hass.config_entries.async_get_entry(entry_id) + entry: EnergyZeroConfigEntry | None = hass.config_entries.async_get_entry(entry_id) if not entry: raise ServiceValidationError( @@ -107,8 +110,7 @@ def __get_coordinator( }, ) - coordinator: EnergyZeroDataUpdateCoordinator = entry.runtime_data - return coordinator + return entry.runtime_data async def __get_prices( From ded7cee6e57b73e9cda05ba97db322686b363628 Mon Sep 17 00:00:00 2001 From: Simone Chemelli Date: Thu, 12 Dec 2024 05:42:00 -0500 Subject: [PATCH 0543/1198] fix AndroidTV logging when disconnected (#132919) --- .../components/androidtv/__init__.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/androidtv/__init__.py b/homeassistant/components/androidtv/__init__.py index 44e4c54b560..4ffa0e24777 100644 --- a/homeassistant/components/androidtv/__init__.py +++ b/homeassistant/components/androidtv/__init__.py @@ -135,15 +135,16 @@ async def async_connect_androidtv( ) aftv = await async_androidtv_setup( - config[CONF_HOST], - config[CONF_PORT], - adbkey, - config.get(CONF_ADB_SERVER_IP), - config.get(CONF_ADB_SERVER_PORT, DEFAULT_ADB_SERVER_PORT), - state_detection_rules, - config[CONF_DEVICE_CLASS], - timeout, - signer, + host=config[CONF_HOST], + port=config[CONF_PORT], + adbkey=adbkey, + adb_server_ip=config.get(CONF_ADB_SERVER_IP), + adb_server_port=config.get(CONF_ADB_SERVER_PORT, DEFAULT_ADB_SERVER_PORT), + state_detection_rules=state_detection_rules, + device_class=config[CONF_DEVICE_CLASS], + auth_timeout_s=timeout, + signer=signer, + log_errors=False, 
) if not aftv.available: From 52491bb75eafa9fc3edf068e1907851fb6fff87e Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 12:52:01 +0100 Subject: [PATCH 0544/1198] Migrate tplink light tests to use Kelvin (#133026) --- tests/components/tplink/test_light.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/components/tplink/test_light.py b/tests/components/tplink/test_light.py index 6998d8fbcc7..b7f4ed6b8f4 100644 --- a/tests/components/tplink/test_light.py +++ b/tests/components/tplink/test_light.py @@ -26,8 +26,8 @@ from homeassistant.components.light import ( ATTR_EFFECT, ATTR_EFFECT_LIST, ATTR_HS_COLOR, - ATTR_MAX_MIREDS, - ATTR_MIN_MIREDS, + ATTR_MAX_COLOR_TEMP_KELVIN, + ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_SUPPORTED_COLOR_MODES, ATTR_TRANSITION, @@ -153,8 +153,8 @@ async def test_color_light( assert attributes[ATTR_COLOR_MODE] == "brightness" else: assert attributes[ATTR_COLOR_MODE] == "hs" - assert attributes[ATTR_MIN_MIREDS] == 111 - assert attributes[ATTR_MAX_MIREDS] == 250 + assert attributes[ATTR_MIN_COLOR_TEMP_KELVIN] == 4000 + assert attributes[ATTR_MAX_COLOR_TEMP_KELVIN] == 9000 assert attributes[ATTR_HS_COLOR] == (10, 30) assert attributes[ATTR_RGB_COLOR] == (255, 191, 178) assert attributes[ATTR_XY_COLOR] == (0.42, 0.336) @@ -307,8 +307,8 @@ async def test_color_temp_light( assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp", "hs"] else: assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp"] - assert attributes[ATTR_MIN_MIREDS] == 111 - assert attributes[ATTR_MAX_MIREDS] == 250 + assert attributes[ATTR_MAX_COLOR_TEMP_KELVIN] == 9000 + assert attributes[ATTR_MIN_COLOR_TEMP_KELVIN] == 4000 assert attributes[ATTR_COLOR_TEMP_KELVIN] == 4000 await hass.services.async_call( From f2aaf2ac4abe6722763cd57d905f158b5464b13e Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 12:55:25 +0100 Subject: [PATCH 0545/1198] Small test cleanups in Twente Milieu (#133028) --- .../snapshots/test_config_flow.ambr | 93 ------------------- .../twentemilieu/test_config_flow.py | 85 ++++++++++++----- 2 files changed, 63 insertions(+), 115 deletions(-) delete mode 100644 tests/components/twentemilieu/snapshots/test_config_flow.ambr diff --git a/tests/components/twentemilieu/snapshots/test_config_flow.ambr b/tests/components/twentemilieu/snapshots/test_config_flow.ambr deleted file mode 100644 index a98119e81c9..00000000000 --- a/tests/components/twentemilieu/snapshots/test_config_flow.ambr +++ /dev/null @@ -1,93 +0,0 @@ -# serializer version: 1 -# name: test_full_user_flow - FlowResultSnapshot({ - 'context': dict({ - 'source': 'user', - 'unique_id': '12345', - }), - 'data': dict({ - 'house_letter': 'A', - 'house_number': '1', - 'id': 12345, - 'post_code': '1234AB', - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': , - 'handler': 'twentemilieu', - 'minor_version': 1, - 'options': dict({ - }), - 'result': ConfigEntrySnapshot({ - 'data': dict({ - 'house_letter': 'A', - 'house_number': '1', - 'id': 12345, - 'post_code': '1234AB', - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'twentemilieu', - 'entry_id': , - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': '12345', - 'unique_id': '12345', - 'version': 1, - }), - 'title': '12345', - 'type': , - 'version': 1, - }) -# --- -# name: test_invalid_address - FlowResultSnapshot({ 
- 'context': dict({ - 'source': 'user', - 'unique_id': '12345', - }), - 'data': dict({ - 'house_letter': None, - 'house_number': '1', - 'id': 12345, - 'post_code': '1234AB', - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': , - 'handler': 'twentemilieu', - 'minor_version': 1, - 'options': dict({ - }), - 'result': ConfigEntrySnapshot({ - 'data': dict({ - 'house_letter': None, - 'house_number': '1', - 'id': 12345, - 'post_code': '1234AB', - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'twentemilieu', - 'entry_id': , - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': '12345', - 'unique_id': '12345', - 'version': 1, - }), - 'title': '12345', - 'type': , - 'version': 1, - }) -# --- diff --git a/tests/components/twentemilieu/test_config_flow.py b/tests/components/twentemilieu/test_config_flow.py index dbc01c69acb..6dc261b8769 100644 --- a/tests/components/twentemilieu/test_config_flow.py +++ b/tests/components/twentemilieu/test_config_flow.py @@ -3,7 +3,6 @@ from unittest.mock import MagicMock import pytest -from syrupy.assertion import SnapshotAssertion from twentemilieu import TwenteMilieuAddressError, TwenteMilieuConnectionError from homeassistant import config_entries @@ -15,6 +14,7 @@ from homeassistant.components.twentemilieu.const import ( DOMAIN, ) from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_ID from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -24,16 +24,16 @@ pytestmark = pytest.mark.usefixtures("mock_setup_entry") @pytest.mark.usefixtures("mock_twentemilieu") -async def test_full_user_flow(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: +async def test_full_user_flow(hass: HomeAssistant) -> None: """Test registering an integration and finishing flow works.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_POST_CODE: "1234AB", @@ -42,14 +42,22 @@ async def test_full_user_flow(hass: HomeAssistant, snapshot: SnapshotAssertion) }, ) - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2 == snapshot + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "12345" + assert config_entry.data == { + CONF_HOUSE_LETTER: "A", + CONF_HOUSE_NUMBER: "1", + CONF_ID: 12345, + CONF_POST_CODE: "1234AB", + } + assert not config_entry.options async def test_invalid_address( hass: HomeAssistant, mock_twentemilieu: MagicMock, - snapshot: SnapshotAssertion, ) -> None: """Test full user flow when the user enters an incorrect address. 
@@ -60,11 +68,11 @@ async def test_invalid_address( DOMAIN, context={"source": SOURCE_USER} ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" mock_twentemilieu.unique_id.side_effect = TwenteMilieuAddressError - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_POST_CODE: "1234", @@ -72,12 +80,12 @@ async def test_invalid_address( }, ) - assert result2.get("type") is FlowResultType.FORM - assert result2.get("step_id") == "user" - assert result2.get("errors") == {"base": "invalid_address"} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "invalid_address"} mock_twentemilieu.unique_id.side_effect = None - result3 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_POST_CODE: "1234AB", @@ -85,8 +93,17 @@ async def test_invalid_address( }, ) - assert result3.get("type") is FlowResultType.CREATE_ENTRY - assert result3 == snapshot + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "12345" + assert config_entry.data == { + CONF_HOUSE_LETTER: None, + CONF_HOUSE_NUMBER: "1", + CONF_ID: 12345, + CONF_POST_CODE: "1234AB", + } + assert not config_entry.options async def test_connection_error( @@ -106,9 +123,33 @@ async def test_connection_error( }, ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" - assert result.get("errors") == {"base": "cannot_connect"} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "cannot_connect"} + + # Recover from error + mock_twentemilieu.unique_id.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_POST_CODE: "1234AB", + CONF_HOUSE_NUMBER: "1", + CONF_HOUSE_LETTER: "A", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "12345" + assert config_entry.data == { + CONF_HOUSE_LETTER: "A", + CONF_HOUSE_NUMBER: "1", + CONF_ID: 12345, + CONF_POST_CODE: "1234AB", + } + assert not config_entry.options @pytest.mark.usefixtures("mock_twentemilieu") @@ -128,5 +169,5 @@ async def test_address_already_set_up( }, ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" From 85d4572a17a5d6100e37455befa7dfe6afb619c8 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 12 Dec 2024 13:41:56 +0100 Subject: [PATCH 0546/1198] Adjust backup agent platform (#132944) * Adjust backup agent platform * Adjust according to discussion * Clean up the local agent dict too * Add test * Update kitchen_sink * Apply suggestions from code review Co-authored-by: Martin Hjelmare * Adjust tests * Clean up * Fix kitchen sink reload --------- Co-authored-by: Martin Hjelmare --- homeassistant/components/backup/agent.py | 23 +++- homeassistant/components/backup/backup.py | 3 +- homeassistant/components/backup/manager.py | 41 +++++-- homeassistant/components/cloud/backup.py | 7 +- homeassistant/components/hassio/backup.py | 2 + 
.../components/kitchen_sink/__init__.py | 21 +++- .../components/kitchen_sink/backup.py | 27 ++++- .../components/kitchen_sink/const.py | 12 ++ tests/components/backup/common.py | 2 + tests/components/backup/test_manager.py | 103 +++++++++++++++--- tests/components/cloud/test_backup.py | 5 +- tests/components/kitchen_sink/test_backup.py | 21 ++++ 12 files changed, 235 insertions(+), 32 deletions(-) create mode 100644 homeassistant/components/kitchen_sink/const.py diff --git a/homeassistant/components/backup/agent.py b/homeassistant/components/backup/agent.py index 36f2e7ee34e..44bc9b298e8 100644 --- a/homeassistant/components/backup/agent.py +++ b/homeassistant/components/backup/agent.py @@ -7,7 +7,9 @@ from collections.abc import AsyncIterator, Callable, Coroutine from pathlib import Path from typing import Any, Protocol -from homeassistant.core import HomeAssistant +from propcache import cached_property + +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from .models import AgentBackup @@ -26,8 +28,14 @@ class BackupAgentUnreachableError(BackupAgentError): class BackupAgent(abc.ABC): """Backup agent interface.""" + domain: str name: str + @cached_property + def agent_id(self) -> str: + """Return the agent_id.""" + return f"{self.domain}.{self.name}" + @abc.abstractmethod async def async_download_backup( self, @@ -98,3 +106,16 @@ class BackupAgentPlatformProtocol(Protocol): **kwargs: Any, ) -> list[BackupAgent]: """Return a list of backup agents.""" + + @callback + def async_register_backup_agents_listener( + self, + hass: HomeAssistant, + *, + listener: Callable[[], None], + **kwargs: Any, + ) -> Callable[[], None]: + """Register a listener to be called when agents are added or removed. + + :return: A function to unregister the listener. 
+ """ diff --git a/homeassistant/components/backup/backup.py b/homeassistant/components/backup/backup.py index b9aad89c7f3..ef4924161c2 100644 --- a/homeassistant/components/backup/backup.py +++ b/homeassistant/components/backup/backup.py @@ -12,7 +12,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.hassio import is_hassio from .agent import BackupAgent, LocalBackupAgent -from .const import LOGGER +from .const import DOMAIN, LOGGER from .models import AgentBackup from .util import read_backup @@ -30,6 +30,7 @@ async def async_get_backup_agents( class CoreLocalBackupAgent(LocalBackupAgent): """Local backup agent for Core and Container installations.""" + domain = DOMAIN name = "local" def __init__(self, hass: HomeAssistant) -> None: diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index 1defbd350fb..66977e568e4 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -243,6 +243,7 @@ class BackupManager: """Initialize the backup manager.""" self.hass = hass self.platforms: dict[str, BackupPlatformProtocol] = {} + self.backup_agent_platforms: dict[str, BackupAgentPlatformProtocol] = {} self.backup_agents: dict[str, BackupAgent] = {} self.local_backup_agents: dict[str, LocalBackupAgent] = {} @@ -291,22 +292,48 @@ class BackupManager: self.platforms[integration_domain] = platform - async def _async_add_platform_agents( + @callback + def _async_add_backup_agent_platform( self, integration_domain: str, platform: BackupAgentPlatformProtocol, ) -> None: - """Add a platform to the backup manager.""" + """Add backup agent platform to the backup manager.""" if not hasattr(platform, "async_get_backup_agents"): return + self.backup_agent_platforms[integration_domain] = platform + + @callback + def listener() -> None: + LOGGER.debug("Loading backup agents for %s", integration_domain) + self.hass.async_create_task( + self._async_reload_backup_agents(integration_domain) + ) + + if hasattr(platform, "async_register_backup_agents_listener"): + platform.async_register_backup_agents_listener(self.hass, listener=listener) + + listener() + + async def _async_reload_backup_agents(self, domain: str) -> None: + """Add backup agent platform to the backup manager.""" + platform = self.backup_agent_platforms[domain] + + # Remove all agents for the domain + for agent_id in list(self.backup_agents): + if self.backup_agents[agent_id].domain == domain: + del self.backup_agents[agent_id] + for agent_id in list(self.local_backup_agents): + if self.local_backup_agents[agent_id].domain == domain: + del self.local_backup_agents[agent_id] + + # Add new agents agents = await platform.async_get_backup_agents(self.hass) - self.backup_agents.update( - {f"{integration_domain}.{agent.name}": agent for agent in agents} - ) + self.backup_agents.update({agent.agent_id: agent for agent in agents}) self.local_backup_agents.update( { - f"{integration_domain}.{agent.name}": agent + agent.agent_id: agent for agent in agents if isinstance(agent, LocalBackupAgent) } @@ -320,7 +347,7 @@ class BackupManager: ) -> None: """Add a backup platform manager.""" self._add_platform_pre_post_handler(integration_domain, platform) - await self._async_add_platform_agents(integration_domain, platform) + self._async_add_backup_agent_platform(integration_domain, platform) LOGGER.debug("Backup platform %s loaded", integration_domain) LOGGER.debug("%s platforms loaded in total", len(self.platforms)) LOGGER.debug("%s agents loaded in 
total", len(self.backup_agents)) diff --git a/homeassistant/components/cloud/backup.py b/homeassistant/components/cloud/backup.py index 58ecc7a78fd..2c7cc9d7bd5 100644 --- a/homeassistant/components/cloud/backup.py +++ b/homeassistant/components/cloud/backup.py @@ -38,7 +38,11 @@ async def async_get_backup_agents( **kwargs: Any, ) -> list[BackupAgent]: """Return the cloud backup agent.""" - return [CloudBackupAgent(hass=hass, cloud=hass.data[DATA_CLOUD])] + cloud = hass.data[DATA_CLOUD] + if not cloud.is_logged_in: + return [] + + return [CloudBackupAgent(hass=hass, cloud=cloud)] class ChunkAsyncStreamIterator: @@ -69,6 +73,7 @@ class ChunkAsyncStreamIterator: class CloudBackupAgent(BackupAgent): """Cloud backup agent.""" + domain = DOMAIN name = DOMAIN def __init__(self, hass: HomeAssistant, cloud: Cloud[CloudClient]) -> None: diff --git a/homeassistant/components/hassio/backup.py b/homeassistant/components/hassio/backup.py index f7f66f6cecc..53f3a226a09 100644 --- a/homeassistant/components/hassio/backup.py +++ b/homeassistant/components/hassio/backup.py @@ -79,6 +79,8 @@ def _backup_details_to_agent_backup( class SupervisorBackupAgent(BackupAgent): """Backup agent for supervised installations.""" + domain = DOMAIN + def __init__(self, hass: HomeAssistant, name: str, location: str | None) -> None: """Initialize the backup agent.""" super().__init__() diff --git a/homeassistant/components/kitchen_sink/__init__.py b/homeassistant/components/kitchen_sink/__init__.py index 2c3887bb383..88d0c868636 100644 --- a/homeassistant/components/kitchen_sink/__init__.py +++ b/homeassistant/components/kitchen_sink/__init__.py @@ -26,8 +26,7 @@ from homeassistant.helpers.issue_registry import IssueSeverity, async_create_iss from homeassistant.helpers.typing import ConfigType import homeassistant.util.dt as dt_util -DOMAIN = "kitchen_sink" - +from .const import DATA_BACKUP_AGENT_LISTENERS, DOMAIN COMPONENTS_WITH_DEMO_PLATFORM = [ Platform.BUTTON, @@ -88,9 +87,27 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b # Start a reauth flow config_entry.async_start_reauth(hass) + # Notify backup listeners + hass.async_create_task(_notify_backup_listeners(hass), eager_start=False) + return True +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload config entry.""" + # Notify backup listeners + hass.async_create_task(_notify_backup_listeners(hass), eager_start=False) + + return await hass.config_entries.async_unload_platforms( + entry, COMPONENTS_WITH_DEMO_PLATFORM + ) + + +async def _notify_backup_listeners(hass: HomeAssistant) -> None: + for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []): + listener() + + def _create_issues(hass: HomeAssistant) -> None: """Create some issue registry issues.""" async_create_issue( diff --git a/homeassistant/components/kitchen_sink/backup.py b/homeassistant/components/kitchen_sink/backup.py index 02c61ff4de6..615364f55ee 100644 --- a/homeassistant/components/kitchen_sink/backup.py +++ b/homeassistant/components/kitchen_sink/backup.py @@ -8,7 +8,9 @@ import logging from typing import Any from homeassistant.components.backup import AddonInfo, AgentBackup, BackupAgent, Folder -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback + +from . 
import DATA_BACKUP_AGENT_LISTENERS, DOMAIN LOGGER = logging.getLogger(__name__) @@ -17,12 +19,35 @@ async def async_get_backup_agents( hass: HomeAssistant, ) -> list[BackupAgent]: """Register the backup agents.""" + if not hass.config_entries.async_loaded_entries(DOMAIN): + LOGGER.info("No config entry found or entry is not loaded") + return [] return [KitchenSinkBackupAgent("syncer")] +@callback +def async_register_backup_agents_listener( + hass: HomeAssistant, + *, + listener: Callable[[], None], + **kwargs: Any, +) -> Callable[[], None]: + """Register a listener to be called when agents are added or removed.""" + hass.data.setdefault(DATA_BACKUP_AGENT_LISTENERS, []).append(listener) + + @callback + def remove_listener() -> None: + """Remove the listener.""" + hass.data[DATA_BACKUP_AGENT_LISTENERS].remove(listener) + + return remove_listener + + class KitchenSinkBackupAgent(BackupAgent): """Kitchen sink backup agent.""" + domain = DOMAIN + def __init__(self, name: str) -> None: """Initialize the kitchen sink backup sync agent.""" super().__init__() diff --git a/homeassistant/components/kitchen_sink/const.py b/homeassistant/components/kitchen_sink/const.py new file mode 100644 index 00000000000..e6edaca46ce --- /dev/null +++ b/homeassistant/components/kitchen_sink/const.py @@ -0,0 +1,12 @@ +"""Constants for the Kitchen Sink integration.""" + +from __future__ import annotations + +from collections.abc import Callable + +from homeassistant.util.hass_dict import HassKey + +DOMAIN = "kitchen_sink" +DATA_BACKUP_AGENT_LISTENERS: HassKey[list[Callable[[], None]]] = HassKey( + f"{DOMAIN}.backup_agent_listeners" +) diff --git a/tests/components/backup/common.py b/tests/components/backup/common.py index 133a2602192..b06b8a5ef5d 100644 --- a/tests/components/backup/common.py +++ b/tests/components/backup/common.py @@ -57,6 +57,8 @@ TEST_DOMAIN = "test" class BackupAgentTest(BackupAgent): """Test backup agent.""" + domain = "test" + def __init__(self, name: str, backups: list[AgentBackup] | None = None) -> None: """Initialize the backup agent.""" self.name = name diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index f335ea5c0ee..302f4e07011 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -6,6 +6,7 @@ import asyncio from collections.abc import Generator from io import StringIO import json +from pathlib import Path from typing import Any from unittest.mock import ANY, AsyncMock, MagicMock, Mock, call, mock_open, patch @@ -18,6 +19,7 @@ from homeassistant.components.backup import ( BackupManager, BackupPlatformProtocol, Folder, + LocalBackupAgent, backup as local_backup_platform, ) from homeassistant.components.backup.const import DATA_MANAGER @@ -235,14 +237,14 @@ async def test_async_initiate_backup( core_get_backup_agents.return_value = [local_agent] await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() - await _setup_backup_platform( - hass, - domain="test", - platform=Mock( - async_get_backup_agents=AsyncMock(return_value=[remote_agent]), - spec_set=BackupAgentPlatformProtocol, - ), - ) + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) ws_client = await hass_ws_client(hass) @@ -402,14 +404,14 @@ async def test_async_initiate_backup_with_agent_error( core_get_backup_agents.return_value = [local_agent] await async_setup_component(hass, 
DOMAIN, {}) await hass.async_block_till_done() - await _setup_backup_platform( - hass, - domain="test", - platform=Mock( - async_get_backup_agents=AsyncMock(return_value=[remote_agent]), - spec_set=BackupAgentPlatformProtocol, - ), - ) + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) ws_client = await hass_ws_client(hass) @@ -534,21 +536,86 @@ async def test_loading_platforms( assert not manager.platforms + get_agents_mock = AsyncMock(return_value=[]) + await _setup_backup_platform( hass, platform=Mock( async_pre_backup=AsyncMock(), async_post_backup=AsyncMock(), - async_get_backup_agents=AsyncMock(), + async_get_backup_agents=get_agents_mock, ), ) await manager.load_platforms() await hass.async_block_till_done() assert len(manager.platforms) == 1 - assert "Loaded 1 platforms" in caplog.text + get_agents_mock.assert_called_once_with(hass) + + +class LocalBackupAgentTest(BackupAgentTest, LocalBackupAgent): + """Local backup agent.""" + + def get_backup_path(self, backup_id: str) -> Path: + """Return the local path to a backup.""" + return "test.tar" + + +@pytest.mark.parametrize( + ("agent_class", "num_local_agents"), + [(LocalBackupAgentTest, 2), (BackupAgentTest, 1)], +) +async def test_loading_platform_with_listener( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + agent_class: type[BackupAgentTest], + num_local_agents: int, +) -> None: + """Test loading a backup agent platform which can be listened to.""" + ws_client = await hass_ws_client(hass) + assert await async_setup_component(hass, DOMAIN, {}) + manager = hass.data[DATA_MANAGER] + + get_agents_mock = AsyncMock(return_value=[agent_class("remote1", backups=[])]) + register_listener_mock = Mock() + + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=get_agents_mock, + async_register_backup_agents_listener=register_listener_mock, + ), + ) + await hass.async_block_till_done() + + await ws_client.send_json_auto_id({"type": "backup/agents/info"}) + resp = await ws_client.receive_json() + assert resp["result"]["agents"] == [ + {"agent_id": "backup.local"}, + {"agent_id": "test.remote1"}, + ] + assert len(manager.local_backup_agents) == num_local_agents + + get_agents_mock.assert_called_once_with(hass) + register_listener_mock.assert_called_once_with(hass, listener=ANY) + + get_agents_mock.reset_mock() + get_agents_mock.return_value = [agent_class("remote2", backups=[])] + listener = register_listener_mock.call_args[1]["listener"] + listener() + + get_agents_mock.assert_called_once_with(hass) + await ws_client.send_json_auto_id({"type": "backup/agents/info"}) + resp = await ws_client.receive_json() + assert resp["result"]["agents"] == [ + {"agent_id": "backup.local"}, + {"agent_id": "test.remote2"}, + ] + assert len(manager.local_backup_agents) == num_local_agents + @pytest.mark.parametrize( "platform_mock", diff --git a/tests/components/cloud/test_backup.py b/tests/components/cloud/test_backup.py index 16b446c7a2b..d5dc8751d82 100644 --- a/tests/components/cloud/test_backup.py +++ b/tests/components/cloud/test_backup.py @@ -26,7 +26,10 @@ from tests.typing import ClientSessionGenerator, MagicMock, WebSocketGenerator @pytest.fixture(autouse=True) async def setup_integration( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, cloud: MagicMock + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + cloud: MagicMock, + 
cloud_logged_in: None, ) -> AsyncGenerator[None]: """Set up cloud integration.""" with patch("homeassistant.components.backup.is_hassio", return_value=False): diff --git a/tests/components/kitchen_sink/test_backup.py b/tests/components/kitchen_sink/test_backup.py index 7db03b7fa46..6a738094ae6 100644 --- a/tests/components/kitchen_sink/test_backup.py +++ b/tests/components/kitchen_sink/test_backup.py @@ -57,6 +57,27 @@ async def test_agents_info( "agents": [{"agent_id": "backup.local"}, {"agent_id": "kitchen_sink.syncer"}], } + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == {"agents": [{"agent_id": "backup.local"}]} + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + "agents": [{"agent_id": "backup.local"}, {"agent_id": "kitchen_sink.syncer"}], + } + async def test_agents_list_backups( hass: HomeAssistant, From 5c80ddb89160e84be136e5d42b9edce3c050f277 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 13:49:17 +0100 Subject: [PATCH 0547/1198] Fix LaMetric config flow for cloud import path (#133039) --- homeassistant/components/lametric/config_flow.py | 5 ++++- homeassistant/components/lametric/strings.json | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/lametric/config_flow.py b/homeassistant/components/lametric/config_flow.py index 36dcdf26ed6..05c5dea77d1 100644 --- a/homeassistant/components/lametric/config_flow.py +++ b/homeassistant/components/lametric/config_flow.py @@ -249,7 +249,10 @@ class LaMetricFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): device = await lametric.device() if self.source != SOURCE_REAUTH: - await self.async_set_unique_id(device.serial_number) + await self.async_set_unique_id( + device.serial_number, + raise_on_progress=False, + ) self._abort_if_unique_id_configured( updates={CONF_HOST: lametric.host, CONF_API_KEY: lametric.api_key} ) diff --git a/homeassistant/components/lametric/strings.json b/homeassistant/components/lametric/strings.json index 87bda01e305..0fd6f5a12dc 100644 --- a/homeassistant/components/lametric/strings.json +++ b/homeassistant/components/lametric/strings.json @@ -21,8 +21,11 @@ "api_key": "You can find this API key in [devices page in your LaMetric developer account](https://developer.lametric.com/user/devices)." 
} }, - "user_cloud_select_device": { + "cloud_select_device": { "data": { + "device": "Device" + }, + "data_description": { "device": "Select the LaMetric device to add" } } From 7bdf034b93f9c5fbb97b46652ec509186869ffa5 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 13:54:22 +0100 Subject: [PATCH 0548/1198] Migrate template light tests to use Kelvin (#133025) --- tests/components/template/test_light.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/components/template/test_light.py b/tests/components/template/test_light.py index 065a1488dc9..b5ba93a4bd0 100644 --- a/tests/components/template/test_light.py +++ b/tests/components/template/test_light.py @@ -7,7 +7,7 @@ import pytest from homeassistant.components import light from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, ATTR_RGB_COLOR, @@ -773,7 +773,7 @@ async def test_temperature_action_no_template( await hass.services.async_call( light.DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_template_light", ATTR_COLOR_TEMP: 345}, + {ATTR_ENTITY_ID: "light.test_template_light", ATTR_COLOR_TEMP_KELVIN: 2898}, blocking=True, ) @@ -1395,7 +1395,7 @@ async def test_all_colors_mode_no_template( await hass.services.async_call( light.DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_template_light", ATTR_COLOR_TEMP: 123}, + {ATTR_ENTITY_ID: "light.test_template_light", ATTR_COLOR_TEMP_KELVIN: 8130}, blocking=True, ) @@ -1531,7 +1531,7 @@ async def test_all_colors_mode_no_template( await hass.services.async_call( light.DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_template_light", ATTR_COLOR_TEMP: 234}, + {ATTR_ENTITY_ID: "light.test_template_light", ATTR_COLOR_TEMP_KELVIN: 4273}, blocking=True, ) From 6005b6d01ca46e89a8350d3633f07aac9f620c15 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 13:55:57 +0100 Subject: [PATCH 0549/1198] Explicitly pass config entry to coordinator in Elgato (#133014) * Explicitly pass config entry to coordinator in Elgato * Make it noice! 
* Apply suggestions from code review Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> * Adjustment from review comment --------- Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- homeassistant/components/elgato/__init__.py | 9 +++------ homeassistant/components/elgato/button.py | 5 ++--- homeassistant/components/elgato/coordinator.py | 7 +++++-- homeassistant/components/elgato/diagnostics.py | 4 ++-- homeassistant/components/elgato/light.py | 5 ++--- homeassistant/components/elgato/sensor.py | 5 ++--- homeassistant/components/elgato/switch.py | 5 ++--- 7 files changed, 18 insertions(+), 22 deletions(-) diff --git a/homeassistant/components/elgato/__init__.py b/homeassistant/components/elgato/__init__.py index 2d8446c3b76..1b1ff9948c9 100644 --- a/homeassistant/components/elgato/__init__.py +++ b/homeassistant/components/elgato/__init__.py @@ -1,17 +1,14 @@ """Support for Elgato Lights.""" -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from .coordinator import ElgatoDataUpdateCoordinator +from .coordinator import ElgatoConfigEntry, ElgatoDataUpdateCoordinator PLATFORMS = [Platform.BUTTON, Platform.LIGHT, Platform.SENSOR, Platform.SWITCH] -type ElgatorConfigEntry = ConfigEntry[ElgatoDataUpdateCoordinator] - -async def async_setup_entry(hass: HomeAssistant, entry: ElgatorConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: ElgatoConfigEntry) -> bool: """Set up Elgato Light from a config entry.""" coordinator = ElgatoDataUpdateCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() @@ -22,6 +19,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ElgatorConfigEntry) -> b return True -async def async_unload_entry(hass: HomeAssistant, entry: ElgatorConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: ElgatoConfigEntry) -> bool: """Unload Elgato Light config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/elgato/button.py b/homeassistant/components/elgato/button.py index 6f9436b8e29..505eff36b44 100644 --- a/homeassistant/components/elgato/button.py +++ b/homeassistant/components/elgato/button.py @@ -18,8 +18,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import ElgatorConfigEntry -from .coordinator import ElgatoDataUpdateCoordinator +from .coordinator import ElgatoConfigEntry, ElgatoDataUpdateCoordinator from .entity import ElgatoEntity PARALLEL_UPDATES = 1 @@ -50,7 +49,7 @@ BUTTONS = [ async def async_setup_entry( hass: HomeAssistant, - entry: ElgatorConfigEntry, + entry: ElgatoConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Elgato button based on a config entry.""" diff --git a/homeassistant/components/elgato/coordinator.py b/homeassistant/components/elgato/coordinator.py index f3cf9216374..5e1ba0a6494 100644 --- a/homeassistant/components/elgato/coordinator.py +++ b/homeassistant/components/elgato/coordinator.py @@ -12,6 +12,8 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda from .const import DOMAIN, LOGGER, SCAN_INTERVAL +type ElgatoConfigEntry = ConfigEntry[ElgatoDataUpdateCoordinator] + @dataclass class ElgatoData: @@ -26,10 +28,10 @@ class ElgatoData: class ElgatoDataUpdateCoordinator(DataUpdateCoordinator[ElgatoData]): """Class to manage fetching Elgato data.""" - config_entry: ConfigEntry + config_entry: ElgatoConfigEntry has_battery: bool | None = None - def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: + def __init__(self, hass: HomeAssistant, entry: ElgatoConfigEntry) -> None: """Initialize the coordinator.""" self.config_entry = entry self.client = Elgato( @@ -39,6 +41,7 @@ class ElgatoDataUpdateCoordinator(DataUpdateCoordinator[ElgatoData]): super().__init__( hass, LOGGER, + config_entry=entry, name=f"{DOMAIN}_{entry.data[CONF_HOST]}", update_interval=SCAN_INTERVAL, ) diff --git a/homeassistant/components/elgato/diagnostics.py b/homeassistant/components/elgato/diagnostics.py index ac3ea0a155d..4e1b9d4cfdd 100644 --- a/homeassistant/components/elgato/diagnostics.py +++ b/homeassistant/components/elgato/diagnostics.py @@ -6,11 +6,11 @@ from typing import Any from homeassistant.core import HomeAssistant -from . import ElgatorConfigEntry +from .coordinator import ElgatoConfigEntry async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ElgatorConfigEntry + hass: HomeAssistant, entry: ElgatoConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" coordinator = entry.runtime_data diff --git a/homeassistant/components/elgato/light.py b/homeassistant/components/elgato/light.py index 9a85c572e2c..990a0606fce 100644 --- a/homeassistant/components/elgato/light.py +++ b/homeassistant/components/elgato/light.py @@ -21,9 +21,8 @@ from homeassistant.helpers.entity_platform import ( ) from homeassistant.util import color as color_util -from . 
import ElgatorConfigEntry from .const import SERVICE_IDENTIFY -from .coordinator import ElgatoDataUpdateCoordinator +from .coordinator import ElgatoConfigEntry, ElgatoDataUpdateCoordinator from .entity import ElgatoEntity PARALLEL_UPDATES = 1 @@ -31,7 +30,7 @@ PARALLEL_UPDATES = 1 async def async_setup_entry( hass: HomeAssistant, - entry: ElgatorConfigEntry, + entry: ElgatoConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Elgato Light based on a config entry.""" diff --git a/homeassistant/components/elgato/sensor.py b/homeassistant/components/elgato/sensor.py index a28ee01f505..529d2f7c76e 100644 --- a/homeassistant/components/elgato/sensor.py +++ b/homeassistant/components/elgato/sensor.py @@ -21,8 +21,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ElgatorConfigEntry -from .coordinator import ElgatoData, ElgatoDataUpdateCoordinator +from .coordinator import ElgatoConfigEntry, ElgatoData, ElgatoDataUpdateCoordinator from .entity import ElgatoEntity # Coordinator is used to centralize the data updates @@ -104,7 +103,7 @@ SENSORS = [ async def async_setup_entry( hass: HomeAssistant, - entry: ElgatorConfigEntry, + entry: ElgatoConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Elgato sensor based on a config entry.""" diff --git a/homeassistant/components/elgato/switch.py b/homeassistant/components/elgato/switch.py index 643f148ec7d..3b2420b0ace 100644 --- a/homeassistant/components/elgato/switch.py +++ b/homeassistant/components/elgato/switch.py @@ -14,8 +14,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import ElgatorConfigEntry -from .coordinator import ElgatoData, ElgatoDataUpdateCoordinator +from .coordinator import ElgatoConfigEntry, ElgatoData, ElgatoDataUpdateCoordinator from .entity import ElgatoEntity PARALLEL_UPDATES = 1 @@ -54,7 +53,7 @@ SWITCHES = [ async def async_setup_entry( hass: HomeAssistant, - entry: ElgatorConfigEntry, + entry: ElgatoConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Elgato switches based on a config entry.""" From bcaf1dc20b5035564b0d0e2815bff77e094238e6 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 14:24:38 +0100 Subject: [PATCH 0550/1198] Clean up Elgato config flow tests (#133045) --- .../elgato/snapshots/test_config_flow.ambr | 128 ------------------ tests/components/elgato/test_config_flow.py | 94 +++++++++---- 2 files changed, 65 insertions(+), 157 deletions(-) delete mode 100644 tests/components/elgato/snapshots/test_config_flow.ambr diff --git a/tests/components/elgato/snapshots/test_config_flow.ambr b/tests/components/elgato/snapshots/test_config_flow.ambr deleted file mode 100644 index 522482ab602..00000000000 --- a/tests/components/elgato/snapshots/test_config_flow.ambr +++ /dev/null @@ -1,128 +0,0 @@ -# serializer version: 1 -# name: test_full_user_flow_implementation - FlowResultSnapshot({ - 'context': dict({ - 'source': 'user', - 'unique_id': 'CN11A1A00001', - }), - 'data': dict({ - 'host': '127.0.0.1', - 'mac': None, - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': , - 'handler': 'elgato', - 'minor_version': 1, - 'options': dict({ - }), - 'result': ConfigEntrySnapshot({ - 'data': dict({ - 'host': '127.0.0.1', - 'mac': None, - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'elgato', - 'entry_id': , - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'CN11A1A00001', - 'unique_id': 'CN11A1A00001', - 'version': 1, - }), - 'title': 'CN11A1A00001', - 'type': , - 'version': 1, - }) -# --- -# name: test_full_zeroconf_flow_implementation - FlowResultSnapshot({ - 'context': dict({ - 'confirm_only': True, - 'source': 'zeroconf', - 'unique_id': 'CN11A1A00001', - }), - 'data': dict({ - 'host': '127.0.0.1', - 'mac': 'AA:BB:CC:DD:EE:FF', - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': , - 'handler': 'elgato', - 'minor_version': 1, - 'options': dict({ - }), - 'result': ConfigEntrySnapshot({ - 'data': dict({ - 'host': '127.0.0.1', - 'mac': 'AA:BB:CC:DD:EE:FF', - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'elgato', - 'entry_id': , - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'zeroconf', - 'title': 'CN11A1A00001', - 'unique_id': 'CN11A1A00001', - 'version': 1, - }), - 'title': 'CN11A1A00001', - 'type': , - 'version': 1, - }) -# --- -# name: test_zeroconf_during_onboarding - FlowResultSnapshot({ - 'context': dict({ - 'source': 'zeroconf', - 'unique_id': 'CN11A1A00001', - }), - 'data': dict({ - 'host': '127.0.0.1', - 'mac': 'AA:BB:CC:DD:EE:FF', - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': , - 'handler': 'elgato', - 'minor_version': 1, - 'options': dict({ - }), - 'result': ConfigEntrySnapshot({ - 'data': dict({ - 'host': '127.0.0.1', - 'mac': 'AA:BB:CC:DD:EE:FF', - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'elgato', - 'entry_id': , - 'minor_version': 1, - 
'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'zeroconf', - 'title': 'CN11A1A00001', - 'unique_id': 'CN11A1A00001', - 'version': 1, - }), - 'title': 'CN11A1A00001', - 'type': , - 'version': 1, - }) -# --- diff --git a/tests/components/elgato/test_config_flow.py b/tests/components/elgato/test_config_flow.py index 42abc0cde63..00763f60458 100644 --- a/tests/components/elgato/test_config_flow.py +++ b/tests/components/elgato/test_config_flow.py @@ -5,12 +5,11 @@ from unittest.mock import AsyncMock, MagicMock from elgato import ElgatoConnectionError import pytest -from syrupy.assertion import SnapshotAssertion from homeassistant.components import zeroconf from homeassistant.components.elgato.const import DOMAIN from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF -from homeassistant.const import CONF_HOST, CONF_SOURCE +from homeassistant.const import CONF_HOST, CONF_MAC, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -21,7 +20,6 @@ async def test_full_user_flow_implementation( hass: HomeAssistant, mock_elgato: MagicMock, mock_setup_entry: AsyncMock, - snapshot: SnapshotAssertion, ) -> None: """Test the full manual user flow from start to finish.""" result = await hass.config_entries.flow.async_init( @@ -29,15 +27,22 @@ async def test_full_user_flow_implementation( context={"source": SOURCE_USER}, ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_HOST: "127.0.0.1"} ) - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2 == snapshot + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "CN11A1A00001" + assert config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_MAC: None, + } + assert not config_entry.options assert len(mock_setup_entry.mock_calls) == 1 assert len(mock_elgato.info.mock_calls) == 1 @@ -47,7 +52,6 @@ async def test_full_zeroconf_flow_implementation( hass: HomeAssistant, mock_elgato: MagicMock, mock_setup_entry: AsyncMock, - snapshot: SnapshotAssertion, ) -> None: """Test the zeroconf flow from start to finish.""" result = await hass.config_entries.flow.async_init( @@ -64,9 +68,9 @@ async def test_full_zeroconf_flow_implementation( ), ) - assert result.get("description_placeholders") == {"serial_number": "CN11A1A00001"} - assert result.get("step_id") == "zeroconf_confirm" - assert result.get("type") is FlowResultType.FORM + assert result["description_placeholders"] == {"serial_number": "CN11A1A00001"} + assert result["step_id"] == "zeroconf_confirm" + assert result["type"] is FlowResultType.FORM progress = hass.config_entries.flow.async_progress() assert len(progress) == 1 @@ -74,12 +78,19 @@ async def test_full_zeroconf_flow_implementation( assert "context" in progress[0] assert progress[0]["context"].get("confirm_only") is True - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2 == snapshot + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + 
assert config_entry.unique_id == "CN11A1A00001" + assert config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_MAC: "AA:BB:CC:DD:EE:FF", + } + assert not config_entry.options assert len(mock_setup_entry.mock_calls) == 1 assert len(mock_elgato.info.mock_calls) == 1 @@ -97,9 +108,28 @@ async def test_connection_error( data={CONF_HOST: "127.0.0.1"}, ) - assert result.get("type") is FlowResultType.FORM - assert result.get("errors") == {"base": "cannot_connect"} - assert result.get("step_id") == "user" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + assert result["step_id"] == "user" + + # Recover from error + mock_elgato.info.side_effect = None + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={CONF_HOST: "127.0.0.2"}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "CN11A1A00001" + assert config_entry.data == { + CONF_HOST: "127.0.0.2", + CONF_MAC: None, + } + assert not config_entry.options async def test_zeroconf_connection_error( @@ -122,8 +152,8 @@ async def test_zeroconf_connection_error( ), ) - assert result.get("reason") == "cannot_connect" - assert result.get("type") is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + assert result["type"] is FlowResultType.ABORT @pytest.mark.usefixtures("mock_elgato") @@ -138,8 +168,8 @@ async def test_user_device_exists_abort( data={CONF_HOST: "127.0.0.1"}, ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" @pytest.mark.usefixtures("mock_elgato") @@ -162,8 +192,8 @@ async def test_zeroconf_device_exists_abort( ), ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" entries = hass.config_entries.async_entries(DOMAIN) assert entries[0].data[CONF_HOST] == "127.0.0.1" @@ -183,8 +213,8 @@ async def test_zeroconf_device_exists_abort( ), ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" entries = hass.config_entries.async_entries(DOMAIN) assert entries[0].data[CONF_HOST] == "127.0.0.2" @@ -195,7 +225,6 @@ async def test_zeroconf_during_onboarding( mock_elgato: MagicMock, mock_setup_entry: AsyncMock, mock_onboarding: MagicMock, - snapshot: SnapshotAssertion, ) -> None: """Test the zeroconf creates an entry during onboarding.""" result = await hass.config_entries.flow.async_init( @@ -212,8 +241,15 @@ async def test_zeroconf_during_onboarding( ), ) - assert result.get("type") is FlowResultType.CREATE_ENTRY - assert result == snapshot + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "CN11A1A00001" + assert config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_MAC: "AA:BB:CC:DD:EE:FF", + } + assert not config_entry.options assert len(mock_setup_entry.mock_calls) == 1 assert len(mock_elgato.info.mock_calls) == 1 From c18cbf5994d6b22504e19d5d698d80f806137fc6 Mon Sep 17 00:00:00 2001 From: Krisjanis Lejejs Date: Thu, 12 Dec 2024 13:25:54 +0000 Subject: [PATCH 0551/1198] Bump hass-nabucasa from 0.86.0 to 0.87.0 
(#133043) --- homeassistant/components/cloud/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/cloud/manifest.json b/homeassistant/components/cloud/manifest.json index 48f2153e86f..7ee8cf46b86 100644 --- a/homeassistant/components/cloud/manifest.json +++ b/homeassistant/components/cloud/manifest.json @@ -13,6 +13,6 @@ "integration_type": "system", "iot_class": "cloud_push", "loggers": ["hass_nabucasa"], - "requirements": ["hass-nabucasa==0.86.0"], + "requirements": ["hass-nabucasa==0.87.0"], "single_config_entry": true } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index e4abf3ab678..e7d46787f5d 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -31,7 +31,7 @@ fnv-hash-fast==1.0.2 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 habluetooth==3.6.0 -hass-nabucasa==0.86.0 +hass-nabucasa==0.87.0 hassil==2.0.5 home-assistant-bluetooth==1.13.0 home-assistant-frontend==20241127.7 diff --git a/pyproject.toml b/pyproject.toml index c40f8bd0d01..375e57126f2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,7 +45,7 @@ dependencies = [ "fnv-hash-fast==1.0.2", # hass-nabucasa is imported by helpers which don't depend on the cloud # integration - "hass-nabucasa==0.86.0", + "hass-nabucasa==0.87.0", # When bumping httpx, please check the version pins of # httpcore, anyio, and h11 in gen_requirements_all "httpx==0.27.2", diff --git a/requirements.txt b/requirements.txt index 9ef9f0e44f2..e43822553f3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -19,7 +19,7 @@ bcrypt==4.2.0 certifi>=2021.5.30 ciso8601==2.3.2 fnv-hash-fast==1.0.2 -hass-nabucasa==0.86.0 +hass-nabucasa==0.87.0 httpx==0.27.2 home-assistant-bluetooth==1.13.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 26acf53fa53..fb873805873 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1088,7 +1088,7 @@ habitipy==0.3.3 habluetooth==3.6.0 # homeassistant.components.cloud -hass-nabucasa==0.86.0 +hass-nabucasa==0.87.0 # homeassistant.components.splunk hass-splunk==0.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index afe7252f9f8..83e7c89dd8b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -926,7 +926,7 @@ habitipy==0.3.3 habluetooth==3.6.0 # homeassistant.components.cloud -hass-nabucasa==0.86.0 +hass-nabucasa==0.87.0 # homeassistant.components.conversation hassil==2.0.5 From 2e133df549a3bc4fa67375882eb5824d6f6abe0b Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Thu, 12 Dec 2024 14:26:17 +0100 Subject: [PATCH 0552/1198] Improve husqvarna_automower decorator typing (#133047) --- .../components/husqvarna_automower/entity.py | 21 ++++++++++--------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/husqvarna_automower/entity.py b/homeassistant/components/husqvarna_automower/entity.py index fef0ba03b62..5b5156e5f1d 100644 --- a/homeassistant/components/husqvarna_automower/entity.py +++ b/homeassistant/components/husqvarna_automower/entity.py @@ -1,10 +1,12 @@ """Platform for Husqvarna Automower base entity.""" +from __future__ import annotations + import asyncio -from collections.abc import Awaitable, Callable, Coroutine +from collections.abc import Callable, Coroutine import 
functools import logging -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, Concatenate from aioautomower.exceptions import ApiException from aioautomower.model import MowerActivities, MowerAttributes, MowerStates, WorkArea @@ -52,18 +54,17 @@ def _work_area_translation_key(work_area_id: int, key: str) -> str: return f"work_area_{key}" -def handle_sending_exception( +type _FuncType[_T, **_P, _R] = Callable[Concatenate[_T, _P], Coroutine[Any, Any, _R]] + + +def handle_sending_exception[_Entity: AutomowerBaseEntity, **_P]( poll_after_sending: bool = False, -) -> Callable[ - [Callable[..., Awaitable[Any]]], Callable[..., Coroutine[Any, Any, None]] -]: +) -> Callable[[_FuncType[_Entity, _P, Any]], _FuncType[_Entity, _P, None]]: """Handle exceptions while sending a command and optionally refresh coordinator.""" - def decorator( - func: Callable[..., Awaitable[Any]], - ) -> Callable[..., Coroutine[Any, Any, None]]: + def decorator(func: _FuncType[_Entity, _P, Any]) -> _FuncType[_Entity, _P, None]: @functools.wraps(func) - async def wrapper(self: Any, *args: Any, **kwargs: Any) -> Any: + async def wrapper(self: _Entity, *args: _P.args, **kwargs: _P.kwargs) -> None: try: await func(self, *args, **kwargs) except ApiException as exception: From 8e15287662fa70bc9eb76dad2326d2a6ace1d8f5 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 14:26:34 +0100 Subject: [PATCH 0553/1198] Add data descriptions to Twente Milieu config flow (#133046) --- homeassistant/components/twentemilieu/quality_scale.yaml | 5 +---- homeassistant/components/twentemilieu/strings.json | 5 +++++ 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/twentemilieu/quality_scale.yaml b/homeassistant/components/twentemilieu/quality_scale.yaml index 210416e56c5..3d7535a249c 100644 --- a/homeassistant/components/twentemilieu/quality_scale.yaml +++ b/homeassistant/components/twentemilieu/quality_scale.yaml @@ -8,10 +8,7 @@ rules: brands: done common-modules: done config-flow-test-coverage: done - config-flow: - status: todo - comment: | - data_description's are missing. 
+ config-flow: done dependency-transparency: done docs-actions: status: exempt diff --git a/homeassistant/components/twentemilieu/strings.json b/homeassistant/components/twentemilieu/strings.json index 7797167ea0b..5c40df1b0c2 100644 --- a/homeassistant/components/twentemilieu/strings.json +++ b/homeassistant/components/twentemilieu/strings.json @@ -7,6 +7,11 @@ "post_code": "Postal code", "house_number": "House number", "house_letter": "House letter/additional" + }, + "data_description": { + "post_code": "The postal code of the address, for example 7500AA", + "house_number": "The house number of the address", + "house_letter": "The house letter or additional information of the address" } } }, From 4b5d717898c32712689d2534e33d9c2e79d90579 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Thu, 12 Dec 2024 14:35:11 +0100 Subject: [PATCH 0554/1198] Fix music_assistant decorator typing (#133044) --- .../components/music_assistant/media_player.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/music_assistant/media_player.py b/homeassistant/components/music_assistant/media_player.py index 847a71b0061..7d09bd5b888 100644 --- a/homeassistant/components/music_assistant/media_player.py +++ b/homeassistant/components/music_assistant/media_player.py @@ -3,11 +3,11 @@ from __future__ import annotations import asyncio -from collections.abc import Awaitable, Callable, Coroutine, Mapping +from collections.abc import Callable, Coroutine, Mapping from contextlib import suppress import functools import os -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, Concatenate from music_assistant_models.enums import ( EventType, @@ -102,14 +102,14 @@ ATTR_AUTO_PLAY = "auto_play" def catch_musicassistant_error[_R, **P]( - func: Callable[..., Awaitable[_R]], -) -> Callable[..., Coroutine[Any, Any, _R | None]]: + func: Callable[Concatenate[MusicAssistantPlayer, P], Coroutine[Any, Any, _R]], +) -> Callable[Concatenate[MusicAssistantPlayer, P], Coroutine[Any, Any, _R]]: """Check and log commands to players.""" @functools.wraps(func) async def wrapper( self: MusicAssistantPlayer, *args: P.args, **kwargs: P.kwargs - ) -> _R | None: + ) -> _R: """Catch Music Assistant errors and convert to Home Assistant error.""" try: return await func(self, *args, **kwargs) From dc18e62e1e5c18a52678f518c09f7d27378191b5 Mon Sep 17 00:00:00 2001 From: Sid <27780930+autinerd@users.noreply.github.com> Date: Thu, 12 Dec 2024 14:38:55 +0100 Subject: [PATCH 0555/1198] Bump ruff to 0.8.2 (#133041) --- .pre-commit-config.yaml | 2 +- requirements_test_pre_commit.txt | 2 +- script/hassfest/docker/Dockerfile | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9947ee05ad1..5d65225f512 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.8.1 + rev: v0.8.2 hooks: - id: ruff args: diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index b263373f11d..aa04dbeb6d0 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,5 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit codespell==2.3.0 -ruff==0.8.1 +ruff==0.8.2 yamllint==1.35.1 diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index 98edb9c458f..afedbd23cfe 100644 
--- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -22,7 +22,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.5.4,source=/uv,target=/bin/uv \ --no-cache \ -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ -r /usr/src/homeassistant/requirements.txt \ - stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.1 \ + stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.2 \ PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.0.5 home-assistant-intents==2024.12.9 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 LABEL "name"="hassfest" From f05d18ea70cd2581d5ca317e50ccda7f5ad283f1 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 14:42:05 +0100 Subject: [PATCH 0556/1198] Small test improvements to Tailwind tests (#133051) --- .../tailwind/snapshots/test_config_flow.ambr | 89 ------------- tests/components/tailwind/test_config_flow.py | 125 +++++++++++------- tests/components/tailwind/test_init.py | 4 +- 3 files changed, 78 insertions(+), 140 deletions(-) delete mode 100644 tests/components/tailwind/snapshots/test_config_flow.ambr diff --git a/tests/components/tailwind/snapshots/test_config_flow.ambr b/tests/components/tailwind/snapshots/test_config_flow.ambr deleted file mode 100644 index 09bf25cb96e..00000000000 --- a/tests/components/tailwind/snapshots/test_config_flow.ambr +++ /dev/null @@ -1,89 +0,0 @@ -# serializer version: 1 -# name: test_user_flow - FlowResultSnapshot({ - 'context': dict({ - 'source': 'user', - 'unique_id': '3c:e9:0e:6d:21:84', - }), - 'data': dict({ - 'host': '127.0.0.1', - 'token': '987654', - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': , - 'handler': 'tailwind', - 'minor_version': 1, - 'options': dict({ - }), - 'result': ConfigEntrySnapshot({ - 'data': dict({ - 'host': '127.0.0.1', - 'token': '987654', - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'tailwind', - 'entry_id': , - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'Tailwind iQ3', - 'unique_id': '3c:e9:0e:6d:21:84', - 'version': 1, - }), - 'title': 'Tailwind iQ3', - 'type': , - 'version': 1, - }) -# --- -# name: test_zeroconf_flow - FlowResultSnapshot({ - 'context': dict({ - 'configuration_url': 'https://web.gotailwind.com/client/integration/local-control-key', - 'source': 'zeroconf', - 'title_placeholders': dict({ - 'name': 'Tailwind iQ3', - }), - 'unique_id': '3c:e9:0e:6d:21:84', - }), - 'data': dict({ - 'host': '127.0.0.1', - 'token': '987654', - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': , - 'handler': 'tailwind', - 'minor_version': 1, - 'options': dict({ - }), - 'result': ConfigEntrySnapshot({ - 'data': dict({ - 'host': '127.0.0.1', - 'token': '987654', - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'tailwind', - 'entry_id': , - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'zeroconf', - 'title': 'Tailwind iQ3', - 'unique_id': '3c:e9:0e:6d:21:84', - 'version': 1, - }), - 'title': 'Tailwind iQ3', - 'type': , - 'version': 1, - }) -# --- diff --git a/tests/components/tailwind/test_config_flow.py b/tests/components/tailwind/test_config_flow.py index d2d15172718..ca6fbacf0fc 100644 --- a/tests/components/tailwind/test_config_flow.py +++ b/tests/components/tailwind/test_config_flow.py @@ -25,20 +25,17 @@ pytestmark = 
pytest.mark.usefixtures("mock_setup_entry") @pytest.mark.usefixtures("mock_tailwind") -async def test_user_flow( - hass: HomeAssistant, - snapshot: SnapshotAssertion, -) -> None: +async def test_user_flow(hass: HomeAssistant) -> None: """Test the full happy path user flow from start to finish.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_HOST: "127.0.0.1", @@ -46,8 +43,15 @@ async def test_user_flow( }, ) - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2 == snapshot + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "3c:e9:0e:6d:21:84" + assert config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_TOKEN: "987654", + } + assert not config_entry.options @pytest.mark.parametrize( @@ -76,19 +80,27 @@ async def test_user_flow_errors( }, ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" - assert result.get("errors") == expected_error + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == expected_error mock_tailwind.status.side_effect = None - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_HOST: "127.0.0.2", CONF_TOKEN: "123456", }, ) - assert result2.get("type") is FlowResultType.CREATE_ENTRY + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "3c:e9:0e:6d:21:84" + assert config_entry.data == { + CONF_HOST: "127.0.0.2", + CONF_TOKEN: "123456", + } + assert not config_entry.options async def test_user_flow_unsupported_firmware_version( @@ -105,8 +117,8 @@ async def test_user_flow_unsupported_firmware_version( }, ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "unsupported_firmware" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unsupported_firmware" @pytest.mark.usefixtures("mock_tailwind") @@ -129,8 +141,8 @@ async def test_user_flow_already_configured( }, ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" assert mock_config_entry.data[CONF_HOST] == "127.0.0.1" assert mock_config_entry.data[CONF_TOKEN] == "987654" @@ -160,19 +172,26 @@ async def test_zeroconf_flow( ), ) - assert result.get("step_id") == "zeroconf_confirm" - assert result.get("type") is FlowResultType.FORM + assert result["step_id"] == "zeroconf_confirm" + assert result["type"] is FlowResultType.FORM progress = hass.config_entries.flow.async_progress() assert len(progress) == 1 assert progress[0].get("flow_id") == result["flow_id"] - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "987654"} ) - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2 == snapshot + assert result["type"] is FlowResultType.CREATE_ENTRY + + 
config_entry = result["result"] + assert config_entry.unique_id == "3c:e9:0e:6d:21:84" + assert config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_TOKEN: "987654", + } + assert not config_entry.options @pytest.mark.parametrize( @@ -200,8 +219,8 @@ async def test_zeroconf_flow_abort_incompatible_properties( ), ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == expected_reason + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == expected_reason @pytest.mark.parametrize( @@ -240,25 +259,33 @@ async def test_zeroconf_flow_errors( ), ) - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "123456", }, ) - assert result2.get("type") is FlowResultType.FORM - assert result2.get("step_id") == "zeroconf_confirm" - assert result2.get("errors") == expected_error + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "zeroconf_confirm" + assert result["errors"] == expected_error mock_tailwind.status.side_effect = None - result3 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "123456", }, ) - assert result3.get("type") is FlowResultType.CREATE_ENTRY + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "3c:e9:0e:6d:21:84" + assert config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_TOKEN: "123456", + } + assert not config_entry.options @pytest.mark.usefixtures("mock_tailwind") @@ -292,8 +319,8 @@ async def test_zeroconf_flow_not_discovered_again( ), ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" assert mock_config_entry.data[CONF_HOST] == "127.0.0.1" @@ -307,17 +334,17 @@ async def test_reauth_flow( assert mock_config_entry.data[CONF_TOKEN] == "123456" result = await mock_config_entry.start_reauth_flow(hass) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "reauth_confirm" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], {CONF_TOKEN: "987654"}, ) await hass.async_block_till_done() - assert result2.get("type") is FlowResultType.ABORT - assert result2.get("reason") == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" assert mock_config_entry.data[CONF_TOKEN] == "987654" @@ -343,27 +370,27 @@ async def test_reauth_flow_errors( result = await mock_config_entry.start_reauth_flow(hass) - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "123456", }, ) - assert result2.get("type") is FlowResultType.FORM - assert result2.get("step_id") == "reauth_confirm" - assert result2.get("errors") == expected_error + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"] == expected_error mock_tailwind.status.side_effect = None - result3 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( 
result["flow_id"], user_input={ CONF_TOKEN: "123456", }, ) - assert result3.get("type") is FlowResultType.ABORT - assert result3.get("reason") == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" async def test_dhcp_discovery_updates_entry( @@ -384,8 +411,8 @@ async def test_dhcp_discovery_updates_entry( ), ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" assert mock_config_entry.data[CONF_HOST] == "127.0.0.1" @@ -404,5 +431,5 @@ async def test_dhcp_discovery_ignores_unknown(hass: HomeAssistant) -> None: ), ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "unknown" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unknown" diff --git a/tests/components/tailwind/test_init.py b/tests/components/tailwind/test_init.py index 8ea5f1108f4..8e075a26279 100644 --- a/tests/components/tailwind/test_init.py +++ b/tests/components/tailwind/test_init.py @@ -66,8 +66,8 @@ async def test_config_entry_authentication_failed( assert len(flows) == 1 flow = flows[0] - assert flow.get("step_id") == "reauth_confirm" - assert flow.get("handler") == DOMAIN + assert flow["step_id"] == "reauth_confirm" + assert flow["handler"] == DOMAIN assert "context" in flow assert flow["context"].get("source") == SOURCE_REAUTH From 006b3b0e2235e397262cbcc6dcacea2a79bca44b Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Thu, 12 Dec 2024 14:51:15 +0100 Subject: [PATCH 0557/1198] Bump uv to 0.5.8 (#133036) --- Dockerfile | 2 +- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- script/hassfest/docker/Dockerfile | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Dockerfile b/Dockerfile index 61d64212b40..630fc19496c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,7 +13,7 @@ ENV \ ARG QEMU_CPU # Install uv -RUN pip3 install uv==0.5.4 +RUN pip3 install uv==0.5.8 WORKDIR /usr/src diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index e7d46787f5d..b2dd0cf251c 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -65,7 +65,7 @@ standard-telnetlib==3.13.0;python_version>='3.13' typing-extensions>=4.12.2,<5.0 ulid-transform==1.0.2 urllib3>=1.26.5,<2 -uv==0.5.4 +uv==0.5.8 voluptuous-openapi==0.0.5 voluptuous-serialize==2.6.0 voluptuous==0.15.2 diff --git a/pyproject.toml b/pyproject.toml index 375e57126f2..2930d381d2a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -75,7 +75,7 @@ dependencies = [ # Temporary setting an upper bound, to prevent compat issues with urllib3>=2 # https://github.com/home-assistant/core/issues/97248 "urllib3>=1.26.5,<2", - "uv==0.5.4", + "uv==0.5.8", "voluptuous==0.15.2", "voluptuous-serialize==2.6.0", "voluptuous-openapi==0.0.5", diff --git a/requirements.txt b/requirements.txt index e43822553f3..e80804569d3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -43,7 +43,7 @@ standard-telnetlib==3.13.0;python_version>='3.13' typing-extensions>=4.12.2,<5.0 ulid-transform==1.0.2 urllib3>=1.26.5,<2 -uv==0.5.4 +uv==0.5.8 voluptuous==0.15.2 voluptuous-serialize==2.6.0 voluptuous-openapi==0.0.5 diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index afedbd23cfe..a4f33c3ad40 100644 --- a/script/hassfest/docker/Dockerfile +++ 
b/script/hassfest/docker/Dockerfile @@ -14,7 +14,7 @@ WORKDIR "/github/workspace" COPY . /usr/src/homeassistant # Uv is only needed during build -RUN --mount=from=ghcr.io/astral-sh/uv:0.5.4,source=/uv,target=/bin/uv \ +RUN --mount=from=ghcr.io/astral-sh/uv:0.5.8,source=/uv,target=/bin/uv \ # Required for PyTurboJPEG apk add --no-cache libturbojpeg \ && uv pip install \ From 6d042d987fbe2634bbb56c33f83d8dcf5dcab6bf Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 15:11:13 +0100 Subject: [PATCH 0558/1198] Migrate emulated_hue light tests to use Kelvin (#133006) --- tests/components/emulated_hue/test_hue_api.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/tests/components/emulated_hue/test_hue_api.py b/tests/components/emulated_hue/test_hue_api.py index a445f8bae0d..8a340d5e2dd 100644 --- a/tests/components/emulated_hue/test_hue_api.py +++ b/tests/components/emulated_hue/test_hue_api.py @@ -793,7 +793,10 @@ async def test_put_light_state( await hass_hue.services.async_call( light.DOMAIN, const.SERVICE_TURN_ON, - {const.ATTR_ENTITY_ID: "light.ceiling_lights", light.ATTR_COLOR_TEMP: 20}, + { + const.ATTR_ENTITY_ID: "light.ceiling_lights", + light.ATTR_COLOR_TEMP_KELVIN: 50000, + }, blocking=True, ) @@ -802,8 +805,10 @@ async def test_put_light_state( ) assert ( - hass_hue.states.get("light.ceiling_lights").attributes[light.ATTR_COLOR_TEMP] - == 50 + hass_hue.states.get("light.ceiling_lights").attributes[ + light.ATTR_COLOR_TEMP_KELVIN + ] + == 20000 ) # mock light.turn_on call @@ -1785,7 +1790,7 @@ async def test_get_light_state_when_none( light.ATTR_BRIGHTNESS: None, light.ATTR_RGB_COLOR: None, light.ATTR_HS_COLOR: None, - light.ATTR_COLOR_TEMP: None, + light.ATTR_COLOR_TEMP_KELVIN: None, light.ATTR_XY_COLOR: None, light.ATTR_SUPPORTED_COLOR_MODES: [ light.COLOR_MODE_COLOR_TEMP, @@ -1813,7 +1818,7 @@ async def test_get_light_state_when_none( light.ATTR_BRIGHTNESS: None, light.ATTR_RGB_COLOR: None, light.ATTR_HS_COLOR: None, - light.ATTR_COLOR_TEMP: None, + light.ATTR_COLOR_TEMP_KELVIN: None, light.ATTR_XY_COLOR: None, light.ATTR_SUPPORTED_COLOR_MODES: [ light.COLOR_MODE_COLOR_TEMP, From 37f2bde6f54bd65245c109c4c1e37cba8cc7ce45 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 15:11:34 +0100 Subject: [PATCH 0559/1198] Migrate esphome light tests to use Kelvin (#133008) --- tests/components/esphome/test_light.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/tests/components/esphome/test_light.py b/tests/components/esphome/test_light.py index 7f275fff4f2..8e4f37079d1 100644 --- a/tests/components/esphome/test_light.py +++ b/tests/components/esphome/test_light.py @@ -20,9 +20,7 @@ from homeassistant.components.light import ( ATTR_FLASH, ATTR_HS_COLOR, ATTR_MAX_COLOR_TEMP_KELVIN, - ATTR_MAX_MIREDS, ATTR_MIN_COLOR_TEMP_KELVIN, - ATTR_MIN_MIREDS, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, @@ -1379,9 +1377,6 @@ async def test_light_color_temp( assert state.state == STATE_ON attributes = state.attributes - assert attributes[ATTR_MIN_MIREDS] == 153 - assert attributes[ATTR_MAX_MIREDS] == 370 - assert attributes[ATTR_MIN_COLOR_TEMP_KELVIN] == 2700 assert attributes[ATTR_MAX_COLOR_TEMP_KELVIN] == 6500 await hass.services.async_call( @@ -1454,9 +1449,6 @@ async def test_light_color_temp_no_mireds_set( assert state.state == STATE_ON attributes = state.attributes - assert attributes[ATTR_MIN_MIREDS] is None - assert attributes[ATTR_MAX_MIREDS] 
is None - assert attributes[ATTR_MIN_COLOR_TEMP_KELVIN] == 0 assert attributes[ATTR_MAX_COLOR_TEMP_KELVIN] == 0 await hass.services.async_call( @@ -1558,8 +1550,6 @@ async def test_light_color_temp_legacy( assert attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP assert attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.COLOR_TEMP] - assert attributes[ATTR_MIN_MIREDS] == 153 - assert attributes[ATTR_MAX_MIREDS] == 370 assert attributes[ATTR_MIN_COLOR_TEMP_KELVIN] == 2700 assert attributes[ATTR_MAX_COLOR_TEMP_KELVIN] == 6500 From 839312c65ce4e98024ad60ea3adabb96b0d5e9de Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 15:11:52 +0100 Subject: [PATCH 0560/1198] Migrate homekit light tests to use Kelvin (#133011) --- tests/components/homekit/test_type_lights.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/components/homekit/test_type_lights.py b/tests/components/homekit/test_type_lights.py index a45e4988c36..fb059b93a13 100644 --- a/tests/components/homekit/test_type_lights.py +++ b/tests/components/homekit/test_type_lights.py @@ -20,8 +20,8 @@ from homeassistant.components.light import ( ATTR_COLOR_MODE, ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, - ATTR_MAX_MIREDS, - ATTR_MIN_MIREDS, + ATTR_MAX_COLOR_TEMP_KELVIN, + ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, @@ -1391,8 +1391,8 @@ async def test_light_min_max_mireds(hass: HomeAssistant, hk_driver) -> None: { ATTR_SUPPORTED_COLOR_MODES: [ColorMode.COLOR_TEMP], ATTR_BRIGHTNESS: 255, - ATTR_MAX_MIREDS: 500.5, - ATTR_MIN_MIREDS: 153.5, + ATTR_MIN_COLOR_TEMP_KELVIN: 1999, + ATTR_MAX_COLOR_TEMP_KELVIN: 6499, }, ) await hass.async_block_till_done() From 0a748252e757f423fb5511dbfa7d8f8e9d734311 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Thu, 12 Dec 2024 15:14:28 +0100 Subject: [PATCH 0561/1198] Improve Callable annotations (#133050) --- homeassistant/components/crownstone/config_flow.py | 2 +- homeassistant/components/dsmr/sensor.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/crownstone/config_flow.py b/homeassistant/components/crownstone/config_flow.py index bf6e9204714..2a96098421a 100644 --- a/homeassistant/components/crownstone/config_flow.py +++ b/homeassistant/components/crownstone/config_flow.py @@ -49,7 +49,7 @@ class BaseCrownstoneFlowHandler(ConfigEntryBaseFlow): cloud: CrownstoneCloud def __init__( - self, flow_type: str, create_entry_cb: Callable[..., ConfigFlowResult] + self, flow_type: str, create_entry_cb: Callable[[], ConfigFlowResult] ) -> None: """Set up flow instance.""" self.flow_type = flow_type diff --git a/homeassistant/components/dsmr/sensor.py b/homeassistant/components/dsmr/sensor.py index a069c32be04..213e948bafb 100644 --- a/homeassistant/components/dsmr/sensor.py +++ b/homeassistant/components/dsmr/sensor.py @@ -549,7 +549,7 @@ async def async_setup_entry( dsmr_version = entry.data[CONF_DSMR_VERSION] entities: list[DSMREntity] = [] initialized: bool = False - add_entities_handler: Callable[..., None] | None + add_entities_handler: Callable[[], None] | None @callback def init_async_add_entities(telegram: Telegram) -> None: From 5c6e4ad191c755315de87a77af05d61655f3929a Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Thu, 12 Dec 2024 16:01:57 +0100 Subject: [PATCH 0562/1198] Use PEP 695 TypeVar syntax (#133049) --- 
homeassistant/components/motionblinds_ble/sensor.py | 7 ++----- homeassistant/components/powerfox/sensor.py | 7 +++---- homeassistant/components/powerwall/sensor.py | 12 +++++------- homeassistant/helpers/event.py | 7 +++---- 4 files changed, 13 insertions(+), 20 deletions(-) diff --git a/homeassistant/components/motionblinds_ble/sensor.py b/homeassistant/components/motionblinds_ble/sensor.py index aa0f5ef7c90..740a0509a9e 100644 --- a/homeassistant/components/motionblinds_ble/sensor.py +++ b/homeassistant/components/motionblinds_ble/sensor.py @@ -6,7 +6,6 @@ from collections.abc import Callable from dataclasses import dataclass import logging from math import ceil -from typing import Generic, TypeVar from motionblindsble.const import ( MotionBlindType, @@ -45,11 +44,9 @@ _LOGGER = logging.getLogger(__name__) PARALLEL_UPDATES = 0 -_T = TypeVar("_T") - @dataclass(frozen=True, kw_only=True) -class MotionblindsBLESensorEntityDescription(SensorEntityDescription, Generic[_T]): +class MotionblindsBLESensorEntityDescription[_T](SensorEntityDescription): """Entity description of a sensor entity with initial_value attribute.""" initial_value: str | None = None @@ -110,7 +107,7 @@ async def async_setup_entry( async_add_entities(entities) -class MotionblindsBLESensorEntity(MotionblindsBLEEntity, SensorEntity, Generic[_T]): +class MotionblindsBLESensorEntity[_T](MotionblindsBLEEntity, SensorEntity): """Representation of a sensor entity.""" entity_description: MotionblindsBLESensorEntityDescription[_T] diff --git a/homeassistant/components/powerfox/sensor.py b/homeassistant/components/powerfox/sensor.py index af6f0301b0c..7771f96dd81 100644 --- a/homeassistant/components/powerfox/sensor.py +++ b/homeassistant/components/powerfox/sensor.py @@ -4,7 +4,6 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from typing import Generic, TypeVar from powerfox import Device, PowerMeter, WaterMeter @@ -22,11 +21,11 @@ from . 
import PowerfoxConfigEntry from .coordinator import PowerfoxDataUpdateCoordinator from .entity import PowerfoxEntity -T = TypeVar("T", PowerMeter, WaterMeter) - @dataclass(frozen=True, kw_only=True) -class PowerfoxSensorEntityDescription(Generic[T], SensorEntityDescription): +class PowerfoxSensorEntityDescription[T: (PowerMeter, WaterMeter)]( + SensorEntityDescription +): """Describes Poweropti sensor entity.""" value_fn: Callable[[T], float | int | None] diff --git a/homeassistant/components/powerwall/sensor.py b/homeassistant/components/powerwall/sensor.py index 9423d65b0fc..28506e2a60c 100644 --- a/homeassistant/components/powerwall/sensor.py +++ b/homeassistant/components/powerwall/sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass from operator import attrgetter, methodcaller -from typing import TYPE_CHECKING, Generic, TypeVar +from typing import TYPE_CHECKING from tesla_powerwall import GridState, MeterResponse, MeterType @@ -35,14 +35,12 @@ from .models import BatteryResponse, PowerwallConfigEntry, PowerwallRuntimeData _METER_DIRECTION_EXPORT = "export" _METER_DIRECTION_IMPORT = "import" -_ValueParamT = TypeVar("_ValueParamT") -_ValueT = TypeVar("_ValueT", bound=float | int | str | None) +type _ValueType = float | int | str | None @dataclass(frozen=True, kw_only=True) -class PowerwallSensorEntityDescription( - SensorEntityDescription, - Generic[_ValueParamT, _ValueT], +class PowerwallSensorEntityDescription[_ValueParamT, _ValueT: _ValueType]( + SensorEntityDescription ): """Describes Powerwall entity.""" @@ -389,7 +387,7 @@ class PowerWallImportSensor(PowerWallEnergyDirectionSensor): return meter.get_energy_imported() -class PowerWallBatterySensor(BatteryEntity, SensorEntity, Generic[_ValueT]): +class PowerWallBatterySensor[_ValueT: _ValueType](BatteryEntity, SensorEntity): """Representation of an Powerwall Battery sensor.""" entity_description: PowerwallSensorEntityDescription[BatteryResponse, _ValueT] diff --git a/homeassistant/helpers/event.py b/homeassistant/helpers/event.py index 578132f358f..72a4ef3c050 100644 --- a/homeassistant/helpers/event.py +++ b/homeassistant/helpers/event.py @@ -90,7 +90,6 @@ RANDOM_MICROSECOND_MIN = 50000 RANDOM_MICROSECOND_MAX = 500000 _TypedDictT = TypeVar("_TypedDictT", bound=Mapping[str, Any]) -_StateEventDataT = TypeVar("_StateEventDataT", bound=EventStateEventData) @dataclass(slots=True, frozen=True) @@ -333,7 +332,7 @@ def async_track_state_change_event( @callback -def _async_dispatch_entity_id_event_soon( +def _async_dispatch_entity_id_event_soon[_StateEventDataT: EventStateEventData]( hass: HomeAssistant, callbacks: dict[str, list[HassJob[[Event[_StateEventDataT]], Any]]], event: Event[_StateEventDataT], @@ -343,7 +342,7 @@ def _async_dispatch_entity_id_event_soon( @callback -def _async_dispatch_entity_id_event( +def _async_dispatch_entity_id_event[_StateEventDataT: EventStateEventData]( hass: HomeAssistant, callbacks: dict[str, list[HassJob[[Event[_StateEventDataT]], Any]]], event: Event[_StateEventDataT], @@ -363,7 +362,7 @@ def _async_dispatch_entity_id_event( @callback -def _async_state_filter( +def _async_state_filter[_StateEventDataT: EventStateEventData]( hass: HomeAssistant, callbacks: dict[str, list[HassJob[[Event[_StateEventDataT]], Any]]], event_data: _StateEventDataT, From 33c799b2d074bbc8feb3417315fb27ea5b6ee88f Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 16:42:10 +0100 Subject: [PATCH 
0563/1198] Migrate mqtt light tests to use Kelvin (#133035) --- tests/components/mqtt/test_light_json.py | 6 +++--- tests/components/mqtt/test_light_template.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/components/mqtt/test_light_json.py b/tests/components/mqtt/test_light_json.py index 7d8ff241d3c..18627c4f6ef 100644 --- a/tests/components/mqtt/test_light_json.py +++ b/tests/components/mqtt/test_light_json.py @@ -435,7 +435,7 @@ async def test_single_color_mode( assert state.state == STATE_ON assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes - assert state.attributes.get(light.ATTR_COLOR_TEMP) == 192 + assert state.attributes.get(light.ATTR_COLOR_TEMP_KELVIN) == 5208 assert state.attributes.get(light.ATTR_BRIGHTNESS) == 50 assert state.attributes.get(light.ATTR_COLOR_MODE) == color_modes[0] @@ -494,7 +494,7 @@ async def test_controlling_state_with_unknown_color_mode( ) state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get(light.ATTR_COLOR_TEMP) is None + assert state.attributes.get(light.ATTR_COLOR_TEMP_KELVIN) is None assert state.attributes.get(light.ATTR_BRIGHTNESS) is None assert state.attributes.get(light.ATTR_COLOR_MODE) == light.ColorMode.UNKNOWN @@ -507,7 +507,7 @@ async def test_controlling_state_with_unknown_color_mode( state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get(light.ATTR_COLOR_TEMP) == 192 + assert state.attributes.get(light.ATTR_COLOR_TEMP_KELVIN) == 5208 assert state.attributes.get(light.ATTR_BRIGHTNESS) == 50 assert state.attributes.get(light.ATTR_COLOR_MODE) == light.ColorMode.COLOR_TEMP diff --git a/tests/components/mqtt/test_light_template.py b/tests/components/mqtt/test_light_template.py index 64cdff370be..b17637e43b0 100644 --- a/tests/components/mqtt/test_light_template.py +++ b/tests/components/mqtt/test_light_template.py @@ -212,7 +212,7 @@ async def test_single_color_mode( assert state.state == STATE_ON assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes - assert state.attributes.get(light.ATTR_COLOR_TEMP) == 192 + assert state.attributes.get(light.ATTR_COLOR_TEMP_KELVIN) == 5208 assert state.attributes.get(light.ATTR_BRIGHTNESS) == 50 assert state.attributes.get(light.ATTR_COLOR_MODE) == color_modes[0] From 2ce2765e674fe6ebc0f8d9abadda5ccc14e583a2 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 16:49:25 +0100 Subject: [PATCH 0564/1198] Adjust light test helpers to use Kelvin, and cleanup unused helpers (#133048) Cleanup light test helper methods --- .core_files.yaml | 1 + tests/components/light/common.py | 107 +------------------ tests/components/mqtt/test_light.py | 4 +- tests/components/mqtt/test_light_json.py | 12 ++- tests/components/mqtt/test_light_template.py | 8 +- tests/components/tasmota/test_light.py | 10 +- 6 files changed, 28 insertions(+), 114 deletions(-) diff --git a/.core_files.yaml b/.core_files.yaml index cc99487f68d..2624c4432be 100644 --- a/.core_files.yaml +++ b/.core_files.yaml @@ -132,6 +132,7 @@ tests: &tests - tests/components/conftest.py - tests/components/diagnostics/** - tests/components/history/** + - tests/components/light/common.py - tests/components/logbook/** - tests/components/recorder/** - tests/components/repairs/** diff --git a/tests/components/light/common.py b/tests/components/light/common.py index 147f2336876..d696c7ab8cf 100644 --- a/tests/components/light/common.py +++ 
b/tests/components/light/common.py @@ -10,11 +10,10 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_BRIGHTNESS_PCT, ATTR_COLOR_NAME, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, - ATTR_KELVIN, ATTR_PROFILE, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, @@ -35,54 +34,10 @@ from homeassistant.const import ( SERVICE_TURN_ON, ) from homeassistant.core import HomeAssistant -from homeassistant.loader import bind_hass from tests.common import MockToggleEntity -@bind_hass -def turn_on( - hass: HomeAssistant, - entity_id: str = ENTITY_MATCH_ALL, - transition: float | None = None, - brightness: int | None = None, - brightness_pct: float | None = None, - rgb_color: tuple[int, int, int] | None = None, - rgbw_color: tuple[int, int, int, int] | None = None, - rgbww_color: tuple[int, int, int, int, int] | None = None, - xy_color: tuple[float, float] | None = None, - hs_color: tuple[float, float] | None = None, - color_temp: int | None = None, - kelvin: int | None = None, - profile: str | None = None, - flash: str | None = None, - effect: str | None = None, - color_name: str | None = None, - white: bool | None = None, -) -> None: - """Turn all or specified light on.""" - hass.add_job( - async_turn_on, - hass, - entity_id, - transition, - brightness, - brightness_pct, - rgb_color, - rgbw_color, - rgbww_color, - xy_color, - hs_color, - color_temp, - kelvin, - profile, - flash, - effect, - color_name, - white, - ) - - async def async_turn_on( hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL, @@ -94,8 +49,7 @@ async def async_turn_on( rgbww_color: tuple[int, int, int, int, int] | None = None, xy_color: tuple[float, float] | None = None, hs_color: tuple[float, float] | None = None, - color_temp: int | None = None, - kelvin: int | None = None, + color_temp_kelvin: int | None = None, profile: str | None = None, flash: str | None = None, effect: str | None = None, @@ -116,8 +70,7 @@ async def async_turn_on( (ATTR_RGBWW_COLOR, rgbww_color), (ATTR_XY_COLOR, xy_color), (ATTR_HS_COLOR, hs_color), - (ATTR_COLOR_TEMP, color_temp), - (ATTR_KELVIN, kelvin), + (ATTR_COLOR_TEMP_KELVIN, color_temp_kelvin), (ATTR_FLASH, flash), (ATTR_EFFECT, effect), (ATTR_COLOR_NAME, color_name), @@ -129,17 +82,6 @@ async def async_turn_on( await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True) -@bind_hass -def turn_off( - hass: HomeAssistant, - entity_id: str = ENTITY_MATCH_ALL, - transition: float | None = None, - flash: str | None = None, -) -> None: - """Turn all or specified light off.""" - hass.add_job(async_turn_off, hass, entity_id, transition, flash) - - async def async_turn_off( hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL, @@ -160,43 +102,6 @@ async def async_turn_off( await hass.services.async_call(DOMAIN, SERVICE_TURN_OFF, data, blocking=True) -@bind_hass -def toggle( - hass: HomeAssistant, - entity_id: str = ENTITY_MATCH_ALL, - transition: float | None = None, - brightness: int | None = None, - brightness_pct: float | None = None, - rgb_color: tuple[int, int, int] | None = None, - xy_color: tuple[float, float] | None = None, - hs_color: tuple[float, float] | None = None, - color_temp: int | None = None, - kelvin: int | None = None, - profile: str | None = None, - flash: str | None = None, - effect: str | None = None, - color_name: str | None = None, -) -> None: - """Toggle all or specified light.""" - hass.add_job( - async_toggle, - hass, - entity_id, - transition, - brightness, - brightness_pct, - rgb_color, - xy_color, - hs_color, - 
color_temp, - kelvin, - profile, - flash, - effect, - color_name, - ) - - async def async_toggle( hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL, @@ -206,8 +111,7 @@ async def async_toggle( rgb_color: tuple[int, int, int] | None = None, xy_color: tuple[float, float] | None = None, hs_color: tuple[float, float] | None = None, - color_temp: int | None = None, - kelvin: int | None = None, + color_temp_kelvin: int | None = None, profile: str | None = None, flash: str | None = None, effect: str | None = None, @@ -225,8 +129,7 @@ async def async_toggle( (ATTR_RGB_COLOR, rgb_color), (ATTR_XY_COLOR, xy_color), (ATTR_HS_COLOR, hs_color), - (ATTR_COLOR_TEMP, color_temp), - (ATTR_KELVIN, kelvin), + (ATTR_COLOR_TEMP_KELVIN, color_temp_kelvin), (ATTR_FLASH, flash), (ATTR_EFFECT, effect), (ATTR_COLOR_NAME, color_name), diff --git a/tests/components/mqtt/test_light.py b/tests/components/mqtt/test_light.py index 8e9e2abb85a..ed4b16e3d0c 100644 --- a/tests/components/mqtt/test_light.py +++ b/tests/components/mqtt/test_light.py @@ -1148,7 +1148,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get(light.ATTR_COLOR_MODE) == "xy" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes - await common.async_turn_on(hass, "light.test", color_temp=125) + await common.async_turn_on(hass, "light.test", color_temp_kelvin=8000) mqtt_mock.async_publish.assert_has_calls( [ call("test_light_rgb/color_temp/set", "125", 2, False), @@ -1321,7 +1321,7 @@ async def test_sending_mqtt_color_temp_command_with_template( state = hass.states.get("light.test") assert state.state == STATE_UNKNOWN - await common.async_turn_on(hass, "light.test", color_temp=100) + await common.async_turn_on(hass, "light.test", color_temp_kelvin=10000) mqtt_mock.async_publish.assert_has_calls( [ diff --git a/tests/components/mqtt/test_light_json.py b/tests/components/mqtt/test_light_json.py index 18627c4f6ef..b1031bec342 100644 --- a/tests/components/mqtt/test_light_json.py +++ b/tests/components/mqtt/test_light_json.py @@ -423,7 +423,9 @@ async def test_single_color_mode( state = hass.states.get("light.test") assert state.state == STATE_UNKNOWN - await common.async_turn_on(hass, "light.test", brightness=50, color_temp=192) + await common.async_turn_on( + hass, "light.test", brightness=50, color_temp_kelvin=5208 + ) async_fire_mqtt_message( hass, @@ -458,7 +460,9 @@ async def test_turn_on_with_unknown_color_mode_optimistic( assert state.state == STATE_ON # Turn on the light with brightness or color_temp attributes - await common.async_turn_on(hass, "light.test", brightness=50, color_temp=192) + await common.async_turn_on( + hass, "light.test", brightness=50, color_temp_kelvin=5208 + ) state = hass.states.get("light.test") assert state.attributes.get("color_mode") == light.ColorMode.COLOR_TEMP assert state.attributes.get("brightness") == 50 @@ -1083,7 +1087,7 @@ async def test_sending_mqtt_commands_and_optimistic( state = hass.states.get("light.test") assert state.state == STATE_ON - await common.async_turn_on(hass, "light.test", color_temp=90) + await common.async_turn_on(hass, "light.test", color_temp_kelvin=11111) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", @@ -1244,7 +1248,7 @@ async def test_sending_mqtt_commands_and_optimistic2( assert state.state == STATE_ON # Turn the light on with color temperature - await common.async_turn_on(hass, "light.test", color_temp=90) + await common.async_turn_on(hass, "light.test", color_temp_kelvin=11111) 
mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", JsonValidator('{"state":"ON","color_temp":90}'), diff --git a/tests/components/mqtt/test_light_template.py b/tests/components/mqtt/test_light_template.py index b17637e43b0..5ffff578b5b 100644 --- a/tests/components/mqtt/test_light_template.py +++ b/tests/components/mqtt/test_light_template.py @@ -205,7 +205,9 @@ async def test_single_color_mode( state = hass.states.get("light.test") assert state.state == STATE_UNKNOWN - await common.async_turn_on(hass, "light.test", brightness=50, color_temp=192) + await common.async_turn_on( + hass, "light.test", brightness=50, color_temp_kelvin=5208 + ) async_fire_mqtt_message(hass, "test_light", "on,50,192") color_modes = [light.ColorMode.COLOR_TEMP] state = hass.states.get("light.test") @@ -463,7 +465,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.state == STATE_ON # Set color_temp - await common.async_turn_on(hass, "light.test", color_temp=70) + await common.async_turn_on(hass, "light.test", color_temp_kelvin=14285) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,70,--,-", 2, False ) @@ -594,7 +596,7 @@ async def test_sending_mqtt_commands_non_optimistic_brightness_template( assert state.state == STATE_UNKNOWN # Set color_temp - await common.async_turn_on(hass, "light.test", color_temp=70) + await common.async_turn_on(hass, "light.test", color_temp_kelvin=14285) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,70,--,-", 0, False ) diff --git a/tests/components/tasmota/test_light.py b/tests/components/tasmota/test_light.py index f5802c509bf..4f4daee1301 100644 --- a/tests/components/tasmota/test_light.py +++ b/tests/components/tasmota/test_light.py @@ -1108,7 +1108,7 @@ async def test_sending_mqtt_commands_rgbww( ) mqtt_mock.async_publish.reset_mock() - await common.async_turn_on(hass, "light.tasmota_test", color_temp=200) + await common.async_turn_on(hass, "light.tasmota_test", color_temp_kelvin=5000) mqtt_mock.async_publish.assert_called_once_with( "tasmota_49A3BC/cmnd/Backlog", "NoDelay;Power1 ON;NoDelay;CT 200", @@ -1350,7 +1350,9 @@ async def test_transition( assert state.attributes.get("color_temp") == 153 # Set color_temp of the light from 153 to 500 @ 50%: Speed should be 6*2*2=24 - await common.async_turn_on(hass, "light.tasmota_test", color_temp=500, transition=6) + await common.async_turn_on( + hass, "light.tasmota_test", color_temp_kelvin=2000, transition=6 + ) mqtt_mock.async_publish.assert_called_once_with( "tasmota_49A3BC/cmnd/Backlog", "NoDelay;Fade2 1;NoDelay;Speed2 24;NoDelay;Power1 ON;NoDelay;CT 500", @@ -1369,7 +1371,9 @@ async def test_transition( assert state.attributes.get("color_temp") == 500 # Set color_temp of the light from 500 to 326 @ 50%: Speed should be 6*2*2*2=48->40 - await common.async_turn_on(hass, "light.tasmota_test", color_temp=326, transition=6) + await common.async_turn_on( + hass, "light.tasmota_test", color_temp_kelvin=3067, transition=6 + ) mqtt_mock.async_publish.assert_called_once_with( "tasmota_49A3BC/cmnd/Backlog", "NoDelay;Fade2 1;NoDelay;Speed2 40;NoDelay;Power1 ON;NoDelay;CT 326", From 0b18e51a13ef5e3f3fd24a9ab9df8f8cfd82b10e Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 16:49:50 +0100 Subject: [PATCH 0565/1198] Remove reference to self.min/max_mireds in mqtt light (#133055) --- homeassistant/components/mqtt/light/schema_basic.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git 
a/homeassistant/components/mqtt/light/schema_basic.py b/homeassistant/components/mqtt/light/schema_basic.py index a4d3ecb5f21..9cc50daa329 100644 --- a/homeassistant/components/mqtt/light/schema_basic.py +++ b/homeassistant/components/mqtt/light/schema_basic.py @@ -486,10 +486,8 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): def _converter( r: int, g: int, b: int, cw: int, ww: int ) -> tuple[int, int, int]: - min_kelvin = color_util.color_temperature_mired_to_kelvin(self.max_mireds) - max_kelvin = color_util.color_temperature_mired_to_kelvin(self.min_mireds) return color_util.color_rgbww_to_rgb( - r, g, b, cw, ww, min_kelvin, max_kelvin + r, g, b, cw, ww, self.min_color_temp_kelvin, self.max_color_temp_kelvin ) rgbww = self._rgbx_received( From 3d201690ce460f5cb9fa31adca6477ac63bbeb44 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 12 Dec 2024 16:54:21 +0100 Subject: [PATCH 0566/1198] Fix load of backup store (#133024) * Fix load of backup store * Tweak type annotations in test * Fix tests * Remove the new test * Remove snapshots --- homeassistant/components/backup/config.py | 32 ++++++++++++--- tests/components/backup/conftest.py | 20 +++++++++- tests/components/backup/test_websocket.py | 47 ++++------------------- 3 files changed, 53 insertions(+), 46 deletions(-) diff --git a/homeassistant/components/backup/config.py b/homeassistant/components/backup/config.py index 6304d0aa90b..32dfa95509c 100644 --- a/homeassistant/components/backup/config.py +++ b/homeassistant/components/backup/config.py @@ -33,8 +33,8 @@ class StoredBackupConfig(TypedDict): """Represent the stored backup config.""" create_backup: StoredCreateBackupConfig - last_attempted_strategy_backup: datetime | None - last_completed_strategy_backup: datetime | None + last_attempted_strategy_backup: str | None + last_completed_strategy_backup: str | None retention: StoredRetentionConfig schedule: StoredBackupSchedule @@ -59,6 +59,16 @@ class BackupConfigData: include_folders = None retention = data["retention"] + if last_attempted_str := data["last_attempted_strategy_backup"]: + last_attempted = dt_util.parse_datetime(last_attempted_str) + else: + last_attempted = None + + if last_attempted_str := data["last_completed_strategy_backup"]: + last_completed = dt_util.parse_datetime(last_attempted_str) + else: + last_completed = None + return cls( create_backup=CreateBackupConfig( agent_ids=data["create_backup"]["agent_ids"], @@ -69,8 +79,8 @@ class BackupConfigData: name=data["create_backup"]["name"], password=data["create_backup"]["password"], ), - last_attempted_strategy_backup=data["last_attempted_strategy_backup"], - last_completed_strategy_backup=data["last_completed_strategy_backup"], + last_attempted_strategy_backup=last_attempted, + last_completed_strategy_backup=last_completed, retention=RetentionConfig( copies=retention["copies"], days=retention["days"], @@ -80,10 +90,20 @@ class BackupConfigData: def to_dict(self) -> StoredBackupConfig: """Convert backup config data to a dict.""" + if self.last_attempted_strategy_backup: + last_attempted = self.last_attempted_strategy_backup.isoformat() + else: + last_attempted = None + + if self.last_completed_strategy_backup: + last_completed = self.last_completed_strategy_backup.isoformat() + else: + last_completed = None + return StoredBackupConfig( create_backup=self.create_backup.to_dict(), - last_attempted_strategy_backup=self.last_attempted_strategy_backup, - last_completed_strategy_backup=self.last_completed_strategy_backup, + 
last_attempted_strategy_backup=last_attempted, + last_completed_strategy_backup=last_completed, retention=self.retention.to_dict(), schedule=self.schedule.to_dict(), ) diff --git a/tests/components/backup/conftest.py b/tests/components/backup/conftest.py index 7ccfcc4e0f0..13f2537db47 100644 --- a/tests/components/backup/conftest.py +++ b/tests/components/backup/conftest.py @@ -2,12 +2,14 @@ from __future__ import annotations +from asyncio import Future from collections.abc import Generator from pathlib import Path -from unittest.mock import MagicMock, Mock, patch +from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest +from homeassistant.components.backup.manager import WrittenBackup from homeassistant.core import HomeAssistant from .common import TEST_BACKUP_PATH_ABC123 @@ -62,6 +64,22 @@ CONFIG_DIR = { CONFIG_DIR_DIRS = {Path(".storage"), Path("backups"), Path("tmp_backups")} +@pytest.fixture(name="create_backup") +def mock_create_backup() -> Generator[AsyncMock]: + """Mock manager create backup.""" + mock_written_backup = MagicMock(spec_set=WrittenBackup) + mock_written_backup.backup.backup_id = "abc123" + mock_written_backup.open_stream = AsyncMock() + mock_written_backup.release_stream = AsyncMock() + fut = Future() + fut.set_result(mock_written_backup) + with patch( + "homeassistant.components.backup.CoreBackupReaderWriter.async_create_backup" + ) as mock_create_backup: + mock_create_backup.return_value = (MagicMock(), fut) + yield mock_create_backup + + @pytest.fixture(name="mock_backup_generation") def mock_backup_generation_fixture( hass: HomeAssistant, mocked_json_bytes: Mock, mocked_tarfile: Mock diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index 9df93ee9c46..518005e8470 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -1,8 +1,6 @@ """Tests for the Backup integration.""" -from asyncio import Future from collections.abc import Generator -from datetime import datetime from typing import Any from unittest.mock import ANY, AsyncMock, MagicMock, call, patch @@ -17,7 +15,6 @@ from homeassistant.components.backup.manager import ( CreateBackupEvent, CreateBackupState, NewBackup, - WrittenBackup, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -89,22 +86,6 @@ def mock_delay_save() -> Generator[None]: yield -@pytest.fixture(name="create_backup") -def mock_create_backup() -> Generator[AsyncMock]: - """Mock manager create backup.""" - mock_written_backup = MagicMock(spec_set=WrittenBackup) - mock_written_backup.backup.backup_id = "abc123" - mock_written_backup.open_stream = AsyncMock() - mock_written_backup.release_stream = AsyncMock() - fut = Future() - fut.set_result(mock_written_backup) - with patch( - "homeassistant.components.backup.CoreBackupReaderWriter.async_create_backup" - ) as mock_create_backup: - mock_create_backup.return_value = (MagicMock(), fut) - yield mock_create_backup - - @pytest.fixture(name="delete_backup") def mock_delete_backup() -> Generator[AsyncMock]: """Mock manager delete backup.""" @@ -798,12 +779,8 @@ async def test_agents_info( "password": "test-password", }, "retention": {"copies": 3, "days": 7}, - "last_attempted_strategy_backup": datetime.fromisoformat( - "2024-10-26T04:45:00+01:00" - ), - "last_completed_strategy_backup": datetime.fromisoformat( - "2024-10-26T04:45:00+01:00" - ), + "last_attempted_strategy_backup": "2024-10-26T04:45:00+01:00", + 
"last_completed_strategy_backup": "2024-10-26T04:45:00+01:00", "schedule": {"state": "daily"}, }, }, @@ -838,12 +815,8 @@ async def test_agents_info( "password": None, }, "retention": {"copies": None, "days": 7}, - "last_attempted_strategy_backup": datetime.fromisoformat( - "2024-10-27T04:45:00+01:00" - ), - "last_completed_strategy_backup": datetime.fromisoformat( - "2024-10-26T04:45:00+01:00" - ), + "last_attempted_strategy_backup": "2024-10-27T04:45:00+01:00", + "last_completed_strategy_backup": "2024-10-26T04:45:00+01:00", "schedule": {"state": "never"}, }, }, @@ -1205,12 +1178,8 @@ async def test_config_schedule_logic( "password": "test-password", }, "retention": {"copies": None, "days": None}, - "last_attempted_strategy_backup": datetime.fromisoformat( - last_completed_strategy_backup - ), - "last_completed_strategy_backup": datetime.fromisoformat( - last_completed_strategy_backup - ), + "last_attempted_strategy_backup": last_completed_strategy_backup, + "last_completed_strategy_backup": last_completed_strategy_backup, "schedule": {"state": "daily"}, }, } @@ -1486,7 +1455,7 @@ async def test_config_retention_copies_logic( }, "retention": {"copies": None, "days": None}, "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": datetime.fromisoformat(last_backup_time), + "last_completed_strategy_backup": last_backup_time, "schedule": {"state": "daily"}, }, } @@ -1699,7 +1668,7 @@ async def test_config_retention_days_logic( }, "retention": {"copies": None, "days": None}, "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": datetime.fromisoformat(last_backup_time), + "last_completed_strategy_backup": last_backup_time, "schedule": {"state": "never"}, }, } From 0726809228789d3b1846f080dd0e10dd747ca60c Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Thu, 12 Dec 2024 17:00:11 +0100 Subject: [PATCH 0567/1198] Bump velbusaio to 2024.12.1 (#133056) --- homeassistant/components/velbus/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/velbus/manifest.json b/homeassistant/components/velbus/manifest.json index 5725a10b6f6..600370f87d9 100644 --- a/homeassistant/components/velbus/manifest.json +++ b/homeassistant/components/velbus/manifest.json @@ -13,7 +13,7 @@ "velbus-packet", "velbus-protocol" ], - "requirements": ["velbus-aio==2024.12.0"], + "requirements": ["velbus-aio==2024.12.1"], "usb": [ { "vid": "10CF", diff --git a/requirements_all.txt b/requirements_all.txt index fb873805873..ee253d174df 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2939,7 +2939,7 @@ vallox-websocket-api==5.3.0 vehicle==2.2.2 # homeassistant.components.velbus -velbus-aio==2024.12.0 +velbus-aio==2024.12.1 # homeassistant.components.venstar venstarcolortouch==0.19 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 83e7c89dd8b..65290d4b308 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2349,7 +2349,7 @@ vallox-websocket-api==5.3.0 vehicle==2.2.2 # homeassistant.components.velbus -velbus-aio==2024.12.0 +velbus-aio==2024.12.1 # homeassistant.components.venstar venstarcolortouch==0.19 From e7a43cfe090c0ccce30342c2479c6d81f5f91541 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 18:13:24 +0100 Subject: [PATCH 0568/1198] Migrate deconz light tests to use Kelvin (#133002) --- tests/components/deconz/test_light.py | 10 +++++----- 1 file changed, 5 
insertions(+), 5 deletions(-) diff --git a/tests/components/deconz/test_light.py b/tests/components/deconz/test_light.py index 15135a333ce..9ac15d4867b 100644 --- a/tests/components/deconz/test_light.py +++ b/tests/components/deconz/test_light.py @@ -11,7 +11,7 @@ from homeassistant.components.deconz.const import CONF_ALLOW_DECONZ_GROUPS from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, @@ -391,7 +391,7 @@ async def test_light_state_change( "call": { ATTR_ENTITY_ID: "light.hue_go", ATTR_BRIGHTNESS: 200, - ATTR_COLOR_TEMP: 200, + ATTR_COLOR_TEMP_KELVIN: 5000, ATTR_TRANSITION: 5, ATTR_FLASH: FLASH_SHORT, ATTR_EFFECT: EFFECT_COLORLOOP, @@ -804,7 +804,7 @@ async def test_groups( "call": { ATTR_ENTITY_ID: "light.group", ATTR_BRIGHTNESS: 200, - ATTR_COLOR_TEMP: 200, + ATTR_COLOR_TEMP_KELVIN: 5000, ATTR_TRANSITION: 5, ATTR_FLASH: FLASH_SHORT, ATTR_EFFECT: EFFECT_COLORLOOP, @@ -1079,7 +1079,7 @@ async def test_non_color_light_reports_color( hass.states.get("light.group").attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP ) - assert hass.states.get("light.group").attributes[ATTR_COLOR_TEMP] == 250 + assert hass.states.get("light.group").attributes[ATTR_COLOR_TEMP_KELVIN] == 4000 # Updating a scene will return a faulty color value # for a non-color light causing an exception in hs_color @@ -1099,7 +1099,7 @@ async def test_non_color_light_reports_color( group = hass.states.get("light.group") assert group.attributes[ATTR_COLOR_MODE] == ColorMode.XY assert group.attributes[ATTR_HS_COLOR] == (40.571, 41.176) - assert group.attributes.get(ATTR_COLOR_TEMP) is None + assert group.attributes.get(ATTR_COLOR_TEMP_KELVIN) is None @pytest.mark.parametrize( From 39e4719a43051d364d13195e49452c1fcf5612a5 Mon Sep 17 00:00:00 2001 From: Martin Hjelmare Date: Thu, 12 Dec 2024 18:47:37 +0100 Subject: [PATCH 0569/1198] Fix backup strategy retention filter (#133060) * Fix lint * Update tests * Fix backup strategy retention filter --- homeassistant/components/backup/config.py | 9 +- tests/components/backup/test_websocket.py | 307 +++++++++++++++++++--- 2 files changed, 275 insertions(+), 41 deletions(-) diff --git a/homeassistant/components/backup/config.py b/homeassistant/components/backup/config.py index 32dfa95509c..26ce691a4cc 100644 --- a/homeassistant/components/backup/config.py +++ b/homeassistant/components/backup/config.py @@ -423,7 +423,14 @@ async def _delete_filtered_backups( get_agent_errors, ) - LOGGER.debug("Total backups: %s", backups) + # only delete backups that are created by the backup strategy + backups = { + backup_id: backup + for backup_id, backup in backups.items() + if backup.with_strategy_settings + } + + LOGGER.debug("Total strategy backups: %s", backups) filtered_backups = backup_filter(backups) diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index 518005e8470..4a94689c19e 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -14,6 +14,7 @@ from homeassistant.components.backup.const import DATA_MANAGER, DOMAIN from homeassistant.components.backup.manager import ( CreateBackupEvent, CreateBackupState, + ManagerBackup, NewBackup, ) from homeassistant.core import HomeAssistant @@ -42,7 +43,7 @@ BACKUP_CALL = call( on_progress=ANY, ) -DEFAULT_STORAGE_DATA = { +DEFAULT_STORAGE_DATA: dict[str, Any] = { "backups": {}, "config": { "create_backup": { @@ -1248,9 +1249,26 @@ async def 
test_config_schedule_logic( "schedule": "daily", }, { - "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), - "backup-3": MagicMock(date="2024-11-12T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {}, {}, @@ -1270,9 +1288,26 @@ async def test_config_schedule_logic( "schedule": "daily", }, { - "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), - "backup-3": MagicMock(date="2024-11-12T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {}, {}, @@ -1292,10 +1327,31 @@ async def test_config_schedule_logic( "schedule": "daily", }, { - "backup-1": MagicMock(date="2024-11-09T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-3": MagicMock(date="2024-11-11T04:45:00+01:00"), - "backup-4": MagicMock(date="2024-11-12T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-09T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-5": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {}, {}, @@ -1315,10 +1371,31 @@ async def test_config_schedule_logic( "schedule": "daily", }, { - "backup-1": MagicMock(date="2024-11-09T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-3": MagicMock(date="2024-11-11T04:45:00+01:00"), - "backup-4": MagicMock(date="2024-11-12T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-09T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-5": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {}, {}, @@ -1338,9 +1415,26 @@ async def test_config_schedule_logic( "schedule": "daily", }, { - "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), - "backup-3": 
MagicMock(date="2024-11-12T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {"test-agent": BackupAgentError("Boom!")}, {}, @@ -1360,9 +1454,26 @@ async def test_config_schedule_logic( "schedule": "daily", }, { - "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), - "backup-3": MagicMock(date="2024-11-12T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {}, {"test-agent": BackupAgentError("Boom!")}, @@ -1382,10 +1493,31 @@ async def test_config_schedule_logic( "schedule": "daily", }, { - "backup-1": MagicMock(date="2024-11-09T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-3": MagicMock(date="2024-11-11T04:45:00+01:00"), - "backup-4": MagicMock(date="2024-11-12T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-09T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-5": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {}, {}, @@ -1405,7 +1537,16 @@ async def test_config_schedule_logic( "schedule": "daily", }, { - "backup-1": MagicMock(date="2024-11-12T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {}, {}, @@ -1518,8 +1659,21 @@ async def test_config_retention_copies_logic( "schedule": "never", }, { - "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {}, {}, @@ -1538,8 +1692,21 @@ async def test_config_retention_copies_logic( "schedule": "never", }, { - "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + 
with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {}, {}, @@ -1558,9 +1725,26 @@ async def test_config_retention_copies_logic( "schedule": "never", }, { - "backup-1": MagicMock(date="2024-11-09T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-3": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-09T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {}, {}, @@ -1579,8 +1763,21 @@ async def test_config_retention_copies_logic( "schedule": "never", }, { - "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {"test-agent": BackupAgentError("Boom!")}, {}, @@ -1599,8 +1796,21 @@ async def test_config_retention_copies_logic( "schedule": "never", }, { - "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {}, {"test-agent": BackupAgentError("Boom!")}, @@ -1619,9 +1829,26 @@ async def test_config_retention_copies_logic( "schedule": "never", }, { - "backup-1": MagicMock(date="2024-11-09T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-3": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-09T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {}, {}, From a6b785d937157009e339f6c6fc03dcac2e7891dc Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Thu, 12 Dec 2024 19:11:07 +0100 Subject: [PATCH 0570/1198] Update frontend to 20241127.8 (#133066) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json 
b/homeassistant/components/frontend/manifest.json index bfc08c6e11e..1f9988dff38 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20241127.7"] + "requirements": ["home-assistant-frontend==20241127.8"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index b2dd0cf251c..65a6890024f 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -34,7 +34,7 @@ habluetooth==3.6.0 hass-nabucasa==0.87.0 hassil==2.0.5 home-assistant-bluetooth==1.13.0 -home-assistant-frontend==20241127.7 +home-assistant-frontend==20241127.8 home-assistant-intents==2024.12.9 httpx==0.27.2 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index ee253d174df..e866ba901cc 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1128,7 +1128,7 @@ hole==0.8.0 holidays==0.62 # homeassistant.components.frontend -home-assistant-frontend==20241127.7 +home-assistant-frontend==20241127.8 # homeassistant.components.conversation home-assistant-intents==2024.12.9 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 65290d4b308..b93673f45bd 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -954,7 +954,7 @@ hole==0.8.0 holidays==0.62 # homeassistant.components.frontend -home-assistant-frontend==20241127.7 +home-assistant-frontend==20241127.8 # homeassistant.components.conversation home-assistant-intents==2024.12.9 From 12051787027352e13ea7a2835d590a88230bc31f Mon Sep 17 00:00:00 2001 From: Andrew Sayre <6730289+andrewsayre@users.noreply.github.com> Date: Thu, 12 Dec 2024 12:32:00 -0600 Subject: [PATCH 0571/1198] Add HEOS quality scale (#132311) --- .../components/heos/quality_scale.yaml | 114 ++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 114 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/heos/quality_scale.yaml diff --git a/homeassistant/components/heos/quality_scale.yaml b/homeassistant/components/heos/quality_scale.yaml new file mode 100644 index 00000000000..ed9939bf37c --- /dev/null +++ b/homeassistant/components/heos/quality_scale.yaml @@ -0,0 +1,114 @@ +rules: + # Bronze + action-setup: + status: todo + comment: Future enhancement to move custom actions for login/out into an options flow. + appropriate-polling: + status: done + comment: Integration is a local push integration + brands: done + common-modules: todo + config-flow-test-coverage: + status: todo + comment: + 1. The config flow is 100% covered, however some tests need to let HA create the flow + handler instead of doing it manually in the test. + 2. We should also make sure every test ends in either CREATE_ENTRY or ABORT so we test + that the flow is able to recover from an error. + config-flow: + status: todo + comment: | + 1. YAML import to be removed after core team meeting discussion on approach. + 2. Consider enhancement to automatically select a host when multiple are discovered. + 3.
Move hass.data[heos_discovered_hosts] into hass.data[heos] + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: todo + comment: | + Simplify by using async_on_remove instead of keeping track of listeners to remove + later in async_will_remove_from_hass. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: todo + test-before-setup: done + unique-config-entry: + status: todo + comment: | + The HEOS integration only supports a single config entry, but needs to be migrated to use + the `single_config_entry` flag. HEOS devices interconnect to each other, so connecting to + a single node yields access to all the devices setup with HEOS on your network. The HEOS API + documentation does not recommend connecting to multiple nodes which would provide no benefit. + # Silver + action-exceptions: + status: todo + comment: Actions currently only log and instead should raise exceptions. + config-entry-unloading: done + docs-configuration-parameters: + status: done + comment: | + The integration doesn't provide any additional configuration parameters. + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: + status: todo + comment: | + The integration currently spams the logs until reconnected + parallel-updates: + status: todo + comment: Needs to be set to 0. The underlying library handles parallel updates. + reauthentication-flow: + status: exempt + comment: | + This integration doesn't require re-authentication. + test-coverage: + status: todo + comment: | + 1. Integration has >95% coverage, however tests need to be updated to not patch internals. + 2. test_async_setup_entry_connect_failure and test_async_setup_entry_player_failure -> Instead of + calling async_setup_entry directly, rather use hass.config_entries.async_setup and then assert + the config_entry.state is what we expect. + 3. test_unload_entry -> We should use hass.config_entries.async_unload and assert the entry state + 4. Recommend using snapshot in test_state_attributes. + 5. Find a way to avoid using internal dispatcher in test_updates_from_connection_event. + # Gold + devices: + status: todo + comment: | + The integration creates devices, but needs to stringify the id for the device identifier and + also migrate the device. + diagnostics: todo + discovery-update-info: + status: todo + comment: Explore if this is possible.
+ discovery: done + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: done + docs-troubleshooting: + status: todo + comment: Has some troubleshooting steps, but needs to be improved + docs-use-cases: done + dynamic-devices: todo + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: todo + repair-issues: todo + stale-devices: todo + # Platinum + async-dependency: done + inject-websession: + status: done + comment: The integration does not use websession + strict-typing: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 49f05b78a16..784573f5f8f 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -481,7 +481,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "hddtemp", "hdmi_cec", "heatmiser", - "heos", "here_travel_time", "hikvision", "hikvisioncam", From b8ce1b010f1d144fcea88f777eb6f93055e5e2ec Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 19:39:24 +0100 Subject: [PATCH 0572/1198] Update demetriek to v1.1.0 (#133064) --- homeassistant/components/lametric/manifest.json | 2 +- homeassistant/components/lametric/number.py | 4 +++- homeassistant/components/lametric/switch.py | 9 +++++++-- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../components/lametric/snapshots/test_diagnostics.ambr | 1 + 6 files changed, 14 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/lametric/manifest.json b/homeassistant/components/lametric/manifest.json index b930192caf0..5a066d015f2 100644 --- a/homeassistant/components/lametric/manifest.json +++ b/homeassistant/components/lametric/manifest.json @@ -13,7 +13,7 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["demetriek"], - "requirements": ["demetriek==1.0.0"], + "requirements": ["demetriek==1.1.0"], "ssdp": [ { "deviceType": "urn:schemas-upnp-org:device:LaMetric:1" diff --git a/homeassistant/components/lametric/number.py b/homeassistant/components/lametric/number.py index cea9debb04b..1025e04a4a8 100644 --- a/homeassistant/components/lametric/number.py +++ b/homeassistant/components/lametric/number.py @@ -25,6 +25,7 @@ class LaMetricNumberEntityDescription(NumberEntityDescription): """Class describing LaMetric number entities.""" value_fn: Callable[[Device], int | None] + has_fn: Callable[[Device], bool] = lambda device: True set_value_fn: Callable[[LaMetricDevice, float], Awaitable[Any]] @@ -49,7 +50,8 @@ NUMBERS = [ native_step=1, native_min_value=0, native_max_value=100, - value_fn=lambda device: device.audio.volume, + has_fn=lambda device: bool(device.audio), + value_fn=lambda device: device.audio.volume if device.audio else 0, set_value_fn=lambda api, volume: api.audio(volume=int(volume)), ), ] diff --git a/homeassistant/components/lametric/switch.py b/homeassistant/components/lametric/switch.py index 9689bb7b802..3aabfaf17e1 100644 --- a/homeassistant/components/lametric/switch.py +++ b/homeassistant/components/lametric/switch.py @@ -25,6 +25,7 @@ class LaMetricSwitchEntityDescription(SwitchEntityDescription): """Class describing LaMetric switch entities.""" available_fn: Callable[[Device], bool] = lambda device: True + has_fn: Callable[[Device], bool] = lambda device: True is_on_fn: Callable[[Device], bool] set_fn: Callable[[LaMetricDevice, bool], Awaitable[Any]] @@ -34,8 +35,11
@@ SWITCHES = [ key="bluetooth", translation_key="bluetooth", entity_category=EntityCategory.CONFIG, - available_fn=lambda device: device.bluetooth.available, - is_on_fn=lambda device: device.bluetooth.active, + available_fn=lambda device: bool( + device.bluetooth and device.bluetooth.available + ), + has_fn=lambda device: bool(device.bluetooth), + is_on_fn=lambda device: bool(device.bluetooth and device.bluetooth.active), set_fn=lambda api, active: api.bluetooth(active=active), ), ] @@ -54,6 +58,7 @@ async def async_setup_entry( description=description, ) for description in SWITCHES + if description.has_fn(coordinator.data) ) diff --git a/requirements_all.txt b/requirements_all.txt index e866ba901cc..c361ffec5a8 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -747,7 +747,7 @@ defusedxml==0.7.1 deluge-client==1.10.2 # homeassistant.components.lametric -demetriek==1.0.0 +demetriek==1.1.0 # homeassistant.components.denonavr denonavr==1.0.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b93673f45bd..1c918cb2f1c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -637,7 +637,7 @@ defusedxml==0.7.1 deluge-client==1.10.2 # homeassistant.components.lametric -demetriek==1.0.0 +demetriek==1.1.0 # homeassistant.components.denonavr denonavr==1.0.1 diff --git a/tests/components/lametric/snapshots/test_diagnostics.ambr b/tests/components/lametric/snapshots/test_diagnostics.ambr index 15b35576ad4..7517cfe035e 100644 --- a/tests/components/lametric/snapshots/test_diagnostics.ambr +++ b/tests/components/lametric/snapshots/test_diagnostics.ambr @@ -26,6 +26,7 @@ 'brightness_mode': 'auto', 'display_type': 'mixed', 'height': 8, + 'on': None, 'screensaver': dict({ 'enabled': False, }), From 3c7502dd5da287992375056c27ef6eacd01b2523 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 19:46:35 +0100 Subject: [PATCH 0573/1198] Explicitly pass config entry to coordinator in Tailwind (#133065) --- homeassistant/components/tailwind/__init__.py | 3 +-- homeassistant/components/tailwind/binary_sensor.py | 2 +- homeassistant/components/tailwind/button.py | 2 +- homeassistant/components/tailwind/coordinator.py | 5 ++++- homeassistant/components/tailwind/cover.py | 2 +- homeassistant/components/tailwind/diagnostics.py | 2 +- homeassistant/components/tailwind/number.py | 2 +- homeassistant/components/tailwind/typing.py | 7 ------- 8 files changed, 10 insertions(+), 15 deletions(-) delete mode 100644 homeassistant/components/tailwind/typing.py diff --git a/homeassistant/components/tailwind/__init__.py b/homeassistant/components/tailwind/__init__.py index 6f1a234e94a..c48f5344763 100644 --- a/homeassistant/components/tailwind/__init__.py +++ b/homeassistant/components/tailwind/__init__.py @@ -8,8 +8,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from .const import DOMAIN -from .coordinator import TailwindDataUpdateCoordinator -from .typing import TailwindConfigEntry +from .coordinator import TailwindConfigEntry, TailwindDataUpdateCoordinator PLATFORMS = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.COVER, Platform.NUMBER] diff --git a/homeassistant/components/tailwind/binary_sensor.py b/homeassistant/components/tailwind/binary_sensor.py index 0ce0b4bd964..d2f8e1e2ced 100644 --- a/homeassistant/components/tailwind/binary_sensor.py +++ b/homeassistant/components/tailwind/binary_sensor.py @@ -16,8 +16,8 @@ from homeassistant.const import EntityCategory from homeassistant.core import 
HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from .coordinator import TailwindConfigEntry from .entity import TailwindDoorEntity -from .typing import TailwindConfigEntry @dataclass(kw_only=True, frozen=True) diff --git a/homeassistant/components/tailwind/button.py b/homeassistant/components/tailwind/button.py index 2a675bbfdf7..edff3434866 100644 --- a/homeassistant/components/tailwind/button.py +++ b/homeassistant/components/tailwind/button.py @@ -19,8 +19,8 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN +from .coordinator import TailwindConfigEntry from .entity import TailwindEntity -from .typing import TailwindConfigEntry @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/tailwind/coordinator.py b/homeassistant/components/tailwind/coordinator.py index 4d1b4af74c9..770751ccc3b 100644 --- a/homeassistant/components/tailwind/coordinator.py +++ b/homeassistant/components/tailwind/coordinator.py @@ -18,11 +18,13 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda from .const import DOMAIN, LOGGER +type TailwindConfigEntry = ConfigEntry[TailwindDataUpdateCoordinator] + class TailwindDataUpdateCoordinator(DataUpdateCoordinator[TailwindDeviceStatus]): """Class to manage fetching Tailwind data.""" - def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: + def __init__(self, hass: HomeAssistant, entry: TailwindConfigEntry) -> None: """Initialize the coordinator.""" self.tailwind = Tailwind( host=entry.data[CONF_HOST], @@ -32,6 +34,7 @@ class TailwindDataUpdateCoordinator(DataUpdateCoordinator[TailwindDeviceStatus]) super().__init__( hass, LOGGER, + config_entry=entry, name=f"{DOMAIN}_{entry.data[CONF_HOST]}", update_interval=timedelta(seconds=5), ) diff --git a/homeassistant/components/tailwind/cover.py b/homeassistant/components/tailwind/cover.py index 116fb4a9e6c..8ea1c7d4f6d 100644 --- a/homeassistant/components/tailwind/cover.py +++ b/homeassistant/components/tailwind/cover.py @@ -23,8 +23,8 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN, LOGGER +from .coordinator import TailwindConfigEntry from .entity import TailwindDoorEntity -from .typing import TailwindConfigEntry async def async_setup_entry( diff --git a/homeassistant/components/tailwind/diagnostics.py b/homeassistant/components/tailwind/diagnostics.py index 5d681356647..b7a51b56775 100644 --- a/homeassistant/components/tailwind/diagnostics.py +++ b/homeassistant/components/tailwind/diagnostics.py @@ -6,7 +6,7 @@ from typing import Any from homeassistant.core import HomeAssistant -from .typing import TailwindConfigEntry +from .coordinator import TailwindConfigEntry async def async_get_config_entry_diagnostics( diff --git a/homeassistant/components/tailwind/number.py b/homeassistant/components/tailwind/number.py index 0ff1f444280..b67df9a6a25 100644 --- a/homeassistant/components/tailwind/number.py +++ b/homeassistant/components/tailwind/number.py @@ -15,8 +15,8 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN +from .coordinator import TailwindConfigEntry from .entity import TailwindEntity -from .typing import TailwindConfigEntry @dataclass(frozen=True, kw_only=True) diff --git 
a/homeassistant/components/tailwind/typing.py b/homeassistant/components/tailwind/typing.py deleted file mode 100644 index 514a94a8e78..00000000000 --- a/homeassistant/components/tailwind/typing.py +++ /dev/null @@ -1,7 +0,0 @@ -"""Typings for the Tailwind integration.""" - -from homeassistant.config_entries import ConfigEntry - -from .coordinator import TailwindDataUpdateCoordinator - -type TailwindConfigEntry = ConfigEntry[TailwindDataUpdateCoordinator] From 40c3dd2095167c48c1ffd4dbcc16796d21393af5 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:08:07 +0100 Subject: [PATCH 0574/1198] Migrate group light tests to use Kelvin (#133010) --- tests/components/group/test_light.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/tests/components/group/test_light.py b/tests/components/group/test_light.py index af8556b5450..91604d663b3 100644 --- a/tests/components/group/test_light.py +++ b/tests/components/group/test_light.py @@ -12,7 +12,6 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, ATTR_COLOR_NAME, - ATTR_COLOR_TEMP, ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_EFFECT_LIST, @@ -792,19 +791,19 @@ async def test_emulated_color_temp_group(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.light_group", ATTR_COLOR_TEMP: 200}, + {ATTR_ENTITY_ID: "light.light_group", ATTR_COLOR_TEMP_KELVIN: 5000}, blocking=True, ) await hass.async_block_till_done() state = hass.states.get("light.test1") assert state.state == STATE_ON - assert state.attributes[ATTR_COLOR_TEMP] == 200 + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 5000 assert ATTR_HS_COLOR in state.attributes state = hass.states.get("light.test2") assert state.state == STATE_ON - assert state.attributes[ATTR_COLOR_TEMP] == 200 + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 5000 assert ATTR_HS_COLOR in state.attributes state = hass.states.get("light.test3") From ce70cb9e3370fbcba1ed79c7183ae4e279457477 Mon Sep 17 00:00:00 2001 From: Klaas Schoute Date: Thu, 12 Dec 2024 20:13:41 +0100 Subject: [PATCH 0575/1198] Use ConfigEntry runtime_data in easyEnergy (#133053) --- .../components/easyenergy/__init__.py | 18 +++---- .../components/easyenergy/coordinator.py | 7 ++- .../components/easyenergy/diagnostics.py | 50 +++++++++---------- homeassistant/components/easyenergy/sensor.py | 13 +++-- .../components/easyenergy/services.py | 9 ++-- tests/components/easyenergy/test_init.py | 2 - 6 files changed, 49 insertions(+), 50 deletions(-) diff --git a/homeassistant/components/easyenergy/__init__.py b/homeassistant/components/easyenergy/__init__.py index e520631158a..0548431f09d 100644 --- a/homeassistant/components/easyenergy/__init__.py +++ b/homeassistant/components/easyenergy/__init__.py @@ -2,7 +2,6 @@ from __future__ import annotations -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady @@ -10,10 +9,10 @@ from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import ConfigType from .const import DOMAIN -from .coordinator import EasyEnergyDataUpdateCoordinator +from .coordinator import EasyEnergyConfigEntry, EasyEnergyDataUpdateCoordinator from .services import async_setup_services -PLATFORMS = [Platform.SENSOR] +PLATFORMS: list[Platform] = [Platform.SENSOR] CONFIG_SCHEMA = 
cv.config_entry_only_config_schema(DOMAIN) @@ -25,25 +24,22 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: EasyEnergyConfigEntry) -> bool: """Set up easyEnergy from a config entry.""" - coordinator = EasyEnergyDataUpdateCoordinator(hass) + coordinator = EasyEnergyDataUpdateCoordinator(hass, entry) try: await coordinator.async_config_entry_first_refresh() except ConfigEntryNotReady: await coordinator.easyenergy.close() raise - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: EasyEnergyConfigEntry) -> bool: """Unload easyEnergy config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/easyenergy/coordinator.py b/homeassistant/components/easyenergy/coordinator.py index 8c1c593af93..e36bdf188ee 100644 --- a/homeassistant/components/easyenergy/coordinator.py +++ b/homeassistant/components/easyenergy/coordinator.py @@ -21,6 +21,8 @@ from homeassistant.util import dt as dt_util from .const import DOMAIN, LOGGER, SCAN_INTERVAL, THRESHOLD_HOUR +type EasyEnergyConfigEntry = ConfigEntry[EasyEnergyDataUpdateCoordinator] + class EasyEnergyData(NamedTuple): """Class for defining data in dict.""" @@ -33,15 +35,16 @@ class EasyEnergyData(NamedTuple): class EasyEnergyDataUpdateCoordinator(DataUpdateCoordinator[EasyEnergyData]): """Class to manage fetching easyEnergy data from single endpoint.""" - config_entry: ConfigEntry + config_entry: EasyEnergyConfigEntry - def __init__(self, hass: HomeAssistant) -> None: + def __init__(self, hass: HomeAssistant, entry: EasyEnergyConfigEntry) -> None: """Initialize global easyEnergy data updater.""" super().__init__( hass, LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL, + config_entry=entry, ) self.easyenergy = EasyEnergy(session=async_get_clientsession(hass)) diff --git a/homeassistant/components/easyenergy/diagnostics.py b/homeassistant/components/easyenergy/diagnostics.py index d6912e1c926..64f30ba61fd 100644 --- a/homeassistant/components/easyenergy/diagnostics.py +++ b/homeassistant/components/easyenergy/diagnostics.py @@ -5,12 +5,9 @@ from __future__ import annotations from datetime import timedelta from typing import Any -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from . 
import EasyEnergyDataUpdateCoordinator -from .const import DOMAIN -from .coordinator import EasyEnergyData +from .coordinator import EasyEnergyConfigEntry, EasyEnergyData def get_gas_price(data: EasyEnergyData, hours: int) -> float | None: @@ -32,41 +29,42 @@ def get_gas_price(data: EasyEnergyData, hours: int) -> float | None: async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: EasyEnergyConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: EasyEnergyDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator_data = entry.runtime_data.data + energy_today = coordinator_data.energy_today return { "entry": { "title": entry.title, }, "energy_usage": { - "current_hour_price": coordinator.data.energy_today.current_usage_price, - "next_hour_price": coordinator.data.energy_today.price_at_time( - coordinator.data.energy_today.utcnow() + timedelta(hours=1) + "current_hour_price": energy_today.current_usage_price, + "next_hour_price": energy_today.price_at_time( + energy_today.utcnow() + timedelta(hours=1) ), - "average_price": coordinator.data.energy_today.average_usage_price, - "max_price": coordinator.data.energy_today.extreme_usage_prices[1], - "min_price": coordinator.data.energy_today.extreme_usage_prices[0], - "highest_price_time": coordinator.data.energy_today.highest_usage_price_time, - "lowest_price_time": coordinator.data.energy_today.lowest_usage_price_time, - "percentage_of_max": coordinator.data.energy_today.pct_of_max_usage, + "average_price": energy_today.average_usage_price, + "max_price": energy_today.extreme_usage_prices[1], + "min_price": energy_today.extreme_usage_prices[0], + "highest_price_time": energy_today.highest_usage_price_time, + "lowest_price_time": energy_today.lowest_usage_price_time, + "percentage_of_max": energy_today.pct_of_max_usage, }, "energy_return": { - "current_hour_price": coordinator.data.energy_today.current_return_price, - "next_hour_price": coordinator.data.energy_today.price_at_time( - coordinator.data.energy_today.utcnow() + timedelta(hours=1), "return" + "current_hour_price": energy_today.current_return_price, + "next_hour_price": energy_today.price_at_time( + energy_today.utcnow() + timedelta(hours=1), "return" ), - "average_price": coordinator.data.energy_today.average_return_price, - "max_price": coordinator.data.energy_today.extreme_return_prices[1], - "min_price": coordinator.data.energy_today.extreme_return_prices[0], - "highest_price_time": coordinator.data.energy_today.highest_return_price_time, - "lowest_price_time": coordinator.data.energy_today.lowest_return_price_time, - "percentage_of_max": coordinator.data.energy_today.pct_of_max_return, + "average_price": energy_today.average_return_price, + "max_price": energy_today.extreme_return_prices[1], + "min_price": energy_today.extreme_return_prices[0], + "highest_price_time": energy_today.highest_return_price_time, + "lowest_price_time": energy_today.lowest_return_price_time, + "percentage_of_max": energy_today.pct_of_max_return, }, "gas": { - "current_hour_price": get_gas_price(coordinator.data, 0), - "next_hour_price": get_gas_price(coordinator.data, 1), + "current_hour_price": get_gas_price(coordinator_data, 0), + "next_hour_price": get_gas_price(coordinator_data, 1), }, } diff --git a/homeassistant/components/easyenergy/sensor.py b/homeassistant/components/easyenergy/sensor.py index 65fe2558d46..6976a38da49 100644 --- a/homeassistant/components/easyenergy/sensor.py +++ 
b/homeassistant/components/easyenergy/sensor.py @@ -13,7 +13,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CURRENCY_EURO, PERCENTAGE, @@ -27,7 +26,11 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN, SERVICE_TYPE_DEVICE_NAMES -from .coordinator import EasyEnergyData, EasyEnergyDataUpdateCoordinator +from .coordinator import ( + EasyEnergyConfigEntry, + EasyEnergyData, + EasyEnergyDataUpdateCoordinator, +) @dataclass(frozen=True, kw_only=True) @@ -208,10 +211,12 @@ def get_gas_price(data: EasyEnergyData, hours: int) -> float | None: async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: EasyEnergyConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up easyEnergy sensors based on a config entry.""" - coordinator: EasyEnergyDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities( EasyEnergySensorEntity(coordinator=coordinator, description=description) for description in SENSORS diff --git a/homeassistant/components/easyenergy/services.py b/homeassistant/components/easyenergy/services.py index 5b80cfafd08..cb5424496ac 100644 --- a/homeassistant/components/easyenergy/services.py +++ b/homeassistant/components/easyenergy/services.py @@ -10,7 +10,7 @@ from typing import Final from easyenergy import Electricity, Gas, VatOption import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import ( HomeAssistant, ServiceCall, @@ -23,7 +23,7 @@ from homeassistant.helpers import selector from homeassistant.util import dt as dt_util from .const import DOMAIN -from .coordinator import EasyEnergyDataUpdateCoordinator +from .coordinator import EasyEnergyConfigEntry, EasyEnergyDataUpdateCoordinator ATTR_CONFIG_ENTRY: Final = "config_entry" ATTR_START: Final = "start" @@ -91,7 +91,7 @@ def __get_coordinator( ) -> EasyEnergyDataUpdateCoordinator: """Get the coordinator from the entry.""" entry_id: str = call.data[ATTR_CONFIG_ENTRY] - entry: ConfigEntry | None = hass.config_entries.async_get_entry(entry_id) + entry: EasyEnergyConfigEntry | None = hass.config_entries.async_get_entry(entry_id) if not entry: raise ServiceValidationError( @@ -110,8 +110,7 @@ def __get_coordinator( }, ) - coordinator: EasyEnergyDataUpdateCoordinator = hass.data[DOMAIN][entry_id] - return coordinator + return entry.runtime_data async def __get_prices( diff --git a/tests/components/easyenergy/test_init.py b/tests/components/easyenergy/test_init.py index 74293049fd1..c3c917bc9ed 100644 --- a/tests/components/easyenergy/test_init.py +++ b/tests/components/easyenergy/test_init.py @@ -4,7 +4,6 @@ from unittest.mock import MagicMock, patch from easyenergy import EasyEnergyConnectionError -from homeassistant.components.easyenergy.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -24,7 +23,6 @@ async def test_load_unload_config_entry( await hass.config_entries.async_unload(mock_config_entry.entry_id) await hass.async_block_till_done() - assert not hass.data.get(DOMAIN) assert mock_config_entry.state is ConfigEntryState.NOT_LOADED 
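The easyEnergy patch above applies Home Assistant's typed runtime_data pattern: the update coordinator is attached to the config entry itself (entry.runtime_data) instead of being stored under hass.data[DOMAIN][entry.entry_id], and a `type` alias narrows ConfigEntry so the stored coordinator is fully typed. A minimal sketch of that pattern for a hypothetical "example" integration follows; ExampleCoordinator, ExampleConfigEntry, and the update interval are illustrative assumptions, not part of any patch in this series.

import logging
from datetime import timedelta

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)
PLATFORMS: list[Platform] = [Platform.SENSOR]


class ExampleCoordinator(DataUpdateCoordinator[dict]):
    """Hypothetical coordinator standing in for EasyEnergyDataUpdateCoordinator."""

    def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None:
        """Tie the coordinator to its config entry, as the patch above does."""
        super().__init__(
            hass,
            _LOGGER,
            name="example",
            update_interval=timedelta(minutes=10),  # assumed interval
            config_entry=entry,
        )

    async def _async_update_data(self) -> dict:
        """Placeholder fetch; a real integration would call its API client here."""
        return {}


# Narrow ConfigEntry so entry.runtime_data is typed as ExampleCoordinator.
type ExampleConfigEntry = ConfigEntry[ExampleCoordinator]


async def async_setup_entry(hass: HomeAssistant, entry: ExampleConfigEntry) -> bool:
    """Store the coordinator on the entry instead of in hass.data."""
    coordinator = ExampleCoordinator(hass, entry)
    await coordinator.async_config_entry_first_refresh()
    entry.runtime_data = coordinator
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True


async def async_unload_entry(hass: HomeAssistant, entry: ExampleConfigEntry) -> bool:
    """Unload platforms; no manual cleanup of hass.data is needed."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

Platform modules then receive the typed entry in their own async_setup_entry and read entry.runtime_data directly, which is exactly what the easyEnergy sensor.py hunk above does.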
From 32c1b519ad1940659eabd5e78fde831fb3243946 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:14:56 +0100 Subject: [PATCH 0576/1198] Improve auth generic typing (#133061) --- homeassistant/auth/__init__.py | 2 +- homeassistant/auth/mfa_modules/__init__.py | 18 ++++++++++++++---- homeassistant/auth/mfa_modules/notify.py | 6 ++---- homeassistant/auth/mfa_modules/totp.py | 5 ++--- homeassistant/auth/providers/__init__.py | 14 ++++++++++---- homeassistant/auth/providers/command_line.py | 14 ++++++++------ homeassistant/auth/providers/homeassistant.py | 6 +++--- .../auth/providers/insecure_example.py | 9 +++++---- .../auth/providers/trusted_networks.py | 10 +++++----- 9 files changed, 50 insertions(+), 34 deletions(-) diff --git a/homeassistant/auth/__init__.py b/homeassistant/auth/__init__.py index 21a4b6113d0..afe3b2d7aa3 100644 --- a/homeassistant/auth/__init__.py +++ b/homeassistant/auth/__init__.py @@ -115,7 +115,7 @@ class AuthManagerFlowManager( *, context: AuthFlowContext | None = None, data: dict[str, Any] | None = None, - ) -> LoginFlow: + ) -> LoginFlow[Any]: """Create a login flow.""" auth_provider = self.auth_manager.get_auth_provider(*handler_key) if not auth_provider: diff --git a/homeassistant/auth/mfa_modules/__init__.py b/homeassistant/auth/mfa_modules/__init__.py index d57a274c7ff..8a6430d770a 100644 --- a/homeassistant/auth/mfa_modules/__init__.py +++ b/homeassistant/auth/mfa_modules/__init__.py @@ -4,8 +4,9 @@ from __future__ import annotations import logging import types -from typing import Any +from typing import Any, Generic +from typing_extensions import TypeVar import voluptuous as vol from voluptuous.humanize import humanize_error @@ -34,6 +35,12 @@ DATA_REQS: HassKey[set[str]] = HassKey("mfa_auth_module_reqs_processed") _LOGGER = logging.getLogger(__name__) +_MultiFactorAuthModuleT = TypeVar( + "_MultiFactorAuthModuleT", + bound="MultiFactorAuthModule", + default="MultiFactorAuthModule", +) + class MultiFactorAuthModule: """Multi-factor Auth Module of validation function.""" @@ -71,7 +78,7 @@ class MultiFactorAuthModule: """Return a voluptuous schema to define mfa auth module's input.""" raise NotImplementedError - async def async_setup_flow(self, user_id: str) -> SetupFlow: + async def async_setup_flow(self, user_id: str) -> SetupFlow[Any]: """Return a data entry flow handler for setup module. Mfa module should extend SetupFlow @@ -95,11 +102,14 @@ class MultiFactorAuthModule: raise NotImplementedError -class SetupFlow(data_entry_flow.FlowHandler): +class SetupFlow(data_entry_flow.FlowHandler, Generic[_MultiFactorAuthModuleT]): """Handler for the setup flow.""" def __init__( - self, auth_module: MultiFactorAuthModule, setup_schema: vol.Schema, user_id: str + self, + auth_module: _MultiFactorAuthModuleT, + setup_schema: vol.Schema, + user_id: str, ) -> None: """Initialize the setup flow.""" self._auth_module = auth_module diff --git a/homeassistant/auth/mfa_modules/notify.py b/homeassistant/auth/mfa_modules/notify.py index d2010dc2c9d..b60a3012aac 100644 --- a/homeassistant/auth/mfa_modules/notify.py +++ b/homeassistant/auth/mfa_modules/notify.py @@ -162,7 +162,7 @@ class NotifyAuthModule(MultiFactorAuthModule): return sorted(unordered_services) - async def async_setup_flow(self, user_id: str) -> SetupFlow: + async def async_setup_flow(self, user_id: str) -> NotifySetupFlow: """Return a data entry flow handler for setup module. 
Mfa module should extend SetupFlow @@ -268,7 +268,7 @@ class NotifyAuthModule(MultiFactorAuthModule): await self.hass.services.async_call("notify", notify_service, data) -class NotifySetupFlow(SetupFlow): +class NotifySetupFlow(SetupFlow[NotifyAuthModule]): """Handler for the setup flow.""" def __init__( @@ -280,8 +280,6 @@ class NotifySetupFlow(SetupFlow): ) -> None: """Initialize the setup flow.""" super().__init__(auth_module, setup_schema, user_id) - # to fix typing complaint - self._auth_module: NotifyAuthModule = auth_module self._available_notify_services = available_notify_services self._secret: str | None = None self._count: int | None = None diff --git a/homeassistant/auth/mfa_modules/totp.py b/homeassistant/auth/mfa_modules/totp.py index 3306f76217f..625b273f39a 100644 --- a/homeassistant/auth/mfa_modules/totp.py +++ b/homeassistant/auth/mfa_modules/totp.py @@ -114,7 +114,7 @@ class TotpAuthModule(MultiFactorAuthModule): self._users[user_id] = ota_secret # type: ignore[index] return ota_secret - async def async_setup_flow(self, user_id: str) -> SetupFlow: + async def async_setup_flow(self, user_id: str) -> TotpSetupFlow: """Return a data entry flow handler for setup module. Mfa module should extend SetupFlow @@ -174,10 +174,9 @@ class TotpAuthModule(MultiFactorAuthModule): return bool(pyotp.TOTP(ota_secret).verify(code, valid_window=1)) -class TotpSetupFlow(SetupFlow): +class TotpSetupFlow(SetupFlow[TotpAuthModule]): """Handler for the setup flow.""" - _auth_module: TotpAuthModule _ota_secret: str _url: str _image: str diff --git a/homeassistant/auth/providers/__init__.py b/homeassistant/auth/providers/__init__.py index 34278c47df7..02f99e7bd71 100644 --- a/homeassistant/auth/providers/__init__.py +++ b/homeassistant/auth/providers/__init__.py @@ -5,8 +5,9 @@ from __future__ import annotations from collections.abc import Mapping import logging import types -from typing import Any +from typing import Any, Generic +from typing_extensions import TypeVar import voluptuous as vol from voluptuous.humanize import humanize_error @@ -46,6 +47,8 @@ AUTH_PROVIDER_SCHEMA = vol.Schema( extra=vol.ALLOW_EXTRA, ) +_AuthProviderT = TypeVar("_AuthProviderT", bound="AuthProvider", default="AuthProvider") + class AuthProvider: """Provider of user authentication.""" @@ -105,7 +108,7 @@ class AuthProvider: # Implement by extending class - async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow: + async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow[Any]: """Return the data flow for logging in with auth provider. Auth provider should extend LoginFlow and return an instance. 
@@ -192,12 +195,15 @@ async def load_auth_provider_module( return module -class LoginFlow(FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]]): +class LoginFlow( + FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]], + Generic[_AuthProviderT], +): """Handler for the login flow.""" _flow_result = AuthFlowResult - def __init__(self, auth_provider: AuthProvider) -> None: + def __init__(self, auth_provider: _AuthProviderT) -> None: """Initialize the login flow.""" self._auth_provider = auth_provider self._auth_module_id: str | None = None diff --git a/homeassistant/auth/providers/command_line.py b/homeassistant/auth/providers/command_line.py index 12447bc8c18..74630d925e1 100644 --- a/homeassistant/auth/providers/command_line.py +++ b/homeassistant/auth/providers/command_line.py @@ -6,7 +6,7 @@ import asyncio from collections.abc import Mapping import logging import os -from typing import Any, cast +from typing import Any import voluptuous as vol @@ -59,7 +59,9 @@ class CommandLineAuthProvider(AuthProvider): super().__init__(*args, **kwargs) self._user_meta: dict[str, dict[str, Any]] = {} - async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow: + async def async_login_flow( + self, context: AuthFlowContext | None + ) -> CommandLineLoginFlow: """Return a flow to login.""" return CommandLineLoginFlow(self) @@ -133,7 +135,7 @@ class CommandLineAuthProvider(AuthProvider): ) -class CommandLineLoginFlow(LoginFlow): +class CommandLineLoginFlow(LoginFlow[CommandLineAuthProvider]): """Handler for the login flow.""" async def async_step_init( @@ -145,9 +147,9 @@ class CommandLineLoginFlow(LoginFlow): if user_input is not None: user_input["username"] = user_input["username"].strip() try: - await cast( - CommandLineAuthProvider, self._auth_provider - ).async_validate_login(user_input["username"], user_input["password"]) + await self._auth_provider.async_validate_login( + user_input["username"], user_input["password"] + ) except InvalidAuthError: errors["base"] = "invalid_auth" diff --git a/homeassistant/auth/providers/homeassistant.py b/homeassistant/auth/providers/homeassistant.py index e5dded74762..522e5d77a29 100644 --- a/homeassistant/auth/providers/homeassistant.py +++ b/homeassistant/auth/providers/homeassistant.py @@ -305,7 +305,7 @@ class HassAuthProvider(AuthProvider): await data.async_load() self.data = data - async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow: + async def async_login_flow(self, context: AuthFlowContext | None) -> HassLoginFlow: """Return a flow to login.""" return HassLoginFlow(self) @@ -400,7 +400,7 @@ class HassAuthProvider(AuthProvider): pass -class HassLoginFlow(LoginFlow): +class HassLoginFlow(LoginFlow[HassAuthProvider]): """Handler for the login flow.""" async def async_step_init( @@ -411,7 +411,7 @@ class HassLoginFlow(LoginFlow): if user_input is not None: try: - await cast(HassAuthProvider, self._auth_provider).async_validate_login( + await self._auth_provider.async_validate_login( user_input["username"], user_input["password"] ) except InvalidAuth: diff --git a/homeassistant/auth/providers/insecure_example.py b/homeassistant/auth/providers/insecure_example.py index a7dced851a3..a92f5b55848 100644 --- a/homeassistant/auth/providers/insecure_example.py +++ b/homeassistant/auth/providers/insecure_example.py @@ -4,7 +4,6 @@ from __future__ import annotations from collections.abc import Mapping import hmac -from typing import cast import voluptuous as vol @@ -36,7 +35,9 @@ class 
InvalidAuthError(HomeAssistantError): class ExampleAuthProvider(AuthProvider): """Example auth provider based on hardcoded usernames and passwords.""" - async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow: + async def async_login_flow( + self, context: AuthFlowContext | None + ) -> ExampleLoginFlow: """Return a flow to login.""" return ExampleLoginFlow(self) @@ -93,7 +94,7 @@ class ExampleAuthProvider(AuthProvider): return UserMeta(name=name, is_active=True) -class ExampleLoginFlow(LoginFlow): +class ExampleLoginFlow(LoginFlow[ExampleAuthProvider]): """Handler for the login flow.""" async def async_step_init( @@ -104,7 +105,7 @@ class ExampleLoginFlow(LoginFlow): if user_input is not None: try: - cast(ExampleAuthProvider, self._auth_provider).async_validate_login( + self._auth_provider.async_validate_login( user_input["username"], user_input["password"] ) except InvalidAuthError: diff --git a/homeassistant/auth/providers/trusted_networks.py b/homeassistant/auth/providers/trusted_networks.py index f32c35d4bd5..799fd4d2e16 100644 --- a/homeassistant/auth/providers/trusted_networks.py +++ b/homeassistant/auth/providers/trusted_networks.py @@ -104,7 +104,9 @@ class TrustedNetworksAuthProvider(AuthProvider): """Trusted Networks auth provider does not support MFA.""" return False - async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow: + async def async_login_flow( + self, context: AuthFlowContext | None + ) -> TrustedNetworksLoginFlow: """Return a flow to login.""" assert context is not None ip_addr = cast(IPAddress, context.get("ip_address")) @@ -214,7 +216,7 @@ class TrustedNetworksAuthProvider(AuthProvider): self.async_validate_access(ip_address(remote_ip)) -class TrustedNetworksLoginFlow(LoginFlow): +class TrustedNetworksLoginFlow(LoginFlow[TrustedNetworksAuthProvider]): """Handler for the login flow.""" def __init__( @@ -235,9 +237,7 @@ class TrustedNetworksLoginFlow(LoginFlow): ) -> AuthFlowResult: """Handle the step of the form.""" try: - cast( - TrustedNetworksAuthProvider, self._auth_provider - ).async_validate_access(self._ip_address) + self._auth_provider.async_validate_access(self._ip_address) except InvalidAuthError: return self.async_abort(reason="not_allowed") From ad15786115673c5b3fe40ea2f5d61b4b896f433e Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 12 Dec 2024 20:16:18 +0100 Subject: [PATCH 0577/1198] Add support for subentries to config entries (#117355) * Add support for subentries to config entries * Improve error handling and test coverage * Include subentry_id in subentry containers * Auto-generate subentry_id and add optional unique_id * Tweak * Update tests * Fix stale docstring * Address review comments * Typing tweaks * Add methods to ConfigEntries to add and remove subentry * Improve ConfigSubentryData typed dict * Update test snapshots * Adjust tests * Fix unique_id logic * Allow multiple subentries with None unique_id * Add number of subentries to config entry JSON representation * Add subentry translation support * Allow integrations to implement multiple subentry flows * Update translations schema * Adjust exception text * Change subentry flow init step to user * Prevent creating a subentry with colliding unique_id * Update tests * Address review comments * Remove duplicated unique_id collision check * Remove change from the future * Improve test coverage * Add default value for unique_id --- .../components/config/config_entries.py | 126 ++++ homeassistant/config_entries.py | 315 ++++-
homeassistant/helpers/data_entry_flow.py | 4 +- script/hassfest/translations.py | 9 + tests/common.py | 2 + .../aemet/snapshots/test_diagnostics.ambr | 2 + .../airly/snapshots/test_diagnostics.ambr | 2 + .../airnow/snapshots/test_diagnostics.ambr | 2 + .../airvisual/snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + .../airzone/snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + .../axis/snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + .../blink/snapshots/test_diagnostics.ambr | 2 + .../braviatv/snapshots/test_diagnostics.ambr | 2 + .../co2signal/snapshots/test_diagnostics.ambr | 2 + .../coinbase/snapshots/test_diagnostics.ambr | 2 + .../comelit/snapshots/test_diagnostics.ambr | 4 + .../components/config/test_config_entries.py | 469 +++++++++++++ .../deconz/snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + .../ecovacs/snapshots/test_diagnostics.ambr | 4 + .../snapshots/test_config_flow.ambr | 4 + .../snapshots/test_diagnostics.ambr | 6 + .../esphome/snapshots/test_diagnostics.ambr | 2 + tests/components/esphome/test_diagnostics.py | 1 + .../forecast_solar/snapshots/test_init.ambr | 2 + .../fritz/snapshots/test_diagnostics.ambr | 2 + .../fronius/snapshots/test_diagnostics.ambr | 2 + .../fyta/snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_config_flow.ambr | 8 + .../gios/snapshots/test_diagnostics.ambr | 2 + .../goodwe/snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + tests/components/guardian/test_diagnostics.py | 1 + .../snapshots/test_config_flow.ambr | 16 + .../snapshots/test_diagnostics.ambr | 2 + .../imgw_pib/snapshots/test_diagnostics.ambr | 2 + .../iqvia/snapshots/test_diagnostics.ambr | 2 + .../kostal_plenticore/test_diagnostics.py | 1 + .../snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + .../madvr/snapshots/test_diagnostics.ambr | 2 + .../melcloud/snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + .../netatmo/snapshots/test_diagnostics.ambr | 2 + .../nextdns/snapshots/test_diagnostics.ambr | 2 + .../nice_go/snapshots/test_diagnostics.ambr | 2 + tests/components/notion/test_diagnostics.py | 1 + .../onvif/snapshots/test_diagnostics.ambr | 2 + tests/components/openuv/test_diagnostics.py | 1 + .../p1_monitor/snapshots/test_init.ambr | 4 + .../snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + .../components/philips_js/test_config_flow.py | 1 + .../pi_hole/snapshots/test_diagnostics.ambr | 2 + .../proximity/snapshots/test_diagnostics.ambr | 2 + tests/components/ps4/test_init.py | 1 + .../components/purpleair/test_diagnostics.py | 1 + .../snapshots/test_diagnostics.ambr | 4 + .../snapshots/test_diagnostics.ambr | 4 + .../recollect_waste/test_diagnostics.py | 1 + .../ridwell/snapshots/test_diagnostics.ambr | 2 + .../components/samsungtv/test_diagnostics.py | 3 + .../snapshots/test_diagnostics.ambr | 2 + .../components/simplisafe/test_diagnostics.py | 1 + .../solarlog/snapshots/test_diagnostics.ambr | 2 + tests/components/subaru/test_config_flow.py | 2 + .../switcher_kis/test_diagnostics.py | 1 + .../snapshots/test_diagnostics.ambr | 4 + .../snapshots/test_diagnostics.ambr | 2 + .../tractive/snapshots/test_diagnostics.ambr | 2 + .../tuya/snapshots/test_config_flow.ambr | 8 + 
.../twinkly/snapshots/test_diagnostics.ambr | 2 + .../unifi/snapshots/test_diagnostics.ambr | 2 + .../uptime/snapshots/test_config_flow.ambr | 4 + .../snapshots/test_diagnostics.ambr | 2 + .../v2c/snapshots/test_diagnostics.ambr | 2 + .../vicare/snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + .../watttime/snapshots/test_diagnostics.ambr | 2 + .../webmin/snapshots/test_diagnostics.ambr | 2 + tests/components/webostv/test_diagnostics.py | 1 + .../whirlpool/snapshots/test_diagnostics.ambr | 2 + .../whois/snapshots/test_config_flow.ambr | 20 + .../workday/snapshots/test_diagnostics.ambr | 2 + .../wyoming/snapshots/test_config_flow.ambr | 12 + .../zha/snapshots/test_diagnostics.ambr | 2 + tests/snapshots/test_config_entries.ambr | 2 + tests/test_config_entries.py | 637 +++++++++++++++++- 95 files changed, 1771 insertions(+), 30 deletions(-) diff --git a/homeassistant/components/config/config_entries.py b/homeassistant/components/config/config_entries.py index da50f7e93a1..5794819995d 100644 --- a/homeassistant/components/config/config_entries.py +++ b/homeassistant/components/config/config_entries.py @@ -46,6 +46,13 @@ def async_setup(hass: HomeAssistant) -> bool: hass.http.register_view(OptionManagerFlowIndexView(hass.config_entries.options)) hass.http.register_view(OptionManagerFlowResourceView(hass.config_entries.options)) + hass.http.register_view( + SubentryManagerFlowIndexView(hass.config_entries.subentries) + ) + hass.http.register_view( + SubentryManagerFlowResourceView(hass.config_entries.subentries) + ) + websocket_api.async_register_command(hass, config_entries_get) websocket_api.async_register_command(hass, config_entry_disable) websocket_api.async_register_command(hass, config_entry_get_single) @@ -54,6 +61,9 @@ def async_setup(hass: HomeAssistant) -> bool: websocket_api.async_register_command(hass, config_entries_progress) websocket_api.async_register_command(hass, ignore_config_flow) + websocket_api.async_register_command(hass, config_subentry_delete) + websocket_api.async_register_command(hass, config_subentry_list) + return True @@ -285,6 +295,63 @@ class OptionManagerFlowResourceView( return await super().post(request, flow_id) +class SubentryManagerFlowIndexView( + FlowManagerIndexView[config_entries.ConfigSubentryFlowManager] +): + """View to create subentry flows.""" + + url = "/api/config/config_entries/subentries/flow" + name = "api:config:config_entries:subentries:flow" + + @require_admin( + error=Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT) + ) + @RequestDataValidator( + vol.Schema( + { + vol.Required("handler"): vol.All(vol.Coerce(tuple), (str, str)), + vol.Optional("show_advanced_options", default=False): cv.boolean, + }, + extra=vol.ALLOW_EXTRA, + ) + ) + async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response: + """Handle a POST request. + + handler in request is [entry_id, subentry_type]. 
+ """ + return await super()._post_impl(request, data) + + def get_context(self, data: dict[str, Any]) -> dict[str, Any]: + """Return context.""" + context = super().get_context(data) + context["source"] = config_entries.SOURCE_USER + return context + + +class SubentryManagerFlowResourceView( + FlowManagerResourceView[config_entries.ConfigSubentryFlowManager] +): + """View to interact with the subentry flow manager.""" + + url = "/api/config/config_entries/subentries/flow/{flow_id}" + name = "api:config:config_entries:subentries:flow:resource" + + @require_admin( + error=Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT) + ) + async def get(self, request: web.Request, /, flow_id: str) -> web.Response: + """Get the current state of a data_entry_flow.""" + return await super().get(request, flow_id) + + @require_admin( + error=Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT) + ) + async def post(self, request: web.Request, flow_id: str) -> web.Response: + """Handle a POST request.""" + return await super().post(request, flow_id) + + @websocket_api.require_admin @websocket_api.websocket_command({"type": "config_entries/flow/progress"}) def config_entries_progress( @@ -588,3 +655,62 @@ async def _async_matching_config_entries_json_fragments( ) or (filter_is_not_helper and entry.domain not in integrations) ] + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + "type": "config_entries/subentries/list", + "entry_id": str, + } +) +@websocket_api.async_response +async def config_subentry_list( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """List subentries of a config entry.""" + entry = get_entry(hass, connection, msg["entry_id"], msg["id"]) + if entry is None: + return + + result = [ + { + "subentry_id": subentry.subentry_id, + "title": subentry.title, + "unique_id": subentry.unique_id, + } + for subentry_id, subentry in entry.subentries.items() + ] + connection.send_result(msg["id"], result) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + "type": "config_entries/subentries/delete", + "entry_id": str, + "subentry_id": str, + } +) +@websocket_api.async_response +async def config_subentry_delete( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Delete a subentry of a config entry.""" + entry = get_entry(hass, connection, msg["entry_id"], msg["id"]) + if entry is None: + return + + try: + hass.config_entries.async_remove_subentry(entry, msg["subentry_id"]) + except config_entries.UnknownSubEntry: + connection.send_error( + msg["id"], websocket_api.const.ERR_NOT_FOUND, "Config subentry not found" + ) + return + + connection.send_result(msg["id"]) diff --git a/homeassistant/config_entries.py b/homeassistant/config_entries.py index ade4cd855ca..d34828f5e46 100644 --- a/homeassistant/config_entries.py +++ b/homeassistant/config_entries.py @@ -15,6 +15,7 @@ from collections.abc import ( ) from contextvars import ContextVar from copy import deepcopy +from dataclasses import dataclass, field from datetime import datetime from enum import Enum, StrEnum import functools @@ -22,7 +23,7 @@ from functools import cache import logging from random import randint from types import MappingProxyType -from typing import TYPE_CHECKING, Any, Generic, Self, cast +from typing import TYPE_CHECKING, Any, Generic, Self, TypedDict, cast from async_interrupt import interrupt from propcache import cached_property @@ 
-128,7 +129,7 @@ HANDLERS: Registry[str, type[ConfigFlow]] = Registry() STORAGE_KEY = "core.config_entries" STORAGE_VERSION = 1 -STORAGE_VERSION_MINOR = 4 +STORAGE_VERSION_MINOR = 5 SAVE_DELAY = 1 @@ -256,6 +257,10 @@ class UnknownEntry(ConfigError): """Unknown entry specified.""" +class UnknownSubEntry(ConfigError): + """Unknown subentry specified.""" + + class OperationNotAllowed(ConfigError): """Raised when a config entry operation is not allowed.""" @@ -300,6 +305,7 @@ class ConfigFlowResult(FlowResult[ConfigFlowContext, str], total=False): minor_version: int options: Mapping[str, Any] + subentries: Iterable[ConfigSubentryData] version: int @@ -313,6 +319,51 @@ def _validate_item(*, disabled_by: ConfigEntryDisabler | Any | None = None) -> N ) +class ConfigSubentryData(TypedDict): + """Container for configuration subentry data. + + Returned by integrations, a subentry_id will be assigned automatically. + """ + + data: Mapping[str, Any] + title: str + unique_id: str | None + + +class ConfigSubentryDataWithId(ConfigSubentryData): + """Container for configuration subentry data. + + This type is used when loading existing subentries from storage. + """ + + subentry_id: str + + +class SubentryFlowResult(FlowResult[FlowContext, tuple[str, str]], total=False): + """Typed result dict for subentry flow.""" + + unique_id: str | None + + +@dataclass(frozen=True, kw_only=True) +class ConfigSubentry: + """Container for a configuration subentry.""" + + data: MappingProxyType[str, Any] + subentry_id: str = field(default_factory=ulid_util.ulid_now) + title: str + unique_id: str | None + + def as_dict(self) -> ConfigSubentryDataWithId: + """Return dictionary version of this subentry.""" + return { + "data": dict(self.data), + "subentry_id": self.subentry_id, + "title": self.title, + "unique_id": self.unique_id, + } + + class ConfigEntry(Generic[_DataT]): """Hold a configuration entry.""" @@ -322,6 +373,7 @@ class ConfigEntry(Generic[_DataT]): data: MappingProxyType[str, Any] runtime_data: _DataT options: MappingProxyType[str, Any] + subentries: MappingProxyType[str, ConfigSubentry] unique_id: str | None state: ConfigEntryState reason: str | None @@ -337,6 +389,7 @@ class ConfigEntry(Generic[_DataT]): supports_remove_device: bool | None _supports_options: bool | None _supports_reconfigure: bool | None + _supported_subentries: tuple[str, ...] 
| None update_listeners: list[UpdateListenerType] _async_cancel_retry_setup: Callable[[], Any] | None _on_unload: list[Callable[[], Coroutine[Any, Any, None] | None]] | None @@ -366,6 +419,7 @@ class ConfigEntry(Generic[_DataT]): pref_disable_polling: bool | None = None, source: str, state: ConfigEntryState = ConfigEntryState.NOT_LOADED, + subentries_data: Iterable[ConfigSubentryData | ConfigSubentryDataWithId] | None, title: str, unique_id: str | None, version: int, @@ -391,6 +445,24 @@ class ConfigEntry(Generic[_DataT]): # Entry options _setter(self, "options", MappingProxyType(options or {})) + # Subentries + subentries_data = subentries_data or () + subentries = {} + for subentry_data in subentries_data: + subentry_kwargs = {} + if "subentry_id" in subentry_data: + # If subentry_data has key "subentry_id", we're loading from storage + subentry_kwargs["subentry_id"] = subentry_data["subentry_id"] # type: ignore[typeddict-item] + subentry = ConfigSubentry( + data=MappingProxyType(subentry_data["data"]), + title=subentry_data["title"], + unique_id=subentry_data.get("unique_id"), + **subentry_kwargs, + ) + subentries[subentry.subentry_id] = subentry + + _setter(self, "subentries", MappingProxyType(subentries)) + # Entry system options if pref_disable_new_entities is None: pref_disable_new_entities = False @@ -427,6 +499,9 @@ class ConfigEntry(Generic[_DataT]): # Supports reconfigure _setter(self, "_supports_reconfigure", None) + # Supports subentries + _setter(self, "_supported_subentries", None) + # Listeners to call on update _setter(self, "update_listeners", []) @@ -499,6 +574,18 @@ class ConfigEntry(Generic[_DataT]): ) return self._supports_reconfigure or False + @property + def supported_subentries(self) -> tuple[str, ...]: + """Return supported subentries.""" + if self._supported_subentries is None and ( + handler := HANDLERS.get(self.domain) + ): + # work out sub entries supported by the handler + object.__setattr__( + self, "_supported_subentries", handler.async_supported_subentries(self) + ) + return self._supported_subentries or () + def clear_state_cache(self) -> None: """Clear cached properties that are included in as_json_fragment.""" self.__dict__.pop("as_json_fragment", None) @@ -518,12 +605,14 @@ class ConfigEntry(Generic[_DataT]): "supports_remove_device": self.supports_remove_device or False, "supports_unload": self.supports_unload or False, "supports_reconfigure": self.supports_reconfigure, + "supported_subentries": self.supported_subentries, "pref_disable_new_entities": self.pref_disable_new_entities, "pref_disable_polling": self.pref_disable_polling, "disabled_by": self.disabled_by, "reason": self.reason, "error_reason_translation_key": self.error_reason_translation_key, "error_reason_translation_placeholders": self.error_reason_translation_placeholders, + "num_subentries": len(self.subentries), } return json_fragment(json_bytes(json_repr)) @@ -1018,6 +1107,7 @@ class ConfigEntry(Generic[_DataT]): "pref_disable_new_entities": self.pref_disable_new_entities, "pref_disable_polling": self.pref_disable_polling, "source": self.source, + "subentries": [subentry.as_dict() for subentry in self.subentries.values()], "title": self.title, "unique_id": self.unique_id, "version": self.version, @@ -1503,6 +1593,7 @@ class ConfigEntriesFlowManager( minor_version=result["minor_version"], options=result["options"], source=flow.context["source"], + subentries_data=result["subentries"], title=result["title"], unique_id=flow.unique_id, version=result["version"], @@ -1793,6 +1884,11 @@ 
class ConfigEntryStore(storage.Store[dict[str, list[dict[str, Any]]]]): for entry in data["entries"]: entry["discovery_keys"] = {} + if old_minor_version < 5: + # Version 1.5 adds config subentries + for entry in data["entries"]: + entry.setdefault("subentries", []) + if old_major_version > 1: raise NotImplementedError return data @@ -1809,6 +1905,7 @@ class ConfigEntries: self.hass = hass self.flow = ConfigEntriesFlowManager(hass, self, hass_config) self.options = OptionsFlowManager(hass) + self.subentries = ConfigSubentryFlowManager(hass) self._hass_config = hass_config self._entries = ConfigEntryItems(hass) self._store = ConfigEntryStore(hass) @@ -2011,6 +2108,7 @@ class ConfigEntries: pref_disable_new_entities=entry["pref_disable_new_entities"], pref_disable_polling=entry["pref_disable_polling"], source=entry["source"], + subentries_data=entry["subentries"], title=entry["title"], unique_id=entry["unique_id"], version=entry["version"], @@ -2170,6 +2268,44 @@ class ConfigEntries: If the entry was changed, the update_listeners are fired and this function returns True + If the entry was not changed, the update_listeners are + not fired and this function returns False + """ + return self._async_update_entry( + entry, + data=data, + discovery_keys=discovery_keys, + minor_version=minor_version, + options=options, + pref_disable_new_entities=pref_disable_new_entities, + pref_disable_polling=pref_disable_polling, + title=title, + unique_id=unique_id, + version=version, + ) + + @callback + def _async_update_entry( + self, + entry: ConfigEntry, + *, + data: Mapping[str, Any] | UndefinedType = UNDEFINED, + discovery_keys: MappingProxyType[str, tuple[DiscoveryKey, ...]] + | UndefinedType = UNDEFINED, + minor_version: int | UndefinedType = UNDEFINED, + options: Mapping[str, Any] | UndefinedType = UNDEFINED, + pref_disable_new_entities: bool | UndefinedType = UNDEFINED, + pref_disable_polling: bool | UndefinedType = UNDEFINED, + subentries: dict[str, ConfigSubentry] | UndefinedType = UNDEFINED, + title: str | UndefinedType = UNDEFINED, + unique_id: str | None | UndefinedType = UNDEFINED, + version: int | UndefinedType = UNDEFINED, + ) -> bool: + """Update a config entry.
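Taken together, the storage changes above mean every stored config entry now carries a "subentries" list: entries migrated from minor version 4 are backfilled with an empty list, and entries that own subentries serialize them through ConfigSubentry.as_dict(). A rough sketch of the resulting shapes, with keys abbreviated and all values invented for illustration:

# Hypothetical storage shapes; real entries carry many more keys.
migrated_entry = {
    "entry_id": "abcd1234",
    "domain": "example",
    "subentries": [],  # backfilled by the old_minor_version < 5 migration
}

entry_with_subentry = {
    "entry_id": "abcd1234",
    "domain": "example",
    "subentries": [
        {
            # Keys mirror ConfigSubentry.as_dict()
            "data": {"name": "Kitchen"},
            "subentry_id": "01JDXCVZ4N0000000000000000",  # ULID assigned automatically
            "title": "Kitchen",
            "unique_id": "kitchen",
        }
    ],
}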
+ + If the entry was changed, the update_listeners are + fired and this function returns True + If the entry was not changed, the update_listeners are not fired and this function returns False """ @@ -2232,6 +2368,11 @@ class ConfigEntries: changed = True _setter(entry, "options", MappingProxyType(options)) + if subentries is not UNDEFINED: + if entry.subentries != subentries: + changed = True + _setter(entry, "subentries", MappingProxyType(subentries)) + if not changed: return False @@ -2249,6 +2390,37 @@ class ConfigEntries: self._async_dispatch(ConfigEntryChange.UPDATED, entry) return True + @callback + def async_add_subentry(self, entry: ConfigEntry, subentry: ConfigSubentry) -> bool: + """Add a subentry to a config entry.""" + self._raise_if_subentry_unique_id_exists(entry, subentry.unique_id) + + return self._async_update_entry( + entry, + subentries=entry.subentries | {subentry.subentry_id: subentry}, + ) + + @callback + def async_remove_subentry(self, entry: ConfigEntry, subentry_id: str) -> bool: + """Remove a subentry from a config entry.""" + subentries = dict(entry.subentries) + try: + subentries.pop(subentry_id) + except KeyError as err: + raise UnknownSubEntry from err + + return self._async_update_entry(entry, subentries=subentries) + + def _raise_if_subentry_unique_id_exists( + self, entry: ConfigEntry, unique_id: str | None + ) -> None: + """Raise if a subentry with the same unique_id exists.""" + if unique_id is None: + return + for existing_subentry in entry.subentries.values(): + if existing_subentry.unique_id == unique_id: + raise data_entry_flow.AbortFlow("already_configured") + @callback def _async_dispatch( self, change_type: ConfigEntryChange, entry: ConfigEntry @@ -2585,6 +2757,20 @@ class ConfigFlow(ConfigEntryBaseFlow): """Return options flow support for this handler.""" return cls.async_get_options_flow is not ConfigFlow.async_get_options_flow + @staticmethod + @callback + def async_get_subentry_flow( + config_entry: ConfigEntry, subentry_type: str + ) -> ConfigSubentryFlow: + """Get the subentry flow for this handler.""" + raise NotImplementedError + + @classmethod + @callback + def async_supported_subentries(cls, config_entry: ConfigEntry) -> tuple[str, ...]: + """Return subentries supported by this handler.""" + return () + @callback def _async_abort_entries_match( self, match_dict: dict[str, Any] | None = None @@ -2893,6 +3079,7 @@ class ConfigFlow(ConfigEntryBaseFlow): description: str | None = None, description_placeholders: Mapping[str, str] | None = None, options: Mapping[str, Any] | None = None, + subentries: Iterable[ConfigSubentryData] | None = None, ) -> ConfigFlowResult: """Finish config flow and create a config entry.""" if self.source in {SOURCE_REAUTH, SOURCE_RECONFIGURE}: @@ -2912,6 +3099,7 @@ class ConfigFlow(ConfigEntryBaseFlow): result["minor_version"] = self.MINOR_VERSION result["options"] = options or {} + result["subentries"] = subentries or () result["version"] = self.VERSION return result @@ -3026,17 +3214,126 @@ class ConfigFlow(ConfigEntryBaseFlow): ) -class OptionsFlowManager( - data_entry_flow.FlowManager[ConfigFlowContext, ConfigFlowResult] -): - """Flow to set options for a configuration entry.""" +class _ConfigSubFlowManager: + """Mixin class for flow managers which manage flows tied to a config entry.""" - _flow_result = ConfigFlowResult + hass: HomeAssistant def _async_get_config_entry(self, config_entry_id: str) -> ConfigEntry: """Return config entry or raise if not found.""" return 
self.hass.config_entries.async_get_known_entry(config_entry_id) + +class ConfigSubentryFlowManager( + data_entry_flow.FlowManager[FlowContext, SubentryFlowResult, tuple[str, str]], + _ConfigSubFlowManager, +): + """Manage all the config subentry flows that are in progress.""" + + _flow_result = SubentryFlowResult + + async def async_create_flow( + self, + handler_key: tuple[str, str], + *, + context: FlowContext | None = None, + data: dict[str, Any] | None = None, + ) -> ConfigSubentryFlow: + """Create a subentry flow for a config entry. + + The entry_id and flow.handler[0] are the same; this maps the config entry to its flow. + """ + if not context or "source" not in context: + raise KeyError("Context not set or doesn't have a source set") + + entry_id, subentry_type = handler_key + entry = self._async_get_config_entry(entry_id) + handler = await _async_get_flow_handler(self.hass, entry.domain, {}) + if subentry_type not in handler.async_supported_subentries(entry): + raise data_entry_flow.UnknownHandler( + f"Config entry '{entry.domain}' does not support subentry '{subentry_type}'" + ) + subentry_flow = handler.async_get_subentry_flow(entry, subentry_type) + subentry_flow.init_step = context["source"] + return subentry_flow + + async def async_finish_flow( + self, + flow: data_entry_flow.FlowHandler[ + FlowContext, SubentryFlowResult, tuple[str, str] + ], + result: SubentryFlowResult, + ) -> SubentryFlowResult: + """Finish a subentry flow and add a new subentry to the configuration entry. + + The flow.handler[0] and the entry_id are the same; this maps the flow to its config entry. + """ + flow = cast(ConfigSubentryFlow, flow) + + if result["type"] != data_entry_flow.FlowResultType.CREATE_ENTRY: + return result + + entry_id = flow.handler[0] + entry = self.hass.config_entries.async_get_entry(entry_id) + if entry is None: + raise UnknownEntry(entry_id) + + unique_id = result.get("unique_id") + if unique_id is not None and not isinstance(unique_id, str): + raise HomeAssistantError("unique_id must be a string") + + self.hass.config_entries.async_add_subentry( + entry, + ConfigSubentry( + data=MappingProxyType(result["data"]), + title=result["title"], + unique_id=unique_id, + ), + ) + + result["result"] = True + return result + + +class ConfigSubentryFlow( + data_entry_flow.FlowHandler[FlowContext, SubentryFlowResult, tuple[str, str]] +): + """Base class for config subentry flows.""" + + _flow_result = SubentryFlowResult + handler: tuple[str, str] + + @callback + def async_create_entry( + self, + *, + title: str | None = None, + data: Mapping[str, Any], + description: str | None = None, + description_placeholders: Mapping[str, str] | None = None, + unique_id: str | None = None, + ) -> SubentryFlowResult: + """Finish the subentry flow and create a config subentry.""" + result = super().async_create_entry( + title=title, + data=data, + description=description, + description_placeholders=description_placeholders, + ) + + result["unique_id"] = unique_id + + return result + + +class OptionsFlowManager( + data_entry_flow.FlowManager[ConfigFlowContext, ConfigFlowResult], + _ConfigSubFlowManager, +): + """Manage all the config entry option flows that are in progress.""" + + _flow_result = ConfigFlowResult + async def async_create_flow( self, handler_key: str, @@ -3046,7 +3343,7 @@ class OptionsFlowManager( ) -> OptionsFlow: """Create an options flow for a config entry. - Entry_id and flow.handler is the same thing to map entry with flow. + The entry_id and the flow.handler are the same; this maps the config entry to its flow.
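For integration authors, the pieces above combine roughly as sketched below. This is a minimal illustration only, assuming a hypothetical "example" integration with a single "location" subentry type; the domain, class names and form schema are invented for the example and are not part of this change.

from __future__ import annotations

from typing import Any

import voluptuous as vol

from homeassistant import config_entries
from homeassistant.core import callback


class LocationSubentryFlowHandler(config_entries.ConfigSubentryFlow):
    """Handle a hypothetical 'location' subentry flow."""

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> config_entries.SubentryFlowResult:
        """Ask for a location name and store it as a subentry."""
        if user_input is not None:
            return self.async_create_entry(
                title=user_input["name"],
                data=user_input,
                unique_id=user_input["name"],
            )
        return self.async_show_form(
            step_id="user", data_schema=vol.Schema({vol.Required("name"): str})
        )


class ExampleConfigFlow(config_entries.ConfigFlow, domain="example"):
    """Hypothetical config flow opting into subentries."""

    @classmethod
    @callback
    def async_supported_subentries(
        cls, config_entry: config_entries.ConfigEntry
    ) -> tuple[str, ...]:
        # Advertise the subentry types this config entry supports.
        return ("location",)

    @staticmethod
    @callback
    def async_get_subentry_flow(
        config_entry: config_entries.ConfigEntry, subentry_type: str
    ) -> config_entries.ConfigSubentryFlow:
        # Hand back a fresh handler for the requested subentry type.
        return LocationSubentryFlowHandler()

The user-facing strings for such a flow would live under the new config_subentries section of the integration's strings.json, which the hassfest schema change further below starts validating.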
""" entry = self._async_get_config_entry(handler_key) handler = await _async_get_flow_handler(self.hass, entry.domain, {}) @@ -3062,7 +3359,7 @@ class OptionsFlowManager( This method is called when a flow step returns FlowResultType.ABORT or FlowResultType.CREATE_ENTRY. - Flow.handler and entry_id is the same thing to map flow with entry. + The flow.handler and the entry_id is the same thing to map flow with entry. """ flow = cast(OptionsFlow, flow) diff --git a/homeassistant/helpers/data_entry_flow.py b/homeassistant/helpers/data_entry_flow.py index adb2062a8ea..e98061d50b7 100644 --- a/homeassistant/helpers/data_entry_flow.py +++ b/homeassistant/helpers/data_entry_flow.py @@ -18,7 +18,7 @@ from . import config_validation as cv _FlowManagerT = TypeVar( "_FlowManagerT", - bound=data_entry_flow.FlowManager[Any, Any], + bound=data_entry_flow.FlowManager[Any, Any, Any], default=data_entry_flow.FlowManager, ) @@ -71,7 +71,7 @@ class FlowManagerIndexView(_BaseFlowManagerView[_FlowManagerT]): async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response: """Initialize a POST request. - Override `_post_impl` in subclasses which need + Override `post` and call `_post_impl` in subclasses which need to implement their own `RequestDataValidator` """ return await self._post_impl(request, data) diff --git a/script/hassfest/translations.py b/script/hassfest/translations.py index 2fb70b6e0be..078c649666d 100644 --- a/script/hassfest/translations.py +++ b/script/hassfest/translations.py @@ -285,6 +285,15 @@ def gen_strings_schema(config: Config, integration: Integration) -> vol.Schema: "user" if integration.integration_type == "helper" else None ), ), + vol.Optional("config_subentries"): cv.schema_with_slug_keys( + gen_data_entry_schema( + config=config, + integration=integration, + flow_title=REQUIRED, + require_step_title=False, + ), + slug_validator=vol.Any("_", cv.slug), + ), vol.Optional("options"): gen_data_entry_schema( config=config, integration=integration, diff --git a/tests/common.py b/tests/common.py index ac6f10b8c44..d2b0dff8faa 100644 --- a/tests/common.py +++ b/tests/common.py @@ -1000,6 +1000,7 @@ class MockConfigEntry(config_entries.ConfigEntry): reason=None, source=config_entries.SOURCE_USER, state=None, + subentries_data=None, title="Mock Title", unique_id=None, version=1, @@ -1016,6 +1017,7 @@ class MockConfigEntry(config_entries.ConfigEntry): "options": options or {}, "pref_disable_new_entities": pref_disable_new_entities, "pref_disable_polling": pref_disable_polling, + "subentries_data": subentries_data or (), "title": title, "unique_id": unique_id, "version": version, diff --git a/tests/components/aemet/snapshots/test_diagnostics.ambr b/tests/components/aemet/snapshots/test_diagnostics.ambr index 54546507dfa..1e09a372352 100644 --- a/tests/components/aemet/snapshots/test_diagnostics.ambr +++ b/tests/components/aemet/snapshots/test_diagnostics.ambr @@ -21,6 +21,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/airly/snapshots/test_diagnostics.ambr b/tests/components/airly/snapshots/test_diagnostics.ambr index ec501b2fd7e..1c760eaec52 100644 --- a/tests/components/airly/snapshots/test_diagnostics.ambr +++ b/tests/components/airly/snapshots/test_diagnostics.ambr @@ -19,6 +19,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 
'Home', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/airnow/snapshots/test_diagnostics.ambr b/tests/components/airnow/snapshots/test_diagnostics.ambr index 3dd4788dc61..73ba6a7123f 100644 --- a/tests/components/airnow/snapshots/test_diagnostics.ambr +++ b/tests/components/airnow/snapshots/test_diagnostics.ambr @@ -35,6 +35,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 2, diff --git a/tests/components/airvisual/snapshots/test_diagnostics.ambr b/tests/components/airvisual/snapshots/test_diagnostics.ambr index 606d6082351..0dbdef1d508 100644 --- a/tests/components/airvisual/snapshots/test_diagnostics.ambr +++ b/tests/components/airvisual/snapshots/test_diagnostics.ambr @@ -47,6 +47,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 3, diff --git a/tests/components/airvisual_pro/snapshots/test_diagnostics.ambr b/tests/components/airvisual_pro/snapshots/test_diagnostics.ambr index cb1d3a7aee7..113db6e3b96 100644 --- a/tests/components/airvisual_pro/snapshots/test_diagnostics.ambr +++ b/tests/components/airvisual_pro/snapshots/test_diagnostics.ambr @@ -101,6 +101,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': 'XXXXXXX', 'version': 1, diff --git a/tests/components/airzone/snapshots/test_diagnostics.ambr b/tests/components/airzone/snapshots/test_diagnostics.ambr index fb4f6530b1e..39668e3d19f 100644 --- a/tests/components/airzone/snapshots/test_diagnostics.ambr +++ b/tests/components/airzone/snapshots/test_diagnostics.ambr @@ -287,6 +287,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr b/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr index c6ad36916bf..4bd7bfaccdd 100644 --- a/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr +++ b/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr @@ -101,6 +101,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': 'installation1', 'version': 1, diff --git a/tests/components/ambient_station/snapshots/test_diagnostics.ambr b/tests/components/ambient_station/snapshots/test_diagnostics.ambr index 2f90b09d39f..07db19101ab 100644 --- a/tests/components/ambient_station/snapshots/test_diagnostics.ambr +++ b/tests/components/ambient_station/snapshots/test_diagnostics.ambr @@ -17,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 2, diff --git a/tests/components/axis/snapshots/test_diagnostics.ambr b/tests/components/axis/snapshots/test_diagnostics.ambr index ebd0061f416..b475c796d2b 100644 --- a/tests/components/axis/snapshots/test_diagnostics.ambr +++ b/tests/components/axis/snapshots/test_diagnostics.ambr @@ -47,6 +47,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 
'version': 3, diff --git a/tests/components/bang_olufsen/snapshots/test_diagnostics.ambr b/tests/components/bang_olufsen/snapshots/test_diagnostics.ambr index e9540b5cec6..d7f9a045921 100644 --- a/tests/components/bang_olufsen/snapshots/test_diagnostics.ambr +++ b/tests/components/bang_olufsen/snapshots/test_diagnostics.ambr @@ -18,6 +18,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Beosound Balance-11111111', 'unique_id': '11111111', 'version': 1, diff --git a/tests/components/blink/snapshots/test_diagnostics.ambr b/tests/components/blink/snapshots/test_diagnostics.ambr index edc2879a66b..54df2b48cdb 100644 --- a/tests/components/blink/snapshots/test_diagnostics.ambr +++ b/tests/components/blink/snapshots/test_diagnostics.ambr @@ -48,6 +48,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 3, diff --git a/tests/components/braviatv/snapshots/test_diagnostics.ambr b/tests/components/braviatv/snapshots/test_diagnostics.ambr index cd29c647df7..de76c00cd23 100644 --- a/tests/components/braviatv/snapshots/test_diagnostics.ambr +++ b/tests/components/braviatv/snapshots/test_diagnostics.ambr @@ -19,6 +19,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': 'very_unique_string', 'version': 1, diff --git a/tests/components/co2signal/snapshots/test_diagnostics.ambr b/tests/components/co2signal/snapshots/test_diagnostics.ambr index 9218e7343ec..4159c8ec1a1 100644 --- a/tests/components/co2signal/snapshots/test_diagnostics.ambr +++ b/tests/components/co2signal/snapshots/test_diagnostics.ambr @@ -17,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/coinbase/snapshots/test_diagnostics.ambr b/tests/components/coinbase/snapshots/test_diagnostics.ambr index 51bd946f140..3eab18fb9f3 100644 --- a/tests/components/coinbase/snapshots/test_diagnostics.ambr +++ b/tests/components/coinbase/snapshots/test_diagnostics.ambr @@ -44,6 +44,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': None, 'version': 1, diff --git a/tests/components/comelit/snapshots/test_diagnostics.ambr b/tests/components/comelit/snapshots/test_diagnostics.ambr index 58ce74035f9..877f48a4611 100644 --- a/tests/components/comelit/snapshots/test_diagnostics.ambr +++ b/tests/components/comelit/snapshots/test_diagnostics.ambr @@ -71,6 +71,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, @@ -135,6 +137,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/config/test_config_entries.py b/tests/components/config/test_config_entries.py index 4a3bff47d89..4d37f3c871b 100644 --- a/tests/components/config/test_config_entries.py +++ b/tests/components/config/test_config_entries.py @@ -137,11 +137,13 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, 
"error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": core_ce.ConfigEntryState.NOT_LOADED.value, + "supported_subentries": [], "supports_options": True, "supports_reconfigure": False, "supports_remove_device": False, @@ -155,11 +157,13 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", "source": "bla2", "state": core_ce.ConfigEntryState.SETUP_ERROR.value, + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -173,11 +177,13 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": core_ce.ConfigEntryState.NOT_LOADED.value, + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -191,11 +197,13 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla4", "state": core_ce.ConfigEntryState.NOT_LOADED.value, + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -209,11 +217,13 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla5", "state": core_ce.ConfigEntryState.NOT_LOADED.value, + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -571,11 +581,13 @@ async def test_create_account(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": core_ce.SOURCE_USER, "state": core_ce.ConfigEntryState.LOADED.value, + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -586,6 +598,7 @@ async def test_create_account(hass: HomeAssistant, client: TestClient) -> None: "description_placeholders": None, "options": {}, "minor_version": 1, + "subentries": [], } @@ -654,11 +667,13 @@ async def test_two_step_flow(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": core_ce.SOURCE_USER, "state": core_ce.ConfigEntryState.LOADED.value, + "supported_subentries": [], 
"supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -669,6 +684,7 @@ async def test_two_step_flow(hass: HomeAssistant, client: TestClient) -> None: "description_placeholders": None, "options": {}, "minor_version": 1, + "subentries": [], } @@ -1088,6 +1104,273 @@ async def test_options_flow_with_invalid_data( assert data == {"errors": {"choices": "invalid is not a valid option"}} +async def test_subentry_flow(hass: HomeAssistant, client) -> None: + """Test we can start a subentry flow.""" + + class TestFlow(core_ce.ConfigFlow): + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + class SubentryFlowHandler(core_ce.ConfigSubentryFlow): + async def async_step_init(self, user_input=None): + raise NotImplementedError + + async def async_step_user(self, user_input=None): + schema = OrderedDict() + schema[vol.Required("enabled")] = bool + return self.async_show_form( + step_id="user", + data_schema=schema, + description_placeholders={"enabled": "Set to true to be true"}, + ) + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries(cls, config_entry): + return ("test",) + + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + MockConfigEntry( + domain="test", + entry_id="test1", + source="bla", + ).add_to_hass(hass) + entry = hass.config_entries.async_entries()[0] + + with patch.dict(HANDLERS, {"test": TestFlow}): + url = "/api/config/config_entries/subentries/flow" + resp = await client.post(url, json={"handler": [entry.entry_id, "test"]}) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + data.pop("flow_id") + assert data == { + "type": "form", + "handler": ["test1", "test"], + "step_id": "user", + "data_schema": [{"name": "enabled", "required": True, "type": "boolean"}], + "description_placeholders": {"enabled": "Set to true to be true"}, + "errors": None, + "last_step": None, + "preview": None, + } + + +@pytest.mark.parametrize( + ("endpoint", "method"), + [ + ("/api/config/config_entries/subentries/flow", "post"), + ("/api/config/config_entries/subentries/flow/1", "get"), + ("/api/config/config_entries/subentries/flow/1", "post"), + ], +) +async def test_subentry_flow_unauth( + hass: HomeAssistant, client, hass_admin_user: MockUser, endpoint: str, method: str +) -> None: + """Test unauthorized on subentry flow.""" + + class TestFlow(core_ce.ConfigFlow): + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + class SubentryFlowHandler(core_ce.ConfigSubentryFlow): + async def async_step_init(self, user_input=None): + schema = OrderedDict() + schema[vol.Required("enabled")] = bool + return self.async_show_form( + step_id="user", + data_schema=schema, + description_placeholders={"enabled": "Set to true to be true"}, + ) + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries(cls, config_entry): + return ("test",) + + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + MockConfigEntry( + domain="test", + entry_id="test1", + source="bla", + ).add_to_hass(hass) + entry = hass.config_entries.async_entries()[0] + + hass_admin_user.groups = [] + + with patch.dict(HANDLERS, {"test": TestFlow}): + resp = await getattr(client, method)(endpoint, json={"handler": entry.entry_id}) + + assert resp.status == HTTPStatus.UNAUTHORIZED + + +async def test_two_step_subentry_flow(hass: HomeAssistant, client) -> None: + 
"""Test we can finish a two step subentry flow.""" + mock_integration( + hass, MockModule("test", async_setup_entry=AsyncMock(return_value=True)) + ) + mock_platform(hass, "test.config_flow", None) + + class TestFlow(core_ce.ConfigFlow): + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + class SubentryFlowHandler(core_ce.ConfigSubentryFlow): + async def async_step_user(self, user_input=None): + return await self.async_step_finish() + + async def async_step_finish(self, user_input=None): + if user_input: + return self.async_create_entry( + title="Mock title", data=user_input, unique_id="test" + ) + + return self.async_show_form( + step_id="finish", data_schema=vol.Schema({"enabled": bool}) + ) + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries(cls, config_entry): + return ("test",) + + MockConfigEntry( + domain="test", + entry_id="test1", + source="bla", + ).add_to_hass(hass) + entry = hass.config_entries.async_entries()[0] + + with patch.dict(HANDLERS, {"test": TestFlow}): + url = "/api/config/config_entries/subentries/flow" + resp = await client.post(url, json={"handler": [entry.entry_id, "test"]}) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + flow_id = data["flow_id"] + expected_data = { + "data_schema": [{"name": "enabled", "type": "boolean"}], + "description_placeholders": None, + "errors": None, + "flow_id": flow_id, + "handler": ["test1", "test"], + "last_step": None, + "preview": None, + "step_id": "finish", + "type": "form", + } + assert data == expected_data + + resp = await client.get(f"/api/config/config_entries/subentries/flow/{flow_id}") + assert resp.status == HTTPStatus.OK + data = await resp.json() + assert data == expected_data + + resp = await client.post( + f"/api/config/config_entries/subentries/flow/{flow_id}", + json={"enabled": True}, + ) + assert resp.status == HTTPStatus.OK + data = await resp.json() + assert data == { + "description_placeholders": None, + "description": None, + "flow_id": flow_id, + "handler": ["test1", "test"], + "title": "Mock title", + "type": "create_entry", + "unique_id": "test", + } + + +async def test_subentry_flow_with_invalid_data(hass: HomeAssistant, client) -> None: + """Test a subentry flow with invalid_data.""" + mock_integration( + hass, MockModule("test", async_setup_entry=AsyncMock(return_value=True)) + ) + mock_platform(hass, "test.config_flow", None) + + class TestFlow(core_ce.ConfigFlow): + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + class SubentryFlowHandler(core_ce.ConfigSubentryFlow): + async def async_step_user(self, user_input=None): + return self.async_show_form( + step_id="finish", + data_schema=vol.Schema( + { + vol.Required( + "choices", default=["invalid", "valid"] + ): cv.multi_select({"valid": "Valid"}) + } + ), + ) + + async def async_step_finish(self, user_input=None): + return self.async_create_entry( + title="Enable disable", data=user_input + ) + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries(cls, config_entry): + return ("test",) + + MockConfigEntry( + domain="test", + entry_id="test1", + source="bla", + ).add_to_hass(hass) + entry = hass.config_entries.async_entries()[0] + + with patch.dict(HANDLERS, {"test": TestFlow}): + url = "/api/config/config_entries/subentries/flow" + resp = await client.post(url, json={"handler": [entry.entry_id, "test"]}) + + assert resp.status == HTTPStatus.OK + data = await 
resp.json() + flow_id = data.pop("flow_id") + assert data == { + "type": "form", + "handler": ["test1", "test"], + "step_id": "finish", + "data_schema": [ + { + "default": ["invalid", "valid"], + "name": "choices", + "options": {"valid": "Valid"}, + "required": True, + "type": "multi_select", + } + ], + "description_placeholders": None, + "errors": None, + "last_step": None, + "preview": None, + } + + with patch.dict(HANDLERS, {"test": TestFlow}): + resp = await client.post( + f"/api/config/config_entries/subentries/flow/{flow_id}", + json={"choices": ["valid", "invalid"]}, + ) + assert resp.status == HTTPStatus.BAD_REQUEST + data = await resp.json() + assert data == {"errors": {"choices": "invalid is not a valid option"}} + + @pytest.mark.usefixtures("freezer") async def test_get_single( hass: HomeAssistant, hass_ws_client: WebSocketGenerator @@ -1120,11 +1403,13 @@ async def test_get_single( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "user", "state": "loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1480,11 +1765,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1499,11 +1786,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", "source": "bla2", "state": "setup_error", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1518,11 +1807,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1537,11 +1828,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla4", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1556,11 +1849,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla5", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1586,11 +1881,13 @@ async def 
test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1615,11 +1912,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla4", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1634,11 +1933,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla5", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1663,11 +1964,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1682,11 +1985,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1717,11 +2022,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1736,11 +2043,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", "source": "bla2", "state": "setup_error", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1755,11 +2064,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1774,11 +2085,13 @@ async def 
test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla4", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1793,11 +2106,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla5", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1900,11 +2215,13 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": created, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1922,11 +2239,13 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": created, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", "source": "bla2", "state": "setup_error", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1944,11 +2263,13 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": created, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1972,11 +2293,13 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": modified, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2001,11 +2324,13 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": modified, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2029,11 +2354,13 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": entry.modified_at.timestamp(), + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2119,11 +2446,13 @@ async def 
test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": created, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2141,11 +2470,13 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": created, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2171,11 +2502,13 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": modified, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2197,11 +2530,13 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": modified, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2227,11 +2562,13 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": modified, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2255,11 +2592,13 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": entry.modified_at.timestamp(), + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2470,3 +2809,133 @@ async def test_does_not_support_reconfigure( response == '{"message":"Handler ConfigEntriesFlowManager doesn\'t support step reconfigure"}' ) + + +async def test_list_subentries( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test that we can list subentries.""" + assert await async_setup_component(hass, "config", {}) + ws_client = await hass_ws_client(hass) + + entry = MockConfigEntry( + domain="test", + state=core_ce.ConfigEntryState.LOADED, + subentries_data=[ + core_ce.ConfigSubentryData( + data={"test": "test"}, + subentry_id="mock_id", + title="Mock title", + unique_id="test", + ) + ], + ) + entry.add_to_hass(hass) + + assert entry.pref_disable_new_entities is False + assert entry.pref_disable_polling is False + + await ws_client.send_json_auto_id( + { + "type": "config_entries/subentries/list", + "entry_id": 
entry.entry_id, + } + ) + response = await ws_client.receive_json() + + assert response["success"] + assert response["result"] == [ + {"subentry_id": "mock_id", "title": "Mock title", "unique_id": "test"}, + ] + + # Try listing subentries for an unknown entry + await ws_client.send_json_auto_id( + { + "type": "config_entries/subentries/list", + "entry_id": "no_such_entry", + } + ) + response = await ws_client.receive_json() + + assert not response["success"] + assert response["error"] == { + "code": "not_found", + "message": "Config entry not found", + } + + +async def test_delete_subentry( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test that we can delete a subentry.""" + assert await async_setup_component(hass, "config", {}) + ws_client = await hass_ws_client(hass) + + entry = MockConfigEntry( + domain="test", + state=core_ce.ConfigEntryState.LOADED, + subentries_data=[ + core_ce.ConfigSubentryData( + data={"test": "test"}, subentry_id="mock_id", title="Mock title" + ) + ], + ) + entry.add_to_hass(hass) + + assert entry.pref_disable_new_entities is False + assert entry.pref_disable_polling is False + + await ws_client.send_json_auto_id( + { + "type": "config_entries/subentries/delete", + "entry_id": entry.entry_id, + "subentry_id": "mock_id", + } + ) + response = await ws_client.receive_json() + + assert response["success"] + assert response["result"] is None + + await ws_client.send_json_auto_id( + { + "type": "config_entries/subentries/list", + "entry_id": entry.entry_id, + } + ) + response = await ws_client.receive_json() + + assert response["success"] + assert response["result"] == [] + + # Try deleting the subentry again + await ws_client.send_json_auto_id( + { + "type": "config_entries/subentries/delete", + "entry_id": entry.entry_id, + "subentry_id": "mock_id", + } + ) + response = await ws_client.receive_json() + + assert not response["success"] + assert response["error"] == { + "code": "not_found", + "message": "Config subentry not found", + } + + # Try deleting subentry from an unknown entry + await ws_client.send_json_auto_id( + { + "type": "config_entries/subentries/delete", + "entry_id": "no_such_entry", + "subentry_id": "mock_id", + } + ) + response = await ws_client.receive_json() + + assert not response["success"] + assert response["error"] == { + "code": "not_found", + "message": "Config entry not found", + } diff --git a/tests/components/deconz/snapshots/test_diagnostics.ambr b/tests/components/deconz/snapshots/test_diagnostics.ambr index 1ca674a4fbe..20558b4bbbd 100644 --- a/tests/components/deconz/snapshots/test_diagnostics.ambr +++ b/tests/components/deconz/snapshots/test_diagnostics.ambr @@ -21,6 +21,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr b/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr index abedc128756..0e507ca0b28 100644 --- a/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr +++ b/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr @@ -47,6 +47,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '123456', 'version': 1, diff --git a/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr 
b/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr index 3da8c76c2b4..8fe6c7c2293 100644 --- a/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr +++ b/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr @@ -32,6 +32,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/dsmr_reader/snapshots/test_diagnostics.ambr b/tests/components/dsmr_reader/snapshots/test_diagnostics.ambr index d407fe2dc5b..0a46dd7f476 100644 --- a/tests/components/dsmr_reader/snapshots/test_diagnostics.ambr +++ b/tests/components/dsmr_reader/snapshots/test_diagnostics.ambr @@ -17,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'dsmr_reader', 'unique_id': 'UNIQUE_TEST_ID', 'version': 1, diff --git a/tests/components/ecovacs/snapshots/test_diagnostics.ambr b/tests/components/ecovacs/snapshots/test_diagnostics.ambr index 38c8a9a5ab9..f9540e06038 100644 --- a/tests/components/ecovacs/snapshots/test_diagnostics.ambr +++ b/tests/components/ecovacs/snapshots/test_diagnostics.ambr @@ -17,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': None, 'version': 1, @@ -70,6 +72,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': None, 'version': 1, diff --git a/tests/components/energyzero/snapshots/test_config_flow.ambr b/tests/components/energyzero/snapshots/test_config_flow.ambr index 72e504c97c8..88b0af6dc7b 100644 --- a/tests/components/energyzero/snapshots/test_config_flow.ambr +++ b/tests/components/energyzero/snapshots/test_config_flow.ambr @@ -28,10 +28,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'EnergyZero', 'unique_id': 'energyzero', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'EnergyZero', 'type': , 'version': 1, diff --git a/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr b/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr index 76835098f27..3cacd3a8518 100644 --- a/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr +++ b/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr @@ -20,6 +20,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, @@ -454,6 +456,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, @@ -928,6 +932,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/esphome/snapshots/test_diagnostics.ambr b/tests/components/esphome/snapshots/test_diagnostics.ambr index 4f7ea679b20..8f1711e829e 100644 --- a/tests/components/esphome/snapshots/test_diagnostics.ambr +++ b/tests/components/esphome/snapshots/test_diagnostics.ambr @@ -20,6 +20,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + 
]), 'title': 'ESPHome Device', 'unique_id': '11:22:33:44:55:aa', 'version': 1, diff --git a/tests/components/esphome/test_diagnostics.py b/tests/components/esphome/test_diagnostics.py index 832e7d6572f..0beeae71df3 100644 --- a/tests/components/esphome/test_diagnostics.py +++ b/tests/components/esphome/test_diagnostics.py @@ -79,6 +79,7 @@ async def test_diagnostics_with_bluetooth( "pref_disable_new_entities": False, "pref_disable_polling": False, "source": "user", + "subentries": [], "title": "Mock Title", "unique_id": "11:22:33:44:55:aa", "version": 1, diff --git a/tests/components/forecast_solar/snapshots/test_init.ambr b/tests/components/forecast_solar/snapshots/test_init.ambr index 6ae4c2f6198..c0db54c2d4e 100644 --- a/tests/components/forecast_solar/snapshots/test_init.ambr +++ b/tests/components/forecast_solar/snapshots/test_init.ambr @@ -23,6 +23,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Green House', 'unique_id': 'unique', 'version': 2, diff --git a/tests/components/fritz/snapshots/test_diagnostics.ambr b/tests/components/fritz/snapshots/test_diagnostics.ambr index 53f7093a21b..9b5b8c9353a 100644 --- a/tests/components/fritz/snapshots/test_diagnostics.ambr +++ b/tests/components/fritz/snapshots/test_diagnostics.ambr @@ -61,6 +61,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/fronius/snapshots/test_diagnostics.ambr b/tests/components/fronius/snapshots/test_diagnostics.ambr index 010de06e276..b112839835a 100644 --- a/tests/components/fronius/snapshots/test_diagnostics.ambr +++ b/tests/components/fronius/snapshots/test_diagnostics.ambr @@ -17,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/fyta/snapshots/test_diagnostics.ambr b/tests/components/fyta/snapshots/test_diagnostics.ambr index eb19797e5b1..f1792cb7535 100644 --- a/tests/components/fyta/snapshots/test_diagnostics.ambr +++ b/tests/components/fyta/snapshots/test_diagnostics.ambr @@ -19,6 +19,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'fyta_user', 'unique_id': None, 'version': 1, diff --git a/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr b/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr index 6d521b1f2c8..10f23759fae 100644 --- a/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr +++ b/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr @@ -66,10 +66,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'bluetooth', + 'subentries': list([ + ]), 'title': 'Gardena Water Computer', 'unique_id': '00000000-0000-0000-0000-000000000001', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Gardena Water Computer', 'type': , 'version': 1, @@ -223,10 +227,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Gardena Water Computer', 'unique_id': '00000000-0000-0000-0000-000000000001', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Gardena Water Computer', 'type': , 'version': 1, diff --git a/tests/components/gios/snapshots/test_diagnostics.ambr 
b/tests/components/gios/snapshots/test_diagnostics.ambr index 71e0afdc495..890edc00482 100644 --- a/tests/components/gios/snapshots/test_diagnostics.ambr +++ b/tests/components/gios/snapshots/test_diagnostics.ambr @@ -17,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Home', 'unique_id': '123', 'version': 1, diff --git a/tests/components/goodwe/snapshots/test_diagnostics.ambr b/tests/components/goodwe/snapshots/test_diagnostics.ambr index f52e47688e8..40ed22195d5 100644 --- a/tests/components/goodwe/snapshots/test_diagnostics.ambr +++ b/tests/components/goodwe/snapshots/test_diagnostics.ambr @@ -17,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/google_assistant/snapshots/test_diagnostics.ambr b/tests/components/google_assistant/snapshots/test_diagnostics.ambr index edbbdb1ba28..1ecedbd1173 100644 --- a/tests/components/google_assistant/snapshots/test_diagnostics.ambr +++ b/tests/components/google_assistant/snapshots/test_diagnostics.ambr @@ -15,6 +15,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'import', + 'subentries': list([ + ]), 'title': '1234', 'unique_id': '1234', 'version': 1, diff --git a/tests/components/guardian/test_diagnostics.py b/tests/components/guardian/test_diagnostics.py index faba2103000..4487d0b6ac6 100644 --- a/tests/components/guardian/test_diagnostics.py +++ b/tests/components/guardian/test_diagnostics.py @@ -42,6 +42,7 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, + "subentries": [], }, "data": { "valve_controller": { diff --git a/tests/components/homewizard/snapshots/test_config_flow.ambr b/tests/components/homewizard/snapshots/test_config_flow.ambr index 0a301fc3941..71e70f3a153 100644 --- a/tests/components/homewizard/snapshots/test_config_flow.ambr +++ b/tests/components/homewizard/snapshots/test_config_flow.ambr @@ -30,10 +30,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'zeroconf', + 'subentries': list([ + ]), 'title': 'P1 meter', 'unique_id': 'HWE-P1_5c2fafabcdef', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'P1 meter', 'type': , 'version': 1, @@ -74,10 +78,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'zeroconf', + 'subentries': list([ + ]), 'title': 'P1 meter', 'unique_id': 'HWE-P1_5c2fafabcdef', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'P1 meter', 'type': , 'version': 1, @@ -118,10 +126,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'zeroconf', + 'subentries': list([ + ]), 'title': 'Energy Socket', 'unique_id': 'HWE-SKT_5c2fafabcdef', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Energy Socket', 'type': , 'version': 1, @@ -158,10 +170,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'P1 meter', 'unique_id': 'HWE-P1_5c2fafabcdef', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'P1 meter', 'type': , 'version': 1, diff --git a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr index a0bb8302fcc..ce9fc9ac01a 100644 --- a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr +++ 
b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr @@ -190,6 +190,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Husqvarna Automower of Erika Mustermann', 'unique_id': '123', 'version': 1, diff --git a/tests/components/imgw_pib/snapshots/test_diagnostics.ambr b/tests/components/imgw_pib/snapshots/test_diagnostics.ambr index 494980ba4ce..f15fc706d7e 100644 --- a/tests/components/imgw_pib/snapshots/test_diagnostics.ambr +++ b/tests/components/imgw_pib/snapshots/test_diagnostics.ambr @@ -15,6 +15,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'River Name (Station Name)', 'unique_id': '123', 'version': 1, diff --git a/tests/components/iqvia/snapshots/test_diagnostics.ambr b/tests/components/iqvia/snapshots/test_diagnostics.ambr index f2fa656cb0f..41cfedb0e29 100644 --- a/tests/components/iqvia/snapshots/test_diagnostics.ambr +++ b/tests/components/iqvia/snapshots/test_diagnostics.ambr @@ -358,6 +358,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/kostal_plenticore/test_diagnostics.py b/tests/components/kostal_plenticore/test_diagnostics.py index 08f06684d9a..3a99a7f681d 100644 --- a/tests/components/kostal_plenticore/test_diagnostics.py +++ b/tests/components/kostal_plenticore/test_diagnostics.py @@ -57,6 +57,7 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, + "subentries": [], }, "client": { "version": "api_version='0.2.0' hostname='scb' name='PUCK RESTful API' sw_version='01.16.05025'", diff --git a/tests/components/lacrosse_view/snapshots/test_diagnostics.ambr b/tests/components/lacrosse_view/snapshots/test_diagnostics.ambr index 201bbbc971e..640726e2355 100644 --- a/tests/components/lacrosse_view/snapshots/test_diagnostics.ambr +++ b/tests/components/lacrosse_view/snapshots/test_diagnostics.ambr @@ -25,6 +25,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr b/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr index c689d04949a..db82f41eb73 100644 --- a/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr +++ b/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr @@ -73,6 +73,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'test-site-name', 'unique_id': None, 'version': 1, diff --git a/tests/components/madvr/snapshots/test_diagnostics.ambr b/tests/components/madvr/snapshots/test_diagnostics.ambr index 3a281391860..92d0578dba8 100644 --- a/tests/components/madvr/snapshots/test_diagnostics.ambr +++ b/tests/components/madvr/snapshots/test_diagnostics.ambr @@ -17,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'envy', 'unique_id': '00:11:22:33:44:55', 'version': 1, diff --git a/tests/components/melcloud/snapshots/test_diagnostics.ambr b/tests/components/melcloud/snapshots/test_diagnostics.ambr index e6a432de07e..671f5afcc52 100644 --- 
a/tests/components/melcloud/snapshots/test_diagnostics.ambr +++ b/tests/components/melcloud/snapshots/test_diagnostics.ambr @@ -17,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'melcloud', 'unique_id': 'UNIQUE_TEST_ID', 'version': 1, diff --git a/tests/components/modern_forms/snapshots/test_diagnostics.ambr b/tests/components/modern_forms/snapshots/test_diagnostics.ambr index f8897a4a47f..1b4090ca5a4 100644 --- a/tests/components/modern_forms/snapshots/test_diagnostics.ambr +++ b/tests/components/modern_forms/snapshots/test_diagnostics.ambr @@ -16,6 +16,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': 'AA:BB:CC:DD:EE:FF', 'version': 1, diff --git a/tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr b/tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr index 5b4b169c0fe..d042dc02ac3 100644 --- a/tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr +++ b/tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr @@ -28,6 +28,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/netatmo/snapshots/test_diagnostics.ambr b/tests/components/netatmo/snapshots/test_diagnostics.ambr index 463556ec657..4ea7e30bcf9 100644 --- a/tests/components/netatmo/snapshots/test_diagnostics.ambr +++ b/tests/components/netatmo/snapshots/test_diagnostics.ambr @@ -646,6 +646,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': 'netatmo', 'version': 1, diff --git a/tests/components/nextdns/snapshots/test_diagnostics.ambr b/tests/components/nextdns/snapshots/test_diagnostics.ambr index 827d6aeb6e5..23f42fee077 100644 --- a/tests/components/nextdns/snapshots/test_diagnostics.ambr +++ b/tests/components/nextdns/snapshots/test_diagnostics.ambr @@ -17,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Fake Profile', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/nice_go/snapshots/test_diagnostics.ambr b/tests/components/nice_go/snapshots/test_diagnostics.ambr index f4ba363a421..b33726d2b72 100644 --- a/tests/components/nice_go/snapshots/test_diagnostics.ambr +++ b/tests/components/nice_go/snapshots/test_diagnostics.ambr @@ -60,6 +60,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/notion/test_diagnostics.py b/tests/components/notion/test_diagnostics.py index 890ce2dfc4a..c1d1bd1bb2e 100644 --- a/tests/components/notion/test_diagnostics.py +++ b/tests/components/notion/test_diagnostics.py @@ -37,6 +37,7 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, + "subentries": [], }, "data": { "bridges": [ diff --git a/tests/components/onvif/snapshots/test_diagnostics.ambr b/tests/components/onvif/snapshots/test_diagnostics.ambr index c8a9ff75d62..c3938efcbb6 100644 --- a/tests/components/onvif/snapshots/test_diagnostics.ambr +++ b/tests/components/onvif/snapshots/test_diagnostics.ambr @@ -24,6 +24,8 @@ 
'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': 'aa:bb:cc:dd:ee:ff', 'version': 1, diff --git a/tests/components/openuv/test_diagnostics.py b/tests/components/openuv/test_diagnostics.py index 61b68b5ad90..03b392b3e7b 100644 --- a/tests/components/openuv/test_diagnostics.py +++ b/tests/components/openuv/test_diagnostics.py @@ -39,6 +39,7 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, + "subentries": [], }, "data": { "protection_window": { diff --git a/tests/components/p1_monitor/snapshots/test_init.ambr b/tests/components/p1_monitor/snapshots/test_init.ambr index d0a676fce1b..83684e153c9 100644 --- a/tests/components/p1_monitor/snapshots/test_init.ambr +++ b/tests/components/p1_monitor/snapshots/test_init.ambr @@ -16,6 +16,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': 'unique_thingy', 'version': 2, @@ -38,6 +40,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': 'unique_thingy', 'version': 2, diff --git a/tests/components/pegel_online/snapshots/test_diagnostics.ambr b/tests/components/pegel_online/snapshots/test_diagnostics.ambr index 1e55805f867..d0fdc81acb4 100644 --- a/tests/components/pegel_online/snapshots/test_diagnostics.ambr +++ b/tests/components/pegel_online/snapshots/test_diagnostics.ambr @@ -31,6 +31,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '70272185-xxxx-xxxx-xxxx-43bea330dcae', 'version': 1, diff --git a/tests/components/philips_js/snapshots/test_diagnostics.ambr b/tests/components/philips_js/snapshots/test_diagnostics.ambr index 4f7a6176634..53db95f0534 100644 --- a/tests/components/philips_js/snapshots/test_diagnostics.ambr +++ b/tests/components/philips_js/snapshots/test_diagnostics.ambr @@ -94,6 +94,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/philips_js/test_config_flow.py b/tests/components/philips_js/test_config_flow.py index 80d05961813..4b8048a8ebe 100644 --- a/tests/components/philips_js/test_config_flow.py +++ b/tests/components/philips_js/test_config_flow.py @@ -155,6 +155,7 @@ async def test_pairing(hass: HomeAssistant, mock_tv_pairable, mock_setup_entry) "version": 1, "options": {}, "minor_version": 1, + "subentries": (), } await hass.async_block_till_done() diff --git a/tests/components/pi_hole/snapshots/test_diagnostics.ambr b/tests/components/pi_hole/snapshots/test_diagnostics.ambr index 3094fcef24b..2d6f6687d04 100644 --- a/tests/components/pi_hole/snapshots/test_diagnostics.ambr +++ b/tests/components/pi_hole/snapshots/test_diagnostics.ambr @@ -33,6 +33,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/proximity/snapshots/test_diagnostics.ambr b/tests/components/proximity/snapshots/test_diagnostics.ambr index 3d9673ffd90..42ec74710f9 100644 --- a/tests/components/proximity/snapshots/test_diagnostics.ambr +++ b/tests/components/proximity/snapshots/test_diagnostics.ambr 
@@ -102,6 +102,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'home', 'unique_id': 'proximity_home', 'version': 1, diff --git a/tests/components/ps4/test_init.py b/tests/components/ps4/test_init.py index d14f367b2bd..24d45fee5b9 100644 --- a/tests/components/ps4/test_init.py +++ b/tests/components/ps4/test_init.py @@ -52,6 +52,7 @@ MOCK_FLOW_RESULT = { "title": "test_ps4", "data": MOCK_DATA, "options": {}, + "subentries": (), } MOCK_ENTRY_ID = "SomeID" diff --git a/tests/components/purpleair/test_diagnostics.py b/tests/components/purpleair/test_diagnostics.py index ae4b28567be..6271a63d652 100644 --- a/tests/components/purpleair/test_diagnostics.py +++ b/tests/components/purpleair/test_diagnostics.py @@ -38,6 +38,7 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, + "subentries": [], }, "data": { "fields": [ diff --git a/tests/components/rainforest_raven/snapshots/test_diagnostics.ambr b/tests/components/rainforest_raven/snapshots/test_diagnostics.ambr index e131bf3d952..abf8e380916 100644 --- a/tests/components/rainforest_raven/snapshots/test_diagnostics.ambr +++ b/tests/components/rainforest_raven/snapshots/test_diagnostics.ambr @@ -17,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, @@ -84,6 +86,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/rainmachine/snapshots/test_diagnostics.ambr b/tests/components/rainmachine/snapshots/test_diagnostics.ambr index acd5fd165b4..681805996f1 100644 --- a/tests/components/rainmachine/snapshots/test_diagnostics.ambr +++ b/tests/components/rainmachine/snapshots/test_diagnostics.ambr @@ -1144,6 +1144,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 2, @@ -2275,6 +2277,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 2, diff --git a/tests/components/recollect_waste/test_diagnostics.py b/tests/components/recollect_waste/test_diagnostics.py index 24c690bcb37..a57e289ec04 100644 --- a/tests/components/recollect_waste/test_diagnostics.py +++ b/tests/components/recollect_waste/test_diagnostics.py @@ -34,6 +34,7 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, + "subentries": [], }, "data": [ { diff --git a/tests/components/ridwell/snapshots/test_diagnostics.ambr b/tests/components/ridwell/snapshots/test_diagnostics.ambr index b03d87c7a89..4b4dda7227d 100644 --- a/tests/components/ridwell/snapshots/test_diagnostics.ambr +++ b/tests/components/ridwell/snapshots/test_diagnostics.ambr @@ -44,6 +44,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 2, diff --git a/tests/components/samsungtv/test_diagnostics.py b/tests/components/samsungtv/test_diagnostics.py index 0319d5dd8dd..e8e0b699a7e 100644 --- a/tests/components/samsungtv/test_diagnostics.py +++ b/tests/components/samsungtv/test_diagnostics.py @@ -51,6 +51,7 
@@ async def test_entry_diagnostics( "pref_disable_new_entities": False, "pref_disable_polling": False, "source": "user", + "subentries": [], "title": "Mock Title", "unique_id": "any", "version": 2, @@ -91,6 +92,7 @@ async def test_entry_diagnostics_encrypted( "pref_disable_new_entities": False, "pref_disable_polling": False, "source": "user", + "subentries": [], "title": "Mock Title", "unique_id": "any", "version": 2, @@ -130,6 +132,7 @@ async def test_entry_diagnostics_encrypte_offline( "pref_disable_new_entities": False, "pref_disable_polling": False, "source": "user", + "subentries": [], "title": "Mock Title", "unique_id": "any", "version": 2, diff --git a/tests/components/screenlogic/snapshots/test_diagnostics.ambr b/tests/components/screenlogic/snapshots/test_diagnostics.ambr index 237d3eab257..c7db7a33959 100644 --- a/tests/components/screenlogic/snapshots/test_diagnostics.ambr +++ b/tests/components/screenlogic/snapshots/test_diagnostics.ambr @@ -18,6 +18,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Pentair: DD-EE-FF', 'unique_id': 'aa:bb:cc:dd:ee:ff', 'version': 1, diff --git a/tests/components/simplisafe/test_diagnostics.py b/tests/components/simplisafe/test_diagnostics.py index d5479f00b06..13c1e28aa36 100644 --- a/tests/components/simplisafe/test_diagnostics.py +++ b/tests/components/simplisafe/test_diagnostics.py @@ -32,6 +32,7 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, + "subentries": [], }, "subscription_data": { "12345": { diff --git a/tests/components/solarlog/snapshots/test_diagnostics.ambr b/tests/components/solarlog/snapshots/test_diagnostics.ambr index e0f1bc2623c..6aef72ebbd5 100644 --- a/tests/components/solarlog/snapshots/test_diagnostics.ambr +++ b/tests/components/solarlog/snapshots/test_diagnostics.ambr @@ -18,6 +18,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'solarlog', 'unique_id': None, 'version': 1, diff --git a/tests/components/subaru/test_config_flow.py b/tests/components/subaru/test_config_flow.py index 6abc544c92a..0b45546902b 100644 --- a/tests/components/subaru/test_config_flow.py +++ b/tests/components/subaru/test_config_flow.py @@ -136,6 +136,7 @@ async def test_user_form_pin_not_required( "data": deepcopy(TEST_CONFIG), "options": {}, "minor_version": 1, + "subentries": (), } expected["data"][CONF_PIN] = None @@ -341,6 +342,7 @@ async def test_pin_form_success(hass: HomeAssistant, pin_form) -> None: "data": TEST_CONFIG, "options": {}, "minor_version": 1, + "subentries": (), } result["data"][CONF_DEVICE_ID] = TEST_DEVICE_ID assert result == expected diff --git a/tests/components/switcher_kis/test_diagnostics.py b/tests/components/switcher_kis/test_diagnostics.py index 53572085f9b..f59958420c4 100644 --- a/tests/components/switcher_kis/test_diagnostics.py +++ b/tests/components/switcher_kis/test_diagnostics.py @@ -69,5 +69,6 @@ async def test_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, + "subentries": [], }, } diff --git a/tests/components/systemmonitor/snapshots/test_diagnostics.ambr b/tests/components/systemmonitor/snapshots/test_diagnostics.ambr index 75d942fc601..afa508cc004 100644 --- a/tests/components/systemmonitor/snapshots/test_diagnostics.ambr +++ b/tests/components/systemmonitor/snapshots/test_diagnostics.ambr @@ -56,6 +56,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 
'source': 'user', + 'subentries': list([ + ]), 'title': 'System Monitor', 'unique_id': None, 'version': 1, @@ -111,6 +113,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'System Monitor', 'unique_id': None, 'version': 1, diff --git a/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr b/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr index 3180c7c0b1d..b5b33d7c246 100644 --- a/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr +++ b/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr @@ -37,6 +37,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/tractive/snapshots/test_diagnostics.ambr b/tests/components/tractive/snapshots/test_diagnostics.ambr index 11427a84801..3613f7e5997 100644 --- a/tests/components/tractive/snapshots/test_diagnostics.ambr +++ b/tests/components/tractive/snapshots/test_diagnostics.ambr @@ -17,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': 'very_unique_string', 'version': 1, diff --git a/tests/components/tuya/snapshots/test_config_flow.ambr b/tests/components/tuya/snapshots/test_config_flow.ambr index a5a68a12a22..90d83d69814 100644 --- a/tests/components/tuya/snapshots/test_config_flow.ambr +++ b/tests/components/tuya/snapshots/test_config_flow.ambr @@ -24,6 +24,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '12345', 'unique_id': '12345', 'version': 1, @@ -54,6 +56,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Old Tuya configuration entry', 'unique_id': '12345', 'version': 1, @@ -107,10 +111,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'mocked_username', 'unique_id': None, 'version': 1, }), + 'subentries': tuple( + ), 'title': 'mocked_username', 'type': , 'version': 1, diff --git a/tests/components/twinkly/snapshots/test_diagnostics.ambr b/tests/components/twinkly/snapshots/test_diagnostics.ambr index 28ec98cf572..e52f76634fd 100644 --- a/tests/components/twinkly/snapshots/test_diagnostics.ambr +++ b/tests/components/twinkly/snapshots/test_diagnostics.ambr @@ -37,6 +37,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Twinkly', 'unique_id': '4c8fccf5-e08a-4173-92d5-49bf479252a2', 'version': 1, diff --git a/tests/components/unifi/snapshots/test_diagnostics.ambr b/tests/components/unifi/snapshots/test_diagnostics.ambr index 4ba90a00113..aa7337be0ba 100644 --- a/tests/components/unifi/snapshots/test_diagnostics.ambr +++ b/tests/components/unifi/snapshots/test_diagnostics.ambr @@ -42,6 +42,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '1', 'version': 1, diff --git a/tests/components/uptime/snapshots/test_config_flow.ambr b/tests/components/uptime/snapshots/test_config_flow.ambr index 38312667375..93b1da60998 100644 --- a/tests/components/uptime/snapshots/test_config_flow.ambr +++ b/tests/components/uptime/snapshots/test_config_flow.ambr @@ -27,10 
+27,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Uptime', 'unique_id': None, 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Uptime', 'type': , 'version': 1, diff --git a/tests/components/utility_meter/snapshots/test_diagnostics.ambr b/tests/components/utility_meter/snapshots/test_diagnostics.ambr index 6cdf121d7e3..ef235bba99d 100644 --- a/tests/components/utility_meter/snapshots/test_diagnostics.ambr +++ b/tests/components/utility_meter/snapshots/test_diagnostics.ambr @@ -25,6 +25,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Energy Bill', 'unique_id': None, 'version': 2, diff --git a/tests/components/v2c/snapshots/test_diagnostics.ambr b/tests/components/v2c/snapshots/test_diagnostics.ambr index 96567b80c54..780a00acd64 100644 --- a/tests/components/v2c/snapshots/test_diagnostics.ambr +++ b/tests/components/v2c/snapshots/test_diagnostics.ambr @@ -16,6 +16,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': 'ABC123', 'version': 1, diff --git a/tests/components/vicare/snapshots/test_diagnostics.ambr b/tests/components/vicare/snapshots/test_diagnostics.ambr index ae9b05389c7..0b1dcef5a29 100644 --- a/tests/components/vicare/snapshots/test_diagnostics.ambr +++ b/tests/components/vicare/snapshots/test_diagnostics.ambr @@ -4731,6 +4731,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': 'ViCare', 'version': 1, diff --git a/tests/components/vodafone_station/snapshots/test_diagnostics.ambr b/tests/components/vodafone_station/snapshots/test_diagnostics.ambr index c258b14dc2d..dd268f4ed1a 100644 --- a/tests/components/vodafone_station/snapshots/test_diagnostics.ambr +++ b/tests/components/vodafone_station/snapshots/test_diagnostics.ambr @@ -35,6 +35,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/watttime/snapshots/test_diagnostics.ambr b/tests/components/watttime/snapshots/test_diagnostics.ambr index 0c137acc36b..3cc5e1d6f66 100644 --- a/tests/components/watttime/snapshots/test_diagnostics.ambr +++ b/tests/components/watttime/snapshots/test_diagnostics.ambr @@ -27,6 +27,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/webmin/snapshots/test_diagnostics.ambr b/tests/components/webmin/snapshots/test_diagnostics.ambr index 8299b0eafba..c64fa212a98 100644 --- a/tests/components/webmin/snapshots/test_diagnostics.ambr +++ b/tests/components/webmin/snapshots/test_diagnostics.ambr @@ -253,6 +253,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': None, 'version': 1, diff --git a/tests/components/webostv/test_diagnostics.py b/tests/components/webostv/test_diagnostics.py index 3d7cb00e021..7f54e940966 100644 --- a/tests/components/webostv/test_diagnostics.py +++ b/tests/components/webostv/test_diagnostics.py @@ -61,5 +61,6 @@ async def test_diagnostics( "created_at": entry.created_at.isoformat(), 
"modified_at": entry.modified_at.isoformat(), "discovery_keys": {}, + "subentries": [], }, } diff --git a/tests/components/whirlpool/snapshots/test_diagnostics.ambr b/tests/components/whirlpool/snapshots/test_diagnostics.ambr index c60ce17b952..ee8abe04bf1 100644 --- a/tests/components/whirlpool/snapshots/test_diagnostics.ambr +++ b/tests/components/whirlpool/snapshots/test_diagnostics.ambr @@ -38,6 +38,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/whois/snapshots/test_config_flow.ambr b/tests/components/whois/snapshots/test_config_flow.ambr index 937502d4d6c..0d99b0596e3 100644 --- a/tests/components/whois/snapshots/test_config_flow.ambr +++ b/tests/components/whois/snapshots/test_config_flow.ambr @@ -30,10 +30,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Example.com', 'unique_id': 'example.com', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Example.com', 'type': , 'version': 1, @@ -70,10 +74,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Example.com', 'unique_id': 'example.com', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Example.com', 'type': , 'version': 1, @@ -110,10 +118,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Example.com', 'unique_id': 'example.com', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Example.com', 'type': , 'version': 1, @@ -150,10 +162,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Example.com', 'unique_id': 'example.com', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Example.com', 'type': , 'version': 1, @@ -190,10 +206,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Example.com', 'unique_id': 'example.com', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Example.com', 'type': , 'version': 1, diff --git a/tests/components/workday/snapshots/test_diagnostics.ambr b/tests/components/workday/snapshots/test_diagnostics.ambr index f41b86b7f6d..e7331b911a8 100644 --- a/tests/components/workday/snapshots/test_diagnostics.ambr +++ b/tests/components/workday/snapshots/test_diagnostics.ambr @@ -40,6 +40,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/wyoming/snapshots/test_config_flow.ambr b/tests/components/wyoming/snapshots/test_config_flow.ambr index bdead0f2028..d288c531407 100644 --- a/tests/components/wyoming/snapshots/test_config_flow.ambr +++ b/tests/components/wyoming/snapshots/test_config_flow.ambr @@ -36,10 +36,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'hassio', + 'subentries': list([ + ]), 'title': 'Piper', 'unique_id': '1234', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Piper', 'type': , 'version': 1, @@ -82,10 +86,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'hassio', + 'subentries': list([ + ]), 'title': 'Piper', 'unique_id': '1234', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Piper', 
'type': , 'version': 1, @@ -127,10 +135,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'zeroconf', + 'subentries': list([ + ]), 'title': 'Test Satellite', 'unique_id': 'test_zeroconf_name._wyoming._tcp.local._Test Satellite', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Test Satellite', 'type': , 'version': 1, diff --git a/tests/components/zha/snapshots/test_diagnostics.ambr b/tests/components/zha/snapshots/test_diagnostics.ambr index f46a06e84b8..08807f65d5d 100644 --- a/tests/components/zha/snapshots/test_diagnostics.ambr +++ b/tests/components/zha/snapshots/test_diagnostics.ambr @@ -113,6 +113,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 4, diff --git a/tests/snapshots/test_config_entries.ambr b/tests/snapshots/test_config_entries.ambr index 51e56f4874e..08b532677f4 100644 --- a/tests/snapshots/test_config_entries.ambr +++ b/tests/snapshots/test_config_entries.ambr @@ -16,6 +16,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/test_config_entries.py b/tests/test_config_entries.py index aba85a35349..1ad152e8e42 100644 --- a/tests/test_config_entries.py +++ b/tests/test_config_entries.py @@ -4,6 +4,7 @@ from __future__ import annotations import asyncio from collections.abc import Generator +from contextlib import AbstractContextManager, nullcontext as does_not_raise from datetime import timedelta import logging import re @@ -905,7 +906,7 @@ async def test_entries_excludes_ignore_and_disabled( async def test_saving_and_loading( - hass: HomeAssistant, freezer: FrozenDateTimeFactory + hass: HomeAssistant, freezer: FrozenDateTimeFactory, hass_storage: dict[str, Any] ) -> None: """Test that we're saving and loading correctly.""" mock_integration( @@ -922,7 +923,17 @@ async def test_saving_and_loading( async def async_step_user(self, user_input=None): """Test user step.""" await self.async_set_unique_id("unique") - return self.async_create_entry(title="Test Title", data={"token": "abcd"}) + subentries = [ + config_entries.ConfigSubentryData( + data={"foo": "bar"}, title="subentry 1" + ), + config_entries.ConfigSubentryData( + data={"sun": "moon"}, title="subentry 2", unique_id="very_unique" + ), + ] + return self.async_create_entry( + title="Test Title", data={"token": "abcd"}, subentries=subentries + ) with mock_config_flow("test", TestFlow): await hass.config_entries.flow.async_init( @@ -971,6 +982,98 @@ async def test_saving_and_loading( # To execute the save await hass.async_block_till_done() + stored_data = hass_storage["core.config_entries"] + assert stored_data == { + "data": { + "entries": [ + { + "created_at": ANY, + "data": { + "token": "abcd", + }, + "disabled_by": None, + "discovery_keys": {}, + "domain": "test", + "entry_id": ANY, + "minor_version": 1, + "modified_at": ANY, + "options": {}, + "pref_disable_new_entities": True, + "pref_disable_polling": True, + "source": "user", + "subentries": [ + { + "data": {"foo": "bar"}, + "subentry_id": ANY, + "title": "subentry 1", + "unique_id": None, + }, + { + "data": {"sun": "moon"}, + "subentry_id": ANY, + "title": "subentry 2", + "unique_id": "very_unique", + }, + ], + "title": "Test Title", + "unique_id": "unique", + "version": 5, + }, + { + "created_at": ANY, + "data": { + "username": "bla", + }, + "disabled_by": None, + 
"discovery_keys": { + "test": [ + {"domain": "test", "key": "blah", "version": 1}, + ], + }, + "domain": "test", + "entry_id": ANY, + "minor_version": 1, + "modified_at": ANY, + "options": {}, + "pref_disable_new_entities": False, + "pref_disable_polling": False, + "source": "user", + "subentries": [], + "title": "Test 2 Title", + "unique_id": None, + "version": 3, + }, + { + "created_at": ANY, + "data": { + "username": "bla", + }, + "disabled_by": None, + "discovery_keys": { + "test": [ + {"domain": "test", "key": ["a", "b"], "version": 1}, + ], + }, + "domain": "test", + "entry_id": ANY, + "minor_version": 1, + "modified_at": ANY, + "options": {}, + "pref_disable_new_entities": False, + "pref_disable_polling": False, + "source": "user", + "subentries": [], + "title": "Test 2 Title", + "unique_id": None, + "version": 3, + }, + ], + }, + "key": "core.config_entries", + "minor_version": 5, + "version": 1, + } + # Now load written data in new config manager manager = config_entries.ConfigEntries(hass, {}) await manager.async_initialize() @@ -983,6 +1086,25 @@ async def test_saving_and_loading( ): assert orig.as_dict() == loaded.as_dict() + hass.config_entries.async_update_entry( + entry_1, + pref_disable_polling=False, + pref_disable_new_entities=False, + ) + + # To trigger the call_later + freezer.tick(1.0) + async_fire_time_changed(hass) + # To execute the save + await hass.async_block_till_done() + + # Assert no data is lost when storing again + expected_stored_data = stored_data + expected_stored_data["data"]["entries"][0]["modified_at"] = ANY + expected_stored_data["data"]["entries"][0]["pref_disable_new_entities"] = False + expected_stored_data["data"]["entries"][0]["pref_disable_polling"] = False + assert hass_storage["core.config_entries"] == expected_stored_data | {} + @freeze_time("2024-02-14 12:00:00") async def test_as_dict(snapshot: SnapshotAssertion) -> None: @@ -1416,6 +1538,42 @@ async def test_update_entry_options_and_trigger_listener( assert len(update_listener_calls) == 1 +async def test_update_subentry_and_trigger_listener( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test that we can update subentry and trigger listener.""" + entry = MockConfigEntry(domain="test", options={"first": True}) + entry.add_to_manager(manager) + update_listener_calls = [] + + subentry = config_entries.ConfigSubentry( + data={"test": "test"}, unique_id="test", title="Mock title" + ) + + async def update_listener( + hass: HomeAssistant, entry: config_entries.ConfigEntry + ) -> None: + """Test function.""" + assert entry.subentries == expected_subentries + update_listener_calls.append(None) + + entry.add_update_listener(update_listener) + + expected_subentries = {subentry.subentry_id: subentry} + assert manager.async_add_subentry(entry, subentry) is True + + await hass.async_block_till_done(wait_background_tasks=True) + assert entry.subentries == expected_subentries + assert len(update_listener_calls) == 1 + + expected_subentries = {} + assert manager.async_remove_subentry(entry, subentry.subentry_id) is True + + await hass.async_block_till_done(wait_background_tasks=True) + assert entry.subentries == expected_subentries + assert len(update_listener_calls) == 2 + + async def test_setup_raise_not_ready( hass: HomeAssistant, manager: config_entries.ConfigEntries, @@ -1742,17 +1900,453 @@ async def test_entry_options_unknown_config_entry( mock_integration(hass, MockModule("test")) mock_platform(hass, "test.config_flow", None) - class TestFlow: + with 
pytest.raises(config_entries.UnknownEntry): + await manager.options.async_create_flow( + "blah", context={"source": "test"}, data=None + ) + + +async def test_create_entry_subentries( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test a config entry being created with subentries.""" + + subentrydata = config_entries.ConfigSubentryData( + data={"test": "test"}, + title="Mock title", + unique_id="test", + ) + + async def mock_async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Mock setup.""" + hass.async_create_task( + hass.config_entries.flow.async_init( + "comp", + context={"source": config_entries.SOURCE_IMPORT}, + data={"data": "data", "subentry": subentrydata}, + ) + ) + return True + + async_setup_entry = AsyncMock(return_value=True) + mock_integration( + hass, + MockModule( + "comp", async_setup=mock_async_setup, async_setup_entry=async_setup_entry + ), + ) + mock_platform(hass, "comp.config_flow", None) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + VERSION = 1 + + async def async_step_import(self, user_input): + """Test import step creating entry, with subentry.""" + return self.async_create_entry( + title="title", + data={"example": user_input["data"]}, + subentries=[user_input["subentry"]], + ) + + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + assert await async_setup_component(hass, "comp", {}) + + await hass.async_block_till_done() + + assert len(async_setup_entry.mock_calls) == 1 + + entries = hass.config_entries.async_entries("comp") + assert len(entries) == 1 + assert entries[0].supported_subentries == () + assert entries[0].data == {"example": "data"} + assert len(entries[0].subentries) == 1 + subentry_id = list(entries[0].subentries)[0] + subentry = config_entries.ConfigSubentry( + data=subentrydata["data"], + subentry_id=subentry_id, + title=subentrydata["title"], + unique_id="test", + ) + assert entries[0].subentries == {subentry_id: subentry} + + +async def test_entry_subentry( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test that we can add a subentry to an entry.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + entry = MockConfigEntry(domain="test", data={"first": True}) + entry.add_to_manager(manager) + + class TestFlow(config_entries.ConfigFlow): """Test flow.""" @staticmethod @callback - def async_get_options_flow(config_entry): - """Test options flow.""" + def async_get_subentry_flow(config_entry, subentry_type: str): + """Test subentry flow.""" + + class SubentryFlowHandler(data_entry_flow.FlowHandler): + """Test subentry flow handler.""" + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries( + cls, config_entry: ConfigEntry + ) -> tuple[str, ...]: + return ("test",) + + with mock_config_flow("test", TestFlow): + flow = await manager.subentries.async_create_flow( + (entry.entry_id, "test"), context={"source": "test"}, data=None + ) + + flow.handler = (entry.entry_id, "test") # Set to keep reference to config entry + + await manager.subentries.async_finish_flow( + flow, + { + "data": {"second": True}, + "title": "Mock title", + "type": data_entry_flow.FlowResultType.CREATE_ENTRY, + "unique_id": "test", + }, + ) + + assert entry.data == {"first": True} + assert entry.options == {} + subentry_id = list(entry.subentries)[0] + assert entry.subentries == { + subentry_id: config_entries.ConfigSubentry( + data={"second": True}, + subentry_id=subentry_id, + 
title="Mock title", + unique_id="test", + ) + } + assert entry.supported_subentries == ("test",) + + +async def test_entry_subentry_non_string( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test adding an invalid subentry to an entry.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + entry = MockConfigEntry(domain="test", data={"first": True}) + entry.add_to_manager(manager) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + """Test subentry flow.""" + + class SubentryFlowHandler(data_entry_flow.FlowHandler): + """Test subentry flow handler.""" + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries( + cls, config_entry: ConfigEntry + ) -> tuple[str, ...]: + return ("test",) + + with mock_config_flow("test", TestFlow): + flow = await manager.subentries.async_create_flow( + (entry.entry_id, "test"), context={"source": "test"}, data=None + ) + + flow.handler = (entry.entry_id, "test") # Set to keep reference to config entry + + with pytest.raises(HomeAssistantError): + await manager.subentries.async_finish_flow( + flow, + { + "data": {"second": True}, + "title": "Mock title", + "type": data_entry_flow.FlowResultType.CREATE_ENTRY, + "unique_id": 123, + }, + ) + + +@pytest.mark.parametrize("context", [None, {}, {"bla": "bleh"}]) +async def test_entry_subentry_no_context( + hass: HomeAssistant, manager: config_entries.ConfigEntries, context: dict | None +) -> None: + """Test starting a subentry flow without "source" in context.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + entry = MockConfigEntry(domain="test", data={"first": True}) + entry.add_to_manager(manager) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + """Test subentry flow.""" + + class SubentryFlowHandler(data_entry_flow.FlowHandler): + """Test subentry flow handler.""" + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries( + cls, config_entry: ConfigEntry + ) -> tuple[str, ...]: + return ("test",) + + with mock_config_flow("test", TestFlow), pytest.raises(KeyError): + await manager.subentries.async_create_flow( + (entry.entry_id, "test"), context=context, data=None + ) + + +@pytest.mark.parametrize( + ("unique_id", "expected_result"), + [(None, does_not_raise()), ("test", pytest.raises(HomeAssistantError))], +) +async def test_entry_subentry_duplicate( + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + unique_id: str | None, + expected_result: AbstractContextManager, +) -> None: + """Test adding a duplicated subentry to an entry.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + entry = MockConfigEntry( + domain="test", + data={"first": True}, + subentries_data=[ + config_entries.ConfigSubentryData( + data={}, + subentry_id="blabla", + title="Mock title", + unique_id=unique_id, + ) + ], + ) + entry.add_to_manager(manager) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + """Test subentry flow.""" + + class SubentryFlowHandler(data_entry_flow.FlowHandler): + """Test subentry flow handler.""" + + return SubentryFlowHandler() + + 
@classmethod + @callback + def async_supported_subentries( + cls, config_entry: ConfigEntry + ) -> tuple[str, ...]: + return ("test",) + + with mock_config_flow("test", TestFlow): + flow = await manager.subentries.async_create_flow( + (entry.entry_id, "test"), context={"source": "test"}, data=None + ) + + flow.handler = (entry.entry_id, "test") # Set to keep reference to config entry + + with expected_result: + await manager.subentries.async_finish_flow( + flow, + { + "data": {"second": True}, + "title": "Mock title", + "type": data_entry_flow.FlowResultType.CREATE_ENTRY, + "unique_id": unique_id, + }, + ) + + +async def test_entry_subentry_abort( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test that we can abort subentry flow.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + entry = MockConfigEntry(domain="test", data={"first": True}) + entry.add_to_manager(manager) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + """Test subentry flow.""" + + class SubentryFlowHandler(data_entry_flow.FlowHandler): + """Test subentry flow handler.""" + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries( + cls, config_entry: ConfigEntry + ) -> tuple[str, ...]: + return ("test",) + + with mock_config_flow("test", TestFlow): + flow = await manager.subentries.async_create_flow( + (entry.entry_id, "test"), context={"source": "test"}, data=None + ) + + flow.handler = (entry.entry_id, "test") # Set to keep reference to config entry + + assert await manager.subentries.async_finish_flow( + flow, {"type": data_entry_flow.FlowResultType.ABORT, "reason": "test"} + ) + + +async def test_entry_subentry_unknown_config_entry( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test attempting to start a subentry flow for an unknown config entry.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) with pytest.raises(config_entries.UnknownEntry): - await manager.options.async_create_flow( - "blah", context={"source": "test"}, data=None + await manager.subentries.async_create_flow( + ("blah", "blah"), context={"source": "test"}, data=None + ) + + +async def test_entry_subentry_deleted_config_entry( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test attempting to finish a subentry flow for a deleted config entry.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + entry = MockConfigEntry(domain="test", data={"first": True}) + entry.add_to_manager(manager) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + """Test subentry flow.""" + + class SubentryFlowHandler(data_entry_flow.FlowHandler): + """Test subentry flow handler.""" + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries( + cls, config_entry: ConfigEntry + ) -> tuple[str, ...]: + return ("test",) + + with mock_config_flow("test", TestFlow): + flow = await manager.subentries.async_create_flow( + (entry.entry_id, "test"), context={"source": "test"}, data=None + ) + + flow.handler = (entry.entry_id, "test") # Set to keep reference to config entry + + await hass.config_entries.async_remove(entry.entry_id) + + with 
pytest.raises(config_entries.UnknownEntry): + await manager.subentries.async_finish_flow( + flow, + { + "data": {"second": True}, + "title": "Mock title", + "type": data_entry_flow.FlowResultType.CREATE_ENTRY, + "unique_id": "test", + }, + ) + + +async def test_entry_subentry_unsupported( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test attempting to start a subentry flow for a config entry without support.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + entry = MockConfigEntry(domain="test", data={"first": True}) + entry.add_to_manager(manager) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + """Test subentry flow.""" + + class SubentryFlowHandler(data_entry_flow.FlowHandler): + """Test subentry flow handler.""" + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries( + cls, config_entry: ConfigEntry + ) -> tuple[str, ...]: + return ("test",) + + with ( + mock_config_flow("test", TestFlow), + pytest.raises(data_entry_flow.UnknownHandler), + ): + await manager.subentries.async_create_flow( + ( + entry.entry_id, + "unknown", + ), + context={"source": "test"}, + data=None, + ) + + +async def test_entry_subentry_unsupported_subentry_type( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test attempting to start a subentry flow for a config entry without support.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + entry = MockConfigEntry(domain="test", data={"first": True}) + entry.add_to_manager(manager) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + with ( + mock_config_flow("test", TestFlow), + pytest.raises(data_entry_flow.UnknownHandler), + ): + await manager.subentries.async_create_flow( + (entry.entry_id, "test"), context={"source": "test"}, data=None ) @@ -3911,21 +4505,20 @@ async def test_updating_entry_with_and_without_changes( assert manager.async_update_entry(entry) is False - for change in ( - {"data": {"second": True, "third": 456}}, - {"data": {"second": True}}, - {"minor_version": 2}, - {"options": {"hello": True}}, - {"pref_disable_new_entities": True}, - {"pref_disable_polling": True}, - {"title": "sometitle"}, - {"unique_id": "abcd1234"}, - {"version": 2}, + for change, expected_value in ( + ({"data": {"second": True, "third": 456}}, {"second": True, "third": 456}), + ({"data": {"second": True}}, {"second": True}), + ({"minor_version": 2}, 2), + ({"options": {"hello": True}}, {"hello": True}), + ({"pref_disable_new_entities": True}, True), + ({"pref_disable_polling": True}, True), + ({"title": "sometitle"}, "sometitle"), + ({"unique_id": "abcd1234"}, "abcd1234"), + ({"version": 2}, 2), ): assert manager.async_update_entry(entry, **change) is True key = next(iter(change)) - value = next(iter(change.values())) - assert getattr(entry, key) == value + assert getattr(entry, key) == expected_value assert manager.async_update_entry(entry, **change) is False assert manager.async_entry_for_domain_unique_id("test", "abc123") is None @@ -5459,6 +6052,7 @@ async def test_unhashable_unique_id_fails( minor_version=1, options={}, source="test", + subentries_data=(), title="title", unique_id=unique_id, version=1, @@ -5494,6 +6088,7 @@ async def test_unhashable_unique_id_fails_on_update( minor_version=1, options={}, source="test", + subentries_data=(), 
title="title", unique_id="123", version=1, @@ -5524,6 +6119,7 @@ async def test_string_unique_id_no_warning( minor_version=1, options={}, source="test", + subentries_data=(), title="title", unique_id="123", version=1, @@ -5566,6 +6162,7 @@ async def test_hashable_unique_id( minor_version=1, options={}, source="test", + subentries_data=(), title="title", unique_id=unique_id, version=1, @@ -5600,6 +6197,7 @@ async def test_no_unique_id_no_warning( minor_version=1, options={}, source="test", + subentries_data=(), title="title", unique_id=None, version=1, @@ -6524,6 +7122,7 @@ async def test_migration_from_1_2( "pref_disable_new_entities": False, "pref_disable_polling": False, "source": "import", + "subentries": {}, "title": "Sun", "unique_id": None, "version": 1, From a3584919706cd5497d9c8ac9331123893a616001 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:16:54 +0100 Subject: [PATCH 0578/1198] Migrate wiz light tests to use Kelvin (#133032) --- tests/components/wiz/test_light.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/components/wiz/test_light.py b/tests/components/wiz/test_light.py index 1fb87b30a5f..5c74d407238 100644 --- a/tests/components/wiz/test_light.py +++ b/tests/components/wiz/test_light.py @@ -4,7 +4,7 @@ from pywizlight import PilotBuilder from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, @@ -91,7 +91,7 @@ async def test_rgbww_light(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 153, ATTR_BRIGHTNESS: 128}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 6535, ATTR_BRIGHTNESS: 128}, blocking=True, ) pilot: PilotBuilder = bulb.turn_on.mock_calls[0][1][0] @@ -99,7 +99,7 @@ async def test_rgbww_light(hass: HomeAssistant) -> None: await async_push_update(hass, bulb, {"mac": FAKE_MAC, **pilot.pilot_params}) state = hass.states.get(entity_id) assert state.state == STATE_ON - assert state.attributes[ATTR_COLOR_TEMP] == 153 + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 6535 bulb.turn_on.reset_mock() await hass.services.async_call( @@ -148,7 +148,7 @@ async def test_rgbw_light(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 153, ATTR_BRIGHTNESS: 128}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 6535, ATTR_BRIGHTNESS: 128}, blocking=True, ) pilot: PilotBuilder = bulb.turn_on.mock_calls[0][1][0] @@ -162,7 +162,7 @@ async def test_turnable_light(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 153, ATTR_BRIGHTNESS: 128}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 6535, ATTR_BRIGHTNESS: 128}, blocking=True, ) pilot: PilotBuilder = bulb.turn_on.mock_calls[0][1][0] @@ -171,7 +171,7 @@ async def test_turnable_light(hass: HomeAssistant) -> None: await async_push_update(hass, bulb, {"mac": FAKE_MAC, **pilot.pilot_params}) state = hass.states.get(entity_id) assert state.state == STATE_ON - assert state.attributes[ATTR_COLOR_TEMP] == 153 + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 6535 async def test_old_firmware_dimmable_light(hass: HomeAssistant) -> None: From 798f3a34f3151d5c2e99bb4f8b8b39a98ab9c566 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> 
Date: Thu, 12 Dec 2024 20:17:45 +0100 Subject: [PATCH 0579/1198] Migrate abode light tests to use Kelvin (#133001) --- tests/components/abode/test_light.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/components/abode/test_light.py b/tests/components/abode/test_light.py index d556a20fa90..4be94a09ee8 100644 --- a/tests/components/abode/test_light.py +++ b/tests/components/abode/test_light.py @@ -6,7 +6,7 @@ from homeassistant.components.abode import ATTR_DEVICE_ID from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_SUPPORTED_COLOR_MODES, DOMAIN as LIGHT_DOMAIN, @@ -46,7 +46,7 @@ async def test_attributes(hass: HomeAssistant) -> None: assert state.state == STATE_ON assert state.attributes.get(ATTR_BRIGHTNESS) == 204 assert state.attributes.get(ATTR_RGB_COLOR) == (0, 64, 255) - assert state.attributes.get(ATTR_COLOR_TEMP) is None + assert state.attributes.get(ATTR_COLOR_TEMP_KELVIN) is None assert state.attributes.get(ATTR_DEVICE_ID) == "ZB:db5b1a" assert not state.attributes.get("battery_low") assert not state.attributes.get("no_response") From c164507952e3400d0aecf020921173955a0b2c62 Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:18:19 +0100 Subject: [PATCH 0580/1198] Add new integration slide_local (#132632) Co-authored-by: Joost Lekkerkerker --- CODEOWNERS | 2 + homeassistant/brands/slide.json | 5 + .../components/slide_local/__init__.py | 33 ++ .../components/slide_local/config_flow.py | 183 +++++++++ homeassistant/components/slide_local/const.py | 13 + .../components/slide_local/coordinator.py | 112 ++++++ homeassistant/components/slide_local/cover.py | 113 ++++++ .../components/slide_local/entity.py | 29 ++ .../components/slide_local/manifest.json | 17 + .../components/slide_local/quality_scale.yaml | 66 ++++ .../components/slide_local/strings.json | 35 ++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 17 +- homeassistant/generated/zeroconf.py | 4 + requirements_all.txt | 1 + requirements_test_all.txt | 4 + tests/components/slide_local/__init__.py | 21 + tests/components/slide_local/conftest.py | 63 +++ tests/components/slide_local/const.py | 8 + .../slide_local/fixtures/slide_1.json | 11 + .../slide_local/test_config_flow.py | 373 ++++++++++++++++++ 21 files changed, 1108 insertions(+), 3 deletions(-) create mode 100644 homeassistant/brands/slide.json create mode 100644 homeassistant/components/slide_local/__init__.py create mode 100644 homeassistant/components/slide_local/config_flow.py create mode 100644 homeassistant/components/slide_local/const.py create mode 100644 homeassistant/components/slide_local/coordinator.py create mode 100644 homeassistant/components/slide_local/cover.py create mode 100644 homeassistant/components/slide_local/entity.py create mode 100644 homeassistant/components/slide_local/manifest.json create mode 100644 homeassistant/components/slide_local/quality_scale.yaml create mode 100644 homeassistant/components/slide_local/strings.json create mode 100644 tests/components/slide_local/__init__.py create mode 100644 tests/components/slide_local/conftest.py create mode 100644 tests/components/slide_local/const.py create mode 100644 tests/components/slide_local/fixtures/slide_1.json create mode 100644 tests/components/slide_local/test_config_flow.py diff --git a/CODEOWNERS b/CODEOWNERS index 03b0e7b893b..6c11f57da83 100644 --- 
a/CODEOWNERS +++ b/CODEOWNERS @@ -1359,6 +1359,8 @@ build.json @home-assistant/supervisor /homeassistant/components/sleepiq/ @mfugate1 @kbickar /tests/components/sleepiq/ @mfugate1 @kbickar /homeassistant/components/slide/ @ualex73 +/homeassistant/components/slide_local/ @dontinelli +/tests/components/slide_local/ @dontinelli /homeassistant/components/slimproto/ @marcelveldt /tests/components/slimproto/ @marcelveldt /homeassistant/components/sma/ @kellerza @rklomp diff --git a/homeassistant/brands/slide.json b/homeassistant/brands/slide.json new file mode 100644 index 00000000000..808a54affc3 --- /dev/null +++ b/homeassistant/brands/slide.json @@ -0,0 +1,5 @@ +{ + "domain": "slide", + "name": "Slide", + "integrations": ["slide", "slide_local"] +} diff --git a/homeassistant/components/slide_local/__init__.py b/homeassistant/components/slide_local/__init__.py new file mode 100644 index 00000000000..878830fe513 --- /dev/null +++ b/homeassistant/components/slide_local/__init__.py @@ -0,0 +1,33 @@ +"""Component for the Slide local API.""" + +from __future__ import annotations + +from goslideapi.goslideapi import GoSlideLocal as SlideLocalApi + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .coordinator import SlideCoordinator + +PLATFORMS = [Platform.COVER] +type SlideConfigEntry = ConfigEntry[SlideLocalApi] + + +async def async_setup_entry(hass: HomeAssistant, entry: SlideConfigEntry) -> bool: + """Set up the slide_local integration.""" + + coordinator = SlideCoordinator(hass, entry) + + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: SlideConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/slide_local/config_flow.py b/homeassistant/components/slide_local/config_flow.py new file mode 100644 index 00000000000..bc5033e972b --- /dev/null +++ b/homeassistant/components/slide_local/config_flow.py @@ -0,0 +1,183 @@ +"""Config flow for slide_local integration.""" + +from __future__ import annotations + +import logging +from typing import Any + +from goslideapi.goslideapi import ( + AuthenticationFailed, + ClientConnectionError, + ClientTimeoutError, + DigestAuthCalcError, + GoSlideLocal as SlideLocalApi, +) +import voluptuous as vol + +from homeassistant.components.zeroconf import ZeroconfServiceInfo +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_API_VERSION, CONF_HOST, CONF_MAC, CONF_PASSWORD +from homeassistant.helpers.device_registry import format_mac + +from .const import CONF_INVERT_POSITION, DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +class SlideConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for slide_local.""" + + _mac: str = "" + _host: str = "" + _api_version: int | None = None + + VERSION = 1 + MINOR_VERSION = 1 + + async def async_test_connection( + self, user_input: dict[str, str | int] + ) -> dict[str, str]: + """Reusable Auth Helper.""" + slide = SlideLocalApi() + + # first test, if API version 2 is working + await slide.slide_add( + user_input[CONF_HOST], + user_input.get(CONF_PASSWORD, ""), + 2, + ) + + try: + result = await slide.slide_info(user_input[CONF_HOST]) + except 
(ClientConnectionError, ClientTimeoutError): + return {"base": "cannot_connect"} + except (AuthenticationFailed, DigestAuthCalcError): + return {"base": "invalid_auth"} + except Exception: # noqa: BLE001 + _LOGGER.exception("Exception occurred during connection test") + return {"base": "unknown"} + + if result is not None: + self._api_version = 2 + self._mac = format_mac(result["mac"]) + return {} + + # API version 2 is not working, try API version 1 instead + await slide.slide_del(user_input[CONF_HOST]) + await slide.slide_add( + user_input[CONF_HOST], + user_input.get(CONF_PASSWORD, ""), + 1, + ) + + try: + result = await slide.slide_info(user_input[CONF_HOST]) + except (ClientConnectionError, ClientTimeoutError): + return {"base": "cannot_connect"} + except (AuthenticationFailed, DigestAuthCalcError): + return {"base": "invalid_auth"} + except Exception: # noqa: BLE001 + _LOGGER.exception("Exception occurred during connection test") + return {"base": "unknown"} + + if result is None: + # API version 1 isn't working either + return {"base": "unknown"} + + self._api_version = 1 + self._mac = format_mac(result["mac"]) + + return {} + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the user step.""" + errors = {} + if user_input is not None: + if not (errors := await self.async_test_connection(user_input)): + await self.async_set_unique_id(self._mac) + self._abort_if_unique_id_configured() + user_input |= { + CONF_MAC: self._mac, + CONF_API_VERSION: self._api_version, + } + + return self.async_create_entry( + title=user_input[CONF_HOST], + data=user_input, + options={CONF_INVERT_POSITION: False}, + ) + + if user_input is not None and user_input.get(CONF_HOST) is not None: + self._host = user_input[CONF_HOST] + + return self.async_show_form( + step_id="user", + data_schema=self.add_suggested_values_to_schema( + vol.Schema( + { + vol.Required(CONF_HOST): str, + vol.Optional(CONF_PASSWORD): str, + } + ), + {CONF_HOST: self._host}, + ), + errors=errors, + ) + + async def async_step_zeroconf( + self, discovery_info: ZeroconfServiceInfo + ) -> ConfigFlowResult: + """Handle zeroconf discovery.""" + + # id is in the format 'slide_000000000000' + self._mac = format_mac(str(discovery_info.properties.get("id"))[6:]) + + await self.async_set_unique_id(self._mac) + + self._abort_if_unique_id_configured( + {CONF_HOST: discovery_info.host}, reload_on_update=True + ) + + errors = {} + if errors := await self.async_test_connection( + { + CONF_HOST: self._host, + } + ): + return self.async_abort( + reason="discovery_connection_failed", + description_placeholders={ + "error": errors["base"], + }, + ) + + self._host = discovery_info.host + + return await self.async_step_zeroconf_confirm() + + async def async_step_zeroconf_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm discovery.""" + + if user_input is not None: + user_input |= { + CONF_HOST: self._host, + CONF_API_VERSION: 2, + CONF_MAC: format_mac(self._mac), + } + return self.async_create_entry( + title=user_input[CONF_HOST], + data=user_input, + options={CONF_INVERT_POSITION: False}, + ) + + self._set_confirm_only() + return self.async_show_form( + step_id="zeroconf_confirm", + description_placeholders={ + "host": self._host, + }, + ) diff --git a/homeassistant/components/slide_local/const.py b/homeassistant/components/slide_local/const.py new file mode 100644 index 00000000000..9dc6d4ac925 --- /dev/null +++ 
b/homeassistant/components/slide_local/const.py @@ -0,0 +1,13 @@ +"""Define constants for the Slide component.""" + +API_LOCAL = "api_local" +ATTR_TOUCHGO = "touchgo" +CONF_INVERT_POSITION = "invert_position" +CONF_VERIFY_SSL = "verify_ssl" +DOMAIN = "slide_local" +SLIDES = "slides" +SLIDES_LOCAL = "slides_local" +DEFAULT_OFFSET = 0.15 +DEFAULT_RETRY = 120 +SERVICE_CALIBRATE = "calibrate" +SERVICE_TOUCHGO = "touchgo" diff --git a/homeassistant/components/slide_local/coordinator.py b/homeassistant/components/slide_local/coordinator.py new file mode 100644 index 00000000000..c7542a4b813 --- /dev/null +++ b/homeassistant/components/slide_local/coordinator.py @@ -0,0 +1,112 @@ +"""DataUpdateCoordinator for slide_local integration.""" + +from __future__ import annotations + +from datetime import timedelta +import logging +from typing import TYPE_CHECKING, Any + +from goslideapi.goslideapi import ( + AuthenticationFailed, + ClientConnectionError, + ClientTimeoutError, + DigestAuthCalcError, + GoSlideLocal as SlideLocalApi, +) + +from homeassistant.const import ( + CONF_API_VERSION, + CONF_HOST, + CONF_MAC, + CONF_PASSWORD, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DEFAULT_OFFSET, DOMAIN + +_LOGGER = logging.getLogger(__name__) + +if TYPE_CHECKING: + from . import SlideConfigEntry + + +class SlideCoordinator(DataUpdateCoordinator[dict[str, Any]]): + """Get and update the latest data.""" + + def __init__(self, hass: HomeAssistant, entry: SlideConfigEntry) -> None: + """Initialize the data object.""" + super().__init__( + hass, _LOGGER, name="Slide", update_interval=timedelta(seconds=15) + ) + self.slide = SlideLocalApi() + self.api_version = entry.data[CONF_API_VERSION] + self.mac = entry.data[CONF_MAC] + self.host = entry.data[CONF_HOST] + self.password = entry.data[CONF_PASSWORD] + + async def _async_setup(self) -> None: + """Do initialization logic for Slide coordinator.""" + _LOGGER.debug("Initializing Slide coordinator") + + await self.slide.slide_add( + self.host, + self.password, + self.api_version, + ) + + _LOGGER.debug("Slide coordinator initialized") + + async def _async_update_data(self) -> dict[str, Any]: + """Update the data from the Slide device.""" + _LOGGER.debug("Start data update") + + try: + data = await self.slide.slide_info(self.host) + except ( + ClientConnectionError, + AuthenticationFailed, + ClientTimeoutError, + DigestAuthCalcError, + ) as ex: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + ) from ex + + if data is None: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + ) + + if "pos" in data: + if self.data is None: + oldpos = None + else: + oldpos = self.data.get("pos") + + data["pos"] = max(0, min(1, data["pos"])) + + if oldpos is None or oldpos == data["pos"]: + data["state"] = ( + STATE_CLOSED if data["pos"] > (1 - DEFAULT_OFFSET) else STATE_OPEN + ) + elif oldpos < data["pos"]: + data["state"] = ( + STATE_CLOSED + if data["pos"] >= (1 - DEFAULT_OFFSET) + else STATE_CLOSING + ) + else: + data["state"] = ( + STATE_OPEN if data["pos"] <= DEFAULT_OFFSET else STATE_OPENING + ) + + _LOGGER.debug("Data successfully updated: %s", data) + + return data diff --git a/homeassistant/components/slide_local/cover.py b/homeassistant/components/slide_local/cover.py new file mode 100644 index 00000000000..1bf026746c6 --- 
/dev/null +++ b/homeassistant/components/slide_local/cover.py @@ -0,0 +1,113 @@ +"""Support for Slide covers.""" + +from __future__ import annotations + +import logging +from typing import Any + +from homeassistant.components.cover import ATTR_POSITION, CoverDeviceClass, CoverEntity +from homeassistant.const import STATE_CLOSED, STATE_CLOSING, STATE_OPENING +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import SlideConfigEntry +from .const import CONF_INVERT_POSITION, DEFAULT_OFFSET +from .coordinator import SlideCoordinator +from .entity import SlideEntity + +_LOGGER = logging.getLogger(__name__) + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SlideConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up cover(s) for Slide platform.""" + + coordinator = entry.runtime_data + + async_add_entities( + [ + SlideCoverLocal( + coordinator, + entry, + ) + ] + ) + + +class SlideCoverLocal(SlideEntity, CoverEntity): + """Representation of a Slide Local API cover.""" + + _attr_assumed_state = True + _attr_device_class = CoverDeviceClass.CURTAIN + + def __init__( + self, + coordinator: SlideCoordinator, + entry: SlideConfigEntry, + ) -> None: + """Initialize the cover.""" + super().__init__(coordinator) + + self._attr_name = None + self._invert = entry.options[CONF_INVERT_POSITION] + self._attr_unique_id = coordinator.data["mac"] + + @property + def is_opening(self) -> bool: + """Return if the cover is opening or not.""" + return self.coordinator.data["state"] == STATE_OPENING + + @property + def is_closing(self) -> bool: + """Return if the cover is closing or not.""" + return self.coordinator.data["state"] == STATE_CLOSING + + @property + def is_closed(self) -> bool: + """Return None if status is unknown, True if closed, else False.""" + return self.coordinator.data["state"] == STATE_CLOSED + + @property + def current_cover_position(self) -> int | None: + """Return the current position of cover shutter.""" + pos = self.coordinator.data["pos"] + if pos is not None: + if (1 - pos) <= DEFAULT_OFFSET or pos <= DEFAULT_OFFSET: + pos = round(pos) + if not self._invert: + pos = 1 - pos + pos = int(pos * 100) + return pos + + async def async_open_cover(self, **kwargs: Any) -> None: + """Open the cover.""" + self.coordinator.data["state"] = STATE_OPENING + await self.coordinator.slide.slide_open(self.coordinator.host) + + async def async_close_cover(self, **kwargs: Any) -> None: + """Close the cover.""" + self.coordinator.data["state"] = STATE_CLOSING + await self.coordinator.slide.slide_close(self.coordinator.host) + + async def async_stop_cover(self, **kwargs: Any) -> None: + """Stop the cover.""" + await self.coordinator.slide.slide_stop(self.coordinator.host) + + async def async_set_cover_position(self, **kwargs: Any) -> None: + """Move the cover to a specific position.""" + position = kwargs[ATTR_POSITION] / 100 + if not self._invert: + position = 1 - position + + if self.coordinator.data["pos"] is not None: + if position > self.coordinator.data["pos"]: + self.coordinator.data["state"] = STATE_CLOSING + else: + self.coordinator.data["state"] = STATE_OPENING + + await self.coordinator.slide.slide_set_position(self.coordinator.host, position) diff --git a/homeassistant/components/slide_local/entity.py b/homeassistant/components/slide_local/entity.py new file mode 100644 index 00000000000..c1dbc101e6f --- /dev/null +++ 
b/homeassistant/components/slide_local/entity.py @@ -0,0 +1,29 @@ +"""Entities for slide_local integration.""" + +from homeassistant.const import CONF_MAC +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .coordinator import SlideCoordinator + + +class SlideEntity(CoordinatorEntity[SlideCoordinator]): + """Base class of a Slide local API cover.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: SlideCoordinator, + ) -> None: + """Initialize the Slide device.""" + super().__init__(coordinator) + + self._attr_device_info = DeviceInfo( + manufacturer="Innovation in Motion", + connections={(CONF_MAC, coordinator.data["mac"])}, + name=coordinator.data["device_name"], + sw_version=coordinator.api_version, + serial_number=coordinator.data["mac"], + configuration_url=f"http://{coordinator.host}", + ) diff --git a/homeassistant/components/slide_local/manifest.json b/homeassistant/components/slide_local/manifest.json new file mode 100644 index 00000000000..42c74b2c308 --- /dev/null +++ b/homeassistant/components/slide_local/manifest.json @@ -0,0 +1,17 @@ +{ + "domain": "slide_local", + "name": "Slide Local", + "codeowners": ["@dontinelli"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/slide_local", + "integration_type": "device", + "iot_class": "local_polling", + "quality_scale": "bronze", + "requirements": ["goslide-api==0.7.0"], + "zeroconf": [ + { + "type": "_http._tcp.local.", + "name": "slide*" + } + ] +} diff --git a/homeassistant/components/slide_local/quality_scale.yaml b/homeassistant/components/slide_local/quality_scale.yaml new file mode 100644 index 00000000000..048a428f236 --- /dev/null +++ b/homeassistant/components/slide_local/quality_scale.yaml @@ -0,0 +1,66 @@ +rules: + # Bronze + config-flow: done + test-before-configure: done + unique-config-entry: done + config-flow-test-coverage: done + runtime-data: done + test-before-setup: done + appropriate-polling: done + entity-unique-id: done + has-entity-name: done + entity-event-setup: + status: exempt + comment: No explicit event subscriptions. + dependency-transparency: done + action-setup: done + common-modules: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + docs-actions: done + brands: done + + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: + status: exempt + comment: No custom action. + reauthentication-flow: todo + parallel-updates: done + test-coverage: todo + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: todo + + # Gold + entity-translations: todo + entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: done + discovery: done + stale-devices: todo + diagnostics: todo + exception-translations: done + icon-translations: todo + reconfiguration-flow: todo + dynamic-devices: todo + discovery-update-info: todo + repair-issues: todo + docs-use-cases: done + docs-supported-devices: done + docs-supported-functions: done + docs-data-update: done + docs-known-limitations: done + docs-troubleshooting: + status: exempt + comment: | + This integration doesn't have known issues that could be resolved by the user. 
+ docs-examples: done + # Platinum + async-dependency: done + inject-websession: todo + strict-typing: todo diff --git a/homeassistant/components/slide_local/strings.json b/homeassistant/components/slide_local/strings.json new file mode 100644 index 00000000000..38090c7e62d --- /dev/null +++ b/homeassistant/components/slide_local/strings.json @@ -0,0 +1,35 @@ +{ + "config": { + "step": { + "user": { + "description": "Provide information to connect to the Slide device", + "data": { + "host": "[%key:common::config_flow::data::host%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "host": "The hostname or IP address of your local Slide", + "password": "The device code of your Slide (inside of the Slide or in the box, length is 8 characters). If your Slide runs firmware version 2 this is optional, as it is not used by the local API." + } + }, + "zeroconf_confirm": { + "title": "Confirm setup for Slide", + "description": "Do you want to setup {host}?" + } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "discovery_connection_failed": "The setup of the discovered device failed with the following error: {error}. Please try to set it up manually." + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + } + }, + "exceptions": { + "update_error": { + "message": "Error while updating data from the API." + } + } +} diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index a3858fd176f..b074ff714f6 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -545,6 +545,7 @@ FLOWS = { "skybell", "slack", "sleepiq", + "slide_local", "slimproto", "sma", "smappee", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 5128578b606..fcd974534af 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -5660,9 +5660,20 @@ }, "slide": { "name": "Slide", - "integration_type": "hub", - "config_flow": false, - "iot_class": "cloud_polling" + "integrations": { + "slide": { + "integration_type": "hub", + "config_flow": false, + "iot_class": "cloud_polling", + "name": "Slide" + }, + "slide_local": { + "integration_type": "device", + "config_flow": true, + "iot_class": "local_polling", + "name": "Slide Local" + } + } }, "slimproto": { "name": "SlimProto (Squeezebox players)", diff --git a/homeassistant/generated/zeroconf.py b/homeassistant/generated/zeroconf.py index 9bfff93cc2f..b04e6ad6f52 100644 --- a/homeassistant/generated/zeroconf.py +++ b/homeassistant/generated/zeroconf.py @@ -562,6 +562,10 @@ ZEROCONF = { "domain": "shelly", "name": "shelly*", }, + { + "domain": "slide_local", + "name": "slide*", + }, { "domain": "synology_dsm", "properties": { diff --git a/requirements_all.txt b/requirements_all.txt index c361ffec5a8..4ee02e13695 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1028,6 +1028,7 @@ google-photos-library-api==0.12.1 googlemaps==2.5.1 # homeassistant.components.slide +# homeassistant.components.slide_local goslide-api==0.7.0 # homeassistant.components.tailwind diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 1c918cb2f1c..f7faaa3ae0d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ 
-877,6 +877,10 @@ google-photos-library-api==0.12.1 # homeassistant.components.google_travel_time googlemaps==2.5.1 +# homeassistant.components.slide +# homeassistant.components.slide_local +goslide-api==0.7.0 + # homeassistant.components.tailwind gotailwind==0.3.0 diff --git a/tests/components/slide_local/__init__.py b/tests/components/slide_local/__init__.py new file mode 100644 index 00000000000..cd7bd6cb6d1 --- /dev/null +++ b/tests/components/slide_local/__init__.py @@ -0,0 +1,21 @@ +"""Tests for the slide_local integration.""" + +from unittest.mock import patch + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_platform( + hass: HomeAssistant, config_entry: MockConfigEntry, platforms: list[Platform] +) -> MockConfigEntry: + """Set up the slide local integration.""" + config_entry.add_to_hass(hass) + + with patch("homeassistant.components.slide_local.PLATFORMS", platforms): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/slide_local/conftest.py b/tests/components/slide_local/conftest.py new file mode 100644 index 00000000000..0d70d1989e7 --- /dev/null +++ b/tests/components/slide_local/conftest.py @@ -0,0 +1,63 @@ +"""Test fixtures for Slide local.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.slide_local.const import CONF_INVERT_POSITION, DOMAIN +from homeassistant.const import CONF_API_VERSION, CONF_HOST + +from .const import HOST, SLIDE_INFO_DATA + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="slide", + data={ + CONF_HOST: HOST, + CONF_API_VERSION: 2, + }, + options={ + CONF_INVERT_POSITION: False, + }, + minor_version=1, + unique_id="12:34:56:78:90:ab", + entry_id="ce5f5431554d101905d31797e1232da8", + ) + + +@pytest.fixture +def mock_slide_api(): + """Build a fixture for the SlideLocalApi that connects successfully and returns one device.""" + + mock_slide_local_api = AsyncMock() + mock_slide_local_api.slide_info.return_value = SLIDE_INFO_DATA + + with ( + patch( + "homeassistant.components.slide_local.SlideLocalApi", + autospec=True, + return_value=mock_slide_local_api, + ), + patch( + "homeassistant.components.slide_local.config_flow.SlideLocalApi", + autospec=True, + return_value=mock_slide_local_api, + ), + ): + yield mock_slide_local_api + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.slide_local.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry diff --git a/tests/components/slide_local/const.py b/tests/components/slide_local/const.py new file mode 100644 index 00000000000..edf45753407 --- /dev/null +++ b/tests/components/slide_local/const.py @@ -0,0 +1,8 @@ +"""Common const used across tests for slide_local.""" + +from homeassistant.components.slide_local.const import DOMAIN + +from tests.common import load_json_object_fixture + +HOST = "127.0.0.2" +SLIDE_INFO_DATA = load_json_object_fixture("slide_1.json", DOMAIN) diff --git a/tests/components/slide_local/fixtures/slide_1.json b/tests/components/slide_local/fixtures/slide_1.json new file mode 100644 index 00000000000..e8c3c85a324 --- /dev/null +++ 
b/tests/components/slide_local/fixtures/slide_1.json @@ -0,0 +1,11 @@ +{ + "slide_id": "slide_300000000000", + "mac": "300000000000", + "board_rev": 1, + "device_name": "slide bedroom", + "zone_name": "bedroom", + "curtain_type": 0, + "calib_time": 10239, + "pos": 0.0, + "touch_go": true +} diff --git a/tests/components/slide_local/test_config_flow.py b/tests/components/slide_local/test_config_flow.py new file mode 100644 index 00000000000..35aa99a90d7 --- /dev/null +++ b/tests/components/slide_local/test_config_flow.py @@ -0,0 +1,373 @@ +"""Test the slide_local config flow.""" + +from ipaddress import ip_address +from unittest.mock import AsyncMock + +from goslideapi.goslideapi import ( + AuthenticationFailed, + ClientConnectionError, + ClientTimeoutError, + DigestAuthCalcError, +) +import pytest + +from homeassistant.components.slide_local.const import CONF_INVERT_POSITION, DOMAIN +from homeassistant.components.zeroconf import ZeroconfServiceInfo +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.const import CONF_API_VERSION, CONF_HOST, CONF_PASSWORD +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .const import HOST, SLIDE_INFO_DATA + +from tests.common import MockConfigEntry + +MOCK_ZEROCONF_DATA = ZeroconfServiceInfo( + ip_address=ip_address("127.0.0.2"), + ip_addresses=[ip_address("127.0.0.2")], + hostname="Slide-1234567890AB.local.", + name="Slide-1234567890AB._http._tcp.local.", + port=80, + properties={ + "id": "slide-1234567890AB", + "arch": "esp32", + "app": "slide", + "fw_version": "2.0.0-1683059251", + "fw_id": "20230502-202745", + }, + type="mock_type", +) + + +async def test_user( + hass: HomeAssistant, mock_slide_api: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test we get the form.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == HOST + assert result2["data"][CONF_HOST] == HOST + assert result2["data"][CONF_PASSWORD] == "pwd" + assert result2["data"][CONF_API_VERSION] == 2 + assert result2["result"].unique_id == "30:00:00:00:00:00" + assert not result2["options"][CONF_INVERT_POSITION] + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_user_api_1( + hass: HomeAssistant, + mock_slide_api: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test we get the form.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + mock_slide_api.slide_info.side_effect = [None, SLIDE_INFO_DATA] + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == HOST + assert result2["data"][CONF_HOST] == HOST + assert result2["data"][CONF_PASSWORD] == "pwd" + assert result2["data"][CONF_API_VERSION] == 1 + assert result2["result"].unique_id == "30:00:00:00:00:00" + assert not result2["options"][CONF_INVERT_POSITION] + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_user_api_error( + hass: 
HomeAssistant, + mock_slide_api: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test we get the form.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + mock_slide_api.slide_info.side_effect = [None, None] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"]["base"] == "unknown" + + mock_slide_api.slide_info.side_effect = [None, SLIDE_INFO_DATA] + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == HOST + assert result2["data"][CONF_HOST] == HOST + assert result2["data"][CONF_PASSWORD] == "pwd" + assert result2["data"][CONF_API_VERSION] == 1 + assert result2["result"].unique_id == "30:00:00:00:00:00" + assert not result2["options"][CONF_INVERT_POSITION] + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (ClientConnectionError, "cannot_connect"), + (ClientTimeoutError, "cannot_connect"), + (AuthenticationFailed, "invalid_auth"), + (DigestAuthCalcError, "invalid_auth"), + (Exception, "unknown"), + ], +) +async def test_api_1_exceptions( + hass: HomeAssistant, + exception: Exception, + error: str, + mock_slide_api: AsyncMock, +) -> None: + """Test we can handle Form exceptions for api 1.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + mock_slide_api.slide_info.side_effect = [None, exception] + + # tests with connection error + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"]["base"] == error + + # tests with all provided + mock_slide_api.slide_info.side_effect = [None, SLIDE_INFO_DATA] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (ClientConnectionError, "cannot_connect"), + (ClientTimeoutError, "cannot_connect"), + (AuthenticationFailed, "invalid_auth"), + (DigestAuthCalcError, "invalid_auth"), + (Exception, "unknown"), + ], +) +async def test_api_2_exceptions( + hass: HomeAssistant, + exception: Exception, + error: str, + mock_slide_api: AsyncMock, +) -> None: + """Test we can handle Form exceptions for api 2.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + mock_slide_api.slide_info.side_effect = exception + + # tests with connection error + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"]["base"] == error + + # tests with all provided + 
mock_slide_api.slide_info.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_abort_if_already_setup( + hass: HomeAssistant, + mock_slide_api: AsyncMock, +) -> None: + """Test we abort if the device is already setup.""" + + MockConfigEntry(domain=DOMAIN, unique_id="30:00:00:00:00:00").add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_zeroconf( + hass: HomeAssistant, mock_slide_api: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test starting a flow from discovery.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_ZEROCONF}, data=MOCK_ZEROCONF_DATA + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "zeroconf_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "127.0.0.2" + assert result["data"][CONF_HOST] == "127.0.0.2" + assert not result["options"][CONF_INVERT_POSITION] + assert result["result"].unique_id == "12:34:56:78:90:ab" + + +async def test_zeroconf_duplicate_entry( + hass: HomeAssistant, mock_slide_api: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test starting a flow from discovery.""" + + MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: HOST}, unique_id="12:34:56:78:90:ab" + ).add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_ZEROCONF}, data=MOCK_ZEROCONF_DATA + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + entries = hass.config_entries.async_entries(DOMAIN) + assert entries[0].data[CONF_HOST] == HOST + + +async def test_zeroconf_update_duplicate_entry( + hass: HomeAssistant, mock_slide_api: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test updating an existing entry from discovery.""" + + MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.3"}, unique_id="12:34:56:78:90:ab" + ).add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_ZEROCONF}, data=MOCK_ZEROCONF_DATA + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + entries = hass.config_entries.async_entries(DOMAIN) + assert entries[0].data[CONF_HOST] == HOST + + +@pytest.mark.parametrize( + ("exception"), + [ + (ClientConnectionError), + (ClientTimeoutError), + (AuthenticationFailed), + (DigestAuthCalcError), + (Exception), + ], +) +async def test_zeroconf_connection_error( + hass: HomeAssistant, + exception: Exception, + mock_slide_api: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test starting a flow from discovery.""" + + MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "slide_host"}, unique_id="12:34:56:78:90:cd" + ).add_to_hass(hass) + + mock_slide_api.slide_info.side_effect = exception + + result = await 
hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_ZEROCONF}, data=MOCK_ZEROCONF_DATA + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "discovery_connection_failed" From 55fa717f100e96626e077a61c874512a98b4dc44 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:18:27 +0100 Subject: [PATCH 0581/1198] Migrate flux_led light tests to use Kelvin (#133009) --- tests/components/flux_led/test_light.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/tests/components/flux_led/test_light.py b/tests/components/flux_led/test_light.py index c12776eb552..a881bc2ea27 100644 --- a/tests/components/flux_led/test_light.py +++ b/tests/components/flux_led/test_light.py @@ -41,7 +41,7 @@ from homeassistant.components.flux_led.light import ( from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_EFFECT_LIST, ATTR_HS_COLOR, @@ -777,12 +777,12 @@ async def test_rgb_cct_light(hass: HomeAssistant) -> None: assert attributes[ATTR_BRIGHTNESS] == 128 assert attributes[ATTR_COLOR_MODE] == "color_temp" assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp", "rgb"] - assert attributes[ATTR_COLOR_TEMP] == 200 + assert attributes[ATTR_COLOR_TEMP_KELVIN] == 5000 await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 370}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 2702}, blocking=True, ) bulb.async_set_white_temp.assert_called_with(2702, 128) @@ -1003,7 +1003,7 @@ async def test_rgbw_light_warm_white(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 154}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 6493}, blocking=True, ) bulb.async_set_white_temp.assert_called_with(6493, 255) @@ -1012,7 +1012,7 @@ async def test_rgbw_light_warm_white(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 154, ATTR_BRIGHTNESS: 255}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 6493, ATTR_BRIGHTNESS: 255}, blocking=True, ) bulb.async_set_white_temp.assert_called_with(6493, 255) @@ -1021,7 +1021,7 @@ async def test_rgbw_light_warm_white(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 290}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 3448}, blocking=True, ) bulb.async_set_white_temp.assert_called_with(3448, 255) @@ -1241,7 +1241,7 @@ async def test_rgbcw_light(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 154}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 6493}, blocking=True, ) bulb.async_set_white_temp.assert_called_with(6493, 255) @@ -1250,7 +1250,7 @@ async def test_rgbcw_light(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 154, ATTR_BRIGHTNESS: 255}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 6493, ATTR_BRIGHTNESS: 255}, blocking=True, ) bulb.async_set_white_temp.assert_called_with(6493, 255) @@ -1259,7 +1259,7 @@ async def test_rgbcw_light(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 
290}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 3448}, blocking=True, ) bulb.async_set_white_temp.assert_called_with(3448, 255) @@ -1316,7 +1316,7 @@ async def test_rgbcw_light(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 170}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 5882}, blocking=True, ) bulb.async_set_white_temp.assert_called_with(5882, MIN_CCT_BRIGHTNESS) From 56db5368834da5c05da2699a2bae68d27fc0fac8 Mon Sep 17 00:00:00 2001 From: Cyrill Raccaud Date: Thu, 12 Dec 2024 20:23:14 +0100 Subject: [PATCH 0582/1198] Add Cookidoo integration (#129800) --- .strict-typing | 1 + CODEOWNERS | 2 + homeassistant/components/cookidoo/__init__.py | 49 +++ .../components/cookidoo/config_flow.py | 167 ++++++++++ homeassistant/components/cookidoo/const.py | 3 + .../components/cookidoo/coordinator.py | 101 ++++++ homeassistant/components/cookidoo/entity.py | 30 ++ homeassistant/components/cookidoo/icons.json | 12 + .../components/cookidoo/manifest.json | 11 + .../components/cookidoo/quality_scale.yaml | 90 ++++++ .../components/cookidoo/strings.json | 68 ++++ homeassistant/components/cookidoo/todo.py | 185 +++++++++++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 6 + mypy.ini | 10 + requirements_all.txt | 3 + requirements_test_all.txt | 3 + tests/components/cookidoo/__init__.py | 15 + tests/components/cookidoo/conftest.py | 76 +++++ .../cookidoo/fixtures/additional_items.json | 9 + .../cookidoo/fixtures/ingredient_items.json | 10 + .../cookidoo/snapshots/test_todo.ambr | 95 ++++++ tests/components/cookidoo/test_config_flow.py | 182 +++++++++++ tests/components/cookidoo/test_init.py | 102 ++++++ tests/components/cookidoo/test_todo.py | 292 ++++++++++++++++++ 25 files changed, 1523 insertions(+) create mode 100644 homeassistant/components/cookidoo/__init__.py create mode 100644 homeassistant/components/cookidoo/config_flow.py create mode 100644 homeassistant/components/cookidoo/const.py create mode 100644 homeassistant/components/cookidoo/coordinator.py create mode 100644 homeassistant/components/cookidoo/entity.py create mode 100644 homeassistant/components/cookidoo/icons.json create mode 100644 homeassistant/components/cookidoo/manifest.json create mode 100644 homeassistant/components/cookidoo/quality_scale.yaml create mode 100644 homeassistant/components/cookidoo/strings.json create mode 100644 homeassistant/components/cookidoo/todo.py create mode 100644 tests/components/cookidoo/__init__.py create mode 100644 tests/components/cookidoo/conftest.py create mode 100644 tests/components/cookidoo/fixtures/additional_items.json create mode 100644 tests/components/cookidoo/fixtures/ingredient_items.json create mode 100644 tests/components/cookidoo/snapshots/test_todo.ambr create mode 100644 tests/components/cookidoo/test_config_flow.py create mode 100644 tests/components/cookidoo/test_init.py create mode 100644 tests/components/cookidoo/test_todo.py diff --git a/.strict-typing b/.strict-typing index 130ae6e9393..ade5d6afb7b 100644 --- a/.strict-typing +++ b/.strict-typing @@ -137,6 +137,7 @@ homeassistant.components.co2signal.* homeassistant.components.command_line.* homeassistant.components.config.* homeassistant.components.configurator.* +homeassistant.components.cookidoo.* homeassistant.components.counter.* homeassistant.components.cover.* homeassistant.components.cpuspeed.* diff --git a/CODEOWNERS b/CODEOWNERS index 6c11f57da83..afd150ffb0c 100644 --- 
a/CODEOWNERS +++ b/CODEOWNERS @@ -284,6 +284,8 @@ build.json @home-assistant/supervisor /tests/components/control4/ @lawtancool /homeassistant/components/conversation/ @home-assistant/core @synesthesiam /tests/components/conversation/ @home-assistant/core @synesthesiam +/homeassistant/components/cookidoo/ @miaucl +/tests/components/cookidoo/ @miaucl /homeassistant/components/coolmaster/ @OnFreund /tests/components/coolmaster/ @OnFreund /homeassistant/components/counter/ @fabaff diff --git a/homeassistant/components/cookidoo/__init__.py b/homeassistant/components/cookidoo/__init__.py new file mode 100644 index 00000000000..bb78f2a569d --- /dev/null +++ b/homeassistant/components/cookidoo/__init__.py @@ -0,0 +1,49 @@ +"""The Cookidoo integration.""" + +from __future__ import annotations + +from cookidoo_api import Cookidoo, CookidooConfig, CookidooLocalizationConfig + +from homeassistant.const import ( + CONF_COUNTRY, + CONF_EMAIL, + CONF_LANGUAGE, + CONF_PASSWORD, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .coordinator import CookidooConfigEntry, CookidooDataUpdateCoordinator + +PLATFORMS: list[Platform] = [Platform.TODO] + + +async def async_setup_entry(hass: HomeAssistant, entry: CookidooConfigEntry) -> bool: + """Set up Cookidoo from a config entry.""" + + cookidoo = Cookidoo( + async_get_clientsession(hass), + CookidooConfig( + email=entry.data[CONF_EMAIL], + password=entry.data[CONF_PASSWORD], + localization=CookidooLocalizationConfig( + country_code=entry.data[CONF_COUNTRY].lower(), + language=entry.data[CONF_LANGUAGE], + ), + ), + ) + + coordinator = CookidooDataUpdateCoordinator(hass, cookidoo, entry) + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: CookidooConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/cookidoo/config_flow.py b/homeassistant/components/cookidoo/config_flow.py new file mode 100644 index 00000000000..ce7ad9fde87 --- /dev/null +++ b/homeassistant/components/cookidoo/config_flow.py @@ -0,0 +1,167 @@ +"""Config flow for Cookidoo integration.""" + +from __future__ import annotations + +from collections.abc import Mapping +import logging +from typing import Any + +from cookidoo_api import ( + Cookidoo, + CookidooAuthException, + CookidooConfig, + CookidooLocalizationConfig, + CookidooRequestException, + get_country_options, + get_localization_options, +) +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_COUNTRY, CONF_EMAIL, CONF_LANGUAGE, CONF_PASSWORD +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.selector import ( + CountrySelector, + CountrySelectorConfig, + LanguageSelector, + LanguageSelectorConfig, + TextSelector, + TextSelectorConfig, + TextSelectorType, +) + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + +AUTH_DATA_SCHEMA = { + vol.Required(CONF_EMAIL): TextSelector( + TextSelectorConfig( + type=TextSelectorType.EMAIL, + autocomplete="email", + ), + ), + vol.Required(CONF_PASSWORD): TextSelector( + TextSelectorConfig( + type=TextSelectorType.PASSWORD, + autocomplete="current-password", + ), + 
), +} + + +class CookidooConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Cookidoo.""" + + COUNTRY_DATA_SCHEMA: dict + LANGUAGE_DATA_SCHEMA: dict + + user_input: dict[str, Any] + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the user step.""" + errors: dict[str, str] = {} + + if user_input is not None and not ( + errors := await self.validate_input(user_input) + ): + self._async_abort_entries_match({CONF_EMAIL: user_input[CONF_EMAIL]}) + self.user_input = user_input + return await self.async_step_language() + await self.generate_country_schema() + return self.async_show_form( + step_id="user", + data_schema=self.add_suggested_values_to_schema( + data_schema=vol.Schema( + {**AUTH_DATA_SCHEMA, **self.COUNTRY_DATA_SCHEMA} + ), + suggested_values=user_input, + ), + errors=errors, + ) + + async def async_step_language( + self, + language_input: dict[str, Any] | None = None, + ) -> ConfigFlowResult: + """Async language step to set up the connection.""" + errors: dict[str, str] = {} + if language_input is not None and not ( + errors := await self.validate_input(self.user_input, language_input) + ): + return self.async_create_entry( + title="Cookidoo", data={**self.user_input, **language_input} + ) + + await self.generate_language_schema() + return self.async_show_form( + step_id="language", + data_schema=vol.Schema(self.LANGUAGE_DATA_SCHEMA), + errors=errors, + ) + + async def generate_country_schema(self) -> None: + """Generate country schema.""" + self.COUNTRY_DATA_SCHEMA = { + vol.Required(CONF_COUNTRY): CountrySelector( + CountrySelectorConfig( + countries=[ + country.upper() for country in await get_country_options() + ], + ) + ) + } + + async def generate_language_schema(self) -> None: + """Generate language schema.""" + self.LANGUAGE_DATA_SCHEMA = { + vol.Required(CONF_LANGUAGE): LanguageSelector( + LanguageSelectorConfig( + languages=[ + option.language + for option in await get_localization_options( + country=self.user_input[CONF_COUNTRY].lower() + ) + ], + native_name=True, + ), + ), + } + + async def validate_input( + self, + user_input: Mapping[str, Any], + language_input: Mapping[str, Any] | None = None, + ) -> dict[str, str]: + """Input Helper.""" + + errors: dict[str, str] = {} + + session = async_get_clientsession(self.hass) + cookidoo = Cookidoo( + session, + CookidooConfig( + email=user_input[CONF_EMAIL], + password=user_input[CONF_PASSWORD], + localization=CookidooLocalizationConfig( + country_code=user_input[CONF_COUNTRY].lower(), + language=language_input[CONF_LANGUAGE] + if language_input + else "de-ch", + ), + ), + ) + try: + await cookidoo.login() + if language_input: + await cookidoo.get_additional_items() + except CookidooRequestException: + errors["base"] = "cannot_connect" + except CookidooAuthException: + errors["base"] = "invalid_auth" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + return errors diff --git a/homeassistant/components/cookidoo/const.py b/homeassistant/components/cookidoo/const.py new file mode 100644 index 00000000000..37c584404a0 --- /dev/null +++ b/homeassistant/components/cookidoo/const.py @@ -0,0 +1,3 @@ +"""Constants for the Cookidoo integration.""" + +DOMAIN = "cookidoo" diff --git a/homeassistant/components/cookidoo/coordinator.py b/homeassistant/components/cookidoo/coordinator.py new file mode 100644 index 00000000000..23a133ea16f --- /dev/null +++ b/homeassistant/components/cookidoo/coordinator.py @@ -0,0 
+1,101 @@ +"""DataUpdateCoordinator for the Cookidoo integration.""" + +from __future__ import annotations + +from dataclasses import dataclass +from datetime import timedelta +import logging + +from cookidoo_api import ( + Cookidoo, + CookidooAdditionalItem, + CookidooAuthException, + CookidooException, + CookidooIngredientItem, + CookidooRequestException, +) + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_EMAIL +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + +type CookidooConfigEntry = ConfigEntry[CookidooDataUpdateCoordinator] + + +@dataclass +class CookidooData: + """Cookidoo data type.""" + + ingredient_items: list[CookidooIngredientItem] + additional_items: list[CookidooAdditionalItem] + + +class CookidooDataUpdateCoordinator(DataUpdateCoordinator[CookidooData]): + """A Cookidoo Data Update Coordinator.""" + + config_entry: CookidooConfigEntry + + def __init__( + self, hass: HomeAssistant, cookidoo: Cookidoo, entry: CookidooConfigEntry + ) -> None: + """Initialize the Cookidoo data coordinator.""" + super().__init__( + hass, + _LOGGER, + name=DOMAIN, + update_interval=timedelta(seconds=90), + config_entry=entry, + ) + self.cookidoo = cookidoo + + async def _async_setup(self) -> None: + try: + await self.cookidoo.login() + except CookidooRequestException as e: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="setup_request_exception", + ) from e + except CookidooAuthException as e: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="setup_authentication_exception", + translation_placeholders={ + CONF_EMAIL: self.config_entry.data[CONF_EMAIL] + }, + ) from e + + async def _async_update_data(self) -> CookidooData: + try: + ingredient_items = await self.cookidoo.get_ingredient_items() + additional_items = await self.cookidoo.get_additional_items() + except CookidooAuthException: + try: + await self.cookidoo.refresh_token() + except CookidooAuthException as exc: + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="setup_authentication_exception", + translation_placeholders={ + CONF_EMAIL: self.config_entry.data[CONF_EMAIL] + }, + ) from exc + _LOGGER.debug( + "Authentication failed but re-authentication was successful, trying again later" + ) + return self.data + except CookidooException as e: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_exception", + ) from e + + return CookidooData( + ingredient_items=ingredient_items, additional_items=additional_items + ) diff --git a/homeassistant/components/cookidoo/entity.py b/homeassistant/components/cookidoo/entity.py new file mode 100644 index 00000000000..5c8f3ec8441 --- /dev/null +++ b/homeassistant/components/cookidoo/entity.py @@ -0,0 +1,30 @@ +"""Base entity for the Cookidoo integration.""" + +from __future__ import annotations + +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import CookidooDataUpdateCoordinator + + +class CookidooBaseEntity(CoordinatorEntity[CookidooDataUpdateCoordinator]): + """Cookidoo base entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: CookidooDataUpdateCoordinator, + ) -> None: 
+ """Initialize the entity.""" + super().__init__(coordinator) + + self.device_info = DeviceInfo( + entry_type=DeviceEntryType.SERVICE, + name="Cookidoo", + identifiers={(DOMAIN, coordinator.config_entry.entry_id)}, + manufacturer="Vorwerk International & Co. KmG", + model="Cookidoo - Thermomix® recipe portal", + ) diff --git a/homeassistant/components/cookidoo/icons.json b/homeassistant/components/cookidoo/icons.json new file mode 100644 index 00000000000..36c0724331a --- /dev/null +++ b/homeassistant/components/cookidoo/icons.json @@ -0,0 +1,12 @@ +{ + "entity": { + "todo": { + "ingredient_list": { + "default": "mdi:cart-plus" + }, + "additional_item_list": { + "default": "mdi:cart-plus" + } + } + } +} diff --git a/homeassistant/components/cookidoo/manifest.json b/homeassistant/components/cookidoo/manifest.json new file mode 100644 index 00000000000..7e9e86f9d9d --- /dev/null +++ b/homeassistant/components/cookidoo/manifest.json @@ -0,0 +1,11 @@ +{ + "domain": "cookidoo", + "name": "Cookidoo", + "codeowners": ["@miaucl"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/cookidoo", + "integration_type": "service", + "iot_class": "cloud_polling", + "quality_scale": "bronze", + "requirements": ["cookidoo-api==0.10.0"] +} diff --git a/homeassistant/components/cookidoo/quality_scale.yaml b/homeassistant/components/cookidoo/quality_scale.yaml new file mode 100644 index 00000000000..7b2bbb7592b --- /dev/null +++ b/homeassistant/components/cookidoo/quality_scale.yaml @@ -0,0 +1,90 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: No service actions implemented + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: No service actions implemented + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: + status: exempt + comment: No special external action required + entity-event-setup: + status: exempt + comment: No callbacks are implemented + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + config-entry-unloading: done + log-when-unavailable: + status: done + comment: Offloaded to coordinator + entity-unavailable: + status: done + comment: Offloaded to coordinator + action-exceptions: + status: done + comment: Only providing todo actions + reauthentication-flow: todo + parallel-updates: done + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: + status: exempt + comment: No options flow + + # Gold + entity-translations: done + entity-device-class: + status: exempt + comment: currently no platform with device classes + devices: done + entity-category: done + entity-disabled-by-default: + status: exempt + comment: No disabled entities implemented + discovery: + status: exempt + comment: Nothing to discover + stale-devices: + status: exempt + comment: No stale entities possible + diagnostics: todo + exception-translations: done + icon-translations: done + reconfiguration-flow: todo + dynamic-devices: + status: exempt + comment: No dynamic entities available + discovery-update-info: + status: exempt + comment: No discoverable entities implemented + repair-issues: + status: exempt + comment: No issues/repairs + docs-use-cases: todo + docs-supported-devices: todo + 
docs-supported-functions: todo + docs-data-update: done + docs-known-limitations: done + docs-troubleshooting: todo + docs-examples: todo + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/cookidoo/strings.json b/homeassistant/components/cookidoo/strings.json new file mode 100644 index 00000000000..2c518f472d5 --- /dev/null +++ b/homeassistant/components/cookidoo/strings.json @@ -0,0 +1,68 @@ +{ + "config": { + "step": { + "user": { + "title": "Login to Cookidoo", + "data": { + "email": "[%key:common::config_flow::data::email%]", + "password": "[%key:common::config_flow::data::password%]", + "country": "Country" + }, + "data_description": { + "email": "Email used to access your Cookidoo account.", + "password": "Password used to access your Cookidoo account.", + "country": "Pick your country for the Cookidoo content." + } + }, + "language": { + "title": "Login to Cookidoo", + "data": { + "language": "[%key:common::config_flow::data::language%]" + }, + "data_description": { + "language": "Pick your language for the Cookidoo content." + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + } + }, + "entity": { + "todo": { + "ingredient_list": { + "name": "Shopping list" + }, + "additional_item_list": { + "name": "Additional purchases" + } + } + }, + "exceptions": { + "todo_save_item_failed": { + "message": "Failed to save {name} to Cookidoo shopping list" + }, + "todo_update_item_failed": { + "message": "Failed to update {name} in Cookidoo shopping list" + }, + "todo_delete_item_failed": { + "message": "Failed to delete {count} item(s) from Cookidoo shopping list" + }, + "setup_request_exception": { + "message": "Failed to connect to server, try again later" + }, + "setup_authentication_exception": { + "message": "Authentication failed for {email}, check your email and password" + }, + "update_exception": { + "message": "Unable to connect and retrieve data from Cookidoo" + } + } +} diff --git a/homeassistant/components/cookidoo/todo.py b/homeassistant/components/cookidoo/todo.py new file mode 100644 index 00000000000..4a70dadc65a --- /dev/null +++ b/homeassistant/components/cookidoo/todo.py @@ -0,0 +1,185 @@ +"""Todo platform for the Cookidoo integration.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from cookidoo_api import ( + CookidooAdditionalItem, + CookidooException, + CookidooIngredientItem, +) + +from homeassistant.components.todo import ( + TodoItem, + TodoItemStatus, + TodoListEntity, + TodoListEntityFeature, +) + +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import DOMAIN +from .coordinator import CookidooConfigEntry, CookidooDataUpdateCoordinator +from .entity import CookidooBaseEntity + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: CookidooConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the todo list from a config
entry created in the integrations UI.""" + coordinator = config_entry.runtime_data + + async_add_entities( + [ + CookidooIngredientsTodoListEntity(coordinator), + CookidooAdditionalItemTodoListEntity(coordinator), + ] + ) + + +class CookidooIngredientsTodoListEntity(CookidooBaseEntity, TodoListEntity): + """A To-do List representation of the ingredients in the Cookidoo Shopping List.""" + + _attr_translation_key = "ingredient_list" + _attr_supported_features = TodoListEntityFeature.UPDATE_TODO_ITEM + + def __init__(self, coordinator: CookidooDataUpdateCoordinator) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self._attr_unique_id = f"{coordinator.config_entry.entry_id}_ingredients" + + @property + def todo_items(self) -> list[TodoItem]: + """Return the todo ingredients.""" + return [ + TodoItem( + uid=item.id, + summary=item.name, + description=item.description or "", + status=( + TodoItemStatus.COMPLETED + if item.is_owned + else TodoItemStatus.NEEDS_ACTION + ), + ) + for item in self.coordinator.data.ingredient_items + ] + + async def async_update_todo_item(self, item: TodoItem) -> None: + """Update an ingredient to the To-do list. + + Cookidoo ingredients can be changed in state, but not in summary or description. This is currently not possible to distinguish in home assistant and just fails silently. + """ + try: + if TYPE_CHECKING: + assert item.uid + await self.coordinator.cookidoo.edit_ingredient_items_ownership( + [ + CookidooIngredientItem( + id=item.uid, + name="", + description="", + is_owned=item.status == TodoItemStatus.COMPLETED, + ) + ] + ) + except CookidooException as e: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="todo_update_item_failed", + translation_placeholders={"name": item.summary or ""}, + ) from e + + await self.coordinator.async_refresh() + + +class CookidooAdditionalItemTodoListEntity(CookidooBaseEntity, TodoListEntity): + """A To-do List representation of the additional items in the Cookidoo Shopping List.""" + + _attr_translation_key = "additional_item_list" + _attr_supported_features = ( + TodoListEntityFeature.CREATE_TODO_ITEM + | TodoListEntityFeature.UPDATE_TODO_ITEM + | TodoListEntityFeature.DELETE_TODO_ITEM + ) + + def __init__(self, coordinator: CookidooDataUpdateCoordinator) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self._attr_unique_id = f"{coordinator.config_entry.entry_id}_additional_items" + + @property + def todo_items(self) -> list[TodoItem]: + """Return the todo items.""" + + return [ + TodoItem( + uid=item.id, + summary=item.name, + status=( + TodoItemStatus.COMPLETED + if item.is_owned + else TodoItemStatus.NEEDS_ACTION + ), + ) + for item in self.coordinator.data.additional_items + ] + + async def async_create_todo_item(self, item: TodoItem) -> None: + """Add an item to the To-do list.""" + + try: + if TYPE_CHECKING: + assert item.summary + await self.coordinator.cookidoo.add_additional_items([item.summary]) + except CookidooException as e: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="todo_save_item_failed", + translation_placeholders={"name": item.summary or ""}, + ) from e + + await self.coordinator.async_refresh() + + async def async_update_todo_item(self, item: TodoItem) -> None: + """Update an item to the To-do list.""" + + try: + if TYPE_CHECKING: + assert item.uid + assert item.summary + new_item = CookidooAdditionalItem( + id=item.uid, + name=item.summary, + is_owned=item.status == TodoItemStatus.COMPLETED, + ) + 
await self.coordinator.cookidoo.edit_additional_items_ownership([new_item]) + await self.coordinator.cookidoo.edit_additional_items([new_item]) + except CookidooException as e: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="todo_update_item_failed", + translation_placeholders={"name": item.summary or ""}, + ) from e + + await self.coordinator.async_refresh() + + async def async_delete_todo_items(self, uids: list[str]) -> None: + """Delete an item from the To-do list.""" + + try: + await self.coordinator.cookidoo.remove_additional_items(uids) + except CookidooException as e: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="todo_delete_item_failed", + translation_placeholders={"count": str(len(uids))}, + ) from e + + await self.coordinator.async_refresh() diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index b074ff714f6..930bda4e81b 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -113,6 +113,7 @@ FLOWS = { "color_extractor", "comelit", "control4", + "cookidoo", "coolmaster", "cpuspeed", "crownstone", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index fcd974534af..ecbe3f0dcbf 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -1044,6 +1044,12 @@ "config_flow": true, "iot_class": "local_polling" }, + "cookidoo": { + "name": "Cookidoo", + "integration_type": "service", + "config_flow": true, + "iot_class": "cloud_polling" + }, "coolmaster": { "name": "CoolMasterNet", "integration_type": "hub", diff --git a/mypy.ini b/mypy.ini index a0c441c44f9..2d8e0ea3f61 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1124,6 +1124,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.cookidoo.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.counter.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/requirements_all.txt b/requirements_all.txt index 4ee02e13695..8f4705e878e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -704,6 +704,9 @@ connect-box==0.3.1 # homeassistant.components.xiaomi_miio construct==2.10.68 +# homeassistant.components.cookidoo +cookidoo-api==0.10.0 + # homeassistant.components.backup # homeassistant.components.utility_meter cronsim==2.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f7faaa3ae0d..3a88a5a2d41 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -600,6 +600,9 @@ colorthief==0.2.1 # homeassistant.components.xiaomi_miio construct==2.10.68 +# homeassistant.components.cookidoo +cookidoo-api==0.10.0 + # homeassistant.components.backup # homeassistant.components.utility_meter cronsim==2.6 diff --git a/tests/components/cookidoo/__init__.py b/tests/components/cookidoo/__init__.py new file mode 100644 index 00000000000..043f627ecc6 --- /dev/null +++ b/tests/components/cookidoo/__init__.py @@ -0,0 +1,15 @@ +"""Tests for the Cookidoo integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, +) -> None: + """Mock setup of 
the cookidoo integration.""" + cookidoo_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(cookidoo_config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/cookidoo/conftest.py b/tests/components/cookidoo/conftest.py new file mode 100644 index 00000000000..68700967d35 --- /dev/null +++ b/tests/components/cookidoo/conftest.py @@ -0,0 +1,76 @@ +"""Common fixtures for the Cookidoo tests.""" + +from collections.abc import Generator +from typing import cast +from unittest.mock import AsyncMock, patch + +from cookidoo_api import ( + CookidooAdditionalItem, + CookidooAuthResponse, + CookidooIngredientItem, +) +import pytest + +from homeassistant.components.cookidoo.const import DOMAIN +from homeassistant.const import CONF_COUNTRY, CONF_EMAIL, CONF_LANGUAGE, CONF_PASSWORD + +from tests.common import MockConfigEntry, load_json_object_fixture + +EMAIL = "test-email" +PASSWORD = "test-password" +COUNTRY = "CH" +LANGUAGE = "de-CH" + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.cookidoo.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_cookidoo_client() -> Generator[AsyncMock]: + """Mock a Cookidoo client.""" + with ( + patch( + "homeassistant.components.cookidoo.Cookidoo", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.cookidoo.config_flow.Cookidoo", + new=mock_client, + ), + ): + client = mock_client.return_value + client.login.return_value = cast(CookidooAuthResponse, {"name": "Cookidoo"}) + client.get_ingredient_items.return_value = [ + CookidooIngredientItem(**item) + for item in load_json_object_fixture("ingredient_items.json", DOMAIN)[ + "data" + ] + ] + client.get_additional_items.return_value = [ + CookidooAdditionalItem(**item) + for item in load_json_object_fixture("additional_items.json", DOMAIN)[ + "data" + ] + ] + yield client + + +@pytest.fixture(name="cookidoo_config_entry") +def mock_cookidoo_config_entry() -> MockConfigEntry: + """Mock cookidoo configuration entry.""" + return MockConfigEntry( + domain=DOMAIN, + data={ + CONF_EMAIL: EMAIL, + CONF_PASSWORD: PASSWORD, + CONF_COUNTRY: COUNTRY, + CONF_LANGUAGE: LANGUAGE, + }, + entry_id="01JBVVVJ87F6G5V0QJX6HBC94T", + ) diff --git a/tests/components/cookidoo/fixtures/additional_items.json b/tests/components/cookidoo/fixtures/additional_items.json new file mode 100644 index 00000000000..97cd206f6ad --- /dev/null +++ b/tests/components/cookidoo/fixtures/additional_items.json @@ -0,0 +1,9 @@ +{ + "data": [ + { + "id": "unique_id_tomaten", + "name": "Tomaten", + "is_owned": false + } + ] +} diff --git a/tests/components/cookidoo/fixtures/ingredient_items.json b/tests/components/cookidoo/fixtures/ingredient_items.json new file mode 100644 index 00000000000..7fbeb90e91a --- /dev/null +++ b/tests/components/cookidoo/fixtures/ingredient_items.json @@ -0,0 +1,10 @@ +{ + "data": [ + { + "id": "unique_id_mehl", + "name": "Mehl", + "description": "200 g", + "is_owned": false + } + ] +} diff --git a/tests/components/cookidoo/snapshots/test_todo.ambr b/tests/components/cookidoo/snapshots/test_todo.ambr new file mode 100644 index 00000000000..965cbb0adde --- /dev/null +++ b/tests/components/cookidoo/snapshots/test_todo.ambr @@ -0,0 +1,95 @@ +# serializer version: 1 +# name: test_todo[todo.cookidoo_additional_purchases-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'todo', + 'entity_category': None, + 'entity_id': 'todo.cookidoo_additional_purchases', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Additional purchases', + 'platform': 'cookidoo', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'additional_item_list', + 'unique_id': '01JBVVVJ87F6G5V0QJX6HBC94T_additional_items', + 'unit_of_measurement': None, + }) +# --- +# name: test_todo[todo.cookidoo_additional_purchases-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Cookidoo Additional purchases', + 'supported_features': , + }), + 'context': , + 'entity_id': 'todo.cookidoo_additional_purchases', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_todo[todo.cookidoo_shopping_list-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'todo', + 'entity_category': None, + 'entity_id': 'todo.cookidoo_shopping_list', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Shopping list', + 'platform': 'cookidoo', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'ingredient_list', + 'unique_id': '01JBVVVJ87F6G5V0QJX6HBC94T_ingredients', + 'unit_of_measurement': None, + }) +# --- +# name: test_todo[todo.cookidoo_shopping_list-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Cookidoo Shopping list', + 'supported_features': , + }), + 'context': , + 'entity_id': 'todo.cookidoo_shopping_list', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- diff --git a/tests/components/cookidoo/test_config_flow.py b/tests/components/cookidoo/test_config_flow.py new file mode 100644 index 00000000000..0da8afe7d07 --- /dev/null +++ b/tests/components/cookidoo/test_config_flow.py @@ -0,0 +1,182 @@ +"""Test the Cookidoo config flow.""" + +from unittest.mock import AsyncMock + +from cookidoo_api.exceptions import ( + CookidooAuthException, + CookidooException, + CookidooRequestException, +) +import pytest + +from homeassistant.components.cookidoo.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_COUNTRY, CONF_EMAIL, CONF_LANGUAGE, CONF_PASSWORD +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .conftest import COUNTRY, EMAIL, LANGUAGE, PASSWORD + +from tests.common import MockConfigEntry + +MOCK_DATA_USER_STEP = { + CONF_EMAIL: EMAIL, + CONF_PASSWORD: PASSWORD, + CONF_COUNTRY: COUNTRY, +} + +MOCK_DATA_LANGUAGE_STEP = { + CONF_LANGUAGE: LANGUAGE, +} + + +async def test_flow_user_success( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_cookidoo_client: AsyncMock +) -> None: + """Test we get the user flow and create entry with success.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert 
result["step_id"] == "user" + assert result["handler"] == "cookidoo" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_USER_STEP, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "language" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_LANGUAGE_STEP, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Cookidoo" + assert result["data"] == {**MOCK_DATA_USER_STEP, **MOCK_DATA_LANGUAGE_STEP} + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("raise_error", "text_error"), + [ + (CookidooRequestException(), "cannot_connect"), + (CookidooAuthException(), "invalid_auth"), + (CookidooException(), "unknown"), + (IndexError(), "unknown"), + ], +) +async def test_flow_user_init_data_unknown_error_and_recover_on_step_1( + hass: HomeAssistant, + mock_cookidoo_client: AsyncMock, + raise_error: Exception, + text_error: str, +) -> None: + """Test unknown errors.""" + mock_cookidoo_client.login.side_effect = raise_error + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_USER_STEP, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"]["base"] == text_error + + # Recover + mock_cookidoo_client.login.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_USER_STEP, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "language" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_LANGUAGE_STEP, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].title == "Cookidoo" + + assert result["data"] == {**MOCK_DATA_USER_STEP, **MOCK_DATA_LANGUAGE_STEP} + + +@pytest.mark.parametrize( + ("raise_error", "text_error"), + [ + (CookidooRequestException(), "cannot_connect"), + (CookidooAuthException(), "invalid_auth"), + (CookidooException(), "unknown"), + (IndexError(), "unknown"), + ], +) +async def test_flow_user_init_data_unknown_error_and_recover_on_step_2( + hass: HomeAssistant, + mock_cookidoo_client: AsyncMock, + raise_error: Exception, + text_error: str, +) -> None: + """Test unknown errors.""" + mock_cookidoo_client.get_additional_items.side_effect = raise_error + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_USER_STEP, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "language" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_LANGUAGE_STEP, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"]["base"] == text_error + + # Recover + mock_cookidoo_client.get_additional_items.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_LANGUAGE_STEP, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].title == "Cookidoo" + + assert result["data"] == {**MOCK_DATA_USER_STEP, **MOCK_DATA_LANGUAGE_STEP} + + +async def test_flow_user_init_data_already_configured( + hass: HomeAssistant, + 
mock_cookidoo_client: AsyncMock, + cookidoo_config_entry: MockConfigEntry, +) -> None: + """Test we abort user data set when entry is already configured.""" + + cookidoo_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "user"} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_USER_STEP, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/cookidoo/test_init.py b/tests/components/cookidoo/test_init.py new file mode 100644 index 00000000000..c73295bcd96 --- /dev/null +++ b/tests/components/cookidoo/test_init.py @@ -0,0 +1,102 @@ +"""Unit tests for the cookidoo integration.""" + +from unittest.mock import AsyncMock + +from cookidoo_api import CookidooAuthException, CookidooRequestException +import pytest + +from homeassistant.components.cookidoo.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("mock_cookidoo_client") +async def test_load_unload( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, +) -> None: + """Test loading and unloading of the config entry.""" + await setup_integration(hass, cookidoo_config_entry) + + entries = hass.config_entries.async_entries(DOMAIN) + assert len(entries) == 1 + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + assert await hass.config_entries.async_unload(cookidoo_config_entry.entry_id) + assert cookidoo_config_entry.state is ConfigEntryState.NOT_LOADED + + +@pytest.mark.parametrize( + ("exception", "status"), + [ + (CookidooRequestException, ConfigEntryState.SETUP_RETRY), + (CookidooAuthException, ConfigEntryState.SETUP_RETRY), + ], +) +async def test_init_failure( + hass: HomeAssistant, + mock_cookidoo_client: AsyncMock, + status: ConfigEntryState, + exception: Exception, + cookidoo_config_entry: MockConfigEntry, +) -> None: + """Test an initialization error on integration load.""" + mock_cookidoo_client.login.side_effect = exception + await setup_integration(hass, cookidoo_config_entry) + assert cookidoo_config_entry.state == status + + +@pytest.mark.parametrize( + "cookidoo_method", + [ + "get_ingredient_items", + "get_additional_items", + ], +) +async def test_config_entry_not_ready( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, + cookidoo_method: str, +) -> None: + """Test config entry not ready.""" + getattr( + mock_cookidoo_client, cookidoo_method + ).side_effect = CookidooRequestException() + cookidoo_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(cookidoo_config_entry.entry_id) + await hass.async_block_till_done() + + assert cookidoo_config_entry.state is ConfigEntryState.SETUP_RETRY + + +@pytest.mark.parametrize( + ("exception", "status"), + [ + (None, ConfigEntryState.LOADED), + (CookidooRequestException, ConfigEntryState.SETUP_RETRY), + (CookidooAuthException, ConfigEntryState.SETUP_ERROR), + ], +) +async def test_config_entry_not_ready_auth_error( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, + exception: Exception | None, + status: ConfigEntryState, +) -> None: + """Test config entry not ready from authentication error.""" + + mock_cookidoo_client.get_ingredient_items.side_effect = 
CookidooAuthException + mock_cookidoo_client.refresh_token.side_effect = exception + + cookidoo_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(cookidoo_config_entry.entry_id) + await hass.async_block_till_done() + + assert cookidoo_config_entry.state is status diff --git a/tests/components/cookidoo/test_todo.py b/tests/components/cookidoo/test_todo.py new file mode 100644 index 00000000000..0e60a86d225 --- /dev/null +++ b/tests/components/cookidoo/test_todo.py @@ -0,0 +1,292 @@ +"""Test for todo platform of the Cookidoo integration.""" + +from collections.abc import Generator +import re +from unittest.mock import AsyncMock, patch + +from cookidoo_api import ( + CookidooAdditionalItem, + CookidooIngredientItem, + CookidooRequestException, +) +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.todo import ( + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, + DOMAIN as TODO_DOMAIN, + TodoItemStatus, + TodoServices, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.fixture(autouse=True) +def todo_only() -> Generator[None]: + """Enable only the todo platform.""" + with patch( + "homeassistant.components.cookidoo.PLATFORMS", + [Platform.TODO], + ): + yield + + +@pytest.mark.usefixtures("mock_cookidoo_client") +async def test_todo( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Snapshot test states of todo platform.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform( + hass, entity_registry, snapshot, cookidoo_config_entry.entry_id + ) + + +async def test_update_ingredient( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test update ingredient item.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + service_data={ + ATTR_ITEM: "unique_id_mehl", + ATTR_STATUS: TodoItemStatus.COMPLETED, + }, + target={ATTR_ENTITY_ID: "todo.cookidoo_shopping_list"}, + blocking=True, + ) + + mock_cookidoo_client.edit_ingredient_items_ownership.assert_called_once_with( + [ + CookidooIngredientItem( + id="unique_id_mehl", + name="", + description="", + is_owned=True, + ) + ], + ) + + +async def test_update_ingredient_exception( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test update ingredient with exception.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + mock_cookidoo_client.edit_ingredient_items_ownership.side_effect = ( + CookidooRequestException + ) + with pytest.raises( + HomeAssistantError, match="Failed to update Mehl in Cookidoo shopping list" + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + service_data={ + ATTR_ITEM: "unique_id_mehl", + ATTR_STATUS: TodoItemStatus.COMPLETED, + }, + 
target={ATTR_ENTITY_ID: "todo.cookidoo_shopping_list"}, + blocking=True, + ) + + +async def test_add_additional_item( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test add additional item to list.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.ADD_ITEM, + service_data={ATTR_ITEM: "Äpfel"}, + target={ATTR_ENTITY_ID: "todo.cookidoo_additional_purchases"}, + blocking=True, + ) + + mock_cookidoo_client.add_additional_items.assert_called_once_with( + ["Äpfel"], + ) + + +async def test_add_additional_item_exception( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test add additional item to list with exception.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + mock_cookidoo_client.add_additional_items.side_effect = CookidooRequestException + with pytest.raises( + HomeAssistantError, match="Failed to save Äpfel to Cookidoo shopping list" + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.ADD_ITEM, + service_data={ATTR_ITEM: "Äpfel"}, + target={ATTR_ENTITY_ID: "todo.cookidoo_additional_purchases"}, + blocking=True, + ) + + +async def test_update_additional_item( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test update additional item.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + service_data={ + ATTR_ITEM: "unique_id_tomaten", + ATTR_RENAME: "Peperoni", + ATTR_STATUS: TodoItemStatus.COMPLETED, + }, + target={ATTR_ENTITY_ID: "todo.cookidoo_additional_purchases"}, + blocking=True, + ) + + mock_cookidoo_client.edit_additional_items_ownership.assert_called_once_with( + [ + CookidooAdditionalItem( + id="unique_id_tomaten", + name="Peperoni", + is_owned=True, + ) + ], + ) + mock_cookidoo_client.edit_additional_items.assert_called_once_with( + [ + CookidooAdditionalItem( + id="unique_id_tomaten", + name="Peperoni", + is_owned=True, + ) + ], + ) + + +async def test_update_additional_item_exception( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test update additional item with exception.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + mock_cookidoo_client.edit_additional_items_ownership.side_effect = ( + CookidooRequestException + ) + mock_cookidoo_client.edit_additional_items.side_effect = CookidooRequestException + with pytest.raises( + HomeAssistantError, match="Failed to update Peperoni in Cookidoo shopping list" + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + service_data={ + ATTR_ITEM: "unique_id_tomaten", + ATTR_RENAME: "Peperoni", + ATTR_STATUS: TodoItemStatus.COMPLETED, + }, + target={ATTR_ENTITY_ID: "todo.cookidoo_additional_purchases"}, + blocking=True, + ) + + +async def test_delete_additional_items( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test delete additional item.""" + + await setup_integration(hass, cookidoo_config_entry) 
+ + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.REMOVE_ITEM, + service_data={ATTR_ITEM: "unique_id_tomaten"}, + target={ATTR_ENTITY_ID: "todo.cookidoo_additional_purchases"}, + blocking=True, + ) + + mock_cookidoo_client.remove_additional_items.assert_called_once_with( + ["unique_id_tomaten"] + ) + + +async def test_delete_additional_items_exception( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test delete additional item.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + mock_cookidoo_client.remove_additional_items.side_effect = CookidooRequestException + with pytest.raises( + HomeAssistantError, + match=re.escape("Failed to delete 1 item(s) from Cookidoo shopping list"), + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.REMOVE_ITEM, + service_data={ATTR_ITEM: "unique_id_tomaten"}, + target={ATTR_ENTITY_ID: "todo.cookidoo_additional_purchases"}, + blocking=True, + ) From fd811c85e9e69d8f3399f9891d8b7ec628371353 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:28:08 +0100 Subject: [PATCH 0583/1198] Migrate wemo light tests to use Kelvin (#133031) --- tests/components/wemo/test_light_bridge.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/components/wemo/test_light_bridge.py b/tests/components/wemo/test_light_bridge.py index 48be2823750..4deddeaba94 100644 --- a/tests/components/wemo/test_light_bridge.py +++ b/tests/components/wemo/test_light_bridge.py @@ -11,7 +11,7 @@ from homeassistant.components.homeassistant import ( ) from homeassistant.components.light import ( ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_SUPPORTED_COLOR_MODES, DOMAIN as LIGHT_DOMAIN, ColorMode, @@ -116,7 +116,7 @@ async def test_light_update_entity( blocking=True, ) state = hass.states.get(wemo_entity.entity_id) - assert state.attributes.get(ATTR_COLOR_TEMP) == 432 + assert state.attributes.get(ATTR_COLOR_TEMP_KELVIN) == 2314 assert state.attributes.get(ATTR_SUPPORTED_COLOR_MODES) == [ColorMode.COLOR_TEMP] assert state.attributes.get(ATTR_COLOR_MODE) == ColorMode.COLOR_TEMP assert state.state == STATE_ON From f0391f4963adcb0a6b2bb2f5ea135af340a0892c Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:28:42 +0100 Subject: [PATCH 0584/1198] Migrate tradfri light tests to use Kelvin (#133030) --- tests/components/tradfri/test_light.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/components/tradfri/test_light.py b/tests/components/tradfri/test_light.py index 887b043689f..c7091e77343 100644 --- a/tests/components/tradfri/test_light.py +++ b/tests/components/tradfri/test_light.py @@ -9,10 +9,10 @@ from pytradfri.device import Device from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, - ATTR_MAX_MIREDS, - ATTR_MIN_MIREDS, + ATTR_MAX_COLOR_TEMP_KELVIN, + ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_SUPPORTED_COLOR_MODES, DOMAIN as LIGHT_DOMAIN, ColorMode, @@ -67,9 +67,9 @@ def bulb_cws() -> str: "light.test_ws", { ATTR_BRIGHTNESS: 250, - ATTR_COLOR_TEMP: 400, - ATTR_MIN_MIREDS: 250, - ATTR_MAX_MIREDS: 454, + ATTR_COLOR_TEMP_KELVIN: 2500, + ATTR_MAX_COLOR_TEMP_KELVIN: 4000, + ATTR_MIN_COLOR_TEMP_KELVIN: 
2202, ATTR_SUPPORTED_COLOR_MODES: [ColorMode.COLOR_TEMP], ATTR_COLOR_MODE: ColorMode.COLOR_TEMP, }, From de35bfce77dfe1a2c76dd4a0d2bc2a5d53e2aefb Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:29:15 +0100 Subject: [PATCH 0585/1198] Migrate yeelight light tests to use Kelvin (#133033) --- tests/components/yeelight/test_light.py | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/tests/components/yeelight/test_light.py b/tests/components/yeelight/test_light.py index f4ff82e7757..274d0a158f0 100644 --- a/tests/components/yeelight/test_light.py +++ b/tests/components/yeelight/test_light.py @@ -24,7 +24,7 @@ from yeelight.main import _MODEL_SPECS from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_BRIGHTNESS_PCT, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, @@ -107,7 +107,6 @@ from homeassistant.util.color import ( color_RGB_to_hs, color_RGB_to_xy, color_temperature_kelvin_to_mired, - color_temperature_mired_to_kelvin, ) from . import ( @@ -289,7 +288,7 @@ async def test_services(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) - # turn_on color_temp brightness = 100 - color_temp = 200 + color_temp = 5000 transition = 1 mocked_bulb.last_properties["power"] = "off" await hass.services.async_call( @@ -298,7 +297,7 @@ async def test_services(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) - { ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_BRIGHTNESS: brightness, - ATTR_COLOR_TEMP: color_temp, + ATTR_COLOR_TEMP_KELVIN: color_temp, ATTR_FLASH: FLASH_LONG, ATTR_EFFECT: EFFECT_STOP, ATTR_TRANSITION: transition, @@ -316,7 +315,7 @@ async def test_services(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) - brightness / 255 * 100, duration=transition * 1000, light_type=LightType.Main ) mocked_bulb.async_set_color_temp.assert_called_once_with( - color_temperature_mired_to_kelvin(color_temp), + color_temp, duration=transition * 1000, light_type=LightType.Main, ) @@ -327,7 +326,7 @@ async def test_services(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) - # turn_on color_temp - flash short brightness = 100 - color_temp = 200 + color_temp = 5000 transition = 1 mocked_bulb.async_start_music.reset_mock() mocked_bulb.async_set_brightness.reset_mock() @@ -342,7 +341,7 @@ async def test_services(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) - { ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_BRIGHTNESS: brightness, - ATTR_COLOR_TEMP: color_temp, + ATTR_COLOR_TEMP_KELVIN: color_temp, ATTR_FLASH: FLASH_SHORT, ATTR_EFFECT: EFFECT_STOP, ATTR_TRANSITION: transition, @@ -360,7 +359,7 @@ async def test_services(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) - brightness / 255 * 100, duration=transition * 1000, light_type=LightType.Main ) mocked_bulb.async_set_color_temp.assert_called_once_with( - color_temperature_mired_to_kelvin(color_temp), + color_temp, duration=transition * 1000, light_type=LightType.Main, ) @@ -691,7 +690,7 @@ async def test_state_already_set_avoid_ratelimit(hass: HomeAssistant) -> None: await hass.services.async_call( "light", SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_COLOR_TEMP: 250}, + {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_COLOR_TEMP_KELVIN: 4000}, blocking=True, ) assert mocked_bulb.async_set_hsv.mock_calls == [] @@ -707,7 +706,7 @@ async def test_state_already_set_avoid_ratelimit(hass: HomeAssistant) -> None: await hass.services.async_call( "light", SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_COLOR_TEMP: 
250}, + {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_COLOR_TEMP_KELVIN: 4000}, blocking=True, ) assert mocked_bulb.async_set_hsv.mock_calls == [] @@ -720,7 +719,7 @@ async def test_state_already_set_avoid_ratelimit(hass: HomeAssistant) -> None: await hass.services.async_call( "light", SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_COLOR_TEMP: 250}, + {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_COLOR_TEMP_KELVIN: 4000}, blocking=True, ) assert mocked_bulb.async_set_hsv.mock_calls == [] From e276f8ee896b422701ff8ac13c9f1c6cd040882e Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:32:39 +0100 Subject: [PATCH 0586/1198] Migrate zwave_js light tests to use Kelvin (#133034) --- tests/components/zwave_js/test_light.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/tests/components/zwave_js/test_light.py b/tests/components/zwave_js/test_light.py index 4c725c6dc29..21a6c0a8fae 100644 --- a/tests/components/zwave_js/test_light.py +++ b/tests/components/zwave_js/test_light.py @@ -7,10 +7,10 @@ from zwave_js_server.event import Event from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, - ATTR_MAX_MIREDS, - ATTR_MIN_MIREDS, + ATTR_MAX_COLOR_TEMP_KELVIN, + ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_SUPPORTED_COLOR_MODES, @@ -51,8 +51,8 @@ async def test_light( assert state assert state.state == STATE_OFF - assert state.attributes[ATTR_MIN_MIREDS] == 153 - assert state.attributes[ATTR_MAX_MIREDS] == 370 + assert state.attributes[ATTR_MAX_COLOR_TEMP_KELVIN] == 6500 + assert state.attributes[ATTR_MIN_COLOR_TEMP_KELVIN] == 2700 assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp", "hs"] @@ -130,7 +130,7 @@ async def test_light( assert state.state == STATE_ON assert state.attributes[ATTR_COLOR_MODE] == "color_temp" assert state.attributes[ATTR_BRIGHTNESS] == 255 - assert state.attributes[ATTR_COLOR_TEMP] == 370 + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 2702 assert state.attributes[ATTR_RGB_COLOR] is not None # Test turning on with same brightness @@ -256,7 +256,7 @@ async def test_light( assert state.attributes[ATTR_COLOR_MODE] == "hs" assert state.attributes[ATTR_BRIGHTNESS] == 255 assert state.attributes[ATTR_RGB_COLOR] == (255, 76, 255) - assert state.attributes[ATTR_COLOR_TEMP] is None + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] is None client.async_send_command.reset_mock() @@ -293,7 +293,7 @@ async def test_light( await hass.services.async_call( "light", "turn_on", - {"entity_id": BULB_6_MULTI_COLOR_LIGHT_ENTITY, ATTR_COLOR_TEMP: 170}, + {"entity_id": BULB_6_MULTI_COLOR_LIGHT_ENTITY, ATTR_COLOR_TEMP_KELVIN: 5881}, blocking=True, ) @@ -358,14 +358,14 @@ async def test_light( assert state.state == STATE_ON assert state.attributes[ATTR_COLOR_MODE] == "color_temp" assert state.attributes[ATTR_BRIGHTNESS] == 255 - assert state.attributes[ATTR_COLOR_TEMP] == 170 + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 5881 assert ATTR_RGB_COLOR in state.attributes # Test turning on with same color temp await hass.services.async_call( "light", "turn_on", - {"entity_id": BULB_6_MULTI_COLOR_LIGHT_ENTITY, ATTR_COLOR_TEMP: 170}, + {"entity_id": BULB_6_MULTI_COLOR_LIGHT_ENTITY, ATTR_COLOR_TEMP_KELVIN: 5881}, blocking=True, ) @@ -379,7 +379,7 @@ async def test_light( "turn_on", { "entity_id": 
BULB_6_MULTI_COLOR_LIGHT_ENTITY, - ATTR_COLOR_TEMP: 170, + ATTR_COLOR_TEMP_KELVIN: 5881, ATTR_TRANSITION: 35, }, blocking=True, From 483688dba2f93d2bbc263db13a5a5a74f7a86aac Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 20:32:59 +0100 Subject: [PATCH 0587/1198] Promote Twente Milieu quality scale to silver (#133074) --- .../components/twentemilieu/manifest.json | 1 + .../twentemilieu/quality_scale.yaml | 19 ++++++++----------- 2 files changed, 9 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/twentemilieu/manifest.json b/homeassistant/components/twentemilieu/manifest.json index 292887c6c5b..c04c5492a40 100644 --- a/homeassistant/components/twentemilieu/manifest.json +++ b/homeassistant/components/twentemilieu/manifest.json @@ -7,5 +7,6 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["twentemilieu"], + "quality_scale": "silver", "requirements": ["twentemilieu==2.2.0"] } diff --git a/homeassistant/components/twentemilieu/quality_scale.yaml b/homeassistant/components/twentemilieu/quality_scale.yaml index 3d7535a249c..42ff152cb4d 100644 --- a/homeassistant/components/twentemilieu/quality_scale.yaml +++ b/homeassistant/components/twentemilieu/quality_scale.yaml @@ -14,12 +14,9 @@ rules: status: exempt comment: | This integration does not provide additional actions. - docs-high-level-description: - status: todo - comment: | - The introduction can be improved and is missing links to the provider. + docs-high-level-description: done docs-installation-instructions: done - docs-removal-instructions: todo + docs-removal-instructions: done entity-event-setup: status: exempt comment: | @@ -51,7 +48,7 @@ rules: data), there is no need to implement parallel updates. test-coverage: done integration-owner: done - docs-installation-parameters: todo + docs-installation-parameters: done docs-configuration-parameters: status: exempt comment: | @@ -95,16 +92,16 @@ rules: status: exempt comment: | This integration doesn't have any cases where raising an issue is needed. - docs-use-cases: todo + docs-use-cases: done docs-supported-devices: status: exempt comment: | This is an service, which doesn't integrate with any devices. 
docs-supported-functions: done - docs-data-update: todo - docs-known-limitations: todo - docs-troubleshooting: todo - docs-examples: todo + docs-data-update: done + docs-known-limitations: done + docs-troubleshooting: done + docs-examples: done # Platinum async-dependency: done From 7c9992f5d34ca0931be1ce610bfa77adf5ffcd0f Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:37:32 +0100 Subject: [PATCH 0588/1198] Migrate demo light tests to use Kelvin (#133003) --- tests/components/demo/test_light.py | 27 +++++++++++++++++---------- 1 file changed, 17 insertions(+), 10 deletions(-) diff --git a/tests/components/demo/test_light.py b/tests/components/demo/test_light.py index 8fcdb8a9c2e..b39b09d9307 100644 --- a/tests/components/demo/test_light.py +++ b/tests/components/demo/test_light.py @@ -9,11 +9,10 @@ from homeassistant.components.demo import DOMAIN from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_BRIGHTNESS_PCT, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, - ATTR_KELVIN, - ATTR_MAX_MIREDS, - ATTR_MIN_MIREDS, + ATTR_MAX_COLOR_TEMP_KELVIN, + ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_XY_COLOR, DOMAIN as LIGHT_DOMAIN, @@ -79,25 +78,33 @@ async def test_state_attributes(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_EFFECT: "none", ATTR_COLOR_TEMP: 400}, + { + ATTR_ENTITY_ID: ENTITY_LIGHT, + ATTR_EFFECT: "none", + ATTR_COLOR_TEMP_KELVIN: 2500, + }, blocking=True, ) state = hass.states.get(ENTITY_LIGHT) - assert state.attributes.get(ATTR_COLOR_TEMP) == 400 - assert state.attributes.get(ATTR_MIN_MIREDS) == 153 - assert state.attributes.get(ATTR_MAX_MIREDS) == 500 + assert state.attributes.get(ATTR_COLOR_TEMP_KELVIN) == 2500 + assert state.attributes.get(ATTR_MAX_COLOR_TEMP_KELVIN) == 6535 + assert state.attributes.get(ATTR_MIN_COLOR_TEMP_KELVIN) == 2000 assert state.attributes.get(ATTR_EFFECT) == "none" await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_BRIGHTNESS_PCT: 50, ATTR_KELVIN: 3000}, + { + ATTR_ENTITY_ID: ENTITY_LIGHT, + ATTR_BRIGHTNESS_PCT: 50, + ATTR_COLOR_TEMP_KELVIN: 3000, + }, blocking=True, ) state = hass.states.get(ENTITY_LIGHT) - assert state.attributes.get(ATTR_COLOR_TEMP) == 333 + assert state.attributes.get(ATTR_COLOR_TEMP_KELVIN) == 3000 assert state.attributes.get(ATTR_BRIGHTNESS) == 128 From 708084d3005d935f64f64886ba81cf773a25bac0 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:38:13 +0100 Subject: [PATCH 0589/1198] Migrate switch_as_x light tests to use Kelvin (#133023) --- tests/components/switch_as_x/test_light.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/components/switch_as_x/test_light.py b/tests/components/switch_as_x/test_light.py index 5e48b7db965..5f724a2d7e7 100644 --- a/tests/components/switch_as_x/test_light.py +++ b/tests/components/switch_as_x/test_light.py @@ -3,7 +3,7 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_EFFECT_LIST, ATTR_HS_COLOR, @@ -57,7 +57,7 @@ async def test_default_state(hass: HomeAssistant) -> None: assert state.attributes["supported_features"] == 0 assert state.attributes.get(ATTR_BRIGHTNESS) is None assert state.attributes.get(ATTR_HS_COLOR) is None - assert state.attributes.get(ATTR_COLOR_TEMP) is None + assert 
state.attributes.get(ATTR_COLOR_TEMP_KELVIN) is None assert state.attributes.get(ATTR_EFFECT_LIST) is None assert state.attributes.get(ATTR_EFFECT) is None assert state.attributes.get(ATTR_SUPPORTED_COLOR_MODES) == [ColorMode.ONOFF] From b189bc6146b2930231eda5d67afa1519ebd22173 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:38:49 +0100 Subject: [PATCH 0590/1198] Migrate smartthings light tests to use Kelvin (#133022) --- tests/components/smartthings/test_light.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/components/smartthings/test_light.py b/tests/components/smartthings/test_light.py index 22b181a3645..b46188b5b5f 100644 --- a/tests/components/smartthings/test_light.py +++ b/tests/components/smartthings/test_light.py @@ -9,7 +9,7 @@ import pytest from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ATTR_SUPPORTED_COLOR_MODES, ATTR_TRANSITION, @@ -101,8 +101,8 @@ async def test_entity_state(hass: HomeAssistant, light_devices) -> None: assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION assert state.attributes[ATTR_BRIGHTNESS] == 255 assert ATTR_HS_COLOR not in state.attributes[ATTR_HS_COLOR] - assert isinstance(state.attributes[ATTR_COLOR_TEMP], int) - assert state.attributes[ATTR_COLOR_TEMP] == 222 + assert isinstance(state.attributes[ATTR_COLOR_TEMP_KELVIN], int) + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 4500 async def test_entity_and_device_attributes( @@ -273,7 +273,7 @@ async def test_turn_on_with_color_temp(hass: HomeAssistant, light_devices) -> No await hass.services.async_call( "light", "turn_on", - {ATTR_ENTITY_ID: "light.color_dimmer_2", ATTR_COLOR_TEMP: 300}, + {ATTR_ENTITY_ID: "light.color_dimmer_2", ATTR_COLOR_TEMP_KELVIN: 3333}, blocking=True, ) # This test schedules and update right after the call @@ -282,7 +282,7 @@ async def test_turn_on_with_color_temp(hass: HomeAssistant, light_devices) -> No state = hass.states.get("light.color_dimmer_2") assert state is not None assert state.state == "on" - assert state.attributes[ATTR_COLOR_TEMP] == 300 + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 3333 async def test_update_from_signal(hass: HomeAssistant, device_factory) -> None: From 3baa432bae94ddb635f0bd357ce3ace8596c2ea0 Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Thu, 12 Dec 2024 20:48:01 +0100 Subject: [PATCH 0591/1198] Use runtime_data in velbus (#132988) --- homeassistant/components/velbus/__init__.py | 36 +++++++++++-------- .../components/velbus/binary_sensor.py | 11 +++--- homeassistant/components/velbus/button.py | 13 +++---- homeassistant/components/velbus/climate.py | 12 ++++--- homeassistant/components/velbus/cover.py | 13 +++---- .../components/velbus/diagnostics.py | 11 +++--- homeassistant/components/velbus/light.py | 16 +++++---- .../components/velbus/quality_scale.yaml | 2 +- homeassistant/components/velbus/select.py | 13 +++---- homeassistant/components/velbus/sensor.py | 10 +++--- homeassistant/components/velbus/services.py | 32 ++++++++++++----- homeassistant/components/velbus/switch.py | 13 +++---- 12 files changed, 104 insertions(+), 78 deletions(-) diff --git a/homeassistant/components/velbus/__init__.py b/homeassistant/components/velbus/__init__.py index fec6395c890..f8426bc4130 100644 --- a/homeassistant/components/velbus/__init__.py +++ b/homeassistant/components/velbus/__init__.py @@ -2,6 +2,8 @@ from __future__ import annotations 
+import asyncio +from dataclasses import dataclass import logging import os import shutil @@ -34,6 +36,16 @@ PLATFORMS = [ CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) +type VelbusConfigEntry = ConfigEntry[VelbusData] + + +@dataclass +class VelbusData: + """Runtime data for the Velbus config entry.""" + + controller: Velbus + connect_task: asyncio.Task + async def velbus_connect_task( controller: Velbus, hass: HomeAssistant, entry_id: str @@ -67,19 +79,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: VelbusConfigEntry) -> bool: """Establish connection with velbus.""" - hass.data.setdefault(DOMAIN, {}) - controller = Velbus( entry.data[CONF_PORT], cache_dir=hass.config.path(STORAGE_DIR, f"velbuscache-{entry.entry_id}"), ) - hass.data[DOMAIN][entry.entry_id] = {} - hass.data[DOMAIN][entry.entry_id]["cntrl"] = controller - hass.data[DOMAIN][entry.entry_id]["tsk"] = hass.async_create_task( - velbus_connect_task(controller, hass, entry.entry_id) - ) + task = hass.async_create_task(velbus_connect_task(controller, hass, entry.entry_id)) + entry.runtime_data = VelbusData(controller=controller, connect_task=task) _migrate_device_identifiers(hass, entry.entry_id) @@ -88,17 +95,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: VelbusConfigEntry) -> bool: """Unload (close) the velbus connection.""" unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - await hass.data[DOMAIN][entry.entry_id]["cntrl"].stop() - hass.data[DOMAIN].pop(entry.entry_id) - if not hass.data[DOMAIN]: - hass.data.pop(DOMAIN) + await entry.runtime_data.controller.stop() return unload_ok -async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def async_remove_entry(hass: HomeAssistant, entry: VelbusConfigEntry) -> None: """Remove the velbus entry, so we also have to cleanup the cache dir.""" await hass.async_add_executor_job( shutil.rmtree, @@ -106,7 +110,9 @@ async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: ) -async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_migrate_entry( + hass: HomeAssistant, config_entry: VelbusConfigEntry +) -> bool: """Migrate old entry.""" _LOGGER.debug("Migrating from version %s", config_entry.version) cache_path = hass.config.path(STORAGE_DIR, f"velbuscache-{config_entry.entry_id}/") diff --git a/homeassistant/components/velbus/binary_sensor.py b/homeassistant/components/velbus/binary_sensor.py index 5f363c1a035..dd65ff7d50d 100644 --- a/homeassistant/components/velbus/binary_sensor.py +++ b/homeassistant/components/velbus/binary_sensor.py @@ -3,24 +3,23 @@ from velbusaio.channels import Button as VelbusButton from homeassistant.components.binary_sensor import BinarySensorEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . 
import VelbusConfigEntry from .entity import VelbusEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VelbusConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await hass.data[DOMAIN][entry.entry_id]["tsk"] - cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"] + await entry.runtime_data.connect_task async_add_entities( - VelbusBinarySensor(channel) for channel in cntrl.get_all("binary_sensor") + VelbusBinarySensor(channel) + for channel in entry.runtime_data.controller.get_all_binary_sensor() ) diff --git a/homeassistant/components/velbus/button.py b/homeassistant/components/velbus/button.py index bd5b81d67a0..2b908c188b8 100644 --- a/homeassistant/components/velbus/button.py +++ b/homeassistant/components/velbus/button.py @@ -8,24 +8,25 @@ from velbusaio.channels import ( ) from homeassistant.components.button import ButtonEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import VelbusConfigEntry from .entity import VelbusEntity, api_call async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VelbusConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await hass.data[DOMAIN][entry.entry_id]["tsk"] - cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"] - async_add_entities(VelbusButton(channel) for channel in cntrl.get_all("button")) + await entry.runtime_data.connect_task + async_add_entities( + VelbusButton(channel) + for channel in entry.runtime_data.controller.get_all_button() + ) class VelbusButton(VelbusEntity, ButtonEntity): diff --git a/homeassistant/components/velbus/climate.py b/homeassistant/components/velbus/climate.py index 18142482539..fa8391d4199 100644 --- a/homeassistant/components/velbus/climate.py +++ b/homeassistant/components/velbus/climate.py @@ -11,25 +11,27 @@ from homeassistant.components.climate import ( ClimateEntityFeature, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . 
import VelbusConfigEntry from .const import DOMAIN, PRESET_MODES from .entity import VelbusEntity, api_call async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VelbusConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await hass.data[DOMAIN][entry.entry_id]["tsk"] - cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"] - async_add_entities(VelbusClimate(channel) for channel in cntrl.get_all("climate")) + await entry.runtime_data.connect_task + async_add_entities( + VelbusClimate(channel) + for channel in entry.runtime_data.controller.get_all_climate() + ) class VelbusClimate(VelbusEntity, ClimateEntity): diff --git a/homeassistant/components/velbus/cover.py b/homeassistant/components/velbus/cover.py index 8b9d927f3d7..7850e7b1895 100644 --- a/homeassistant/components/velbus/cover.py +++ b/homeassistant/components/velbus/cover.py @@ -11,23 +11,24 @@ from homeassistant.components.cover import ( CoverEntity, CoverEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import VelbusConfigEntry from .entity import VelbusEntity, api_call async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VelbusConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await hass.data[DOMAIN][entry.entry_id]["tsk"] - cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"] - async_add_entities(VelbusCover(channel) for channel in cntrl.get_all("cover")) + await entry.runtime_data.connect_task + async_add_entities( + VelbusCover(channel) + for channel in entry.runtime_data.controller.get_all_cover() + ) class VelbusCover(VelbusEntity, CoverEntity): diff --git a/homeassistant/components/velbus/diagnostics.py b/homeassistant/components/velbus/diagnostics.py index f7e29e2f57e..75b7669edec 100644 --- a/homeassistant/components/velbus/diagnostics.py +++ b/homeassistant/components/velbus/diagnostics.py @@ -7,18 +7,17 @@ from typing import Any from velbusaio.channels import Channel as VelbusChannel from velbusaio.module import Module as VelbusModule -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntry -from .const import DOMAIN +from . 
import VelbusConfigEntry async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: VelbusConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - controller = hass.data[DOMAIN][entry.entry_id]["cntrl"] + controller = entry.runtime_data.controller data: dict[str, Any] = {"entry": entry.as_dict(), "modules": []} for module in controller.get_modules().values(): data["modules"].append(_build_module_diagnostics_info(module)) @@ -26,10 +25,10 @@ async def async_get_config_entry_diagnostics( async def async_get_device_diagnostics( - hass: HomeAssistant, entry: ConfigEntry, device: DeviceEntry + hass: HomeAssistant, entry: VelbusConfigEntry, device: DeviceEntry ) -> dict[str, Any]: """Return diagnostics for a device entry.""" - controller = hass.data[DOMAIN][entry.entry_id]["cntrl"] + controller = entry.runtime_data.controller channel = list(next(iter(device.identifiers)))[1] modules = controller.get_modules() return _build_module_diagnostics_info(modules[int(channel)]) diff --git a/homeassistant/components/velbus/light.py b/homeassistant/components/velbus/light.py index 7145576be6a..0df4f70d753 100644 --- a/homeassistant/components/velbus/light.py +++ b/homeassistant/components/velbus/light.py @@ -20,28 +20,30 @@ from homeassistant.components.light import ( LightEntity, LightEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import VelbusConfigEntry from .entity import VelbusEntity, api_call async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VelbusConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await hass.data[DOMAIN][entry.entry_id]["tsk"] - cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"] + await entry.runtime_data.connect_task entities: list[Entity] = [ - VelbusLight(channel) for channel in cntrl.get_all("light") + VelbusLight(channel) + for channel in entry.runtime_data.controller.get_all_light() ] - entities.extend(VelbusButtonLight(channel) for channel in cntrl.get_all("led")) + entities.extend( + VelbusButtonLight(channel) + for channel in entry.runtime_data.controller.get_all_led() + ) async_add_entities(entities) diff --git a/homeassistant/components/velbus/quality_scale.yaml b/homeassistant/components/velbus/quality_scale.yaml index adea896a1c6..68fe5ead781 100644 --- a/homeassistant/components/velbus/quality_scale.yaml +++ b/homeassistant/components/velbus/quality_scale.yaml @@ -23,7 +23,7 @@ rules: entity-event-setup: todo entity-unique-id: done has-entity-name: todo - runtime-data: todo + runtime-data: done test-before-configure: done test-before-setup: todo unique-config-entry: diff --git a/homeassistant/components/velbus/select.py b/homeassistant/components/velbus/select.py index 7eecb85fc47..f0ad509270c 100644 --- a/homeassistant/components/velbus/select.py +++ b/homeassistant/components/velbus/select.py @@ -3,24 +3,25 @@ from velbusaio.channels import SelectedProgram from homeassistant.components.select import SelectEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback 
-from .const import DOMAIN +from . import VelbusConfigEntry from .entity import VelbusEntity, api_call async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VelbusConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus select based on config_entry.""" - await hass.data[DOMAIN][entry.entry_id]["tsk"] - cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"] - async_add_entities(VelbusSelect(channel) for channel in cntrl.get_all("select")) + await entry.runtime_data.connect_task + async_add_entities( + VelbusSelect(channel) + for channel in entry.runtime_data.controller.get_all_select() + ) class VelbusSelect(VelbusEntity, SelectEntity): diff --git a/homeassistant/components/velbus/sensor.py b/homeassistant/components/velbus/sensor.py index b765eebcddc..598287839c1 100644 --- a/homeassistant/components/velbus/sensor.py +++ b/homeassistant/components/velbus/sensor.py @@ -9,24 +9,22 @@ from homeassistant.components.sensor import ( SensorEntity, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import VelbusConfigEntry from .entity import VelbusEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VelbusConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await hass.data[DOMAIN][entry.entry_id]["tsk"] - cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"] + await entry.runtime_data.connect_task entities = [] - for channel in cntrl.get_all("sensor"): + for channel in entry.runtime_data.controller.get_all_sensor(): entities.append(VelbusSensor(channel)) if channel.is_counter_channel(): entities.append(VelbusSensor(channel, True)) diff --git a/homeassistant/components/velbus/services.py b/homeassistant/components/velbus/services.py index 83633eb66bc..3f0b1bd6cdb 100644 --- a/homeassistant/components/velbus/services.py +++ b/homeassistant/components/velbus/services.py @@ -5,6 +5,7 @@ from __future__ import annotations from contextlib import suppress import os import shutil +from typing import TYPE_CHECKING import voluptuous as vol @@ -13,6 +14,9 @@ from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import config_validation as cv from homeassistant.helpers.storage import STORAGE_DIR +if TYPE_CHECKING: + from . 
import VelbusConfigEntry + from .const import ( CONF_INTERFACE, CONF_MEMO_TEXT, @@ -35,20 +39,32 @@ def setup_services(hass: HomeAssistant) -> None: "The interface provided is not defined as a port in a Velbus integration" ) + def get_config_entry(interface: str) -> VelbusConfigEntry | None: + for config_entry in hass.config_entries.async_entries(DOMAIN): + if "port" in config_entry.data and config_entry.data["port"] == interface: + return config_entry + return None + async def scan(call: ServiceCall) -> None: - await hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"].scan() + """Handle a scan service call.""" + entry = get_config_entry(call.data[CONF_INTERFACE]) + if entry: + await entry.runtime_data.controller.scan() async def syn_clock(call: ServiceCall) -> None: - await hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"].sync_clock() + """Handle a sync clock service call.""" + entry = get_config_entry(call.data[CONF_INTERFACE]) + if entry: + await entry.runtime_data.controller.sync_clock() async def set_memo_text(call: ServiceCall) -> None: """Handle Memo Text service call.""" - memo_text = call.data[CONF_MEMO_TEXT] - await ( - hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"] - .get_module(call.data[CONF_ADDRESS]) - .set_memo_text(memo_text.async_render()) - ) + entry = get_config_entry(call.data[CONF_INTERFACE]) + if entry: + memo_text = call.data[CONF_MEMO_TEXT] + module = entry.runtime_data.controller.get_module(call.data[CONF_ADDRESS]) + if module: + await module.set_memo_text(memo_text.async_render()) async def clear_cache(call: ServiceCall) -> None: """Handle a clear cache service call.""" diff --git a/homeassistant/components/velbus/switch.py b/homeassistant/components/velbus/switch.py index 1e6014b8d90..f3bd009d25e 100644 --- a/homeassistant/components/velbus/switch.py +++ b/homeassistant/components/velbus/switch.py @@ -5,23 +5,24 @@ from typing import Any from velbusaio.channels import Relay as VelbusRelay from homeassistant.components.switch import SwitchEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . 
import VelbusConfigEntry from .entity import VelbusEntity, api_call async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VelbusConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await hass.data[DOMAIN][entry.entry_id]["tsk"] - cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"] - async_add_entities(VelbusSwitch(channel) for channel in cntrl.get_all("switch")) + await entry.runtime_data.connect_task + async_add_entities( + VelbusSwitch(channel) + for channel in entry.runtime_data.controller.get_all_switch() + ) class VelbusSwitch(VelbusEntity, SwitchEntity): From 839f06b2dc1f39ec9785888645c8a262723f4f7b Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 21:12:11 +0100 Subject: [PATCH 0592/1198] Small improvements to the AdGuard tests (#133073) --- tests/components/adguard/__init__.py | 2 +- tests/components/adguard/test_config_flow.py | 87 ++++++++++---------- 2 files changed, 46 insertions(+), 43 deletions(-) diff --git a/tests/components/adguard/__init__.py b/tests/components/adguard/__init__.py index 318e881ef2f..4d8ae091dc5 100644 --- a/tests/components/adguard/__init__.py +++ b/tests/components/adguard/__init__.py @@ -1 +1 @@ -"""Tests for the AdGuard Home component.""" +"""Tests for the AdGuard Home integration.""" diff --git a/tests/components/adguard/test_config_flow.py b/tests/components/adguard/test_config_flow.py index 6644a4ca20f..bd0f1b0a08f 100644 --- a/tests/components/adguard/test_config_flow.py +++ b/tests/components/adguard/test_config_flow.py @@ -59,9 +59,9 @@ async def test_connection_error( ) assert result - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" - assert result.get("errors") == {"base": "cannot_connect"} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "cannot_connect"} async def test_full_flow_implementation( @@ -83,25 +83,27 @@ async def test_full_flow_implementation( ) assert result - assert result.get("flow_id") - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" + assert result["flow_id"] + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=FIXTURE_USER_INPUT ) - assert result2 - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2.get("title") == FIXTURE_USER_INPUT[CONF_HOST] + assert result + assert result["type"] is FlowResultType.CREATE_ENTRY - data = result2.get("data") - assert data - assert data[CONF_HOST] == FIXTURE_USER_INPUT[CONF_HOST] - assert data[CONF_PASSWORD] == FIXTURE_USER_INPUT[CONF_PASSWORD] - assert data[CONF_PORT] == FIXTURE_USER_INPUT[CONF_PORT] - assert data[CONF_SSL] == FIXTURE_USER_INPUT[CONF_SSL] - assert data[CONF_USERNAME] == FIXTURE_USER_INPUT[CONF_USERNAME] - assert data[CONF_VERIFY_SSL] == FIXTURE_USER_INPUT[CONF_VERIFY_SSL] + config_entry = result["result"] + assert config_entry.title == FIXTURE_USER_INPUT[CONF_HOST] + assert config_entry.data == { + CONF_HOST: FIXTURE_USER_INPUT[CONF_HOST], + CONF_PASSWORD: FIXTURE_USER_INPUT[CONF_PASSWORD], + CONF_PORT: FIXTURE_USER_INPUT[CONF_PORT], + CONF_SSL: FIXTURE_USER_INPUT[CONF_SSL], + CONF_USERNAME: FIXTURE_USER_INPUT[CONF_USERNAME], + CONF_VERIFY_SSL: FIXTURE_USER_INPUT[CONF_VERIFY_SSL], + } + assert not 
config_entry.options async def test_integration_already_exists(hass: HomeAssistant) -> None: @@ -116,8 +118,8 @@ async def test_integration_already_exists(hass: HomeAssistant) -> None: context={"source": config_entries.SOURCE_USER}, ) assert result - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" async def test_hassio_already_configured(hass: HomeAssistant) -> None: @@ -141,8 +143,8 @@ async def test_hassio_already_configured(hass: HomeAssistant) -> None: context={"source": config_entries.SOURCE_HASSIO}, ) assert result - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" async def test_hassio_ignored(hass: HomeAssistant) -> None: @@ -166,8 +168,8 @@ async def test_hassio_ignored(hass: HomeAssistant) -> None: context={"source": config_entries.SOURCE_HASSIO}, ) assert result - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" async def test_hassio_confirm( @@ -195,24 +197,25 @@ async def test_hassio_confirm( context={"source": config_entries.SOURCE_HASSIO}, ) assert result - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "hassio_confirm" - assert result.get("description_placeholders") == {"addon": "AdGuard Home Addon"} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "hassio_confirm" + assert result["description_placeholders"] == {"addon": "AdGuard Home Addon"} - result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {}) + result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) - assert result2 - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2.get("title") == "AdGuard Home Addon" + assert result + assert result["type"] is FlowResultType.CREATE_ENTRY - data = result2.get("data") - assert data - assert data[CONF_HOST] == "mock-adguard" - assert data[CONF_PASSWORD] is None - assert data[CONF_PORT] == 3000 - assert data[CONF_SSL] is False - assert data[CONF_USERNAME] is None - assert data[CONF_VERIFY_SSL] + config_entry = result["result"] + assert config_entry.title == "AdGuard Home Addon" + assert config_entry.data == { + CONF_HOST: "mock-adguard", + CONF_PASSWORD: None, + CONF_PORT: 3000, + CONF_SSL: False, + CONF_USERNAME: None, + CONF_VERIFY_SSL: True, + } async def test_hassio_connection_error( @@ -241,6 +244,6 @@ async def test_hassio_connection_error( result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) assert result - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "hassio_confirm" - assert result.get("errors") == {"base": "cannot_connect"} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "hassio_confirm" + assert result["errors"] == {"base": "cannot_connect"} From d79dc8d22f73346ee406b95be32cc266cc686283 Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Thu, 12 Dec 2024 15:13:37 -0500 Subject: [PATCH 0593/1198] Add source zone exclusion to Russound RIO (#130392) * Add source zone exclusion to Russound RIO * Ruff format --- .../components/russound_rio/media_player.py | 15 
++++++++++++++- tests/components/russound_rio/conftest.py | 4 +++- tests/components/russound_rio/const.py | 1 + 3 files changed, 18 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/russound_rio/media_player.py b/homeassistant/components/russound_rio/media_player.py index d0d8e02a282..299a6fb2cea 100644 --- a/homeassistant/components/russound_rio/media_player.py +++ b/homeassistant/components/russound_rio/media_player.py @@ -5,8 +5,10 @@ from __future__ import annotations import logging from aiorussound import Controller +from aiorussound.const import FeatureFlag from aiorussound.models import PlayStatus, Source from aiorussound.rio import ZoneControlSurface +from aiorussound.util import is_feature_supported from homeassistant.components.media_player import ( MediaPlayerDeviceClass, @@ -155,7 +157,18 @@ class RussoundZoneDevice(RussoundBaseEntity, MediaPlayerEntity): @property def source_list(self) -> list[str]: """Return a list of available input sources.""" - return [x.name for x in self._sources.values()] + available_sources = ( + [ + source + for source_id, source in self._sources.items() + if source_id in self._zone.enabled_sources + ] + if is_feature_supported( + self._client.rio_version, FeatureFlag.SUPPORT_ZONE_SOURCE_EXCLUSION + ) + else self._sources.values() + ) + return [x.name for x in available_sources] @property def media_title(self) -> str | None: diff --git a/tests/components/russound_rio/conftest.py b/tests/components/russound_rio/conftest.py index deb7bfccdf0..5522c1e6ea2 100644 --- a/tests/components/russound_rio/conftest.py +++ b/tests/components/russound_rio/conftest.py @@ -11,7 +11,7 @@ import pytest from homeassistant.components.russound_rio.const import DOMAIN from homeassistant.core import HomeAssistant -from .const import HARDWARE_MAC, HOST, MOCK_CONFIG, MODEL, PORT +from .const import API_VERSION, HARDWARE_MAC, HOST, MOCK_CONFIG, MODEL, PORT from tests.common import MockConfigEntry, load_json_object_fixture @@ -71,4 +71,6 @@ def mock_russound_client() -> Generator[AsyncMock]: client.connection_handler = RussoundTcpConnectionHandler(HOST, PORT) client.is_connected = Mock(return_value=True) client.unregister_state_update_callbacks.return_value = True + client.rio_version = API_VERSION + yield client diff --git a/tests/components/russound_rio/const.py b/tests/components/russound_rio/const.py index 3d2924693d2..8f8ae7b59ea 100644 --- a/tests/components/russound_rio/const.py +++ b/tests/components/russound_rio/const.py @@ -8,6 +8,7 @@ HOST = "127.0.0.1" PORT = 9621 MODEL = "MCA-C5" HARDWARE_MAC = "00:11:22:33:44:55" +API_VERSION = "1.08.00" MOCK_CONFIG = { "host": HOST, From b9a7307df854b0b5beda88d26892195a7355deeb Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 21:17:05 +0100 Subject: [PATCH 0594/1198] Refactor light reproduce state to use kelvin attribute (#132854) --- .../components/light/reproduce_state.py | 21 ++++++-- .../components/light/test_reproduce_state.py | 48 ++++++++++++------- 2 files changed, 50 insertions(+), 19 deletions(-) diff --git a/homeassistant/components/light/reproduce_state.py b/homeassistant/components/light/reproduce_state.py index c933b517ccc..a89209eb426 100644 --- a/homeassistant/components/light/reproduce_state.py +++ b/homeassistant/components/light/reproduce_state.py @@ -15,11 +15,13 @@ from homeassistant.const import ( STATE_ON, ) from homeassistant.core import Context, HomeAssistant, State +from homeassistant.util import color as color_util from . 
import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, ATTR_RGB_COLOR, @@ -40,6 +42,7 @@ ATTR_GROUP = [ATTR_BRIGHTNESS, ATTR_EFFECT] COLOR_GROUP = [ ATTR_HS_COLOR, ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, @@ -55,7 +58,7 @@ class ColorModeAttr(NamedTuple): COLOR_MODE_TO_ATTRIBUTE = { - ColorMode.COLOR_TEMP: ColorModeAttr(ATTR_COLOR_TEMP, ATTR_COLOR_TEMP), + ColorMode.COLOR_TEMP: ColorModeAttr(ATTR_COLOR_TEMP_KELVIN, ATTR_COLOR_TEMP_KELVIN), ColorMode.HS: ColorModeAttr(ATTR_HS_COLOR, ATTR_HS_COLOR), ColorMode.RGB: ColorModeAttr(ATTR_RGB_COLOR, ATTR_RGB_COLOR), ColorMode.RGBW: ColorModeAttr(ATTR_RGBW_COLOR, ATTR_RGBW_COLOR), @@ -124,13 +127,25 @@ async def _async_reproduce_state( color_mode = state.attributes[ATTR_COLOR_MODE] if cm_attr := COLOR_MODE_TO_ATTRIBUTE.get(color_mode): if (cm_attr_state := state.attributes.get(cm_attr.state_attr)) is None: + if ( + color_mode != ColorMode.COLOR_TEMP + or (mireds := state.attributes.get(ATTR_COLOR_TEMP)) is None + ): + _LOGGER.warning( + "Color mode %s specified but attribute %s missing for: %s", + color_mode, + cm_attr.state_attr, + state.entity_id, + ) + return _LOGGER.warning( - "Color mode %s specified but attribute %s missing for: %s", + "Color mode %s specified but attribute %s missing for: %s, " + "using color_temp (mireds) as fallback", color_mode, cm_attr.state_attr, state.entity_id, ) - return + cm_attr_state = color_util.color_temperature_mired_to_kelvin(mireds) service_data[cm_attr.parameter] = cm_attr_state else: # Fall back to Choosing the first color that is specified diff --git a/tests/components/light/test_reproduce_state.py b/tests/components/light/test_reproduce_state.py index 30a5e3f6842..987e97c6eb2 100644 --- a/tests/components/light/test_reproduce_state.py +++ b/tests/components/light/test_reproduce_state.py @@ -10,7 +10,7 @@ from tests.common import async_mock_service VALID_BRIGHTNESS = {"brightness": 180} VALID_EFFECT = {"effect": "random"} -VALID_COLOR_TEMP = {"color_temp": 240} +VALID_COLOR_TEMP_KELVIN = {"color_temp_kelvin": 4200} VALID_HS_COLOR = {"hs_color": (345, 75)} VALID_RGB_COLOR = {"rgb_color": (255, 63, 111)} VALID_RGBW_COLOR = {"rgbw_color": (255, 63, 111, 10)} @@ -19,7 +19,7 @@ VALID_XY_COLOR = {"xy_color": (0.59, 0.274)} NONE_BRIGHTNESS = {"brightness": None} NONE_EFFECT = {"effect": None} -NONE_COLOR_TEMP = {"color_temp": None} +NONE_COLOR_TEMP_KELVIN = {"color_temp_kelvin": None} NONE_HS_COLOR = {"hs_color": None} NONE_RGB_COLOR = {"rgb_color": None} NONE_RGBW_COLOR = {"rgbw_color": None} @@ -34,7 +34,7 @@ async def test_reproducing_states( hass.states.async_set("light.entity_off", "off", {}) hass.states.async_set("light.entity_bright", "on", VALID_BRIGHTNESS) hass.states.async_set("light.entity_effect", "on", VALID_EFFECT) - hass.states.async_set("light.entity_temp", "on", VALID_COLOR_TEMP) + hass.states.async_set("light.entity_temp", "on", VALID_COLOR_TEMP_KELVIN) hass.states.async_set("light.entity_hs", "on", VALID_HS_COLOR) hass.states.async_set("light.entity_rgb", "on", VALID_RGB_COLOR) hass.states.async_set("light.entity_xy", "on", VALID_XY_COLOR) @@ -49,7 +49,7 @@ async def test_reproducing_states( State("light.entity_off", "off"), State("light.entity_bright", "on", VALID_BRIGHTNESS), State("light.entity_effect", "on", VALID_EFFECT), - State("light.entity_temp", "on", VALID_COLOR_TEMP), + State("light.entity_temp", "on", VALID_COLOR_TEMP_KELVIN), State("light.entity_hs", "on", 
VALID_HS_COLOR), State("light.entity_rgb", "on", VALID_RGB_COLOR), State("light.entity_xy", "on", VALID_XY_COLOR), @@ -73,7 +73,7 @@ async def test_reproducing_states( State("light.entity_xy", "off"), State("light.entity_off", "on", VALID_BRIGHTNESS), State("light.entity_bright", "on", VALID_EFFECT), - State("light.entity_effect", "on", VALID_COLOR_TEMP), + State("light.entity_effect", "on", VALID_COLOR_TEMP_KELVIN), State("light.entity_temp", "on", VALID_HS_COLOR), State("light.entity_hs", "on", VALID_RGB_COLOR), State("light.entity_rgb", "on", VALID_XY_COLOR), @@ -92,7 +92,7 @@ async def test_reproducing_states( expected_bright["entity_id"] = "light.entity_bright" expected_calls.append(expected_bright) - expected_effect = dict(VALID_COLOR_TEMP) + expected_effect = dict(VALID_COLOR_TEMP_KELVIN) expected_effect["entity_id"] = "light.entity_effect" expected_calls.append(expected_effect) @@ -146,7 +146,7 @@ async def test_filter_color_modes( """Test filtering of parameters according to color mode.""" hass.states.async_set("light.entity", "off", {}) all_colors = { - **VALID_COLOR_TEMP, + **VALID_COLOR_TEMP_KELVIN, **VALID_HS_COLOR, **VALID_RGB_COLOR, **VALID_RGBW_COLOR, @@ -162,7 +162,7 @@ async def test_filter_color_modes( ) expected_map = { - light.ColorMode.COLOR_TEMP: {**VALID_BRIGHTNESS, **VALID_COLOR_TEMP}, + light.ColorMode.COLOR_TEMP: {**VALID_BRIGHTNESS, **VALID_COLOR_TEMP_KELVIN}, light.ColorMode.BRIGHTNESS: VALID_BRIGHTNESS, light.ColorMode.HS: {**VALID_BRIGHTNESS, **VALID_HS_COLOR}, light.ColorMode.ONOFF: {**VALID_BRIGHTNESS}, @@ -201,13 +201,14 @@ async def test_filter_color_modes_missing_attributes( hass.states.async_set("light.entity", "off", {}) expected_log = ( "Color mode color_temp specified " - "but attribute color_temp missing for: light.entity" + "but attribute color_temp_kelvin missing for: light.entity" ) + expected_fallback_log = "using color_temp (mireds) as fallback" turn_on_calls = async_mock_service(hass, "light", "turn_on") all_colors = { - **VALID_COLOR_TEMP, + **VALID_COLOR_TEMP_KELVIN, **VALID_HS_COLOR, **VALID_RGB_COLOR, **VALID_RGBW_COLOR, @@ -216,9 +217,9 @@ async def test_filter_color_modes_missing_attributes( **VALID_BRIGHTNESS, } - # Test missing `color_temp` attribute + # Test missing `color_temp_kelvin` attribute stored_attributes = {**all_colors} - stored_attributes.pop("color_temp") + stored_attributes.pop("color_temp_kelvin") caplog.clear() await async_reproduce_state( hass, @@ -226,11 +227,25 @@ async def test_filter_color_modes_missing_attributes( ) assert len(turn_on_calls) == 0 assert expected_log in caplog.text + assert expected_fallback_log not in caplog.text - # Test with correct `color_temp` attribute - stored_attributes["color_temp"] = 240 - expected = {"brightness": 180, "color_temp": 240} + # Test with deprecated `color_temp` attribute + stored_attributes["color_temp"] = 250 + expected = {"brightness": 180, "color_temp_kelvin": 4000} caplog.clear() + await async_reproduce_state( + hass, + [State("light.entity", "on", {**stored_attributes, "color_mode": color_mode})], + ) + + assert len(turn_on_calls) == 1 + assert expected_log in caplog.text + assert expected_fallback_log in caplog.text + + # Test with correct `color_temp_kelvin` attribute + expected = {"brightness": 180, "color_temp_kelvin": 4200} + caplog.clear() + turn_on_calls.clear() await async_reproduce_state( hass, [State("light.entity", "on", {**all_colors, "color_mode": color_mode})], @@ -239,6 +254,7 @@ async def test_filter_color_modes_missing_attributes( assert 
turn_on_calls[0].domain == "light" assert dict(turn_on_calls[0].data) == {"entity_id": "light.entity", **expected} assert expected_log not in caplog.text + assert expected_fallback_log not in caplog.text @pytest.mark.parametrize( @@ -246,7 +262,7 @@ async def test_filter_color_modes_missing_attributes( [ NONE_BRIGHTNESS, NONE_EFFECT, - NONE_COLOR_TEMP, + NONE_COLOR_TEMP_KELVIN, NONE_HS_COLOR, NONE_RGB_COLOR, NONE_RGBW_COLOR, From d02bceb6f32282267a710867ef0529996601585b Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 21:17:31 +0100 Subject: [PATCH 0595/1198] Migrate alexa color_temp handlers to use Kelvin (#132995) --- homeassistant/components/alexa/handlers.py | 16 ++++++++-------- tests/components/alexa/test_capabilities.py | 20 ++++++++++++++------ 2 files changed, 22 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/alexa/handlers.py b/homeassistant/components/alexa/handlers.py index 9b857ff4dfd..04bef105546 100644 --- a/homeassistant/components/alexa/handlers.py +++ b/homeassistant/components/alexa/handlers.py @@ -376,14 +376,14 @@ async def async_api_decrease_color_temp( ) -> AlexaResponse: """Process a decrease color temperature request.""" entity = directive.entity - current = int(entity.attributes[light.ATTR_COLOR_TEMP]) - max_mireds = int(entity.attributes[light.ATTR_MAX_MIREDS]) + current = int(entity.attributes[light.ATTR_COLOR_TEMP_KELVIN]) + min_kelvin = int(entity.attributes[light.ATTR_MIN_COLOR_TEMP_KELVIN]) - value = min(max_mireds, current + 50) + value = max(min_kelvin, current - 500) await hass.services.async_call( entity.domain, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP: value}, + {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP_KELVIN: value}, blocking=False, context=context, ) @@ -400,14 +400,14 @@ async def async_api_increase_color_temp( ) -> AlexaResponse: """Process an increase color temperature request.""" entity = directive.entity - current = int(entity.attributes[light.ATTR_COLOR_TEMP]) - min_mireds = int(entity.attributes[light.ATTR_MIN_MIREDS]) + current = int(entity.attributes[light.ATTR_COLOR_TEMP_KELVIN]) + max_kelvin = int(entity.attributes[light.ATTR_MAX_COLOR_TEMP_KELVIN]) - value = max(min_mireds, current - 50) + value = min(max_kelvin, current + 500) await hass.services.async_call( entity.domain, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP: value}, + {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP_KELVIN: value}, blocking=False, context=context, ) diff --git a/tests/components/alexa/test_capabilities.py b/tests/components/alexa/test_capabilities.py index 823afd515b2..b10a93df0c9 100644 --- a/tests/components/alexa/test_capabilities.py +++ b/tests/components/alexa/test_capabilities.py @@ -163,7 +163,7 @@ async def test_api_set_color_temperature(hass: HomeAssistant) -> None: assert msg["header"]["name"] == "Response" -@pytest.mark.parametrize(("result", "initial"), [(383, "333"), (500, "500")]) +@pytest.mark.parametrize(("result", "initial"), [(2500, "3000"), (2000, "2000")]) async def test_api_decrease_color_temp( hass: HomeAssistant, result: int, initial: str ) -> None: @@ -176,7 +176,11 @@ async def test_api_decrease_color_temp( hass.states.async_set( "light.test", "off", - {"friendly_name": "Test light", "color_temp": initial, "max_mireds": 500}, + { + "friendly_name": "Test light", + "color_temp_kelvin": initial, + "min_color_temp_kelvin": 2000, + }, ) call_light = async_mock_service(hass, 
"light", "turn_on") @@ -189,11 +193,11 @@ async def test_api_decrease_color_temp( assert len(call_light) == 1 assert call_light[0].data["entity_id"] == "light.test" - assert call_light[0].data["color_temp"] == result + assert call_light[0].data["color_temp_kelvin"] == result assert msg["header"]["name"] == "Response" -@pytest.mark.parametrize(("result", "initial"), [(283, "333"), (142, "142")]) +@pytest.mark.parametrize(("result", "initial"), [(3500, "3000"), (7000, "7000")]) async def test_api_increase_color_temp( hass: HomeAssistant, result: int, initial: str ) -> None: @@ -206,7 +210,11 @@ async def test_api_increase_color_temp( hass.states.async_set( "light.test", "off", - {"friendly_name": "Test light", "color_temp": initial, "min_mireds": 142}, + { + "friendly_name": "Test light", + "color_temp_kelvin": initial, + "max_color_temp_kelvin": 7000, + }, ) call_light = async_mock_service(hass, "light", "turn_on") @@ -219,7 +227,7 @@ async def test_api_increase_color_temp( assert len(call_light) == 1 assert call_light[0].data["entity_id"] == "light.test" - assert call_light[0].data["color_temp"] == result + assert call_light[0].data["color_temp_kelvin"] == result assert msg["header"]["name"] == "Response" From aa7e02485301b788d0c58d30ae1333132049703c Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 21:17:52 +0100 Subject: [PATCH 0596/1198] Migrate lifx light tests to use Kelvin (#133020) --- tests/components/lifx/test_light.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/tests/components/lifx/test_light.py b/tests/components/lifx/test_light.py index 88c2115ce47..ffe819fa2cb 100644 --- a/tests/components/lifx/test_light.py +++ b/tests/components/lifx/test_light.py @@ -9,7 +9,7 @@ import pytest from homeassistant.components import lifx from homeassistant.components.lifx import DOMAIN -from homeassistant.components.lifx.const import ATTR_POWER +from homeassistant.components.lifx.const import _ATTR_COLOR_TEMP, ATTR_POWER from homeassistant.components.lifx.light import ATTR_INFRARED, ATTR_ZONES from homeassistant.components.lifx.manager import ( ATTR_CLOUD_SATURATION_MAX, @@ -31,7 +31,6 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS_PCT, ATTR_COLOR_MODE, ATTR_COLOR_NAME, - ATTR_COLOR_TEMP, ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, @@ -1263,7 +1262,7 @@ async def test_white_bulb(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 400}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 2500}, blocking=True, ) assert bulb.set_color.calls[0][0][0] == [32000, 0, 32000, 2500] @@ -1759,7 +1758,7 @@ async def test_lifx_set_state_kelvin(hass: HomeAssistant) -> None: await hass.services.async_call( DOMAIN, "set_state", - {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 255, ATTR_COLOR_TEMP: 400}, + {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 255, _ATTR_COLOR_TEMP: 400}, blocking=True, ) assert bulb.set_color.calls[0][0][0] == [32000, 0, 65535, 2500] From 61b1b50c342018b847125316ac19d0b6a6d5a1b0 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 21:19:05 +0100 Subject: [PATCH 0597/1198] Improve Solar.Forecast configuration flow tests (#133077) --- .../forecast_solar/test_config_flow.py | 111 +++++++++++------- 1 file changed, 71 insertions(+), 40 deletions(-) diff --git a/tests/components/forecast_solar/test_config_flow.py b/tests/components/forecast_solar/test_config_flow.py index 
abaad402e1b..8fffb5096bc 100644 --- a/tests/components/forecast_solar/test_config_flow.py +++ b/tests/components/forecast_solar/test_config_flow.py @@ -2,6 +2,8 @@ from unittest.mock import AsyncMock +import pytest + from homeassistant.components.forecast_solar.const import ( CONF_AZIMUTH, CONF_DAMPING_EVENING, @@ -25,10 +27,10 @@ async def test_user_flow(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> No DOMAIN, context={"source": SOURCE_USER} ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_NAME: "Name", @@ -40,13 +42,16 @@ async def test_user_flow(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> No }, ) - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2.get("title") == "Name" - assert result2.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.title == "Name" + assert config_entry.unique_id is None + assert config_entry.data == { CONF_LATITUDE: 52.42, CONF_LONGITUDE: 4.42, } - assert result2.get("options") == { + assert config_entry.options == { CONF_AZIMUTH: 142, CONF_DECLINATION: 42, CONF_MODULES_POWER: 4242, @@ -55,9 +60,9 @@ async def test_user_flow(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> No assert len(mock_setup_entry.mock_calls) == 1 +@pytest.mark.usefixtures("mock_setup_entry") async def test_options_flow_invalid_api( hass: HomeAssistant, - mock_setup_entry: AsyncMock, mock_config_entry: MockConfigEntry, ) -> None: """Test options config flow when API key is invalid.""" @@ -67,10 +72,10 @@ async def test_options_flow_invalid_api( result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "init" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" - result2 = await hass.config_entries.options.async_configure( + result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={ CONF_API_KEY: "solarPOWER!", @@ -84,27 +89,11 @@ async def test_options_flow_invalid_api( ) await hass.async_block_till_done() - assert result2.get("type") is FlowResultType.FORM - assert result2["errors"] == {CONF_API_KEY: "invalid_api_key"} + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {CONF_API_KEY: "invalid_api_key"} - -async def test_options_flow( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test config flow options.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) - - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "init" - - # With the API key - result2 = await hass.config_entries.options.async_configure( + # Ensure we can recover from this error + result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={ CONF_API_KEY: "SolarForecast150", @@ -118,8 +107,8 @@ async def test_options_flow( ) await hass.async_block_till_done() - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert 
result2.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { CONF_API_KEY: "SolarForecast150", CONF_DECLINATION: 21, CONF_AZIMUTH: 22, @@ -130,9 +119,9 @@ async def test_options_flow( } -async def test_options_flow_without_key( +@pytest.mark.usefixtures("mock_setup_entry") +async def test_options_flow( hass: HomeAssistant, - mock_setup_entry: AsyncMock, mock_config_entry: MockConfigEntry, ) -> None: """Test config flow options.""" @@ -142,11 +131,53 @@ async def test_options_flow_without_key( result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "init" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + # With the API key + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_API_KEY: "SolarForecast150", + CONF_DECLINATION: 21, + CONF_AZIMUTH: 22, + CONF_MODULES_POWER: 2122, + CONF_DAMPING_MORNING: 0.25, + CONF_DAMPING_EVENING: 0.25, + CONF_INVERTER_SIZE: 2000, + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONF_API_KEY: "SolarForecast150", + CONF_DECLINATION: 21, + CONF_AZIMUTH: 22, + CONF_MODULES_POWER: 2122, + CONF_DAMPING_MORNING: 0.25, + CONF_DAMPING_EVENING: 0.25, + CONF_INVERTER_SIZE: 2000, + } + + +@pytest.mark.usefixtures("mock_setup_entry") +async def test_options_flow_without_key( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test config flow options.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" # Without the API key - result2 = await hass.config_entries.options.async_configure( + result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={ CONF_DECLINATION: 21, @@ -159,8 +190,8 @@ async def test_options_flow_without_key( ) await hass.async_block_till_done() - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { CONF_API_KEY: None, CONF_DECLINATION: 21, CONF_AZIMUTH: 22, From 2cff7526d01e985c9b9035dced9a662a092cded9 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 22:15:49 +0100 Subject: [PATCH 0598/1198] Add test-before-setup rule to quality_scale validation (#132255) * Add test-before-setup rule to quality_scale validation * Use ast_parse_module * Add rules_done * Add Config argument --- script/hassfest/quality_scale.py | 3 +- .../test_before_setup.py | 69 +++++++++++++++++++ 2 files changed, 71 insertions(+), 1 deletion(-) create mode 100644 script/hassfest/quality_scale_validation/test_before_setup.py diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 784573f5f8f..f3b285c8485 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -23,6 +23,7 @@ from .quality_scale_validation import ( reconfiguration_flow, runtime_data, strict_typing, + test_before_setup, unique_config_entry, ) @@ -56,7 +57,7 @@ ALL_RULES = [ Rule("has-entity-name", ScaledQualityScaleTiers.BRONZE), 
Rule("runtime-data", ScaledQualityScaleTiers.BRONZE, runtime_data), Rule("test-before-configure", ScaledQualityScaleTiers.BRONZE), - Rule("test-before-setup", ScaledQualityScaleTiers.BRONZE), + Rule("test-before-setup", ScaledQualityScaleTiers.BRONZE, test_before_setup), Rule("unique-config-entry", ScaledQualityScaleTiers.BRONZE, unique_config_entry), # SILVER Rule("action-exceptions", ScaledQualityScaleTiers.SILVER), diff --git a/script/hassfest/quality_scale_validation/test_before_setup.py b/script/hassfest/quality_scale_validation/test_before_setup.py new file mode 100644 index 00000000000..db737c99e37 --- /dev/null +++ b/script/hassfest/quality_scale_validation/test_before_setup.py @@ -0,0 +1,69 @@ +"""Enforce that the integration raises correctly during initialisation. + +https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/test-before-setup/ +""" + +import ast + +from script.hassfest import ast_parse_module +from script.hassfest.model import Config, Integration + +_VALID_EXCEPTIONS = { + "ConfigEntryNotReady", + "ConfigEntryAuthFailed", + "ConfigEntryError", +} + + +def _raises_exception(async_setup_entry_function: ast.AsyncFunctionDef) -> bool: + """Check that a valid exception is raised within `async_setup_entry`.""" + for node in ast.walk(async_setup_entry_function): + if isinstance(node, ast.Raise): + if isinstance(node.exc, ast.Name) and node.exc.id in _VALID_EXCEPTIONS: + return True + if isinstance(node.exc, ast.Call) and node.exc.func.id in _VALID_EXCEPTIONS: + return True + + return False + + +def _calls_first_refresh(async_setup_entry_function: ast.AsyncFunctionDef) -> bool: + """Check that a async_config_entry_first_refresh within `async_setup_entry`.""" + for node in ast.walk(async_setup_entry_function): + if ( + isinstance(node, ast.Call) + and isinstance(node.func, ast.Attribute) + and node.func.attr == "async_config_entry_first_refresh" + ): + return True + + return False + + +def _get_setup_entry_function(module: ast.Module) -> ast.AsyncFunctionDef | None: + """Get async_setup_entry function.""" + for item in module.body: + if isinstance(item, ast.AsyncFunctionDef) and item.name == "async_setup_entry": + return item + return None + + +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: + """Validate correct use of ConfigEntry.runtime_data.""" + init_file = integration.path / "__init__.py" + init = ast_parse_module(init_file) + + # Should not happen, but better to be safe + if not (async_setup_entry := _get_setup_entry_function(init)): + return [f"Could not find `async_setup_entry` in {init_file}"] + + if not ( + _raises_exception(async_setup_entry) or _calls_first_refresh(async_setup_entry) + ): + return [ + f"Integration does not raise one of {_VALID_EXCEPTIONS} " + f"in async_setup_entry ({init_file})" + ] + return None From bf9788b9c4724b46a0289342d6122477df2d883e Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 22:16:28 +0100 Subject: [PATCH 0599/1198] Fix CI failure in russound_rio (#133081) * Fix CI in russound_rio * Adjust --- homeassistant/components/russound_rio/media_player.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/homeassistant/components/russound_rio/media_player.py b/homeassistant/components/russound_rio/media_player.py index 299a6fb2cea..02467731ec3 100644 --- a/homeassistant/components/russound_rio/media_player.py +++ b/homeassistant/components/russound_rio/media_player.py @@ -3,6 +3,7 @@ from __future__ 
import annotations import logging +from typing import TYPE_CHECKING from aiorussound import Controller from aiorussound.const import FeatureFlag @@ -157,6 +158,8 @@ class RussoundZoneDevice(RussoundBaseEntity, MediaPlayerEntity): @property def source_list(self) -> list[str]: """Return a list of available input sources.""" + if TYPE_CHECKING: + assert self._client.rio_version available_sources = ( [ source From 2af5c5ecda516bb2adf774140622a3d52ea11146 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Thu, 12 Dec 2024 20:26:30 -0800 Subject: [PATCH 0600/1198] Update Rainbird quality scale grading on the Silver quality checks (#131498) * Grade Rainbird on the Silver quality scale * Remove done comments * Update quality_scale.yaml * Update config-flow-test-coverage --- .../components/rainbird/quality_scale.yaml | 30 ++++++++++++------- 1 file changed, 20 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/rainbird/quality_scale.yaml b/homeassistant/components/rainbird/quality_scale.yaml index cd000c63fad..8b4805a9b0e 100644 --- a/homeassistant/components/rainbird/quality_scale.yaml +++ b/homeassistant/components/rainbird/quality_scale.yaml @@ -34,21 +34,31 @@ rules: docs-removal-instructions: todo test-before-setup: done docs-high-level-description: done - config-flow-test-coverage: done + config-flow-test-coverage: + status: todo + comment: | + All config flow tests should finish with CREATE_ENTRY and ABORT to + test they are able to recover from errors docs-actions: done runtime-data: done # Silver - log-when-unavailable: todo - config-entry-unloading: todo + log-when-unavailable: done + config-entry-unloading: done reauthentication-flow: done - action-exceptions: todo - docs-installation-parameters: todo - integration-owner: todo - parallel-updates: todo - test-coverage: todo - docs-configuration-parameters: todo - entity-unavailable: todo + action-exceptions: done + docs-installation-parameters: + status: todo + comment: The documentation does not mention installation parameters + integration-owner: done + parallel-updates: + status: todo + comment: The integration does not explicitly set a number of parallel updates. + test-coverage: done + docs-configuration-parameters: + status: todo + comment: The documentation for configuration parameters could be improved. 
+ entity-unavailable: done # Gold docs-examples: todo From 72cc1f4d39b2bc844d9e2572f9789c4edd8335d9 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 13 Dec 2024 06:51:55 +0100 Subject: [PATCH 0601/1198] Use correct ATTR_KELVIN constant in yeelight tests (#133088) --- tests/components/yeelight/test_light.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/components/yeelight/test_light.py b/tests/components/yeelight/test_light.py index 274d0a158f0..56162d4d9d1 100644 --- a/tests/components/yeelight/test_light.py +++ b/tests/components/yeelight/test_light.py @@ -28,7 +28,6 @@ from homeassistant.components.light import ( ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, - ATTR_KELVIN, ATTR_RGB_COLOR, ATTR_TRANSITION, FLASH_LONG, @@ -59,6 +58,7 @@ from homeassistant.components.yeelight.const import ( YEELIGHT_TEMPERATURE_TRANSACTION, ) from homeassistant.components.yeelight.light import ( + ATTR_KELVIN, ATTR_MINUTES, ATTR_MODE, EFFECT_CANDLE_FLICKER, From 09b06f839d7a154dcaed298eb360a839f915d2eb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 13 Dec 2024 07:47:40 +0100 Subject: [PATCH 0602/1198] Bump github/codeql-action from 3.27.7 to 3.27.9 (#133104) --- .github/workflows/codeql.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 8f6e393f853..d3efa8ebaa3 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -24,11 +24,11 @@ jobs: uses: actions/checkout@v4.2.2 - name: Initialize CodeQL - uses: github/codeql-action/init@v3.27.7 + uses: github/codeql-action/init@v3.27.9 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3.27.7 + uses: github/codeql-action/analyze@v3.27.9 with: category: "/language:python" From 0ffb588d5cdaeceba4c18a2ac5af42c4c0848348 Mon Sep 17 00:00:00 2001 From: Klaas Schoute Date: Fri, 13 Dec 2024 07:53:25 +0100 Subject: [PATCH 0603/1198] Move config entry type of energyzero integration (#133094) Move config_entry type to coordinator file --- homeassistant/components/energyzero/__init__.py | 7 ++----- homeassistant/components/energyzero/coordinator.py | 5 ++++- homeassistant/components/energyzero/diagnostics.py | 3 +-- homeassistant/components/energyzero/sensor.py | 7 +++++-- homeassistant/components/energyzero/services.py | 7 ++----- 5 files changed, 14 insertions(+), 15 deletions(-) diff --git a/homeassistant/components/energyzero/__init__.py b/homeassistant/components/energyzero/__init__.py index f7591056383..fc2855374dd 100644 --- a/homeassistant/components/energyzero/__init__.py +++ b/homeassistant/components/energyzero/__init__.py @@ -2,7 +2,6 @@ from __future__ import annotations -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady @@ -10,14 +9,12 @@ from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import ConfigType from .const import DOMAIN -from .coordinator import EnergyZeroDataUpdateCoordinator +from .coordinator import EnergyZeroConfigEntry, EnergyZeroDataUpdateCoordinator from .services import async_setup_services PLATFORMS: list[Platform] = [Platform.SENSOR] CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) -type EnergyZeroConfigEntry = ConfigEntry[EnergyZeroDataUpdateCoordinator] - async def 
async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up EnergyZero services.""" @@ -30,7 +27,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: EnergyZeroConfigEntry) -> bool: """Set up EnergyZero from a config entry.""" - coordinator = EnergyZeroDataUpdateCoordinator(hass) + coordinator = EnergyZeroDataUpdateCoordinator(hass, entry) try: await coordinator.async_config_entry_first_refresh() except ConfigEntryNotReady: diff --git a/homeassistant/components/energyzero/coordinator.py b/homeassistant/components/energyzero/coordinator.py index 65955b2ebe6..35054f7b3b7 100644 --- a/homeassistant/components/energyzero/coordinator.py +++ b/homeassistant/components/energyzero/coordinator.py @@ -21,6 +21,8 @@ from homeassistant.util import dt as dt_util from .const import DOMAIN, LOGGER, SCAN_INTERVAL, THRESHOLD_HOUR +type EnergyZeroConfigEntry = ConfigEntry[EnergyZeroDataUpdateCoordinator] + class EnergyZeroData(NamedTuple): """Class for defining data in dict.""" @@ -35,13 +37,14 @@ class EnergyZeroDataUpdateCoordinator(DataUpdateCoordinator[EnergyZeroData]): config_entry: ConfigEntry - def __init__(self, hass: HomeAssistant) -> None: + def __init__(self, hass: HomeAssistant, entry: EnergyZeroConfigEntry) -> None: """Initialize global EnergyZero data updater.""" super().__init__( hass, LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL, + config_entry=entry, ) self.energyzero = EnergyZero(session=async_get_clientsession(hass)) diff --git a/homeassistant/components/energyzero/diagnostics.py b/homeassistant/components/energyzero/diagnostics.py index e6116eac259..0a45d87fee5 100644 --- a/homeassistant/components/energyzero/diagnostics.py +++ b/homeassistant/components/energyzero/diagnostics.py @@ -7,8 +7,7 @@ from typing import Any from homeassistant.core import HomeAssistant -from . import EnergyZeroConfigEntry -from .coordinator import EnergyZeroData +from .coordinator import EnergyZeroConfigEntry, EnergyZeroData def get_gas_price(data: EnergyZeroData, hours: int) -> float | None: diff --git a/homeassistant/components/energyzero/sensor.py b/homeassistant/components/energyzero/sensor.py index d52da599966..141ac793fba 100644 --- a/homeassistant/components/energyzero/sensor.py +++ b/homeassistant/components/energyzero/sensor.py @@ -25,9 +25,12 @@ from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . 
import EnergyZeroConfigEntry from .const import DOMAIN, SERVICE_TYPE_DEVICE_NAMES -from .coordinator import EnergyZeroData, EnergyZeroDataUpdateCoordinator +from .coordinator import ( + EnergyZeroConfigEntry, + EnergyZeroData, + EnergyZeroDataUpdateCoordinator, +) @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/energyzero/services.py b/homeassistant/components/energyzero/services.py index ba2bbf0573f..286735895ad 100644 --- a/homeassistant/components/energyzero/services.py +++ b/homeassistant/components/energyzero/services.py @@ -5,7 +5,7 @@ from __future__ import annotations from datetime import date, datetime from enum import Enum from functools import partial -from typing import TYPE_CHECKING, Final +from typing import Final from energyzero import Electricity, Gas, VatOption import voluptuous as vol @@ -22,11 +22,8 @@ from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import selector from homeassistant.util import dt as dt_util -if TYPE_CHECKING: - from . import EnergyZeroConfigEntry - from .const import DOMAIN -from .coordinator import EnergyZeroDataUpdateCoordinator +from .coordinator import EnergyZeroConfigEntry, EnergyZeroDataUpdateCoordinator ATTR_CONFIG_ENTRY: Final = "config_entry" ATTR_START: Final = "start" From 263eb41e799d73915ee979b14fa6464872473ea1 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 13 Dec 2024 08:24:18 +0100 Subject: [PATCH 0604/1198] Remove unused constant from blink (#133109) --- homeassistant/components/blink/services.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/homeassistant/components/blink/services.py b/homeassistant/components/blink/services.py index 5f51598e721..dd5d1e37627 100644 --- a/homeassistant/components/blink/services.py +++ b/homeassistant/components/blink/services.py @@ -5,7 +5,7 @@ from __future__ import annotations import voluptuous as vol from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_DEVICE_ID, CONF_PIN +from homeassistant.const import CONF_PIN from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import config_validation as cv @@ -13,11 +13,6 @@ from homeassistant.helpers import config_validation as cv from .const import ATTR_CONFIG_ENTRY_ID, DOMAIN, SERVICE_SEND_PIN from .coordinator import BlinkConfigEntry -SERVICE_UPDATE_SCHEMA = vol.Schema( - { - vol.Required(ATTR_DEVICE_ID): vol.All(cv.ensure_list, [cv.string]), - } -) SERVICE_SEND_PIN_SCHEMA = vol.Schema( { vol.Required(ATTR_CONFIG_ENTRY_ID): vol.All(cv.ensure_list, [cv.string]), From 8bd2c183e280d14643ce5b56bd0de44191a921b8 Mon Sep 17 00:00:00 2001 From: Brandon Rothweiler <2292715+bdr99@users.noreply.github.com> Date: Fri, 13 Dec 2024 02:46:15 -0500 Subject: [PATCH 0605/1198] Bump py-aosmith to 1.0.12 (#133100) --- homeassistant/components/aosmith/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/aosmith/manifest.json b/homeassistant/components/aosmith/manifest.json index eae7981d5b9..a928a6677cb 100644 --- a/homeassistant/components/aosmith/manifest.json +++ b/homeassistant/components/aosmith/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/aosmith", "iot_class": "cloud_polling", - "requirements": 
["py-aosmith==1.0.11"] + "requirements": ["py-aosmith==1.0.12"] } diff --git a/requirements_all.txt b/requirements_all.txt index 8f4705e878e..17998ba7fef 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1677,7 +1677,7 @@ pushover_complete==1.1.1 pvo==2.2.0 # homeassistant.components.aosmith -py-aosmith==1.0.11 +py-aosmith==1.0.12 # homeassistant.components.canary py-canary==0.5.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3a88a5a2d41..3965fbc0a3a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1378,7 +1378,7 @@ pushover_complete==1.1.1 pvo==2.2.0 # homeassistant.components.aosmith -py-aosmith==1.0.11 +py-aosmith==1.0.12 # homeassistant.components.canary py-canary==0.5.4 From de89be05129b9fe00f561f29179d12bc5bd8b400 Mon Sep 17 00:00:00 2001 From: David Bonnes Date: Fri, 13 Dec 2024 07:54:14 +0000 Subject: [PATCH 0606/1198] Bugfix to use evohome's new hostname (#133085) --- homeassistant/components/evohome/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/evohome/manifest.json b/homeassistant/components/evohome/manifest.json index da3d197f6aa..22edadad7f4 100644 --- a/homeassistant/components/evohome/manifest.json +++ b/homeassistant/components/evohome/manifest.json @@ -6,5 +6,5 @@ "iot_class": "cloud_polling", "loggers": ["evohomeasync", "evohomeasync2"], "quality_scale": "legacy", - "requirements": ["evohome-async==0.4.20"] + "requirements": ["evohome-async==0.4.21"] } diff --git a/requirements_all.txt b/requirements_all.txt index 17998ba7fef..4f61b88ed00 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -879,7 +879,7 @@ eufylife-ble-client==0.1.8 # evdev==1.6.1 # homeassistant.components.evohome -evohome-async==0.4.20 +evohome-async==0.4.21 # homeassistant.components.bryant_evolution evolutionhttp==0.0.18 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3965fbc0a3a..06448688306 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -745,7 +745,7 @@ eternalegypt==0.0.16 eufylife-ble-client==0.1.8 # homeassistant.components.evohome -evohome-async==0.4.20 +evohome-async==0.4.21 # homeassistant.components.bryant_evolution evolutionhttp==0.0.18 From 53439d6e2a31dcea27727613f4e06660973ffb05 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=85ke=20Strandberg?= Date: Fri, 13 Dec 2024 08:55:44 +0100 Subject: [PATCH 0607/1198] Handle step size correctly in myuplink number platform (#133016) --- homeassistant/components/myuplink/number.py | 13 +- .../fixtures/device_points_nibe_f730.json | 17 +++ .../myuplink/snapshots/test_diagnostics.ambr | 34 +++++ .../myuplink/snapshots/test_number.ambr | 126 ++++++++++++++++-- 4 files changed, 177 insertions(+), 13 deletions(-) diff --git a/homeassistant/components/myuplink/number.py b/homeassistant/components/myuplink/number.py index b05ab5d46c9..3d336953396 100644 --- a/homeassistant/components/myuplink/number.py +++ b/homeassistant/components/myuplink/number.py @@ -110,13 +110,16 @@ class MyUplinkNumber(MyUplinkEntity, NumberEntity): # Internal properties self.point_id = device_point.parameter_id self._attr_name = device_point.parameter_name + _scale = float(device_point.scale_value if device_point.scale_value else 1.0) self._attr_native_min_value = ( - device_point.raw["minValue"] if device_point.raw["minValue"] else -30000 - ) * float(device_point.raw.get("scaleValue", 1)) + device_point.min_value if 
device_point.min_value else -30000 + ) * _scale self._attr_native_max_value = ( - device_point.raw["maxValue"] if device_point.raw["maxValue"] else 30000 - ) * float(device_point.raw.get("scaleValue", 1)) - self._attr_step_value = device_point.raw.get("stepValue", 20) + device_point.max_value if device_point.max_value else 30000 + ) * _scale + self._attr_native_step = ( + device_point.step_value if device_point.step_value else 1.0 + ) * _scale if entity_description is not None: self.entity_description = entity_description diff --git a/tests/components/myuplink/fixtures/device_points_nibe_f730.json b/tests/components/myuplink/fixtures/device_points_nibe_f730.json index aaccdec530a..0a61ab05f21 100644 --- a/tests/components/myuplink/fixtures/device_points_nibe_f730.json +++ b/tests/components/myuplink/fixtures/device_points_nibe_f730.json @@ -1091,5 +1091,22 @@ "enumValues": [], "scaleValue": "1", "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "47398", + "parameterName": "Room sensor set point value heating climate system 1", + "parameterUnit": "°C", + "writable": true, + "timestamp": "2024-12-11T13:23:12+00:00", + "value": 14.5, + "strVal": "14.5°C", + "smartHomeCategories": [], + "minValue": 50.0, + "maxValue": 350.0, + "stepValue": 5.0, + "enumValues": [], + "scaleValue": "0.1", + "zoneId": null } ] diff --git a/tests/components/myuplink/snapshots/test_diagnostics.ambr b/tests/components/myuplink/snapshots/test_diagnostics.ambr index 71b33c58a87..6fe6becff11 100644 --- a/tests/components/myuplink/snapshots/test_diagnostics.ambr +++ b/tests/components/myuplink/snapshots/test_diagnostics.ambr @@ -1152,6 +1152,23 @@ "enumValues": [], "scaleValue": "1", "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "47398", + "parameterName": "Room sensor set point value heating climate system 1", + "parameterUnit": "°C", + "writable": true, + "timestamp": "2024-12-11T13:23:12+00:00", + "value": 14.5, + "strVal": "14.5°C", + "smartHomeCategories": [], + "minValue": 50.0, + "maxValue": 350.0, + "stepValue": 5.0, + "enumValues": [], + "scaleValue": "0.1", + "zoneId": null } ] @@ -2297,6 +2314,23 @@ "enumValues": [], "scaleValue": "1", "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "47398", + "parameterName": "Room sensor set point value heating climate system 1", + "parameterUnit": "°C", + "writable": true, + "timestamp": "2024-12-11T13:23:12+00:00", + "value": 14.5, + "strVal": "14.5°C", + "smartHomeCategories": [], + "minValue": 50.0, + "maxValue": 350.0, + "stepValue": 5.0, + "enumValues": [], + "scaleValue": "0.1", + "zoneId": null } ] diff --git a/tests/components/myuplink/snapshots/test_number.ambr b/tests/components/myuplink/snapshots/test_number.ambr index db1a8e0949f..c47d3c60295 100644 --- a/tests/components/myuplink/snapshots/test_number.ambr +++ b/tests/components/myuplink/snapshots/test_number.ambr @@ -8,7 +8,7 @@ 'max': 3000.0, 'min': -3000.0, 'mode': , - 'step': 1.0, + 'step': 0.1, }), 'config_entry_id': , 'device_class': None, @@ -44,7 +44,7 @@ 'max': 3000.0, 'min': -3000.0, 'mode': , - 'step': 1.0, + 'step': 0.1, 'unit_of_measurement': 'DM', }), 'context': , @@ -64,7 +64,7 @@ 'max': 3000.0, 'min': -3000.0, 'mode': , - 'step': 1.0, + 'step': 0.1, }), 'config_entry_id': , 'device_class': None, @@ -100,7 +100,7 @@ 'max': 3000.0, 'min': -3000.0, 'mode': , - 'step': 1.0, + 'step': 0.1, 'unit_of_measurement': 'DM', }), 'context': , @@ -221,6 +221,116 @@ 'state': '1.0', }) # --- +# name: 
test_number_states[platforms0][number.gotham_city_room_sensor_set_point_value_heating_climate_system_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 35.0, + 'min': 5.0, + 'mode': , + 'step': 0.5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.gotham_city_room_sensor_set_point_value_heating_climate_system_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Room sensor set point value heating climate system 1', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47398', + 'unit_of_measurement': None, + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_room_sensor_set_point_value_heating_climate_system_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Room sensor set point value heating climate system 1', + 'max': 35.0, + 'min': 5.0, + 'mode': , + 'step': 0.5, + }), + 'context': , + 'entity_id': 'number.gotham_city_room_sensor_set_point_value_heating_climate_system_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '14.5', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_room_sensor_set_point_value_heating_climate_system_1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 35.0, + 'min': 5.0, + 'mode': , + 'step': 0.5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.gotham_city_room_sensor_set_point_value_heating_climate_system_1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Room sensor set point value heating climate system 1', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47398', + 'unit_of_measurement': None, + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_room_sensor_set_point_value_heating_climate_system_1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Room sensor set point value heating climate system 1', + 'max': 35.0, + 'min': 5.0, + 'mode': , + 'step': 0.5, + }), + 'context': , + 'entity_id': 'number.gotham_city_room_sensor_set_point_value_heating_climate_system_1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '14.5', + }) +# --- # name: test_number_states[platforms0][number.gotham_city_start_diff_additional_heat-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -230,7 +340,7 @@ 'max': 2000.0, 'min': 100.0, 'mode': , - 'step': 1.0, + 'step': 10.0, }), 'config_entry_id': , 'device_class': None, @@ -266,7 +376,7 @@ 'max': 2000.0, 'min': 100.0, 'mode': , - 'step': 1.0, + 'step': 10.0, 'unit_of_measurement': 'DM', }), 'context': , @@ -286,7 +396,7 @@ 'max': 2000.0, 'min': 100.0, 'mode': , - 'step': 1.0, + 'step': 10.0, }), 
'config_entry_id': , 'device_class': None, @@ -322,7 +432,7 @@ 'max': 2000.0, 'min': 100.0, 'mode': , - 'step': 1.0, + 'step': 10.0, 'unit_of_measurement': 'DM', }), 'context': , From e3d14e699316bef29f41c0ba580d0cef434ec98d Mon Sep 17 00:00:00 2001 From: jb101010-2 <168106462+jb101010-2@users.noreply.github.com> Date: Fri, 13 Dec 2024 09:01:48 +0100 Subject: [PATCH 0608/1198] Bump pysuezV2 to 1.3.5 (#133076) --- homeassistant/components/suez_water/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/suez_water/manifest.json b/homeassistant/components/suez_water/manifest.json index 240be0f37bd..7e720a86afd 100644 --- a/homeassistant/components/suez_water/manifest.json +++ b/homeassistant/components/suez_water/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/suez_water", "iot_class": "cloud_polling", "loggers": ["pysuez", "regex"], - "requirements": ["pysuezV2==1.3.2"] + "requirements": ["pysuezV2==1.3.5"] } diff --git a/requirements_all.txt b/requirements_all.txt index 4f61b88ed00..9c1285b6d32 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2301,7 +2301,7 @@ pysqueezebox==0.10.0 pystiebeleltron==0.0.1.dev2 # homeassistant.components.suez_water -pysuezV2==1.3.2 +pysuezV2==1.3.5 # homeassistant.components.switchbee pyswitchbee==1.8.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 06448688306..56c8be03f43 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1864,7 +1864,7 @@ pyspeex-noise==1.0.2 pysqueezebox==0.10.0 # homeassistant.components.suez_water -pysuezV2==1.3.2 +pysuezV2==1.3.5 # homeassistant.components.switchbee pyswitchbee==1.8.3 From 11b65b1eb313c0d816bfdc99d36b7c9d3d347cd8 Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Fri, 13 Dec 2024 09:21:14 +0100 Subject: [PATCH 0609/1198] Bump watchdog to 6.0.0 (#132895) --- .../components/folder_watcher/__init__.py | 14 +++++++++----- .../components/folder_watcher/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 12 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/folder_watcher/__init__.py b/homeassistant/components/folder_watcher/__init__.py index 3aeaa6f7ef2..dd56b3aad72 100644 --- a/homeassistant/components/folder_watcher/__init__.py +++ b/homeassistant/components/folder_watcher/__init__.py @@ -7,6 +7,10 @@ import os from typing import cast from watchdog.events import ( + DirCreatedEvent, + DirDeletedEvent, + DirModifiedEvent, + DirMovedEvent, FileClosedEvent, FileCreatedEvent, FileDeletedEvent, @@ -68,7 +72,7 @@ class EventHandler(PatternMatchingEventHandler): def __init__(self, patterns: list[str], hass: HomeAssistant, entry_id: str) -> None: """Initialise the EventHandler.""" - super().__init__(patterns) + super().__init__(patterns=patterns) self.hass = hass self.entry_id = entry_id @@ -101,19 +105,19 @@ class EventHandler(PatternMatchingEventHandler): signal = f"folder_watcher-{self.entry_id}" dispatcher_send(self.hass, signal, event.event_type, fireable) - def on_modified(self, event: FileModifiedEvent) -> None: + def on_modified(self, event: DirModifiedEvent | FileModifiedEvent) -> None: """File modified.""" self.process(event) - def on_moved(self, event: FileMovedEvent) -> None: + def on_moved(self, event: DirMovedEvent | FileMovedEvent) -> None: """File moved.""" self.process(event, moved=True) - def on_created(self, event: 
FileCreatedEvent) -> None: + def on_created(self, event: DirCreatedEvent | FileCreatedEvent) -> None: """File created.""" self.process(event) - def on_deleted(self, event: FileDeletedEvent) -> None: + def on_deleted(self, event: DirDeletedEvent | FileDeletedEvent) -> None: """File deleted.""" self.process(event) diff --git a/homeassistant/components/folder_watcher/manifest.json b/homeassistant/components/folder_watcher/manifest.json index 7b471e08fcc..1f0d9c595ee 100644 --- a/homeassistant/components/folder_watcher/manifest.json +++ b/homeassistant/components/folder_watcher/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "loggers": ["watchdog"], "quality_scale": "internal", - "requirements": ["watchdog==2.3.1"] + "requirements": ["watchdog==6.0.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 9c1285b6d32..e4fcb06671b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2980,7 +2980,7 @@ wakeonlan==2.1.0 wallbox==0.7.0 # homeassistant.components.folder_watcher -watchdog==2.3.1 +watchdog==6.0.0 # homeassistant.components.waterfurnace waterfurnace==1.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 56c8be03f43..257125c450d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2387,7 +2387,7 @@ wakeonlan==2.1.0 wallbox==0.7.0 # homeassistant.components.folder_watcher -watchdog==2.3.1 +watchdog==6.0.0 # homeassistant.components.watergate watergate-local-api==2024.4.1 From e4cca3fe408ed2c20f3eeda9b4b7a73b7bdaf86f Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Fri, 13 Dec 2024 09:22:01 +0100 Subject: [PATCH 0610/1198] Update devcontainer to Python 3.13 (#132313) --- Dockerfile.dev | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile.dev b/Dockerfile.dev index 48f582a1581..5a3f1a2ae64 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -1,4 +1,4 @@ -FROM mcr.microsoft.com/devcontainers/python:1-3.12 +FROM mcr.microsoft.com/devcontainers/python:1-3.13 SHELL ["/bin/bash", "-o", "pipefail", "-c"] From f9f37b9932f345b8a0cc2615c7feacb6e903d6d9 Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Fri, 13 Dec 2024 09:23:53 +0100 Subject: [PATCH 0611/1198] Velbus docs quality bump (#133070) --- homeassistant/components/velbus/quality_scale.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/velbus/quality_scale.yaml b/homeassistant/components/velbus/quality_scale.yaml index 68fe5ead781..ab2df68f973 100644 --- a/homeassistant/components/velbus/quality_scale.yaml +++ b/homeassistant/components/velbus/quality_scale.yaml @@ -16,10 +16,10 @@ rules: comment: | Dynamically build up the port parameter based on inputs provided by the user, do not fill-in a name parameter, build it up in the config flow dependency-transparency: done - docs-actions: todo - docs-high-level-description: todo - docs-installation-instructions: todo - docs-removal-instructions: todo + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done entity-event-setup: todo entity-unique-id: done has-entity-name: todo From 899fb091fc12dc610c9f74291d61d5bfea8ef166 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 13 Dec 2024 09:31:21 +0100 Subject: [PATCH 0612/1198] Simplify access to hass in service calls (#133062) --- homeassistant/core.py | 6 +- tests/components/homeassistant/test_init.py | 1 + 
tests/components/text/test_init.py | 9 +- tests/conftest.py | 2 +- tests/helpers/test_entity_component.py | 17 +- tests/helpers/test_service.py | 259 ++++++++++++++------ tests/test_core.py | 4 +- 7 files changed, 204 insertions(+), 94 deletions(-) diff --git a/homeassistant/core.py b/homeassistant/core.py index 0640664d64f..da7a206b14e 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -2432,10 +2432,11 @@ class Service: class ServiceCall: """Representation of a call to a service.""" - __slots__ = ("domain", "service", "data", "context", "return_response") + __slots__ = ("hass", "domain", "service", "data", "context", "return_response") def __init__( self, + hass: HomeAssistant, domain: str, service: str, data: dict[str, Any] | None = None, @@ -2443,6 +2444,7 @@ class ServiceCall: return_response: bool = False, ) -> None: """Initialize a service call.""" + self.hass = hass self.domain = domain self.service = service self.data = ReadOnlyDict(data or {}) @@ -2768,7 +2770,7 @@ class ServiceRegistry: processed_data = service_data service_call = ServiceCall( - domain, service, processed_data, context, return_response + self._hass, domain, service, processed_data, context, return_response ) self._hass.bus.async_fire_internal( diff --git a/tests/components/homeassistant/test_init.py b/tests/components/homeassistant/test_init.py index 33d78cd6c9f..56eeb4177b1 100644 --- a/tests/components/homeassistant/test_init.py +++ b/tests/components/homeassistant/test_init.py @@ -184,6 +184,7 @@ async def test_turn_on_skips_domains_without_service( # because by mocking out the call service method, we mock out all # So we mimic how the service registry calls services service_call = ha.ServiceCall( + hass, "homeassistant", "turn_on", {"entity_id": ["light.test", "sensor.bla", "binary_sensor.blub", "light.bla"]}, diff --git a/tests/components/text/test_init.py b/tests/components/text/test_init.py index 8e20af6cb7a..3764d481928 100644 --- a/tests/components/text/test_init.py +++ b/tests/components/text/test_init.py @@ -64,21 +64,22 @@ async def test_text_set_value(hass: HomeAssistant) -> None: with pytest.raises(ValueError): await _async_set_value( - text, ServiceCall(DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: ""}) + text, ServiceCall(hass, DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: ""}) ) with pytest.raises(ValueError): await _async_set_value( - text, ServiceCall(DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: "hello world!"}) + text, + ServiceCall(hass, DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: "hello world!"}), ) with pytest.raises(ValueError): await _async_set_value( - text, ServiceCall(DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: "HELLO"}) + text, ServiceCall(hass, DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: "HELLO"}) ) await _async_set_value( - text, ServiceCall(DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: "test2"}) + text, ServiceCall(hass, DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: "test2"}) ) assert text.state == "test2" diff --git a/tests/conftest.py b/tests/conftest.py index c46ed0407e5..2cefe72f414 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1899,7 +1899,7 @@ def service_calls(hass: HomeAssistant) -> Generator[list[ServiceCall]]: return_response: bool = False, ) -> ServiceResponse: calls.append( - ServiceCall(domain, service, service_data, context, return_response) + ServiceCall(hass, domain, service, service_data, context, return_response) ) try: return await _original_async_call( diff --git a/tests/helpers/test_entity_component.py b/tests/helpers/test_entity_component.py index 9723b91eb9a..940bd3e37fd 
100644 --- a/tests/helpers/test_entity_component.py +++ b/tests/helpers/test_entity_component.py @@ -189,13 +189,14 @@ async def test_extract_from_service_available_device(hass: HomeAssistant) -> Non ] ) - call_1 = ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) + call_1 = ServiceCall(hass, "test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert sorted( ent.entity_id for ent in (await component.async_extract_from_service(call_1)) ) == ["test_domain.test_1", "test_domain.test_3"] call_2 = ServiceCall( + hass, "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, @@ -256,17 +257,18 @@ async def test_extract_from_service_fails_if_no_entity_id(hass: HomeAssistant) - ) assert ( - await component.async_extract_from_service(ServiceCall("test", "service")) == [] + await component.async_extract_from_service(ServiceCall(hass, "test", "service")) + == [] ) assert ( await component.async_extract_from_service( - ServiceCall("test", "service", {"entity_id": ENTITY_MATCH_NONE}) + ServiceCall(hass, "test", "service", {"entity_id": ENTITY_MATCH_NONE}) ) == [] ) assert ( await component.async_extract_from_service( - ServiceCall("test", "service", {"area_id": ENTITY_MATCH_NONE}) + ServiceCall(hass, "test", "service", {"area_id": ENTITY_MATCH_NONE}) ) == [] ) @@ -283,6 +285,7 @@ async def test_extract_from_service_filter_out_non_existing_entities( ) call = ServiceCall( + hass, "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, @@ -299,7 +302,7 @@ async def test_extract_from_service_no_group_expand(hass: HomeAssistant) -> None await component.async_setup({}) await component.async_add_entities([MockEntity(entity_id="group.test_group")]) - call = ServiceCall("test", "service", {"entity_id": ["group.test_group"]}) + call = ServiceCall(hass, "test", "service", {"entity_id": ["group.test_group"]}) extracted = await component.async_extract_from_service(call, expand_group=False) assert len(extracted) == 1 @@ -465,7 +468,7 @@ async def test_extract_all_omit_entity_id( [MockEntity(name="test_1"), MockEntity(name="test_2")] ) - call = ServiceCall("test", "service") + call = ServiceCall(hass, "test", "service") assert ( sorted( @@ -485,7 +488,7 @@ async def test_extract_all_use_match_all( [MockEntity(name="test_1"), MockEntity(name="test_2")] ) - call = ServiceCall("test", "service", {"entity_id": "all"}) + call = ServiceCall(hass, "test", "service", {"entity_id": "all"}) assert sorted( ent.entity_id for ent in await component.async_extract_from_service(call) diff --git a/tests/helpers/test_service.py b/tests/helpers/test_service.py index e63cb69909c..6d03e09cdf7 100644 --- a/tests/helpers/test_service.py +++ b/tests/helpers/test_service.py @@ -642,11 +642,11 @@ async def test_extract_entity_ids(hass: HomeAssistant) -> None: order=None, ) - call = ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) + call = ServiceCall(hass, "light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) - call = ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) + call = ServiceCall(hass, "light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call @@ -659,7 +659,7 @@ async def test_extract_entity_ids(hass: HomeAssistant) -> None: assert ( await service.async_extract_entity_ids( hass, - ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), + 
ServiceCall(hass, "light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) @@ -669,20 +669,22 @@ async def test_extract_entity_ids_from_area( hass: HomeAssistant, floor_area_mock ) -> None: """Test extract_entity_ids method with areas.""" - call = ServiceCall("light", "turn_on", {"area_id": "own-area"}) + call = ServiceCall(hass, "light", "turn_on", {"area_id": "own-area"}) assert { "light.in_own_area", } == await service.async_extract_entity_ids(hass, call) - call = ServiceCall("light", "turn_on", {"area_id": "test-area"}) + call = ServiceCall(hass, "light", "turn_on", {"area_id": "test-area"}) assert { "light.in_area", "light.assigned_to_area", } == await service.async_extract_entity_ids(hass, call) - call = ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) + call = ServiceCall( + hass, "light", "turn_on", {"area_id": ["test-area", "diff-area"]} + ) assert { "light.in_area", @@ -692,7 +694,7 @@ async def test_extract_entity_ids_from_area( assert ( await service.async_extract_entity_ids( - hass, ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) + hass, ServiceCall(hass, "light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) @@ -703,13 +705,13 @@ async def test_extract_entity_ids_from_devices( ) -> None: """Test extract_entity_ids method with devices.""" assert await service.async_extract_entity_ids( - hass, ServiceCall("light", "turn_on", {"device_id": "device-no-area-id"}) + hass, ServiceCall(hass, "light", "turn_on", {"device_id": "device-no-area-id"}) ) == { "light.no_area", } assert await service.async_extract_entity_ids( - hass, ServiceCall("light", "turn_on", {"device_id": "device-area-a-id"}) + hass, ServiceCall(hass, "light", "turn_on", {"device_id": "device-area-a-id"}) ) == { "light.in_area_a", "light.in_area_b", @@ -717,7 +719,8 @@ async def test_extract_entity_ids_from_devices( assert ( await service.async_extract_entity_ids( - hass, ServiceCall("light", "turn_on", {"device_id": "non-existing-id"}) + hass, + ServiceCall(hass, "light", "turn_on", {"device_id": "non-existing-id"}), ) == set() ) @@ -726,14 +729,16 @@ async def test_extract_entity_ids_from_devices( @pytest.mark.usefixtures("floor_area_mock") async def test_extract_entity_ids_from_floor(hass: HomeAssistant) -> None: """Test extract_entity_ids method with floors.""" - call = ServiceCall("light", "turn_on", {"floor_id": "test-floor"}) + call = ServiceCall(hass, "light", "turn_on", {"floor_id": "test-floor"}) assert { "light.in_area", "light.assigned_to_area", } == await service.async_extract_entity_ids(hass, call) - call = ServiceCall("light", "turn_on", {"floor_id": ["test-floor", "floor-a"]}) + call = ServiceCall( + hass, "light", "turn_on", {"floor_id": ["test-floor", "floor-a"]} + ) assert { "light.in_area", @@ -743,7 +748,7 @@ async def test_extract_entity_ids_from_floor(hass: HomeAssistant) -> None: assert ( await service.async_extract_entity_ids( - hass, ServiceCall("light", "turn_on", {"floor_id": ENTITY_MATCH_NONE}) + hass, ServiceCall(hass, "light", "turn_on", {"floor_id": ENTITY_MATCH_NONE}) ) == set() ) @@ -752,13 +757,13 @@ async def test_extract_entity_ids_from_floor(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("label_mock") async def test_extract_entity_ids_from_labels(hass: HomeAssistant) -> None: """Test extract_entity_ids method with labels.""" - call = ServiceCall("light", "turn_on", {"label_id": "my-label"}) + call = ServiceCall(hass, "light", "turn_on", {"label_id": "my-label"}) assert { "light.with_my_label", } == await 
service.async_extract_entity_ids(hass, call) - call = ServiceCall("light", "turn_on", {"label_id": "label1"}) + call = ServiceCall(hass, "light", "turn_on", {"label_id": "label1"}) assert { "light.with_label1_from_device", @@ -767,14 +772,14 @@ async def test_extract_entity_ids_from_labels(hass: HomeAssistant) -> None: "light.with_label1_and_label2_from_device", } == await service.async_extract_entity_ids(hass, call) - call = ServiceCall("light", "turn_on", {"label_id": ["label2"]}) + call = ServiceCall(hass, "light", "turn_on", {"label_id": ["label2"]}) assert { "light.with_labels_from_device", "light.with_label1_and_label2_from_device", } == await service.async_extract_entity_ids(hass, call) - call = ServiceCall("light", "turn_on", {"label_id": ["label_area"]}) + call = ServiceCall(hass, "light", "turn_on", {"label_id": ["label_area"]}) assert { "light.with_labels_from_device", @@ -782,7 +787,7 @@ async def test_extract_entity_ids_from_labels(hass: HomeAssistant) -> None: assert ( await service.async_extract_entity_ids( - hass, ServiceCall("light", "turn_on", {"label_id": ENTITY_MATCH_NONE}) + hass, ServiceCall(hass, "light", "turn_on", {"label_id": ENTITY_MATCH_NONE}) ) == set() ) @@ -1281,7 +1286,7 @@ async def test_call_with_required_features(hass: HomeAssistant, mock_entities) - hass, mock_entities, HassJob(test_service_mock), - ServiceCall("test_domain", "test_service", {"entity_id": "all"}), + ServiceCall(hass, "test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) @@ -1305,7 +1310,7 @@ async def test_call_with_required_features(hass: HomeAssistant, mock_entities) - mock_entities, HassJob(test_service_mock), ServiceCall( - "test_domain", "test_service", {"entity_id": "light.living_room"} + hass, "test_domain", "test_service", {"entity_id": "light.living_room"} ), required_features=[SUPPORT_A], ) @@ -1321,7 +1326,7 @@ async def test_call_with_both_required_features( hass, mock_entities, HassJob(test_service_mock), - ServiceCall("test_domain", "test_service", {"entity_id": "all"}), + ServiceCall(hass, "test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) @@ -1340,7 +1345,7 @@ async def test_call_with_one_of_required_features( hass, mock_entities, HassJob(test_service_mock), - ServiceCall("test_domain", "test_service", {"entity_id": "all"}), + ServiceCall(hass, "test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) @@ -1361,7 +1366,9 @@ async def test_call_with_sync_func(hass: HomeAssistant, mock_entities) -> None: hass, mock_entities, HassJob(test_service_mock), - ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), + ServiceCall( + hass, "test_domain", "test_service", {"entity_id": "light.kitchen"} + ), ) assert test_service_mock.call_count == 1 @@ -1374,6 +1381,7 @@ async def test_call_with_sync_attr(hass: HomeAssistant, mock_entities) -> None: mock_entities, "sync_method", ServiceCall( + hass, "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, @@ -1392,6 +1400,7 @@ async def test_call_context_user_not_exist(hass: HomeAssistant) -> None: {}, Mock(), ServiceCall( + hass, "test_domain", "test_service", context=Context(user_id="non-existing"), @@ -1419,6 +1428,7 @@ async def test_call_context_target_all( mock_entities, Mock(), ServiceCall( + hass, "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, @@ -1447,6 +1457,7 @@ async def test_call_context_target_specific( mock_entities, Mock(), 
ServiceCall( + hass, "test_domain", "test_service", {"entity_id": "light.kitchen"}, @@ -1474,6 +1485,7 @@ async def test_call_context_target_specific_no_auth( mock_entities, Mock(), ServiceCall( + hass, "test_domain", "test_service", {"entity_id": "light.kitchen"}, @@ -1494,7 +1506,7 @@ async def test_call_no_context_target_all( mock_entities, Mock(), ServiceCall( - "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} + hass, "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) @@ -1513,6 +1525,7 @@ async def test_call_no_context_target_specific( mock_entities, Mock(), ServiceCall( + hass, "test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, @@ -1534,7 +1547,7 @@ async def test_call_with_match_all( hass, mock_entities, Mock(), - ServiceCall("test_domain", "test_service", {"entity_id": "all"}), + ServiceCall(hass, "test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 @@ -1551,7 +1564,7 @@ async def test_call_with_omit_entity_id( hass, mock_entities, Mock(), - ServiceCall("test_domain", "test_service"), + ServiceCall(hass, "test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 @@ -1797,7 +1810,7 @@ async def test_extract_from_service_available_device(hass: HomeAssistant) -> Non MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] - call_1 = ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) + call_1 = ServiceCall(hass, "test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert [ ent.entity_id @@ -1805,6 +1818,7 @@ async def test_extract_from_service_available_device(hass: HomeAssistant) -> Non ] == ["test_domain.test_1", "test_domain.test_3"] call_2 = ServiceCall( + hass, "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, @@ -1820,6 +1834,7 @@ async def test_extract_from_service_available_device(hass: HomeAssistant) -> Non hass, entities, ServiceCall( + hass, "test", "service", data={"entity_id": ENTITY_MATCH_NONE}, @@ -1835,7 +1850,7 @@ async def test_extract_from_service_empty_if_no_entity_id(hass: HomeAssistant) - MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] - call = ServiceCall("test", "service") + call = ServiceCall(hass, "test", "service") assert [ ent.entity_id @@ -1853,6 +1868,7 @@ async def test_extract_from_service_filter_out_non_existing_entities( ] call = ServiceCall( + hass, "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, @@ -1874,12 +1890,14 @@ async def test_extract_from_service_area_id( MockEntity(name="diff_area", entity_id="light.diff_area"), ] - call = ServiceCall("light", "turn_on", {"area_id": "test-area"}) + call = ServiceCall(hass, "light", "turn_on", {"area_id": "test-area"}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" - call = ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) + call = ServiceCall( + hass, "light", "turn_on", {"area_id": ["test-area", "diff-area"]} + ) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ @@ -1888,6 +1906,7 @@ async def test_extract_from_service_area_id( ] call = ServiceCall( + hass, "light", "turn_on", {"area_id": ["test-area", "diff-area"], "device_id": 
"device-no-area-id"}, @@ -1912,17 +1931,17 @@ async def test_extract_from_service_label_id(hass: HomeAssistant) -> None: ), ] - call = ServiceCall("light", "turn_on", {"label_id": "label_area"}) + call = ServiceCall(hass, "light", "turn_on", {"label_id": "label_area"}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.with_labels_from_device" - call = ServiceCall("light", "turn_on", {"label_id": "my-label"}) + call = ServiceCall(hass, "light", "turn_on", {"label_id": "my-label"}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.with_my_label" - call = ServiceCall("light", "turn_on", {"label_id": ["my-label", "label1"]}) + call = ServiceCall(hass, "light", "turn_on", {"label_id": ["my-label", "label1"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ @@ -1931,6 +1950,7 @@ async def test_extract_from_service_label_id(hass: HomeAssistant) -> None: ] call = ServiceCall( + hass, "light", "turn_on", {"label_id": ["my-label", "label1"], "device_id": "device-no-labels"}, @@ -1949,6 +1969,7 @@ async def test_entity_service_call_warn_referenced( ) -> None: """Test we only warn for referenced entities in entity_service_call.""" call = ServiceCall( + hass, "light", "turn_on", { @@ -1972,6 +1993,7 @@ async def test_async_extract_entities_warn_referenced( ) -> None: """Test we only warn for referenced entities in async_extract_entities.""" call = ServiceCall( + hass, "light", "turn_on", { @@ -1997,6 +2019,7 @@ async def test_async_extract_config_entry_ids(hass: HomeAssistant) -> None: device_no_entities = dr.DeviceEntry(id="device-no-entities", config_entries={"abc"}) call = ServiceCall( + hass, "homeassistant", "reload_config_entry", { @@ -2042,17 +2065,33 @@ async def test_reload_service_helper(hass: HomeAssistant) -> None: reloader = service.ReloadServiceHelper(reload_service_handler, reload_targets) tasks = [ # This reload task will start executing first, (target1) - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), # These reload tasks will be deduplicated to (target2, target3, target4, target1) # while the first task is reloaded, note that target1 can't be deduplicated # because it's already being reloaded. 
- reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), ] await asyncio.gather(*tasks) assert reloaded == unordered( @@ -2063,13 +2102,21 @@ async def test_reload_service_helper(hass: HomeAssistant) -> None: reloaded.clear() tasks = [ # This reload task will start executing first, (target1) - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), # These reload tasks will be deduplicated to (target2, target3, target4, all) # while the first task is reloaded. - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), - reloader.execute_service(ServiceCall("test", "test")), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), + reloader.execute_service(ServiceCall(hass, "test", "test")), ] await asyncio.gather(*tasks) assert reloaded == unordered(["target1", "target2", "target3", "target4", "all"]) @@ -2078,13 +2125,21 @@ async def test_reload_service_helper(hass: HomeAssistant) -> None: reloaded.clear() tasks = [ # This reload task will start executing first, (all) - reloader.execute_service(ServiceCall("test", "test")), + reloader.execute_service(ServiceCall(hass, "test", "test")), # These reload tasks will be deduplicated to (target1, target2, target3, target4) # while the first task is reloaded. 
- reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), ] await asyncio.gather(*tasks) assert reloaded == unordered(["all", "target1", "target2", "target3", "target4"]) @@ -2093,21 +2148,45 @@ async def test_reload_service_helper(hass: HomeAssistant) -> None: reloaded.clear() tasks = [ # This reload task will start executing first, (target1) - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), # These reload tasks will be deduplicated to (target2, target3, target4, target1) # while the first task is reloaded, note that target1 can't be deduplicated # because it's already being reloaded. - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), ] await asyncio.gather(*tasks) assert reloaded == unordered( @@ -2118,14 +2197,22 @@ async def test_reload_service_helper(hass: HomeAssistant) -> None: reloaded.clear() tasks = [ # This reload task will start executing first, (target1) - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), + 
reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), # These reload tasks will be deduplicated to (target2, target3, target4, all) # while the first task is reloaded. - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), - reloader.execute_service(ServiceCall("test", "test")), - reloader.execute_service(ServiceCall("test", "test")), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), + reloader.execute_service(ServiceCall(hass, "test", "test")), + reloader.execute_service(ServiceCall(hass, "test", "test")), ] await asyncio.gather(*tasks) assert reloaded == unordered(["target1", "target2", "target3", "target4", "all"]) @@ -2134,17 +2221,33 @@ async def test_reload_service_helper(hass: HomeAssistant) -> None: reloaded.clear() tasks = [ # This reload task will start executing first, (all) - reloader.execute_service(ServiceCall("test", "test")), + reloader.execute_service(ServiceCall(hass, "test", "test")), # These reload tasks will be deduplicated to (target1, target2, target3, target4) # while the first task is reloaded. - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), ] await asyncio.gather(*tasks) assert reloaded == unordered(["all", "target1", "target2", "target3", "target4"]) diff --git a/tests/test_core.py b/tests/test_core.py index 0100c35055e..60b907d57ca 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -1562,10 +1562,10 @@ async def test_statemachine_avoids_updating_attributes(hass: HomeAssistant) -> N def test_service_call_repr() -> None: """Test ServiceCall repr.""" - call = ha.ServiceCall("homeassistant", "start") + call = ha.ServiceCall(None, "homeassistant", "start") assert str(call) == f"" - call2 = ha.ServiceCall("homeassistant", "start", {"fast": "yes"}) + call2 = ha.ServiceCall(None, "homeassistant", "start", {"fast": 
"yes"}) assert ( str(call2) == f"" From a0e49ebc97cd860637f74976931487b2c65a0e99 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 13 Dec 2024 09:33:40 +0100 Subject: [PATCH 0613/1198] Use internal min/max mireds in template (#133113) --- homeassistant/components/template/light.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/template/light.py b/homeassistant/components/template/light.py index 9c7bc23022a..0654a42406a 100644 --- a/homeassistant/components/template/light.py +++ b/homeassistant/components/template/light.py @@ -78,6 +78,9 @@ CONF_TEMPERATURE_TEMPLATE = "temperature_template" CONF_WHITE_VALUE_ACTION = "set_white_value" CONF_WHITE_VALUE_TEMPLATE = "white_value_template" +DEFAULT_MIN_MIREDS = 153 +DEFAULT_MAX_MIREDS = 500 + LIGHT_SCHEMA = vol.All( cv.deprecated(CONF_ENTITY_ID), vol.Schema( @@ -764,7 +767,9 @@ class LightTemplate(TemplateEntity, LightEntity): self._temperature = None return temperature = int(render) - if self.min_mireds <= temperature <= self.max_mireds: + min_mireds = self._min_mireds or DEFAULT_MIN_MIREDS + max_mireds = self._max_mireds or DEFAULT_MAX_MIREDS + if min_mireds <= temperature <= max_mireds: self._temperature = temperature else: _LOGGER.error( @@ -774,8 +779,8 @@ class LightTemplate(TemplateEntity, LightEntity): ), temperature, self.entity_id, - self.min_mireds, - self.max_mireds, + min_mireds, + max_mireds, ) self._temperature = None except ValueError: From 9ab69aa41c4afe15a48d1af03770e49a734c669b Mon Sep 17 00:00:00 2001 From: Stefan Agner Date: Fri, 13 Dec 2024 09:33:58 +0100 Subject: [PATCH 0614/1198] Add mWh as unit of measurement for Matter energy sensors (#133005) --- homeassistant/components/matter/sensor.py | 5 +++-- homeassistant/components/number/const.py | 4 ++-- homeassistant/components/random/config_flow.py | 6 +++++- homeassistant/components/sensor/const.py | 4 ++-- homeassistant/components/template/config_flow.py | 6 +++++- homeassistant/const.py | 1 + homeassistant/util/unit_conversion.py | 1 + tests/components/matter/snapshots/test_sensor.ambr | 6 ++++++ tests/components/template/test_config_flow.py | 2 +- tests/util/test_unit_conversion.py | 2 ++ 10 files changed, 28 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/matter/sensor.py b/homeassistant/components/matter/sensor.py index e10f081d497..b2a5da2aa71 100644 --- a/homeassistant/components/matter/sensor.py +++ b/homeassistant/components/matter/sensor.py @@ -612,11 +612,12 @@ DISCOVERY_SCHEMAS = [ key="ElectricalEnergyMeasurementCumulativeEnergyImported", device_class=SensorDeviceClass.ENERGY, entity_category=EntityCategory.DIAGNOSTIC, - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + native_unit_of_measurement=UnitOfEnergy.MILLIWATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, suggested_display_precision=3, state_class=SensorStateClass.TOTAL_INCREASING, # id 0 of the EnergyMeasurementStruct is the cumulative energy (in mWh) - measurement_to_ha=lambda x: x.energy / 1000000, + measurement_to_ha=lambda x: x.energy, ), entity_class=MatterSensor, required_attributes=( diff --git a/homeassistant/components/number/const.py b/homeassistant/components/number/const.py index 47158826e75..56466934e5f 100644 --- a/homeassistant/components/number/const.py +++ b/homeassistant/components/number/const.py @@ -163,7 +163,7 @@ class NumberDeviceClass(StrEnum): ENERGY = "energy" """Energy. 
- Unit of measurement: `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `MJ`, `GJ` + Unit of measurement: `J`, `kJ`, `MJ`, `GJ`, `mWh`, `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `cal`, `kcal`, `Mcal`, `Gcal` """ ENERGY_STORAGE = "energy_storage" @@ -172,7 +172,7 @@ class NumberDeviceClass(StrEnum): Use this device class for sensors measuring stored energy, for example the amount of electric energy currently stored in a battery or the capacity of a battery. - Unit of measurement: `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `MJ`, `GJ` + Unit of measurement: `J`, `kJ`, `MJ`, `GJ`, `mWh`, `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `cal`, `kcal`, `Mcal`, `Gcal` """ FREQUENCY = "frequency" diff --git a/homeassistant/components/random/config_flow.py b/homeassistant/components/random/config_flow.py index 00314169260..35b7757580e 100644 --- a/homeassistant/components/random/config_flow.py +++ b/homeassistant/components/random/config_flow.py @@ -106,8 +106,12 @@ def _validate_unit(options: dict[str, Any]) -> None: and (units := DEVICE_CLASS_UNITS.get(device_class)) and (unit := options.get(CONF_UNIT_OF_MEASUREMENT)) not in units ): + # Sort twice to make sure strings with same case-insensitive order of + # letters are sorted consistently still (sorted() is guaranteed stable). sorted_units = sorted( - [f"'{unit!s}'" if unit else "no unit of measurement" for unit in units], + sorted( + [f"'{unit!s}'" if unit else "no unit of measurement" for unit in units], + ), key=str.casefold, ) if len(sorted_units) == 1: diff --git a/homeassistant/components/sensor/const.py b/homeassistant/components/sensor/const.py index a2e3cb52173..2fb563051a9 100644 --- a/homeassistant/components/sensor/const.py +++ b/homeassistant/components/sensor/const.py @@ -191,7 +191,7 @@ class SensorDeviceClass(StrEnum): Use this device class for sensors measuring energy consumption, for example electric energy consumption. - Unit of measurement: `J`, `kJ`, `MJ`, `GJ`, `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `cal`, `kcal`, `Mcal`, `Gcal` + Unit of measurement: `J`, `kJ`, `MJ`, `GJ`, `mWh`, `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `cal`, `kcal`, `Mcal`, `Gcal` """ ENERGY_STORAGE = "energy_storage" @@ -200,7 +200,7 @@ class SensorDeviceClass(StrEnum): Use this device class for sensors measuring stored energy, for example the amount of electric energy currently stored in a battery or the capacity of a battery. - Unit of measurement: `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `MJ`, `GJ` + Unit of measurement: `J`, `kJ`, `MJ`, `GJ`, `mWh`, `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `cal`, `kcal`, `Mcal`, `Gcal` """ FREQUENCY = "frequency" diff --git a/homeassistant/components/template/config_flow.py b/homeassistant/components/template/config_flow.py index 8ecef8539d3..e6cc377bc26 100644 --- a/homeassistant/components/template/config_flow.py +++ b/homeassistant/components/template/config_flow.py @@ -235,8 +235,12 @@ def _validate_unit(options: dict[str, Any]) -> None: and (units := DEVICE_CLASS_UNITS.get(device_class)) is not None and (unit := options.get(CONF_UNIT_OF_MEASUREMENT)) not in units ): + # Sort twice to make sure strings with same case-insensitive order of + # letters are sorted consistently still. 
sorted_units = sorted( - [f"'{unit!s}'" if unit else "no unit of measurement" for unit in units], + sorted( + [f"'{unit!s}'" if unit else "no unit of measurement" for unit in units], + ), key=str.casefold, ) if len(sorted_units) == 1: diff --git a/homeassistant/const.py b/homeassistant/const.py index 2eb4194ad15..c026a8e5427 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -619,6 +619,7 @@ class UnitOfEnergy(StrEnum): KILO_JOULE = "kJ" MEGA_JOULE = "MJ" GIGA_JOULE = "GJ" + MILLIWATT_HOUR = "mWh" WATT_HOUR = "Wh" KILO_WATT_HOUR = "kWh" MEGA_WATT_HOUR = "MWh" diff --git a/homeassistant/util/unit_conversion.py b/homeassistant/util/unit_conversion.py index 3cffcb5768e..8bf6d4b9fc9 100644 --- a/homeassistant/util/unit_conversion.py +++ b/homeassistant/util/unit_conversion.py @@ -266,6 +266,7 @@ class EnergyConverter(BaseUnitConverter): UnitOfEnergy.KILO_JOULE: _WH_TO_J, UnitOfEnergy.MEGA_JOULE: _WH_TO_J / 1e3, UnitOfEnergy.GIGA_JOULE: _WH_TO_J / 1e6, + UnitOfEnergy.MILLIWATT_HOUR: 1e6, UnitOfEnergy.WATT_HOUR: 1e3, UnitOfEnergy.KILO_WATT_HOUR: 1, UnitOfEnergy.MEGA_WATT_HOUR: 1 / 1e3, diff --git a/tests/components/matter/snapshots/test_sensor.ambr b/tests/components/matter/snapshots/test_sensor.ambr index 96346b906c3..44ad02d4b1e 100644 --- a/tests/components/matter/snapshots/test_sensor.ambr +++ b/tests/components/matter/snapshots/test_sensor.ambr @@ -1543,6 +1543,9 @@ 'sensor': dict({ 'suggested_display_precision': 3, }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -2480,6 +2483,9 @@ 'sensor': dict({ 'suggested_display_precision': 3, }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, diff --git a/tests/components/template/test_config_flow.py b/tests/components/template/test_config_flow.py index e0d95ff968d..2c9b81e7c91 100644 --- a/tests/components/template/test_config_flow.py +++ b/tests/components/template/test_config_flow.py @@ -804,7 +804,7 @@ EARLY_END_ERROR = "invalid template (TemplateSyntaxError: unexpected 'end of tem ), "unit_of_measurement": ( "'None' is not a valid unit for device class 'energy'; " - "expected one of 'cal', 'Gcal', 'GJ', 'GWh', 'J', 'kcal', 'kJ', 'kWh', 'Mcal', 'MJ', 'MWh', 'TWh', 'Wh'" + "expected one of 'cal', 'Gcal', 'GJ', 'GWh', 'J', 'kcal', 'kJ', 'kWh', 'Mcal', 'MJ', 'MWh', 'mWh', 'TWh', 'Wh'" ), }, ), diff --git a/tests/util/test_unit_conversion.py b/tests/util/test_unit_conversion.py index 4d1eda3d8de..4be32b2851e 100644 --- a/tests/util/test_unit_conversion.py +++ b/tests/util/test_unit_conversion.py @@ -441,6 +441,8 @@ _CONVERTED_VALUE: dict[ (5, UnitOfElectricPotential.MICROVOLT, 5e-6, UnitOfElectricPotential.VOLT), ], EnergyConverter: [ + (10, UnitOfEnergy.MILLIWATT_HOUR, 0.00001, UnitOfEnergy.KILO_WATT_HOUR), + (10, UnitOfEnergy.WATT_HOUR, 10000, UnitOfEnergy.MILLIWATT_HOUR), (10, UnitOfEnergy.WATT_HOUR, 0.01, UnitOfEnergy.KILO_WATT_HOUR), (10, UnitOfEnergy.WATT_HOUR, 0.00001, UnitOfEnergy.MEGA_WATT_HOUR), (10, UnitOfEnergy.WATT_HOUR, 0.00000001, UnitOfEnergy.GIGA_WATT_HOUR), From 2cd4ebbfb20ebee2994e326bec44999f89211c18 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Fri, 13 Dec 2024 09:45:38 +0100 Subject: [PATCH 0615/1198] Bump deebot-client to 9.4.0 (#133114) --- homeassistant/components/ecovacs/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git 
a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index b9315e0c1c6..271f9ee8dcd 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.10", "deebot-client==9.3.0"] + "requirements": ["py-sucks==0.9.10", "deebot-client==9.4.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index e4fcb06671b..cc715c895f9 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -739,7 +739,7 @@ debugpy==1.8.8 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==9.3.0 +deebot-client==9.4.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 257125c450d..7094270a7a6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -629,7 +629,7 @@ dbus-fast==2.24.3 debugpy==1.8.8 # homeassistant.components.ecovacs -deebot-client==9.3.0 +deebot-client==9.4.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns From 566843591eccdc6c57468a0d1f39d56b618b942a Mon Sep 17 00:00:00 2001 From: Andrew Sayre <6730289+andrewsayre@users.noreply.github.com> Date: Fri, 13 Dec 2024 02:46:52 -0600 Subject: [PATCH 0616/1198] Remove HEOS yaml import (#133082) --- homeassistant/components/heos/__init__.py | 38 +---- homeassistant/components/heos/config_flow.py | 35 ++--- homeassistant/components/heos/const.py | 1 - homeassistant/components/heos/manifest.json | 1 + .../components/heos/quality_scale.yaml | 25 +-- homeassistant/components/heos/strings.json | 1 + tests/components/heos/conftest.py | 19 +++ tests/components/heos/test_config_flow.py | 145 +++++++----------- tests/components/heos/test_init.py | 29 ---- 9 files changed, 92 insertions(+), 202 deletions(-) diff --git a/homeassistant/components/heos/__init__.py b/homeassistant/components/heos/__init__.py index de56e541501..e6a46f5a4ca 100644 --- a/homeassistant/components/heos/__init__.py +++ b/homeassistant/components/heos/__init__.py @@ -8,23 +8,19 @@ from datetime import timedelta import logging from pyheos import Heos, HeosError, HeosPlayer, const as heos_const -import voluptuous as vol -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry +from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_STOP, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) -from homeassistant.helpers.typing import ConfigType from homeassistant.util import Throttle from . 
import services -from .config_flow import format_title from .const import ( COMMAND_RETRY_ATTEMPTS, COMMAND_RETRY_DELAY, @@ -35,14 +31,6 @@ from .const import ( PLATFORMS = [Platform.MEDIA_PLAYER] -CONFIG_SCHEMA = vol.Schema( - vol.All( - cv.deprecated(DOMAIN), - {DOMAIN: vol.Schema({vol.Required(CONF_HOST): cv.string})}, - ), - extra=vol.ALLOW_EXTRA, -) - MIN_UPDATE_SOURCES = timedelta(seconds=1) _LOGGER = logging.getLogger(__name__) @@ -61,30 +49,6 @@ class HeosRuntimeData: type HeosConfigEntry = ConfigEntry[HeosRuntimeData] -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the HEOS component.""" - if DOMAIN not in config: - return True - host = config[DOMAIN][CONF_HOST] - entries = hass.config_entries.async_entries(DOMAIN) - if not entries: - # Create new entry based on config - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data={CONF_HOST: host} - ) - ) - else: - # Check if host needs to be updated - entry = entries[0] - if entry.data[CONF_HOST] != host: - hass.config_entries.async_update_entry( - entry, title=format_title(host), data={**entry.data, CONF_HOST: host} - ) - - return True - - async def async_setup_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool: """Initialize config entry which represents the HEOS controller.""" # For backwards compat diff --git a/homeassistant/components/heos/config_flow.py b/homeassistant/components/heos/config_flow.py index 57ed51a3c05..e8a4dbf7b63 100644 --- a/homeassistant/components/heos/config_flow.py +++ b/homeassistant/components/heos/config_flow.py @@ -10,7 +10,7 @@ from homeassistant.components import ssdp from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST -from .const import DATA_DISCOVERED_HOSTS, DOMAIN +from .const import DOMAIN def format_title(host: str) -> str: @@ -34,43 +34,32 @@ class HeosFlowHandler(ConfigFlow, domain=DOMAIN): friendly_name = ( f"{discovery_info.upnp[ssdp.ATTR_UPNP_FRIENDLY_NAME]} ({hostname})" ) - self.hass.data.setdefault(DATA_DISCOVERED_HOSTS, {}) - self.hass.data[DATA_DISCOVERED_HOSTS][friendly_name] = hostname - # Abort if other flows in progress or an entry already exists - if self._async_in_progress() or self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") + self.hass.data.setdefault(DOMAIN, {}) + self.hass.data[DOMAIN][friendly_name] = hostname await self.async_set_unique_id(DOMAIN) # Show selection form return self.async_show_form(step_id="user") - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Occurs when an entry is setup through config.""" - host = import_data[CONF_HOST] - # raise_on_progress is False here in case ssdp discovers - # heos first which would block the import - await self.async_set_unique_id(DOMAIN, raise_on_progress=False) - return self.async_create_entry(title=format_title(host), data={CONF_HOST: host}) - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Obtain host and validate connection.""" - self.hass.data.setdefault(DATA_DISCOVERED_HOSTS, {}) - # Only a single entry is needed for all devices - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") + self.hass.data.setdefault(DOMAIN, {}) + await self.async_set_unique_id(DOMAIN) # Try connecting to host if provided errors = {} host = None if user_input is not None: host = user_input[CONF_HOST] # Map host 
from friendly name if in discovered hosts - host = self.hass.data[DATA_DISCOVERED_HOSTS].get(host, host) + host = self.hass.data[DOMAIN].get(host, host) heos = Heos(host) try: await heos.connect() - self.hass.data.pop(DATA_DISCOVERED_HOSTS) - return await self.async_step_import({CONF_HOST: host}) + self.hass.data.pop(DOMAIN) + return self.async_create_entry( + title=format_title(host), data={CONF_HOST: host} + ) except HeosError: errors[CONF_HOST] = "cannot_connect" finally: @@ -78,9 +67,7 @@ class HeosFlowHandler(ConfigFlow, domain=DOMAIN): # Return form host_type = ( - str - if not self.hass.data[DATA_DISCOVERED_HOSTS] - else vol.In(list(self.hass.data[DATA_DISCOVERED_HOSTS])) + str if not self.hass.data[DOMAIN] else vol.In(list(self.hass.data[DOMAIN])) ) return self.async_show_form( step_id="user", diff --git a/homeassistant/components/heos/const.py b/homeassistant/components/heos/const.py index 827a0c53fbf..5b2df2b5ebf 100644 --- a/homeassistant/components/heos/const.py +++ b/homeassistant/components/heos/const.py @@ -4,7 +4,6 @@ ATTR_PASSWORD = "password" ATTR_USERNAME = "username" COMMAND_RETRY_ATTEMPTS = 2 COMMAND_RETRY_DELAY = 1 -DATA_DISCOVERED_HOSTS = "heos_discovered_hosts" DOMAIN = "heos" SERVICE_SIGN_IN = "sign_in" SERVICE_SIGN_OUT = "sign_out" diff --git a/homeassistant/components/heos/manifest.json b/homeassistant/components/heos/manifest.json index a90f0aebaae..12f10bcd0e3 100644 --- a/homeassistant/components/heos/manifest.json +++ b/homeassistant/components/heos/manifest.json @@ -7,6 +7,7 @@ "iot_class": "local_push", "loggers": ["pyheos"], "requirements": ["pyheos==0.7.2"], + "single_config_entry": true, "ssdp": [ { "st": "urn:schemas-denon-com:device:ACT-Denon:1" diff --git a/homeassistant/components/heos/quality_scale.yaml b/homeassistant/components/heos/quality_scale.yaml index ed9939bf37c..861ca750780 100644 --- a/homeassistant/components/heos/quality_scale.yaml +++ b/homeassistant/components/heos/quality_scale.yaml @@ -8,19 +8,10 @@ rules: comment: Integration is a local push integration brands: done common-modules: todo - config-flow-test-coverage: - status: todo - comment: - 1. The config flow is 100% covered, however some tests need to let HA create the flow - handler instead of doing it manually in the test. - 2. We should also make sure every test ends in either CREATE_ENTRY or ABORT so we test - that the flow is able to recover from an error. + config-flow-test-coverage: done config-flow: - status: todo - comment: | - 1. YAML import to be removed after core team meeting discussion on approach. - 2. Consider enhnacement to automatically select a host when multiple are discovered. - 3. Move hass.data[heos_discovered_hosts] into hass.data[heos] + status: done + comment: Consider enhnacement to automatically select a host when multiple are discovered. dependency-transparency: done docs-actions: done docs-high-level-description: done @@ -34,15 +25,9 @@ rules: entity-unique-id: done has-entity-name: done runtime-data: done - test-before-configure: todo + test-before-configure: done test-before-setup: done - unique-config-entry: - status: todo - comment: | - The HEOS integration only supports a single config entry, but needs to be migrated to use - the `single_config_entry` flag. HEOS devices interconnect to each other, so connecting to - a single node yields access to all the devices setup with HEOS on your network. The HEOS API - documentation does not recommend connecting to multiple nodes which would provide no bennefit. 
+ unique-config-entry: done # Silver action-exceptions: status: todo diff --git a/homeassistant/components/heos/strings.json b/homeassistant/components/heos/strings.json index df18fc7834a..20a8a2e978b 100644 --- a/homeassistant/components/heos/strings.json +++ b/homeassistant/components/heos/strings.json @@ -16,6 +16,7 @@ "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" }, "abort": { + "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" } }, diff --git a/tests/components/heos/conftest.py b/tests/components/heos/conftest.py index a12f4c610ad..95a388d87a8 100644 --- a/tests/components/heos/conftest.py +++ b/tests/components/heos/conftest.py @@ -164,6 +164,25 @@ def discovery_data_fixture() -> dict: ) +@pytest.fixture(name="discovery_data_bedroom") +def discovery_data_fixture_bedroom() -> dict: + """Return mock discovery data for testing.""" + return ssdp.SsdpServiceInfo( + ssdp_usn="mock_usn", + ssdp_st="mock_st", + ssdp_location="http://127.0.0.2:60006/upnp/desc/aios_device/aios_device.xml", + upnp={ + ssdp.ATTR_UPNP_DEVICE_TYPE: "urn:schemas-denon-com:device:AiosDevice:1", + ssdp.ATTR_UPNP_FRIENDLY_NAME: "Bedroom", + ssdp.ATTR_UPNP_MANUFACTURER: "Denon", + ssdp.ATTR_UPNP_MODEL_NAME: "HEOS Drive", + ssdp.ATTR_UPNP_MODEL_NUMBER: "DWSA-10 4.0", + ssdp.ATTR_UPNP_SERIAL: None, + ssdp.ATTR_UPNP_UDN: "uuid:e61de70c-2250-1c22-0080-0005cdf512be", + }, + ) + + @pytest.fixture(name="quick_selects") def quick_selects_fixture() -> dict[int, str]: """Create a dict of quick selects for testing.""" diff --git a/tests/components/heos/test_config_flow.py b/tests/components/heos/test_config_flow.py index 7b737d7bb4b..464b62df157 100644 --- a/tests/components/heos/test_config_flow.py +++ b/tests/components/heos/test_config_flow.py @@ -1,14 +1,10 @@ """Tests for the Heos config flow module.""" -from unittest.mock import patch -from urllib.parse import urlparse - from pyheos import HeosError from homeassistant.components import heos, ssdp -from homeassistant.components.heos.config_flow import HeosFlowHandler -from homeassistant.components.heos.const import DATA_DISCOVERED_HOSTS, DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_SSDP, SOURCE_USER +from homeassistant.components.heos.const import DOMAIN +from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -17,18 +13,20 @@ from homeassistant.data_entry_flow import FlowResultType async def test_flow_aborts_already_setup(hass: HomeAssistant, config_entry) -> None: """Test flow aborts when entry already setup.""" config_entry.add_to_hass(hass) - flow = HeosFlowHandler() - flow.hass = hass - result = await flow.async_step_user() + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.ABORT assert result["reason"] == "single_instance_allowed" async def test_no_host_shows_form(hass: HomeAssistant) -> None: """Test form is shown when host not provided.""" - flow = HeosFlowHandler() - flow.hass = hass - result = await flow.async_step_user() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert result["errors"] == {} @@ -45,73 +43,69 @@ 
async def test_cannot_connect_shows_error_form(hass: HomeAssistant, controller) assert result["errors"][CONF_HOST] == "cannot_connect" assert controller.connect.call_count == 1 assert controller.disconnect.call_count == 1 - controller.connect.reset_mock() - controller.disconnect.reset_mock() async def test_create_entry_when_host_valid(hass: HomeAssistant, controller) -> None: """Test result type is create entry when host is valid.""" data = {CONF_HOST: "127.0.0.1"} - with patch("homeassistant.components.heos.async_setup_entry", return_value=True): - result = await hass.config_entries.flow.async_init( - heos.DOMAIN, context={"source": SOURCE_USER}, data=data - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["result"].unique_id == DOMAIN - assert result["title"] == "Controller (127.0.0.1)" - assert result["data"] == data - assert controller.connect.call_count == 1 - assert controller.disconnect.call_count == 1 + + result = await hass.config_entries.flow.async_init( + heos.DOMAIN, context={"source": SOURCE_USER}, data=data + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].unique_id == DOMAIN + assert result["title"] == "Controller (127.0.0.1)" + assert result["data"] == data + assert controller.connect.call_count == 2 # Also called in async_setup_entry + assert controller.disconnect.call_count == 1 async def test_create_entry_when_friendly_name_valid( hass: HomeAssistant, controller ) -> None: """Test result type is create entry when friendly name is valid.""" - hass.data[DATA_DISCOVERED_HOSTS] = {"Office (127.0.0.1)": "127.0.0.1"} + hass.data[DOMAIN] = {"Office (127.0.0.1)": "127.0.0.1"} data = {CONF_HOST: "Office (127.0.0.1)"} - with patch("homeassistant.components.heos.async_setup_entry", return_value=True): - result = await hass.config_entries.flow.async_init( - heos.DOMAIN, context={"source": SOURCE_USER}, data=data - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["result"].unique_id == DOMAIN - assert result["title"] == "Controller (127.0.0.1)" - assert result["data"] == {CONF_HOST: "127.0.0.1"} - assert controller.connect.call_count == 1 - assert controller.disconnect.call_count == 1 - assert DATA_DISCOVERED_HOSTS not in hass.data + + result = await hass.config_entries.flow.async_init( + heos.DOMAIN, context={"source": SOURCE_USER}, data=data + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].unique_id == DOMAIN + assert result["title"] == "Controller (127.0.0.1)" + assert result["data"] == {CONF_HOST: "127.0.0.1"} + assert controller.connect.call_count == 2 # Also called in async_setup_entry + assert controller.disconnect.call_count == 1 + assert DOMAIN not in hass.data async def test_discovery_shows_create_form( - hass: HomeAssistant, controller, discovery_data: ssdp.SsdpServiceInfo + hass: HomeAssistant, + controller, + discovery_data: ssdp.SsdpServiceInfo, + discovery_data_bedroom: ssdp.SsdpServiceInfo, ) -> None: - """Test discovery shows form to confirm setup and subsequent abort.""" + """Test discovery shows form to confirm setup.""" - await hass.config_entries.flow.async_init( + # Single discovered host shows form for user to finish setup. 
+ result = await hass.config_entries.flow.async_init( heos.DOMAIN, context={"source": SOURCE_SSDP}, data=discovery_data ) - await hass.async_block_till_done() - flows_in_progress = hass.config_entries.flow.async_progress() - assert flows_in_progress[0]["context"]["unique_id"] == DOMAIN - assert len(flows_in_progress) == 1 - assert hass.data[DATA_DISCOVERED_HOSTS] == {"Office (127.0.0.1)": "127.0.0.1"} + assert hass.data[DOMAIN] == {"Office (127.0.0.1)": "127.0.0.1"} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" - port = urlparse(discovery_data.ssdp_location).port - discovery_data.ssdp_location = f"http://127.0.0.2:{port}/" - discovery_data.upnp[ssdp.ATTR_UPNP_FRIENDLY_NAME] = "Bedroom" - - await hass.config_entries.flow.async_init( - heos.DOMAIN, context={"source": SOURCE_SSDP}, data=discovery_data + # Subsequent discovered hosts append to discovered hosts and abort. + result = await hass.config_entries.flow.async_init( + heos.DOMAIN, context={"source": SOURCE_SSDP}, data=discovery_data_bedroom ) - await hass.async_block_till_done() - flows_in_progress = hass.config_entries.flow.async_progress() - assert flows_in_progress[0]["context"]["unique_id"] == DOMAIN - assert len(flows_in_progress) == 1 - assert hass.data[DATA_DISCOVERED_HOSTS] == { + assert hass.data[DOMAIN] == { "Office (127.0.0.1)": "127.0.0.1", "Bedroom (127.0.0.2)": "127.0.0.2", } + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_in_progress" async def test_discovery_flow_aborts_already_setup( @@ -119,41 +113,10 @@ async def test_discovery_flow_aborts_already_setup( ) -> None: """Test discovery flow aborts when entry already setup.""" config_entry.add_to_hass(hass) - flow = HeosFlowHandler() - flow.hass = hass - result = await flow.async_step_ssdp(discovery_data) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_SSDP}, data=discovery_data + ) + assert result["type"] is FlowResultType.ABORT assert result["reason"] == "single_instance_allowed" - - -async def test_discovery_sets_the_unique_id( - hass: HomeAssistant, controller, discovery_data: ssdp.SsdpServiceInfo -) -> None: - """Test discovery sets the unique id.""" - - port = urlparse(discovery_data.ssdp_location).port - discovery_data.ssdp_location = f"http://127.0.0.2:{port}/" - discovery_data.upnp[ssdp.ATTR_UPNP_FRIENDLY_NAME] = "Bedroom" - - await hass.config_entries.flow.async_init( - heos.DOMAIN, context={"source": SOURCE_SSDP}, data=discovery_data - ) - await hass.async_block_till_done() - flows_in_progress = hass.config_entries.flow.async_progress() - assert flows_in_progress[0]["context"]["unique_id"] == DOMAIN - assert len(flows_in_progress) == 1 - assert hass.data[DATA_DISCOVERED_HOSTS] == {"Bedroom (127.0.0.2)": "127.0.0.2"} - - -async def test_import_sets_the_unique_id(hass: HomeAssistant, controller) -> None: - """Test import sets the unique id.""" - - with patch("homeassistant.components.heos.async_setup_entry", return_value=True): - result = await hass.config_entries.flow.async_init( - heos.DOMAIN, - context={"source": SOURCE_IMPORT}, - data={CONF_HOST: "127.0.0.2"}, - ) - await hass.async_block_till_done() - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["result"].unique_id == DOMAIN diff --git a/tests/components/heos/test_init.py b/tests/components/heos/test_init.py index 04b745135d4..8d2e3b68a22 100644 --- a/tests/components/heos/test_init.py +++ b/tests/components/heos/test_init.py @@ -13,40 +13,11 @@ from 
homeassistant.components.heos import ( async_unload_entry, ) from homeassistant.components.heos.const import DOMAIN -from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component -async def test_async_setup_creates_entry(hass: HomeAssistant, config) -> None: - """Test component setup creates entry from config.""" - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - entry = entries[0] - assert entry.title == "Controller (127.0.0.1)" - assert entry.data == {CONF_HOST: "127.0.0.1"} - assert entry.unique_id == DOMAIN - - -async def test_async_setup_updates_entry( - hass: HomeAssistant, config_entry, config, controller -) -> None: - """Test component setup updates entry from config.""" - config[DOMAIN][CONF_HOST] = "127.0.0.2" - config_entry.add_to_hass(hass) - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - entry = entries[0] - assert entry.title == "Controller (127.0.0.2)" - assert entry.data == {CONF_HOST: "127.0.0.2"} - assert entry.unique_id == DOMAIN - - async def test_async_setup_returns_true( hass: HomeAssistant, config_entry, config ) -> None: From 3d93561e0a69c149a6f000882e82fd1e1422d0d6 Mon Sep 17 00:00:00 2001 From: Jan Rieger <271149+jrieger@users.noreply.github.com> Date: Fri, 13 Dec 2024 09:47:39 +0100 Subject: [PATCH 0617/1198] Remove `native_unit_of_measurement` from rfxtrx counters (#133108) --- homeassistant/components/rfxtrx/sensor.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/homeassistant/components/rfxtrx/sensor.py b/homeassistant/components/rfxtrx/sensor.py index cc195c9944e..4f8ae9767e2 100644 --- a/homeassistant/components/rfxtrx/sensor.py +++ b/homeassistant/components/rfxtrx/sensor.py @@ -182,13 +182,11 @@ SENSOR_TYPES = ( key="Count", translation_key="count", state_class=SensorStateClass.TOTAL_INCREASING, - native_unit_of_measurement="count", ), RfxtrxSensorEntityDescription( key="Counter value", translation_key="counter_value", state_class=SensorStateClass.TOTAL_INCREASING, - native_unit_of_measurement="count", ), RfxtrxSensorEntityDescription( key="Chill", From f7b6f4b9274619a6bb97da8b93b63f4cbdbd388c Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 13 Dec 2024 09:48:24 +0100 Subject: [PATCH 0618/1198] Replace functools.partial with ServiceCall.hass in knx (#133111) --- homeassistant/components/knx/services.py | 37 +++++++++++------------- 1 file changed, 17 insertions(+), 20 deletions(-) diff --git a/homeassistant/components/knx/services.py b/homeassistant/components/knx/services.py index 113be9709ee..6c392902737 100644 --- a/homeassistant/components/knx/services.py +++ b/homeassistant/components/knx/services.py @@ -2,7 +2,6 @@ from __future__ import annotations -from functools import partial import logging from typing import TYPE_CHECKING @@ -47,14 +46,14 @@ def register_knx_services(hass: HomeAssistant) -> None: hass.services.async_register( DOMAIN, SERVICE_KNX_SEND, - partial(service_send_to_knx_bus, hass), + service_send_to_knx_bus, schema=SERVICE_KNX_SEND_SCHEMA, ) hass.services.async_register( DOMAIN, SERVICE_KNX_READ, - partial(service_read_to_knx_bus, hass), + service_read_to_knx_bus, schema=SERVICE_KNX_READ_SCHEMA, ) @@ 
-62,7 +61,7 @@ def register_knx_services(hass: HomeAssistant) -> None: hass, DOMAIN, SERVICE_KNX_EVENT_REGISTER, - partial(service_event_register_modify, hass), + service_event_register_modify, schema=SERVICE_KNX_EVENT_REGISTER_SCHEMA, ) @@ -70,7 +69,7 @@ def register_knx_services(hass: HomeAssistant) -> None: hass, DOMAIN, SERVICE_KNX_EXPOSURE_REGISTER, - partial(service_exposure_register_modify, hass), + service_exposure_register_modify, schema=SERVICE_KNX_EXPOSURE_REGISTER_SCHEMA, ) @@ -78,7 +77,7 @@ def register_knx_services(hass: HomeAssistant) -> None: hass, DOMAIN, SERVICE_RELOAD, - partial(service_reload_integration, hass), + service_reload_integration, ) @@ -103,9 +102,9 @@ SERVICE_KNX_EVENT_REGISTER_SCHEMA = vol.Schema( ) -async def service_event_register_modify(hass: HomeAssistant, call: ServiceCall) -> None: +async def service_event_register_modify(call: ServiceCall) -> None: """Service for adding or removing a GroupAddress to the knx_event filter.""" - knx_module = get_knx_module(hass) + knx_module = get_knx_module(call.hass) attr_address = call.data[KNX_ADDRESS] group_addresses = list(map(parse_device_group_address, attr_address)) @@ -156,11 +155,9 @@ SERVICE_KNX_EXPOSURE_REGISTER_SCHEMA = vol.Any( ) -async def service_exposure_register_modify( - hass: HomeAssistant, call: ServiceCall -) -> None: +async def service_exposure_register_modify(call: ServiceCall) -> None: """Service for adding or removing an exposure to KNX bus.""" - knx_module = get_knx_module(hass) + knx_module = get_knx_module(call.hass) group_address = call.data[KNX_ADDRESS] @@ -223,9 +220,9 @@ SERVICE_KNX_SEND_SCHEMA = vol.Any( ) -async def service_send_to_knx_bus(hass: HomeAssistant, call: ServiceCall) -> None: +async def service_send_to_knx_bus(call: ServiceCall) -> None: """Service for sending an arbitrary KNX message to the KNX bus.""" - knx_module = get_knx_module(hass) + knx_module = get_knx_module(call.hass) attr_address = call.data[KNX_ADDRESS] attr_payload = call.data[SERVICE_KNX_ATTR_PAYLOAD] @@ -271,9 +268,9 @@ SERVICE_KNX_READ_SCHEMA = vol.Schema( ) -async def service_read_to_knx_bus(hass: HomeAssistant, call: ServiceCall) -> None: +async def service_read_to_knx_bus(call: ServiceCall) -> None: """Service for sending a GroupValueRead telegram to the KNX bus.""" - knx_module = get_knx_module(hass) + knx_module = get_knx_module(call.hass) for address in call.data[KNX_ADDRESS]: telegram = Telegram( @@ -284,8 +281,8 @@ async def service_read_to_knx_bus(hass: HomeAssistant, call: ServiceCall) -> Non await knx_module.xknx.telegrams.put(telegram) -async def service_reload_integration(hass: HomeAssistant, call: ServiceCall) -> None: +async def service_reload_integration(call: ServiceCall) -> None: """Reload the integration.""" - knx_module = get_knx_module(hass) - await hass.config_entries.async_reload(knx_module.entry.entry_id) - hass.bus.async_fire(f"event_{DOMAIN}_reloaded", context=call.context) + knx_module = get_knx_module(call.hass) + await call.hass.config_entries.async_reload(knx_module.entry.entry_id) + call.hass.bus.async_fire(f"event_{DOMAIN}_reloaded", context=call.context) From 8b579d83ce32859fb054013254645571ba3c9461 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 13 Dec 2024 09:50:10 +0100 Subject: [PATCH 0619/1198] Add data/data_description translation checks (#131705) --- tests/components/conftest.py | 38 ++++++++++++++++++++++ tests/components/onkyo/test_config_flow.py | 9 +++++ 2 files changed, 47 insertions(+) diff --git 
a/tests/components/conftest.py b/tests/components/conftest.py index 71c3b14050d..ac30d105299 100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -4,6 +4,7 @@ from __future__ import annotations import asyncio from collections.abc import AsyncGenerator, Callable, Generator +from functools import lru_cache from importlib.util import find_spec from pathlib import Path import string @@ -37,6 +38,7 @@ from homeassistant.data_entry_flow import ( from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.translation import async_get_translations +from homeassistant.util import yaml if TYPE_CHECKING: from homeassistant.components.hassio import AddonManager @@ -619,6 +621,26 @@ def ignore_translations() -> str | list[str]: return [] +@lru_cache +def _get_integration_quality_scale(integration: str) -> dict[str, Any]: + """Get the quality scale for an integration.""" + try: + return yaml.load_yaml_dict( + f"homeassistant/components/{integration}/quality_scale.yaml" + ).get("rules", {}) + except FileNotFoundError: + return {} + + +def _get_integration_quality_scale_rule(integration: str, rule: str) -> str: + """Get the quality scale for an integration.""" + quality_scale = _get_integration_quality_scale(integration) + if not quality_scale or rule not in quality_scale: + return "todo" + status = quality_scale[rule] + return status if isinstance(status, str) else status["status"] + + async def _check_config_flow_result_translations( manager: FlowManager, flow: FlowHandler, @@ -650,6 +672,9 @@ async def _check_config_flow_result_translations( setattr(flow, "__flow_seen_before", hasattr(flow, "__flow_seen_before")) if result["type"] is FlowResultType.FORM: + iqs_config_flow = _get_integration_quality_scale_rule( + integration, "config-flow" + ) if step_id := result.get("step_id"): # neither title nor description are required # - title defaults to integration name @@ -664,6 +689,19 @@ async def _check_config_flow_result_translations( result["description_placeholders"], translation_required=False, ) + if iqs_config_flow == "done" and (data_schema := result["data_schema"]): + # data and data_description are compulsory + for data_key in data_schema.schema: + for header in ("data", "data_description"): + await _validate_translation( + flow.hass, + translation_errors, + category, + integration, + f"{key_prefix}step.{step_id}.{header}.{data_key}", + result["description_placeholders"], + ) + if errors := result.get("errors"): for error in errors.values(): await _validate_translation( diff --git a/tests/components/onkyo/test_config_flow.py b/tests/components/onkyo/test_config_flow.py index f230ab124bd..a9d6f072559 100644 --- a/tests/components/onkyo/test_config_flow.py +++ b/tests/components/onkyo/test_config_flow.py @@ -503,6 +503,15 @@ async def test_import_success( } +@pytest.mark.parametrize( + "ignore_translations", + [ + [ # The schema is dynamically created from input sources + "component.onkyo.options.step.init.data.TV", + "component.onkyo.options.step.init.data_description.TV", + ] + ], +) async def test_options_flow(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Test options flow.""" From 8cde40499768bfb3c17a63f143296d8fdbab5c0d Mon Sep 17 00:00:00 2001 From: G Johansson Date: Fri, 13 Dec 2024 10:05:46 +0100 Subject: [PATCH 0620/1198] Raise issue for deprecated imperial unit system (#130979) --- .../components/homeassistant/strings.json | 4 +++ homeassistant/core_config.py | 31 
+++++++++++++++-- homeassistant/util/unit_system.py | 1 - tests/test_core_config.py | 24 +++++++++++++ tests/util/test_unit_system.py | 34 +++++++++++++++++++ 5 files changed, 91 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/homeassistant/strings.json b/homeassistant/components/homeassistant/strings.json index 52b330bfbc8..3283d480fdd 100644 --- a/homeassistant/components/homeassistant/strings.json +++ b/homeassistant/components/homeassistant/strings.json @@ -10,6 +10,10 @@ "title": "The country has not been configured", "description": "No country has been configured, please update the configuration by clicking on the \"learn more\" button below." }, + "imperial_unit_system": { + "title": "The imperial unit system is deprecated", + "description": "The imperial unit system is deprecated and your system is currently using us customary. Please update your configuration to use the us customary unit system and reload the core configuration to fix this issue." + }, "deprecated_yaml": { "title": "The {integration_title} YAML configuration is being removed", "description": "Configuring {integration_title} using YAML is being removed.\n\nYour existing YAML configuration has been imported into the UI automatically.\n\nRemove the `{domain}` configuration from your configuration.yaml file and restart Home Assistant to fix this issue." diff --git a/homeassistant/core_config.py b/homeassistant/core_config.py index 430a882ecb9..38ca07e8f31 100644 --- a/homeassistant/core_config.py +++ b/homeassistant/core_config.py @@ -68,11 +68,11 @@ from .util.hass_dict import HassKey from .util.package import is_docker_env from .util.unit_system import ( _CONF_UNIT_SYSTEM_IMPERIAL, + _CONF_UNIT_SYSTEM_METRIC, _CONF_UNIT_SYSTEM_US_CUSTOMARY, METRIC_SYSTEM, UnitSystem, get_unit_system, - validate_unit_system, ) # Typing imports that create a circular dependency @@ -188,6 +188,26 @@ _CUSTOMIZE_CONFIG_SCHEMA = vol.Schema( ) +def _raise_issue_if_imperial_unit_system( + hass: HomeAssistant, config: dict[str, Any] +) -> dict[str, Any]: + if config.get(CONF_UNIT_SYSTEM) == _CONF_UNIT_SYSTEM_IMPERIAL: + ir.async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + "imperial_unit_system", + is_fixable=False, + learn_more_url="homeassistant://config/general", + severity=ir.IssueSeverity.WARNING, + translation_key="imperial_unit_system", + ) + config[CONF_UNIT_SYSTEM] = _CONF_UNIT_SYSTEM_US_CUSTOMARY + else: + ir.async_delete_issue(hass, HOMEASSISTANT_DOMAIN, "imperial_unit_system") + + return config + + def _raise_issue_if_historic_currency(hass: HomeAssistant, currency: str) -> None: if currency not in HISTORIC_CURRENCIES: ir.async_delete_issue(hass, HOMEASSISTANT_DOMAIN, "historic_currency") @@ -249,7 +269,11 @@ CORE_CONFIG_SCHEMA = vol.All( CONF_ELEVATION: vol.Coerce(int), CONF_RADIUS: cv.positive_int, vol.Remove(CONF_TEMPERATURE_UNIT): cv.temperature_unit, - CONF_UNIT_SYSTEM: validate_unit_system, + CONF_UNIT_SYSTEM: vol.Any( + _CONF_UNIT_SYSTEM_METRIC, + _CONF_UNIT_SYSTEM_US_CUSTOMARY, + _CONF_UNIT_SYSTEM_IMPERIAL, + ), CONF_TIME_ZONE: cv.time_zone, vol.Optional(CONF_INTERNAL_URL): cv.url, vol.Optional(CONF_EXTERNAL_URL): cv.url, @@ -333,6 +357,9 @@ async def async_process_ha_core_config(hass: HomeAssistant, config: dict) -> Non # so we need to run it in an executor job. config = await hass.async_add_executor_job(CORE_CONFIG_SCHEMA, config) + # Check if we need to raise an issue for imperial unit system + config = _raise_issue_if_imperial_unit_system(hass, config) + # Only load auth during startup. 
if not hasattr(hass, "auth"): if (auth_conf := config.get(CONF_AUTH_PROVIDERS)) is None: diff --git a/homeassistant/util/unit_system.py b/homeassistant/util/unit_system.py index c812dd38230..15993cbae47 100644 --- a/homeassistant/util/unit_system.py +++ b/homeassistant/util/unit_system.py @@ -233,7 +233,6 @@ def _deprecated_unit_system(value: str) -> str: """Convert deprecated unit system.""" if value == _CONF_UNIT_SYSTEM_IMPERIAL: - # need to add warning in 2023.1 return _CONF_UNIT_SYSTEM_US_CUSTOMARY return value diff --git a/tests/test_core_config.py b/tests/test_core_config.py index cd77e3608dd..dae50bae097 100644 --- a/tests/test_core_config.py +++ b/tests/test_core_config.py @@ -1080,3 +1080,27 @@ async def test_set_time_zone_deprecated(hass: HomeAssistant) -> None: ), ): await hass.config.set_time_zone("America/New_York") + + +async def test_core_config_schema_imperial_unit( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test core config schema.""" + await async_process_ha_core_config( + hass, + { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Home", + "unit_system": "imperial", + "time_zone": "America/New_York", + "currency": "USD", + "country": "US", + "language": "en", + "radius": 150, + }, + ) + + issue = issue_registry.async_get_issue("homeassistant", "imperial_unit_system") + assert issue diff --git a/tests/util/test_unit_system.py b/tests/util/test_unit_system.py index b2c604acbcf..ddefe92de42 100644 --- a/tests/util/test_unit_system.py +++ b/tests/util/test_unit_system.py @@ -24,6 +24,8 @@ from homeassistant.const import ( UnitOfVolume, UnitOfVolumetricFlux, ) +from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.exceptions import HomeAssistantError from homeassistant.util.unit_system import ( # pylint: disable=hass-deprecated-import _CONF_UNIT_SYSTEM_IMPERIAL, @@ -877,3 +879,35 @@ def test_imperial_converted_units(device_class: SensorDeviceClass) -> None: assert (device_class, unit) not in unit_system._conversions continue assert (device_class, unit) in unit_system._conversions + + +async def test_imperial_deprecated_log_warning( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test deprecated imperial unit system logs warning.""" + await async_process_ha_core_config( + hass, + { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Home", + "unit_system": "imperial", + "time_zone": "America/New_York", + "currency": "USD", + "country": "US", + "language": "en", + "radius": 150, + }, + ) + + assert hass.config.latitude == 60 + assert hass.config.longitude == 50 + assert hass.config.elevation == 25 + assert hass.config.location_name == "Home" + assert hass.config.units is US_CUSTOMARY_SYSTEM + assert hass.config.time_zone == "America/New_York" + assert hass.config.currency == "USD" + assert hass.config.country == "US" + assert hass.config.language == "en" + assert hass.config.radius == 150 From fb5cca877bead93f5313757578563743c2ed028f Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 13 Dec 2024 10:12:35 +0100 Subject: [PATCH 0621/1198] Fix failing CI due to Russound Rio incorrect IQS (#133118) --- homeassistant/components/russound_rio/quality_scale.yaml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/russound_rio/quality_scale.yaml b/homeassistant/components/russound_rio/quality_scale.yaml index 2d396892aa8..3a5e8f9adb7 100644 --- 
a/homeassistant/components/russound_rio/quality_scale.yaml +++ b/homeassistant/components/russound_rio/quality_scale.yaml @@ -11,7 +11,10 @@ rules: brands: done common-modules: done config-flow-test-coverage: done - config-flow: done + config-flow: + status: todo + comment: | + The data_description fields in translations are missing. dependency-transparency: done docs-actions: status: exempt From c0ef60bb98cbde57715a4edfa7dc47d9d168aedd Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 13 Dec 2024 10:22:46 +0100 Subject: [PATCH 0622/1198] Bump aiowithings to 3.1.4 (#133117) --- homeassistant/components/withings/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/withings/manifest.json b/homeassistant/components/withings/manifest.json index 57d4bafdc7b..886eb66f5e0 100644 --- a/homeassistant/components/withings/manifest.json +++ b/homeassistant/components/withings/manifest.json @@ -8,5 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/withings", "iot_class": "cloud_push", "loggers": ["aiowithings"], - "requirements": ["aiowithings==3.1.3"] + "requirements": ["aiowithings==3.1.4"] } diff --git a/requirements_all.txt b/requirements_all.txt index cc715c895f9..66dfa359577 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -417,7 +417,7 @@ aiowatttime==0.1.1 aiowebostv==0.4.2 # homeassistant.components.withings -aiowithings==3.1.3 +aiowithings==3.1.4 # homeassistant.components.yandex_transport aioymaps==1.2.5 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 7094270a7a6..5e0705b7358 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -399,7 +399,7 @@ aiowatttime==0.1.1 aiowebostv==0.4.2 # homeassistant.components.withings -aiowithings==3.1.3 +aiowithings==3.1.4 # homeassistant.components.yandex_transport aioymaps==1.2.5 From 7f3373d2337560e8fea4524bcd5140cbd53a88d0 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Fri, 13 Dec 2024 01:27:35 -0800 Subject: [PATCH 0623/1198] Add a quality scale for Google Tasks (#131497) Co-authored-by: Joost Lekkerkerker --- .../google_tasks/quality_scale.yaml | 78 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 78 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/google_tasks/quality_scale.yaml diff --git a/homeassistant/components/google_tasks/quality_scale.yaml b/homeassistant/components/google_tasks/quality_scale.yaml new file mode 100644 index 00000000000..b4159b30145 --- /dev/null +++ b/homeassistant/components/google_tasks/quality_scale.yaml @@ -0,0 +1,78 @@ +rules: + # Bronze + config-flow: done + brands: done + dependency-transparency: todo + common-modules: + status: exempt + comment: | + The integration has a coordinator.py and no base entities. + has-entity-name: done + action-setup: + status: exempt + comment: The integration does not register any actions. + appropriate-polling: done + test-before-configure: done + entity-event-setup: + status: exempt + comment: Integration does not subscribe to events. + unique-config-entry: done + entity-unique-id: done + docs-installation-instructions: done + docs-removal-instructions: todo + test-before-setup: + status: todo + comment: | + The integration refreshes the access token, but does not poll the API. The + setup can be changed to request the list of todo lists in setup instead + of during platform setup. 
+ docs-high-level-description: done + config-flow-test-coverage: done + docs-actions: + status: exempt + comment: The integration does not register any actions. + runtime-data: done + + # Silver + log-when-unavailable: done + config-entry-unloading: done + reauthentication-flow: + status: todo + comment: Missing a test that reauthenticates with the wrong account + action-exceptions: todo + docs-installation-parameters: todo + integration-owner: done + parallel-updates: todo + test-coverage: + status: todo + comment: Test coverage for __init__.py is not above 95% yet + docs-configuration-parameters: todo + entity-unavailable: done + + # Gold + docs-examples: todo + discovery-update-info: todo + entity-device-class: todo + entity-translations: todo + docs-data-update: todo + entity-disabled-by-default: todo + discovery: todo + exception-translations: todo + devices: todo + docs-supported-devices: todo + icon-translations: todo + docs-known-limitations: todo + stale-devices: todo + docs-supported-functions: todo + repair-issues: todo + reconfiguration-flow: todo + entity-category: todo + dynamic-devices: todo + docs-troubleshooting: todo + diagnostics: todo + docs-use-cases: todo + + # Platinum + async-dependency: todo + strict-typing: todo + inject-websession: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index f3b285c8485..23721d31fec 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -457,7 +457,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "google_maps", "google_pubsub", "google_sheets", - "google_tasks", "google_translate", "google_travel_time", "google_wifi", From 91f7afc2c5fb9aa4a91fd8d5141838da5792d805 Mon Sep 17 00:00:00 2001 From: Cyrill Raccaud Date: Fri, 13 Dec 2024 10:40:23 +0100 Subject: [PATCH 0624/1198] Cookidoo reauth config flow for silver (#133110) * reauth * add check for duplicate email in reauth * fix reauth double email check * parametrize tests * check reauth double entry data as well --- .../components/cookidoo/config_flow.py | 34 +++++ .../components/cookidoo/coordinator.py | 2 +- .../components/cookidoo/manifest.json | 2 +- .../components/cookidoo/quality_scale.yaml | 2 +- .../components/cookidoo/strings.json | 12 ++ tests/components/cookidoo/test_config_flow.py | 124 ++++++++++++++++++ tests/components/cookidoo/test_init.py | 2 +- 7 files changed, 174 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/cookidoo/config_flow.py b/homeassistant/components/cookidoo/config_flow.py index ce7ad9fde87..d523de96b01 100644 --- a/homeassistant/components/cookidoo/config_flow.py +++ b/homeassistant/components/cookidoo/config_flow.py @@ -102,6 +102,40 @@ class CookidooConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Perform reauth upon an API authentication error.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Dialog that informs the user that reauth is required.""" + errors: dict[str, str] = {} + + reauth_entry = self._get_reauth_entry() + + if user_input is not None: + if not ( + errors := await self.validate_input({**reauth_entry.data, **user_input}) + ): + if user_input[CONF_EMAIL] != reauth_entry.data[CONF_EMAIL]: + self._async_abort_entries_match( + {CONF_EMAIL: user_input[CONF_EMAIL]} + ) + return self.async_update_reload_and_abort( + reauth_entry, 
data_updates=user_input + ) + return self.async_show_form( + step_id="reauth_confirm", + data_schema=self.add_suggested_values_to_schema( + data_schema=vol.Schema(AUTH_DATA_SCHEMA), + suggested_values={CONF_EMAIL: reauth_entry.data[CONF_EMAIL]}, + ), + errors=errors, + ) + async def generate_country_schema(self) -> None: """Generate country schema.""" self.COUNTRY_DATA_SCHEMA = { diff --git a/homeassistant/components/cookidoo/coordinator.py b/homeassistant/components/cookidoo/coordinator.py index 23a133ea16f..ad86d1fb9f1 100644 --- a/homeassistant/components/cookidoo/coordinator.py +++ b/homeassistant/components/cookidoo/coordinator.py @@ -63,7 +63,7 @@ class CookidooDataUpdateCoordinator(DataUpdateCoordinator[CookidooData]): translation_key="setup_request_exception", ) from e except CookidooAuthException as e: - raise UpdateFailed( + raise ConfigEntryAuthFailed( translation_domain=DOMAIN, translation_key="setup_authentication_exception", translation_placeholders={ diff --git a/homeassistant/components/cookidoo/manifest.json b/homeassistant/components/cookidoo/manifest.json index 7e9e86f9d9d..59d58200fdf 100644 --- a/homeassistant/components/cookidoo/manifest.json +++ b/homeassistant/components/cookidoo/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/cookidoo", "integration_type": "service", "iot_class": "cloud_polling", - "quality_scale": "bronze", + "quality_scale": "silver", "requirements": ["cookidoo-api==0.10.0"] } diff --git a/homeassistant/components/cookidoo/quality_scale.yaml b/homeassistant/components/cookidoo/quality_scale.yaml index 7b2bbb7592b..25069c87c46 100644 --- a/homeassistant/components/cookidoo/quality_scale.yaml +++ b/homeassistant/components/cookidoo/quality_scale.yaml @@ -38,7 +38,7 @@ rules: action-exceptions: status: done comment: Only providing todo actions - reauthentication-flow: todo + reauthentication-flow: done parallel-updates: done test-coverage: done integration-owner: done diff --git a/homeassistant/components/cookidoo/strings.json b/homeassistant/components/cookidoo/strings.json index 2c518f472d5..126205fcf2f 100644 --- a/homeassistant/components/cookidoo/strings.json +++ b/homeassistant/components/cookidoo/strings.json @@ -22,6 +22,18 @@ "data_description": { "language": "Pick your language for the Cookidoo content." 
} + }, + "reauth_confirm": { + "title": "Login again to Cookidoo", + "description": "Please log in to Cookidoo again to continue using this integration.", + "data": { + "email": "[%key:common::config_flow::data::email%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "email": "[%key:component::cookidoo::config::step::user::data_description::email%]", + "password": "[%key:component::cookidoo::config::step::user::data_description::password%]" + } } }, "error": { diff --git a/tests/components/cookidoo/test_config_flow.py b/tests/components/cookidoo/test_config_flow.py index 0da8afe7d07..cfdc284dbfe 100644 --- a/tests/components/cookidoo/test_config_flow.py +++ b/tests/components/cookidoo/test_config_flow.py @@ -180,3 +180,127 @@ async def test_flow_user_init_data_already_configured( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +async def test_flow_reauth( + hass: HomeAssistant, + mock_cookidoo_client: AsyncMock, + cookidoo_config_entry: MockConfigEntry, +) -> None: + """Test reauth flow.""" + + cookidoo_config_entry.add_to_hass(hass) + + result = await cookidoo_config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_EMAIL: "new-email", CONF_PASSWORD: "new-password"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert cookidoo_config_entry.data == { + CONF_EMAIL: "new-email", + CONF_PASSWORD: "new-password", + CONF_COUNTRY: COUNTRY, + CONF_LANGUAGE: LANGUAGE, + } + assert len(hass.config_entries.async_entries()) == 1 + + +@pytest.mark.parametrize( + ("raise_error", "text_error"), + [ + (CookidooRequestException(), "cannot_connect"), + (CookidooAuthException(), "invalid_auth"), + (CookidooException(), "unknown"), + (IndexError(), "unknown"), + ], +) +async def test_flow_reauth_error_and_recover( + hass: HomeAssistant, + mock_cookidoo_client: AsyncMock, + cookidoo_config_entry: MockConfigEntry, + raise_error, + text_error, +) -> None: + """Test reauth flow.""" + + cookidoo_config_entry.add_to_hass(hass) + + result = await cookidoo_config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + mock_cookidoo_client.login.side_effect = raise_error + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_EMAIL: "new-email", CONF_PASSWORD: "new-password"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": text_error} + + mock_cookidoo_client.login.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_EMAIL: "new-email", CONF_PASSWORD: "new-password"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert cookidoo_config_entry.data == { + CONF_EMAIL: "new-email", + CONF_PASSWORD: "new-password", + CONF_COUNTRY: COUNTRY, + CONF_LANGUAGE: LANGUAGE, + } + assert len(hass.config_entries.async_entries()) == 1 + + +@pytest.mark.parametrize( + ("new_email", "saved_email", "result_reason"), + [ + (EMAIL, EMAIL, "reauth_successful"), + ("another-email", EMAIL, "already_configured"), + ], +) +async def test_flow_reauth_init_data_already_configured( + hass: HomeAssistant, + mock_cookidoo_client: AsyncMock, + cookidoo_config_entry: 
MockConfigEntry, + new_email: str, + saved_email: str, + result_reason: str, +) -> None: + """Test we abort user data set when entry is already configured.""" + + cookidoo_config_entry.add_to_hass(hass) + + another_cookidoo_config_entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_EMAIL: "another-email", + CONF_PASSWORD: PASSWORD, + CONF_COUNTRY: COUNTRY, + CONF_LANGUAGE: LANGUAGE, + }, + ) + + another_cookidoo_config_entry.add_to_hass(hass) + + result = await cookidoo_config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_EMAIL: new_email, CONF_PASSWORD: PASSWORD}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == result_reason + assert cookidoo_config_entry.data[CONF_EMAIL] == saved_email diff --git a/tests/components/cookidoo/test_init.py b/tests/components/cookidoo/test_init.py index c73295bcd96..b1b9b880526 100644 --- a/tests/components/cookidoo/test_init.py +++ b/tests/components/cookidoo/test_init.py @@ -35,7 +35,7 @@ async def test_load_unload( ("exception", "status"), [ (CookidooRequestException, ConfigEntryState.SETUP_RETRY), - (CookidooAuthException, ConfigEntryState.SETUP_RETRY), + (CookidooAuthException, ConfigEntryState.SETUP_ERROR), ], ) async def test_init_failure( From c0f6535d1105b7cbf00970ce0dade7cbcf597ab3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ludovic=20BOU=C3=89?= Date: Fri, 13 Dec 2024 11:11:47 +0100 Subject: [PATCH 0625/1198] Fix typo in `WaterHeaterEntityDescription` name (#132888) --- homeassistant/components/water_heater/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/water_heater/__init__.py b/homeassistant/components/water_heater/__init__.py index 67ce3a97fd1..cac0a365f74 100644 --- a/homeassistant/components/water_heater/__init__.py +++ b/homeassistant/components/water_heater/__init__.py @@ -129,7 +129,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return await hass.data[DATA_COMPONENT].async_unload_entry(entry) -class WaterHeaterEntityEntityDescription(EntityDescription, frozen_or_thawed=True): +class WaterHeaterEntityDescription(EntityDescription, frozen_or_thawed=True): """A class that describes water heater entities.""" @@ -152,7 +152,7 @@ class WaterHeaterEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): {ATTR_OPERATION_LIST, ATTR_MIN_TEMP, ATTR_MAX_TEMP} ) - entity_description: WaterHeaterEntityEntityDescription + entity_description: WaterHeaterEntityDescription _attr_current_operation: str | None = None _attr_current_temperature: float | None = None _attr_is_away_mode_on: bool | None = None From 7e2d3eb482f39ad9827bbb1d3d5763ec16f5309a Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Fri, 13 Dec 2024 11:59:55 +0100 Subject: [PATCH 0626/1198] Add contact vip info to fritzbox_callmonitor sensor (#132913) --- .../components/fritzbox_callmonitor/base.py | 44 ++++++++++++++----- .../components/fritzbox_callmonitor/sensor.py | 27 +++++++----- .../fritzbox_callmonitor/strings.json | 3 +- 3 files changed, 52 insertions(+), 22 deletions(-) diff --git a/homeassistant/components/fritzbox_callmonitor/base.py b/homeassistant/components/fritzbox_callmonitor/base.py index 2816880a1b2..3c8714624e7 100644 --- a/homeassistant/components/fritzbox_callmonitor/base.py +++ 
b/homeassistant/components/fritzbox_callmonitor/base.py @@ -3,6 +3,7 @@ from __future__ import annotations from contextlib import suppress +from dataclasses import dataclass from datetime import timedelta import logging import re @@ -19,12 +20,33 @@ _LOGGER = logging.getLogger(__name__) MIN_TIME_PHONEBOOK_UPDATE = timedelta(hours=6) +@dataclass +class Contact: + """Store details for one phonebook contact.""" + + name: str + numbers: list[str] + vip: bool + + def __init__( + self, name: str, numbers: list[str] | None = None, category: str | None = None + ) -> None: + """Initialize the class.""" + self.name = name + self.numbers = [re.sub(REGEX_NUMBER, "", nr) for nr in numbers or ()] + self.vip = category == "1" + + +unknown_contact = Contact(UNKNOWN_NAME) + + class FritzBoxPhonebook: """Connects to a FritzBox router and downloads its phone book.""" fph: FritzPhonebook phonebook_dict: dict[str, list[str]] - number_dict: dict[str, str] + contacts: list[Contact] + number_dict: dict[str, Contact] def __init__( self, @@ -56,27 +78,27 @@ class FritzBoxPhonebook: if self.phonebook_id is None: return - self.phonebook_dict = self.fph.get_all_names(self.phonebook_id) - self.number_dict = { - re.sub(REGEX_NUMBER, "", nr): name - for name, nrs in self.phonebook_dict.items() - for nr in nrs - } + self.fph.get_all_name_numbers(self.phonebook_id) + self.contacts = [ + Contact(c.name, c.numbers, getattr(c, "category", None)) + for c in self.fph.phonebook.contacts + ] + self.number_dict = {nr: c for c in self.contacts for nr in c.numbers} _LOGGER.debug("Fritz!Box phone book successfully updated") def get_phonebook_ids(self) -> list[int]: """Return list of phonebook ids.""" return self.fph.phonebook_ids # type: ignore[no-any-return] - def get_name(self, number: str) -> str: - """Return a name for a given phone number.""" + def get_contact(self, number: str) -> Contact: + """Return a contact for a given phone number.""" number = re.sub(REGEX_NUMBER, "", str(number)) with suppress(KeyError): return self.number_dict[number] if not self.prefixes: - return UNKNOWN_NAME + return unknown_contact for prefix in self.prefixes: with suppress(KeyError): @@ -84,4 +106,4 @@ class FritzBoxPhonebook: with suppress(KeyError): return self.number_dict[prefix + number.lstrip("0")] - return UNKNOWN_NAME + return unknown_contact diff --git a/homeassistant/components/fritzbox_callmonitor/sensor.py b/homeassistant/components/fritzbox_callmonitor/sensor.py index 668369c35a7..df18ae5702a 100644 --- a/homeassistant/components/fritzbox_callmonitor/sensor.py +++ b/homeassistant/components/fritzbox_callmonitor/sensor.py @@ -20,7 +20,7 @@ from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import FritzBoxCallMonitorConfigEntry -from .base import FritzBoxPhonebook +from .base import Contact, FritzBoxPhonebook from .const import ( ATTR_PREFIXES, CONF_PHONEBOOK, @@ -96,7 +96,7 @@ class FritzBoxCallSensor(SensorEntity): self._host = host self._port = port self._monitor: FritzBoxCallMonitor | None = None - self._attributes: dict[str, str | list[str]] = {} + self._attributes: dict[str, str | list[str] | bool] = {} self._attr_translation_placeholders = {"phonebook_name": phonebook_name} self._attr_unique_id = unique_id @@ -152,20 +152,20 @@ class FritzBoxCallSensor(SensorEntity): """Set the state.""" self._attr_native_value = state - def set_attributes(self, attributes: Mapping[str, str]) -> None: + def set_attributes(self, attributes: Mapping[str, str | bool]) -> None: """Set the state attributes.""" self._attributes = {**attributes} @property - def extra_state_attributes(self) -> dict[str, str | list[str]]: + def extra_state_attributes(self) -> dict[str, str | list[str] | bool]: """Return the state attributes.""" if self._prefixes: self._attributes[ATTR_PREFIXES] = self._prefixes return self._attributes - def number_to_name(self, number: str) -> str: - """Return a name for a given phone number.""" - return self._fritzbox_phonebook.get_name(number) + def number_to_contact(self, number: str) -> Contact: + """Return a contact for a given phone number.""" + return self._fritzbox_phonebook.get_contact(number) def update(self) -> None: """Update the phonebook if it is defined.""" @@ -225,35 +225,42 @@ class FritzBoxCallMonitor: df_in = "%d.%m.%y %H:%M:%S" df_out = "%Y-%m-%dT%H:%M:%S" isotime = datetime.strptime(line[0], df_in).strftime(df_out) + att: dict[str, str | bool] if line[1] == FritzState.RING: self._sensor.set_state(CallState.RINGING) + contact = self._sensor.number_to_contact(line[3]) att = { "type": "incoming", "from": line[3], "to": line[4], "device": line[5], "initiated": isotime, - "from_name": self._sensor.number_to_name(line[3]), + "from_name": contact.name, + "vip": contact.vip, } self._sensor.set_attributes(att) elif line[1] == FritzState.CALL: self._sensor.set_state(CallState.DIALING) + contact = self._sensor.number_to_contact(line[5]) att = { "type": "outgoing", "from": line[4], "to": line[5], "device": line[6], "initiated": isotime, - "to_name": self._sensor.number_to_name(line[5]), + "to_name": contact.name, + "vip": contact.vip, } self._sensor.set_attributes(att) elif line[1] == FritzState.CONNECT: self._sensor.set_state(CallState.TALKING) + contact = self._sensor.number_to_contact(line[4]) att = { "with": line[4], "device": line[3], "accepted": isotime, - "with_name": self._sensor.number_to_name(line[4]), + "with_name": contact.name, + "vip": contact.vip, } self._sensor.set_attributes(att) elif line[1] == FritzState.DISCONNECT: diff --git a/homeassistant/components/fritzbox_callmonitor/strings.json b/homeassistant/components/fritzbox_callmonitor/strings.json index e935549035c..437b218a8e2 100644 --- a/homeassistant/components/fritzbox_callmonitor/strings.json +++ b/homeassistant/components/fritzbox_callmonitor/strings.json @@ -78,7 +78,8 @@ "accepted": { "name": "Accepted" }, "with_name": { "name": "With name" }, "duration": { "name": "Duration" }, - "closed": { "name": "Closed" } + "closed": { "name": "Closed" }, + "vip": { "name": "Important" } } } } From 81c8d7153b7277c3ddd28af6a0870d854025b83e Mon Sep 17 00:00:00 2001 From: Martijn Russchen Date: Fri, 13 Dec 2024 12:50:50 +0100 Subject: [PATCH 0627/1198] Push Nibe package to 2.14.0 (#133125) --- 
homeassistant/components/nibe_heatpump/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/nibe_heatpump/manifest.json b/homeassistant/components/nibe_heatpump/manifest.json index 407cdfcfd57..049ba905f04 100644 --- a/homeassistant/components/nibe_heatpump/manifest.json +++ b/homeassistant/components/nibe_heatpump/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/nibe_heatpump", "iot_class": "local_polling", - "requirements": ["nibe==2.13.0"] + "requirements": ["nibe==2.14.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 66dfa359577..3c2df95f57f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1462,7 +1462,7 @@ nextcord==2.6.0 nextdns==4.0.0 # homeassistant.components.nibe_heatpump -nibe==2.13.0 +nibe==2.14.0 # homeassistant.components.nice_go nice-go==0.3.10 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5e0705b7358..53be7b9893c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1225,7 +1225,7 @@ nextcord==2.6.0 nextdns==4.0.0 # homeassistant.components.nibe_heatpump -nibe==2.13.0 +nibe==2.14.0 # homeassistant.components.nice_go nice-go==0.3.10 From d65807324627b15fbbf6fd4553ab9eac67a5cd47 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Benecke Date: Fri, 13 Dec 2024 13:01:55 +0100 Subject: [PATCH 0628/1198] Make Twitch sensor state and attributes translatable (#133127) --- homeassistant/components/twitch/sensor.py | 6 ++- homeassistant/components/twitch/strings.json | 42 ++++++++++++++++++++ 2 files changed, 46 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/twitch/sensor.py b/homeassistant/components/twitch/sensor.py index bd5fc509989..f78d33ea461 100644 --- a/homeassistant/components/twitch/sensor.py +++ b/homeassistant/components/twitch/sensor.py @@ -4,7 +4,7 @@ from __future__ import annotations from typing import Any -from homeassistant.components.sensor import SensorEntity +from homeassistant.components.sensor import SensorDeviceClass, SensorEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -49,6 +49,8 @@ class TwitchSensor(CoordinatorEntity[TwitchCoordinator], SensorEntity): """Representation of a Twitch channel.""" _attr_translation_key = "channel" + _attr_device_class = SensorDeviceClass.ENUM + _attr_options = [STATE_OFFLINE, STATE_STREAMING] def __init__(self, coordinator: TwitchCoordinator, channel_id: str) -> None: """Initialize the sensor.""" @@ -82,8 +84,8 @@ class TwitchSensor(CoordinatorEntity[TwitchCoordinator], SensorEntity): ATTR_TITLE: channel.title, ATTR_STARTED_AT: channel.started_at, ATTR_VIEWERS: channel.viewers, + ATTR_SUBSCRIPTION: False, } - resp[ATTR_SUBSCRIPTION] = False if channel.subscribed is not None: resp[ATTR_SUBSCRIPTION] = channel.subscribed resp[ATTR_SUBSCRIPTION_GIFTED] = channel.subscription_gifted diff --git a/homeassistant/components/twitch/strings.json b/homeassistant/components/twitch/strings.json index bbe46526c36..7271b81e924 100644 --- a/homeassistant/components/twitch/strings.json +++ b/homeassistant/components/twitch/strings.json @@ -16,5 +16,47 @@ "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]" } + }, + "entity": { + "sensor": { + 
"channel": { + "state": { + "streaming": "Streaming", + "offline": "Offline" + }, + "state_attributes": { + "followers": { + "name": "Followers" + }, + "game": { + "name": "Game" + }, + "title": { + "name": "Title" + }, + "started_at": { + "name": "Started at" + }, + "viewers": { + "name": "Viewers" + }, + "subscribed": { + "name": "Subscribed" + }, + "subscription_is_gifted": { + "name": "Subscription is gifted" + }, + "subscription_tier": { + "name": "Subscription tier" + }, + "following": { + "name": "Following" + }, + "following_since": { + "name": "Following since" + } + } + } + } } } From 684667e8e733136ada08de57a975ec938a44114b Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 13 Dec 2024 13:24:46 +0100 Subject: [PATCH 0629/1198] Update open-meteo to v0.3.2 (#133122) --- homeassistant/components/open_meteo/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/open_meteo/manifest.json b/homeassistant/components/open_meteo/manifest.json index abdb59a48d0..a2f2a724ad5 100644 --- a/homeassistant/components/open_meteo/manifest.json +++ b/homeassistant/components/open_meteo/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/open_meteo", "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["open-meteo==0.3.1"] + "requirements": ["open-meteo==0.3.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 3c2df95f57f..3bb1faea169 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1535,7 +1535,7 @@ onvif-zeep-async==3.1.13 open-garage==0.2.0 # homeassistant.components.open_meteo -open-meteo==0.3.1 +open-meteo==0.3.2 # homeassistant.components.openai_conversation openai==1.35.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 53be7b9893c..a4f146fbc56 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1283,7 +1283,7 @@ onvif-zeep-async==3.1.13 open-garage==0.2.0 # homeassistant.components.open_meteo -open-meteo==0.3.1 +open-meteo==0.3.2 # homeassistant.components.openai_conversation openai==1.35.7 From f816a0667cfb3761d00696a41525a146033f137e Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 13 Dec 2024 13:28:11 +0100 Subject: [PATCH 0630/1198] Reduce functools.partial with ServiceCall.hass in energyzero (#133134) --- homeassistant/components/energyzero/services.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/energyzero/services.py b/homeassistant/components/energyzero/services.py index 286735895ad..c47958b670f 100644 --- a/homeassistant/components/energyzero/services.py +++ b/homeassistant/components/energyzero/services.py @@ -83,12 +83,12 @@ def __serialize_prices(prices: Electricity | Gas) -> ServiceResponse: } -def __get_coordinator( - hass: HomeAssistant, call: ServiceCall -) -> EnergyZeroDataUpdateCoordinator: +def __get_coordinator(call: ServiceCall) -> EnergyZeroDataUpdateCoordinator: """Get the coordinator from the entry.""" entry_id: str = call.data[ATTR_CONFIG_ENTRY] - entry: EnergyZeroConfigEntry | None = hass.config_entries.async_get_entry(entry_id) + entry: EnergyZeroConfigEntry | None = call.hass.config_entries.async_get_entry( + entry_id + ) if not entry: raise ServiceValidationError( @@ -113,10 +113,9 @@ def __get_coordinator( async def __get_prices( call: ServiceCall, *, - hass: HomeAssistant, price_type: PriceType, ) -> 
ServiceResponse: - coordinator = __get_coordinator(hass, call) + coordinator = __get_coordinator(call) start = __get_date(call.data.get(ATTR_START)) end = __get_date(call.data.get(ATTR_END)) @@ -151,14 +150,14 @@ def async_setup_services(hass: HomeAssistant) -> None: hass.services.async_register( DOMAIN, GAS_SERVICE_NAME, - partial(__get_prices, hass=hass, price_type=PriceType.GAS), + partial(__get_prices, price_type=PriceType.GAS), schema=SERVICE_SCHEMA, supports_response=SupportsResponse.ONLY, ) hass.services.async_register( DOMAIN, ENERGY_SERVICE_NAME, - partial(__get_prices, hass=hass, price_type=PriceType.ENERGY), + partial(__get_prices, price_type=PriceType.ENERGY), schema=SERVICE_SCHEMA, supports_response=SupportsResponse.ONLY, ) From c7adc984086963a23f8d7f65ed4402da19b75d6f Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 13 Dec 2024 13:28:54 +0100 Subject: [PATCH 0631/1198] Replace functools.partial with ServiceCall.hass in unifiprotect (#133131) --- .../components/unifiprotect/services.py | 93 +++++++++---------- 1 file changed, 45 insertions(+), 48 deletions(-) diff --git a/homeassistant/components/unifiprotect/services.py b/homeassistant/components/unifiprotect/services.py index 9c045164d6d..fc438240839 100644 --- a/homeassistant/components/unifiprotect/services.py +++ b/homeassistant/components/unifiprotect/services.py @@ -3,7 +3,6 @@ from __future__ import annotations import asyncio -import functools from typing import Any, cast from pydantic.v1 import ValidationError @@ -88,9 +87,9 @@ def _async_get_ufp_instance(hass: HomeAssistant, device_id: str) -> ProtectApiCl @callback -def _async_get_ufp_camera(hass: HomeAssistant, call: ServiceCall) -> Camera: - ref = async_extract_referenced_entity_ids(hass, call) - entity_registry = er.async_get(hass) +def _async_get_ufp_camera(call: ServiceCall) -> Camera: + ref = async_extract_referenced_entity_ids(call.hass, call) + entity_registry = er.async_get(call.hass) entity_id = ref.indirectly_referenced.pop() camera_entity = entity_registry.async_get(entity_id) @@ -98,30 +97,27 @@ def _async_get_ufp_camera(hass: HomeAssistant, call: ServiceCall) -> Camera: assert camera_entity.device_id is not None camera_mac = _async_unique_id_to_mac(camera_entity.unique_id) - instance = _async_get_ufp_instance(hass, camera_entity.device_id) + instance = _async_get_ufp_instance(call.hass, camera_entity.device_id) return cast(Camera, instance.bootstrap.get_device_from_mac(camera_mac)) @callback -def _async_get_protect_from_call( - hass: HomeAssistant, call: ServiceCall -) -> set[ProtectApiClient]: +def _async_get_protect_from_call(call: ServiceCall) -> set[ProtectApiClient]: return { - _async_get_ufp_instance(hass, device_id) + _async_get_ufp_instance(call.hass, device_id) for device_id in async_extract_referenced_entity_ids( - hass, call + call.hass, call ).referenced_devices } async def _async_service_call_nvr( - hass: HomeAssistant, call: ServiceCall, method: str, *args: Any, **kwargs: Any, ) -> None: - instances = _async_get_protect_from_call(hass, call) + instances = _async_get_protect_from_call(call) try: await asyncio.gather( *(getattr(i.bootstrap.nvr, method)(*args, **kwargs) for i in instances) @@ -130,23 +126,23 @@ async def _async_service_call_nvr( raise HomeAssistantError(str(err)) from err -async def add_doorbell_text(hass: HomeAssistant, call: ServiceCall) -> None: +async def add_doorbell_text(call: ServiceCall) -> None: """Add a custom doorbell text message.""" message: str = call.data[ATTR_MESSAGE] 
- await _async_service_call_nvr(hass, call, "add_custom_doorbell_message", message) + await _async_service_call_nvr(call, "add_custom_doorbell_message", message) -async def remove_doorbell_text(hass: HomeAssistant, call: ServiceCall) -> None: +async def remove_doorbell_text(call: ServiceCall) -> None: """Remove a custom doorbell text message.""" message: str = call.data[ATTR_MESSAGE] - await _async_service_call_nvr(hass, call, "remove_custom_doorbell_message", message) + await _async_service_call_nvr(call, "remove_custom_doorbell_message", message) -async def remove_privacy_zone(hass: HomeAssistant, call: ServiceCall) -> None: +async def remove_privacy_zone(call: ServiceCall) -> None: """Remove privacy zone from camera.""" name: str = call.data[ATTR_NAME] - camera = _async_get_ufp_camera(hass, call) + camera = _async_get_ufp_camera(call) remove_index: int | None = None for index, zone in enumerate(camera.privacy_zones): @@ -171,10 +167,10 @@ def _async_unique_id_to_mac(unique_id: str) -> str: return unique_id.split("_")[0] -async def set_chime_paired_doorbells(hass: HomeAssistant, call: ServiceCall) -> None: +async def set_chime_paired_doorbells(call: ServiceCall) -> None: """Set paired doorbells on chime.""" - ref = async_extract_referenced_entity_ids(hass, call) - entity_registry = er.async_get(hass) + ref = async_extract_referenced_entity_ids(call.hass, call) + entity_registry = er.async_get(call.hass) entity_id = ref.indirectly_referenced.pop() chime_button = entity_registry.async_get(entity_id) @@ -182,13 +178,13 @@ async def set_chime_paired_doorbells(hass: HomeAssistant, call: ServiceCall) -> assert chime_button.device_id is not None chime_mac = _async_unique_id_to_mac(chime_button.unique_id) - instance = _async_get_ufp_instance(hass, chime_button.device_id) + instance = _async_get_ufp_instance(call.hass, chime_button.device_id) chime = instance.bootstrap.get_device_from_mac(chime_mac) chime = cast(Chime, chime) assert chime is not None call.data = ReadOnlyDict(call.data.get("doorbells") or {}) - doorbell_refs = async_extract_referenced_entity_ids(hass, call) + doorbell_refs = async_extract_referenced_entity_ids(call.hass, call) doorbell_ids: set[str] = set() for camera_id in doorbell_refs.referenced | doorbell_refs.indirectly_referenced: doorbell_sensor = entity_registry.async_get(camera_id) @@ -209,31 +205,32 @@ async def set_chime_paired_doorbells(hass: HomeAssistant, call: ServiceCall) -> await chime.save_device(data_before_changed) +SERVICES = [ + ( + SERVICE_ADD_DOORBELL_TEXT, + add_doorbell_text, + DOORBELL_TEXT_SCHEMA, + ), + ( + SERVICE_REMOVE_DOORBELL_TEXT, + remove_doorbell_text, + DOORBELL_TEXT_SCHEMA, + ), + ( + SERVICE_SET_CHIME_PAIRED, + set_chime_paired_doorbells, + CHIME_PAIRED_SCHEMA, + ), + ( + SERVICE_REMOVE_PRIVACY_ZONE, + remove_privacy_zone, + REMOVE_PRIVACY_ZONE_SCHEMA, + ), +] + + def async_setup_services(hass: HomeAssistant) -> None: """Set up the global UniFi Protect services.""" - services = [ - ( - SERVICE_ADD_DOORBELL_TEXT, - functools.partial(add_doorbell_text, hass), - DOORBELL_TEXT_SCHEMA, - ), - ( - SERVICE_REMOVE_DOORBELL_TEXT, - functools.partial(remove_doorbell_text, hass), - DOORBELL_TEXT_SCHEMA, - ), - ( - SERVICE_SET_CHIME_PAIRED, - functools.partial(set_chime_paired_doorbells, hass), - CHIME_PAIRED_SCHEMA, - ), - ( - SERVICE_REMOVE_PRIVACY_ZONE, - functools.partial(remove_privacy_zone, hass), - REMOVE_PRIVACY_ZONE_SCHEMA, - ), - ] - for name, method, schema in services: - if hass.services.has_service(DOMAIN, name): - continue + + for name, 
method, schema in SERVICES: hass.services.async_register(DOMAIN, name, method, schema=schema) From 4a5e47d2f03089afe19edde020678d9e1da04bef Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 13 Dec 2024 13:29:42 +0100 Subject: [PATCH 0632/1198] Replace functools.partial with ServiceCall.hass in tibber (#133132) --- homeassistant/components/tibber/services.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/tibber/services.py b/homeassistant/components/tibber/services.py index 5033cda11d0..938e96b9917 100644 --- a/homeassistant/components/tibber/services.py +++ b/homeassistant/components/tibber/services.py @@ -4,7 +4,6 @@ from __future__ import annotations import datetime as dt from datetime import datetime -from functools import partial from typing import Any, Final import voluptuous as vol @@ -33,8 +32,8 @@ SERVICE_SCHEMA: Final = vol.Schema( ) -async def __get_prices(call: ServiceCall, *, hass: HomeAssistant) -> ServiceResponse: - tibber_connection = hass.data[DOMAIN] +async def __get_prices(call: ServiceCall) -> ServiceResponse: + tibber_connection = call.hass.data[DOMAIN] start = __get_date(call.data.get(ATTR_START), "start") end = __get_date(call.data.get(ATTR_END), "end") @@ -94,7 +93,7 @@ def async_setup_services(hass: HomeAssistant) -> None: hass.services.async_register( DOMAIN, PRICE_SERVICE_NAME, - partial(__get_prices, hass=hass), + __get_prices, schema=SERVICE_SCHEMA, supports_response=SupportsResponse.ONLY, ) From a131497e1f9a6c9c49989b245f21ccb57e95b2bd Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 13 Dec 2024 13:30:05 +0100 Subject: [PATCH 0633/1198] Reduce functools.partial with ServiceCall.hass in easyenergy (#133133) --- homeassistant/components/easyenergy/services.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/easyenergy/services.py b/homeassistant/components/easyenergy/services.py index cb5424496ac..f5ee89d5325 100644 --- a/homeassistant/components/easyenergy/services.py +++ b/homeassistant/components/easyenergy/services.py @@ -86,12 +86,12 @@ def __serialize_prices(prices: list[dict[str, float | datetime]]) -> ServiceResp } -def __get_coordinator( - hass: HomeAssistant, call: ServiceCall -) -> EasyEnergyDataUpdateCoordinator: +def __get_coordinator(call: ServiceCall) -> EasyEnergyDataUpdateCoordinator: """Get the coordinator from the entry.""" entry_id: str = call.data[ATTR_CONFIG_ENTRY] - entry: EasyEnergyConfigEntry | None = hass.config_entries.async_get_entry(entry_id) + entry: EasyEnergyConfigEntry | None = call.hass.config_entries.async_get_entry( + entry_id + ) if not entry: raise ServiceValidationError( @@ -116,11 +116,10 @@ def __get_coordinator( async def __get_prices( call: ServiceCall, *, - hass: HomeAssistant, price_type: PriceType, ) -> ServiceResponse: """Get prices from easyEnergy.""" - coordinator = __get_coordinator(hass, call) + coordinator = __get_coordinator(call) start = __get_date(call.data.get(ATTR_START)) end = __get_date(call.data.get(ATTR_END)) @@ -156,21 +155,21 @@ def async_setup_services(hass: HomeAssistant) -> None: hass.services.async_register( DOMAIN, GAS_SERVICE_NAME, - partial(__get_prices, hass=hass, price_type=PriceType.GAS), + partial(__get_prices, price_type=PriceType.GAS), schema=SERVICE_SCHEMA, supports_response=SupportsResponse.ONLY, ) hass.services.async_register( DOMAIN, ENERGY_USAGE_SERVICE_NAME, - partial(__get_prices, hass=hass, 
price_type=PriceType.ENERGY_USAGE), + partial(__get_prices, price_type=PriceType.ENERGY_USAGE), schema=SERVICE_SCHEMA, supports_response=SupportsResponse.ONLY, ) hass.services.async_register( DOMAIN, ENERGY_RETURN_SERVICE_NAME, - partial(__get_prices, hass=hass, price_type=PriceType.ENERGY_RETURN), + partial(__get_prices, price_type=PriceType.ENERGY_RETURN), schema=SERVICE_SCHEMA, supports_response=SupportsResponse.ONLY, ) From b4e065d33191930917be5ca1cf44737a3cf8c19d Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 13 Dec 2024 13:30:22 +0100 Subject: [PATCH 0634/1198] Bump yt-dlp to 2024.12.13 (#133129) --- homeassistant/components/media_extractor/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/media_extractor/manifest.json b/homeassistant/components/media_extractor/manifest.json index 195dc678bc2..21c07607573 100644 --- a/homeassistant/components/media_extractor/manifest.json +++ b/homeassistant/components/media_extractor/manifest.json @@ -8,6 +8,6 @@ "iot_class": "calculated", "loggers": ["yt_dlp"], "quality_scale": "internal", - "requirements": ["yt-dlp[default]==2024.12.06"], + "requirements": ["yt-dlp[default]==2024.12.13"], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index 3bb1faea169..5adb0fb74de 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3074,7 +3074,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2024.12.06 +yt-dlp[default]==2024.12.13 # homeassistant.components.zamg zamg==0.3.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a4f146fbc56..8e5cdf569b3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2466,7 +2466,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2024.12.06 +yt-dlp[default]==2024.12.13 # homeassistant.components.zamg zamg==0.3.6 From fe46fd24bd77465e1f20acdbd7991c85375a4226 Mon Sep 17 00:00:00 2001 From: Cyrill Raccaud Date: Fri, 13 Dec 2024 13:34:17 +0100 Subject: [PATCH 0635/1198] Improve data description and title for Cookidoo integration (#133106) * fix data description typo for cookidoo * use placeholder for cookidoo as it is non-translatable * set title of language step * fix for reauth * fix reauth --- homeassistant/components/cookidoo/config_flow.py | 3 +++ homeassistant/components/cookidoo/strings.json | 16 ++++++++-------- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/cookidoo/config_flow.py b/homeassistant/components/cookidoo/config_flow.py index d523de96b01..58e99a70907 100644 --- a/homeassistant/components/cookidoo/config_flow.py +++ b/homeassistant/components/cookidoo/config_flow.py @@ -79,6 +79,7 @@ class CookidooConfigFlow(ConfigFlow, domain=DOMAIN): ), suggested_values=user_input, ), + description_placeholders={"cookidoo": "Cookidoo"}, errors=errors, ) @@ -99,6 +100,7 @@ class CookidooConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="language", data_schema=vol.Schema(self.LANGUAGE_DATA_SCHEMA), + description_placeholders={"cookidoo": "Cookidoo"}, errors=errors, ) @@ -133,6 +135,7 @@ class CookidooConfigFlow(ConfigFlow, domain=DOMAIN): data_schema=vol.Schema(AUTH_DATA_SCHEMA), suggested_values={CONF_EMAIL: reauth_entry.data[CONF_EMAIL]}, ), + description_placeholders={"cookidoo": "Cookidoo"}, errors=errors, ) diff --git 
a/homeassistant/components/cookidoo/strings.json b/homeassistant/components/cookidoo/strings.json index 126205fcf2f..19f709ddaf8 100644 --- a/homeassistant/components/cookidoo/strings.json +++ b/homeassistant/components/cookidoo/strings.json @@ -2,30 +2,30 @@ "config": { "step": { "user": { - "title": "Login to Cookidoo", + "title": "Login to {cookidoo}", "data": { "email": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]", "country": "Country" }, "data_description": { - "email": "Email used access your Cookidoo account.", - "password": "Password used access your Cookidoo account.", - "country": "Pick your language for the Cookidoo content." + "email": "Email used to access your {cookidoo} account.", + "password": "Password used to access your {cookidoo} account.", + "country": "Pick your language for the {cookidoo} content." } }, "language": { - "title": "Login to Cookidoo", + "title": "Set language for {cookidoo}", "data": { "language": "[%key:common::config_flow::data::language%]" }, "data_description": { - "language": "Pick your language for the Cookidoo content." + "language": "Pick your language for the {cookidoo} content." } }, "reauth_confirm": { - "title": "Login again to Cookidoo", - "description": "Please log in to Cookidoo again to continue using this integration.", + "title": "Login again to {cookidoo}", + "description": "Please log in to {cookidoo} again to continue using this integration.", "data": { "email": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]" From 5d8e99731954e95a5b23054e87a95c0af6e0e0eb Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Fri, 13 Dec 2024 13:49:00 +0100 Subject: [PATCH 0636/1198] Bump velbusaio to 2024.12.2 (#133130) * Bump velbusaio to 2024.12.2 * mistakely pushed this file --- homeassistant/components/velbus/__init__.py | 4 +++- homeassistant/components/velbus/config_flow.py | 2 +- homeassistant/components/velbus/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 5 files changed, 7 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/velbus/__init__.py b/homeassistant/components/velbus/__init__.py index f8426bc4130..6afcc20cc0f 100644 --- a/homeassistant/components/velbus/__init__.py +++ b/homeassistant/components/velbus/__init__.py @@ -52,7 +52,7 @@ async def velbus_connect_task( ) -> None: """Task to offload the long running connect.""" try: - await controller.connect() + await controller.start() except ConnectionError as ex: raise PlatformNotReady( f"Connection error while connecting to Velbus {entry_id}: {ex}" @@ -85,6 +85,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: VelbusConfigEntry) -> bo entry.data[CONF_PORT], cache_dir=hass.config.path(STORAGE_DIR, f"velbuscache-{entry.entry_id}"), ) + await controller.connect() + task = hass.async_create_task(velbus_connect_task(controller, hass, entry.entry_id)) entry.runtime_data = VelbusData(controller=controller, connect_task=task) diff --git a/homeassistant/components/velbus/config_flow.py b/homeassistant/components/velbus/config_flow.py index 0b47dfe6498..26e2fafabbc 100644 --- a/homeassistant/components/velbus/config_flow.py +++ b/homeassistant/components/velbus/config_flow.py @@ -35,7 +35,7 @@ class VelbusConfigFlow(ConfigFlow, domain=DOMAIN): """Try to connect to the velbus with the port specified.""" try: controller = velbusaio.controller.Velbus(prt) - await controller.connect(True) + await controller.connect() await 
controller.stop() except VelbusConnectionFailed: self._errors[CONF_PORT] = "cannot_connect" diff --git a/homeassistant/components/velbus/manifest.json b/homeassistant/components/velbus/manifest.json index 600370f87d9..90981c426f9 100644 --- a/homeassistant/components/velbus/manifest.json +++ b/homeassistant/components/velbus/manifest.json @@ -13,7 +13,7 @@ "velbus-packet", "velbus-protocol" ], - "requirements": ["velbus-aio==2024.12.1"], + "requirements": ["velbus-aio==2024.12.2"], "usb": [ { "vid": "10CF", diff --git a/requirements_all.txt b/requirements_all.txt index 5adb0fb74de..219094c0a28 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2943,7 +2943,7 @@ vallox-websocket-api==5.3.0 vehicle==2.2.2 # homeassistant.components.velbus -velbus-aio==2024.12.1 +velbus-aio==2024.12.2 # homeassistant.components.venstar venstarcolortouch==0.19 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8e5cdf569b3..46a7d4b29b0 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2356,7 +2356,7 @@ vallox-websocket-api==5.3.0 vehicle==2.2.2 # homeassistant.components.velbus -velbus-aio==2024.12.1 +velbus-aio==2024.12.2 # homeassistant.components.venstar venstarcolortouch==0.19 From 579ac01eb1b1dd4caac84e0e5b791f5cfee2fdec Mon Sep 17 00:00:00 2001 From: Guido Schmitz Date: Fri, 13 Dec 2024 15:26:02 +0100 Subject: [PATCH 0637/1198] Fix typos in devolo Home Network tests (#133139) --- tests/components/devolo_home_network/test_config_flow.py | 2 +- tests/components/devolo_home_network/test_update.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/components/devolo_home_network/test_config_flow.py b/tests/components/devolo_home_network/test_config_flow.py index 5234d0f073e..28e9059d588 100644 --- a/tests/components/devolo_home_network/test_config_flow.py +++ b/tests/components/devolo_home_network/test_config_flow.py @@ -139,7 +139,7 @@ async def test_abort_zeroconf_wrong_device(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("info") -async def test_abort_if_configued(hass: HomeAssistant) -> None: +async def test_abort_if_configured(hass: HomeAssistant) -> None: """Test we abort config flow if already configured.""" serial_number = DISCOVERY_INFO.properties["SN"] entry = MockConfigEntry( diff --git a/tests/components/devolo_home_network/test_update.py b/tests/components/devolo_home_network/test_update.py index 7f70524fa5b..4fe7a173309 100644 --- a/tests/components/devolo_home_network/test_update.py +++ b/tests/components/devolo_home_network/test_update.py @@ -141,7 +141,7 @@ async def test_device_failure_update( async def test_auth_failed(hass: HomeAssistant, mock_device: MockDevice) -> None: - """Test updating unautherized triggers the reauth flow.""" + """Test updating unauthorized triggers the reauth flow.""" entry = configure_integration(hass) device_name = entry.title.replace(" ", "_").lower() state_key = f"{PLATFORM}.{device_name}_firmware" From 067daad70eea56a457360e51199efa2f24476fd5 Mon Sep 17 00:00:00 2001 From: Klaas Schoute Date: Fri, 13 Dec 2024 15:29:34 +0100 Subject: [PATCH 0638/1198] Set quality scale to silver for Powerfox integration (#133095) --- homeassistant/components/powerfox/manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/powerfox/manifest.json b/homeassistant/components/powerfox/manifest.json index a7285bb213f..7083ffe8de7 100644 --- a/homeassistant/components/powerfox/manifest.json +++ b/homeassistant/components/powerfox/manifest.json @@ 
-5,7 +5,7 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/powerfox", "iot_class": "cloud_polling", - "quality_scale": "bronze", + "quality_scale": "silver", "requirements": ["powerfox==1.0.0"], "zeroconf": [ { From 8080ad14bffd4f975c1e2c6cf007891194fe1909 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 13 Dec 2024 15:34:02 +0100 Subject: [PATCH 0639/1198] Add warning when light entities do not provide kelvin attributes or properties (#132723) --- homeassistant/components/light/__init__.py | 73 +++++++++++++++++++--- homeassistant/components/light/const.py | 5 ++ tests/components/light/common.py | 6 +- tests/components/light/test_init.py | 72 ++++++++++++++++++++- 4 files changed, 143 insertions(+), 13 deletions(-) diff --git a/homeassistant/components/light/__init__.py b/homeassistant/components/light/__init__.py index 121732c918f..d4b38b498f3 100644 --- a/homeassistant/components/light/__init__.py +++ b/homeassistant/components/light/__init__.py @@ -32,6 +32,7 @@ from homeassistant.helpers.deprecation import ( ) from homeassistant.helpers.entity import ToggleEntity, ToggleEntityDescription from homeassistant.helpers.entity_component import EntityComponent +from homeassistant.helpers.frame import ReportBehavior, report_usage from homeassistant.helpers.typing import ConfigType, VolDictType from homeassistant.loader import bind_hass import homeassistant.util.color as color_util @@ -41,6 +42,8 @@ from .const import ( # noqa: F401 COLOR_MODES_COLOR, DATA_COMPONENT, DATA_PROFILES, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, DOMAIN, SCAN_INTERVAL, VALID_COLOR_MODES, @@ -863,17 +866,15 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): entity_description: LightEntityDescription _attr_brightness: int | None = None _attr_color_mode: ColorMode | str | None = None - _attr_color_temp: int | None = None _attr_color_temp_kelvin: int | None = None _attr_effect_list: list[str] | None = None _attr_effect: str | None = None _attr_hs_color: tuple[float, float] | None = None - # Default to the Philips Hue value that HA has always assumed - # https://developers.meethue.com/documentation/core-concepts + # We cannot set defaults without causing breaking changes until mireds + # are fully removed. 
Until then, developers can explicitly + # use DEFAULT_MIN_KELVIN and DEFAULT_MAX_KELVIN _attr_max_color_temp_kelvin: int | None = None _attr_min_color_temp_kelvin: int | None = None - _attr_max_mireds: int = 500 # 2000 K - _attr_min_mireds: int = 153 # 6500 K _attr_rgb_color: tuple[int, int, int] | None = None _attr_rgbw_color: tuple[int, int, int, int] | None = None _attr_rgbww_color: tuple[int, int, int, int, int] | None = None @@ -881,6 +882,11 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): _attr_supported_features: LightEntityFeature = LightEntityFeature(0) _attr_xy_color: tuple[float, float] | None = None + # Deprecated, see https://github.com/home-assistant/core/pull/79591 + _attr_color_temp: Final[int | None] = None + _attr_max_mireds: Final[int] = 500 # = 2000 K + _attr_min_mireds: Final[int] = 153 # = 6535.94 K (~ 6500 K) + __color_mode_reported = False @cached_property @@ -956,32 +962,70 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): """Return the rgbww color value [int, int, int, int, int].""" return self._attr_rgbww_color + @final @cached_property def color_temp(self) -> int | None: - """Return the CT color value in mireds.""" + """Return the CT color value in mireds. + + Deprecated, see https://github.com/home-assistant/core/pull/79591 + """ return self._attr_color_temp @property def color_temp_kelvin(self) -> int | None: """Return the CT color value in Kelvin.""" if self._attr_color_temp_kelvin is None and (color_temp := self.color_temp): + report_usage( + "is using mireds for current light color temperature, when " + "it should be adjusted to use the kelvin attribute " + "`_attr_color_temp_kelvin` or override the kelvin property " + "`color_temp_kelvin` (see " + "https://github.com/home-assistant/core/pull/79591)", + breaks_in_ha_version="2026.1", + core_behavior=ReportBehavior.LOG, + integration_domain=self.platform.platform_name + if self.platform + else None, + exclude_integrations={DOMAIN}, + ) return color_util.color_temperature_mired_to_kelvin(color_temp) return self._attr_color_temp_kelvin + @final @cached_property def min_mireds(self) -> int: - """Return the coldest color_temp that this light supports.""" + """Return the coldest color_temp that this light supports. + + Deprecated, see https://github.com/home-assistant/core/pull/79591 + """ return self._attr_min_mireds + @final @cached_property def max_mireds(self) -> int: - """Return the warmest color_temp that this light supports.""" + """Return the warmest color_temp that this light supports. 
+ + Deprecated, see https://github.com/home-assistant/core/pull/79591 + """ return self._attr_max_mireds @property def min_color_temp_kelvin(self) -> int: """Return the warmest color_temp_kelvin that this light supports.""" if self._attr_min_color_temp_kelvin is None: + report_usage( + "is using mireds for warmest light color temperature, when " + "it should be adjusted to use the kelvin attribute " + "`_attr_min_color_temp_kelvin` or override the kelvin property " + "`min_color_temp_kelvin`, possibly with default DEFAULT_MIN_KELVIN " + "(see https://github.com/home-assistant/core/pull/79591)", + breaks_in_ha_version="2026.1", + core_behavior=ReportBehavior.LOG, + integration_domain=self.platform.platform_name + if self.platform + else None, + exclude_integrations={DOMAIN}, + ) return color_util.color_temperature_mired_to_kelvin(self.max_mireds) return self._attr_min_color_temp_kelvin @@ -989,6 +1033,19 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): def max_color_temp_kelvin(self) -> int: """Return the coldest color_temp_kelvin that this light supports.""" if self._attr_max_color_temp_kelvin is None: + report_usage( + "is using mireds for coldest light color temperature, when " + "it should be adjusted to use the kelvin attribute " + "`_attr_max_color_temp_kelvin` or override the kelvin property " + "`max_color_temp_kelvin`, possibly with default DEFAULT_MAX_KELVIN " + "(see https://github.com/home-assistant/core/pull/79591)", + breaks_in_ha_version="2026.1", + core_behavior=ReportBehavior.LOG, + integration_domain=self.platform.platform_name + if self.platform + else None, + exclude_integrations={DOMAIN}, + ) return color_util.color_temperature_mired_to_kelvin(self.min_mireds) return self._attr_max_color_temp_kelvin diff --git a/homeassistant/components/light/const.py b/homeassistant/components/light/const.py index 19b8734038e..d27750a950d 100644 --- a/homeassistant/components/light/const.py +++ b/homeassistant/components/light/const.py @@ -66,3 +66,8 @@ COLOR_MODES_COLOR = { ColorMode.RGBWW, ColorMode.XY, } + +# Default to the Philips Hue value that HA has always assumed +# https://developers.meethue.com/documentation/core-concepts +DEFAULT_MIN_KELVIN = 2000 # 500 mireds +DEFAULT_MAX_KELVIN = 6535 # 153 mireds diff --git a/tests/components/light/common.py b/tests/components/light/common.py index d696c7ab8cf..b29ac0c7c89 100644 --- a/tests/components/light/common.py +++ b/tests/components/light/common.py @@ -21,6 +21,8 @@ from homeassistant.components.light import ( ATTR_TRANSITION, ATTR_WHITE, ATTR_XY_COLOR, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, DOMAIN, ColorMode, LightEntity, @@ -153,8 +155,8 @@ TURN_ON_ARG_TO_COLOR_MODE = { class MockLight(MockToggleEntity, LightEntity): """Mock light class.""" - _attr_max_color_temp_kelvin = 6500 - _attr_min_color_temp_kelvin = 2000 + _attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN + _attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN supported_features = LightEntityFeature(0) brightness = None diff --git a/tests/components/light/test_init.py b/tests/components/light/test_init.py index bf09774073b..713ce553ae6 100644 --- a/tests/components/light/test_init.py +++ b/tests/components/light/test_init.py @@ -20,6 +20,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, Unauthorized +from homeassistant.helpers import frame from homeassistant.setup import async_setup_component import homeassistant.util.color as color_util @@ 
-1209,7 +1210,7 @@ async def test_light_state_off(hass: HomeAssistant) -> None: "hs_color": None, "rgb_color": None, "xy_color": None, - "max_color_temp_kelvin": 6500, + "max_color_temp_kelvin": 6535, "max_mireds": 500, "min_color_temp_kelvin": 2000, "min_mireds": 153, @@ -1842,7 +1843,7 @@ async def test_light_service_call_color_temp_conversion(hass: HomeAssistant) -> assert entity1.min_mireds == 153 assert entity1.max_mireds == 500 assert entity1.min_color_temp_kelvin == 2000 - assert entity1.max_color_temp_kelvin == 6500 + assert entity1.max_color_temp_kelvin == 6535 assert await async_setup_component(hass, "light", {"light": {"platform": "test"}}) await hass.async_block_till_done() @@ -1855,7 +1856,7 @@ async def test_light_service_call_color_temp_conversion(hass: HomeAssistant) -> assert state.attributes["min_mireds"] == 153 assert state.attributes["max_mireds"] == 500 assert state.attributes["min_color_temp_kelvin"] == 2000 - assert state.attributes["max_color_temp_kelvin"] == 6500 + assert state.attributes["max_color_temp_kelvin"] == 6535 state = hass.states.get(entity1.entity_id) assert state.attributes["supported_color_modes"] == [light.ColorMode.RGBWW] @@ -2547,6 +2548,71 @@ def test_report_invalid_color_modes( assert (expected_warning in caplog.text) is warning_expected +@pytest.mark.parametrize( + ("attributes", "expected_warnings", "expected_values"), + [ + ( + { + "_attr_color_temp_kelvin": 4000, + "_attr_min_color_temp_kelvin": 3000, + "_attr_max_color_temp_kelvin": 5000, + }, + {"current": False, "warmest": False, "coldest": False}, + # Just highlighting that the attributes match the + # converted kelvin values, not the mired properties + (3000, 4000, 5000, 200, 250, 333, 153, None, 500), + ), + ( + {"_attr_color_temp": 350, "_attr_min_mireds": 300, "_attr_max_mireds": 400}, + {"current": True, "warmest": True, "coldest": True}, + (2500, 2857, 3333, 300, 350, 400, 300, 350, 400), + ), + ( + {}, + {"current": False, "warmest": True, "coldest": True}, + (2000, None, 6535, 153, None, 500, 153, None, 500), + ), + ], + ids=["with_kelvin", "with_mired_values", "with_mired_defaults"], +) +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +def test_missing_kelvin_property_warnings( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + attributes: dict[str, int | None], + expected_warnings: dict[str, bool], + expected_values: tuple[int, int | None, int], +) -> None: + """Test missing kelvin properties.""" + + class MockLightEntityEntity(light.LightEntity): + _attr_color_mode = light.ColorMode.COLOR_TEMP + _attr_is_on = True + _attr_supported_features = light.LightEntityFeature.EFFECT + _attr_supported_color_modes = {light.ColorMode.COLOR_TEMP} + platform = MockEntityPlatform(hass, platform_name="test") + + entity = MockLightEntityEntity() + for k, v in attributes.items(): + setattr(entity, k, v) + + state = entity._async_calculate_state() + for warning, expected in expected_warnings.items(): + assert ( + f"is using mireds for {warning} light color temperature" in caplog.text + ) is expected, f"Expected {expected} for '{warning}'" + + assert state.attributes[light.ATTR_MIN_COLOR_TEMP_KELVIN] == expected_values[0] + assert state.attributes[light.ATTR_COLOR_TEMP_KELVIN] == expected_values[1] + assert state.attributes[light.ATTR_MAX_COLOR_TEMP_KELVIN] == expected_values[2] + assert state.attributes[light.ATTR_MIN_MIREDS] == expected_values[3] + assert state.attributes[light.ATTR_COLOR_TEMP] == expected_values[4] + assert state.attributes[light.ATTR_MAX_MIREDS] == 
expected_values[5] + assert entity.min_mireds == expected_values[6] + assert entity.color_temp == expected_values[7] + assert entity.max_mireds == expected_values[8] + + @pytest.mark.parametrize( "module", [light], From d6c81830a41d4904127725f33a338a80de8839ad Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Fri, 13 Dec 2024 15:42:40 +0100 Subject: [PATCH 0640/1198] Fix missing password for slide_local (#133142) --- homeassistant/components/slide_local/coordinator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/slide_local/coordinator.py b/homeassistant/components/slide_local/coordinator.py index c7542a4b813..e5311967198 100644 --- a/homeassistant/components/slide_local/coordinator.py +++ b/homeassistant/components/slide_local/coordinator.py @@ -47,7 +47,7 @@ class SlideCoordinator(DataUpdateCoordinator[dict[str, Any]]): self.api_version = entry.data[CONF_API_VERSION] self.mac = entry.data[CONF_MAC] self.host = entry.data[CONF_HOST] - self.password = entry.data[CONF_PASSWORD] + self.password = entry.data[CONF_PASSWORD] if self.api_version == 1 else "" async def _async_setup(self) -> None: """Do initialization logic for Slide coordinator.""" From 5f91676df07bd4b9ff355564f3018dfc6b99fbe3 Mon Sep 17 00:00:00 2001 From: Christopher Fenner <9592452+CFenner@users.noreply.github.com> Date: Fri, 13 Dec 2024 16:02:13 +0100 Subject: [PATCH 0641/1198] Bump PyViCare to 2.38.0 (#133126) --- homeassistant/components/vicare/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/vicare/manifest.json b/homeassistant/components/vicare/manifest.json index 8ce996ab81d..0bb5594e829 100644 --- a/homeassistant/components/vicare/manifest.json +++ b/homeassistant/components/vicare/manifest.json @@ -11,5 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/vicare", "iot_class": "cloud_polling", "loggers": ["PyViCare"], - "requirements": ["PyViCare==2.35.0"] + "requirements": ["PyViCare==2.38.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 219094c0a28..07261f2673f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -100,7 +100,7 @@ PyTransportNSW==0.1.1 PyTurboJPEG==1.7.5 # homeassistant.components.vicare -PyViCare==2.35.0 +PyViCare==2.38.0 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 46a7d4b29b0..4b39c915e97 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -94,7 +94,7 @@ PyTransportNSW==0.1.1 PyTurboJPEG==1.7.5 # homeassistant.components.vicare -PyViCare==2.35.0 +PyViCare==2.38.0 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 From f03f24f0361e93baa6d68971abff142c3e78ec05 Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Fri, 13 Dec 2024 16:05:20 +0100 Subject: [PATCH 0642/1198] Velbus test before setup (#133069) * Velbus test before setup * Update homeassistant/components/velbus/__init__.py Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> * Add the connect named argument to make it clear we are testing the connection * Correctly cleanup after the test * Sync code for velbusaio 2024.12.2 * follow up * rename connect_task to scan_task --------- Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- homeassistant/components/velbus/__init__.py | 18 +++++++++++------- .../components/velbus/binary_sensor.py | 
2 +- homeassistant/components/velbus/button.py | 2 +- homeassistant/components/velbus/climate.py | 2 +- homeassistant/components/velbus/cover.py | 2 +- homeassistant/components/velbus/light.py | 2 +- .../components/velbus/quality_scale.yaml | 2 +- homeassistant/components/velbus/select.py | 2 +- homeassistant/components/velbus/sensor.py | 2 +- homeassistant/components/velbus/switch.py | 2 +- 10 files changed, 20 insertions(+), 16 deletions(-) diff --git a/homeassistant/components/velbus/__init__.py b/homeassistant/components/velbus/__init__.py index 6afcc20cc0f..ad1c35a124b 100644 --- a/homeassistant/components/velbus/__init__.py +++ b/homeassistant/components/velbus/__init__.py @@ -9,11 +9,12 @@ import os import shutil from velbusaio.controller import Velbus +from velbusaio.exceptions import VelbusConnectionFailed from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PORT, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import PlatformNotReady +from homeassistant.exceptions import ConfigEntryNotReady, PlatformNotReady from homeassistant.helpers import config_validation as cv, device_registry as dr from homeassistant.helpers.storage import STORAGE_DIR from homeassistant.helpers.typing import ConfigType @@ -44,13 +45,13 @@ class VelbusData: """Runtime data for the Velbus config entry.""" controller: Velbus - connect_task: asyncio.Task + scan_task: asyncio.Task -async def velbus_connect_task( +async def velbus_scan_task( controller: Velbus, hass: HomeAssistant, entry_id: str ) -> None: - """Task to offload the long running connect.""" + """Task to offload the long running scan.""" try: await controller.start() except ConnectionError as ex: @@ -85,10 +86,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: VelbusConfigEntry) -> bo entry.data[CONF_PORT], cache_dir=hass.config.path(STORAGE_DIR, f"velbuscache-{entry.entry_id}"), ) - await controller.connect() + try: + await controller.connect() + except VelbusConnectionFailed as error: + raise ConfigEntryNotReady("Cannot connect to Velbus") from error - task = hass.async_create_task(velbus_connect_task(controller, hass, entry.entry_id)) - entry.runtime_data = VelbusData(controller=controller, connect_task=task) + task = hass.async_create_task(velbus_scan_task(controller, hass, entry.entry_id)) + entry.runtime_data = VelbusData(controller=controller, scan_task=task) _migrate_device_identifiers(hass, entry.entry_id) diff --git a/homeassistant/components/velbus/binary_sensor.py b/homeassistant/components/velbus/binary_sensor.py index dd65ff7d50d..584f28e394a 100644 --- a/homeassistant/components/velbus/binary_sensor.py +++ b/homeassistant/components/velbus/binary_sensor.py @@ -16,7 +16,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await entry.runtime_data.connect_task + await entry.runtime_data.scan_task async_add_entities( VelbusBinarySensor(channel) for channel in entry.runtime_data.controller.get_all_binary_sensor() diff --git a/homeassistant/components/velbus/button.py b/homeassistant/components/velbus/button.py index 2b908c188b8..910ae59b69e 100644 --- a/homeassistant/components/velbus/button.py +++ b/homeassistant/components/velbus/button.py @@ -22,7 +22,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await entry.runtime_data.connect_task + await entry.runtime_data.scan_task 
async_add_entities( VelbusButton(channel) for channel in entry.runtime_data.controller.get_all_button() diff --git a/homeassistant/components/velbus/climate.py b/homeassistant/components/velbus/climate.py index fa8391d4199..e9128ef7de1 100644 --- a/homeassistant/components/velbus/climate.py +++ b/homeassistant/components/velbus/climate.py @@ -27,7 +27,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await entry.runtime_data.connect_task + await entry.runtime_data.scan_task async_add_entities( VelbusClimate(channel) for channel in entry.runtime_data.controller.get_all_climate() diff --git a/homeassistant/components/velbus/cover.py b/homeassistant/components/velbus/cover.py index 7850e7b1895..9257dd3f36f 100644 --- a/homeassistant/components/velbus/cover.py +++ b/homeassistant/components/velbus/cover.py @@ -24,7 +24,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await entry.runtime_data.connect_task + await entry.runtime_data.scan_task async_add_entities( VelbusCover(channel) for channel in entry.runtime_data.controller.get_all_cover() diff --git a/homeassistant/components/velbus/light.py b/homeassistant/components/velbus/light.py index 0df4f70d753..afe3104aa9a 100644 --- a/homeassistant/components/velbus/light.py +++ b/homeassistant/components/velbus/light.py @@ -35,7 +35,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await entry.runtime_data.connect_task + await entry.runtime_data.scan_task entities: list[Entity] = [ VelbusLight(channel) for channel in entry.runtime_data.controller.get_all_light() diff --git a/homeassistant/components/velbus/quality_scale.yaml b/homeassistant/components/velbus/quality_scale.yaml index ab2df68f973..37e55fee19c 100644 --- a/homeassistant/components/velbus/quality_scale.yaml +++ b/homeassistant/components/velbus/quality_scale.yaml @@ -25,7 +25,7 @@ rules: has-entity-name: todo runtime-data: done test-before-configure: done - test-before-setup: todo + test-before-setup: done unique-config-entry: status: todo comment: | diff --git a/homeassistant/components/velbus/select.py b/homeassistant/components/velbus/select.py index f0ad509270c..c0a0a5f532d 100644 --- a/homeassistant/components/velbus/select.py +++ b/homeassistant/components/velbus/select.py @@ -17,7 +17,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus select based on config_entry.""" - await entry.runtime_data.connect_task + await entry.runtime_data.scan_task async_add_entities( VelbusSelect(channel) for channel in entry.runtime_data.controller.get_all_select() diff --git a/homeassistant/components/velbus/sensor.py b/homeassistant/components/velbus/sensor.py index 598287839c1..2c341ea851d 100644 --- a/homeassistant/components/velbus/sensor.py +++ b/homeassistant/components/velbus/sensor.py @@ -22,7 +22,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await entry.runtime_data.connect_task + await entry.runtime_data.scan_task entities = [] for channel in entry.runtime_data.controller.get_all_sensor(): entities.append(VelbusSensor(channel)) diff --git a/homeassistant/components/velbus/switch.py b/homeassistant/components/velbus/switch.py index f3bd009d25e..dccb0a02ffa 100644 --- 
a/homeassistant/components/velbus/switch.py +++ b/homeassistant/components/velbus/switch.py @@ -18,7 +18,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await entry.runtime_data.connect_task + await entry.runtime_data.scan_task async_add_entities( VelbusSwitch(channel) for channel in entry.runtime_data.controller.get_all_switch() From 97da8481d282eea927dcc26fd36a0e75f9c42214 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Fri, 13 Dec 2024 16:11:45 +0100 Subject: [PATCH 0643/1198] Add reconfigure flow to MQTT (#132246) * Add reconfigure flow for MQTT integration * Add test and translation strings * Update quality scale configuration * Do not cache ConfigEntry in flow * Make sorce condition explictit * Rework from suggested changes * Do not allow reconfigure_entry and reconfigure_entry_data to be `None` --- homeassistant/components/mqtt/config_flow.py | 34 +++++++++-- .../components/mqtt/quality_scale.yaml | 4 +- homeassistant/components/mqtt/strings.json | 1 + tests/components/mqtt/test_config_flow.py | 56 +++++++++++++++++++ 4 files changed, 88 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/mqtt/config_flow.py b/homeassistant/components/mqtt/config_flow.py index 34d43ad87f3..ad3f3d35457 100644 --- a/homeassistant/components/mqtt/config_flow.py +++ b/homeassistant/components/mqtt/config_flow.py @@ -18,6 +18,7 @@ import voluptuous as vol from homeassistant.components.file_upload import process_uploaded_file from homeassistant.components.hassio import AddonError, AddonManager, AddonState from homeassistant.config_entries import ( + SOURCE_RECONFIGURE, ConfigEntry, ConfigFlow, ConfigFlowResult, @@ -469,24 +470,41 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): errors: dict[str, str] = {} fields: OrderedDict[Any, Any] = OrderedDict() validated_user_input: dict[str, Any] = {} + broker_config: dict[str, Any] = {} + if is_reconfigure := (self.source == SOURCE_RECONFIGURE): + reconfigure_entry = self._get_reconfigure_entry() if await async_get_broker_settings( self, fields, - None, + reconfigure_entry.data if is_reconfigure else None, user_input, validated_user_input, errors, ): + if is_reconfigure: + broker_config.update( + update_password_from_user_input( + reconfigure_entry.data.get(CONF_PASSWORD), validated_user_input + ), + ) + else: + broker_config = validated_user_input + can_connect = await self.hass.async_add_executor_job( try_connection, - validated_user_input, + broker_config, ) if can_connect: + if is_reconfigure: + return self.async_update_reload_and_abort( + reconfigure_entry, + data_updates=broker_config, + ) validated_user_input[CONF_DISCOVERY] = DEFAULT_DISCOVERY return self.async_create_entry( - title=validated_user_input[CONF_BROKER], - data=validated_user_input, + title=broker_config[CONF_BROKER], + data=broker_config, ) errors["base"] = "cannot_connect" @@ -495,6 +513,12 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): step_id="broker", data_schema=vol.Schema(fields), errors=errors ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a reconfiguration flow initialized by the user.""" + return await self.async_step_broker() + async def async_step_hassio( self, discovery_info: HassioServiceInfo ) -> ConfigFlowResult: @@ -547,7 +571,7 @@ class MQTTOptionsFlowHandler(OptionsFlow): def __init__(self) -> None: """Initialize MQTT options flow.""" - self.broker_config: dict[str, str | int] = {} + 
self.broker_config: dict[str, Any] = {} async def async_step_init(self, user_input: None = None) -> ConfigFlowResult: """Manage the MQTT options.""" diff --git a/homeassistant/components/mqtt/quality_scale.yaml b/homeassistant/components/mqtt/quality_scale.yaml index d1730d8d2fe..f31d3e25d15 100644 --- a/homeassistant/components/mqtt/quality_scale.yaml +++ b/homeassistant/components/mqtt/quality_scale.yaml @@ -90,9 +90,9 @@ rules: This is not possible because the integrations generates entities based on a user supplied config or discovery. reconfiguration-flow: - status: exempt + status: done comment: > - This integration is reconfigured via options flow. + This integration can also be reconfigured via options flow. dynamic-devices: status: done comment: | diff --git a/homeassistant/components/mqtt/strings.json b/homeassistant/components/mqtt/strings.json index 4d23007e51b..c062c111487 100644 --- a/homeassistant/components/mqtt/strings.json +++ b/homeassistant/components/mqtt/strings.json @@ -101,6 +101,7 @@ "addon_connection_failed": "Failed to connect to the {addon} add-on. Check the add-on status and try again later.", "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" }, "error": { diff --git a/tests/components/mqtt/test_config_flow.py b/tests/components/mqtt/test_config_flow.py index e99063b088b..fc1221956de 100644 --- a/tests/components/mqtt/test_config_flow.py +++ b/tests/components/mqtt/test_config_flow.py @@ -2216,3 +2216,59 @@ async def test_change_websockets_transport_to_tcp( mqtt.CONF_DISCOVERY: True, mqtt.CONF_DISCOVERY_PREFIX: "homeassistant_test", } + + +@pytest.mark.usefixtures("mock_ssl_context", "mock_process_uploaded_file") +@pytest.mark.parametrize( + "mqtt_config_entry_data", + [ + { + mqtt.CONF_BROKER: "test-broker", + CONF_PORT: 1234, + mqtt.CONF_TRANSPORT: "websockets", + mqtt.CONF_WS_HEADERS: {"header_1": "custom_header1"}, + mqtt.CONF_WS_PATH: "/some_path", + } + ], +) +async def test_reconfigure_flow_form( + hass: HomeAssistant, + mock_try_connection: MagicMock, + mqtt_mock_entry: MqttMockHAClientGenerator, +) -> None: + """Test reconfigure flow.""" + await mqtt_mock_entry() + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + result = await hass.config_entries.flow.async_init( + mqtt.DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + "show_advanced_options": True, + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "broker" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + mqtt.CONF_BROKER: "10.10.10,10", + CONF_PORT: 1234, + mqtt.CONF_TRANSPORT: "websockets", + mqtt.CONF_WS_HEADERS: '{"header_1": "custom_header1"}', + mqtt.CONF_WS_PATH: "/some_new_path", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert entry.data == { + mqtt.CONF_BROKER: "10.10.10,10", + CONF_PORT: 1234, + mqtt.CONF_TRANSPORT: "websockets", + mqtt.CONF_WS_HEADERS: {"header_1": "custom_header1"}, + mqtt.CONF_WS_PATH: "/some_new_path", + } + await hass.async_block_till_done(wait_background_tasks=True) From 1fbe880c5fac6554128d4d2d4630c984adb8412c Mon Sep 17 00:00:00 
2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 13 Dec 2024 16:52:47 +0100 Subject: [PATCH 0644/1198] Deprecate light constants (#132680) * Deprecate light constants * Reference deprecated values in MQTT light * Reference deprecated values in test_recorder * Adjust * Adjust * Add specific test --- homeassistant/components/light/__init__.py | 104 +++++++++++------- .../components/light/reproduce_state.py | 11 +- .../components/mqtt/light/schema_basic.py | 12 +- tests/components/light/test_init.py | 87 ++++++++++++++- tests/components/light/test_recorder.py | 12 +- 5 files changed, 168 insertions(+), 58 deletions(-) diff --git a/homeassistant/components/light/__init__.py b/homeassistant/components/light/__init__.py index d4b38b498f3..33bd259469b 100644 --- a/homeassistant/components/light/__init__.py +++ b/homeassistant/components/light/__init__.py @@ -186,16 +186,26 @@ ATTR_RGBW_COLOR = "rgbw_color" ATTR_RGBWW_COLOR = "rgbww_color" ATTR_XY_COLOR = "xy_color" ATTR_HS_COLOR = "hs_color" -ATTR_COLOR_TEMP = "color_temp" # Deprecated in HA Core 2022.11 -ATTR_KELVIN = "kelvin" # Deprecated in HA Core 2022.11 -ATTR_MIN_MIREDS = "min_mireds" # Deprecated in HA Core 2022.11 -ATTR_MAX_MIREDS = "max_mireds" # Deprecated in HA Core 2022.11 ATTR_COLOR_TEMP_KELVIN = "color_temp_kelvin" ATTR_MIN_COLOR_TEMP_KELVIN = "min_color_temp_kelvin" ATTR_MAX_COLOR_TEMP_KELVIN = "max_color_temp_kelvin" ATTR_COLOR_NAME = "color_name" ATTR_WHITE = "white" +# Deprecated in HA Core 2022.11 +_DEPRECATED_ATTR_COLOR_TEMP: Final = DeprecatedConstant( + "color_temp", "kelvin equivalent (ATTR_COLOR_TEMP_KELVIN)", "2026.1" +) +_DEPRECATED_ATTR_KELVIN: Final = DeprecatedConstant( + "kelvin", "ATTR_COLOR_TEMP_KELVIN", "2026.1" +) +_DEPRECATED_ATTR_MIN_MIREDS: Final = DeprecatedConstant( + "min_mireds", "kelvin equivalent (ATTR_MAX_COLOR_TEMP_KELVIN)", "2026.1" +) +_DEPRECATED_ATTR_MAX_MIREDS: Final = DeprecatedConstant( + "max_mireds", "kelvin equivalent (ATTR_MIN_COLOR_TEMP_KELVIN)", "2026.1" +) + # Brightness of the light, 0..255 or percentage ATTR_BRIGHTNESS = "brightness" ATTR_BRIGHTNESS_PCT = "brightness_pct" @@ -240,11 +250,11 @@ LIGHT_TURN_ON_SCHEMA: VolDictType = { vol.Exclusive(ATTR_BRIGHTNESS_STEP, ATTR_BRIGHTNESS): VALID_BRIGHTNESS_STEP, vol.Exclusive(ATTR_BRIGHTNESS_STEP_PCT, ATTR_BRIGHTNESS): VALID_BRIGHTNESS_STEP_PCT, vol.Exclusive(ATTR_COLOR_NAME, COLOR_GROUP): cv.string, - vol.Exclusive(ATTR_COLOR_TEMP, COLOR_GROUP): vol.All( + vol.Exclusive(_DEPRECATED_ATTR_COLOR_TEMP.value, COLOR_GROUP): vol.All( vol.Coerce(int), vol.Range(min=1) ), vol.Exclusive(ATTR_COLOR_TEMP_KELVIN, COLOR_GROUP): cv.positive_int, - vol.Exclusive(ATTR_KELVIN, COLOR_GROUP): cv.positive_int, + vol.Exclusive(_DEPRECATED_ATTR_KELVIN.value, COLOR_GROUP): cv.positive_int, vol.Exclusive(ATTR_HS_COLOR, COLOR_GROUP): vol.All( vol.Coerce(tuple), vol.ExactSequence( @@ -307,19 +317,29 @@ def preprocess_turn_on_alternatives( _LOGGER.warning("Got unknown color %s, falling back to white", color_name) params[ATTR_RGB_COLOR] = (255, 255, 255) - if (mired := params.pop(ATTR_COLOR_TEMP, None)) is not None: + if (mired := params.pop(_DEPRECATED_ATTR_COLOR_TEMP.value, None)) is not None: + _LOGGER.warning( + "Got `color_temp` argument in `turn_on` service, which is deprecated " + "and will break in Home Assistant 2026.1, please use " + "`color_temp_kelvin` argument" + ) kelvin = color_util.color_temperature_mired_to_kelvin(mired) - params[ATTR_COLOR_TEMP] = int(mired) + params[_DEPRECATED_ATTR_COLOR_TEMP.value] = int(mired) 
params[ATTR_COLOR_TEMP_KELVIN] = int(kelvin) - if (kelvin := params.pop(ATTR_KELVIN, None)) is not None: + if (kelvin := params.pop(_DEPRECATED_ATTR_KELVIN.value, None)) is not None: + _LOGGER.warning( + "Got `kelvin` argument in `turn_on` service, which is deprecated " + "and will break in Home Assistant 2026.1, please use " + "`color_temp_kelvin` argument" + ) mired = color_util.color_temperature_kelvin_to_mired(kelvin) - params[ATTR_COLOR_TEMP] = int(mired) + params[_DEPRECATED_ATTR_COLOR_TEMP.value] = int(mired) params[ATTR_COLOR_TEMP_KELVIN] = int(kelvin) if (kelvin := params.pop(ATTR_COLOR_TEMP_KELVIN, None)) is not None: mired = color_util.color_temperature_kelvin_to_mired(kelvin) - params[ATTR_COLOR_TEMP] = int(mired) + params[_DEPRECATED_ATTR_COLOR_TEMP.value] = int(mired) params[ATTR_COLOR_TEMP_KELVIN] = int(kelvin) brightness_pct = params.pop(ATTR_BRIGHTNESS_PCT, None) @@ -361,7 +381,7 @@ def filter_turn_on_params(light: LightEntity, params: dict[str, Any]) -> dict[st if not brightness_supported(supported_color_modes): params.pop(ATTR_BRIGHTNESS, None) if ColorMode.COLOR_TEMP not in supported_color_modes: - params.pop(ATTR_COLOR_TEMP, None) + params.pop(_DEPRECATED_ATTR_COLOR_TEMP.value, None) params.pop(ATTR_COLOR_TEMP_KELVIN, None) if ColorMode.HS not in supported_color_modes: params.pop(ATTR_HS_COLOR, None) @@ -443,7 +463,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: and ColorMode.COLOR_TEMP not in supported_color_modes and ColorMode.RGBWW in supported_color_modes ): - params.pop(ATTR_COLOR_TEMP) + params.pop(_DEPRECATED_ATTR_COLOR_TEMP.value) color_temp = params.pop(ATTR_COLOR_TEMP_KELVIN) brightness = params.get(ATTR_BRIGHTNESS, light.brightness) params[ATTR_RGBWW_COLOR] = color_util.color_temperature_to_rgbww( @@ -453,7 +473,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: light.max_color_temp_kelvin, ) elif ColorMode.COLOR_TEMP not in legacy_supported_color_modes: - params.pop(ATTR_COLOR_TEMP) + params.pop(_DEPRECATED_ATTR_COLOR_TEMP.value) color_temp = params.pop(ATTR_COLOR_TEMP_KELVIN) if color_supported(legacy_supported_color_modes): params[ATTR_HS_COLOR] = color_util.color_temperature_to_hs( @@ -500,8 +520,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: params[ATTR_COLOR_TEMP_KELVIN] = color_util.color_xy_to_temperature( *xy_color ) - params[ATTR_COLOR_TEMP] = color_util.color_temperature_kelvin_to_mired( - params[ATTR_COLOR_TEMP_KELVIN] + params[_DEPRECATED_ATTR_COLOR_TEMP.value] = ( + color_util.color_temperature_kelvin_to_mired( + params[ATTR_COLOR_TEMP_KELVIN] + ) ) elif ATTR_RGB_COLOR in params and ColorMode.RGB not in supported_color_modes: rgb_color = params.pop(ATTR_RGB_COLOR) @@ -523,8 +545,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: params[ATTR_COLOR_TEMP_KELVIN] = color_util.color_xy_to_temperature( *xy_color ) - params[ATTR_COLOR_TEMP] = color_util.color_temperature_kelvin_to_mired( - params[ATTR_COLOR_TEMP_KELVIN] + params[_DEPRECATED_ATTR_COLOR_TEMP.value] = ( + color_util.color_temperature_kelvin_to_mired( + params[ATTR_COLOR_TEMP_KELVIN] + ) ) elif ATTR_XY_COLOR in params and ColorMode.XY not in supported_color_modes: xy_color = params.pop(ATTR_XY_COLOR) @@ -544,8 +568,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: params[ATTR_COLOR_TEMP_KELVIN] = color_util.color_xy_to_temperature( *xy_color ) - params[ATTR_COLOR_TEMP] = 
color_util.color_temperature_kelvin_to_mired( - params[ATTR_COLOR_TEMP_KELVIN] + params[_DEPRECATED_ATTR_COLOR_TEMP.value] = ( + color_util.color_temperature_kelvin_to_mired( + params[ATTR_COLOR_TEMP_KELVIN] + ) ) elif ATTR_RGBW_COLOR in params and ColorMode.RGBW not in supported_color_modes: rgbw_color = params.pop(ATTR_RGBW_COLOR) @@ -565,8 +591,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: params[ATTR_COLOR_TEMP_KELVIN] = color_util.color_xy_to_temperature( *xy_color ) - params[ATTR_COLOR_TEMP] = color_util.color_temperature_kelvin_to_mired( - params[ATTR_COLOR_TEMP_KELVIN] + params[_DEPRECATED_ATTR_COLOR_TEMP.value] = ( + color_util.color_temperature_kelvin_to_mired( + params[ATTR_COLOR_TEMP_KELVIN] + ) ) elif ( ATTR_RGBWW_COLOR in params and ColorMode.RGBWW not in supported_color_modes @@ -589,8 +617,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: params[ATTR_COLOR_TEMP_KELVIN] = color_util.color_xy_to_temperature( *xy_color ) - params[ATTR_COLOR_TEMP] = color_util.color_temperature_kelvin_to_mired( - params[ATTR_COLOR_TEMP_KELVIN] + params[_DEPRECATED_ATTR_COLOR_TEMP.value] = ( + color_util.color_temperature_kelvin_to_mired( + params[ATTR_COLOR_TEMP_KELVIN] + ) ) # If white is set to True, set it to the light's brightness @@ -798,7 +828,7 @@ class Profiles: color_attributes = ( ATTR_COLOR_NAME, - ATTR_COLOR_TEMP, + _DEPRECATED_ATTR_COLOR_TEMP.value, ATTR_HS_COLOR, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, @@ -846,13 +876,13 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): { ATTR_SUPPORTED_COLOR_MODES, ATTR_EFFECT_LIST, - ATTR_MIN_MIREDS, - ATTR_MAX_MIREDS, + _DEPRECATED_ATTR_MIN_MIREDS.value, + _DEPRECATED_ATTR_MAX_MIREDS.value, ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_MAX_COLOR_TEMP_KELVIN, ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + _DEPRECATED_ATTR_COLOR_TEMP.value, ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, @@ -1072,16 +1102,16 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): data[ATTR_MIN_COLOR_TEMP_KELVIN] = min_color_temp_kelvin data[ATTR_MAX_COLOR_TEMP_KELVIN] = max_color_temp_kelvin if not max_color_temp_kelvin: - data[ATTR_MIN_MIREDS] = None + data[_DEPRECATED_ATTR_MIN_MIREDS.value] = None else: - data[ATTR_MIN_MIREDS] = color_util.color_temperature_kelvin_to_mired( - max_color_temp_kelvin + data[_DEPRECATED_ATTR_MIN_MIREDS.value] = ( + color_util.color_temperature_kelvin_to_mired(max_color_temp_kelvin) ) if not min_color_temp_kelvin: - data[ATTR_MAX_MIREDS] = None + data[_DEPRECATED_ATTR_MAX_MIREDS.value] = None else: - data[ATTR_MAX_MIREDS] = color_util.color_temperature_kelvin_to_mired( - min_color_temp_kelvin + data[_DEPRECATED_ATTR_MAX_MIREDS.value] = ( + color_util.color_temperature_kelvin_to_mired(min_color_temp_kelvin) ) if LightEntityFeature.EFFECT in supported_features: data[ATTR_EFFECT_LIST] = self.effect_list @@ -1254,14 +1284,14 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): color_temp_kelvin = self.color_temp_kelvin data[ATTR_COLOR_TEMP_KELVIN] = color_temp_kelvin if color_temp_kelvin: - data[ATTR_COLOR_TEMP] = ( + data[_DEPRECATED_ATTR_COLOR_TEMP.value] = ( color_util.color_temperature_kelvin_to_mired(color_temp_kelvin) ) else: - data[ATTR_COLOR_TEMP] = None + data[_DEPRECATED_ATTR_COLOR_TEMP.value] = None else: data[ATTR_COLOR_TEMP_KELVIN] = None - data[ATTR_COLOR_TEMP] = None + data[_DEPRECATED_ATTR_COLOR_TEMP.value] = None if color_supported(legacy_supported_color_modes) 
or color_temp_supported( legacy_supported_color_modes diff --git a/homeassistant/components/light/reproduce_state.py b/homeassistant/components/light/reproduce_state.py index a89209eb426..4e994ab791d 100644 --- a/homeassistant/components/light/reproduce_state.py +++ b/homeassistant/components/light/reproduce_state.py @@ -18,9 +18,9 @@ from homeassistant.core import Context, HomeAssistant, State from homeassistant.util import color as color_util from . import ( + _DEPRECATED_ATTR_COLOR_TEMP, ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, @@ -41,7 +41,7 @@ ATTR_GROUP = [ATTR_BRIGHTNESS, ATTR_EFFECT] COLOR_GROUP = [ ATTR_HS_COLOR, - ATTR_COLOR_TEMP, + _DEPRECATED_ATTR_COLOR_TEMP.value, ATTR_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, @@ -129,7 +129,12 @@ async def _async_reproduce_state( if (cm_attr_state := state.attributes.get(cm_attr.state_attr)) is None: if ( color_mode != ColorMode.COLOR_TEMP - or (mireds := state.attributes.get(ATTR_COLOR_TEMP)) is None + or ( + mireds := state.attributes.get( + _DEPRECATED_ATTR_COLOR_TEMP.value + ) + ) + is None ): _LOGGER.warning( "Color mode %s specified but attribute %s missing for: %s", diff --git a/homeassistant/components/mqtt/light/schema_basic.py b/homeassistant/components/mqtt/light/schema_basic.py index 9cc50daa329..635c552f37e 100644 --- a/homeassistant/components/mqtt/light/schema_basic.py +++ b/homeassistant/components/mqtt/light/schema_basic.py @@ -9,17 +9,17 @@ from typing import Any, cast import voluptuous as vol from homeassistant.components.light import ( + _DEPRECATED_ATTR_COLOR_TEMP, + _DEPRECATED_ATTR_MAX_MIREDS, + _DEPRECATED_ATTR_MIN_MIREDS, ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_EFFECT_LIST, ATTR_HS_COLOR, ATTR_MAX_COLOR_TEMP_KELVIN, - ATTR_MAX_MIREDS, ATTR_MIN_COLOR_TEMP_KELVIN, - ATTR_MIN_MIREDS, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, @@ -115,15 +115,15 @@ MQTT_LIGHT_ATTRIBUTES_BLOCKED = frozenset( { ATTR_COLOR_MODE, ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + _DEPRECATED_ATTR_COLOR_TEMP.value, ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_EFFECT_LIST, ATTR_HS_COLOR, ATTR_MAX_COLOR_TEMP_KELVIN, - ATTR_MAX_MIREDS, + _DEPRECATED_ATTR_MAX_MIREDS.value, ATTR_MIN_COLOR_TEMP_KELVIN, - ATTR_MIN_MIREDS, + _DEPRECATED_ATTR_MIN_MIREDS.value, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, diff --git a/tests/components/light/test_init.py b/tests/components/light/test_init.py index 713ce553ae6..303bf68f68c 100644 --- a/tests/components/light/test_init.py +++ b/tests/components/light/test_init.py @@ -2623,17 +2623,34 @@ def test_all(module: ModuleType) -> None: @pytest.mark.parametrize( - ("constant_name", "constant_value"), - [("SUPPORT_BRIGHTNESS", 1), ("SUPPORT_COLOR_TEMP", 2), ("SUPPORT_COLOR", 16)], + ("constant_name", "constant_value", "constant_replacement"), + [ + ("SUPPORT_BRIGHTNESS", 1, "supported_color_modes"), + ("SUPPORT_COLOR_TEMP", 2, "supported_color_modes"), + ("SUPPORT_COLOR", 16, "supported_color_modes"), + ("ATTR_COLOR_TEMP", "color_temp", "kelvin equivalent (ATTR_COLOR_TEMP_KELVIN)"), + ("ATTR_KELVIN", "kelvin", "ATTR_COLOR_TEMP_KELVIN"), + ( + "ATTR_MIN_MIREDS", + "min_mireds", + "kelvin equivalent (ATTR_MAX_COLOR_TEMP_KELVIN)", + ), + ( + "ATTR_MAX_MIREDS", + "max_mireds", + "kelvin equivalent (ATTR_MIN_COLOR_TEMP_KELVIN)", + ), + ], ) -def test_deprecated_support_light_constants( +def test_deprecated_light_constants( caplog: pytest.LogCaptureFixture, constant_name: str, - constant_value: 
int, + constant_value: int | str, + constant_replacement: str, ) -> None: - """Test deprecated format constants.""" + """Test deprecated light constants.""" import_and_test_deprecated_constant( - caplog, light, constant_name, "supported_color_modes", constant_value, "2026.1" + caplog, light, constant_name, constant_replacement, constant_value, "2026.1" ) @@ -2663,3 +2680,61 @@ def test_deprecated_color_mode_constants_enums( import_and_test_deprecated_constant_enum( caplog, light, entity_feature, "COLOR_MODE_", "2026.1" ) + + +async def test_deprecated_turn_on_arguments( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test color temp conversion in service calls.""" + entity = MockLight("Test_ct", STATE_ON, {light.ColorMode.COLOR_TEMP}) + setup_test_component_platform(hass, light.DOMAIN, [entity]) + + assert await async_setup_component( + hass, light.DOMAIN, {light.DOMAIN: {"platform": "test"}} + ) + await hass.async_block_till_done() + + state = hass.states.get(entity.entity_id) + assert state.attributes["supported_color_modes"] == [light.ColorMode.COLOR_TEMP] + + caplog.clear() + await hass.services.async_call( + "light", + "turn_on", + { + "entity_id": [entity.entity_id], + "color_temp": 200, + }, + blocking=True, + ) + assert "Got `color_temp` argument in `turn_on` service" in caplog.text + _, data = entity.last_call("turn_on") + assert data == {"color_temp": 200, "color_temp_kelvin": 5000} + + caplog.clear() + await hass.services.async_call( + "light", + "turn_on", + { + "entity_id": [entity.entity_id], + "kelvin": 5000, + }, + blocking=True, + ) + assert "Got `kelvin` argument in `turn_on` service" in caplog.text + _, data = entity.last_call("turn_on") + assert data == {"color_temp": 200, "color_temp_kelvin": 5000} + + caplog.clear() + await hass.services.async_call( + "light", + "turn_on", + { + "entity_id": [entity.entity_id], + "color_temp_kelvin": 5000, + }, + blocking=True, + ) + _, data = entity.last_call("turn_on") + assert data == {"color_temp": 200, "color_temp_kelvin": 5000} + assert "argument in `turn_on` service" not in caplog.text diff --git a/tests/components/light/test_recorder.py b/tests/components/light/test_recorder.py index f3f87ff6074..d53ece61170 100644 --- a/tests/components/light/test_recorder.py +++ b/tests/components/light/test_recorder.py @@ -9,17 +9,17 @@ import pytest from homeassistant.components import light from homeassistant.components.light import ( + _DEPRECATED_ATTR_COLOR_TEMP, + _DEPRECATED_ATTR_MAX_MIREDS, + _DEPRECATED_ATTR_MIN_MIREDS, ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_EFFECT_LIST, ATTR_HS_COLOR, ATTR_MAX_COLOR_TEMP_KELVIN, - ATTR_MAX_MIREDS, ATTR_MIN_COLOR_TEMP_KELVIN, - ATTR_MIN_MIREDS, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, @@ -66,8 +66,8 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant) assert len(states) >= 1 for entity_states in states.values(): for state in entity_states: - assert ATTR_MIN_MIREDS not in state.attributes - assert ATTR_MAX_MIREDS not in state.attributes + assert _DEPRECATED_ATTR_MIN_MIREDS.value not in state.attributes + assert _DEPRECATED_ATTR_MAX_MIREDS.value not in state.attributes assert ATTR_SUPPORTED_COLOR_MODES not in state.attributes assert ATTR_EFFECT_LIST not in state.attributes assert ATTR_FRIENDLY_NAME in state.attributes @@ -75,7 +75,7 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant) assert ATTR_MIN_COLOR_TEMP_KELVIN not in state.attributes assert 
ATTR_BRIGHTNESS not in state.attributes assert ATTR_COLOR_MODE not in state.attributes - assert ATTR_COLOR_TEMP not in state.attributes + assert _DEPRECATED_ATTR_COLOR_TEMP.value not in state.attributes assert ATTR_COLOR_TEMP_KELVIN not in state.attributes assert ATTR_EFFECT not in state.attributes assert ATTR_HS_COLOR not in state.attributes From a812b594aac3f274b9ba660b7d778e62d8b9d389 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 13 Dec 2024 16:55:30 +0100 Subject: [PATCH 0645/1198] Fix Tailwind config entry typing in async_unload_entry signature (#133153) --- homeassistant/components/tailwind/__init__.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/homeassistant/components/tailwind/__init__.py b/homeassistant/components/tailwind/__init__.py index c48f5344763..b191d78f2a6 100644 --- a/homeassistant/components/tailwind/__init__.py +++ b/homeassistant/components/tailwind/__init__.py @@ -2,7 +2,6 @@ from __future__ import annotations -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr @@ -38,6 +37,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: TailwindConfigEntry) -> return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: TailwindConfigEntry) -> bool: """Unload Tailwind config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) From 8b6495f456bf60252a9444d75db89efe5b50b781 Mon Sep 17 00:00:00 2001 From: Sid <27780930+autinerd@users.noreply.github.com> Date: Fri, 13 Dec 2024 19:06:44 +0100 Subject: [PATCH 0646/1198] Bump ruff to 0.8.3 (#133163) --- .pre-commit-config.yaml | 2 +- requirements_test_pre_commit.txt | 2 +- script/hassfest/docker/Dockerfile | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5d65225f512..6ecae762dcd 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.8.2 + rev: v0.8.3 hooks: - id: ruff args: diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index aa04dbeb6d0..dcddf267eb4 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,5 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit codespell==2.3.0 -ruff==0.8.2 +ruff==0.8.3 yamllint==1.35.1 diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index a4f33c3ad40..369beb538ed 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -22,7 +22,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.5.8,source=/uv,target=/bin/uv \ --no-cache \ -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ -r /usr/src/homeassistant/requirements.txt \ - stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.2 \ + stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.3 \ PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.0.5 home-assistant-intents==2024.12.9 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 LABEL "name"="hassfest" From e13fa8346a481fcf452ec89ff7d9d8fc6eb59b61 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 13 Dec 2024 20:15:05 +0100 Subject: [PATCH 0647/1198] Update debugpy to 1.8.11 (#133169) --- 
homeassistant/components/debugpy/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/debugpy/manifest.json b/homeassistant/components/debugpy/manifest.json index c6e7f79be49..078af8c67a5 100644 --- a/homeassistant/components/debugpy/manifest.json +++ b/homeassistant/components/debugpy/manifest.json @@ -6,5 +6,5 @@ "integration_type": "service", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["debugpy==1.8.8"] + "requirements": ["debugpy==1.8.11"] } diff --git a/requirements_all.txt b/requirements_all.txt index 07261f2673f..3fab70ecab3 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -730,7 +730,7 @@ datapoint==0.9.9 dbus-fast==2.24.3 # homeassistant.components.debugpy -debugpy==1.8.8 +debugpy==1.8.11 # homeassistant.components.decora_wifi # decora-wifi==1.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4b39c915e97..06fd689a0ff 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -626,7 +626,7 @@ datapoint==0.9.9 dbus-fast==2.24.3 # homeassistant.components.debugpy -debugpy==1.8.8 +debugpy==1.8.11 # homeassistant.components.ecovacs deebot-client==9.4.0 From 50b897bdaa780ed11a7b947ec898531584195b12 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Fri, 13 Dec 2024 13:59:46 -0600 Subject: [PATCH 0648/1198] Add STT error code for cloud authentication failure (#133170) --- .../components/assist_pipeline/pipeline.py | 6 +++ .../assist_pipeline/snapshots/test_init.ambr | 36 ++++++++++++++++ tests/components/assist_pipeline/test_init.py | 41 +++++++++++++++++++ 3 files changed, 83 insertions(+) diff --git a/homeassistant/components/assist_pipeline/pipeline.py b/homeassistant/components/assist_pipeline/pipeline.py index f8f6be3a40f..7dda24c4023 100644 --- a/homeassistant/components/assist_pipeline/pipeline.py +++ b/homeassistant/components/assist_pipeline/pipeline.py @@ -16,6 +16,7 @@ import time from typing import Any, Literal, cast import wave +import hass_nabucasa import voluptuous as vol from homeassistant.components import ( @@ -918,6 +919,11 @@ class PipelineRun: ) except (asyncio.CancelledError, TimeoutError): raise # expected + except hass_nabucasa.auth.Unauthenticated as src_error: + raise SpeechToTextError( + code="cloud-auth-failed", + message="Home Assistant Cloud authentication failed", + ) from src_error except Exception as src_error: _LOGGER.exception("Unexpected error during speech-to-text") raise SpeechToTextError( diff --git a/tests/components/assist_pipeline/snapshots/test_init.ambr b/tests/components/assist_pipeline/snapshots/test_init.ambr index d3241b8ac1f..f63a28efbb7 100644 --- a/tests/components/assist_pipeline/snapshots/test_init.ambr +++ b/tests/components/assist_pipeline/snapshots/test_init.ambr @@ -387,6 +387,42 @@ }), ]) # --- +# name: test_pipeline_from_audio_stream_with_cloud_auth_fail + list([ + dict({ + 'data': dict({ + 'language': 'en', + 'pipeline': , + }), + 'type': , + }), + dict({ + 'data': dict({ + 'engine': 'stt.mock_stt', + 'metadata': dict({ + 'bit_rate': , + 'channel': , + 'codec': , + 'format': , + 'language': 'en-US', + 'sample_rate': , + }), + }), + 'type': , + }), + dict({ + 'data': dict({ + 'code': 'cloud-auth-failed', + 'message': 'Home Assistant Cloud authentication failed', + }), + 'type': , + }), + dict({ + 'data': None, + 'type': , + }), + ]) +# --- # name: test_pipeline_language_used_instead_of_conversation_language list([ dict({ diff 
--git a/tests/components/assist_pipeline/test_init.py b/tests/components/assist_pipeline/test_init.py index a3e65766c34..d4cce4e2e98 100644 --- a/tests/components/assist_pipeline/test_init.py +++ b/tests/components/assist_pipeline/test_init.py @@ -8,6 +8,7 @@ import tempfile from unittest.mock import ANY, patch import wave +import hass_nabucasa import pytest from syrupy.assertion import SnapshotAssertion @@ -1173,3 +1174,43 @@ async def test_pipeline_language_used_instead_of_conversation_language( mock_async_converse.call_args_list[0].kwargs.get("language") == pipeline.language ) + + +async def test_pipeline_from_audio_stream_with_cloud_auth_fail( + hass: HomeAssistant, + mock_stt_provider_entity: MockSTTProviderEntity, + init_components, + snapshot: SnapshotAssertion, +) -> None: + """Test creating a pipeline from an audio stream but the cloud authentication fails.""" + + events: list[assist_pipeline.PipelineEvent] = [] + + async def audio_data(): + yield b"audio" + + with patch.object( + mock_stt_provider_entity, + "async_process_audio_stream", + side_effect=hass_nabucasa.auth.Unauthenticated, + ): + await assist_pipeline.async_pipeline_from_audio_stream( + hass, + context=Context(), + event_callback=events.append, + stt_metadata=stt.SpeechMetadata( + language="", + format=stt.AudioFormats.WAV, + codec=stt.AudioCodecs.PCM, + bit_rate=stt.AudioBitRates.BITRATE_16, + sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, + channel=stt.AudioChannels.CHANNEL_MONO, + ), + stt_stream=audio_data(), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), + ) + + assert process_events(events) == snapshot + assert len(events) == 4 # run start, stt start, error, run end + assert events[2].type == assist_pipeline.PipelineEventType.ERROR + assert events[2].data["code"] == "cloud-auth-failed" From f06fda80234a8ac429dc4216ee4ddd7758d71e96 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Fri, 13 Dec 2024 14:19:43 -0600 Subject: [PATCH 0649/1198] Add response slot to HassRespond intent (#133162) --- homeassistant/components/intent/__init__.py | 16 +++++++++++++--- tests/components/intent/test_init.py | 11 +++++++++++ 2 files changed, 24 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/intent/__init__.py b/homeassistant/components/intent/__init__.py index 1ffb8747d91..71ef40ad369 100644 --- a/homeassistant/components/intent/__init__.py +++ b/homeassistant/components/intent/__init__.py @@ -139,7 +139,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: intent.async_register(hass, TimerStatusIntentHandler()) intent.async_register(hass, GetCurrentDateIntentHandler()) intent.async_register(hass, GetCurrentTimeIntentHandler()) - intent.async_register(hass, HelloIntentHandler()) + intent.async_register(hass, RespondIntentHandler()) return True @@ -423,15 +423,25 @@ class GetCurrentTimeIntentHandler(intent.IntentHandler): return response -class HelloIntentHandler(intent.IntentHandler): +class RespondIntentHandler(intent.IntentHandler): """Responds with no action.""" intent_type = intent.INTENT_RESPOND description = "Returns the provided response with no action." 
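+    # The optional "response" slot carries free text; when provided it is set as
+    # the speech of the returned response, otherwise the response stays empty.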
+ slot_schema = { + vol.Optional("response"): cv.string, + } + async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: """Return the provided response, but take no action.""" - return intent_obj.create_response() + slots = self.async_validate_slots(intent_obj.slots) + response = intent_obj.create_response() + + if "response" in slots: + response.async_set_speech(slots["response"]["value"]) + + return response async def _async_process_intent( diff --git a/tests/components/intent/test_init.py b/tests/components/intent/test_init.py index 20c0f9d8d44..0db9682d0ad 100644 --- a/tests/components/intent/test_init.py +++ b/tests/components/intent/test_init.py @@ -466,3 +466,14 @@ async def test_intents_with_no_responses(hass: HomeAssistant) -> None: for intent_name in (intent.INTENT_NEVERMIND, intent.INTENT_RESPOND): response = await intent.async_handle(hass, "test", intent_name, {}) assert not response.speech + + +async def test_intents_respond_intent(hass: HomeAssistant) -> None: + """Test HassRespond intent with a response slot value.""" + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component(hass, "intent", {}) + + response = await intent.async_handle( + hass, "test", intent.INTENT_RESPOND, {"response": {"value": "Hello World"}} + ) + assert response.speech["plain"]["speech"] == "Hello World" From 0c8db8c8d6e0049cdf830fd176ed1c07c8a78712 Mon Sep 17 00:00:00 2001 From: Sid <27780930+autinerd@users.noreply.github.com> Date: Fri, 13 Dec 2024 22:29:18 +0100 Subject: [PATCH 0650/1198] Add eheimdigital integration (#126757) Co-authored-by: Franck Nijhof --- .strict-typing | 1 + CODEOWNERS | 2 + .../components/eheimdigital/__init__.py | 51 +++ .../components/eheimdigital/config_flow.py | 127 +++++++ .../components/eheimdigital/const.py | 17 + .../components/eheimdigital/coordinator.py | 78 +++++ .../components/eheimdigital/entity.py | 53 +++ .../components/eheimdigital/light.py | 127 +++++++ .../components/eheimdigital/manifest.json | 15 + .../eheimdigital/quality_scale.yaml | 70 ++++ .../components/eheimdigital/strings.json | 39 +++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 6 + homeassistant/generated/zeroconf.py | 4 + mypy.ini | 10 + requirements_all.txt | 3 + requirements_test_all.txt | 3 + tests/components/eheimdigital/__init__.py | 1 + tests/components/eheimdigital/conftest.py | 58 ++++ .../eheimdigital/snapshots/test_light.ambr | 316 ++++++++++++++++++ .../eheimdigital/test_config_flow.py | 212 ++++++++++++ tests/components/eheimdigital/test_init.py | 55 +++ tests/components/eheimdigital/test_light.py | 249 ++++++++++++++ 23 files changed, 1498 insertions(+) create mode 100644 homeassistant/components/eheimdigital/__init__.py create mode 100644 homeassistant/components/eheimdigital/config_flow.py create mode 100644 homeassistant/components/eheimdigital/const.py create mode 100644 homeassistant/components/eheimdigital/coordinator.py create mode 100644 homeassistant/components/eheimdigital/entity.py create mode 100644 homeassistant/components/eheimdigital/light.py create mode 100644 homeassistant/components/eheimdigital/manifest.json create mode 100644 homeassistant/components/eheimdigital/quality_scale.yaml create mode 100644 homeassistant/components/eheimdigital/strings.json create mode 100644 tests/components/eheimdigital/__init__.py create mode 100644 tests/components/eheimdigital/conftest.py create mode 100644 tests/components/eheimdigital/snapshots/test_light.ambr create 
mode 100644 tests/components/eheimdigital/test_config_flow.py create mode 100644 tests/components/eheimdigital/test_init.py create mode 100644 tests/components/eheimdigital/test_light.py diff --git a/.strict-typing b/.strict-typing index ade5d6afb7b..66dae130fb5 100644 --- a/.strict-typing +++ b/.strict-typing @@ -170,6 +170,7 @@ homeassistant.components.easyenergy.* homeassistant.components.ecovacs.* homeassistant.components.ecowitt.* homeassistant.components.efergy.* +homeassistant.components.eheimdigital.* homeassistant.components.electrasmart.* homeassistant.components.electric_kiwi.* homeassistant.components.elevenlabs.* diff --git a/CODEOWNERS b/CODEOWNERS index afd150ffb0c..06eb70c7576 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -387,6 +387,8 @@ build.json @home-assistant/supervisor /homeassistant/components/efergy/ @tkdrob /tests/components/efergy/ @tkdrob /homeassistant/components/egardia/ @jeroenterheerdt +/homeassistant/components/eheimdigital/ @autinerd +/tests/components/eheimdigital/ @autinerd /homeassistant/components/electrasmart/ @jafar-atili /tests/components/electrasmart/ @jafar-atili /homeassistant/components/electric_kiwi/ @mikey0000 diff --git a/homeassistant/components/eheimdigital/__init__.py b/homeassistant/components/eheimdigital/__init__.py new file mode 100644 index 00000000000..cf08f45bed5 --- /dev/null +++ b/homeassistant/components/eheimdigital/__init__.py @@ -0,0 +1,51 @@ +"""The EHEIM Digital integration.""" + +from __future__ import annotations + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceEntry + +from .const import DOMAIN +from .coordinator import EheimDigitalUpdateCoordinator + +PLATFORMS = [Platform.LIGHT] + +type EheimDigitalConfigEntry = ConfigEntry[EheimDigitalUpdateCoordinator] + + +async def async_setup_entry( + hass: HomeAssistant, entry: EheimDigitalConfigEntry +) -> bool: + """Set up EHEIM Digital from a config entry.""" + + coordinator = EheimDigitalUpdateCoordinator(hass) + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry( + hass: HomeAssistant, entry: EheimDigitalConfigEntry +) -> bool: + """Unload a config entry.""" + await entry.runtime_data.hub.close() + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def async_remove_config_entry_device( + hass: HomeAssistant, + config_entry: EheimDigitalConfigEntry, + device_entry: DeviceEntry, +) -> bool: + """Remove a config entry from a device.""" + return not any( + identifier + for identifier in device_entry.identifiers + if identifier[0] == DOMAIN + and identifier[1] in config_entry.runtime_data.hub.devices + ) diff --git a/homeassistant/components/eheimdigital/config_flow.py b/homeassistant/components/eheimdigital/config_flow.py new file mode 100644 index 00000000000..6994c6f65b5 --- /dev/null +++ b/homeassistant/components/eheimdigital/config_flow.py @@ -0,0 +1,127 @@ +"""Config flow for EHEIM Digital.""" + +from __future__ import annotations + +import asyncio +from typing import TYPE_CHECKING, Any + +from aiohttp import ClientError +from eheimdigital.device import EheimDigitalDevice +from eheimdigital.hub import EheimDigitalHub +import voluptuous as vol + +from homeassistant.components.zeroconf import ZeroconfServiceInfo +from 
homeassistant.config_entries import SOURCE_USER, ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_HOST +from homeassistant.helpers import selector +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .const import DOMAIN, LOGGER + +CONFIG_SCHEMA = vol.Schema( + {vol.Required(CONF_HOST, default="eheimdigital.local"): selector.TextSelector()} +) + + +class EheimDigitalConfigFlow(ConfigFlow, domain=DOMAIN): + """The EHEIM Digital config flow.""" + + def __init__(self) -> None: + """Initialize the config flow.""" + super().__init__() + self.data: dict[str, Any] = {} + self.main_device_added_event = asyncio.Event() + + async def async_step_zeroconf( + self, discovery_info: ZeroconfServiceInfo + ) -> ConfigFlowResult: + """Handle zeroconf discovery.""" + self.data[CONF_HOST] = host = discovery_info.host + + self._async_abort_entries_match(self.data) + + hub = EheimDigitalHub( + host=host, + session=async_get_clientsession(self.hass), + loop=self.hass.loop, + main_device_added_event=self.main_device_added_event, + ) + try: + await hub.connect() + + async with asyncio.timeout(2): + # This event gets triggered when the first message is received from + # the device, it contains the data necessary to create the main device. + # This removes the race condition where the main device is accessed + # before the response from the device is parsed. + await self.main_device_added_event.wait() + if TYPE_CHECKING: + # At this point the main device is always set + assert isinstance(hub.main, EheimDigitalDevice) + await hub.close() + except (ClientError, TimeoutError): + return self.async_abort(reason="cannot_connect") + except Exception: # noqa: BLE001 + return self.async_abort(reason="unknown") + await self.async_set_unique_id(hub.main.mac_address) + self._abort_if_unique_id_configured(updates={CONF_HOST: host}) + return await self.async_step_discovery_confirm() + + async def async_step_discovery_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm discovery.""" + if user_input is not None: + return self.async_create_entry( + title=self.data[CONF_HOST], + data={CONF_HOST: self.data[CONF_HOST]}, + ) + + self._set_confirm_only() + return self.async_show_form(step_id="discovery_confirm") + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the user step.""" + if user_input is None: + return self.async_show_form(step_id=SOURCE_USER, data_schema=CONFIG_SCHEMA) + + self._async_abort_entries_match(user_input) + errors: dict[str, str] = {} + hub = EheimDigitalHub( + host=user_input[CONF_HOST], + session=async_get_clientsession(self.hass), + loop=self.hass.loop, + main_device_added_event=self.main_device_added_event, + ) + + try: + await hub.connect() + + async with asyncio.timeout(2): + # This event gets triggered when the first message is received from + # the device, it contains the data necessary to create the main device. + # This removes the race condition where the main device is accessed + # before the response from the device is parsed. 
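+                # If no message arrives within the 2 second window, the resulting
+                # TimeoutError is caught below and reported as "cannot_connect".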
+ await self.main_device_added_event.wait() + if TYPE_CHECKING: + # At this point the main device is always set + assert isinstance(hub.main, EheimDigitalDevice) + await self.async_set_unique_id( + hub.main.mac_address, raise_on_progress=False + ) + await hub.close() + except (ClientError, TimeoutError): + errors["base"] = "cannot_connect" + except Exception: # noqa: BLE001 + errors["base"] = "unknown" + LOGGER.exception("Unknown exception occurred") + else: + self._abort_if_unique_id_configured() + return self.async_create_entry(data=user_input, title=user_input[CONF_HOST]) + return self.async_show_form( + step_id=SOURCE_USER, + data_schema=CONFIG_SCHEMA, + errors=errors, + ) diff --git a/homeassistant/components/eheimdigital/const.py b/homeassistant/components/eheimdigital/const.py new file mode 100644 index 00000000000..5ed9303be40 --- /dev/null +++ b/homeassistant/components/eheimdigital/const.py @@ -0,0 +1,17 @@ +"""Constants for the EHEIM Digital integration.""" + +from logging import Logger, getLogger + +from eheimdigital.types import LightMode + +from homeassistant.components.light import EFFECT_OFF + +LOGGER: Logger = getLogger(__package__) +DOMAIN = "eheimdigital" + +EFFECT_DAYCL_MODE = "daycl_mode" + +EFFECT_TO_LIGHT_MODE = { + EFFECT_DAYCL_MODE: LightMode.DAYCL_MODE, + EFFECT_OFF: LightMode.MAN_MODE, +} diff --git a/homeassistant/components/eheimdigital/coordinator.py b/homeassistant/components/eheimdigital/coordinator.py new file mode 100644 index 00000000000..f122a1227c5 --- /dev/null +++ b/homeassistant/components/eheimdigital/coordinator.py @@ -0,0 +1,78 @@ +"""Data update coordinator for the EHEIM Digital integration.""" + +from __future__ import annotations + +from collections.abc import Callable, Coroutine +from typing import Any + +from aiohttp import ClientError +from eheimdigital.device import EheimDigitalDevice +from eheimdigital.hub import EheimDigitalHub +from eheimdigital.types import EheimDeviceType + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.entity_component import DEFAULT_SCAN_INTERVAL +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN, LOGGER + +type AsyncSetupDeviceEntitiesCallback = Callable[[str], Coroutine[Any, Any, None]] + + +class EheimDigitalUpdateCoordinator( + DataUpdateCoordinator[dict[str, EheimDigitalDevice]] +): + """The EHEIM Digital data update coordinator.""" + + config_entry: ConfigEntry + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize the EHEIM Digital data update coordinator.""" + super().__init__( + hass, LOGGER, name=DOMAIN, update_interval=DEFAULT_SCAN_INTERVAL + ) + self.hub = EheimDigitalHub( + host=self.config_entry.data[CONF_HOST], + session=async_get_clientsession(hass), + loop=hass.loop, + receive_callback=self._async_receive_callback, + device_found_callback=self._async_device_found, + ) + self.known_devices: set[str] = set() + self.platform_callbacks: set[AsyncSetupDeviceEntitiesCallback] = set() + + def add_platform_callback( + self, + async_setup_device_entities: AsyncSetupDeviceEntitiesCallback, + ) -> None: + """Add the setup callbacks from a specific platform.""" + self.platform_callbacks.add(async_setup_device_entities) + + async def _async_device_found( + self, device_address: str, device_type: EheimDeviceType + ) -> None: + """Set up a 
new device found. + + This function is called from the library whenever a new device is added. + """ + + if device_address not in self.known_devices: + for platform_callback in self.platform_callbacks: + await platform_callback(device_address) + + async def _async_receive_callback(self) -> None: + self.async_set_updated_data(self.hub.devices) + + async def _async_setup(self) -> None: + await self.hub.connect() + await self.hub.update() + + async def _async_update_data(self) -> dict[str, EheimDigitalDevice]: + try: + await self.hub.update() + except ClientError as ex: + raise UpdateFailed from ex + return self.data diff --git a/homeassistant/components/eheimdigital/entity.py b/homeassistant/components/eheimdigital/entity.py new file mode 100644 index 00000000000..c0f91a4b798 --- /dev/null +++ b/homeassistant/components/eheimdigital/entity.py @@ -0,0 +1,53 @@ +"""Base entity for EHEIM Digital.""" + +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING + +from eheimdigital.device import EheimDigitalDevice + +from homeassistant.const import CONF_HOST +from homeassistant.core import callback +from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import EheimDigitalUpdateCoordinator + + +class EheimDigitalEntity[_DeviceT: EheimDigitalDevice]( + CoordinatorEntity[EheimDigitalUpdateCoordinator], ABC +): + """Represent a EHEIM Digital entity.""" + + _attr_has_entity_name = True + + def __init__( + self, coordinator: EheimDigitalUpdateCoordinator, device: _DeviceT + ) -> None: + """Initialize a EHEIM Digital entity.""" + super().__init__(coordinator) + if TYPE_CHECKING: + # At this point at least one device is found and so there is always a main device set + assert isinstance(coordinator.hub.main, EheimDigitalDevice) + self._attr_device_info = DeviceInfo( + configuration_url=f"http://{coordinator.config_entry.data[CONF_HOST]}", + name=device.name, + connections={(CONNECTION_NETWORK_MAC, device.mac_address)}, + manufacturer="EHEIM", + model=device.device_type.model_name, + identifiers={(DOMAIN, device.mac_address)}, + suggested_area=device.aquarium_name, + sw_version=device.sw_version, + via_device=(DOMAIN, coordinator.hub.main.mac_address), + ) + self._device = device + self._device_address = device.mac_address + + @abstractmethod + def _async_update_attrs(self) -> None: ... + + @callback + def _handle_coordinator_update(self) -> None: + """Update attributes when the coordinator updates.""" + self._async_update_attrs() + super()._handle_coordinator_update() diff --git a/homeassistant/components/eheimdigital/light.py b/homeassistant/components/eheimdigital/light.py new file mode 100644 index 00000000000..a119e0bda8d --- /dev/null +++ b/homeassistant/components/eheimdigital/light.py @@ -0,0 +1,127 @@ +"""EHEIM Digital lights.""" + +from typing import Any + +from eheimdigital.classic_led_ctrl import EheimDigitalClassicLEDControl +from eheimdigital.types import EheimDigitalClientError, LightMode + +from homeassistant.components.light import ( + ATTR_BRIGHTNESS, + ATTR_EFFECT, + EFFECT_OFF, + ColorMode, + LightEntity, + LightEntityFeature, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util.color import brightness_to_value, value_to_brightness + +from . 
import EheimDigitalConfigEntry +from .const import EFFECT_DAYCL_MODE, EFFECT_TO_LIGHT_MODE +from .coordinator import EheimDigitalUpdateCoordinator +from .entity import EheimDigitalEntity + +BRIGHTNESS_SCALE = (1, 100) + +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + entry: EheimDigitalConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the callbacks for the coordinator so lights can be added as devices are found.""" + coordinator = entry.runtime_data + + async def async_setup_device_entities(device_address: str) -> None: + """Set up the light entities for a device.""" + device = coordinator.hub.devices[device_address] + entities: list[EheimDigitalClassicLEDControlLight] = [] + + if isinstance(device, EheimDigitalClassicLEDControl): + for channel in range(2): + if len(device.tankconfig[channel]) > 0: + entities.append( + EheimDigitalClassicLEDControlLight(coordinator, device, channel) + ) + coordinator.known_devices.add(device.mac_address) + async_add_entities(entities) + + coordinator.add_platform_callback(async_setup_device_entities) + + for device_address in entry.runtime_data.hub.devices: + await async_setup_device_entities(device_address) + + +class EheimDigitalClassicLEDControlLight( + EheimDigitalEntity[EheimDigitalClassicLEDControl], LightEntity +): + """Represent a EHEIM Digital classicLEDcontrol light.""" + + _attr_supported_color_modes = {ColorMode.BRIGHTNESS} + _attr_color_mode = ColorMode.BRIGHTNESS + _attr_effect_list = [EFFECT_DAYCL_MODE] + _attr_supported_features = LightEntityFeature.EFFECT + _attr_translation_key = "channel" + + def __init__( + self, + coordinator: EheimDigitalUpdateCoordinator, + device: EheimDigitalClassicLEDControl, + channel: int, + ) -> None: + """Initialize an EHEIM Digital classicLEDcontrol light entity.""" + super().__init__(coordinator, device) + self._channel = channel + self._attr_translation_placeholders = {"channel_id": str(channel)} + self._attr_unique_id = f"{self._device_address}_{channel}" + self._async_update_attrs() + + @property + def available(self) -> bool: + """Return whether the entity is available.""" + return super().available and self._device.light_level[self._channel] is not None + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on the light.""" + if ATTR_EFFECT in kwargs: + await self._device.set_light_mode(EFFECT_TO_LIGHT_MODE[kwargs[ATTR_EFFECT]]) + return + if ATTR_BRIGHTNESS in kwargs: + if self._device.light_mode == LightMode.DAYCL_MODE: + await self._device.set_light_mode(LightMode.MAN_MODE) + try: + await self._device.turn_on( + int(brightness_to_value(BRIGHTNESS_SCALE, kwargs[ATTR_BRIGHTNESS])), + self._channel, + ) + except EheimDigitalClientError as err: + raise HomeAssistantError from err + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off the light.""" + if self._device.light_mode == LightMode.DAYCL_MODE: + await self._device.set_light_mode(LightMode.MAN_MODE) + try: + await self._device.turn_off(self._channel) + except EheimDigitalClientError as err: + raise HomeAssistantError from err + + def _async_update_attrs(self) -> None: + light_level = self._device.light_level[self._channel] + + self._attr_is_on = light_level > 0 if light_level is not None else None + self._attr_brightness = ( + value_to_brightness(BRIGHTNESS_SCALE, light_level) + if light_level is not None + else None + ) + self._attr_effect = ( + EFFECT_DAYCL_MODE + if self._device.light_mode == 
LightMode.DAYCL_MODE + else EFFECT_OFF + ) diff --git a/homeassistant/components/eheimdigital/manifest.json b/homeassistant/components/eheimdigital/manifest.json new file mode 100644 index 00000000000..159aecd6b6c --- /dev/null +++ b/homeassistant/components/eheimdigital/manifest.json @@ -0,0 +1,15 @@ +{ + "domain": "eheimdigital", + "name": "EHEIM Digital", + "codeowners": ["@autinerd"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/eheimdigital", + "integration_type": "hub", + "iot_class": "local_polling", + "loggers": ["eheimdigital"], + "quality_scale": "bronze", + "requirements": ["eheimdigital==1.0.3"], + "zeroconf": [ + { "type": "_http._tcp.local.", "name": "eheimdigital._http._tcp.local." } + ] +} diff --git a/homeassistant/components/eheimdigital/quality_scale.yaml b/homeassistant/components/eheimdigital/quality_scale.yaml new file mode 100644 index 00000000000..a56551a14f6 --- /dev/null +++ b/homeassistant/components/eheimdigital/quality_scale.yaml @@ -0,0 +1,70 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: No service actions implemented. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: No service actions implemented. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: done + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: No service actions implemented. + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: This integration doesn't have an options flow. + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: done + reauthentication-flow: + status: exempt + comment: This integration requires no authentication. + test-coverage: done + + # Gold + devices: done + diagnostics: todo + discovery-update-info: done + discovery: done + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: done + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: todo + reconfiguration-flow: todo + repair-issues: todo + stale-devices: done + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/eheimdigital/strings.json b/homeassistant/components/eheimdigital/strings.json new file mode 100644 index 00000000000..0e6fa6a0814 --- /dev/null +++ b/homeassistant/components/eheimdigital/strings.json @@ -0,0 +1,39 @@ +{ + "config": { + "step": { + "discovery_confirm": { + "description": "[%key:common::config_flow::description::confirm_setup%]" + }, + "user": { + "data": { + "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The host or IP address of your main device. Only needed to change if 'eheimdigital' doesn't work." 
+ } + } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]" + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + } + }, + "entity": { + "light": { + "channel": { + "name": "Channel {channel_id}", + "state_attributes": { + "effect": { + "state": { + "daycl_mode": "Daycycle mode" + } + } + } + } + } + } +} diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 930bda4e81b..3b33d31a2a2 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -155,6 +155,7 @@ FLOWS = { "ecowitt", "edl21", "efergy", + "eheimdigital", "electrasmart", "electric_kiwi", "elevenlabs", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index ecbe3f0dcbf..1530e308e7d 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -1524,6 +1524,12 @@ "config_flow": false, "iot_class": "local_polling" }, + "eheimdigital": { + "name": "EHEIM Digital", + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_polling" + }, "electrasmart": { "name": "Electra Smart", "integration_type": "hub", diff --git a/homeassistant/generated/zeroconf.py b/homeassistant/generated/zeroconf.py index b04e6ad6f52..e5b50841d11 100644 --- a/homeassistant/generated/zeroconf.py +++ b/homeassistant/generated/zeroconf.py @@ -524,6 +524,10 @@ ZEROCONF = { "domain": "bosch_shc", "name": "bosch shc*", }, + { + "domain": "eheimdigital", + "name": "eheimdigital._http._tcp.local.", + }, { "domain": "lektrico", "name": "lektrico*", diff --git a/mypy.ini b/mypy.ini index 2d8e0ea3f61..6daf54a8eb7 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1455,6 +1455,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.eheimdigital.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.electrasmart.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/requirements_all.txt b/requirements_all.txt index 3fab70ecab3..7eab703836c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -809,6 +809,9 @@ ebusdpy==0.0.17 # homeassistant.components.ecoal_boiler ecoaliface==0.4.0 +# homeassistant.components.eheimdigital +eheimdigital==1.0.3 + # homeassistant.components.electric_kiwi electrickiwi-api==0.8.5 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 06fd689a0ff..2a785e363f7 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -687,6 +687,9 @@ eagle100==0.1.1 # homeassistant.components.easyenergy easyenergy==2.1.2 +# homeassistant.components.eheimdigital +eheimdigital==1.0.3 + # homeassistant.components.electric_kiwi electrickiwi-api==0.8.5 diff --git a/tests/components/eheimdigital/__init__.py b/tests/components/eheimdigital/__init__.py new file mode 100644 index 00000000000..1f608f868de --- /dev/null +++ b/tests/components/eheimdigital/__init__.py @@ -0,0 +1 @@ +"""Tests for the EHEIM Digital integration.""" diff --git a/tests/components/eheimdigital/conftest.py 
b/tests/components/eheimdigital/conftest.py new file mode 100644 index 00000000000..cdad628de6b --- /dev/null +++ b/tests/components/eheimdigital/conftest.py @@ -0,0 +1,58 @@ +"""Configurations for the EHEIM Digital tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +from eheimdigital.classic_led_ctrl import EheimDigitalClassicLEDControl +from eheimdigital.hub import EheimDigitalHub +from eheimdigital.types import EheimDeviceType, LightMode +import pytest + +from homeassistant.components.eheimdigital.const import DOMAIN +from homeassistant.const import CONF_HOST + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "eheimdigital"}, unique_id="00:00:00:00:00:01" + ) + + +@pytest.fixture +def classic_led_ctrl_mock(): + """Mock a classicLEDcontrol device.""" + classic_led_ctrl_mock = MagicMock(spec=EheimDigitalClassicLEDControl) + classic_led_ctrl_mock.tankconfig = [["CLASSIC_DAYLIGHT"], []] + classic_led_ctrl_mock.mac_address = "00:00:00:00:00:01" + classic_led_ctrl_mock.device_type = ( + EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + classic_led_ctrl_mock.name = "Mock classicLEDcontrol+e" + classic_led_ctrl_mock.aquarium_name = "Mock Aquarium" + classic_led_ctrl_mock.light_mode = LightMode.DAYCL_MODE + classic_led_ctrl_mock.light_level = (10, 39) + return classic_led_ctrl_mock + + +@pytest.fixture +def eheimdigital_hub_mock(classic_led_ctrl_mock: MagicMock) -> Generator[AsyncMock]: + """Mock eheimdigital hub.""" + with ( + patch( + "homeassistant.components.eheimdigital.coordinator.EheimDigitalHub", + spec=EheimDigitalHub, + ) as eheimdigital_hub_mock, + patch( + "homeassistant.components.eheimdigital.config_flow.EheimDigitalHub", + new=eheimdigital_hub_mock, + ), + ): + eheimdigital_hub_mock.return_value.devices = { + "00:00:00:00:00:01": classic_led_ctrl_mock + } + eheimdigital_hub_mock.return_value.main = classic_led_ctrl_mock + yield eheimdigital_hub_mock diff --git a/tests/components/eheimdigital/snapshots/test_light.ambr b/tests/components/eheimdigital/snapshots/test_light.ambr new file mode 100644 index 00000000000..8df4745997e --- /dev/null +++ b/tests/components/eheimdigital/snapshots/test_light.ambr @@ -0,0 +1,316 @@ +# serializer version: 1 +# name: test_dynamic_new_devices[light.mock_classicledcontrol_e_channel_0-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'daycl_mode', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_classicledcontrol_e_channel_0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Channel 0', + 'platform': 'eheimdigital', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'channel', + 'unique_id': '00:00:00:00:00:01_0', + 'unit_of_measurement': None, + }) +# --- +# name: test_dynamic_new_devices[light.mock_classicledcontrol_e_channel_0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 26, + 'color_mode': , + 'effect': 'daycl_mode', + 'effect_list': list([ + 
'daycl_mode', + ]), + 'friendly_name': 'Mock classicLEDcontrol+e Channel 0', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.mock_classicledcontrol_e_channel_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_setup_classic_led_ctrl[tankconfig0][light.mock_classicledcontrol_e_channel_0-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'daycl_mode', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_classicledcontrol_e_channel_0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Channel 0', + 'platform': 'eheimdigital', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'channel', + 'unique_id': '00:00:00:00:00:01_0', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_classic_led_ctrl[tankconfig0][light.mock_classicledcontrol_e_channel_0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 26, + 'color_mode': , + 'effect': 'daycl_mode', + 'effect_list': list([ + 'daycl_mode', + ]), + 'friendly_name': 'Mock classicLEDcontrol+e Channel 0', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.mock_classicledcontrol_e_channel_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_setup_classic_led_ctrl[tankconfig1][light.mock_classicledcontrol_e_channel_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'daycl_mode', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_classicledcontrol_e_channel_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Channel 1', + 'platform': 'eheimdigital', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'channel', + 'unique_id': '00:00:00:00:00:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_classic_led_ctrl[tankconfig1][light.mock_classicledcontrol_e_channel_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 99, + 'color_mode': , + 'effect': 'daycl_mode', + 'effect_list': list([ + 'daycl_mode', + ]), + 'friendly_name': 'Mock classicLEDcontrol+e Channel 1', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.mock_classicledcontrol_e_channel_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_setup_classic_led_ctrl[tankconfig2][light.mock_classicledcontrol_e_channel_0-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'daycl_mode', + ]), + 'supported_color_modes': list([ + , + ]), + 
}), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_classicledcontrol_e_channel_0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Channel 0', + 'platform': 'eheimdigital', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'channel', + 'unique_id': '00:00:00:00:00:01_0', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_classic_led_ctrl[tankconfig2][light.mock_classicledcontrol_e_channel_0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 26, + 'color_mode': , + 'effect': 'daycl_mode', + 'effect_list': list([ + 'daycl_mode', + ]), + 'friendly_name': 'Mock classicLEDcontrol+e Channel 0', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.mock_classicledcontrol_e_channel_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_setup_classic_led_ctrl[tankconfig2][light.mock_classicledcontrol_e_channel_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'daycl_mode', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_classicledcontrol_e_channel_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Channel 1', + 'platform': 'eheimdigital', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'channel', + 'unique_id': '00:00:00:00:00:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_classic_led_ctrl[tankconfig2][light.mock_classicledcontrol_e_channel_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 99, + 'color_mode': , + 'effect': 'daycl_mode', + 'effect_list': list([ + 'daycl_mode', + ]), + 'friendly_name': 'Mock classicLEDcontrol+e Channel 1', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.mock_classicledcontrol_e_channel_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/eheimdigital/test_config_flow.py b/tests/components/eheimdigital/test_config_flow.py new file mode 100644 index 00000000000..e75cf31eb98 --- /dev/null +++ b/tests/components/eheimdigital/test_config_flow.py @@ -0,0 +1,212 @@ +"""Tests the config flow of EHEIM Digital.""" + +from ipaddress import ip_address +from unittest.mock import AsyncMock, MagicMock, patch + +from aiohttp import ClientConnectionError +import pytest + +from homeassistant.components.eheimdigital.const import DOMAIN +from homeassistant.components.zeroconf import ZeroconfServiceInfo +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +ZEROCONF_DISCOVERY = ZeroconfServiceInfo( + ip_address=ip_address("192.0.2.1"), + 
ip_addresses=[ip_address("192.0.2.1")], + hostname="eheimdigital.local.", + name="eheimdigital._http._tcp.local.", + port=80, + type="_http._tcp.local.", + properties={}, +) + +USER_INPUT = {CONF_HOST: "eheimdigital"} + + +@patch("homeassistant.components.eheimdigital.config_flow.asyncio.Event", new=AsyncMock) +async def test_full_flow(hass: HomeAssistant, eheimdigital_hub_mock: AsyncMock) -> None: + """Test full flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == USER_INPUT[CONF_HOST] + assert result["data"] == USER_INPUT + assert ( + result["result"].unique_id + == eheimdigital_hub_mock.return_value.main.mac_address + ) + + +@patch("homeassistant.components.eheimdigital.config_flow.asyncio.Event", new=AsyncMock) +@pytest.mark.parametrize( + ("side_effect", "error_value"), + [(ClientConnectionError(), "cannot_connect"), (Exception(), "unknown")], +) +async def test_flow_errors( + hass: HomeAssistant, + eheimdigital_hub_mock: AsyncMock, + side_effect: BaseException, + error_value: str, +) -> None: + """Test flow errors.""" + eheimdigital_hub_mock.return_value.connect.side_effect = side_effect + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error_value} + + eheimdigital_hub_mock.return_value.connect.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == USER_INPUT[CONF_HOST] + assert result["data"] == USER_INPUT + assert ( + result["result"].unique_id + == eheimdigital_hub_mock.return_value.main.mac_address + ) + + +@patch("homeassistant.components.eheimdigital.config_flow.asyncio.Event", new=AsyncMock) +async def test_zeroconf_flow( + hass: HomeAssistant, eheimdigital_hub_mock: AsyncMock +) -> None: + """Test zeroconf flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DISCOVERY, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "discovery_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == ZEROCONF_DISCOVERY.host + assert result["data"] == { + CONF_HOST: ZEROCONF_DISCOVERY.host, + } + assert ( + result["result"].unique_id + == eheimdigital_hub_mock.return_value.main.mac_address + ) + + +@pytest.mark.parametrize( + ("side_effect", "error_value"), + [(ClientConnectionError(), "cannot_connect"), (Exception(), "unknown")], +) +@patch("homeassistant.components.eheimdigital.config_flow.asyncio.Event", new=AsyncMock) 
+async def test_zeroconf_flow_errors( + hass: HomeAssistant, + eheimdigital_hub_mock: MagicMock, + side_effect: BaseException, + error_value: str, +) -> None: + """Test zeroconf flow errors.""" + eheimdigital_hub_mock.return_value.connect.side_effect = side_effect + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DISCOVERY, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == error_value + + +@patch("homeassistant.components.eheimdigital.config_flow.asyncio.Event", new=AsyncMock) +async def test_abort(hass: HomeAssistant, eheimdigital_hub_mock: AsyncMock) -> None: + """Test flow abort on matching data or unique_id.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == USER_INPUT[CONF_HOST] + assert result["data"] == USER_INPUT + assert ( + result["result"].unique_id + == eheimdigital_hub_mock.return_value.main.mac_address + ) + + result2 = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + await hass.async_block_till_done() + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == "user" + + result2 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + USER_INPUT, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "already_configured" + + result3 = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + await hass.async_block_till_done() + assert result3["type"] is FlowResultType.FORM + assert result3["step_id"] == "user" + + result2 = await hass.config_entries.flow.async_configure( + result3["flow_id"], + {CONF_HOST: "eheimdigital2"}, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "already_configured" diff --git a/tests/components/eheimdigital/test_init.py b/tests/components/eheimdigital/test_init.py new file mode 100644 index 00000000000..211a8b3b6fd --- /dev/null +++ b/tests/components/eheimdigital/test_init.py @@ -0,0 +1,55 @@ +"""Tests for the init module.""" + +from unittest.mock import MagicMock + +from eheimdigital.types import EheimDeviceType + +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry +from tests.typing import WebSocketGenerator + + +async def test_remove_device( + hass: HomeAssistant, + eheimdigital_hub_mock: MagicMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test removing a device.""" + assert await async_setup_component(hass, "config", {}) + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await eheimdigital_hub_mock.call_args.kwargs["device_found_callback"]( + "00:00:00:00:00:01", EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + await hass.async_block_till_done() + + 
mac_address: str = eheimdigital_hub_mock.return_value.main.mac_address + + device_entry = device_registry.async_get_or_create( + config_entry_id=mock_config_entry.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, mac_address)}, + ) + assert device_entry is not None + + hass_client = await hass_ws_client(hass) + + # Do not allow to delete a connected device + response = await hass_client.remove_device( + device_entry.id, mock_config_entry.entry_id + ) + assert not response["success"] + + eheimdigital_hub_mock.return_value.devices = {} + + # Allow to delete a not connected device + response = await hass_client.remove_device( + device_entry.id, mock_config_entry.entry_id + ) + assert response["success"] diff --git a/tests/components/eheimdigital/test_light.py b/tests/components/eheimdigital/test_light.py new file mode 100644 index 00000000000..da224979c43 --- /dev/null +++ b/tests/components/eheimdigital/test_light.py @@ -0,0 +1,249 @@ +"""Tests for the light module.""" + +from datetime import timedelta +from unittest.mock import MagicMock, patch + +from aiohttp import ClientError +from eheimdigital.types import EheimDeviceType, LightMode +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.eheimdigital.const import EFFECT_DAYCL_MODE +from homeassistant.components.light import ( + ATTR_BRIGHTNESS, + ATTR_EFFECT, + DOMAIN as LIGHT_DOMAIN, +) +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_UNAVAILABLE, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.util.color import value_to_brightness + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +@pytest.mark.parametrize( + "tankconfig", + [ + [["CLASSIC_DAYLIGHT"], []], + [[], ["CLASSIC_DAYLIGHT"]], + [["CLASSIC_DAYLIGHT"], ["CLASSIC_DAYLIGHT"]], + ], +) +async def test_setup_classic_led_ctrl( + hass: HomeAssistant, + eheimdigital_hub_mock: MagicMock, + tankconfig: list[list[str]], + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + classic_led_ctrl_mock: MagicMock, +) -> None: + """Test light platform setup with different channels.""" + mock_config_entry.add_to_hass(hass) + + classic_led_ctrl_mock.tankconfig = tankconfig + + with patch("homeassistant.components.eheimdigital.PLATFORMS", [Platform.LIGHT]): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await eheimdigital_hub_mock.call_args.kwargs["device_found_callback"]( + "00:00:00:00:00:01", EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + await hass.async_block_till_done() + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_dynamic_new_devices( + hass: HomeAssistant, + eheimdigital_hub_mock: MagicMock, + classic_led_ctrl_mock: MagicMock, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, +) -> None: + """Test light platform setup with at first no devices and dynamically adding a device.""" + mock_config_entry.add_to_hass(hass) + + eheimdigital_hub_mock.return_value.devices = {} + + with patch("homeassistant.components.eheimdigital.PLATFORMS", [Platform.LIGHT]): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert ( + len( + entity_registry.entities.get_entries_for_config_entry_id( + 
mock_config_entry.entry_id + ) + ) + == 0 + ) + + eheimdigital_hub_mock.return_value.devices = { + "00:00:00:00:00:01": classic_led_ctrl_mock + } + + await eheimdigital_hub_mock.call_args.kwargs["device_found_callback"]( + "00:00:00:00:00:01", EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + await hass.async_block_till_done() + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.usefixtures("eheimdigital_hub_mock") +async def test_turn_off( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + classic_led_ctrl_mock: MagicMock, +) -> None: + """Test turning off the light.""" + mock_config_entry.add_to_hass(hass) + + with patch("homeassistant.components.eheimdigital.PLATFORMS", [Platform.LIGHT]): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await mock_config_entry.runtime_data._async_device_found( + "00:00:00:00:00:01", EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + await hass.async_block_till_done() + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: "light.mock_classicledcontrol_e_channel_0"}, + blocking=True, + ) + + classic_led_ctrl_mock.set_light_mode.assert_awaited_once_with(LightMode.MAN_MODE) + classic_led_ctrl_mock.turn_off.assert_awaited_once_with(0) + + +@pytest.mark.parametrize( + ("dim_input", "expected_dim_value"), + [ + (3, 1), + (255, 100), + (128, 50), + ], +) +async def test_turn_on_brightness( + hass: HomeAssistant, + eheimdigital_hub_mock: MagicMock, + mock_config_entry: MockConfigEntry, + classic_led_ctrl_mock: MagicMock, + dim_input: int, + expected_dim_value: int, +) -> None: + """Test turning on the light with different brightness values.""" + mock_config_entry.add_to_hass(hass) + + with patch("homeassistant.components.eheimdigital.PLATFORMS", [Platform.LIGHT]): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await eheimdigital_hub_mock.call_args.kwargs["device_found_callback"]( + "00:00:00:00:00:01", EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + await hass.async_block_till_done() + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: "light.mock_classicledcontrol_e_channel_0", + ATTR_BRIGHTNESS: dim_input, + }, + blocking=True, + ) + + classic_led_ctrl_mock.set_light_mode.assert_awaited_once_with(LightMode.MAN_MODE) + classic_led_ctrl_mock.turn_on.assert_awaited_once_with(expected_dim_value, 0) + + +async def test_turn_on_effect( + hass: HomeAssistant, + eheimdigital_hub_mock: MagicMock, + mock_config_entry: MockConfigEntry, + classic_led_ctrl_mock: MagicMock, +) -> None: + """Test turning on the light with an effect value.""" + mock_config_entry.add_to_hass(hass) + + classic_led_ctrl_mock.light_mode = LightMode.MAN_MODE + + with patch("homeassistant.components.eheimdigital.PLATFORMS", [Platform.LIGHT]): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await eheimdigital_hub_mock.call_args.kwargs["device_found_callback"]( + "00:00:00:00:00:01", EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + await hass.async_block_till_done() + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: "light.mock_classicledcontrol_e_channel_0", + ATTR_EFFECT: EFFECT_DAYCL_MODE, + }, + blocking=True, + ) + + classic_led_ctrl_mock.set_light_mode.assert_awaited_once_with(LightMode.DAYCL_MODE) + + +async def test_state_update( + hass: HomeAssistant, + eheimdigital_hub_mock: MagicMock, + 
mock_config_entry: MockConfigEntry, + classic_led_ctrl_mock: MagicMock, +) -> None: + """Test the light state update.""" + mock_config_entry.add_to_hass(hass) + + with patch("homeassistant.components.eheimdigital.PLATFORMS", [Platform.LIGHT]): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await eheimdigital_hub_mock.call_args.kwargs["device_found_callback"]( + "00:00:00:00:00:01", EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + await hass.async_block_till_done() + + classic_led_ctrl_mock.light_level = (20, 30) + + await eheimdigital_hub_mock.call_args.kwargs["receive_callback"]() + + assert (state := hass.states.get("light.mock_classicledcontrol_e_channel_0")) + assert state.attributes["brightness"] == value_to_brightness((1, 100), 20) + + +async def test_update_failed( + hass: HomeAssistant, + eheimdigital_hub_mock: MagicMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test an failed update.""" + mock_config_entry.add_to_hass(hass) + + with patch("homeassistant.components.eheimdigital.PLATFORMS", [Platform.LIGHT]): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await eheimdigital_hub_mock.call_args.kwargs["device_found_callback"]( + "00:00:00:00:00:01", EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + await hass.async_block_till_done() + + eheimdigital_hub_mock.return_value.update.side_effect = ClientError + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert ( + hass.states.get("light.mock_classicledcontrol_e_channel_0").state + == STATE_UNAVAILABLE + ) From 1aabbec3dddaa3cc178a71d2957f478389f57cda Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 13 Dec 2024 16:37:26 -0500 Subject: [PATCH 0651/1198] Bump yalexs-ble to 2.5.4 (#133172) --- homeassistant/components/august/manifest.json | 2 +- homeassistant/components/yale/manifest.json | 2 +- homeassistant/components/yalexs_ble/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index 99dbbc0ed9c..ed2c8007ee8 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -28,5 +28,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.2"] + "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.4"] } diff --git a/homeassistant/components/yale/manifest.json b/homeassistant/components/yale/manifest.json index 474ed36e90c..2ed1f4b5c43 100644 --- a/homeassistant/components/yale/manifest.json +++ b/homeassistant/components/yale/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/yale", "iot_class": "cloud_push", "loggers": ["socketio", "engineio", "yalexs"], - "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.2"] + "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.4"] } diff --git a/homeassistant/components/yalexs_ble/manifest.json b/homeassistant/components/yalexs_ble/manifest.json index 95d28cd5372..1472f9035ea 100644 --- a/homeassistant/components/yalexs_ble/manifest.json +++ b/homeassistant/components/yalexs_ble/manifest.json @@ -12,5 +12,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": 
"https://www.home-assistant.io/integrations/yalexs_ble", "iot_class": "local_push", - "requirements": ["yalexs-ble==2.5.2"] + "requirements": ["yalexs-ble==2.5.4"] } diff --git a/requirements_all.txt b/requirements_all.txt index 7eab703836c..4ce1c523171 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3055,7 +3055,7 @@ yalesmartalarmclient==0.4.3 # homeassistant.components.august # homeassistant.components.yale # homeassistant.components.yalexs_ble -yalexs-ble==2.5.2 +yalexs-ble==2.5.4 # homeassistant.components.august # homeassistant.components.yale diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 2a785e363f7..0f9d94e2272 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2450,7 +2450,7 @@ yalesmartalarmclient==0.4.3 # homeassistant.components.august # homeassistant.components.yale # homeassistant.components.yalexs_ble -yalexs-ble==2.5.2 +yalexs-ble==2.5.4 # homeassistant.components.august # homeassistant.components.yale From 165ca5140c408927cdeb14eeab44a20845dddffe Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 13 Dec 2024 21:05:41 -0500 Subject: [PATCH 0652/1198] Bump uiprotect to 7.0.2 (#132975) --- .../components/unifiprotect/manifest.json | 2 +- .../components/unifiprotect/services.py | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/unifiprotect/conftest.py | 58 ++++++++++--------- .../unifiprotect/test_binary_sensor.py | 20 +++---- tests/components/unifiprotect/test_camera.py | 30 +++++----- tests/components/unifiprotect/test_event.py | 12 ++-- tests/components/unifiprotect/test_init.py | 2 +- tests/components/unifiprotect/test_light.py | 6 +- tests/components/unifiprotect/test_lock.py | 16 ++--- .../unifiprotect/test_media_player.py | 30 ++++++---- .../unifiprotect/test_media_source.py | 8 +-- tests/components/unifiprotect/test_number.py | 12 ++-- .../components/unifiprotect/test_recorder.py | 2 +- tests/components/unifiprotect/test_select.py | 20 +++---- tests/components/unifiprotect/test_sensor.py | 10 ++-- .../components/unifiprotect/test_services.py | 24 +++++--- tests/components/unifiprotect/test_switch.py | 18 +++--- tests/components/unifiprotect/test_text.py | 2 +- 20 files changed, 152 insertions(+), 126 deletions(-) diff --git a/homeassistant/components/unifiprotect/manifest.json b/homeassistant/components/unifiprotect/manifest.json index 9e8a0ea6c21..81ef72ec50d 100644 --- a/homeassistant/components/unifiprotect/manifest.json +++ b/homeassistant/components/unifiprotect/manifest.json @@ -40,7 +40,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["uiprotect", "unifi_discovery"], - "requirements": ["uiprotect==6.8.0", "unifi-discovery==1.2.0"], + "requirements": ["uiprotect==7.0.2", "unifi-discovery==1.2.0"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/homeassistant/components/unifiprotect/services.py b/homeassistant/components/unifiprotect/services.py index fc438240839..35713efdf3d 100644 --- a/homeassistant/components/unifiprotect/services.py +++ b/homeassistant/components/unifiprotect/services.py @@ -5,7 +5,7 @@ from __future__ import annotations import asyncio from typing import Any, cast -from pydantic.v1 import ValidationError +from pydantic import ValidationError from uiprotect.api import ProtectApiClient from uiprotect.data import Camera, Chime from uiprotect.exceptions import ClientError diff --git a/requirements_all.txt b/requirements_all.txt index 4ce1c523171..1e271ff1d57 100644 --- a/requirements_all.txt +++ 
b/requirements_all.txt @@ -2905,7 +2905,7 @@ typedmonarchmoney==0.3.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.8.0 +uiprotect==7.0.2 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 0f9d94e2272..95d610361d9 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2324,7 +2324,7 @@ typedmonarchmoney==0.3.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.8.0 +uiprotect==7.0.2 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 diff --git a/tests/components/unifiprotect/conftest.py b/tests/components/unifiprotect/conftest.py index fad65c095df..3ed559b71ec 100644 --- a/tests/components/unifiprotect/conftest.py +++ b/tests/components/unifiprotect/conftest.py @@ -51,11 +51,11 @@ def mock_nvr(): nvr = NVR.from_unifi_dict(**data) # disable pydantic validation so mocking can happen - NVR.__config__.validate_assignment = False + NVR.model_config["validate_assignment"] = False yield nvr - NVR.__config__.validate_assignment = True + NVR.model_config["validate_assignment"] = True @pytest.fixture(name="ufp_config_entry") @@ -120,7 +120,11 @@ def mock_ufp_client(bootstrap: Bootstrap): client.base_url = "https://127.0.0.1" client.connection_host = IPv4Address("127.0.0.1") - client.get_nvr = AsyncMock(return_value=nvr) + + async def get_nvr(*args: Any, **kwargs: Any) -> NVR: + return client.bootstrap.nvr + + client.get_nvr = get_nvr client.get_bootstrap = AsyncMock(return_value=bootstrap) client.update = AsyncMock(return_value=bootstrap) client.async_disconnect_ws = AsyncMock() @@ -173,7 +177,7 @@ def camera_fixture(fixed_now: datetime): """Mock UniFi Protect Camera device.""" # disable pydantic validation so mocking can happen - Camera.__config__.validate_assignment = False + Camera.model_config["validate_assignment"] = False data = json.loads(load_fixture("sample_camera.json", integration=DOMAIN)) camera = Camera.from_unifi_dict(**data) @@ -181,23 +185,23 @@ def camera_fixture(fixed_now: datetime): yield camera - Camera.__config__.validate_assignment = True + Camera.model_config["validate_assignment"] = True @pytest.fixture(name="camera_all") def camera_all_fixture(camera: Camera): """Mock UniFi Protect Camera device.""" - all_camera = camera.copy() - all_camera.channels = [all_camera.channels[0].copy()] + all_camera = camera.model_copy() + all_camera.channels = [all_camera.channels[0].model_copy()] - medium_channel = all_camera.channels[0].copy() + medium_channel = all_camera.channels[0].model_copy() medium_channel.name = "Medium" medium_channel.id = 1 medium_channel.rtsp_alias = "test_medium_alias" all_camera.channels.append(medium_channel) - low_channel = all_camera.channels[0].copy() + low_channel = all_camera.channels[0].model_copy() low_channel.name = "Low" low_channel.id = 2 low_channel.rtsp_alias = "test_medium_alias" @@ -210,10 +214,10 @@ def camera_all_fixture(camera: Camera): def doorbell_fixture(camera: Camera, fixed_now: datetime): """Mock UniFi Protect Camera device (with chime).""" - doorbell = camera.copy() - doorbell.channels = [c.copy() for c in doorbell.channels] + doorbell = camera.model_copy() + doorbell.channels = [c.model_copy() for c in doorbell.channels] - package_channel = doorbell.channels[0].copy() + package_channel = doorbell.channels[0].model_copy() package_channel.name = "Package Camera" package_channel.id = 3 package_channel.fps = 2 @@ -247,8 +251,8 @@ def doorbell_fixture(camera: Camera, fixed_now: 
datetime): def unadopted_camera(camera: Camera): """Mock UniFi Protect Camera device (unadopted).""" - no_camera = camera.copy() - no_camera.channels = [c.copy() for c in no_camera.channels] + no_camera = camera.model_copy() + no_camera.channels = [c.model_copy() for c in no_camera.channels] no_camera.name = "Unadopted Camera" no_camera.is_adopted = False return no_camera @@ -259,19 +263,19 @@ def light_fixture(): """Mock UniFi Protect Light device.""" # disable pydantic validation so mocking can happen - Light.__config__.validate_assignment = False + Light.model_config["validate_assignment"] = False data = json.loads(load_fixture("sample_light.json", integration=DOMAIN)) yield Light.from_unifi_dict(**data) - Light.__config__.validate_assignment = True + Light.model_config["validate_assignment"] = True @pytest.fixture def unadopted_light(light: Light): """Mock UniFi Protect Light device (unadopted).""" - no_light = light.copy() + no_light = light.model_copy() no_light.name = "Unadopted Light" no_light.is_adopted = False return no_light @@ -282,12 +286,12 @@ def viewer(): """Mock UniFi Protect Viewport device.""" # disable pydantic validation so mocking can happen - Viewer.__config__.validate_assignment = False + Viewer.model_config["validate_assignment"] = False data = json.loads(load_fixture("sample_viewport.json", integration=DOMAIN)) yield Viewer.from_unifi_dict(**data) - Viewer.__config__.validate_assignment = True + Viewer.model_config["validate_assignment"] = True @pytest.fixture(name="sensor") @@ -295,7 +299,7 @@ def sensor_fixture(fixed_now: datetime): """Mock UniFi Protect Sensor device.""" # disable pydantic validation so mocking can happen - Sensor.__config__.validate_assignment = False + Sensor.model_config["validate_assignment"] = False data = json.loads(load_fixture("sample_sensor.json", integration=DOMAIN)) sensor: Sensor = Sensor.from_unifi_dict(**data) @@ -304,14 +308,14 @@ def sensor_fixture(fixed_now: datetime): sensor.alarm_triggered_at = fixed_now - timedelta(hours=1) yield sensor - Sensor.__config__.validate_assignment = True + Sensor.model_config["validate_assignment"] = True @pytest.fixture(name="sensor_all") def csensor_all_fixture(sensor: Sensor): """Mock UniFi Protect Sensor device.""" - all_sensor = sensor.copy() + all_sensor = sensor.model_copy() all_sensor.light_settings.is_enabled = True all_sensor.humidity_settings.is_enabled = True all_sensor.temperature_settings.is_enabled = True @@ -327,19 +331,19 @@ def doorlock_fixture(): """Mock UniFi Protect Doorlock device.""" # disable pydantic validation so mocking can happen - Doorlock.__config__.validate_assignment = False + Doorlock.model_config["validate_assignment"] = False data = json.loads(load_fixture("sample_doorlock.json", integration=DOMAIN)) yield Doorlock.from_unifi_dict(**data) - Doorlock.__config__.validate_assignment = True + Doorlock.model_config["validate_assignment"] = True @pytest.fixture def unadopted_doorlock(doorlock: Doorlock): """Mock UniFi Protect Light device (unadopted).""" - no_doorlock = doorlock.copy() + no_doorlock = doorlock.model_copy() no_doorlock.name = "Unadopted Lock" no_doorlock.is_adopted = False return no_doorlock @@ -350,12 +354,12 @@ def chime(): """Mock UniFi Protect Chime device.""" # disable pydantic validation so mocking can happen - Chime.__config__.validate_assignment = False + Chime.model_config["validate_assignment"] = False data = json.loads(load_fixture("sample_chime.json", integration=DOMAIN)) yield Chime.from_unifi_dict(**data) - 
Chime.__config__.validate_assignment = True + Chime.model_config["validate_assignment"] = True @pytest.fixture(name="fixed_now") diff --git a/tests/components/unifiprotect/test_binary_sensor.py b/tests/components/unifiprotect/test_binary_sensor.py index 31669aa62bb..3a8d5d952ce 100644 --- a/tests/components/unifiprotect/test_binary_sensor.py +++ b/tests/components/unifiprotect/test_binary_sensor.py @@ -305,7 +305,7 @@ async def test_binary_sensor_update_motion( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.is_motion_detected = True new_camera.last_motion_event_id = event.id @@ -352,7 +352,7 @@ async def test_binary_sensor_update_light_motion( api=ufp.api, ) - new_light = light.copy() + new_light = light.model_copy() new_light.is_pir_motion_detected = True new_light.last_motion_event_id = event.id @@ -386,7 +386,7 @@ async def test_binary_sensor_update_mount_type_window( assert state assert state.attributes[ATTR_DEVICE_CLASS] == BinarySensorDeviceClass.DOOR.value - new_sensor = sensor_all.copy() + new_sensor = sensor_all.model_copy() new_sensor.mount_type = MountType.WINDOW mock_msg = Mock() @@ -418,7 +418,7 @@ async def test_binary_sensor_update_mount_type_garage( assert state assert state.attributes[ATTR_DEVICE_CLASS] == BinarySensorDeviceClass.DOOR.value - new_sensor = sensor_all.copy() + new_sensor = sensor_all.model_copy() new_sensor.mount_type = MountType.GARAGE mock_msg = Mock() @@ -468,7 +468,7 @@ async def test_binary_sensor_package_detected( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.is_smart_detected = True new_camera.last_smart_detect_event_ids[SmartDetectObjectType.PACKAGE] = event.id @@ -501,7 +501,7 @@ async def test_binary_sensor_package_detected( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.is_smart_detected = True new_camera.last_smart_detect_event_ids[SmartDetectObjectType.PACKAGE] = event.id @@ -534,7 +534,7 @@ async def test_binary_sensor_package_detected( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.is_smart_detected = True new_camera.last_smart_detect_event_ids[SmartDetectObjectType.PACKAGE] = event.id @@ -611,7 +611,7 @@ async def test_binary_sensor_person_detected( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.is_smart_detected = True ufp.api.bootstrap.cameras = {new_camera.id: new_camera} @@ -641,7 +641,7 @@ async def test_binary_sensor_person_detected( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.is_smart_detected = True new_camera.last_smart_detect_event_ids[SmartDetectObjectType.PERSON] = event.id @@ -680,7 +680,7 @@ async def test_binary_sensor_person_detected( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.is_smart_detected = True new_camera.last_smart_detect_event_ids[SmartDetectObjectType.PERSON] = event.id diff --git a/tests/components/unifiprotect/test_camera.py b/tests/components/unifiprotect/test_camera.py index 689352d8aa3..12b92beedd0 100644 --- a/tests/components/unifiprotect/test_camera.py +++ b/tests/components/unifiprotect/test_camera.py @@ -236,15 +236,15 @@ async def test_basic_setup( ) -> None: """Test working setup of unifiprotect entry.""" - camera_high_only = camera_all.copy() - camera_high_only.channels = [c.copy() for c in camera_all.channels] + camera_high_only = camera_all.model_copy() + 
camera_high_only.channels = [c.model_copy() for c in camera_all.channels] camera_high_only.name = "Test Camera 1" camera_high_only.channels[0].is_rtsp_enabled = True camera_high_only.channels[1].is_rtsp_enabled = False camera_high_only.channels[2].is_rtsp_enabled = False - camera_medium_only = camera_all.copy() - camera_medium_only.channels = [c.copy() for c in camera_all.channels] + camera_medium_only = camera_all.model_copy() + camera_medium_only.channels = [c.model_copy() for c in camera_all.channels] camera_medium_only.name = "Test Camera 2" camera_medium_only.channels[0].is_rtsp_enabled = False camera_medium_only.channels[1].is_rtsp_enabled = True @@ -252,8 +252,8 @@ async def test_basic_setup( camera_all.name = "Test Camera 3" - camera_no_channels = camera_all.copy() - camera_no_channels.channels = [c.copy() for c in camera_all.channels] + camera_no_channels = camera_all.model_copy() + camera_no_channels.channels = [c.model_copy() for c in camera_all.channels] camera_no_channels.name = "Test Camera 4" camera_no_channels.channels[0].is_rtsp_enabled = False camera_no_channels.channels[1].is_rtsp_enabled = False @@ -337,8 +337,8 @@ async def test_webrtc_support( camera_all: ProtectCamera, ) -> None: """Test webrtc support is available.""" - camera_high_only = camera_all.copy() - camera_high_only.channels = [c.copy() for c in camera_all.channels] + camera_high_only = camera_all.model_copy() + camera_high_only.channels = [c.model_copy() for c in camera_all.channels] camera_high_only.name = "Test Camera 1" camera_high_only.channels[0].is_rtsp_enabled = True camera_high_only.channels[1].is_rtsp_enabled = False @@ -355,7 +355,7 @@ async def test_adopt( ) -> None: """Test setting up camera with no camera channels.""" - camera1 = camera.copy() + camera1 = camera.model_copy() camera1.channels = [] await init_entry(hass, ufp, [camera1]) @@ -450,7 +450,7 @@ async def test_camera_interval_update( state = hass.states.get(entity_id) assert state and state.state == "idle" - new_camera = camera.copy() + new_camera = camera.model_copy() new_camera.is_recording = True ufp.api.bootstrap.cameras = {new_camera.id: new_camera} @@ -527,10 +527,10 @@ async def test_camera_ws_update( state = hass.states.get(entity_id) assert state and state.state == "idle" - new_camera = camera.copy() + new_camera = camera.model_copy() new_camera.is_recording = True - no_camera = camera.copy() + no_camera = camera.model_copy() no_camera.is_adopted = False ufp.api.bootstrap.cameras = {new_camera.id: new_camera} @@ -563,7 +563,7 @@ async def test_camera_ws_update_offline( assert state and state.state == "idle" # camera goes offline - new_camera = camera.copy() + new_camera = camera.model_copy() new_camera.state = StateType.DISCONNECTED mock_msg = Mock() @@ -601,7 +601,7 @@ async def test_camera_enable_motion( assert_entity_counts(hass, Platform.CAMERA, 2, 1) entity_id = "camera.test_camera_high_resolution_channel" - camera.__fields__["set_motion_detection"] = Mock(final=False) + camera.__pydantic_fields__["set_motion_detection"] = Mock(final=False, frozen=False) camera.set_motion_detection = AsyncMock() await hass.services.async_call( @@ -623,7 +623,7 @@ async def test_camera_disable_motion( assert_entity_counts(hass, Platform.CAMERA, 2, 1) entity_id = "camera.test_camera_high_resolution_channel" - camera.__fields__["set_motion_detection"] = Mock(final=False) + camera.__pydantic_fields__["set_motion_detection"] = Mock(final=False, frozen=False) camera.set_motion_detection = AsyncMock() await hass.services.async_call( diff --git 
a/tests/components/unifiprotect/test_event.py b/tests/components/unifiprotect/test_event.py index cc2195c1dba..6a26738f5e8 100644 --- a/tests/components/unifiprotect/test_event.py +++ b/tests/components/unifiprotect/test_event.py @@ -75,7 +75,7 @@ async def test_doorbell_ring( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.last_ring_event_id = "test_event_id" ufp.api.bootstrap.cameras = {new_camera.id: new_camera} ufp.api.bootstrap.events = {event.id: event} @@ -107,7 +107,7 @@ async def test_doorbell_ring( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() ufp.api.bootstrap.cameras = {new_camera.id: new_camera} ufp.api.bootstrap.events = {event.id: event} @@ -137,7 +137,7 @@ async def test_doorbell_ring( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() ufp.api.bootstrap.cameras = {new_camera.id: new_camera} ufp.api.bootstrap.events = {event.id: event} @@ -190,7 +190,7 @@ async def test_doorbell_nfc_scanned( metadata={"nfc": {"nfc_id": "test_nfc_id", "user_id": "test_user_id"}}, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.last_nfc_card_scanned_event_id = "test_event_id" ufp.api.bootstrap.cameras = {new_camera.id: new_camera} ufp.api.bootstrap.events = {event.id: event} @@ -248,7 +248,7 @@ async def test_doorbell_fingerprint_identified( metadata={"fingerprint": {"ulp_id": "test_ulp_id"}}, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.last_fingerprint_identified_event_id = "test_event_id" ufp.api.bootstrap.cameras = {new_camera.id: new_camera} ufp.api.bootstrap.events = {event.id: event} @@ -306,7 +306,7 @@ async def test_doorbell_fingerprint_not_identified( metadata={"fingerprint": {}}, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.last_fingerprint_identified_event_id = "test_event_id" ufp.api.bootstrap.cameras = {new_camera.id: new_camera} ufp.api.bootstrap.events = {event.id: event} diff --git a/tests/components/unifiprotect/test_init.py b/tests/components/unifiprotect/test_init.py index 0d88754a110..b01c7e0cf4a 100644 --- a/tests/components/unifiprotect/test_init.py +++ b/tests/components/unifiprotect/test_init.py @@ -118,7 +118,7 @@ async def test_setup_too_old( ) -> None: """Test setup of unifiprotect entry with too old of version of UniFi Protect.""" - old_bootstrap = ufp.api.bootstrap.copy() + old_bootstrap = ufp.api.bootstrap.model_copy() old_bootstrap.nvr = old_nvr ufp.api.update.return_value = old_bootstrap ufp.api.bootstrap = old_bootstrap diff --git a/tests/components/unifiprotect/test_light.py b/tests/components/unifiprotect/test_light.py index bb0b6992e4e..724ed108673 100644 --- a/tests/components/unifiprotect/test_light.py +++ b/tests/components/unifiprotect/test_light.py @@ -74,7 +74,7 @@ async def test_light_update( await init_entry(hass, ufp, [light, unadopted_light]) assert_entity_counts(hass, Platform.LIGHT, 1, 1) - new_light = light.copy() + new_light = light.model_copy() new_light.is_light_on = True new_light.light_device_settings.led_level = LEDLevel(3) @@ -101,7 +101,7 @@ async def test_light_turn_on( assert_entity_counts(hass, Platform.LIGHT, 1, 1) entity_id = "light.test_light" - light.__fields__["set_light"] = Mock(final=False) + light.__pydantic_fields__["set_light"] = Mock(final=False, frozen=False) light.set_light = AsyncMock() await hass.services.async_call( @@ -123,7 +123,7 @@ async def test_light_turn_off( assert_entity_counts(hass, 
Platform.LIGHT, 1, 1) entity_id = "light.test_light" - light.__fields__["set_light"] = Mock(final=False) + light.__pydantic_fields__["set_light"] = Mock(final=False, frozen=False) light.set_light = AsyncMock() await hass.services.async_call( diff --git a/tests/components/unifiprotect/test_lock.py b/tests/components/unifiprotect/test_lock.py index 8b37b1c5928..9095c092ea2 100644 --- a/tests/components/unifiprotect/test_lock.py +++ b/tests/components/unifiprotect/test_lock.py @@ -75,7 +75,7 @@ async def test_lock_locked( await init_entry(hass, ufp, [doorlock, unadopted_doorlock]) assert_entity_counts(hass, Platform.LOCK, 1, 1) - new_lock = doorlock.copy() + new_lock = doorlock.model_copy() new_lock.lock_status = LockStatusType.CLOSED mock_msg = Mock() @@ -102,7 +102,7 @@ async def test_lock_unlocking( await init_entry(hass, ufp, [doorlock, unadopted_doorlock]) assert_entity_counts(hass, Platform.LOCK, 1, 1) - new_lock = doorlock.copy() + new_lock = doorlock.model_copy() new_lock.lock_status = LockStatusType.OPENING mock_msg = Mock() @@ -129,7 +129,7 @@ async def test_lock_locking( await init_entry(hass, ufp, [doorlock, unadopted_doorlock]) assert_entity_counts(hass, Platform.LOCK, 1, 1) - new_lock = doorlock.copy() + new_lock = doorlock.model_copy() new_lock.lock_status = LockStatusType.CLOSING mock_msg = Mock() @@ -156,7 +156,7 @@ async def test_lock_jammed( await init_entry(hass, ufp, [doorlock, unadopted_doorlock]) assert_entity_counts(hass, Platform.LOCK, 1, 1) - new_lock = doorlock.copy() + new_lock = doorlock.model_copy() new_lock.lock_status = LockStatusType.JAMMED_WHILE_CLOSING mock_msg = Mock() @@ -183,7 +183,7 @@ async def test_lock_unavailable( await init_entry(hass, ufp, [doorlock, unadopted_doorlock]) assert_entity_counts(hass, Platform.LOCK, 1, 1) - new_lock = doorlock.copy() + new_lock = doorlock.model_copy() new_lock.lock_status = LockStatusType.NOT_CALIBRATED mock_msg = Mock() @@ -210,7 +210,7 @@ async def test_lock_do_lock( await init_entry(hass, ufp, [doorlock, unadopted_doorlock]) assert_entity_counts(hass, Platform.LOCK, 1, 1) - doorlock.__fields__["close_lock"] = Mock(final=False) + doorlock.__pydantic_fields__["close_lock"] = Mock(final=False, frozen=False) doorlock.close_lock = AsyncMock() await hass.services.async_call( @@ -234,7 +234,7 @@ async def test_lock_do_unlock( await init_entry(hass, ufp, [doorlock, unadopted_doorlock]) assert_entity_counts(hass, Platform.LOCK, 1, 1) - new_lock = doorlock.copy() + new_lock = doorlock.model_copy() new_lock.lock_status = LockStatusType.CLOSED mock_msg = Mock() @@ -245,7 +245,7 @@ async def test_lock_do_unlock( ufp.ws_msg(mock_msg) await hass.async_block_till_done() - new_lock.__fields__["open_lock"] = Mock(final=False) + doorlock.__pydantic_fields__["open_lock"] = Mock(final=False, frozen=False) new_lock.open_lock = AsyncMock() await hass.services.async_call( diff --git a/tests/components/unifiprotect/test_media_player.py b/tests/components/unifiprotect/test_media_player.py index 642a3a1e372..6d27eb2a206 100644 --- a/tests/components/unifiprotect/test_media_player.py +++ b/tests/components/unifiprotect/test_media_player.py @@ -88,7 +88,7 @@ async def test_media_player_update( await init_entry(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.talkback_stream = Mock() new_camera.talkback_stream.is_running = True @@ -116,7 +116,7 @@ async def test_media_player_set_volume( await init_entry(hass, ufp, 
[doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - doorbell.__fields__["set_speaker_volume"] = Mock(final=False) + doorbell.__pydantic_fields__["set_speaker_volume"] = Mock(final=False, frozen=False) doorbell.set_speaker_volume = AsyncMock() await hass.services.async_call( @@ -140,7 +140,7 @@ async def test_media_player_stop( await init_entry(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.talkback_stream = AsyncMock() new_camera.talkback_stream.is_running = True @@ -173,9 +173,11 @@ async def test_media_player_play( await init_entry(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - doorbell.__fields__["stop_audio"] = Mock(final=False) - doorbell.__fields__["play_audio"] = Mock(final=False) - doorbell.__fields__["wait_until_audio_completes"] = Mock(final=False) + doorbell.__pydantic_fields__["stop_audio"] = Mock(final=False, frozen=False) + doorbell.__pydantic_fields__["play_audio"] = Mock(final=False, frozen=False) + doorbell.__pydantic_fields__["wait_until_audio_completes"] = Mock( + final=False, frozen=False + ) doorbell.stop_audio = AsyncMock() doorbell.play_audio = AsyncMock() doorbell.wait_until_audio_completes = AsyncMock() @@ -208,9 +210,11 @@ async def test_media_player_play_media_source( await init_entry(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - doorbell.__fields__["stop_audio"] = Mock(final=False) - doorbell.__fields__["play_audio"] = Mock(final=False) - doorbell.__fields__["wait_until_audio_completes"] = Mock(final=False) + doorbell.__pydantic_fields__["stop_audio"] = Mock(final=False, frozen=False) + doorbell.__pydantic_fields__["play_audio"] = Mock(final=False, frozen=False) + doorbell.__pydantic_fields__["wait_until_audio_completes"] = Mock( + final=False, frozen=False + ) doorbell.stop_audio = AsyncMock() doorbell.play_audio = AsyncMock() doorbell.wait_until_audio_completes = AsyncMock() @@ -247,7 +251,7 @@ async def test_media_player_play_invalid( await init_entry(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - doorbell.__fields__["play_audio"] = Mock(final=False) + doorbell.__pydantic_fields__["play_audio"] = Mock(final=False, frozen=False) doorbell.play_audio = AsyncMock() with pytest.raises(HomeAssistantError): @@ -276,8 +280,10 @@ async def test_media_player_play_error( await init_entry(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - doorbell.__fields__["play_audio"] = Mock(final=False) - doorbell.__fields__["wait_until_audio_completes"] = Mock(final=False) + doorbell.__pydantic_fields__["play_audio"] = Mock(final=False, frozen=False) + doorbell.__pydantic_fields__["wait_until_audio_completes"] = Mock( + final=False, frozen=False + ) doorbell.play_audio = AsyncMock(side_effect=StreamError) doorbell.wait_until_audio_completes = AsyncMock() diff --git a/tests/components/unifiprotect/test_media_source.py b/tests/components/unifiprotect/test_media_source.py index 18944460ca5..61f9680bdbc 100644 --- a/tests/components/unifiprotect/test_media_source.py +++ b/tests/components/unifiprotect/test_media_source.py @@ -204,9 +204,9 @@ async def test_browse_media_root_multiple_consoles( await hass.config_entries.async_setup(ufp.entry.entry_id) await hass.async_block_till_done() - bootstrap2 = bootstrap.copy() + 
bootstrap2 = bootstrap.model_copy() bootstrap2._has_media = True - bootstrap2.nvr = bootstrap.nvr.copy() + bootstrap2.nvr = bootstrap.nvr.model_copy() bootstrap2.nvr.id = "test_id2" bootstrap2.nvr.mac = "A2E00C826924" bootstrap2.nvr.name = "UnifiProtect2" @@ -270,9 +270,9 @@ async def test_browse_media_root_multiple_consoles_only_one_media( await hass.config_entries.async_setup(ufp.entry.entry_id) await hass.async_block_till_done() - bootstrap2 = bootstrap.copy() + bootstrap2 = bootstrap.model_copy() bootstrap2._has_media = False - bootstrap2.nvr = bootstrap.nvr.copy() + bootstrap2.nvr = bootstrap.nvr.model_copy() bootstrap2.nvr.id = "test_id2" bootstrap2.nvr.mac = "A2E00C826924" bootstrap2.nvr.name = "UnifiProtect2" diff --git a/tests/components/unifiprotect/test_number.py b/tests/components/unifiprotect/test_number.py index 77a409551b1..1838a574bc4 100644 --- a/tests/components/unifiprotect/test_number.py +++ b/tests/components/unifiprotect/test_number.py @@ -162,7 +162,7 @@ async def test_number_light_sensitivity( description = LIGHT_NUMBERS[0] assert description.ufp_set_method is not None - light.__fields__["set_sensitivity"] = Mock(final=False) + light.__pydantic_fields__["set_sensitivity"] = Mock(final=False, frozen=False) light.set_sensitivity = AsyncMock() _, entity_id = ids_from_device_description(Platform.NUMBER, light, description) @@ -184,7 +184,7 @@ async def test_number_light_duration( description = LIGHT_NUMBERS[1] - light.__fields__["set_duration"] = Mock(final=False) + light.__pydantic_fields__["set_duration"] = Mock(final=False, frozen=False) light.set_duration = AsyncMock() _, entity_id = ids_from_device_description(Platform.NUMBER, light, description) @@ -210,7 +210,9 @@ async def test_number_camera_simple( assert description.ufp_set_method is not None - camera.__fields__[description.ufp_set_method] = Mock(final=False) + camera.__pydantic_fields__[description.ufp_set_method] = Mock( + final=False, frozen=False + ) setattr(camera, description.ufp_set_method, AsyncMock()) _, entity_id = ids_from_device_description(Platform.NUMBER, camera, description) @@ -230,7 +232,9 @@ async def test_number_lock_auto_close( description = DOORLOCK_NUMBERS[0] - doorlock.__fields__["set_auto_close_time"] = Mock(final=False) + doorlock.__pydantic_fields__["set_auto_close_time"] = Mock( + final=False, frozen=False + ) doorlock.set_auto_close_time = AsyncMock() _, entity_id = ids_from_device_description(Platform.NUMBER, doorlock, description) diff --git a/tests/components/unifiprotect/test_recorder.py b/tests/components/unifiprotect/test_recorder.py index fe102c2fdbc..1f025a63306 100644 --- a/tests/components/unifiprotect/test_recorder.py +++ b/tests/components/unifiprotect/test_recorder.py @@ -51,7 +51,7 @@ async def test_exclude_attributes( camera_id=doorbell.id, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.is_motion_detected = True new_camera.last_motion_event_id = event.id diff --git a/tests/components/unifiprotect/test_select.py b/tests/components/unifiprotect/test_select.py index 8795af57214..6db3ae22dcb 100644 --- a/tests/components/unifiprotect/test_select.py +++ b/tests/components/unifiprotect/test_select.py @@ -262,7 +262,7 @@ async def test_select_update_doorbell_settings( expected_length += 1 new_nvr = copy(ufp.api.bootstrap.nvr) - new_nvr.__fields__["update_all_messages"] = Mock(final=False) + new_nvr.__pydantic_fields__["update_all_messages"] = Mock(final=False, frozen=False) new_nvr.update_all_messages = Mock() 
new_nvr.doorbell_settings.all_messages = [ @@ -304,7 +304,7 @@ async def test_select_update_doorbell_message( assert state assert state.state == "Default Message (Welcome)" - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.lcd_message = LCDMessage( type=DoorbellMessageType.CUSTOM_MESSAGE, text="Test" ) @@ -332,7 +332,7 @@ async def test_select_set_option_light_motion( _, entity_id = ids_from_device_description(Platform.SELECT, light, LIGHT_SELECTS[0]) - light.__fields__["set_light_settings"] = Mock(final=False) + light.__pydantic_fields__["set_light_settings"] = Mock(final=False, frozen=False) light.set_light_settings = AsyncMock() await hass.services.async_call( @@ -357,7 +357,7 @@ async def test_select_set_option_light_camera( _, entity_id = ids_from_device_description(Platform.SELECT, light, LIGHT_SELECTS[1]) - light.__fields__["set_paired_camera"] = Mock(final=False) + light.__pydantic_fields__["set_paired_camera"] = Mock(final=False, frozen=False) light.set_paired_camera = AsyncMock() camera = list(light.api.bootstrap.cameras.values())[0] @@ -393,7 +393,7 @@ async def test_select_set_option_camera_recording( Platform.SELECT, doorbell, CAMERA_SELECTS[0] ) - doorbell.__fields__["set_recording_mode"] = Mock(final=False) + doorbell.__pydantic_fields__["set_recording_mode"] = Mock(final=False, frozen=False) doorbell.set_recording_mode = AsyncMock() await hass.services.async_call( @@ -418,7 +418,7 @@ async def test_select_set_option_camera_ir( Platform.SELECT, doorbell, CAMERA_SELECTS[1] ) - doorbell.__fields__["set_ir_led_model"] = Mock(final=False) + doorbell.__pydantic_fields__["set_ir_led_model"] = Mock(final=False, frozen=False) doorbell.set_ir_led_model = AsyncMock() await hass.services.async_call( @@ -443,7 +443,7 @@ async def test_select_set_option_camera_doorbell_custom( Platform.SELECT, doorbell, CAMERA_SELECTS[2] ) - doorbell.__fields__["set_lcd_text"] = Mock(final=False) + doorbell.__pydantic_fields__["set_lcd_text"] = Mock(final=False, frozen=False) doorbell.set_lcd_text = AsyncMock() await hass.services.async_call( @@ -470,7 +470,7 @@ async def test_select_set_option_camera_doorbell_unifi( Platform.SELECT, doorbell, CAMERA_SELECTS[2] ) - doorbell.__fields__["set_lcd_text"] = Mock(final=False) + doorbell.__pydantic_fields__["set_lcd_text"] = Mock(final=False, frozen=False) doorbell.set_lcd_text = AsyncMock() await hass.services.async_call( @@ -512,7 +512,7 @@ async def test_select_set_option_camera_doorbell_default( Platform.SELECT, doorbell, CAMERA_SELECTS[2] ) - doorbell.__fields__["set_lcd_text"] = Mock(final=False) + doorbell.__pydantic_fields__["set_lcd_text"] = Mock(final=False, frozen=False) doorbell.set_lcd_text = AsyncMock() await hass.services.async_call( @@ -541,7 +541,7 @@ async def test_select_set_option_viewer( Platform.SELECT, viewer, VIEWER_SELECTS[0] ) - viewer.__fields__["set_liveview"] = Mock(final=False) + viewer.__pydantic_fields__["set_liveview"] = Mock(final=False, frozen=False) viewer.set_liveview = AsyncMock() liveview = list(viewer.api.bootstrap.liveviews.values())[0] diff --git a/tests/components/unifiprotect/test_sensor.py b/tests/components/unifiprotect/test_sensor.py index bc5f372c598..9489a49bf22 100644 --- a/tests/components/unifiprotect/test_sensor.py +++ b/tests/components/unifiprotect/test_sensor.py @@ -464,7 +464,7 @@ async def test_sensor_update_alarm( api=ufp.api, ) - new_sensor = sensor_all.copy() + new_sensor = sensor_all.model_copy() new_sensor.set_alarm_timeout() new_sensor.last_alarm_event_id = event.id @@ 
-548,7 +548,7 @@ async def test_camera_update_license_plate( api=ufp.api, ) - new_camera = camera.copy() + new_camera = camera.model_copy() new_camera.is_smart_detected = True new_camera.last_smart_detect_event_ids[SmartDetectObjectType.LICENSE_PLATE] = ( event.id @@ -663,7 +663,7 @@ async def test_camera_update_license_plate_changes_number_during_detect( api=ufp.api, ) - new_camera = camera.copy() + new_camera = camera.model_copy() new_camera.is_smart_detected = True new_camera.last_smart_detect_event_ids[SmartDetectObjectType.LICENSE_PLATE] = ( event.id @@ -750,7 +750,7 @@ async def test_camera_update_license_plate_multiple_updates( api=ufp.api, ) - new_camera = camera.copy() + new_camera = camera.model_copy() new_camera.is_smart_detected = True new_camera.last_smart_detect_event_ids[SmartDetectObjectType.LICENSE_PLATE] = ( event.id @@ -873,7 +873,7 @@ async def test_camera_update_license_no_dupes( api=ufp.api, ) - new_camera = camera.copy() + new_camera = camera.model_copy() new_camera.is_smart_detected = True new_camera.last_smart_detect_event_ids[SmartDetectObjectType.LICENSE_PLATE] = ( event.id diff --git a/tests/components/unifiprotect/test_services.py b/tests/components/unifiprotect/test_services.py index 6808bacb40c..84e0e74a492 100644 --- a/tests/components/unifiprotect/test_services.py +++ b/tests/components/unifiprotect/test_services.py @@ -56,7 +56,9 @@ async def test_global_service_bad_device( """Test global service, invalid device ID.""" nvr = ufp.api.bootstrap.nvr - nvr.__fields__["add_custom_doorbell_message"] = Mock(final=False) + nvr.__pydantic_fields__["add_custom_doorbell_message"] = Mock( + final=False, frozen=False + ) nvr.add_custom_doorbell_message = AsyncMock() with pytest.raises(HomeAssistantError): @@ -75,7 +77,9 @@ async def test_global_service_exception( """Test global service, unexpected error.""" nvr = ufp.api.bootstrap.nvr - nvr.__fields__["add_custom_doorbell_message"] = Mock(final=False) + nvr.__pydantic_fields__["add_custom_doorbell_message"] = Mock( + final=False, frozen=False + ) nvr.add_custom_doorbell_message = AsyncMock(side_effect=BadRequest) with pytest.raises(HomeAssistantError): @@ -94,7 +98,9 @@ async def test_add_doorbell_text( """Test add_doorbell_text service.""" nvr = ufp.api.bootstrap.nvr - nvr.__fields__["add_custom_doorbell_message"] = Mock(final=False) + nvr.__pydantic_fields__["add_custom_doorbell_message"] = Mock( + final=False, frozen=False + ) nvr.add_custom_doorbell_message = AsyncMock() await hass.services.async_call( @@ -112,7 +118,9 @@ async def test_remove_doorbell_text( """Test remove_doorbell_text service.""" nvr = ufp.api.bootstrap.nvr - nvr.__fields__["remove_custom_doorbell_message"] = Mock(final=False) + nvr.__pydantic_fields__["remove_custom_doorbell_message"] = Mock( + final=False, frozen=False + ) nvr.remove_custom_doorbell_message = AsyncMock() await hass.services.async_call( @@ -129,7 +137,9 @@ async def test_add_doorbell_text_disabled_config_entry( ) -> None: """Test add_doorbell_text service.""" nvr = ufp.api.bootstrap.nvr - nvr.__fields__["add_custom_doorbell_message"] = Mock(final=False) + nvr.__pydantic_fields__["add_custom_doorbell_message"] = Mock( + final=False, frozen=False + ) nvr.add_custom_doorbell_message = AsyncMock() await hass.config_entries.async_set_disabled_by( @@ -158,10 +168,10 @@ async def test_set_chime_paired_doorbells( ufp.api.update_device = AsyncMock() - camera1 = doorbell.copy() + camera1 = doorbell.model_copy() camera1.name = "Test Camera 1" - camera2 = doorbell.copy() + camera2 = 
doorbell.model_copy() camera2.name = "Test Camera 2" await init_entry(hass, ufp, [camera1, camera2, chime]) diff --git a/tests/components/unifiprotect/test_switch.py b/tests/components/unifiprotect/test_switch.py index 9e0e9efa0ce..194e46681ce 100644 --- a/tests/components/unifiprotect/test_switch.py +++ b/tests/components/unifiprotect/test_switch.py @@ -89,7 +89,7 @@ async def test_switch_nvr(hass: HomeAssistant, ufp: MockUFPFixture) -> None: assert_entity_counts(hass, Platform.SWITCH, 2, 2) nvr = ufp.api.bootstrap.nvr - nvr.__fields__["set_insights"] = Mock(final=False) + nvr.__pydantic_fields__["set_insights"] = Mock(final=False, frozen=False) nvr.set_insights = AsyncMock() entity_id = "switch.unifiprotect_insights_enabled" @@ -272,7 +272,7 @@ async def test_switch_light_status( description = LIGHT_SWITCHES[1] - light.__fields__["set_status_light"] = Mock(final=False) + light.__pydantic_fields__["set_status_light"] = Mock(final=False, frozen=False) light.set_status_light = AsyncMock() _, entity_id = ids_from_device_description(Platform.SWITCH, light, description) @@ -300,7 +300,7 @@ async def test_switch_camera_ssh( description = CAMERA_SWITCHES[0] - doorbell.__fields__["set_ssh"] = Mock(final=False) + doorbell.__pydantic_fields__["set_ssh"] = Mock(final=False, frozen=False) doorbell.set_ssh = AsyncMock() _, entity_id = ids_from_device_description(Platform.SWITCH, doorbell, description) @@ -333,7 +333,9 @@ async def test_switch_camera_simple( assert description.ufp_set_method is not None - doorbell.__fields__[description.ufp_set_method] = Mock(final=False) + doorbell.__pydantic_fields__[description.ufp_set_method] = Mock( + final=False, frozen=False + ) setattr(doorbell, description.ufp_set_method, AsyncMock()) set_method = getattr(doorbell, description.ufp_set_method) @@ -362,7 +364,7 @@ async def test_switch_camera_highfps( description = CAMERA_SWITCHES[3] - doorbell.__fields__["set_video_mode"] = Mock(final=False) + doorbell.__pydantic_fields__["set_video_mode"] = Mock(final=False, frozen=False) doorbell.set_video_mode = AsyncMock() _, entity_id = ids_from_device_description(Platform.SWITCH, doorbell, description) @@ -393,7 +395,7 @@ async def test_switch_camera_privacy( description = PRIVACY_MODE_SWITCH - doorbell.__fields__["set_privacy"] = Mock(final=False) + doorbell.__pydantic_fields__["set_privacy"] = Mock(final=False, frozen=False) doorbell.set_privacy = AsyncMock() _, entity_id = ids_from_device_description(Platform.SWITCH, doorbell, description) @@ -409,7 +411,7 @@ async def test_switch_camera_privacy( doorbell.set_privacy.assert_called_with(True, 0, RecordingMode.NEVER) - new_doorbell = doorbell.copy() + new_doorbell = doorbell.model_copy() new_doorbell.add_privacy_zone() new_doorbell.mic_volume = 0 new_doorbell.recording_settings.mode = RecordingMode.NEVER @@ -445,7 +447,7 @@ async def test_switch_camera_privacy_already_on( description = PRIVACY_MODE_SWITCH - doorbell.__fields__["set_privacy"] = Mock(final=False) + doorbell.__pydantic_fields__["set_privacy"] = Mock(final=False, frozen=False) doorbell.set_privacy = AsyncMock() _, entity_id = ids_from_device_description(Platform.SWITCH, doorbell, description) diff --git a/tests/components/unifiprotect/test_text.py b/tests/components/unifiprotect/test_text.py index 3ca11744abb..c34611c43a9 100644 --- a/tests/components/unifiprotect/test_text.py +++ b/tests/components/unifiprotect/test_text.py @@ -78,7 +78,7 @@ async def test_text_camera_set( Platform.TEXT, doorbell, description ) - doorbell.__fields__["set_lcd_text"] = 
Mock(final=False) + doorbell.__pydantic_fields__["set_lcd_text"] = Mock(final=False, frozen=False) doorbell.set_lcd_text = AsyncMock() await hass.services.async_call( From bce6127264370f67ff99e7fad3a0bb13227349d9 Mon Sep 17 00:00:00 2001 From: IceBotYT <34712694+IceBotYT@users.noreply.github.com> Date: Sat, 14 Dec 2024 03:36:15 -0500 Subject: [PATCH 0653/1198] Bump `nice-go` to 1.0.0 (#133185) * Bump Nice G.O. to 1.0.0 * Mypy * Pytest --- homeassistant/components/nice_go/coordinator.py | 1 - homeassistant/components/nice_go/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/nice_go/fixtures/get_all_barriers.json | 4 ---- tests/components/nice_go/test_init.py | 1 - 6 files changed, 3 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/nice_go/coordinator.py b/homeassistant/components/nice_go/coordinator.py index 29c0d8233fe..07b20bbbf10 100644 --- a/homeassistant/components/nice_go/coordinator.py +++ b/homeassistant/components/nice_go/coordinator.py @@ -239,7 +239,6 @@ class NiceGOUpdateCoordinator(DataUpdateCoordinator[dict[str, NiceGODevice]]): ].type, # Device type is not sent in device state update, and it can't change, so we just reuse the existing one BarrierState( deviceId=raw_data["deviceId"], - desired=json.loads(raw_data["desired"]), reported=json.loads(raw_data["reported"]), connectionState=ConnectionState( connected=raw_data["connectionState"]["connected"], diff --git a/homeassistant/components/nice_go/manifest.json b/homeassistant/components/nice_go/manifest.json index 817d7ef9bc9..1af23ec4d9b 100644 --- a/homeassistant/components/nice_go/manifest.json +++ b/homeassistant/components/nice_go/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "cloud_push", "loggers": ["nice_go"], - "requirements": ["nice-go==0.3.10"] + "requirements": ["nice-go==1.0.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 1e271ff1d57..3994f0f3029 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1468,7 +1468,7 @@ nextdns==4.0.0 nibe==2.14.0 # homeassistant.components.nice_go -nice-go==0.3.10 +nice-go==1.0.0 # homeassistant.components.niko_home_control niko-home-control==0.2.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 95d610361d9..f3309cf24ea 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1231,7 +1231,7 @@ nextdns==4.0.0 nibe==2.14.0 # homeassistant.components.nice_go -nice-go==0.3.10 +nice-go==1.0.0 # homeassistant.components.niko_home_control niko-home-control==0.2.1 diff --git a/tests/components/nice_go/fixtures/get_all_barriers.json b/tests/components/nice_go/fixtures/get_all_barriers.json index 84799e0dd32..5a7607612c1 100644 --- a/tests/components/nice_go/fixtures/get_all_barriers.json +++ b/tests/components/nice_go/fixtures/get_all_barriers.json @@ -11,7 +11,6 @@ ], "state": { "deviceId": "1", - "desired": { "key": "value" }, "reported": { "displayName": "Test Garage 1", "autoDisabled": false, @@ -42,7 +41,6 @@ ], "state": { "deviceId": "2", - "desired": { "key": "value" }, "reported": { "displayName": "Test Garage 2", "autoDisabled": false, @@ -73,7 +71,6 @@ ], "state": { "deviceId": "3", - "desired": { "key": "value" }, "reported": { "displayName": "Test Garage 3", "autoDisabled": false, @@ -101,7 +98,6 @@ ], "state": { "deviceId": "4", - "desired": { "key": "value" }, "reported": { "displayName": "Test Garage 4", "autoDisabled": false, diff --git a/tests/components/nice_go/test_init.py 
b/tests/components/nice_go/test_init.py index 4eb3851516e..051c6623b23 100644 --- a/tests/components/nice_go/test_init.py +++ b/tests/components/nice_go/test_init.py @@ -81,7 +81,6 @@ async def test_firmware_update_required( "displayName": "test-display-name", "migrationStatus": "NOT_STARTED", }, - desired=None, connectionState=None, version=None, timestamp=None, From d2dfba3116d3bd537c0f04a367d072f7d9ec76f7 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sat, 14 Dec 2024 12:00:28 +0100 Subject: [PATCH 0654/1198] Improve Slide Local device tests (#133197) --- .../components/slide_local/entity.py | 10 +++--- tests/components/slide_local/conftest.py | 20 +++++------ .../slide_local/fixtures/slide_1.json | 4 +-- .../slide_local/snapshots/test_init.ambr | 33 +++++++++++++++++++ .../slide_local/test_config_flow.py | 8 ++--- tests/components/slide_local/test_init.py | 29 ++++++++++++++++ 6 files changed, 81 insertions(+), 23 deletions(-) create mode 100644 tests/components/slide_local/snapshots/test_init.ambr create mode 100644 tests/components/slide_local/test_init.py diff --git a/homeassistant/components/slide_local/entity.py b/homeassistant/components/slide_local/entity.py index c1dbc101e6f..51269649add 100644 --- a/homeassistant/components/slide_local/entity.py +++ b/homeassistant/components/slide_local/entity.py @@ -1,6 +1,6 @@ """Entities for slide_local integration.""" -from homeassistant.const import CONF_MAC +from homeassistant.helpers import device_registry as dr from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -12,18 +12,16 @@ class SlideEntity(CoordinatorEntity[SlideCoordinator]): _attr_has_entity_name = True - def __init__( - self, - coordinator: SlideCoordinator, - ) -> None: + def __init__(self, coordinator: SlideCoordinator) -> None: """Initialize the Slide device.""" super().__init__(coordinator) self._attr_device_info = DeviceInfo( manufacturer="Innovation in Motion", - connections={(CONF_MAC, coordinator.data["mac"])}, + connections={(dr.CONNECTION_NETWORK_MAC, coordinator.data["mac"])}, name=coordinator.data["device_name"], sw_version=coordinator.api_version, + hw_version=coordinator.data["board_rev"], serial_number=coordinator.data["mac"], configuration_url=f"http://{coordinator.host}", ) diff --git a/tests/components/slide_local/conftest.py b/tests/components/slide_local/conftest.py index 0d70d1989e7..ad2734bbb64 100644 --- a/tests/components/slide_local/conftest.py +++ b/tests/components/slide_local/conftest.py @@ -6,7 +6,7 @@ from unittest.mock import AsyncMock, patch import pytest from homeassistant.components.slide_local.const import CONF_INVERT_POSITION, DOMAIN -from homeassistant.const import CONF_API_VERSION, CONF_HOST +from homeassistant.const import CONF_API_VERSION, CONF_HOST, CONF_MAC from .const import HOST, SLIDE_INFO_DATA @@ -22,6 +22,7 @@ def mock_config_entry() -> MockConfigEntry: data={ CONF_HOST: HOST, CONF_API_VERSION: 2, + CONF_MAC: "12:34:56:78:90:ab", }, options={ CONF_INVERT_POSITION: False, @@ -33,25 +34,22 @@ def mock_config_entry() -> MockConfigEntry: @pytest.fixture -def mock_slide_api(): +def mock_slide_api() -> Generator[AsyncMock]: """Build a fixture for the SlideLocalApi that connects successfully and returns one device.""" - mock_slide_local_api = AsyncMock() - mock_slide_local_api.slide_info.return_value = SLIDE_INFO_DATA - with ( patch( - "homeassistant.components.slide_local.SlideLocalApi", + 
"homeassistant.components.slide_local.coordinator.SlideLocalApi", autospec=True, - return_value=mock_slide_local_api, - ), + ) as mock_slide_local_api, patch( "homeassistant.components.slide_local.config_flow.SlideLocalApi", - autospec=True, - return_value=mock_slide_local_api, + new=mock_slide_local_api, ), ): - yield mock_slide_local_api + client = mock_slide_local_api.return_value + client.slide_info.return_value = SLIDE_INFO_DATA + yield client @pytest.fixture diff --git a/tests/components/slide_local/fixtures/slide_1.json b/tests/components/slide_local/fixtures/slide_1.json index e8c3c85a324..6367b94f243 100644 --- a/tests/components/slide_local/fixtures/slide_1.json +++ b/tests/components/slide_local/fixtures/slide_1.json @@ -1,6 +1,6 @@ { - "slide_id": "slide_300000000000", - "mac": "300000000000", + "slide_id": "slide_1234567890ab", + "mac": "1234567890ab", "board_rev": 1, "device_name": "slide bedroom", "zone_name": "bedroom", diff --git a/tests/components/slide_local/snapshots/test_init.ambr b/tests/components/slide_local/snapshots/test_init.ambr new file mode 100644 index 00000000000..d90f72e4b05 --- /dev/null +++ b/tests/components/slide_local/snapshots/test_init.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_device_info + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://127.0.0.2', + 'connections': set({ + tuple( + 'mac', + '12:34:56:78:90:ab', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': 1, + 'id': , + 'identifiers': set({ + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Innovation in Motion', + 'model': None, + 'model_id': None, + 'name': 'slide bedroom', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '1234567890ab', + 'suggested_area': None, + 'sw_version': 2, + 'via_device_id': None, + }) +# --- diff --git a/tests/components/slide_local/test_config_flow.py b/tests/components/slide_local/test_config_flow.py index 35aa99a90d7..025f8c323ff 100644 --- a/tests/components/slide_local/test_config_flow.py +++ b/tests/components/slide_local/test_config_flow.py @@ -63,7 +63,7 @@ async def test_user( assert result2["data"][CONF_HOST] == HOST assert result2["data"][CONF_PASSWORD] == "pwd" assert result2["data"][CONF_API_VERSION] == 2 - assert result2["result"].unique_id == "30:00:00:00:00:00" + assert result2["result"].unique_id == "12:34:56:78:90:ab" assert not result2["options"][CONF_INVERT_POSITION] assert len(mock_setup_entry.mock_calls) == 1 @@ -96,7 +96,7 @@ async def test_user_api_1( assert result2["data"][CONF_HOST] == HOST assert result2["data"][CONF_PASSWORD] == "pwd" assert result2["data"][CONF_API_VERSION] == 1 - assert result2["result"].unique_id == "30:00:00:00:00:00" + assert result2["result"].unique_id == "12:34:56:78:90:ab" assert not result2["options"][CONF_INVERT_POSITION] assert len(mock_setup_entry.mock_calls) == 1 @@ -143,7 +143,7 @@ async def test_user_api_error( assert result2["data"][CONF_HOST] == HOST assert result2["data"][CONF_PASSWORD] == "pwd" assert result2["data"][CONF_API_VERSION] == 1 - assert result2["result"].unique_id == "30:00:00:00:00:00" + assert result2["result"].unique_id == "12:34:56:78:90:ab" assert not result2["options"][CONF_INVERT_POSITION] assert len(mock_setup_entry.mock_calls) == 1 @@ -259,7 +259,7 @@ async def test_abort_if_already_setup( ) -> None: """Test we abort if the device is already setup.""" - MockConfigEntry(domain=DOMAIN, unique_id="30:00:00:00:00:00").add_to_hass(hass) + 
MockConfigEntry(domain=DOMAIN, unique_id="12:34:56:78:90:ab").add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, diff --git a/tests/components/slide_local/test_init.py b/tests/components/slide_local/test_init.py new file mode 100644 index 00000000000..7b0a2d83164 --- /dev/null +++ b/tests/components/slide_local/test_init.py @@ -0,0 +1,29 @@ +"""Tests for the Slide Local integration.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . import setup_platform + +from tests.common import MockConfigEntry + + +async def test_device_info( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test device registry integration.""" + await setup_platform(hass, mock_config_entry, [Platform.COVER]) + device_entry = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, "1234567890ab")} + ) + assert device_entry is not None + assert device_entry == snapshot From ca1bcbf5d57f636bcec8a0c0fb86513c31320f39 Mon Sep 17 00:00:00 2001 From: Sid <27780930+autinerd@users.noreply.github.com> Date: Sat, 14 Dec 2024 12:07:38 +0100 Subject: [PATCH 0655/1198] Bump openwebifpy to 4.3.0 (#133188) --- homeassistant/components/enigma2/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/enigma2/manifest.json b/homeassistant/components/enigma2/manifest.json index 1a0875b04c0..7d6887ad14c 100644 --- a/homeassistant/components/enigma2/manifest.json +++ b/homeassistant/components/enigma2/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["openwebif"], - "requirements": ["openwebifpy==4.2.7"] + "requirements": ["openwebifpy==4.3.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 3994f0f3029..0f24315caf1 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1556,7 +1556,7 @@ openhomedevice==2.2.0 opensensemap-api==0.2.0 # homeassistant.components.enigma2 -openwebifpy==4.2.7 +openwebifpy==4.3.0 # homeassistant.components.luci openwrt-luci-rpc==1.1.17 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f3309cf24ea..d6e9685d8d7 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1298,7 +1298,7 @@ openerz-api==0.3.0 openhomedevice==2.2.0 # homeassistant.components.enigma2 -openwebifpy==4.2.7 +openwebifpy==4.3.0 # homeassistant.components.opower opower==0.8.6 From 06391d4635aaf4dc3b528c78d892738be5b94859 Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Sat, 14 Dec 2024 12:10:28 +0100 Subject: [PATCH 0656/1198] Add reconfiguration to slide_local (#133182) Co-authored-by: Joostlek --- .../components/slide_local/__init__.py | 7 ++++ .../components/slide_local/config_flow.py | 35 ++++++++++++++++++- homeassistant/components/slide_local/cover.py | 6 ++-- .../components/slide_local/quality_scale.yaml | 2 +- .../components/slide_local/strings.json | 14 ++++++++ .../slide_local/test_config_flow.py | 27 +++++++++++++- 6 files changed, 85 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/slide_local/__init__.py b/homeassistant/components/slide_local/__init__.py index 878830fe513..dbe4d516d75 
100644 --- a/homeassistant/components/slide_local/__init__.py +++ b/homeassistant/components/slide_local/__init__.py @@ -25,9 +25,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: SlideConfigEntry) -> boo await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + entry.async_on_unload(entry.add_update_listener(update_listener)) + return True +async def update_listener(hass: HomeAssistant, entry: SlideConfigEntry) -> None: + """Handle options update.""" + await hass.config_entries.async_reload(entry.entry_id) + + async def async_unload_entry(hass: HomeAssistant, entry: SlideConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/slide_local/config_flow.py b/homeassistant/components/slide_local/config_flow.py index bc5033e972b..3ccc89be375 100644 --- a/homeassistant/components/slide_local/config_flow.py +++ b/homeassistant/components/slide_local/config_flow.py @@ -15,10 +15,12 @@ from goslideapi.goslideapi import ( import voluptuous as vol from homeassistant.components.zeroconf import ZeroconfServiceInfo -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow from homeassistant.const import CONF_API_VERSION, CONF_HOST, CONF_MAC, CONF_PASSWORD +from homeassistant.core import callback from homeassistant.helpers.device_registry import format_mac +from . import SlideConfigEntry from .const import CONF_INVERT_POSITION, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -34,6 +36,14 @@ class SlideConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 MINOR_VERSION = 1 + @staticmethod + @callback + def async_get_options_flow( + config_entry: SlideConfigEntry, + ) -> SlideOptionsFlowHandler: + """Get the options flow for this handler.""" + return SlideOptionsFlowHandler() + async def async_test_connection( self, user_input: dict[str, str | int] ) -> dict[str, str]: @@ -181,3 +191,26 @@ class SlideConfigFlow(ConfigFlow, domain=DOMAIN): "host": self._host, }, ) + + +class SlideOptionsFlowHandler(OptionsFlow): + """Handle a options flow for slide_local.""" + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Manage the options.""" + if user_input is not None: + return self.async_create_entry(data=user_input) + + return self.async_show_form( + step_id="init", + data_schema=self.add_suggested_values_to_schema( + vol.Schema( + { + vol.Required(CONF_INVERT_POSITION): bool, + } + ), + {CONF_INVERT_POSITION: self.config_entry.options[CONF_INVERT_POSITION]}, + ), + ) diff --git a/homeassistant/components/slide_local/cover.py b/homeassistant/components/slide_local/cover.py index 1bf026746c6..cf04f46d139 100644 --- a/homeassistant/components/slide_local/cover.py +++ b/homeassistant/components/slide_local/cover.py @@ -54,7 +54,7 @@ class SlideCoverLocal(SlideEntity, CoverEntity): super().__init__(coordinator) self._attr_name = None - self._invert = entry.options[CONF_INVERT_POSITION] + self.invert = entry.options[CONF_INVERT_POSITION] self._attr_unique_id = coordinator.data["mac"] @property @@ -79,7 +79,7 @@ class SlideCoverLocal(SlideEntity, CoverEntity): if pos is not None: if (1 - pos) <= DEFAULT_OFFSET or pos <= DEFAULT_OFFSET: pos = round(pos) - if not self._invert: + if not self.invert: pos = 1 - pos pos = int(pos * 100) return pos @@ -101,7 +101,7 @@ class SlideCoverLocal(SlideEntity, CoverEntity): async def 
async_set_cover_position(self, **kwargs: Any) -> None: """Move the cover to a specific position.""" position = kwargs[ATTR_POSITION] / 100 - if not self._invert: + if not self.invert: position = 1 - position if self.coordinator.data["pos"] is not None: diff --git a/homeassistant/components/slide_local/quality_scale.yaml b/homeassistant/components/slide_local/quality_scale.yaml index 048a428f236..4eda62f6497 100644 --- a/homeassistant/components/slide_local/quality_scale.yaml +++ b/homeassistant/components/slide_local/quality_scale.yaml @@ -33,7 +33,7 @@ rules: test-coverage: todo integration-owner: done docs-installation-parameters: done - docs-configuration-parameters: todo + docs-configuration-parameters: done # Gold entity-translations: todo diff --git a/homeassistant/components/slide_local/strings.json b/homeassistant/components/slide_local/strings.json index 38090c7e62d..3e693fe51b9 100644 --- a/homeassistant/components/slide_local/strings.json +++ b/homeassistant/components/slide_local/strings.json @@ -27,6 +27,20 @@ "unknown": "[%key:common::config_flow::error::unknown%]" } }, + "options": { + "step": { + "init": { + "title": "Configure Slide", + "description": "Reconfigure the Slide device", + "data": { + "invert_position": "Invert position" + }, + "data_description": { + "invert_position": "Invert the position of your slide cover." + } + } + } + }, "exceptions": { "update_error": { "message": "Error while updating data from the API." diff --git a/tests/components/slide_local/test_config_flow.py b/tests/components/slide_local/test_config_flow.py index 025f8c323ff..48be7dd7850 100644 --- a/tests/components/slide_local/test_config_flow.py +++ b/tests/components/slide_local/test_config_flow.py @@ -14,10 +14,11 @@ import pytest from homeassistant.components.slide_local.const import CONF_INVERT_POSITION, DOMAIN from homeassistant.components.zeroconf import ZeroconfServiceInfo from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF -from homeassistant.const import CONF_API_VERSION, CONF_HOST, CONF_PASSWORD +from homeassistant.const import CONF_API_VERSION, CONF_HOST, CONF_PASSWORD, Platform from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from . 
import setup_platform from .const import HOST, SLIDE_INFO_DATA from tests.common import MockConfigEntry @@ -371,3 +372,27 @@ async def test_zeroconf_connection_error( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "discovery_connection_failed" + + +async def test_options_flow( + hass: HomeAssistant, mock_slide_api: AsyncMock, mock_config_entry: MockConfigEntry +) -> None: + """Test options flow works correctly.""" + await setup_platform(hass, mock_config_entry, [Platform.COVER]) + + result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_INVERT_POSITION: True, + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert mock_config_entry.options == { + CONF_INVERT_POSITION: True, + } From d85d98607589e76ef89c3917c4f6384df6591700 Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Sat, 14 Dec 2024 12:19:42 +0100 Subject: [PATCH 0657/1198] Add button entity to slide_local (#133141) Co-authored-by: Joostlek --- .../components/slide_local/__init__.py | 6 +-- .../components/slide_local/button.py | 42 +++++++++++++++++ .../components/slide_local/icons.json | 9 ++++ .../components/slide_local/strings.json | 7 +++ .../slide_local/snapshots/test_button.ambr | 47 +++++++++++++++++++ tests/components/slide_local/test_button.py | 46 ++++++++++++++++++ 6 files changed, 153 insertions(+), 4 deletions(-) create mode 100644 homeassistant/components/slide_local/button.py create mode 100644 homeassistant/components/slide_local/icons.json create mode 100644 tests/components/slide_local/snapshots/test_button.ambr create mode 100644 tests/components/slide_local/test_button.py diff --git a/homeassistant/components/slide_local/__init__.py b/homeassistant/components/slide_local/__init__.py index dbe4d516d75..6f329477600 100644 --- a/homeassistant/components/slide_local/__init__.py +++ b/homeassistant/components/slide_local/__init__.py @@ -2,16 +2,14 @@ from __future__ import annotations -from goslideapi.goslideapi import GoSlideLocal as SlideLocalApi - from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from .coordinator import SlideCoordinator -PLATFORMS = [Platform.COVER] -type SlideConfigEntry = ConfigEntry[SlideLocalApi] +PLATFORMS = [Platform.BUTTON, Platform.COVER] +type SlideConfigEntry = ConfigEntry[SlideCoordinator] async def async_setup_entry(hass: HomeAssistant, entry: SlideConfigEntry) -> bool: diff --git a/homeassistant/components/slide_local/button.py b/homeassistant/components/slide_local/button.py new file mode 100644 index 00000000000..9c285881116 --- /dev/null +++ b/homeassistant/components/slide_local/button.py @@ -0,0 +1,42 @@ +"""Support for Slide button.""" + +from __future__ import annotations + +from homeassistant.components.button import ButtonEntity +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import SlideConfigEntry +from .coordinator import SlideCoordinator +from .entity import SlideEntity + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SlideConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up button for Slide platform.""" + + coordinator = entry.runtime_data + + async_add_entities([SlideButton(coordinator)]) + + +class SlideButton(SlideEntity, ButtonEntity): + """Defines a Slide button.""" + + _attr_entity_category = EntityCategory.CONFIG + _attr_translation_key = "calibrate" + + def __init__(self, coordinator: SlideCoordinator) -> None: + """Initialize the slide button.""" + super().__init__(coordinator) + self._attr_unique_id = f"{coordinator.data["mac"]}-calibrate" + + async def async_press(self) -> None: + """Send out a calibrate command.""" + await self.coordinator.slide.slide_calibrate(self.coordinator.host) diff --git a/homeassistant/components/slide_local/icons.json b/homeassistant/components/slide_local/icons.json new file mode 100644 index 00000000000..70d53e7f7a3 --- /dev/null +++ b/homeassistant/components/slide_local/icons.json @@ -0,0 +1,9 @@ +{ + "entity": { + "button": { + "calibrate": { + "default": "mdi:tape-measure" + } + } + } +} diff --git a/homeassistant/components/slide_local/strings.json b/homeassistant/components/slide_local/strings.json index 3e693fe51b9..c593dea8ed7 100644 --- a/homeassistant/components/slide_local/strings.json +++ b/homeassistant/components/slide_local/strings.json @@ -41,6 +41,13 @@ } } }, + "entity": { + "button": { + "calibrate": { + "name": "Calibrate" + } + } + }, "exceptions": { "update_error": { "message": "Error while updating data from the API." diff --git a/tests/components/slide_local/snapshots/test_button.ambr b/tests/components/slide_local/snapshots/test_button.ambr new file mode 100644 index 00000000000..549538f1361 --- /dev/null +++ b/tests/components/slide_local/snapshots/test_button.ambr @@ -0,0 +1,47 @@ +# serializer version: 1 +# name: test_all_entities[button.slide_bedroom_calibrate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.slide_bedroom_calibrate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Calibrate', + 'platform': 'slide_local', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'calibrate', + 'unique_id': '1234567890ab-calibrate', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[button.slide_bedroom_calibrate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'slide bedroom Calibrate', + }), + 'context': , + 'entity_id': 'button.slide_bedroom_calibrate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/slide_local/test_button.py b/tests/components/slide_local/test_button.py new file mode 100644 index 00000000000..646c8fd7ef3 --- /dev/null +++ b/tests/components/slide_local/test_button.py @@ -0,0 +1,46 @@ +"""Tests for the Slide Local button platform.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, 
SERVICE_PRESS +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_platform + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_platform(hass, mock_config_entry, [Platform.BUTTON]) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_pressing_button( + hass: HomeAssistant, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test pressing button.""" + await setup_platform(hass, mock_config_entry, [Platform.BUTTON]) + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: "button.slide_bedroom_calibrate", + }, + blocking=True, + ) + mock_slide_api.slide_calibrate.assert_called_once() From 980b8a91e62c449fab558318573fa756818875a6 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Sat, 14 Dec 2024 14:21:19 +0100 Subject: [PATCH 0658/1198] Revert "Simplify recorder RecorderRunsManager" (#133201) Revert "Simplify recorder RecorderRunsManager (#131785)" This reverts commit cf0ee635077114961f6e508be56ce7620c718c18. --- .../recorder/table_managers/recorder_runs.py | 73 ++++++++++++++++--- .../table_managers/test_recorder_runs.py | 32 ++++++-- 2 files changed, 90 insertions(+), 15 deletions(-) diff --git a/homeassistant/components/recorder/table_managers/recorder_runs.py b/homeassistant/components/recorder/table_managers/recorder_runs.py index 4ca0aa18b88..b0b9818118b 100644 --- a/homeassistant/components/recorder/table_managers/recorder_runs.py +++ b/homeassistant/components/recorder/table_managers/recorder_runs.py @@ -2,6 +2,8 @@ from __future__ import annotations +import bisect +from dataclasses import dataclass from datetime import datetime from sqlalchemy.orm.session import Session @@ -9,6 +11,34 @@ from sqlalchemy.orm.session import Session import homeassistant.util.dt as dt_util from ..db_schema import RecorderRuns +from ..models import process_timestamp + + +def _find_recorder_run_for_start_time( + run_history: _RecorderRunsHistory, start: datetime +) -> RecorderRuns | None: + """Find the recorder run for a start time in _RecorderRunsHistory.""" + run_timestamps = run_history.run_timestamps + runs_by_timestamp = run_history.runs_by_timestamp + + # bisect_left tells us were we would insert + # a value in the list of runs after the start timestamp. 
+ # + # The run before that (idx-1) is when the run started + # + # If idx is 0, history never ran before the start timestamp + # + if idx := bisect.bisect_left(run_timestamps, start.timestamp()): + return runs_by_timestamp[run_timestamps[idx - 1]] + return None + + +@dataclass(frozen=True) +class _RecorderRunsHistory: + """Bisectable history of RecorderRuns.""" + + run_timestamps: list[int] + runs_by_timestamp: dict[int, RecorderRuns] class RecorderRunsManager: @@ -18,7 +48,7 @@ class RecorderRunsManager: """Track recorder run history.""" self._recording_start = dt_util.utcnow() self._current_run_info: RecorderRuns | None = None - self._first_run: RecorderRuns | None = None + self._run_history = _RecorderRunsHistory([], {}) @property def recording_start(self) -> datetime: @@ -28,7 +58,9 @@ class RecorderRunsManager: @property def first(self) -> RecorderRuns: """Get the first run.""" - return self._first_run or self.current + if runs_by_timestamp := self._run_history.runs_by_timestamp: + return next(iter(runs_by_timestamp.values())) + return self.current @property def current(self) -> RecorderRuns: @@ -46,6 +78,15 @@ class RecorderRunsManager: """Return if a run is active.""" return self._current_run_info is not None + def get(self, start: datetime) -> RecorderRuns | None: + """Return the recorder run that started before or at start. + + If the first run started after the start, return None + """ + if start >= self.recording_start: + return self.current + return _find_recorder_run_for_start_time(self._run_history, start) + def start(self, session: Session) -> None: """Start a new run. @@ -81,17 +122,31 @@ class RecorderRunsManager: Must run in the recorder thread. """ - if ( - run := session.query(RecorderRuns) - .order_by(RecorderRuns.start.asc()) - .first() - ): + run_timestamps: list[int] = [] + runs_by_timestamp: dict[int, RecorderRuns] = {} + + for run in session.query(RecorderRuns).order_by(RecorderRuns.start.asc()).all(): session.expunge(run) - self._first_run = run + if run_dt := process_timestamp(run.start): + # Not sure if this is correct or runs_by_timestamp annotation should be changed + timestamp = int(run_dt.timestamp()) + run_timestamps.append(timestamp) + runs_by_timestamp[timestamp] = run + + # + # self._run_history is accessed in get() + # which is allowed to be called from any thread + # + # We use a dataclass to ensure that when we update + # run_timestamps and runs_by_timestamp + # are never out of sync with each other. + # + self._run_history = _RecorderRunsHistory(run_timestamps, runs_by_timestamp) def clear(self) -> None: """Clear the current run after ending it. Must run in the recorder thread. 
""" - self._current_run_info = None + if self._current_run_info: + self._current_run_info = None diff --git a/tests/components/recorder/table_managers/test_recorder_runs.py b/tests/components/recorder/table_managers/test_recorder_runs.py index e79def01bad..41f3a8fef4d 100644 --- a/tests/components/recorder/table_managers/test_recorder_runs.py +++ b/tests/components/recorder/table_managers/test_recorder_runs.py @@ -21,11 +21,6 @@ async def test_run_history(recorder_mock: Recorder, hass: HomeAssistant) -> None two_days_ago = now - timedelta(days=2) one_day_ago = now - timedelta(days=1) - # Test that the first run falls back to the current run - assert process_timestamp( - instance.recorder_runs_manager.first.start - ) == process_timestamp(instance.recorder_runs_manager.current.start) - with instance.get_session() as session: session.add(RecorderRuns(start=three_days_ago, created=three_days_ago)) session.add(RecorderRuns(start=two_days_ago, created=two_days_ago)) @@ -34,7 +29,32 @@ async def test_run_history(recorder_mock: Recorder, hass: HomeAssistant) -> None instance.recorder_runs_manager.load_from_db(session) assert ( - process_timestamp(instance.recorder_runs_manager.first.start) == three_days_ago + process_timestamp( + instance.recorder_runs_manager.get( + three_days_ago + timedelta(microseconds=1) + ).start + ) + == three_days_ago + ) + assert ( + process_timestamp( + instance.recorder_runs_manager.get( + two_days_ago + timedelta(microseconds=1) + ).start + ) + == two_days_ago + ) + assert ( + process_timestamp( + instance.recorder_runs_manager.get( + one_day_ago + timedelta(microseconds=1) + ).start + ) + == one_day_ago + ) + assert ( + process_timestamp(instance.recorder_runs_manager.get(now).start) + == instance.recorder_runs_manager.recording_start ) From 9e2a3ea0e5c95c451ffc03f765b17041f69fcfa7 Mon Sep 17 00:00:00 2001 From: Dan Raper Date: Sat, 14 Dec 2024 17:12:44 +0000 Subject: [PATCH 0659/1198] Add Ohme integration (#132574) --- CODEOWNERS | 2 + homeassistant/components/ohme/__init__.py | 65 +++++ homeassistant/components/ohme/config_flow.py | 64 +++++ homeassistant/components/ohme/const.py | 6 + homeassistant/components/ohme/coordinator.py | 68 +++++ homeassistant/components/ohme/entity.py | 42 +++ homeassistant/components/ohme/icons.json | 18 ++ homeassistant/components/ohme/manifest.json | 11 + .../components/ohme/quality_scale.yaml | 83 ++++++ homeassistant/components/ohme/sensor.py | 107 +++++++ homeassistant/components/ohme/strings.json | 51 ++++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 6 + requirements_all.txt | 3 + requirements_test_all.txt | 3 + tests/components/ohme/__init__.py | 14 + tests/components/ohme/conftest.py | 64 +++++ .../components/ohme/snapshots/test_init.ambr | 33 +++ .../ohme/snapshots/test_sensor.ambr | 268 ++++++++++++++++++ tests/components/ohme/test_config_flow.py | 110 +++++++ tests/components/ohme/test_init.py | 47 +++ tests/components/ohme/test_sensor.py | 59 ++++ 22 files changed, 1125 insertions(+) create mode 100644 homeassistant/components/ohme/__init__.py create mode 100644 homeassistant/components/ohme/config_flow.py create mode 100644 homeassistant/components/ohme/const.py create mode 100644 homeassistant/components/ohme/coordinator.py create mode 100644 homeassistant/components/ohme/entity.py create mode 100644 homeassistant/components/ohme/icons.json create mode 100644 homeassistant/components/ohme/manifest.json create mode 100644 homeassistant/components/ohme/quality_scale.yaml create 
mode 100644 homeassistant/components/ohme/sensor.py create mode 100644 homeassistant/components/ohme/strings.json create mode 100644 tests/components/ohme/__init__.py create mode 100644 tests/components/ohme/conftest.py create mode 100644 tests/components/ohme/snapshots/test_init.ambr create mode 100644 tests/components/ohme/snapshots/test_sensor.ambr create mode 100644 tests/components/ohme/test_config_flow.py create mode 100644 tests/components/ohme/test_init.py create mode 100644 tests/components/ohme/test_sensor.py diff --git a/CODEOWNERS b/CODEOWNERS index 06eb70c7576..f1c6aa4aea5 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1053,6 +1053,8 @@ build.json @home-assistant/supervisor /homeassistant/components/octoprint/ @rfleming71 /tests/components/octoprint/ @rfleming71 /homeassistant/components/ohmconnect/ @robbiet480 +/homeassistant/components/ohme/ @dan-r +/tests/components/ohme/ @dan-r /homeassistant/components/ollama/ @synesthesiam /tests/components/ollama/ @synesthesiam /homeassistant/components/ombi/ @larssont diff --git a/homeassistant/components/ohme/__init__.py b/homeassistant/components/ohme/__init__.py new file mode 100644 index 00000000000..8ca983cd72a --- /dev/null +++ b/homeassistant/components/ohme/__init__.py @@ -0,0 +1,65 @@ +"""Set up ohme integration.""" + +from dataclasses import dataclass + +from ohme import ApiException, AuthException, OhmeApiClient + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady + +from .const import DOMAIN, PLATFORMS +from .coordinator import OhmeAdvancedSettingsCoordinator, OhmeChargeSessionCoordinator + +type OhmeConfigEntry = ConfigEntry[OhmeRuntimeData] + + +@dataclass() +class OhmeRuntimeData: + """Dataclass to hold ohme coordinators.""" + + charge_session_coordinator: OhmeChargeSessionCoordinator + advanced_settings_coordinator: OhmeAdvancedSettingsCoordinator + + +async def async_setup_entry(hass: HomeAssistant, entry: OhmeConfigEntry) -> bool: + """Set up Ohme from a config entry.""" + + client = OhmeApiClient(entry.data[CONF_EMAIL], entry.data[CONF_PASSWORD]) + + try: + await client.async_login() + + if not await client.async_update_device_info(): + raise ConfigEntryNotReady( + translation_key="device_info_failed", translation_domain=DOMAIN + ) + except AuthException as e: + raise ConfigEntryError( + translation_key="auth_failed", translation_domain=DOMAIN + ) from e + except ApiException as e: + raise ConfigEntryNotReady( + translation_key="api_failed", translation_domain=DOMAIN + ) from e + + coordinators = ( + OhmeChargeSessionCoordinator(hass, client), + OhmeAdvancedSettingsCoordinator(hass, client), + ) + + for coordinator in coordinators: + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = OhmeRuntimeData(*coordinators) + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: OhmeConfigEntry) -> bool: + """Unload a config entry.""" + + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/ohme/config_flow.py b/homeassistant/components/ohme/config_flow.py new file mode 100644 index 00000000000..ea110f6df23 --- /dev/null +++ b/homeassistant/components/ohme/config_flow.py @@ -0,0 +1,64 @@ +"""Config flow for ohme integration.""" + +from typing import Any + +from ohme 
import ApiException, AuthException, OhmeApiClient +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.helpers.selector import ( + TextSelector, + TextSelectorConfig, + TextSelectorType, +) + +from .const import DOMAIN + +USER_SCHEMA = vol.Schema( + { + vol.Required(CONF_EMAIL): TextSelector( + TextSelectorConfig( + type=TextSelectorType.EMAIL, + autocomplete="email", + ), + ), + vol.Required(CONF_PASSWORD): TextSelector( + TextSelectorConfig( + type=TextSelectorType.PASSWORD, + autocomplete="current-password", + ), + ), + } +) + + +class OhmeConfigFlow(ConfigFlow, domain=DOMAIN): + """Config flow.""" + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """First config step.""" + + errors: dict[str, str] = {} + + if user_input is not None: + self._async_abort_entries_match({CONF_EMAIL: user_input[CONF_EMAIL]}) + + instance = OhmeApiClient(user_input[CONF_EMAIL], user_input[CONF_PASSWORD]) + try: + await instance.async_login() + except AuthException: + errors["base"] = "invalid_auth" + except ApiException: + errors["base"] = "unknown" + + if not errors: + return self.async_create_entry( + title=user_input[CONF_EMAIL], data=user_input + ) + + return self.async_show_form( + step_id="user", data_schema=USER_SCHEMA, errors=errors + ) diff --git a/homeassistant/components/ohme/const.py b/homeassistant/components/ohme/const.py new file mode 100644 index 00000000000..adc5ddfd61b --- /dev/null +++ b/homeassistant/components/ohme/const.py @@ -0,0 +1,6 @@ +"""Component constants.""" + +from homeassistant.const import Platform + +DOMAIN = "ohme" +PLATFORMS = [Platform.SENSOR] diff --git a/homeassistant/components/ohme/coordinator.py b/homeassistant/components/ohme/coordinator.py new file mode 100644 index 00000000000..5de59b3d4b2 --- /dev/null +++ b/homeassistant/components/ohme/coordinator.py @@ -0,0 +1,68 @@ +"""Ohme coordinators.""" + +from abc import abstractmethod +from datetime import timedelta +import logging + +from ohme import ApiException, OhmeApiClient + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +class OhmeBaseCoordinator(DataUpdateCoordinator[None]): + """Base for all Ohme coordinators.""" + + client: OhmeApiClient + _default_update_interval: timedelta | None = timedelta(minutes=1) + coordinator_name: str = "" + + def __init__(self, hass: HomeAssistant, client: OhmeApiClient) -> None: + """Initialise coordinator.""" + super().__init__( + hass, + _LOGGER, + name="", + update_interval=self._default_update_interval, + ) + + self.name = f"Ohme {self.coordinator_name}" + self.client = client + + async def _async_update_data(self) -> None: + """Fetch data from API endpoint.""" + try: + await self._internal_update_data() + except ApiException as e: + raise UpdateFailed( + translation_key="api_failed", translation_domain=DOMAIN + ) from e + + @abstractmethod + async def _internal_update_data(self) -> None: + """Update coordinator data.""" + + +class OhmeChargeSessionCoordinator(OhmeBaseCoordinator): + """Coordinator to pull all updates from the API.""" + + coordinator_name = "Charge Sessions" + _default_update_interval = timedelta(seconds=30) + + async def _internal_update_data(self): + """Fetch data from API endpoint.""" + await 
self.client.async_get_charge_session() + + +class OhmeAdvancedSettingsCoordinator(OhmeBaseCoordinator): + """Coordinator to pull settings and charger state from the API.""" + + coordinator_name = "Advanced Settings" + + async def _internal_update_data(self): + """Fetch data from API endpoint.""" + await self.client.async_get_advanced_settings() diff --git a/homeassistant/components/ohme/entity.py b/homeassistant/components/ohme/entity.py new file mode 100644 index 00000000000..2c662f7fccb --- /dev/null +++ b/homeassistant/components/ohme/entity.py @@ -0,0 +1,42 @@ +"""Base class for entities.""" + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import OhmeBaseCoordinator + + +class OhmeEntity(CoordinatorEntity[OhmeBaseCoordinator]): + """Base class for all Ohme entities.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: OhmeBaseCoordinator, + entity_description: EntityDescription, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + + self.entity_description = entity_description + + client = coordinator.client + self._attr_unique_id = f"{client.serial}_{entity_description.key}" + + device_info = client.device_info + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, client.serial)}, + name=device_info["name"], + manufacturer="Ohme", + model=device_info["model"], + sw_version=device_info["sw_version"], + serial_number=client.serial, + ) + + @property + def available(self) -> bool: + """Return if charger reporting as online.""" + return super().available and self.coordinator.client.available diff --git a/homeassistant/components/ohme/icons.json b/homeassistant/components/ohme/icons.json new file mode 100644 index 00000000000..228907b3dbe --- /dev/null +++ b/homeassistant/components/ohme/icons.json @@ -0,0 +1,18 @@ +{ + "entity": { + "sensor": { + "status": { + "default": "mdi:car", + "state": { + "unplugged": "mdi:power-plug-off", + "plugged_in": "mdi:power-plug", + "charging": "mdi:battery-charging-100", + "pending_approval": "mdi:alert-decagram" + } + }, + "ct_current": { + "default": "mdi:gauge" + } + } + } +} diff --git a/homeassistant/components/ohme/manifest.json b/homeassistant/components/ohme/manifest.json new file mode 100644 index 00000000000..2d387ce9e8a --- /dev/null +++ b/homeassistant/components/ohme/manifest.json @@ -0,0 +1,11 @@ +{ + "domain": "ohme", + "name": "Ohme", + "codeowners": ["@dan-r"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/ohme/", + "integration_type": "device", + "iot_class": "cloud_polling", + "quality_scale": "bronze", + "requirements": ["ohme==1.1.1"] +} diff --git a/homeassistant/components/ohme/quality_scale.yaml b/homeassistant/components/ohme/quality_scale.yaml new file mode 100644 index 00000000000..cffc9eb7b82 --- /dev/null +++ b/homeassistant/components/ohme/quality_scale.yaml @@ -0,0 +1,83 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration has no custom actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration has no custom actions. 
+ docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + This integration has no explicit subscriptions to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: | + This integration has no custom actions and read-only platform only. + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + This integration has no options flow. + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: done + reauthentication-flow: todo + test-coverage: done + + # Gold + devices: done + diagnostics: todo + discovery: + status: exempt + comment: | + All supported devices are cloud connected over mobile data. Discovery is not possible. + discovery-update-info: + status: exempt + comment: | + All supported devices are cloud connected over mobile data. Discovery is not possible. + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: done + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: todo + entity-category: todo + entity-device-class: done + entity-disabled-by-default: todo + entity-translations: done + exception-translations: done + icon-translations: done + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration currently has no repairs. + stale-devices: todo + # Platinum + async-dependency: todo + inject-websession: todo + strict-typing: todo diff --git a/homeassistant/components/ohme/sensor.py b/homeassistant/components/ohme/sensor.py new file mode 100644 index 00000000000..d4abaf85b1f --- /dev/null +++ b/homeassistant/components/ohme/sensor.py @@ -0,0 +1,107 @@ +"""Platform for sensor.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass + +from ohme import ChargerStatus, OhmeApiClient + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import UnitOfElectricCurrent, UnitOfEnergy, UnitOfPower +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import OhmeConfigEntry +from .entity import OhmeEntity + +PARALLEL_UPDATES = 0 + + +@dataclass(frozen=True, kw_only=True) +class OhmeSensorDescription(SensorEntityDescription): + """Class describing Ohme sensor entities.""" + + value_fn: Callable[[OhmeApiClient], str | int | float] + is_supported_fn: Callable[[OhmeApiClient], bool] = lambda _: True + + +SENSOR_CHARGE_SESSION = [ + OhmeSensorDescription( + key="status", + translation_key="status", + device_class=SensorDeviceClass.ENUM, + options=[e.value for e in ChargerStatus], + value_fn=lambda client: client.status.value, + ), + OhmeSensorDescription( + key="current", + device_class=SensorDeviceClass.CURRENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda client: client.power.amps, + ), + OhmeSensorDescription( + key="power", + device_class=SensorDeviceClass.POWER, + native_unit_of_measurement=UnitOfPower.WATT, + suggested_unit_of_measurement=UnitOfPower.KILO_WATT, + suggested_display_precision=1, + value_fn=lambda client: client.power.watts, + ), + OhmeSensorDescription( + key="energy", + device_class=SensorDeviceClass.ENERGY, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + suggested_display_precision=1, + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda client: client.energy, + ), +] + +SENSOR_ADVANCED_SETTINGS = [ + OhmeSensorDescription( + key="ct_current", + translation_key="ct_current", + device_class=SensorDeviceClass.CURRENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda client: client.power.ct_amps, + is_supported_fn=lambda client: client.ct_connected, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: OhmeConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up sensors.""" + coordinators = config_entry.runtime_data + coordinator_map = [ + (SENSOR_CHARGE_SESSION, coordinators.charge_session_coordinator), + (SENSOR_ADVANCED_SETTINGS, coordinators.advanced_settings_coordinator), + ] + + async_add_entities( + OhmeSensor(coordinator, description) + for entities, coordinator in coordinator_map + for description in entities + if description.is_supported_fn(coordinator.client) + ) + + +class OhmeSensor(OhmeEntity, SensorEntity): + """Generic sensor for Ohme.""" + + entity_description: OhmeSensorDescription + + @property + def native_value(self) -> str | int | float: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.coordinator.client) diff --git a/homeassistant/components/ohme/strings.json b/homeassistant/components/ohme/strings.json new file mode 100644 index 00000000000..06231ed5cf4 --- /dev/null +++ b/homeassistant/components/ohme/strings.json @@ -0,0 +1,51 @@ +{ + "config": { + "step": { + "user": { + "description": "Configure your Ohme account. 
If you signed up to Ohme with a third party account like Google, please reset your password via Ohme before configuring this integration.", + "data": { + "email": "[%key:common::config_flow::data::email%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "email": "Enter the email address associated with your Ohme account.", + "password": "Enter the password for your Ohme account" + } + } + }, + "error": { + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + }, + "entity": { + "sensor": { + "status": { + "name": "Status", + "state": { + "unplugged": "Unplugged", + "plugged_in": "Plugged in", + "charging": "Charging", + "pending_approval": "Pending approval" + } + }, + "ct_current": { + "name": "CT current" + } + } + }, + "exceptions": { + "auth_failed": { + "message": "Unable to login to Ohme" + }, + "device_info_failed": { + "message": "Unable to get Ohme device information" + }, + "api_failed": { + "message": "Error communicating with Ohme API" + } + } +} diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 3b33d31a2a2..8e88e8a2ae8 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -423,6 +423,7 @@ FLOWS = { "nzbget", "obihai", "octoprint", + "ohme", "ollama", "omnilogic", "oncue", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 1530e308e7d..a94962b458b 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -4329,6 +4329,12 @@ "config_flow": false, "iot_class": "cloud_polling" }, + "ohme": { + "name": "Ohme", + "integration_type": "device", + "config_flow": true, + "iot_class": "cloud_polling" + }, "ollama": { "name": "Ollama", "integration_type": "service", diff --git a/requirements_all.txt b/requirements_all.txt index 0f24315caf1..54e80820491 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1522,6 +1522,9 @@ odp-amsterdam==6.0.2 # homeassistant.components.oem oemthermostat==1.1.1 +# homeassistant.components.ohme +ohme==1.1.1 + # homeassistant.components.ollama ollama==0.3.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d6e9685d8d7..d4c1efeda15 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1270,6 +1270,9 @@ objgraph==3.5.0 # homeassistant.components.garages_amsterdam odp-amsterdam==6.0.2 +# homeassistant.components.ohme +ohme==1.1.1 + # homeassistant.components.ollama ollama==0.3.3 diff --git a/tests/components/ohme/__init__.py b/tests/components/ohme/__init__.py new file mode 100644 index 00000000000..7c00bedbd1e --- /dev/null +++ b/tests/components/ohme/__init__.py @@ -0,0 +1,14 @@ +"""Tests for the Ohme integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Set up the Ohme integration for testing.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/ohme/conftest.py b/tests/components/ohme/conftest.py new file mode 100644 index 00000000000..90395feeb6b --- /dev/null +++ b/tests/components/ohme/conftest.py 
@@ -0,0 +1,64 @@ +"""Provide common fixtures.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +from ohme import ChargerPower, ChargerStatus +import pytest + +from homeassistant.components.ohme.const import DOMAIN +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.ohme.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + title="test@example.com", + domain=DOMAIN, + version=1, + data={ + CONF_EMAIL: "test@example.com", + CONF_PASSWORD: "hunter2", + }, + ) + + +@pytest.fixture +def mock_client(): + """Fixture to mock the OhmeApiClient.""" + with ( + patch( + "homeassistant.components.ohme.config_flow.OhmeApiClient", + autospec=True, + ) as client, + patch( + "homeassistant.components.ohme.OhmeApiClient", + new=client, + ), + ): + client = client.return_value + client.async_login.return_value = True + client.status = ChargerStatus.CHARGING + client.power = ChargerPower(0, 0, 0, 0) + client.serial = "chargerid" + client.ct_connected = True + client.energy = 1000 + client.device_info = { + "name": "Ohme Home Pro", + "model": "Home Pro", + "sw_version": "v2.65", + } + yield client diff --git a/tests/components/ohme/snapshots/test_init.ambr b/tests/components/ohme/snapshots/test_init.ambr new file mode 100644 index 00000000000..e3ed339b78a --- /dev/null +++ b/tests/components/ohme/snapshots/test_init.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_device + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'ohme', + 'chargerid', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Ohme', + 'model': 'Home Pro', + 'model_id': None, + 'name': 'Ohme Home Pro', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': 'chargerid', + 'suggested_area': None, + 'sw_version': 'v2.65', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/ohme/snapshots/test_sensor.ambr b/tests/components/ohme/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..fbffa5b7e5d --- /dev/null +++ b/tests/components/ohme/snapshots/test_sensor.ambr @@ -0,0 +1,268 @@ +# serializer version: 1 +# name: test_sensors[sensor.ohme_home_pro_ct_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ohme_home_pro_ct_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'CT current', + 'platform': 'ohme', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ct_current', + 'unique_id': 'chargerid_ct_current', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensors[sensor.ohme_home_pro_ct_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Ohme Home Pro CT current', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ohme_home_pro_ct_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.ohme_home_pro_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ohme_home_pro_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current', + 'platform': 'ohme', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'chargerid_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.ohme_home_pro_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Ohme Home Pro Current', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ohme_home_pro_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.ohme_home_pro_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ohme_home_pro_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy', + 'platform': 'ohme', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'chargerid_energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.ohme_home_pro_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Ohme Home Pro Energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ohme_home_pro_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.0', + }) +# --- +# name: test_sensors[sensor.ohme_home_pro_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ohme_home_pro_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'ohme', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 
'unique_id': 'chargerid_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.ohme_home_pro_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Ohme Home Pro Power', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ohme_home_pro_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.ohme_home_pro_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'unplugged', + 'pending_approval', + 'charging', + 'plugged_in', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ohme_home_pro_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Status', + 'platform': 'ohme', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status', + 'unique_id': 'chargerid_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.ohme_home_pro_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Ohme Home Pro Status', + 'options': list([ + 'unplugged', + 'pending_approval', + 'charging', + 'plugged_in', + ]), + }), + 'context': , + 'entity_id': 'sensor.ohme_home_pro_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'charging', + }) +# --- diff --git a/tests/components/ohme/test_config_flow.py b/tests/components/ohme/test_config_flow.py new file mode 100644 index 00000000000..b9d4a10a76e --- /dev/null +++ b/tests/components/ohme/test_config_flow.py @@ -0,0 +1,110 @@ +"""Tests for the config flow.""" + +from unittest.mock import AsyncMock, MagicMock + +from ohme import ApiException, AuthException +import pytest + +from homeassistant.components.ohme.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_config_flow_success( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_client: MagicMock +) -> None: + """Test config flow.""" + + # Initial form load + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + # Successful login + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_EMAIL: "test@example.com", CONF_PASSWORD: "hunter2"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "test@example.com" + assert result["data"] == { + CONF_EMAIL: "test@example.com", + CONF_PASSWORD: "hunter2", + } + + +@pytest.mark.parametrize( + ("test_exception", "expected_error"), + [(AuthException, "invalid_auth"), (ApiException, "unknown")], +) +async def test_config_flow_fail( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_client: MagicMock, + test_exception: Exception, + expected_error: str, +) -> None: + """Test config flow errors.""" + + # Initial form load + result = await 
hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + # Failed login + mock_client.async_login.side_effect = test_exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_EMAIL: "test@example.com", CONF_PASSWORD: "hunter1"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": expected_error} + + # End with CREATE_ENTRY + mock_client.async_login.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_EMAIL: "test@example.com", CONF_PASSWORD: "hunter1"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "test@example.com" + assert result["data"] == { + CONF_EMAIL: "test@example.com", + CONF_PASSWORD: "hunter1", + } + + +async def test_already_configured( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Ensure we can't add the same account twice.""" + + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: "test@example.com", + CONF_PASSWORD: "hunter3", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/ohme/test_init.py b/tests/components/ohme/test_init.py new file mode 100644 index 00000000000..0f4c7cd64ee --- /dev/null +++ b/tests/components/ohme/test_init.py @@ -0,0 +1,47 @@ +"""Test init of Ohme integration.""" + +from unittest.mock import MagicMock + +from syrupy import SnapshotAssertion + +from homeassistant.components.ohme.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +async def test_load_unload_config_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test loading and unloading the integration.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_device( + mock_client: MagicMock, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Snapshot the device from registry.""" + await setup_integration(hass, mock_config_entry) + + device = device_registry.async_get_device({(DOMAIN, mock_client.serial)}) + assert device + assert device == snapshot diff --git a/tests/components/ohme/test_sensor.py b/tests/components/ohme/test_sensor.py new file mode 100644 index 00000000000..21f9f06f963 --- /dev/null +++ b/tests/components/ohme/test_sensor.py @@ -0,0 +1,59 @@ +"""Tests for sensors.""" + +from datetime import timedelta +from unittest.mock import MagicMock, patch + +from freezegun.api import FrozenDateTimeFactory +from ohme import ApiException +from syrupy import SnapshotAssertion + +from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +async def test_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test the Ohme sensors.""" + with patch("homeassistant.components.ohme.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_sensors_unavailable( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test that sensors show as unavailable after a coordinator failure.""" + await setup_integration(hass, mock_config_entry) + + state = hass.states.get("sensor.ohme_home_pro_energy") + assert state.state == "1.0" + + mock_client.async_get_charge_session.side_effect = ApiException + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("sensor.ohme_home_pro_energy") + assert state.state == STATE_UNAVAILABLE + + mock_client.async_get_charge_session.side_effect = None + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("sensor.ohme_home_pro_energy") + assert state.state == "1.0" From ff1df757b157c912eeee993fdd0347686b11ffec Mon Sep 17 00:00:00 2001 From: YogevBokobza Date: Sat, 14 Dec 2024 21:06:36 +0200 Subject: [PATCH 0660/1198] Switcher move _async_call_api to entity.py (#132877) * Switcher move _async_call_api to entity.py * fix based on requested changes * fix based on requested changes --- 
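Note on the refactor below: the duplicated `_async_call_api` helpers in cover.py, light.py and switch.py are replaced by a single implementation on the shared SwitcherEntity base class, so each platform entity only maps Home Assistant services onto API method names. The following is a minimal standalone sketch of that pattern; the class names and FakeClient are illustrative assumptions for the sketch, not the integration's real code.

import asyncio


class FakeClient:
    """Stand-in for the device API client (an assumption for this sketch)."""

    async def set_position(self, position: int) -> bool:
        # Pretend the device accepted the command.
        return True


class BaseEntity:
    """Base class owning the generic API-call error handling."""

    name = "demo shutter"

    async def _async_call_api(self, api: str, *args) -> None:
        """Look up the API method by name, call it, and raise on failure."""
        try:
            response = await getattr(FakeClient(), api)(*args)
        except (TimeoutError, OSError, RuntimeError) as err:
            raise RuntimeError(f"Call api for {self.name} failed: {err!r}") from err
        if not response:
            raise RuntimeError(f"Call api for {self.name} failed: empty response")


class CoverEntity(BaseEntity):
    """Platform entity reduced to mapping services onto API method names."""

    async def async_close_cover(self) -> None:
        await self._async_call_api("set_position", 0)


asyncio.run(CoverEntity().async_close_cover())
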
.../components/switcher_kis/cover.py | 31 ---------------- .../components/switcher_kis/entity.py | 34 ++++++++++++++++++ .../components/switcher_kis/light.py | 31 ---------------- .../components/switcher_kis/switch.py | 31 +--------------- tests/components/switcher_kis/conftest.py | 12 ++----- tests/components/switcher_kis/test_button.py | 8 ++--- tests/components/switcher_kis/test_climate.py | 18 +++++----- tests/components/switcher_kis/test_cover.py | 12 +++---- tests/components/switcher_kis/test_light.py | 8 ++--- .../components/switcher_kis/test_services.py | 26 +++++++------- tests/components/switcher_kis/test_switch.py | 36 ++++++++++--------- 11 files changed, 91 insertions(+), 156 deletions(-) diff --git a/homeassistant/components/switcher_kis/cover.py b/homeassistant/components/switcher_kis/cover.py index 7d3ec0e4af0..513b786a033 100644 --- a/homeassistant/components/switcher_kis/cover.py +++ b/homeassistant/components/switcher_kis/cover.py @@ -2,10 +2,8 @@ from __future__ import annotations -import logging from typing import Any, cast -from aioswitcher.api import SwitcherApi, SwitcherBaseResponse from aioswitcher.device import DeviceCategory, ShutterDirection, SwitcherShutter from homeassistant.components.cover import ( @@ -16,7 +14,6 @@ from homeassistant.components.cover import ( ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -24,8 +21,6 @@ from .const import SIGNAL_DEVICE_ADD from .coordinator import SwitcherDataUpdateCoordinator from .entity import SwitcherEntity -_LOGGER = logging.getLogger(__name__) - API_SET_POSITON = "set_position" API_STOP = "stop_shutter" @@ -92,32 +87,6 @@ class SwitcherBaseCoverEntity(SwitcherEntity, CoverEntity): data.direction[self._cover_id] == ShutterDirection.SHUTTER_UP ) - async def _async_call_api(self, api: str, *args: Any) -> None: - """Call Switcher API.""" - _LOGGER.debug("Calling api for %s, api: '%s', args: %s", self.name, api, args) - response: SwitcherBaseResponse | None = None - error = None - - try: - async with SwitcherApi( - self.coordinator.data.device_type, - self.coordinator.data.ip_address, - self.coordinator.data.device_id, - self.coordinator.data.device_key, - self.coordinator.token, - ) as swapi: - response = await getattr(swapi, api)(*args) - except (TimeoutError, OSError, RuntimeError) as err: - error = repr(err) - - if error or not response or not response.successful: - self.coordinator.last_update_success = False - self.async_write_ha_state() - raise HomeAssistantError( - f"Call api for {self.name} failed, api: '{api}', " - f"args: {args}, response/error: {response or error}" - ) - async def async_close_cover(self, **kwargs: Any) -> None: """Close cover.""" await self._async_call_api(API_SET_POSITON, 0, self._cover_id) diff --git a/homeassistant/components/switcher_kis/entity.py b/homeassistant/components/switcher_kis/entity.py index 12bde521377..e24f59a4a1c 100644 --- a/homeassistant/components/switcher_kis/entity.py +++ b/homeassistant/components/switcher_kis/entity.py @@ -1,11 +1,19 @@ """Base class for Switcher entities.""" +import logging +from typing import Any + +from aioswitcher.api import SwitcherApi, SwitcherBaseResponse + +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr from 
homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity from .coordinator import SwitcherDataUpdateCoordinator +_LOGGER = logging.getLogger(__name__) + class SwitcherEntity(CoordinatorEntity[SwitcherDataUpdateCoordinator]): """Base class for Switcher entities.""" @@ -18,3 +26,29 @@ class SwitcherEntity(CoordinatorEntity[SwitcherDataUpdateCoordinator]): self._attr_device_info = DeviceInfo( connections={(dr.CONNECTION_NETWORK_MAC, coordinator.mac_address)} ) + + async def _async_call_api(self, api: str, *args: Any) -> None: + """Call Switcher API.""" + _LOGGER.debug("Calling api for %s, api: '%s', args: %s", self.name, api, args) + response: SwitcherBaseResponse | None = None + error = None + + try: + async with SwitcherApi( + self.coordinator.data.device_type, + self.coordinator.data.ip_address, + self.coordinator.data.device_id, + self.coordinator.data.device_key, + self.coordinator.token, + ) as swapi: + response = await getattr(swapi, api)(*args) + except (TimeoutError, OSError, RuntimeError) as err: + error = repr(err) + + if error or not response or not response.successful: + self.coordinator.last_update_success = False + self.async_write_ha_state() + raise HomeAssistantError( + f"Call api for {self.name} failed, api: '{api}', " + f"args: {args}, response/error: {response or error}" + ) diff --git a/homeassistant/components/switcher_kis/light.py b/homeassistant/components/switcher_kis/light.py index b2ee624dbc5..75156044efa 100644 --- a/homeassistant/components/switcher_kis/light.py +++ b/homeassistant/components/switcher_kis/light.py @@ -2,16 +2,13 @@ from __future__ import annotations -import logging from typing import Any, cast -from aioswitcher.api import SwitcherApi, SwitcherBaseResponse from aioswitcher.device import DeviceCategory, DeviceState, SwitcherLight from homeassistant.components.light import ColorMode, LightEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -19,8 +16,6 @@ from .const import SIGNAL_DEVICE_ADD from .coordinator import SwitcherDataUpdateCoordinator from .entity import SwitcherEntity -_LOGGER = logging.getLogger(__name__) - API_SET_LIGHT = "set_light" @@ -79,32 +74,6 @@ class SwitcherBaseLightEntity(SwitcherEntity, LightEntity): data = cast(SwitcherLight, self.coordinator.data) return bool(data.light[self._light_id] == DeviceState.ON) - async def _async_call_api(self, api: str, *args: Any) -> None: - """Call Switcher API.""" - _LOGGER.debug("Calling api for %s, api: '%s', args: %s", self.name, api, args) - response: SwitcherBaseResponse | None = None - error = None - - try: - async with SwitcherApi( - self.coordinator.data.device_type, - self.coordinator.data.ip_address, - self.coordinator.data.device_id, - self.coordinator.data.device_key, - self.coordinator.token, - ) as swapi: - response = await getattr(swapi, api)(*args) - except (TimeoutError, OSError, RuntimeError) as err: - error = repr(err) - - if error or not response or not response.successful: - self.coordinator.last_update_success = False - self.async_write_ha_state() - raise HomeAssistantError( - f"Call api for {self.name} failed, api: '{api}', " - f"args: {args}, response/error: {response or error}" - ) - async def async_turn_on(self, **kwargs: Any) -> None: 
"""Turn the light on.""" await self._async_call_api(API_SET_LIGHT, DeviceState.ON, self._light_id) diff --git a/homeassistant/components/switcher_kis/switch.py b/homeassistant/components/switcher_kis/switch.py index 7d14620c1aa..ba0a99b4089 100644 --- a/homeassistant/components/switcher_kis/switch.py +++ b/homeassistant/components/switcher_kis/switch.py @@ -6,7 +6,7 @@ from datetime import timedelta import logging from typing import Any -from aioswitcher.api import Command, SwitcherApi, SwitcherBaseResponse +from aioswitcher.api import Command from aioswitcher.device import DeviceCategory, DeviceState import voluptuous as vol @@ -96,35 +96,6 @@ class SwitcherBaseSwitchEntity(SwitcherEntity, SwitchEntity): self.control_result = None self.async_write_ha_state() - async def _async_call_api(self, api: str, *args: Any) -> None: - """Call Switcher API.""" - _LOGGER.debug( - "Calling api for %s, api: '%s', args: %s", self.coordinator.name, api, args - ) - response: SwitcherBaseResponse | None = None - error = None - - try: - async with SwitcherApi( - self.coordinator.data.device_type, - self.coordinator.data.ip_address, - self.coordinator.data.device_id, - self.coordinator.data.device_key, - ) as swapi: - response = await getattr(swapi, api)(*args) - except (TimeoutError, OSError, RuntimeError) as err: - error = repr(err) - - if error or not response or not response.successful: - _LOGGER.error( - "Call api for %s failed, api: '%s', args: %s, response/error: %s", - self.coordinator.name, - api, - args, - response or error, - ) - self.coordinator.last_update_success = False - @property def is_on(self) -> bool: """Return True if entity is on.""" diff --git a/tests/components/switcher_kis/conftest.py b/tests/components/switcher_kis/conftest.py index 518c36616ee..58172a6962d 100644 --- a/tests/components/switcher_kis/conftest.py +++ b/tests/components/switcher_kis/conftest.py @@ -60,19 +60,11 @@ def mock_api(): patchers = [ patch( - "homeassistant.components.switcher_kis.switch.SwitcherApi.connect", + "homeassistant.components.switcher_kis.entity.SwitcherApi.connect", new=api_mock, ), patch( - "homeassistant.components.switcher_kis.switch.SwitcherApi.disconnect", - new=api_mock, - ), - patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.connect", - new=api_mock, - ), - patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.disconnect", + "homeassistant.components.switcher_kis.entity.SwitcherApi.disconnect", new=api_mock, ), ] diff --git a/tests/components/switcher_kis/test_button.py b/tests/components/switcher_kis/test_button.py index 50c015b4024..6ebd82363e4 100644 --- a/tests/components/switcher_kis/test_button.py +++ b/tests/components/switcher_kis/test_button.py @@ -42,7 +42,7 @@ async def test_assume_button( assert hass.states.get(SWING_OFF_EID) is None with patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( BUTTON_DOMAIN, @@ -79,7 +79,7 @@ async def test_swing_button( assert hass.states.get(SWING_OFF_EID) is not None with patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( BUTTON_DOMAIN, @@ -103,7 +103,7 @@ async def test_control_device_fail( # Test exception during set hvac mode with patch( - 
"homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", side_effect=RuntimeError("fake error"), ) as mock_control_device: with pytest.raises(HomeAssistantError): @@ -130,7 +130,7 @@ async def test_control_device_fail( # Test error response during turn on with patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", return_value=SwitcherBaseResponse(None), ) as mock_control_device: with pytest.raises(HomeAssistantError): diff --git a/tests/components/switcher_kis/test_climate.py b/tests/components/switcher_kis/test_climate.py index 72e1a93d1c3..72a25d20d04 100644 --- a/tests/components/switcher_kis/test_climate.py +++ b/tests/components/switcher_kis/test_climate.py @@ -49,7 +49,7 @@ async def test_climate_hvac_mode( # Test set hvac mode heat with patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( CLIMATE_DOMAIN, @@ -71,7 +71,7 @@ async def test_climate_hvac_mode( # Test set hvac mode off with patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( CLIMATE_DOMAIN, @@ -108,7 +108,7 @@ async def test_climate_temperature( # Test set target temperature with patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( CLIMATE_DOMAIN, @@ -128,7 +128,7 @@ async def test_climate_temperature( # Test set target temperature - incorrect params with patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: with pytest.raises(ServiceValidationError): await hass.services.async_call( @@ -160,7 +160,7 @@ async def test_climate_fan_level( # Test set fan level to high with patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( CLIMATE_DOMAIN, @@ -195,7 +195,7 @@ async def test_climate_swing( # Test set swing mode on with patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( CLIMATE_DOMAIN, @@ -218,7 +218,7 @@ async def test_climate_swing( # Test set swing mode off with patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( CLIMATE_DOMAIN, @@ -249,7 +249,7 @@ async def test_control_device_fail(hass: HomeAssistant, mock_bridge, mock_api) - # Test exception during set hvac mode with patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + 
"homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", side_effect=RuntimeError("fake error"), ) as mock_control_device: with pytest.raises(HomeAssistantError): @@ -276,7 +276,7 @@ async def test_control_device_fail(hass: HomeAssistant, mock_bridge, mock_api) - # Test error response during turn on with patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", return_value=SwitcherBaseResponse(None), ) as mock_control_device: with pytest.raises(HomeAssistantError): diff --git a/tests/components/switcher_kis/test_cover.py b/tests/components/switcher_kis/test_cover.py index 2936cafdd53..5829d6345ef 100644 --- a/tests/components/switcher_kis/test_cover.py +++ b/tests/components/switcher_kis/test_cover.py @@ -115,7 +115,7 @@ async def test_cover( # Test set position with patch( - "homeassistant.components.switcher_kis.cover.SwitcherApi.set_position" + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_position" ) as mock_control_device: await hass.services.async_call( COVER_DOMAIN, @@ -136,7 +136,7 @@ async def test_cover( # Test open with patch( - "homeassistant.components.switcher_kis.cover.SwitcherApi.set_position" + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_position" ) as mock_control_device: await hass.services.async_call( COVER_DOMAIN, @@ -156,7 +156,7 @@ async def test_cover( # Test close with patch( - "homeassistant.components.switcher_kis.cover.SwitcherApi.set_position" + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_position" ) as mock_control_device: await hass.services.async_call( COVER_DOMAIN, @@ -176,7 +176,7 @@ async def test_cover( # Test stop with patch( - "homeassistant.components.switcher_kis.cover.SwitcherApi.stop_shutter" + "homeassistant.components.switcher_kis.entity.SwitcherApi.stop_shutter" ) as mock_control_device: await hass.services.async_call( COVER_DOMAIN, @@ -232,7 +232,7 @@ async def test_cover_control_fail( # Test exception during set position with patch( - "homeassistant.components.switcher_kis.cover.SwitcherApi.set_position", + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_position", side_effect=RuntimeError("fake error"), ) as mock_control_device: with pytest.raises(HomeAssistantError): @@ -257,7 +257,7 @@ async def test_cover_control_fail( # Test error response during set position with patch( - "homeassistant.components.switcher_kis.cover.SwitcherApi.set_position", + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_position", return_value=SwitcherBaseResponse(None), ) as mock_control_device: with pytest.raises(HomeAssistantError): diff --git a/tests/components/switcher_kis/test_light.py b/tests/components/switcher_kis/test_light.py index aa7d6551d75..51d0eb6332f 100644 --- a/tests/components/switcher_kis/test_light.py +++ b/tests/components/switcher_kis/test_light.py @@ -86,7 +86,7 @@ async def test_light( # Test turning on light with patch( - "homeassistant.components.switcher_kis.light.SwitcherApi.set_light", + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_light", ) as mock_set_light: await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True @@ -99,7 +99,7 @@ async def test_light( # Test turning off light with patch( - "homeassistant.components.switcher_kis.light.SwitcherApi.set_light" + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_light" ) as mock_set_light: 
await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True @@ -153,7 +153,7 @@ async def test_light_control_fail( # Test exception during turn on with patch( - "homeassistant.components.switcher_kis.cover.SwitcherApi.set_light", + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_light", side_effect=RuntimeError("fake error"), ) as mock_control_device: with pytest.raises(HomeAssistantError): @@ -178,7 +178,7 @@ async def test_light_control_fail( # Test error response during turn on with patch( - "homeassistant.components.switcher_kis.cover.SwitcherApi.set_light", + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_light", return_value=SwitcherBaseResponse(None), ) as mock_control_device: with pytest.raises(HomeAssistantError): diff --git a/tests/components/switcher_kis/test_services.py b/tests/components/switcher_kis/test_services.py index 65e1967cbac..b4a8168419f 100644 --- a/tests/components/switcher_kis/test_services.py +++ b/tests/components/switcher_kis/test_services.py @@ -16,6 +16,7 @@ from homeassistant.components.switcher_kis.const import ( ) from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.config_validation import time_period_str from homeassistant.util import slugify @@ -48,7 +49,7 @@ async def test_turn_on_with_timer_service( assert state.state == STATE_OFF with patch( - "homeassistant.components.switcher_kis.switch.SwitcherApi.control_device" + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_device" ) as mock_control_device: await hass.services.async_call( DOMAIN, @@ -78,7 +79,7 @@ async def test_set_auto_off_service(hass: HomeAssistant, mock_bridge, mock_api) entity_id = f"{SWITCH_DOMAIN}.{slugify(device.name)}" with patch( - "homeassistant.components.switcher_kis.switch.SwitcherApi.set_auto_shutdown" + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_auto_shutdown" ) as mock_set_auto_shutdown: await hass.services.async_call( DOMAIN, @@ -95,7 +96,7 @@ async def test_set_auto_off_service(hass: HomeAssistant, mock_bridge, mock_api) @pytest.mark.parametrize("mock_bridge", [[DUMMY_WATER_HEATER_DEVICE]], indirect=True) async def test_set_auto_off_service_fail( - hass: HomeAssistant, mock_bridge, mock_api, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, mock_bridge, mock_api ) -> None: """Test set auto off service failed.""" await init_integration(hass) @@ -105,24 +106,21 @@ async def test_set_auto_off_service_fail( entity_id = f"{SWITCH_DOMAIN}.{slugify(device.name)}" with patch( - "homeassistant.components.switcher_kis.switch.SwitcherApi.set_auto_shutdown", + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_auto_shutdown", return_value=None, ) as mock_set_auto_shutdown: - await hass.services.async_call( - DOMAIN, - SERVICE_SET_AUTO_OFF_NAME, - {ATTR_ENTITY_ID: entity_id, CONF_AUTO_OFF: DUMMY_AUTO_OFF_SET}, - blocking=True, - ) + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + DOMAIN, + SERVICE_SET_AUTO_OFF_NAME, + {ATTR_ENTITY_ID: entity_id, CONF_AUTO_OFF: DUMMY_AUTO_OFF_SET}, + blocking=True, + ) assert mock_api.call_count == 2 mock_set_auto_shutdown.assert_called_once_with( time_period_str(DUMMY_AUTO_OFF_SET) ) - assert ( - f"Call api for {device.name} failed, api: 'set_auto_shutdown'" - in caplog.text - ) state = hass.states.get(entity_id) assert state.state == 
STATE_UNAVAILABLE diff --git a/tests/components/switcher_kis/test_switch.py b/tests/components/switcher_kis/test_switch.py index 443c7bc930d..9bfe11fe202 100644 --- a/tests/components/switcher_kis/test_switch.py +++ b/tests/components/switcher_kis/test_switch.py @@ -16,6 +16,7 @@ from homeassistant.const import ( STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.util import slugify from . import init_integration @@ -47,7 +48,7 @@ async def test_switch( # Test turning on with patch( - "homeassistant.components.switcher_kis.switch.SwitcherApi.control_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_device", ) as mock_control_device: await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True @@ -60,7 +61,7 @@ async def test_switch( # Test turning off with patch( - "homeassistant.components.switcher_kis.switch.SwitcherApi.control_device" + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_device" ) as mock_control_device: await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True @@ -78,7 +79,6 @@ async def test_switch_control_fail( mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch, - caplog: pytest.LogCaptureFixture, ) -> None: """Test switch control fail.""" await init_integration(hass) @@ -97,18 +97,19 @@ async def test_switch_control_fail( # Test exception during turn on with patch( - "homeassistant.components.switcher_kis.switch.SwitcherApi.control_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_device", side_effect=RuntimeError("fake error"), ) as mock_control_device: - await hass.services.async_call( - SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True - ) + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) assert mock_api.call_count == 2 mock_control_device.assert_called_once_with(Command.ON) - assert ( - f"Call api for {device.name} failed, api: 'control_device'" in caplog.text - ) state = hass.states.get(entity_id) assert state.state == STATE_UNAVAILABLE @@ -121,17 +122,18 @@ async def test_switch_control_fail( # Test error response during turn on with patch( - "homeassistant.components.switcher_kis.switch.SwitcherApi.control_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_device", return_value=SwitcherBaseResponse(None), ) as mock_control_device: - await hass.services.async_call( - SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True - ) + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) assert mock_api.call_count == 4 mock_control_device.assert_called_once_with(Command.ON) - assert ( - f"Call api for {device.name} failed, api: 'control_device'" in caplog.text - ) state = hass.states.get(entity_id) assert state.state == STATE_UNAVAILABLE From 79ecb4a87cfa935816886ea8a5dd6b684c594280 Mon Sep 17 00:00:00 2001 From: jb101010-2 <168106462+jb101010-2@users.noreply.github.com> Date: Sat, 14 Dec 2024 20:43:27 +0100 Subject: [PATCH 0661/1198] Suez_water: add removal instructions (#133206) --- homeassistant/components/suez_water/quality_scale.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/homeassistant/components/suez_water/quality_scale.yaml b/homeassistant/components/suez_water/quality_scale.yaml index 0980ee472eb..474340a1489 100644 --- a/homeassistant/components/suez_water/quality_scale.yaml +++ b/homeassistant/components/suez_water/quality_scale.yaml @@ -21,7 +21,7 @@ rules: common-modules: done docs-high-level-description: done docs-installation-instructions: done - docs-removal-instructions: todo + docs-removal-instructions: done docs-actions: status: exempt comment: no service action From 35d5a16a3ca35014e505ec5449e394c36a369a7f Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Sat, 14 Dec 2024 20:47:06 +0100 Subject: [PATCH 0662/1198] Bump pynecil to 2.1.0 (#133211) --- homeassistant/components/iron_os/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/iron_os/manifest.json b/homeassistant/components/iron_os/manifest.json index d85b8bf4707..982fae16cc4 100644 --- a/homeassistant/components/iron_os/manifest.json +++ b/homeassistant/components/iron_os/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/iron_os", "iot_class": "local_polling", "loggers": ["pynecil", "aiogithubapi"], - "requirements": ["pynecil==2.0.2", "aiogithubapi==24.6.0"] + "requirements": ["pynecil==2.1.0", "aiogithubapi==24.6.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 54e80820491..37248e33077 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2104,7 +2104,7 @@ pymsteams==0.1.12 pymysensors==0.24.0 # homeassistant.components.iron_os -pynecil==2.0.2 +pynecil==2.1.0 # homeassistant.components.netgear pynetgear==0.10.10 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d4c1efeda15..5187e004989 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1706,7 +1706,7 @@ pymonoprice==0.4 pymysensors==0.24.0 # homeassistant.components.iron_os -pynecil==2.0.2 +pynecil==2.1.0 # homeassistant.components.netgear pynetgear==0.10.10 From 4dc1405e9934fc6aaadbcef533876a4c7cfe3688 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Sat, 14 Dec 2024 20:51:30 +0100 Subject: [PATCH 0663/1198] Bump incomfort-client to v0.6.4 (#133205) --- homeassistant/components/incomfort/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/incomfort/manifest.json b/homeassistant/components/incomfort/manifest.json index 40c93012eef..f404f33b970 100644 --- a/homeassistant/components/incomfort/manifest.json +++ b/homeassistant/components/incomfort/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/incomfort", "iot_class": "local_polling", "loggers": ["incomfortclient"], - "requirements": ["incomfort-client==0.6.3-1"] + "requirements": ["incomfort-client==0.6.4"] } diff --git a/requirements_all.txt b/requirements_all.txt index 37248e33077..7fcc2db9e06 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1197,7 +1197,7 @@ ihcsdk==2.8.5 imgw_pib==1.0.6 # homeassistant.components.incomfort -incomfort-client==0.6.3-1 +incomfort-client==0.6.4 # homeassistant.components.influxdb influxdb-client==1.24.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5187e004989..c97aac88311 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1011,7 +1011,7 @@ ifaddr==0.2.0 
imgw_pib==1.0.6 # homeassistant.components.incomfort -incomfort-client==0.6.3-1 +incomfort-client==0.6.4 # homeassistant.components.influxdb influxdb-client==1.24.0 From 74aa1a8f7e6a782e72995aa1b4e0a27eb3cbcb8d Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Sat, 14 Dec 2024 21:47:27 +0100 Subject: [PATCH 0664/1198] Update Fronius translations (#132876) * Remove exception translation that's handled by configflow errors dict * Remove entity name translations handled by device class * Add data_description for Fronius config flow * Remove unnecessary exception case * review suggestion --- .../components/fronius/config_flow.py | 7 +--- homeassistant/components/fronius/strings.json | 24 ++--------- tests/components/fronius/test_config_flow.py | 42 ++++++------------- 3 files changed, 18 insertions(+), 55 deletions(-) diff --git a/homeassistant/components/fronius/config_flow.py b/homeassistant/components/fronius/config_flow.py index 1d5a26984fa..53433e31233 100644 --- a/homeassistant/components/fronius/config_flow.py +++ b/homeassistant/components/fronius/config_flow.py @@ -52,14 +52,9 @@ async def validate_host( try: inverter_info = await fronius.inverter_info() first_inverter = next(inverter for inverter in inverter_info["inverters"]) - except FroniusError as err: + except (FroniusError, StopIteration) as err: _LOGGER.debug(err) raise CannotConnect from err - except StopIteration as err: - raise CannotConnect( - translation_domain=DOMAIN, - translation_key="no_supported_device_found", - ) from err first_inverter_uid: str = first_inverter["unique_id"]["value"] return first_inverter_uid, FroniusConfigEntryData( host=host, diff --git a/homeassistant/components/fronius/strings.json b/homeassistant/components/fronius/strings.json index 86348a0e2d7..9a2b498f28c 100644 --- a/homeassistant/components/fronius/strings.json +++ b/homeassistant/components/fronius/strings.json @@ -3,10 +3,12 @@ "flow_title": "{device}", "step": { "user": { - "title": "Fronius SolarNet", - "description": "Configure the IP address or local hostname of your Fronius device.", + "description": "Configure your Fronius SolarAPI device.", "data": { "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The IP address or hostname of your Fronius device." } }, "confirm_discovery": { @@ -41,9 +43,6 @@ "energy_total": { "name": "Total energy" }, - "frequency_ac": { - "name": "[%key:component::sensor::entity_component::frequency::name%]" - }, "current_ac": { "name": "AC current" }, @@ -156,9 +155,6 @@ "power_apparent_phase_3": { "name": "Apparent power phase 3" }, - "power_apparent": { - "name": "[%key:component::sensor::entity_component::apparent_power::name%]" - }, "power_factor_phase_1": { "name": "Power factor phase 1" }, @@ -168,9 +164,6 @@ "power_factor_phase_3": { "name": "Power factor phase 3" }, - "power_factor": { - "name": "[%key:component::sensor::entity_component::power_factor::name%]" - }, "power_reactive_phase_1": { "name": "Reactive power phase 1" }, @@ -216,12 +209,6 @@ "energy_real_ac_consumed": { "name": "Energy consumed" }, - "power_real_ac": { - "name": "[%key:component::sensor::entity_component::power::name%]" - }, - "temperature_channel_1": { - "name": "[%key:component::sensor::entity_component::temperature::name%]" - }, "state_code": { "name": "State code" }, @@ -296,9 +283,6 @@ } }, "exceptions": { - "no_supported_device_found": { - "message": "No supported Fronius SolarNet device found." 
- }, "entry_cannot_connect": { "message": "Failed to connect to Fronius device at {host}: {fronius_error}" }, diff --git a/tests/components/fronius/test_config_flow.py b/tests/components/fronius/test_config_flow.py index 1b9c41d5aa6..5d0b93e7cd5 100644 --- a/tests/components/fronius/test_config_flow.py +++ b/tests/components/fronius/test_config_flow.py @@ -118,8 +118,18 @@ async def test_form_with_inverter(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_form_cannot_connect(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + "inverter_side_effect", + [ + FroniusError, + None, # raises StopIteration through INVERTER_INFO_NONE + ], +) +async def test_form_cannot_connect( + hass: HomeAssistant, inverter_side_effect: type[FroniusError] | None +) -> None: """Test we handle cannot connect error.""" + INVERTER_INFO_NONE: dict[str, list] = {"inverters": []} result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -131,34 +141,8 @@ async def test_form_cannot_connect(hass: HomeAssistant) -> None: ), patch( "pyfronius.Fronius.inverter_info", - side_effect=FroniusError, - ), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "host": "1.1.1.1", - }, - ) - - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "cannot_connect"} - - -async def test_form_no_device(hass: HomeAssistant) -> None: - """Test we handle no device found error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with ( - patch( - "pyfronius.Fronius.current_logger_info", - side_effect=FroniusError, - ), - patch( - "pyfronius.Fronius.inverter_info", - return_value={"inverters": []}, + side_effect=inverter_side_effect, + return_value=INVERTER_INFO_NONE, ), ): result2 = await hass.config_entries.flow.async_configure( From 2117e35d53b1cf397a149ee9f45f3089f94d4bb4 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 14 Dec 2024 15:06:26 -0600 Subject: [PATCH 0665/1198] Bump yalexs-ble to 2.5.5 (#133229) changelog: https://github.com/bdraco/yalexs-ble/compare/v2.5.4...v2.5.5 --- homeassistant/components/august/manifest.json | 2 +- homeassistant/components/yale/manifest.json | 2 +- homeassistant/components/yalexs_ble/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index ed2c8007ee8..d0b41411c96 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -28,5 +28,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.4"] + "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.5"] } diff --git a/homeassistant/components/yale/manifest.json b/homeassistant/components/yale/manifest.json index 2ed1f4b5c43..7b7edfac77b 100644 --- a/homeassistant/components/yale/manifest.json +++ b/homeassistant/components/yale/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/yale", "iot_class": "cloud_push", "loggers": ["socketio", "engineio", "yalexs"], - "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.4"] + "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.5"] } diff --git a/homeassistant/components/yalexs_ble/manifest.json b/homeassistant/components/yalexs_ble/manifest.json index 1472f9035ea..b2c331397b3 100644 --- a/homeassistant/components/yalexs_ble/manifest.json +++ b/homeassistant/components/yalexs_ble/manifest.json @@ -12,5 +12,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/yalexs_ble", "iot_class": "local_push", - "requirements": ["yalexs-ble==2.5.4"] + "requirements": ["yalexs-ble==2.5.5"] } diff --git a/requirements_all.txt b/requirements_all.txt index 7fcc2db9e06..4c257ba9c11 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3058,7 +3058,7 @@ yalesmartalarmclient==0.4.3 # homeassistant.components.august # homeassistant.components.yale # homeassistant.components.yalexs_ble -yalexs-ble==2.5.4 +yalexs-ble==2.5.5 # homeassistant.components.august # homeassistant.components.yale diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c97aac88311..5b33e7d3c12 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2453,7 +2453,7 @@ yalesmartalarmclient==0.4.3 # homeassistant.components.august # homeassistant.components.yale # homeassistant.components.yalexs_ble -yalexs-ble==2.5.4 +yalexs-ble==2.5.5 # homeassistant.components.august # homeassistant.components.yale From 229a68dc7321de4a43b96a71b15e11189dd7135d Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Sun, 15 Dec 2024 09:27:14 +0100 Subject: [PATCH 0666/1198] set PARALLEL_UPDATES to 1 for enphase_envoy (#132373) * set PARALLEL_UPDATES to 1 for enphase_envoy * move PARALLEL_UPDATES from _init_ to platform files. 
* Implement review feedback * set parrallel_update 0 for read-only platforms --- homeassistant/components/enphase_envoy/binary_sensor.py | 2 ++ homeassistant/components/enphase_envoy/number.py | 2 ++ homeassistant/components/enphase_envoy/select.py | 2 ++ homeassistant/components/enphase_envoy/sensor.py | 2 ++ homeassistant/components/enphase_envoy/switch.py | 2 ++ 5 files changed, 10 insertions(+) diff --git a/homeassistant/components/enphase_envoy/binary_sensor.py b/homeassistant/components/enphase_envoy/binary_sensor.py index 6be29d19ecb..1ad6f259de1 100644 --- a/homeassistant/components/enphase_envoy/binary_sensor.py +++ b/homeassistant/components/enphase_envoy/binary_sensor.py @@ -22,6 +22,8 @@ from .const import DOMAIN from .coordinator import EnphaseConfigEntry, EnphaseUpdateCoordinator from .entity import EnvoyBaseEntity +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class EnvoyEnchargeBinarySensorEntityDescription(BinarySensorEntityDescription): diff --git a/homeassistant/components/enphase_envoy/number.py b/homeassistant/components/enphase_envoy/number.py index f27335b1f4c..a62913a4c0b 100644 --- a/homeassistant/components/enphase_envoy/number.py +++ b/homeassistant/components/enphase_envoy/number.py @@ -25,6 +25,8 @@ from .const import DOMAIN from .coordinator import EnphaseConfigEntry, EnphaseUpdateCoordinator from .entity import EnvoyBaseEntity +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class EnvoyRelayNumberEntityDescription(NumberEntityDescription): diff --git a/homeassistant/components/enphase_envoy/select.py b/homeassistant/components/enphase_envoy/select.py index 903c2c1edf6..d9729a16683 100644 --- a/homeassistant/components/enphase_envoy/select.py +++ b/homeassistant/components/enphase_envoy/select.py @@ -20,6 +20,8 @@ from .const import DOMAIN from .coordinator import EnphaseConfigEntry, EnphaseUpdateCoordinator from .entity import EnvoyBaseEntity +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class EnvoyRelaySelectEntityDescription(SelectEntityDescription): diff --git a/homeassistant/components/enphase_envoy/sensor.py b/homeassistant/components/enphase_envoy/sensor.py index 20d610e4b71..fadbf191840 100644 --- a/homeassistant/components/enphase_envoy/sensor.py +++ b/homeassistant/components/enphase_envoy/sensor.py @@ -59,6 +59,8 @@ _LOGGER = logging.getLogger(__name__) INVERTERS_KEY = "inverters" LAST_REPORTED_KEY = "last_reported" +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class EnvoyInverterSensorEntityDescription(SensorEntityDescription): diff --git a/homeassistant/components/enphase_envoy/switch.py b/homeassistant/components/enphase_envoy/switch.py index 14451aaf266..5170b694587 100644 --- a/homeassistant/components/enphase_envoy/switch.py +++ b/homeassistant/components/enphase_envoy/switch.py @@ -20,6 +20,8 @@ from .const import DOMAIN from .coordinator import EnphaseConfigEntry, EnphaseUpdateCoordinator from .entity import EnvoyBaseEntity +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class EnvoyEnpowerSwitchEntityDescription(SwitchEntityDescription): From 1b2cf68e8277bbcc6296a436fca3d79025b38cec Mon Sep 17 00:00:00 2001 From: jb101010-2 <168106462+jb101010-2@users.noreply.github.com> Date: Sun, 15 Dec 2024 09:46:14 +0100 Subject: [PATCH 0667/1198] Suez_water: store coordinator in runtime_data (#133204) * Suez_water: store coordinator in runtime_data * jhfg --- homeassistant/components/suez_water/__init__.py | 15 +++++---------- .../components/suez_water/coordinator.py | 7 +++++-- 
.../components/suez_water/quality_scale.yaml | 4 +--- homeassistant/components/suez_water/sensor.py | 7 +++---- 4 files changed, 14 insertions(+), 19 deletions(-) diff --git a/homeassistant/components/suez_water/__init__.py b/homeassistant/components/suez_water/__init__.py index 06f503b85c2..cbaac912642 100644 --- a/homeassistant/components/suez_water/__init__.py +++ b/homeassistant/components/suez_water/__init__.py @@ -2,32 +2,27 @@ from __future__ import annotations -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .coordinator import SuezWaterCoordinator +from .coordinator import SuezWaterConfigEntry, SuezWaterCoordinator PLATFORMS: list[Platform] = [Platform.SENSOR] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: SuezWaterConfigEntry) -> bool: """Set up Suez Water from a config entry.""" coordinator = SuezWaterCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: SuezWaterConfigEntry) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/suez_water/coordinator.py b/homeassistant/components/suez_water/coordinator.py index 224929c606e..72da68c0f5d 100644 --- a/homeassistant/components/suez_water/coordinator.py +++ b/homeassistant/components/suez_water/coordinator.py @@ -37,13 +37,16 @@ class SuezWaterData: price: float +type SuezWaterConfigEntry = ConfigEntry[SuezWaterCoordinator] + + class SuezWaterCoordinator(DataUpdateCoordinator[SuezWaterData]): """Suez water coordinator.""" _suez_client: SuezClient - config_entry: ConfigEntry + config_entry: SuezWaterConfigEntry - def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None: + def __init__(self, hass: HomeAssistant, config_entry: SuezWaterConfigEntry) -> None: """Initialize suez water coordinator.""" super().__init__( hass, diff --git a/homeassistant/components/suez_water/quality_scale.yaml b/homeassistant/components/suez_water/quality_scale.yaml index 474340a1489..399c0b73a5a 100644 --- a/homeassistant/components/suez_water/quality_scale.yaml +++ b/homeassistant/components/suez_water/quality_scale.yaml @@ -4,9 +4,7 @@ rules: test-before-configure: done unique-config-entry: done config-flow-test-coverage: done - runtime-data: - status: todo - comment: coordinator is created during setup, should be stored in runtime_data + runtime-data: done test-before-setup: done appropriate-polling: done entity-unique-id: done diff --git a/homeassistant/components/suez_water/sensor.py b/homeassistant/components/suez_water/sensor.py index 2ba699a9af1..e4e53dd7f6d 100644 --- a/homeassistant/components/suez_water/sensor.py +++ b/homeassistant/components/suez_water/sensor.py @@ -13,7 +13,6 @@ from homeassistant.components.sensor import ( SensorEntity, SensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from 
homeassistant.const import CURRENCY_EURO, UnitOfVolume from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo @@ -21,7 +20,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import CONF_COUNTER_ID, DOMAIN -from .coordinator import SuezWaterCoordinator, SuezWaterData +from .coordinator import SuezWaterConfigEntry, SuezWaterCoordinator, SuezWaterData @dataclass(frozen=True, kw_only=True) @@ -53,11 +52,11 @@ SENSORS: tuple[SuezWaterSensorEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: SuezWaterConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Suez Water sensor from a config entry.""" - coordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data counter_id = entry.data[CONF_COUNTER_ID] async_add_entities( From 94941283955c88e34253256332628e9ea2754d18 Mon Sep 17 00:00:00 2001 From: Avi Miller Date: Sun, 15 Dec 2024 20:24:41 +1100 Subject: [PATCH 0668/1198] Bump aiolifx to 1.1.2 and add new HomeKit product prefixes (#133191) Signed-off-by: Avi Miller --- homeassistant/components/lifx/manifest.json | 5 ++++- homeassistant/generated/zeroconf.py | 12 ++++++++++++ requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 18 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lifx/manifest.json b/homeassistant/components/lifx/manifest.json index c7d8a27a1c7..2e16eb2082b 100644 --- a/homeassistant/components/lifx/manifest.json +++ b/homeassistant/components/lifx/manifest.json @@ -23,6 +23,7 @@ "LIFX Ceiling", "LIFX Clean", "LIFX Color", + "LIFX Colour", "LIFX DLCOL", "LIFX Dlight", "LIFX DLWW", @@ -35,12 +36,14 @@ "LIFX Neon", "LIFX Nightvision", "LIFX PAR38", + "LIFX Permanent Outdoor", "LIFX Pls", "LIFX Plus", "LIFX Round", "LIFX Square", "LIFX String", "LIFX Tile", + "LIFX Tube", "LIFX White", "LIFX Z" ] @@ -48,7 +51,7 @@ "iot_class": "local_polling", "loggers": ["aiolifx", "aiolifx_effects", "bitstring"], "requirements": [ - "aiolifx==1.1.1", + "aiolifx==1.1.2", "aiolifx-effects==0.3.2", "aiolifx-themes==0.5.5" ] diff --git a/homeassistant/generated/zeroconf.py b/homeassistant/generated/zeroconf.py index e5b50841d11..2c914c2d240 100644 --- a/homeassistant/generated/zeroconf.py +++ b/homeassistant/generated/zeroconf.py @@ -92,6 +92,10 @@ HOMEKIT = { "always_discover": True, "domain": "lifx", }, + "LIFX Colour": { + "always_discover": True, + "domain": "lifx", + }, "LIFX DLCOL": { "always_discover": True, "domain": "lifx", @@ -140,6 +144,10 @@ HOMEKIT = { "always_discover": True, "domain": "lifx", }, + "LIFX Permanent Outdoor": { + "always_discover": True, + "domain": "lifx", + }, "LIFX Pls": { "always_discover": True, "domain": "lifx", @@ -164,6 +172,10 @@ HOMEKIT = { "always_discover": True, "domain": "lifx", }, + "LIFX Tube": { + "always_discover": True, + "domain": "lifx", + }, "LIFX White": { "always_discover": True, "domain": "lifx", diff --git a/requirements_all.txt b/requirements_all.txt index 4c257ba9c11..f0b050b49ea 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -286,7 +286,7 @@ aiolifx-effects==0.3.2 aiolifx-themes==0.5.5 # homeassistant.components.lifx -aiolifx==1.1.1 +aiolifx==1.1.2 # homeassistant.components.lookin aiolookin==1.0.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5b33e7d3c12..7b9fafb5958 100644 --- 
a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -268,7 +268,7 @@ aiolifx-effects==0.3.2 aiolifx-themes==0.5.5 # homeassistant.components.lifx -aiolifx==1.1.1 +aiolifx==1.1.2 # homeassistant.components.lookin aiolookin==1.0.0 From af6948a9112575ff6cf4b9a8d26aaff29cc124e7 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Sun, 15 Dec 2024 10:34:33 +0100 Subject: [PATCH 0669/1198] Fix pydantic warnings in purpleair (#133247) --- homeassistant/components/purpleair/diagnostics.py | 2 +- tests/components/purpleair/conftest.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/purpleair/diagnostics.py b/homeassistant/components/purpleair/diagnostics.py index 30f1deeb368..f7c44b7e9b2 100644 --- a/homeassistant/components/purpleair/diagnostics.py +++ b/homeassistant/components/purpleair/diagnostics.py @@ -37,7 +37,7 @@ async def async_get_config_entry_diagnostics( return async_redact_data( { "entry": entry.as_dict(), - "data": coordinator.data.dict(), # type: ignore[deprecated] + "data": coordinator.data.model_dump(), }, TO_REDACT, ) diff --git a/tests/components/purpleair/conftest.py b/tests/components/purpleair/conftest.py index 3d6776dd12e..1809b16bd75 100644 --- a/tests/components/purpleair/conftest.py +++ b/tests/components/purpleair/conftest.py @@ -73,7 +73,7 @@ def config_entry_options_fixture() -> dict[str, Any]: @pytest.fixture(name="get_sensors_response", scope="package") def get_sensors_response_fixture() -> GetSensorsResponse: """Define a fixture to mock an aiopurpleair GetSensorsResponse object.""" - return GetSensorsResponse.parse_raw( + return GetSensorsResponse.model_validate_json( load_fixture("get_sensors_response.json", "purpleair") ) From 80e4d7ee12ea8d8052ed6993adb334f427453a9a Mon Sep 17 00:00:00 2001 From: rappenze Date: Sun, 15 Dec 2024 11:02:26 +0100 Subject: [PATCH 0670/1198] Fix fibaro climate hvac mode (#132508) --- homeassistant/components/fibaro/climate.py | 6 +- tests/components/fibaro/conftest.py | 56 +++++++++ tests/components/fibaro/test_climate.py | 134 +++++++++++++++++++++ 3 files changed, 193 insertions(+), 3 deletions(-) create mode 100644 tests/components/fibaro/test_climate.py diff --git a/homeassistant/components/fibaro/climate.py b/homeassistant/components/fibaro/climate.py index 2541781773c..d5605e71c73 100644 --- a/homeassistant/components/fibaro/climate.py +++ b/homeassistant/components/fibaro/climate.py @@ -272,7 +272,9 @@ class FibaroThermostat(FibaroEntity, ClimateEntity): if isinstance(fibaro_operation_mode, str): with suppress(ValueError): return HVACMode(fibaro_operation_mode.lower()) - elif fibaro_operation_mode in OPMODES_HVAC: + # when the mode cannot be instantiated a preset_mode is selected + return HVACMode.AUTO + if fibaro_operation_mode in OPMODES_HVAC: return OPMODES_HVAC[fibaro_operation_mode] return None @@ -280,8 +282,6 @@ class FibaroThermostat(FibaroEntity, ClimateEntity): """Set new target operation mode.""" if not self._op_mode_device: return - if self.preset_mode: - return if "setOperatingMode" in self._op_mode_device.fibaro_device.actions: self._op_mode_device.action("setOperatingMode", HA_OPMODES_HVAC[hvac_mode]) diff --git a/tests/components/fibaro/conftest.py b/tests/components/fibaro/conftest.py index 1976a8f310b..583c44a41e6 100644 --- a/tests/components/fibaro/conftest.py +++ b/tests/components/fibaro/conftest.py @@ -129,6 +129,62 @@ def mock_light() -> Mock: return light +@pytest.fixture +def mock_thermostat() -> Mock: + 
"""Fixture for a thermostat.""" + climate = Mock() + climate.fibaro_id = 4 + climate.parent_fibaro_id = 0 + climate.name = "Test climate" + climate.room_id = 1 + climate.dead = False + climate.visible = True + climate.enabled = True + climate.type = "com.fibaro.thermostatDanfoss" + climate.base_type = "com.fibaro.device" + climate.properties = {"manufacturer": ""} + climate.actions = {"setThermostatMode": 1} + climate.supported_features = {} + climate.has_supported_thermostat_modes = True + climate.supported_thermostat_modes = ["Off", "Heat", "CustomerSpecific"] + climate.has_operating_mode = False + climate.has_thermostat_mode = True + climate.thermostat_mode = "CustomerSpecific" + value_mock = Mock() + value_mock.has_value = True + value_mock.int_value.return_value = 20 + climate.value = value_mock + return climate + + +@pytest.fixture +def mock_thermostat_with_operating_mode() -> Mock: + """Fixture for a thermostat.""" + climate = Mock() + climate.fibaro_id = 4 + climate.parent_fibaro_id = 0 + climate.name = "Test climate" + climate.room_id = 1 + climate.dead = False + climate.visible = True + climate.enabled = True + climate.type = "com.fibaro.thermostatDanfoss" + climate.base_type = "com.fibaro.device" + climate.properties = {"manufacturer": ""} + climate.actions = {"setOperationMode": 1} + climate.supported_features = {} + climate.has_supported_operating_modes = True + climate.supported_operating_modes = [0, 1, 15] + climate.has_operating_mode = True + climate.operating_mode = 15 + climate.has_thermostat_mode = False + value_mock = Mock() + value_mock.has_value = True + value_mock.int_value.return_value = 20 + climate.value = value_mock + return climate + + @pytest.fixture def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Return the default mocked config entry.""" diff --git a/tests/components/fibaro/test_climate.py b/tests/components/fibaro/test_climate.py new file mode 100644 index 00000000000..31022e19a08 --- /dev/null +++ b/tests/components/fibaro/test_climate.py @@ -0,0 +1,134 @@ +"""Test the Fibaro climate platform.""" + +from unittest.mock import Mock, patch + +from homeassistant.components.climate import ClimateEntityFeature, HVACMode +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import init_integration + +from tests.common import MockConfigEntry + + +async def test_climate_setup( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_thermostat: Mock, + mock_room: Mock, +) -> None: + """Test that the climate creates an entity.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_thermostat] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + entry = entity_registry.async_get("climate.room_1_test_climate_4") + assert entry + assert entry.unique_id == "hc2_111111.4" + assert entry.original_name == "Room 1 Test climate" + assert entry.supported_features == ( + ClimateEntityFeature.TURN_ON + | ClimateEntityFeature.TURN_OFF + | ClimateEntityFeature.PRESET_MODE + ) + + +async def test_hvac_mode_preset( + hass: HomeAssistant, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_thermostat: Mock, + mock_room: Mock, +) -> None: + """Test that the climate state is auto when a preset 
is selected.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_thermostat] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + state = hass.states.get("climate.room_1_test_climate_4") + assert state.state == HVACMode.AUTO + assert state.attributes["preset_mode"] == "CustomerSpecific" + + +async def test_hvac_mode_heat( + hass: HomeAssistant, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_thermostat: Mock, + mock_room: Mock, +) -> None: + """Test that the preset mode is None if a hvac mode is active.""" + + # Arrange + mock_thermostat.thermostat_mode = "Heat" + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_thermostat] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + state = hass.states.get("climate.room_1_test_climate_4") + assert state.state == HVACMode.HEAT + assert state.attributes["preset_mode"] is None + + +async def test_set_hvac_mode( + hass: HomeAssistant, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_thermostat: Mock, + mock_room: Mock, +) -> None: + """Test that set_hvac_mode() works.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_thermostat] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]): + # Act + await init_integration(hass, mock_config_entry) + await hass.services.async_call( + "climate", + "set_hvac_mode", + {"entity_id": "climate.room_1_test_climate_4", "hvac_mode": HVACMode.HEAT}, + blocking=True, + ) + + # Assert + mock_thermostat.execute_action.assert_called_once() + + +async def test_hvac_mode_with_operation_mode_support( + hass: HomeAssistant, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_thermostat_with_operating_mode: Mock, + mock_room: Mock, +) -> None: + """Test that operating mode works.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_thermostat_with_operating_mode] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + state = hass.states.get("climate.room_1_test_climate_4") + assert state.state == HVACMode.AUTO From f8da2c3e5c98d98fd1c55b978d3b259ba45e5e0f Mon Sep 17 00:00:00 2001 From: Thomas55555 <59625598+Thomas55555@users.noreply.github.com> Date: Sun, 15 Dec 2024 11:04:11 +0100 Subject: [PATCH 0671/1198] Bump aioautomower to 2024.12.0 (#132962) --- homeassistant/components/husqvarna_automower/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../husqvarna_automower/snapshots/test_diagnostics.ambr | 7 ------- 4 files changed, 3 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/husqvarna_automower/manifest.json b/homeassistant/components/husqvarna_automower/manifest.json index 0f35e60c219..02e87a3a772 100644 --- a/homeassistant/components/husqvarna_automower/manifest.json +++ b/homeassistant/components/husqvarna_automower/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_push", "loggers": ["aioautomower"], "quality_scale": "silver", - "requirements": ["aioautomower==2024.10.3"] + "requirements": 
["aioautomower==2024.12.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index f0b050b49ea..237b57a1438 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -201,7 +201,7 @@ aioaseko==1.0.0 aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2024.10.3 +aioautomower==2024.12.0 # homeassistant.components.azure_devops aioazuredevops==2.2.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 7b9fafb5958..613f9793cf3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -189,7 +189,7 @@ aioaseko==1.0.0 aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2024.10.3 +aioautomower==2024.12.0 # homeassistant.components.azure_devops aioazuredevops==2.2.1 diff --git a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr index ce9fc9ac01a..2dab82451a6 100644 --- a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr @@ -71,9 +71,7 @@ 'activity': 'parked_in_cs', 'error_code': 0, 'error_datetime': None, - 'error_datetime_naive': None, 'error_key': None, - 'error_timestamp': 0, 'inactive_reason': 'none', 'is_error_confirmable': False, 'mode': 'main_area', @@ -82,9 +80,7 @@ 'work_area_name': 'Front lawn', }), 'planner': dict({ - 'next_start': 1685991600000, 'next_start_datetime': '2023-06-05T19:00:00+02:00', - 'next_start_datetime_naive': '2023-06-05T19:00:00', 'override': dict({ 'action': 'not_active', }), @@ -141,7 +137,6 @@ 'cutting_height': 50, 'enabled': False, 'last_time_completed': '2024-08-12T05:07:49+02:00', - 'last_time_completed_naive': '2024-08-12T05:07:49', 'name': 'my_lawn', 'progress': 20, }), @@ -149,7 +144,6 @@ 'cutting_height': 50, 'enabled': True, 'last_time_completed': '2024-08-12T07:54:29+02:00', - 'last_time_completed_naive': '2024-08-12T07:54:29', 'name': 'Front lawn', 'progress': 40, }), @@ -157,7 +151,6 @@ 'cutting_height': 25, 'enabled': True, 'last_time_completed': None, - 'last_time_completed_naive': None, 'name': 'Back lawn', 'progress': None, }), From 412aa60e8f294833ec48199bf04e9f77399aed61 Mon Sep 17 00:00:00 2001 From: Sid <27780930+autinerd@users.noreply.github.com> Date: Sun, 15 Dec 2024 11:05:17 +0100 Subject: [PATCH 0672/1198] Fix enigma2 integration for devices not reporting MAC address (#133226) --- .../components/enigma2/config_flow.py | 3 +- .../components/enigma2/coordinator.py | 29 +++++++++++------ .../components/enigma2/media_player.py | 7 +--- tests/components/enigma2/test_init.py | 32 +++++++++++++------ 4 files changed, 45 insertions(+), 26 deletions(-) diff --git a/homeassistant/components/enigma2/config_flow.py b/homeassistant/components/enigma2/config_flow.py index e9502a0f7cd..b0649a8368d 100644 --- a/homeassistant/components/enigma2/config_flow.py +++ b/homeassistant/components/enigma2/config_flow.py @@ -133,7 +133,8 @@ class Enigma2ConfigFlowHandler(ConfigFlow, domain=DOMAIN): except Exception: # noqa: BLE001 errors = {"base": "unknown"} else: - await self.async_set_unique_id(about["info"]["ifaces"][0]["mac"]) + unique_id = about["info"]["ifaces"][0]["mac"] or self.unique_id + await self.async_set_unique_id(unique_id) self._abort_if_unique_id_configured() return errors diff --git a/homeassistant/components/enigma2/coordinator.py b/homeassistant/components/enigma2/coordinator.py index a35e74f582f..d5bbf2c0ce5 100644 --- 
a/homeassistant/components/enigma2/coordinator.py +++ b/homeassistant/components/enigma2/coordinator.py @@ -35,6 +35,7 @@ class Enigma2UpdateCoordinator(DataUpdateCoordinator[OpenWebIfStatus]): """The Enigma2 data update coordinator.""" device: OpenWebIfDevice + unique_id: str | None def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None: """Initialize the Enigma2 data update coordinator.""" @@ -64,6 +65,10 @@ class Enigma2UpdateCoordinator(DataUpdateCoordinator[OpenWebIfStatus]): name=config_entry.data[CONF_HOST], ) + # set the unique ID for the entities to the config entry unique ID + # for devices that don't report a MAC address + self.unique_id = config_entry.unique_id + async def _async_setup(self) -> None: """Provide needed data to the device info.""" @@ -71,16 +76,20 @@ class Enigma2UpdateCoordinator(DataUpdateCoordinator[OpenWebIfStatus]): self.device.mac_address = about["info"]["ifaces"][0]["mac"] self.device_info["model"] = about["info"]["model"] self.device_info["manufacturer"] = about["info"]["brand"] - self.device_info[ATTR_IDENTIFIERS] = { - (DOMAIN, format_mac(iface["mac"])) - for iface in about["info"]["ifaces"] - if "mac" in iface and iface["mac"] is not None - } - self.device_info[ATTR_CONNECTIONS] = { - (CONNECTION_NETWORK_MAC, format_mac(iface["mac"])) - for iface in about["info"]["ifaces"] - if "mac" in iface and iface["mac"] is not None - } + if self.device.mac_address is not None: + self.device_info[ATTR_IDENTIFIERS] = { + (DOMAIN, format_mac(iface["mac"])) + for iface in about["info"]["ifaces"] + if "mac" in iface and iface["mac"] is not None + } + self.device_info[ATTR_CONNECTIONS] = { + (CONNECTION_NETWORK_MAC, format_mac(iface["mac"])) + for iface in about["info"]["ifaces"] + if "mac" in iface and iface["mac"] is not None + } + self.unique_id = self.device.mac_address + elif self.unique_id is not None: + self.device_info[ATTR_IDENTIFIERS] = {(DOMAIN, self.unique_id)} async def _async_update_data(self) -> OpenWebIfStatus: await self.device.update() diff --git a/homeassistant/components/enigma2/media_player.py b/homeassistant/components/enigma2/media_player.py index 8287e055814..ee0de15c3fb 100644 --- a/homeassistant/components/enigma2/media_player.py +++ b/homeassistant/components/enigma2/media_player.py @@ -4,7 +4,6 @@ from __future__ import annotations import contextlib from logging import getLogger -from typing import cast from aiohttp.client_exceptions import ServerDisconnectedError from openwebif.enums import PowerState, RemoteControlCodes, SetVolumeOption @@ -15,7 +14,6 @@ from homeassistant.components.media_player import ( MediaPlayerState, MediaType, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -65,10 +63,7 @@ class Enigma2Device(CoordinatorEntity[Enigma2UpdateCoordinator], MediaPlayerEnti super().__init__(coordinator) - self._attr_unique_id = ( - coordinator.device.mac_address - or cast(ConfigEntry, coordinator.config_entry).entry_id - ) + self._attr_unique_id = coordinator.unique_id self._attr_device_info = coordinator.device_info diff --git a/tests/components/enigma2/test_init.py b/tests/components/enigma2/test_init.py index ab19c2ce51a..d12f96d4b0f 100644 --- a/tests/components/enigma2/test_init.py +++ b/tests/components/enigma2/test_init.py @@ -5,23 +5,37 @@ from unittest.mock import patch from 
homeassistant.components.enigma2.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr from .conftest import TEST_REQUIRED, MockDevice from tests.common import MockConfigEntry +async def test_device_without_mac_address( + hass: HomeAssistant, device_registry: dr.DeviceRegistry +) -> None: + """Test that a device gets successfully registered when the device doesn't report a MAC address.""" + mock_device = MockDevice() + mock_device.mac_address = None + with patch( + "homeassistant.components.enigma2.coordinator.OpenWebIfDevice.__new__", + return_value=mock_device, + ): + entry = MockConfigEntry( + domain=DOMAIN, data=TEST_REQUIRED, title="name", unique_id="123456" + ) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert device_registry.async_get_device({(DOMAIN, entry.unique_id)}) is not None + + async def test_unload_entry(hass: HomeAssistant) -> None: """Test successful unload of entry.""" - with ( - patch( - "homeassistant.components.enigma2.coordinator.OpenWebIfDevice.__new__", - return_value=MockDevice(), - ), - patch( - "homeassistant.components.enigma2.media_player.async_setup_entry", - return_value=True, - ), + with patch( + "homeassistant.components.enigma2.coordinator.OpenWebIfDevice.__new__", + return_value=MockDevice(), ): entry = MockConfigEntry(domain=DOMAIN, data=TEST_REQUIRED, title="name") entry.add_to_hass(hass) From 879d809e5a0f1dd827c5e91f91e991b716937ab4 Mon Sep 17 00:00:00 2001 From: rappenze Date: Sun, 15 Dec 2024 11:47:18 +0100 Subject: [PATCH 0673/1198] Enhance translation strings in fibaro (#133234) --- homeassistant/components/fibaro/strings.json | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/fibaro/strings.json b/homeassistant/components/fibaro/strings.json index de875176cdb..99f718d545c 100644 --- a/homeassistant/components/fibaro/strings.json +++ b/homeassistant/components/fibaro/strings.json @@ -3,16 +3,25 @@ "step": { "user": { "data": { - "url": "URL in the format http://HOST/api/", + "url": "[%key:common::config_flow::data::url%]", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", - "import_plugins": "Import entities from fibaro plugins?" + "import_plugins": "Import entities from fibaro plugins / quickapps" + }, + "data_description": { + "url": "The URL of the Fibaro hub in the format `http(s)://IP`.", + "username": "The username of the Fibaro hub user.", + "password": "The password of the Fibaro hub user.", + "import_plugins": "Select if entities from Fibaro plugins / quickapps should be imported." 
} }, "reauth_confirm": { "data": { "password": "[%key:common::config_flow::data::password%]" }, + "data_description": { + "password": "[%key:component::fibaro::config::step::user::data_description::password%]" + }, "title": "[%key:common::config_flow::title::reauth%]", "description": "Please update your password for {username}" } From 314076b85f6c848c9c254cfa9edb731b5ba15930 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Sun, 15 Dec 2024 11:48:11 +0100 Subject: [PATCH 0674/1198] Replace aiogithub dependency with pynecil update check (#133213) --- .strict-typing | 1 + homeassistant/components/iron_os/__init__.py | 5 ++-- .../components/iron_os/coordinator.py | 25 +++++++------------ .../components/iron_os/manifest.json | 4 +-- .../components/iron_os/quality_scale.yaml | 2 +- mypy.ini | 10 ++++++++ requirements_all.txt | 1 - requirements_test_all.txt | 1 - tests/components/iron_os/conftest.py | 21 +++++++--------- tests/components/iron_os/test_update.py | 8 +++--- 10 files changed, 38 insertions(+), 40 deletions(-) diff --git a/.strict-typing b/.strict-typing index 66dae130fb5..899b22af35f 100644 --- a/.strict-typing +++ b/.strict-typing @@ -271,6 +271,7 @@ homeassistant.components.ios.* homeassistant.components.iotty.* homeassistant.components.ipp.* homeassistant.components.iqvia.* +homeassistant.components.iron_os.* homeassistant.components.islamic_prayer_times.* homeassistant.components.isy994.* homeassistant.components.jellyfin.* diff --git a/homeassistant/components/iron_os/__init__.py b/homeassistant/components/iron_os/__init__.py index 225bf0ff582..0fe5acc2db6 100644 --- a/homeassistant/components/iron_os/__init__.py +++ b/homeassistant/components/iron_os/__init__.py @@ -5,8 +5,7 @@ from __future__ import annotations import logging from typing import TYPE_CHECKING -from aiogithubapi import GitHubAPI -from pynecil import Pynecil +from pynecil import IronOSUpdate, Pynecil from homeassistant.components import bluetooth from homeassistant.config_entries import ConfigEntry @@ -48,7 +47,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up IronOS firmware update coordinator.""" session = async_get_clientsession(hass) - github = GitHubAPI(session=session) + github = IronOSUpdate(session) hass.data[IRON_OS_KEY] = IronOSFirmwareUpdateCoordinator(hass, github) await hass.data[IRON_OS_KEY].async_request_refresh() diff --git a/homeassistant/components/iron_os/coordinator.py b/homeassistant/components/iron_os/coordinator.py index 82c7c3b99cd..e8ddef43bd7 100644 --- a/homeassistant/components/iron_os/coordinator.py +++ b/homeassistant/components/iron_os/coordinator.py @@ -5,15 +5,16 @@ from __future__ import annotations from dataclasses import dataclass from datetime import timedelta import logging -from typing import TYPE_CHECKING -from aiogithubapi import GitHubAPI, GitHubException, GitHubReleaseModel from pynecil import ( CommunicationError, DeviceInfoResponse, + IronOSUpdate, + LatestRelease, LiveDataResponse, Pynecil, SettingsDataResponse, + UpdateException, ) from homeassistant.config_entries import ConfigEntry @@ -104,10 +105,10 @@ class IronOSLiveDataCoordinator(IronOSBaseCoordinator[LiveDataResponse]): return False -class IronOSFirmwareUpdateCoordinator(DataUpdateCoordinator[GitHubReleaseModel]): +class IronOSFirmwareUpdateCoordinator(DataUpdateCoordinator[LatestRelease]): """IronOS coordinator for retrieving update information from github.""" - def __init__(self, hass: HomeAssistant, github: GitHubAPI) -> None: + def 
__init__(self, hass: HomeAssistant, github: IronOSUpdate) -> None: """Initialize IronOS coordinator.""" super().__init__( hass, @@ -118,21 +119,13 @@ class IronOSFirmwareUpdateCoordinator(DataUpdateCoordinator[GitHubReleaseModel]) ) self.github = github - async def _async_update_data(self) -> GitHubReleaseModel: + async def _async_update_data(self) -> LatestRelease: """Fetch data from Github.""" try: - release = await self.github.repos.releases.latest("Ralim/IronOS") - - except GitHubException as e: - raise UpdateFailed( - "Failed to retrieve latest release data from Github" - ) from e - - if TYPE_CHECKING: - assert release.data - - return release.data + return await self.github.latest_release() + except UpdateException as e: + raise UpdateFailed("Failed to check for latest IronOS update") from e class IronOSSettingsCoordinator(IronOSBaseCoordinator[SettingsDataResponse]): diff --git a/homeassistant/components/iron_os/manifest.json b/homeassistant/components/iron_os/manifest.json index 982fae16cc4..8556d1e3609 100644 --- a/homeassistant/components/iron_os/manifest.json +++ b/homeassistant/components/iron_os/manifest.json @@ -12,6 +12,6 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/iron_os", "iot_class": "local_polling", - "loggers": ["pynecil", "aiogithubapi"], - "requirements": ["pynecil==2.1.0", "aiogithubapi==24.6.0"] + "loggers": ["pynecil"], + "requirements": ["pynecil==2.1.0"] } diff --git a/homeassistant/components/iron_os/quality_scale.yaml b/homeassistant/components/iron_os/quality_scale.yaml index b793af1815f..a379e7965b3 100644 --- a/homeassistant/components/iron_os/quality_scale.yaml +++ b/homeassistant/components/iron_os/quality_scale.yaml @@ -81,4 +81,4 @@ rules: inject-websession: status: exempt comment: Device doesn't make http requests. 
- strict-typing: todo + strict-typing: done diff --git a/mypy.ini b/mypy.ini index 6daf54a8eb7..e76bc97585c 100644 --- a/mypy.ini +++ b/mypy.ini @@ -2465,6 +2465,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.iron_os.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.islamic_prayer_times.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/requirements_all.txt b/requirements_all.txt index 237b57a1438..9cdc7021f53 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -252,7 +252,6 @@ aioflo==2021.11.0 aioftp==0.21.3 # homeassistant.components.github -# homeassistant.components.iron_os aiogithubapi==24.6.0 # homeassistant.components.guardian diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 613f9793cf3..70b6674edc8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -237,7 +237,6 @@ aioesphomeapi==28.0.0 aioflo==2021.11.0 # homeassistant.components.github -# homeassistant.components.iron_os aiogithubapi==24.6.0 # homeassistant.components.guardian diff --git a/tests/components/iron_os/conftest.py b/tests/components/iron_os/conftest.py index eda9c2c5d1d..9091694e6a5 100644 --- a/tests/components/iron_os/conftest.py +++ b/tests/components/iron_os/conftest.py @@ -7,6 +7,7 @@ from bleak.backends.device import BLEDevice from habluetooth import BluetoothServiceInfoBleak from pynecil import ( DeviceInfoResponse, + LatestRelease, LiveDataResponse, OperatingMode, PowerSource, @@ -114,24 +115,20 @@ def mock_ble_device() -> Generator[MagicMock]: @pytest.fixture(autouse=True) -def mock_githubapi() -> Generator[AsyncMock]: - """Mock aiogithubapi.""" +def mock_ironosupdate() -> Generator[AsyncMock]: + """Mock IronOSUpdate.""" with patch( - "homeassistant.components.iron_os.GitHubAPI", + "homeassistant.components.iron_os.IronOSUpdate", autospec=True, ) as mock_client: client = mock_client.return_value - client.repos.releases.latest = AsyncMock() - - client.repos.releases.latest.return_value.data.html_url = ( - "https://github.com/Ralim/IronOS/releases/tag/v2.22" + client.latest_release.return_value = LatestRelease( + html_url="https://github.com/Ralim/IronOS/releases/tag/v2.22", + name="V2.22 | TS101 & S60 Added | PinecilV2 improved", + tag_name="v2.22", + body="**RELEASE_NOTES**", ) - client.repos.releases.latest.return_value.data.name = ( - "V2.22 | TS101 & S60 Added | PinecilV2 improved" - ) - client.repos.releases.latest.return_value.data.tag_name = "v2.22" - client.repos.releases.latest.return_value.data.body = "**RELEASE_NOTES**" yield client diff --git a/tests/components/iron_os/test_update.py b/tests/components/iron_os/test_update.py index 7a2650ba7a3..47f3197da0e 100644 --- a/tests/components/iron_os/test_update.py +++ b/tests/components/iron_os/test_update.py @@ -3,7 +3,7 @@ from collections.abc import AsyncGenerator from unittest.mock import AsyncMock, patch -from aiogithubapi import GitHubException +from pynecil import UpdateException import pytest from syrupy.assertion import SnapshotAssertion @@ -26,7 +26,7 @@ async def update_only() -> AsyncGenerator[None]: yield -@pytest.mark.usefixtures("mock_pynecil", "ble_device", "mock_githubapi") +@pytest.mark.usefixtures("mock_pynecil", "ble_device", "mock_ironosupdate") async def 
test_update( hass: HomeAssistant, config_entry: MockConfigEntry, @@ -60,11 +60,11 @@ async def test_update( async def test_update_unavailable( hass: HomeAssistant, config_entry: MockConfigEntry, - mock_githubapi: AsyncMock, + mock_ironosupdate: AsyncMock, ) -> None: """Test update entity unavailable on error.""" - mock_githubapi.repos.releases.latest.side_effect = GitHubException + mock_ironosupdate.latest_release.side_effect = UpdateException config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) From 14a61d94e2fb3bcf8e5661ec6bfa9a0b94a3a905 Mon Sep 17 00:00:00 2001 From: rappenze Date: Sun, 15 Dec 2024 11:49:23 +0100 Subject: [PATCH 0675/1198] Use entry.runtime_data in fibaro (#133235) --- homeassistant/components/fibaro/__init__.py | 16 ++++++++-------- homeassistant/components/fibaro/binary_sensor.py | 8 +++----- homeassistant/components/fibaro/climate.py | 8 +++----- homeassistant/components/fibaro/cover.py | 8 +++----- homeassistant/components/fibaro/event.py | 8 +++----- homeassistant/components/fibaro/light.py | 8 +++----- homeassistant/components/fibaro/lock.py | 8 +++----- homeassistant/components/fibaro/scene.py | 7 +++---- homeassistant/components/fibaro/sensor.py | 8 +++----- homeassistant/components/fibaro/switch.py | 8 +++----- 10 files changed, 35 insertions(+), 52 deletions(-) diff --git a/homeassistant/components/fibaro/__init__.py b/homeassistant/components/fibaro/__init__.py index 18b9f46eb20..8ede0169482 100644 --- a/homeassistant/components/fibaro/__init__.py +++ b/homeassistant/components/fibaro/__init__.py @@ -28,8 +28,9 @@ from homeassistant.util import slugify from .const import CONF_IMPORT_PLUGINS, DOMAIN -_LOGGER = logging.getLogger(__name__) +type FibaroConfigEntry = ConfigEntry[FibaroController] +_LOGGER = logging.getLogger(__name__) PLATFORMS = [ Platform.BINARY_SENSOR, @@ -381,7 +382,7 @@ def init_controller(data: Mapping[str, Any]) -> FibaroController: return controller -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: FibaroConfigEntry) -> bool: """Set up the Fibaro Component. The unique id of the config entry is the serial number of the home center. @@ -395,7 +396,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except FibaroAuthFailed as auth_ex: raise ConfigEntryAuthFailed from auth_ex - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = controller + entry.runtime_data = controller # register the hub device info separately as the hub has sometimes no entities device_registry = dr.async_get(hass) @@ -417,25 +418,24 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: FibaroConfigEntry) -> bool: """Unload a config entry.""" _LOGGER.debug("Shutting down Fibaro connection") unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - hass.data[DOMAIN][entry.entry_id].disable_state_handler() - hass.data[DOMAIN].pop(entry.entry_id) + entry.runtime_data.disable_state_handler() return unload_ok async def async_remove_config_entry_device( - hass: HomeAssistant, config_entry: ConfigEntry, device_entry: DeviceEntry + hass: HomeAssistant, config_entry: FibaroConfigEntry, device_entry: DeviceEntry ) -> bool: """Remove a device entry from fibaro integration. 
Only removing devices which are not present anymore are eligible to be removed. """ - controller: FibaroController = hass.data[DOMAIN][config_entry.entry_id] + controller = config_entry.runtime_data for identifiers in controller.get_all_device_identifiers(): if device_entry.identifiers == identifiers: # Fibaro device is still served by the controller, diff --git a/homeassistant/components/fibaro/binary_sensor.py b/homeassistant/components/fibaro/binary_sensor.py index 9f3efbfb514..16e79c0c1d0 100644 --- a/homeassistant/components/fibaro/binary_sensor.py +++ b/homeassistant/components/fibaro/binary_sensor.py @@ -12,13 +12,11 @@ from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import FibaroController -from .const import DOMAIN +from . import FibaroConfigEntry from .entity import FibaroEntity SENSOR_TYPES = { @@ -43,11 +41,11 @@ SENSOR_TYPES = { async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Perform the setup for Fibaro controller devices.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data async_add_entities( [ FibaroBinarySensor(device) diff --git a/homeassistant/components/fibaro/climate.py b/homeassistant/components/fibaro/climate.py index d5605e71c73..45f700026a0 100644 --- a/homeassistant/components/fibaro/climate.py +++ b/homeassistant/components/fibaro/climate.py @@ -17,13 +17,11 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, Platform, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import FibaroController -from .const import DOMAIN +from . import FibaroConfigEntry from .entity import FibaroEntity PRESET_RESUME = "resume" @@ -111,11 +109,11 @@ OP_MODE_ACTIONS = ("setMode", "setOperatingMode", "setThermostatMode") async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Perform the setup for Fibaro controller devices.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data async_add_entities( [ FibaroThermostat(device) diff --git a/homeassistant/components/fibaro/cover.py b/homeassistant/components/fibaro/cover.py index 0898d1c9318..bfebbf87bd2 100644 --- a/homeassistant/components/fibaro/cover.py +++ b/homeassistant/components/fibaro/cover.py @@ -13,23 +13,21 @@ from homeassistant.components.cover import ( CoverEntity, CoverEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import FibaroController -from .const import DOMAIN +from . 
import FibaroConfigEntry from .entity import FibaroEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fibaro covers.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data async_add_entities( [FibaroCover(device) for device in controller.fibaro_devices[Platform.COVER]], True, diff --git a/homeassistant/components/fibaro/event.py b/homeassistant/components/fibaro/event.py index c964ab283c1..a2d5da7f877 100644 --- a/homeassistant/components/fibaro/event.py +++ b/homeassistant/components/fibaro/event.py @@ -10,23 +10,21 @@ from homeassistant.components.event import ( EventDeviceClass, EventEntity, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import FibaroController -from .const import DOMAIN +from . import FibaroConfigEntry from .entity import FibaroEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fibaro event entities.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data # Each scene event represents a button on a device async_add_entities( diff --git a/homeassistant/components/fibaro/light.py b/homeassistant/components/fibaro/light.py index 18f86b6df7d..d40e26244f3 100644 --- a/homeassistant/components/fibaro/light.py +++ b/homeassistant/components/fibaro/light.py @@ -17,13 +17,11 @@ from homeassistant.components.light import ( brightness_supported, color_supported, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import FibaroController -from .const import DOMAIN +from . import FibaroConfigEntry from .entity import FibaroEntity PARALLEL_UPDATES = 2 @@ -52,11 +50,11 @@ def scaleto99(value: int | None) -> int: async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Perform the setup for Fibaro controller devices.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data async_add_entities( [FibaroLight(device) for device in controller.fibaro_devices[Platform.LIGHT]], True, diff --git a/homeassistant/components/fibaro/lock.py b/homeassistant/components/fibaro/lock.py index 55583d2a967..62a9dfa43b1 100644 --- a/homeassistant/components/fibaro/lock.py +++ b/homeassistant/components/fibaro/lock.py @@ -7,23 +7,21 @@ from typing import Any from pyfibaro.fibaro_device import DeviceModel from homeassistant.components.lock import ENTITY_ID_FORMAT, LockEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import FibaroController -from .const import DOMAIN +from . 
import FibaroConfigEntry from .entity import FibaroEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fibaro locks.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data async_add_entities( [FibaroLock(device) for device in controller.fibaro_devices[Platform.LOCK]], True, diff --git a/homeassistant/components/fibaro/scene.py b/homeassistant/components/fibaro/scene.py index a40a1ef5b57..a4c0f1bd7f1 100644 --- a/homeassistant/components/fibaro/scene.py +++ b/homeassistant/components/fibaro/scene.py @@ -7,23 +7,22 @@ from typing import Any from pyfibaro.fibaro_scene import SceneModel from homeassistant.components.scene import Scene -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import slugify -from . import FibaroController +from . import FibaroConfigEntry, FibaroController from .const import DOMAIN async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Perform the setup for Fibaro scenes.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data async_add_entities( [FibaroScene(scene, controller) for scene in controller.read_scenes()], True, diff --git a/homeassistant/components/fibaro/sensor.py b/homeassistant/components/fibaro/sensor.py index da94cde9ead..245a0d087d8 100644 --- a/homeassistant/components/fibaro/sensor.py +++ b/homeassistant/components/fibaro/sensor.py @@ -13,7 +13,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONCENTRATION_PARTS_PER_MILLION, LIGHT_LUX, @@ -27,8 +26,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import convert -from . import FibaroController -from .const import DOMAIN +from . import FibaroConfigEntry from .entity import FibaroEntity # List of known sensors which represents a fibaro device @@ -103,12 +101,12 @@ FIBARO_TO_HASS_UNIT: dict[str, str] = { async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fibaro controller devices.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data entities: list[SensorEntity] = [ FibaroSensor(device, MAIN_SENSOR_TYPES.get(device.type)) for device in controller.fibaro_devices[Platform.SENSOR] diff --git a/homeassistant/components/fibaro/switch.py b/homeassistant/components/fibaro/switch.py index 1ad933f5d20..f67683dff6a 100644 --- a/homeassistant/components/fibaro/switch.py +++ b/homeassistant/components/fibaro/switch.py @@ -7,23 +7,21 @@ from typing import Any from pyfibaro.fibaro_device import DeviceModel from homeassistant.components.switch import ENTITY_ID_FORMAT, SwitchEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import FibaroController -from .const import DOMAIN +from . import FibaroConfigEntry from .entity import FibaroEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fibaro switches.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data async_add_entities( [FibaroSwitch(device) for device in controller.fibaro_devices[Platform.SWITCH]], True, From 73cb3fa88dda485ca38746c3569df3ada3e7821e Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Sun, 15 Dec 2024 11:55:33 +0100 Subject: [PATCH 0676/1198] Fix lingering mqtt device_trigger unload entry test (#133202) --- tests/components/mqtt/test_device_trigger.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/tests/components/mqtt/test_device_trigger.py b/tests/components/mqtt/test_device_trigger.py index 009a0315029..5cdfb14a5cf 100644 --- a/tests/components/mqtt/test_device_trigger.py +++ b/tests/components/mqtt/test_device_trigger.py @@ -2,6 +2,7 @@ import json from typing import Any +from unittest.mock import patch import pytest from pytest_unordered import unordered @@ -1692,14 +1693,19 @@ async def test_trigger_debug_info( assert debug_info_data["triggers"][0]["discovery_data"]["payload"] == config2 -@pytest.mark.usefixtures("mqtt_mock") +@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.UNSUBSCRIBE_COOLDOWN", 0.0) async def test_unload_entry( hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, ) -> None: """Test unloading the MQTT entry.""" + await mqtt_mock_entry() data1 = ( '{ "automation_type":"trigger",' ' "device":{"identifiers":["0AFFD2"]},' @@ -1733,6 +1739,7 @@ async def test_unload_entry( ] }, ) + await hass.async_block_till_done() # Fake short press 1 async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") From ebc8ca8419c534795afff15f2d184d3d14176b2e Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Sun, 15 Dec 2024 12:10:54 +0100 Subject: [PATCH 0677/1198] Replace "this" with "a" to fix Install Update action description (#133210) --- homeassistant/components/update/strings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/update/strings.json b/homeassistant/components/update/strings.json index eb6db257bb2..5194965cf69 100644 --- a/homeassistant/components/update/strings.json +++ b/homeassistant/components/update/strings.json @@ -56,7 +56,7 @@ "services": { "install": { "name": "Install update", - "description": "Installs an update for this device or service.", + "description": "Installs an update for a device or service.", "fields": { "version": { "name": "Version", @@ -64,7 +64,7 @@ }, "backup": { "name": "Backup", - "description": "If supported by the integration, this creates a backup before starting the update ." + "description": "If supported by the integration, this creates a backup before starting the update." 
} } }, From 8953ac13574eea3655409cdc6d8d638d152e2558 Mon Sep 17 00:00:00 2001 From: Richard Kroegel <42204099+rikroe@users.noreply.github.com> Date: Sun, 15 Dec 2024 12:16:10 +0100 Subject: [PATCH 0678/1198] Improve BMW translations (#133236) --- .../components/bmw_connected_drive/button.py | 9 +++-- .../bmw_connected_drive/coordinator.py | 29 ++++++++++++--- .../bmw_connected_drive/device_tracker.py | 4 +- .../components/bmw_connected_drive/lock.py | 14 +++++-- .../components/bmw_connected_drive/notify.py | 10 +++-- .../components/bmw_connected_drive/number.py | 8 +++- .../components/bmw_connected_drive/select.py | 8 +++- .../bmw_connected_drive/strings.json | 27 +++++++++++++- .../components/bmw_connected_drive/switch.py | 16 +++++--- .../bmw_connected_drive/__init__.py | 5 +++ .../bmw_connected_drive/test_button.py | 12 ++++-- .../bmw_connected_drive/test_lock.py | 11 ++++-- .../bmw_connected_drive/test_notify.py | 19 ++++++---- .../bmw_connected_drive/test_number.py | 37 +++++++++++++++---- .../bmw_connected_drive/test_select.py | 37 +++++++++++++++---- .../bmw_connected_drive/test_switch.py | 27 ++++++++++---- 16 files changed, 209 insertions(+), 64 deletions(-) diff --git a/homeassistant/components/bmw_connected_drive/button.py b/homeassistant/components/bmw_connected_drive/button.py index 1b3043a2dcb..a7c31d0ef79 100644 --- a/homeassistant/components/bmw_connected_drive/button.py +++ b/homeassistant/components/bmw_connected_drive/button.py @@ -16,7 +16,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import BMWConfigEntry +from . import DOMAIN as BMW_DOMAIN, BMWConfigEntry from .entity import BMWBaseEntity if TYPE_CHECKING: @@ -55,7 +55,6 @@ BUTTON_TYPES: tuple[BMWButtonEntityDescription, ...] 
= ( BMWButtonEntityDescription( key="deactivate_air_conditioning", translation_key="deactivate_air_conditioning", - name="Deactivate air conditioning", remote_function=lambda vehicle: vehicle.remote_services.trigger_remote_air_conditioning_stop(), is_available=lambda vehicle: vehicle.is_remote_climate_stop_enabled, ), @@ -111,6 +110,10 @@ class BMWButton(BMWBaseEntity, ButtonEntity): try: await self.entity_description.remote_function(self.vehicle) except MyBMWAPIError as ex: - raise HomeAssistantError(ex) from ex + raise HomeAssistantError( + translation_domain=BMW_DOMAIN, + translation_key="remote_service_error", + translation_placeholders={"exception": str(ex)}, + ) from ex self.coordinator.async_update_listeners() diff --git a/homeassistant/components/bmw_connected_drive/coordinator.py b/homeassistant/components/bmw_connected_drive/coordinator.py index 3828a827e68..815bf3393e4 100644 --- a/homeassistant/components/bmw_connected_drive/coordinator.py +++ b/homeassistant/components/bmw_connected_drive/coordinator.py @@ -22,7 +22,13 @@ from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from homeassistant.util.ssl import get_default_context -from .const import CONF_GCID, CONF_READ_ONLY, CONF_REFRESH_TOKEN, DOMAIN, SCAN_INTERVALS +from .const import ( + CONF_GCID, + CONF_READ_ONLY, + CONF_REFRESH_TOKEN, + DOMAIN as BMW_DOMAIN, + SCAN_INTERVALS, +) _LOGGER = logging.getLogger(__name__) @@ -57,7 +63,7 @@ class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]): hass, _LOGGER, config_entry=config_entry, - name=f"{DOMAIN}-{config_entry.data[CONF_USERNAME]}", + name=f"{BMW_DOMAIN}-{config_entry.data[CONF_USERNAME]}", update_interval=timedelta( seconds=SCAN_INTERVALS[config_entry.data[CONF_REGION]] ), @@ -75,18 +81,29 @@ class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]): except MyBMWCaptchaMissingError as err: # If a captcha is required (user/password login flow), always trigger the reauth flow raise ConfigEntryAuthFailed( - translation_domain=DOMAIN, + translation_domain=BMW_DOMAIN, translation_key="missing_captcha", ) from err except MyBMWAuthError as err: # Allow one retry interval before raising AuthFailed to avoid flaky API issues if self.last_update_success: - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=BMW_DOMAIN, + translation_key="update_failed", + translation_placeholders={"exception": str(err)}, + ) from err # Clear refresh token and trigger reauth if previous update failed as well self._update_config_entry_refresh_token(None) - raise ConfigEntryAuthFailed(err) from err + raise ConfigEntryAuthFailed( + translation_domain=BMW_DOMAIN, + translation_key="invalid_auth", + ) from err except (MyBMWAPIError, RequestError) as err: - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=BMW_DOMAIN, + translation_key="update_failed", + translation_placeholders={"exception": str(err)}, + ) from err if self.account.refresh_token != old_refresh_token: self._update_config_entry_refresh_token(self.account.refresh_token) diff --git a/homeassistant/components/bmw_connected_drive/device_tracker.py b/homeassistant/components/bmw_connected_drive/device_tracker.py index f53cd72d5de..74df8693f7a 100644 --- a/homeassistant/components/bmw_connected_drive/device_tracker.py +++ b/homeassistant/components/bmw_connected_drive/device_tracker.py @@ -49,7 +49,7 @@ class BMWDeviceTracker(BMWBaseEntity, TrackerEntity): _attr_force_update = False 
_attr_translation_key = "car" - _attr_icon = "mdi:car" + _attr_name = None def __init__( self, @@ -58,9 +58,7 @@ class BMWDeviceTracker(BMWBaseEntity, TrackerEntity): ) -> None: """Initialize the Tracker.""" super().__init__(coordinator, vehicle) - self._attr_unique_id = vehicle.vin - self._attr_name = None @property def extra_state_attributes(self) -> dict[str, Any]: diff --git a/homeassistant/components/bmw_connected_drive/lock.py b/homeassistant/components/bmw_connected_drive/lock.py index 4aa0b411895..4bec12e796b 100644 --- a/homeassistant/components/bmw_connected_drive/lock.py +++ b/homeassistant/components/bmw_connected_drive/lock.py @@ -14,7 +14,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import BMWConfigEntry +from . import DOMAIN as BMW_DOMAIN, BMWConfigEntry from .coordinator import BMWDataUpdateCoordinator from .entity import BMWBaseEntity @@ -70,7 +70,11 @@ class BMWLock(BMWBaseEntity, LockEntity): # Set the state to unknown if the command fails self._attr_is_locked = None self.async_write_ha_state() - raise HomeAssistantError(ex) from ex + raise HomeAssistantError( + translation_domain=BMW_DOMAIN, + translation_key="remote_service_error", + translation_placeholders={"exception": str(ex)}, + ) from ex finally: # Always update the listeners to get the latest state self.coordinator.async_update_listeners() @@ -90,7 +94,11 @@ class BMWLock(BMWBaseEntity, LockEntity): # Set the state to unknown if the command fails self._attr_is_locked = None self.async_write_ha_state() - raise HomeAssistantError(ex) from ex + raise HomeAssistantError( + translation_domain=BMW_DOMAIN, + translation_key="remote_service_error", + translation_placeholders={"exception": str(ex)}, + ) from ex finally: # Always update the listeners to get the latest state self.coordinator.async_update_listeners() diff --git a/homeassistant/components/bmw_connected_drive/notify.py b/homeassistant/components/bmw_connected_drive/notify.py index 04b9fa594e4..dfa0939e81f 100644 --- a/homeassistant/components/bmw_connected_drive/notify.py +++ b/homeassistant/components/bmw_connected_drive/notify.py @@ -20,7 +20,7 @@ from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import DOMAIN, BMWConfigEntry +from . 
import DOMAIN as BMW_DOMAIN, BMWConfigEntry PARALLEL_UPDATES = 1 @@ -92,7 +92,7 @@ class BMWNotificationService(BaseNotificationService): except (vol.Invalid, TypeError, ValueError) as ex: raise ServiceValidationError( - translation_domain=DOMAIN, + translation_domain=BMW_DOMAIN, translation_key="invalid_poi", translation_placeholders={ "poi_exception": str(ex), @@ -106,4 +106,8 @@ class BMWNotificationService(BaseNotificationService): try: await vehicle.remote_services.trigger_send_poi(poi) except MyBMWAPIError as ex: - raise HomeAssistantError(ex) from ex + raise HomeAssistantError( + translation_domain=BMW_DOMAIN, + translation_key="remote_service_error", + translation_placeholders={"exception": str(ex)}, + ) from ex diff --git a/homeassistant/components/bmw_connected_drive/number.py b/homeassistant/components/bmw_connected_drive/number.py index 7181bad76e0..c6a328ecc20 100644 --- a/homeassistant/components/bmw_connected_drive/number.py +++ b/homeassistant/components/bmw_connected_drive/number.py @@ -18,7 +18,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import BMWConfigEntry +from . import DOMAIN as BMW_DOMAIN, BMWConfigEntry from .coordinator import BMWDataUpdateCoordinator from .entity import BMWBaseEntity @@ -109,6 +109,10 @@ class BMWNumber(BMWBaseEntity, NumberEntity): try: await self.entity_description.remote_service(self.vehicle, value) except MyBMWAPIError as ex: - raise HomeAssistantError(ex) from ex + raise HomeAssistantError( + translation_domain=BMW_DOMAIN, + translation_key="remote_service_error", + translation_placeholders={"exception": str(ex)}, + ) from ex self.coordinator.async_update_listeners() diff --git a/homeassistant/components/bmw_connected_drive/select.py b/homeassistant/components/bmw_connected_drive/select.py index 7091cbc6817..385b45fd9fa 100644 --- a/homeassistant/components/bmw_connected_drive/select.py +++ b/homeassistant/components/bmw_connected_drive/select.py @@ -15,7 +15,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import BMWConfigEntry +from . 
import DOMAIN as BMW_DOMAIN, BMWConfigEntry from .coordinator import BMWDataUpdateCoordinator from .entity import BMWBaseEntity @@ -123,6 +123,10 @@ class BMWSelect(BMWBaseEntity, SelectEntity): try: await self.entity_description.remote_service(self.vehicle, option) except MyBMWAPIError as ex: - raise HomeAssistantError(ex) from ex + raise HomeAssistantError( + translation_domain=BMW_DOMAIN, + translation_key="remote_service_error", + translation_placeholders={"exception": str(ex)}, + ) from ex self.coordinator.async_update_listeners() diff --git a/homeassistant/components/bmw_connected_drive/strings.json b/homeassistant/components/bmw_connected_drive/strings.json index 93abce5d73f..edb0d5cfb12 100644 --- a/homeassistant/components/bmw_connected_drive/strings.json +++ b/homeassistant/components/bmw_connected_drive/strings.json @@ -2,11 +2,16 @@ "config": { "step": { "user": { - "description": "Enter your MyBMW/MINI Connected credentials.", + "description": "Connect to your MyBMW/MINI Connected account to retrieve vehicle data.", "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", "region": "ConnectedDrive Region" + }, + "data_description": { + "username": "The email address of your MyBMW/MINI Connected account.", + "password": "The password of your MyBMW/MINI Connected account.", + "region": "The region of your MyBMW/MINI Connected account." } }, "captcha": { @@ -23,6 +28,9 @@ "description": "Update your MyBMW/MINI Connected password for account `{username}` in region `{region}`.", "data": { "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "[%key:component::bmw_connected_drive::config::step::user::data_description::password%]" } } }, @@ -41,7 +49,10 @@ "step": { "account_options": { "data": { - "read_only": "Read-only (only sensors and notify, no execution of services, no lock)" + "read_only": "Read-only mode" + }, + "data_description": { + "read_only": "Only retrieve values and send POI data, but don't offer any services that can change the vehicle state." } } } @@ -83,6 +94,9 @@ "activate_air_conditioning": { "name": "Activate air conditioning" }, + "deactivate_air_conditioning": { + "name": "Deactivate air conditioning" + }, "find_vehicle": { "name": "Find vehicle" } @@ -220,6 +234,15 @@ }, "missing_captcha": { "message": "Login requires captcha validation" + }, + "invalid_auth": { + "message": "[%key:common::config_flow::error::invalid_auth%]" + }, + "remote_service_error": { + "message": "Error executing remote service on vehicle. {exception}" + }, + "update_failed": { + "message": "Error updating vehicle data. {exception}" } } } diff --git a/homeassistant/components/bmw_connected_drive/switch.py b/homeassistant/components/bmw_connected_drive/switch.py index 826f6b840b2..600ad41165a 100644 --- a/homeassistant/components/bmw_connected_drive/switch.py +++ b/homeassistant/components/bmw_connected_drive/switch.py @@ -14,7 +14,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import BMWConfigEntry +from . 
import DOMAIN as BMW_DOMAIN, BMWConfigEntry from .coordinator import BMWDataUpdateCoordinator from .entity import BMWBaseEntity @@ -111,8 +111,11 @@ class BMWSwitch(BMWBaseEntity, SwitchEntity): try: await self.entity_description.remote_service_on(self.vehicle) except MyBMWAPIError as ex: - raise HomeAssistantError(ex) from ex - + raise HomeAssistantError( + translation_domain=BMW_DOMAIN, + translation_key="remote_service_error", + translation_placeholders={"exception": str(ex)}, + ) from ex self.coordinator.async_update_listeners() async def async_turn_off(self, **kwargs: Any) -> None: @@ -120,6 +123,9 @@ class BMWSwitch(BMWBaseEntity, SwitchEntity): try: await self.entity_description.remote_service_off(self.vehicle) except MyBMWAPIError as ex: - raise HomeAssistantError(ex) from ex - + raise HomeAssistantError( + translation_domain=BMW_DOMAIN, + translation_key="remote_service_error", + translation_placeholders={"exception": str(ex)}, + ) from ex self.coordinator.async_update_listeners() diff --git a/tests/components/bmw_connected_drive/__init__.py b/tests/components/bmw_connected_drive/__init__.py index f490b854749..c437e1d3669 100644 --- a/tests/components/bmw_connected_drive/__init__.py +++ b/tests/components/bmw_connected_drive/__init__.py @@ -48,6 +48,11 @@ FIXTURE_CONFIG_ENTRY = { "unique_id": f"{FIXTURE_USER_INPUT[CONF_REGION]}-{FIXTURE_USER_INPUT[CONF_USERNAME]}", } +REMOTE_SERVICE_EXC_REASON = "HTTPStatusError: 502 Bad Gateway" +REMOTE_SERVICE_EXC_TRANSLATION = ( + "Error executing remote service on vehicle. HTTPStatusError: 502 Bad Gateway" +) + async def setup_mocked_integration(hass: HomeAssistant) -> MockConfigEntry: """Mock a fully setup config entry and all components based on fixtures.""" diff --git a/tests/components/bmw_connected_drive/test_button.py b/tests/components/bmw_connected_drive/test_button.py index 88c7990cde9..356cfcb439e 100644 --- a/tests/components/bmw_connected_drive/test_button.py +++ b/tests/components/bmw_connected_drive/test_button.py @@ -13,7 +13,11 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from . import check_remote_service_call, setup_mocked_integration +from . import ( + REMOTE_SERVICE_EXC_TRANSLATION, + check_remote_service_call, + setup_mocked_integration, +) from tests.common import snapshot_platform @@ -81,11 +85,13 @@ async def test_service_call_fail( monkeypatch.setattr( RemoteServices, "trigger_remote_service", - AsyncMock(side_effect=MyBMWRemoteServiceError), + AsyncMock( + side_effect=MyBMWRemoteServiceError("HTTPStatusError: 502 Bad Gateway") + ), ) # Test - with pytest.raises(HomeAssistantError): + with pytest.raises(HomeAssistantError, match=REMOTE_SERVICE_EXC_TRANSLATION): await hass.services.async_call( "button", "press", diff --git a/tests/components/bmw_connected_drive/test_lock.py b/tests/components/bmw_connected_drive/test_lock.py index 2fa694d426b..088534c79f5 100644 --- a/tests/components/bmw_connected_drive/test_lock.py +++ b/tests/components/bmw_connected_drive/test_lock.py @@ -16,7 +16,12 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util -from . import check_remote_service_call, setup_mocked_integration +from . 
import ( + REMOTE_SERVICE_EXC_REASON, + REMOTE_SERVICE_EXC_TRANSLATION, + check_remote_service_call, + setup_mocked_integration, +) from tests.common import snapshot_platform from tests.components.recorder.common import async_wait_recording_done @@ -118,11 +123,11 @@ async def test_service_call_fail( monkeypatch.setattr( RemoteServices, "trigger_remote_service", - AsyncMock(side_effect=MyBMWRemoteServiceError), + AsyncMock(side_effect=MyBMWRemoteServiceError(REMOTE_SERVICE_EXC_REASON)), ) # Test - with pytest.raises(HomeAssistantError): + with pytest.raises(HomeAssistantError, match=REMOTE_SERVICE_EXC_TRANSLATION): await hass.services.async_call( "lock", service, diff --git a/tests/components/bmw_connected_drive/test_notify.py b/tests/components/bmw_connected_drive/test_notify.py index 4113f618be0..1bade3be011 100644 --- a/tests/components/bmw_connected_drive/test_notify.py +++ b/tests/components/bmw_connected_drive/test_notify.py @@ -11,7 +11,11 @@ import respx from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from . import check_remote_service_call, setup_mocked_integration +from . import ( + REMOTE_SERVICE_EXC_TRANSLATION, + check_remote_service_call, + setup_mocked_integration, +) async def test_legacy_notify_service_simple( @@ -68,21 +72,21 @@ async def test_legacy_notify_service_simple( { "latitude": POI_DATA.get("lat"), }, - "Invalid data for point of interest: required key not provided @ data['longitude']", + r"Invalid data for point of interest: required key not provided @ data\['longitude'\]", ), ( { "latitude": POI_DATA.get("lat"), "longitude": "text", }, - "Invalid data for point of interest: invalid longitude for dictionary value @ data['longitude']", + r"Invalid data for point of interest: invalid longitude for dictionary value @ data\['longitude'\]", ), ( { "latitude": POI_DATA.get("lat"), "longitude": 9999, }, - "Invalid data for point of interest: invalid longitude for dictionary value @ data['longitude']", + r"Invalid data for point of interest: invalid longitude for dictionary value @ data\['longitude'\]", ), ], ) @@ -96,7 +100,7 @@ async def test_service_call_invalid_input( # Setup component assert await setup_mocked_integration(hass) - with pytest.raises(ServiceValidationError) as exc: + with pytest.raises(ServiceValidationError, match=exc_translation): await hass.services.async_call( "notify", "bmw_connected_drive_ix_xdrive50", @@ -106,7 +110,6 @@ async def test_service_call_invalid_input( }, blocking=True, ) - assert str(exc.value) == exc_translation @pytest.mark.usefixtures("bmw_fixture") @@ -132,11 +135,11 @@ async def test_service_call_fail( monkeypatch.setattr( RemoteServices, "trigger_remote_service", - AsyncMock(side_effect=raised), + AsyncMock(side_effect=raised("HTTPStatusError: 502 Bad Gateway")), ) # Test - with pytest.raises(expected): + with pytest.raises(expected, match=REMOTE_SERVICE_EXC_TRANSLATION): await hass.services.async_call( "notify", "bmw_connected_drive_ix_xdrive50", diff --git a/tests/components/bmw_connected_drive/test_number.py b/tests/components/bmw_connected_drive/test_number.py index f2a50ce4df6..733f4fe3113 100644 --- a/tests/components/bmw_connected_drive/test_number.py +++ b/tests/components/bmw_connected_drive/test_number.py @@ -13,7 +13,12 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from . 
import check_remote_service_call, setup_mocked_integration +from . import ( + REMOTE_SERVICE_EXC_REASON, + REMOTE_SERVICE_EXC_TRANSLATION, + check_remote_service_call, + setup_mocked_integration, +) from tests.common import snapshot_platform @@ -89,7 +94,10 @@ async def test_service_call_invalid_input( old_value = hass.states.get(entity_id).state # Test - with pytest.raises(ValueError): + with pytest.raises( + ValueError, + match="Target SoC must be an integer between 20 and 100 that is a multiple of 5.", + ): await hass.services.async_call( "number", "set_value", @@ -102,17 +110,32 @@ async def test_service_call_invalid_input( @pytest.mark.usefixtures("bmw_fixture") @pytest.mark.parametrize( - ("raised", "expected"), + ("raised", "expected", "exc_translation"), [ - (MyBMWRemoteServiceError, HomeAssistantError), - (MyBMWAPIError, HomeAssistantError), - (ValueError, ValueError), + ( + MyBMWRemoteServiceError(REMOTE_SERVICE_EXC_REASON), + HomeAssistantError, + REMOTE_SERVICE_EXC_TRANSLATION, + ), + ( + MyBMWAPIError(REMOTE_SERVICE_EXC_REASON), + HomeAssistantError, + REMOTE_SERVICE_EXC_TRANSLATION, + ), + ( + ValueError( + "Target SoC must be an integer between 20 and 100 that is a multiple of 5." + ), + ValueError, + "Target SoC must be an integer between 20 and 100 that is a multiple of 5.", + ), ], ) async def test_service_call_fail( hass: HomeAssistant, raised: Exception, expected: Exception, + exc_translation: str, monkeypatch: pytest.MonkeyPatch, ) -> None: """Test exception handling.""" @@ -130,7 +153,7 @@ async def test_service_call_fail( ) # Test - with pytest.raises(expected): + with pytest.raises(expected, match=exc_translation): await hass.services.async_call( "number", "set_value", diff --git a/tests/components/bmw_connected_drive/test_select.py b/tests/components/bmw_connected_drive/test_select.py index a270f38ee01..53c39f572f2 100644 --- a/tests/components/bmw_connected_drive/test_select.py +++ b/tests/components/bmw_connected_drive/test_select.py @@ -16,7 +16,12 @@ from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import entity_registry as er from homeassistant.helpers.translation import async_get_translations -from . import check_remote_service_call, setup_mocked_integration +from . 
import ( + REMOTE_SERVICE_EXC_REASON, + REMOTE_SERVICE_EXC_TRANSLATION, + check_remote_service_call, + setup_mocked_integration, +) from tests.common import snapshot_platform @@ -105,7 +110,10 @@ async def test_service_call_invalid_input( old_value = hass.states.get(entity_id).state # Test - with pytest.raises(ServiceValidationError): + with pytest.raises( + ServiceValidationError, + match=f"Option {value} is not valid for entity {entity_id}", + ): await hass.services.async_call( "select", "select_option", @@ -118,17 +126,32 @@ async def test_service_call_invalid_input( @pytest.mark.usefixtures("bmw_fixture") @pytest.mark.parametrize( - ("raised", "expected"), + ("raised", "expected", "exc_translation"), [ - (MyBMWRemoteServiceError, HomeAssistantError), - (MyBMWAPIError, HomeAssistantError), - (ServiceValidationError, ServiceValidationError), + ( + MyBMWRemoteServiceError(REMOTE_SERVICE_EXC_REASON), + HomeAssistantError, + REMOTE_SERVICE_EXC_TRANSLATION, + ), + ( + MyBMWAPIError(REMOTE_SERVICE_EXC_REASON), + HomeAssistantError, + REMOTE_SERVICE_EXC_TRANSLATION, + ), + ( + ServiceValidationError( + "Option 17 is not valid for entity select.i4_edrive40_ac_charging_limit" + ), + ServiceValidationError, + "Option 17 is not valid for entity select.i4_edrive40_ac_charging_limit", + ), ], ) async def test_service_call_fail( hass: HomeAssistant, raised: Exception, expected: Exception, + exc_translation: str, monkeypatch: pytest.MonkeyPatch, ) -> None: """Test exception handling.""" @@ -146,7 +169,7 @@ async def test_service_call_fail( ) # Test - with pytest.raises(expected): + with pytest.raises(expected, match=exc_translation): await hass.services.async_call( "select", "select_option", diff --git a/tests/components/bmw_connected_drive/test_switch.py b/tests/components/bmw_connected_drive/test_switch.py index 58bddbfc937..c28b651abaf 100644 --- a/tests/components/bmw_connected_drive/test_switch.py +++ b/tests/components/bmw_connected_drive/test_switch.py @@ -13,7 +13,12 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from . import check_remote_service_call, setup_mocked_integration +from . 
import ( + REMOTE_SERVICE_EXC_REASON, + REMOTE_SERVICE_EXC_TRANSLATION, + check_remote_service_call, + setup_mocked_integration, +) from tests.common import snapshot_platform @@ -75,17 +80,25 @@ async def test_service_call_success( @pytest.mark.usefixtures("bmw_fixture") @pytest.mark.parametrize( - ("raised", "expected"), + ("raised", "expected", "exc_translation"), [ - (MyBMWRemoteServiceError, HomeAssistantError), - (MyBMWAPIError, HomeAssistantError), - (ValueError, ValueError), + ( + MyBMWRemoteServiceError(REMOTE_SERVICE_EXC_REASON), + HomeAssistantError, + REMOTE_SERVICE_EXC_TRANSLATION, + ), + ( + MyBMWAPIError(REMOTE_SERVICE_EXC_REASON), + HomeAssistantError, + REMOTE_SERVICE_EXC_TRANSLATION, + ), ], ) async def test_service_call_fail( hass: HomeAssistant, raised: Exception, expected: Exception, + exc_translation: str, monkeypatch: pytest.MonkeyPatch, ) -> None: """Test exception handling.""" @@ -107,7 +120,7 @@ async def test_service_call_fail( assert hass.states.get(entity_id).state == old_value # Test - with pytest.raises(expected): + with pytest.raises(expected, match=exc_translation): await hass.services.async_call( "switch", "turn_on", @@ -122,7 +135,7 @@ async def test_service_call_fail( assert hass.states.get(entity_id).state == old_value # Test - with pytest.raises(expected): + with pytest.raises(expected, match=exc_translation): await hass.services.async_call( "switch", "turn_off", From d1e466e6150f9890547ab9afa3708163105a165f Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Sun, 15 Dec 2024 12:19:25 +0100 Subject: [PATCH 0679/1198] Update elevenlabs to 1.9.0 (#133264) --- homeassistant/components/elevenlabs/__init__.py | 3 +-- homeassistant/components/elevenlabs/config_flow.py | 2 +- homeassistant/components/elevenlabs/manifest.json | 2 +- homeassistant/components/elevenlabs/tts.py | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/elevenlabs/conftest.py | 2 +- 7 files changed, 7 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/elevenlabs/__init__.py b/homeassistant/components/elevenlabs/__init__.py index 7da4802e98a..db7a7f64c97 100644 --- a/homeassistant/components/elevenlabs/__init__.py +++ b/homeassistant/components/elevenlabs/__init__.py @@ -4,8 +4,7 @@ from __future__ import annotations from dataclasses import dataclass -from elevenlabs import Model -from elevenlabs.client import AsyncElevenLabs +from elevenlabs import AsyncElevenLabs, Model from elevenlabs.core import ApiError from homeassistant.config_entries import ConfigEntry diff --git a/homeassistant/components/elevenlabs/config_flow.py b/homeassistant/components/elevenlabs/config_flow.py index 227150a0f4e..55cdd3ea944 100644 --- a/homeassistant/components/elevenlabs/config_flow.py +++ b/homeassistant/components/elevenlabs/config_flow.py @@ -5,7 +5,7 @@ from __future__ import annotations import logging from typing import Any -from elevenlabs.client import AsyncElevenLabs +from elevenlabs import AsyncElevenLabs from elevenlabs.core import ApiError import voluptuous as vol diff --git a/homeassistant/components/elevenlabs/manifest.json b/homeassistant/components/elevenlabs/manifest.json index 968ea7b688a..eb6df09149a 100644 --- a/homeassistant/components/elevenlabs/manifest.json +++ b/homeassistant/components/elevenlabs/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["elevenlabs"], - "requirements": ["elevenlabs==1.6.1"] + "requirements": ["elevenlabs==1.9.0"] } 
diff --git a/homeassistant/components/elevenlabs/tts.py b/homeassistant/components/elevenlabs/tts.py index efc2154882a..8b016b6af8b 100644 --- a/homeassistant/components/elevenlabs/tts.py +++ b/homeassistant/components/elevenlabs/tts.py @@ -6,7 +6,7 @@ import logging from types import MappingProxyType from typing import Any -from elevenlabs.client import AsyncElevenLabs +from elevenlabs import AsyncElevenLabs from elevenlabs.core import ApiError from elevenlabs.types import Model, Voice as ElevenLabsVoice, VoiceSettings diff --git a/requirements_all.txt b/requirements_all.txt index 9cdc7021f53..011fedd5a5f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -815,7 +815,7 @@ eheimdigital==1.0.3 electrickiwi-api==0.8.5 # homeassistant.components.elevenlabs -elevenlabs==1.6.1 +elevenlabs==1.9.0 # homeassistant.components.elgato elgato==5.1.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 70b6674edc8..0f94266313c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -693,7 +693,7 @@ eheimdigital==1.0.3 electrickiwi-api==0.8.5 # homeassistant.components.elevenlabs -elevenlabs==1.6.1 +elevenlabs==1.9.0 # homeassistant.components.elgato elgato==5.1.2 diff --git a/tests/components/elevenlabs/conftest.py b/tests/components/elevenlabs/conftest.py index c4d9a87b5ad..c9ed49ba13c 100644 --- a/tests/components/elevenlabs/conftest.py +++ b/tests/components/elevenlabs/conftest.py @@ -31,7 +31,7 @@ def mock_async_client() -> Generator[AsyncMock]: client_mock.voices.get_all.return_value = GetVoicesResponse(voices=MOCK_VOICES) client_mock.models.get_all.return_value = MOCK_MODELS with patch( - "elevenlabs.client.AsyncElevenLabs", return_value=client_mock + "elevenlabs.AsyncElevenLabs", return_value=client_mock ) as mock_async_client: yield mock_async_client From 85ef2c0fb17f85e69e8272853114c97b0af7d6e8 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sun, 15 Dec 2024 03:19:57 -0800 Subject: [PATCH 0680/1198] Mark Google Tasks action-exceptions quality scale as done (#133253) --- homeassistant/components/google_tasks/quality_scale.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/google_tasks/quality_scale.yaml b/homeassistant/components/google_tasks/quality_scale.yaml index b4159b30145..94c81d0b7f8 100644 --- a/homeassistant/components/google_tasks/quality_scale.yaml +++ b/homeassistant/components/google_tasks/quality_scale.yaml @@ -39,7 +39,7 @@ rules: reauthentication-flow: status: todo comment: Missing a test that reauthenticates with the wrong account - action-exceptions: todo + action-exceptions: done docs-installation-parameters: todo integration-owner: done parallel-updates: todo From 760c3ac98ce8bdcab3ffee3d8ba49c971081c4b4 Mon Sep 17 00:00:00 2001 From: Claudio Ruggeri - CR-Tech <41435902+crug80@users.noreply.github.com> Date: Sun, 15 Dec 2024 12:24:27 +0100 Subject: [PATCH 0681/1198] Bump pymodbus version 3.7.4 (#133175) Co-authored-by: Joost Lekkerkerker --- .../components/modbus/binary_sensor.py | 2 +- homeassistant/components/modbus/manifest.json | 2 +- homeassistant/components/modbus/modbus.py | 19 +++++++++---------- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/modbus/test_init.py | 4 +--- 6 files changed, 14 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/modbus/binary_sensor.py b/homeassistant/components/modbus/binary_sensor.py index b50d21faf42..97ade53762b 100644 --- a/homeassistant/components/modbus/binary_sensor.py +++ 
b/homeassistant/components/modbus/binary_sensor.py @@ -121,7 +121,7 @@ class ModbusBinarySensor(BasePlatform, RestoreEntity, BinarySensorEntity): else: self._attr_available = True if self._input_type in (CALL_TYPE_COIL, CALL_TYPE_DISCRETE): - self._result = result.bits + self._result = [int(bit) for bit in result.bits] else: self._result = result.registers self._attr_is_on = bool(self._result[0] & 1) diff --git a/homeassistant/components/modbus/manifest.json b/homeassistant/components/modbus/manifest.json index 7cba4692eb6..fc25a329c11 100644 --- a/homeassistant/components/modbus/manifest.json +++ b/homeassistant/components/modbus/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/modbus", "iot_class": "local_polling", "loggers": ["pymodbus"], - "requirements": ["pymodbus==3.6.9"] + "requirements": ["pymodbus==3.7.4"] } diff --git a/homeassistant/components/modbus/modbus.py b/homeassistant/components/modbus/modbus.py index 18d91f8dd3b..efce44d7979 100644 --- a/homeassistant/components/modbus/modbus.py +++ b/homeassistant/components/modbus/modbus.py @@ -14,8 +14,8 @@ from pymodbus.client import ( AsyncModbusUdpClient, ) from pymodbus.exceptions import ModbusException -from pymodbus.pdu import ModbusResponse -from pymodbus.transaction import ModbusAsciiFramer, ModbusRtuFramer, ModbusSocketFramer +from pymodbus.framer import FramerType +from pymodbus.pdu import ModbusPDU import voluptuous as vol from homeassistant.const import ( @@ -265,14 +265,13 @@ class ModbusHub: "port": client_config[CONF_PORT], "timeout": client_config[CONF_TIMEOUT], "retries": 3, - "retry_on_empty": True, } if self._config_type == SERIAL: # serial configuration if client_config[CONF_METHOD] == "ascii": - self._pb_params["framer"] = ModbusAsciiFramer + self._pb_params["framer"] = FramerType.ASCII else: - self._pb_params["framer"] = ModbusRtuFramer + self._pb_params["framer"] = FramerType.RTU self._pb_params.update( { "baudrate": client_config[CONF_BAUDRATE], @@ -285,9 +284,9 @@ class ModbusHub: # network configuration self._pb_params["host"] = client_config[CONF_HOST] if self._config_type == RTUOVERTCP: - self._pb_params["framer"] = ModbusRtuFramer + self._pb_params["framer"] = FramerType.RTU else: - self._pb_params["framer"] = ModbusSocketFramer + self._pb_params["framer"] = FramerType.SOCKET if CONF_MSG_WAIT in client_config: self._msg_wait = client_config[CONF_MSG_WAIT] / 1000 @@ -370,12 +369,12 @@ class ModbusHub: async def low_level_pb_call( self, slave: int | None, address: int, value: int | list[int], use_call: str - ) -> ModbusResponse | None: + ) -> ModbusPDU | None: """Call sync. 
pymodbus.""" kwargs = {"slave": slave} if slave else {} entry = self._pb_request[use_call] try: - result: ModbusResponse = await entry.func(address, value, **kwargs) + result: ModbusPDU = await entry.func(address, value, **kwargs) except ModbusException as exception_error: error = f"Error: device: {slave} address: {address} -> {exception_error!s}" self._log_error(error) @@ -403,7 +402,7 @@ class ModbusHub: address: int, value: int | list[int], use_call: str, - ) -> ModbusResponse | None: + ) -> ModbusPDU | None: """Convert async to sync pymodbus call.""" if self._config_delay: return None diff --git a/requirements_all.txt b/requirements_all.txt index 011fedd5a5f..e4b9787c641 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2091,7 +2091,7 @@ pymitv==1.4.3 pymochad==0.2.0 # homeassistant.components.modbus -pymodbus==3.6.9 +pymodbus==3.7.4 # homeassistant.components.monoprice pymonoprice==0.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 0f94266313c..58f6d599825 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1696,7 +1696,7 @@ pymicro-vad==1.0.1 pymochad==0.2.0 # homeassistant.components.modbus -pymodbus==3.6.9 +pymodbus==3.7.4 # homeassistant.components.monoprice pymonoprice==0.4 diff --git a/tests/components/modbus/test_init.py b/tests/components/modbus/test_init.py index 3b8a76f5606..0cfa7ba8b24 100644 --- a/tests/components/modbus/test_init.py +++ b/tests/components/modbus/test_init.py @@ -19,7 +19,7 @@ from unittest import mock from freezegun.api import FrozenDateTimeFactory from pymodbus.exceptions import ModbusException -from pymodbus.pdu import ExceptionResponse, IllegalFunctionRequest +from pymodbus.pdu import ExceptionResponse import pytest import voluptuous as vol @@ -820,7 +820,6 @@ SERVICE = "service" [ {VALUE: ReadResult([0x0001]), DATA: ""}, {VALUE: ExceptionResponse(0x06), DATA: "Pymodbus:"}, - {VALUE: IllegalFunctionRequest(0x06), DATA: "Pymodbus:"}, {VALUE: ModbusException("fail write_"), DATA: "Pymodbus:"}, ], ) @@ -928,7 +927,6 @@ async def mock_modbus_read_pymodbus_fixture( ("do_return", "do_exception", "do_expect_state", "do_expect_value"), [ (ReadResult([1]), None, STATE_ON, "1"), - (IllegalFunctionRequest(0x99), None, STATE_UNAVAILABLE, STATE_UNAVAILABLE), (ExceptionResponse(0x99), None, STATE_UNAVAILABLE, STATE_UNAVAILABLE), ( ReadResult([1]), From aa4b64386e462ef5379bee1480f30d3d899d3125 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Sun, 15 Dec 2024 12:25:35 +0100 Subject: [PATCH 0682/1198] Don't update existing Fronius config entries from config flow (#132886) --- homeassistant/components/fronius/__init__.py | 2 +- .../components/fronius/config_flow.py | 2 +- tests/components/fronius/test_config_flow.py | 34 ++++++++----------- 3 files changed, 17 insertions(+), 21 deletions(-) diff --git a/homeassistant/components/fronius/__init__.py b/homeassistant/components/fronius/__init__.py index 03d80e3b2d9..4ba893df85c 100644 --- a/homeassistant/components/fronius/__init__.py +++ b/homeassistant/components/fronius/__init__.py @@ -60,7 +60,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: FroniusConfigEntry) -> async def async_remove_config_entry_device( - hass: HomeAssistant, config_entry: ConfigEntry, device_entry: dr.DeviceEntry + hass: HomeAssistant, config_entry: FroniusConfigEntry, device_entry: dr.DeviceEntry ) -> bool: """Remove a config entry from a device.""" return True diff --git a/homeassistant/components/fronius/config_flow.py 
b/homeassistant/components/fronius/config_flow.py index 53433e31233..ccc15d80401 100644 --- a/homeassistant/components/fronius/config_flow.py +++ b/homeassistant/components/fronius/config_flow.py @@ -87,7 +87,7 @@ class FroniusConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "unknown" else: await self.async_set_unique_id(unique_id, raise_on_progress=False) - self._abort_if_unique_id_configured(updates=dict(info)) + self._abort_if_unique_id_configured() return self.async_create_entry(title=create_title(info), data=info) diff --git a/tests/components/fronius/test_config_flow.py b/tests/components/fronius/test_config_flow.py index 5d0b93e7cd5..ed90e266b81 100644 --- a/tests/components/fronius/test_config_flow.py +++ b/tests/components/fronius/test_config_flow.py @@ -205,10 +205,10 @@ async def test_form_already_existing(hass: HomeAssistant) -> None: assert result2["reason"] == "already_configured" -async def test_form_updates_host( +async def test_config_flow_already_configured( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: - """Test existing entry gets updated.""" + """Test existing entry doesn't get updated by config flow.""" old_host = "http://10.1.0.1" new_host = "http://10.1.0.2" entry = MockConfigEntry( @@ -231,26 +231,20 @@ async def test_form_updates_host( ) mock_responses(aioclient_mock, host=new_host) - with patch( - "homeassistant.components.fronius.async_unload_entry", - return_value=True, - ) as mock_unload_entry: - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "host": new_host, - }, - ) - await hass.async_block_till_done() - + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "host": new_host, + }, + ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "already_configured" - mock_unload_entry.assert_called_with(hass, entry) entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 assert entries[0].data == { - "host": new_host, + "host": old_host, # not updated from config flow - only from reconfigure flow "is_logger": True, } @@ -326,11 +320,13 @@ async def test_dhcp_invalid( async def test_reconfigure(hass: HomeAssistant) -> None: """Test reconfiguring an entry.""" + old_host = "http://10.1.0.1" + new_host = "http://10.1.0.2" entry = MockConfigEntry( domain=DOMAIN, unique_id="1234567", data={ - CONF_HOST: "10.1.2.3", + CONF_HOST: old_host, "is_logger": True, }, ) @@ -357,7 +353,7 @@ async def test_reconfigure(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ - "host": "10.9.1.1", + "host": new_host, }, ) await hass.async_block_till_done() @@ -365,7 +361,7 @@ async def test_reconfigure(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reconfigure_successful" assert entry.data == { - "host": "10.9.1.1", + "host": new_host, "is_logger": False, } assert len(mock_setup_entry.mock_calls) == 1 From 74e4654c26177909e653921f27f838fd1366adc0 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Sun, 15 Dec 2024 12:28:32 +0100 Subject: [PATCH 0683/1198] Revert "Improve recorder history queries (#131702)" (#133203) --- homeassistant/components/history/__init__.py | 7 ++-- homeassistant/components/history/helpers.py | 13 ++++---- .../components/history/websocket_api.py | 7 ++-- homeassistant/components/recorder/core.py | 1 - .../components/recorder/history/legacy.py | 18 ++++++----- 
.../components/recorder/history/modern.py | 31 +++++++++--------- homeassistant/components/recorder/purge.py | 3 -- homeassistant/components/recorder/queries.py | 9 ------ .../recorder/table_managers/states.py | 32 ------------------- homeassistant/components/recorder/tasks.py | 2 ++ tests/components/recorder/test_purge.py | 17 ---------- 11 files changed, 38 insertions(+), 102 deletions(-) diff --git a/homeassistant/components/history/__init__.py b/homeassistant/components/history/__init__.py index 7241e1fac9a..365be06fd2d 100644 --- a/homeassistant/components/history/__init__.py +++ b/homeassistant/components/history/__init__.py @@ -22,7 +22,7 @@ import homeassistant.util.dt as dt_util from . import websocket_api from .const import DOMAIN -from .helpers import entities_may_have_state_changes_after, has_states_before +from .helpers import entities_may_have_state_changes_after, has_recorder_run_after CONF_ORDER = "use_include_order" @@ -107,10 +107,7 @@ class HistoryPeriodView(HomeAssistantView): no_attributes = "no_attributes" in request.query if ( - # has_states_before will return True if there are states older than - # end_time. If it's false, we know there are no states in the - # database up until end_time. - (end_time and not has_states_before(hass, end_time)) + (end_time and not has_recorder_run_after(hass, end_time)) or not include_start_time_state and entity_ids and not entities_may_have_state_changes_after( diff --git a/homeassistant/components/history/helpers.py b/homeassistant/components/history/helpers.py index 2010b7373ff..bd477e7e4ed 100644 --- a/homeassistant/components/history/helpers.py +++ b/homeassistant/components/history/helpers.py @@ -6,6 +6,7 @@ from collections.abc import Iterable from datetime import datetime as dt from homeassistant.components.recorder import get_instance +from homeassistant.components.recorder.models import process_timestamp from homeassistant.core import HomeAssistant @@ -25,10 +26,8 @@ def entities_may_have_state_changes_after( return False -def has_states_before(hass: HomeAssistant, run_time: dt) -> bool: - """Check if the recorder has states as old or older than run_time. - - Returns True if there may be such states. - """ - oldest_ts = get_instance(hass).states_manager.oldest_ts - return oldest_ts is not None and run_time.timestamp() >= oldest_ts +def has_recorder_run_after(hass: HomeAssistant, run_time: dt) -> bool: + """Check if the recorder has any runs after a specific time.""" + return run_time >= process_timestamp( + get_instance(hass).recorder_runs_manager.first.start + ) diff --git a/homeassistant/components/history/websocket_api.py b/homeassistant/components/history/websocket_api.py index 35f8ed5f1ac..c85d975c3c9 100644 --- a/homeassistant/components/history/websocket_api.py +++ b/homeassistant/components/history/websocket_api.py @@ -39,7 +39,7 @@ from homeassistant.util.async_ import create_eager_task import homeassistant.util.dt as dt_util from .const import EVENT_COALESCE_TIME, MAX_PENDING_HISTORY_STATES -from .helpers import entities_may_have_state_changes_after, has_states_before +from .helpers import entities_may_have_state_changes_after, has_recorder_run_after _LOGGER = logging.getLogger(__name__) @@ -142,10 +142,7 @@ async def ws_get_history_during_period( no_attributes = msg["no_attributes"] if ( - # has_states_before will return True if there are states older than - # end_time. If it's false, we know there are no states in the - # database up until end_time. 
- (end_time and not has_states_before(hass, end_time)) + (end_time and not has_recorder_run_after(hass, end_time)) or not include_start_time_state and entity_ids and not entities_may_have_state_changes_after( diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py index a3163d5b396..76cf0a7c05e 100644 --- a/homeassistant/components/recorder/core.py +++ b/homeassistant/components/recorder/core.py @@ -1430,7 +1430,6 @@ class Recorder(threading.Thread): with session_scope(session=self.get_session()) as session: end_incomplete_runs(session, self.recorder_runs_manager.recording_start) self.recorder_runs_manager.start(session) - self.states_manager.load_from_db(session) self._open_event_session() diff --git a/homeassistant/components/recorder/history/legacy.py b/homeassistant/components/recorder/history/legacy.py index dc49ebb9768..da90b296fe3 100644 --- a/homeassistant/components/recorder/history/legacy.py +++ b/homeassistant/components/recorder/history/legacy.py @@ -22,9 +22,9 @@ from homeassistant.core import HomeAssistant, State, split_entity_id from homeassistant.helpers.recorder import get_instance import homeassistant.util.dt as dt_util -from ..db_schema import StateAttributes, States +from ..db_schema import RecorderRuns, StateAttributes, States from ..filters import Filters -from ..models import process_timestamp_to_utc_isoformat +from ..models import process_timestamp, process_timestamp_to_utc_isoformat from ..models.legacy import LegacyLazyState, legacy_row_to_compressed_state from ..util import execute_stmt_lambda_element, session_scope from .const import ( @@ -436,7 +436,7 @@ def get_last_state_changes( def _get_states_for_entities_stmt( - run_start_ts: float, + run_start: datetime, utc_point_in_time: datetime, entity_ids: list[str], no_attributes: bool, @@ -447,6 +447,7 @@ def _get_states_for_entities_stmt( ) # We got an include-list of entities, accelerate the query by filtering already # in the inner query. + run_start_ts = process_timestamp(run_start).timestamp() utc_point_in_time_ts = utc_point_in_time.timestamp() stmt += lambda q: q.join( ( @@ -482,7 +483,7 @@ def _get_rows_with_session( session: Session, utc_point_in_time: datetime, entity_ids: list[str], - *, + run: RecorderRuns | None = None, no_attributes: bool = False, ) -> Iterable[Row]: """Return the states at a specific point in time.""" @@ -494,16 +495,17 @@ def _get_rows_with_session( ), ) - oldest_ts = get_instance(hass).states_manager.oldest_ts + if run is None: + run = get_instance(hass).recorder_runs_manager.get(utc_point_in_time) - if oldest_ts is None or oldest_ts > utc_point_in_time.timestamp(): - # We don't have any states for the requested time + if run is None or process_timestamp(run.start) > utc_point_in_time: + # History did not run before utc_point_in_time return [] # We have more than one entity to look at so we need to do a query on states # since the last recorder run started. 
stmt = _get_states_for_entities_stmt( - oldest_ts, utc_point_in_time, entity_ids, no_attributes + run.start, utc_point_in_time, entity_ids, no_attributes ) return execute_stmt_lambda_element(session, stmt) diff --git a/homeassistant/components/recorder/history/modern.py b/homeassistant/components/recorder/history/modern.py index 01551de1f28..9159bbc6181 100644 --- a/homeassistant/components/recorder/history/modern.py +++ b/homeassistant/components/recorder/history/modern.py @@ -34,6 +34,7 @@ from ..models import ( LazyState, datetime_to_timestamp_or_none, extract_metadata_ids, + process_timestamp, row_to_compressed_state, ) from ..util import execute_stmt_lambda_element, session_scope @@ -245,9 +246,9 @@ def get_significant_states_with_session( if metadata_id is not None and split_entity_id(entity_id)[0] in SIGNIFICANT_DOMAINS ] - oldest_ts: float | None = None + run_start_ts: float | None = None if include_start_time_state and not ( - oldest_ts := _get_oldest_possible_ts(hass, start_time) + run_start_ts := _get_run_start_ts_for_utc_point_in_time(hass, start_time) ): include_start_time_state = False start_time_ts = start_time.timestamp() @@ -263,7 +264,7 @@ def get_significant_states_with_session( significant_changes_only, no_attributes, include_start_time_state, - oldest_ts, + run_start_ts, ), track_on=[ bool(single_metadata_id), @@ -410,9 +411,9 @@ def state_changes_during_period( entity_id_to_metadata_id: dict[str, int | None] = { entity_id: single_metadata_id } - oldest_ts: float | None = None + run_start_ts: float | None = None if include_start_time_state and not ( - oldest_ts := _get_oldest_possible_ts(hass, start_time) + run_start_ts := _get_run_start_ts_for_utc_point_in_time(hass, start_time) ): include_start_time_state = False start_time_ts = start_time.timestamp() @@ -425,7 +426,7 @@ def state_changes_during_period( no_attributes, limit, include_start_time_state, - oldest_ts, + run_start_ts, has_last_reported, ), track_on=[ @@ -599,17 +600,17 @@ def _get_start_time_state_for_entities_stmt( ) -def _get_oldest_possible_ts( +def _get_run_start_ts_for_utc_point_in_time( hass: HomeAssistant, utc_point_in_time: datetime ) -> float | None: - """Return the oldest possible timestamp. - - Returns None if there are no states as old as utc_point_in_time. 
- """ - - oldest_ts = get_instance(hass).states_manager.oldest_ts - if oldest_ts is not None and oldest_ts < utc_point_in_time.timestamp(): - return oldest_ts + """Return the start time of a run.""" + run = get_instance(hass).recorder_runs_manager.get(utc_point_in_time) + if ( + run is not None + and (run_start := process_timestamp(run.start)) < utc_point_in_time + ): + return run_start.timestamp() + # History did not run before utc_point_in_time but we still return None diff --git a/homeassistant/components/recorder/purge.py b/homeassistant/components/recorder/purge.py index 11f5accc978..eb67300e8d4 100644 --- a/homeassistant/components/recorder/purge.py +++ b/homeassistant/components/recorder/purge.py @@ -122,9 +122,6 @@ def purge_old_data( _purge_old_entity_ids(instance, session) _purge_old_recorder_runs(instance, session, purge_before) - with session_scope(session=instance.get_session(), read_only=True) as session: - instance.recorder_runs_manager.load_from_db(session) - instance.states_manager.load_from_db(session) if repack: repack_database(instance) return True diff --git a/homeassistant/components/recorder/queries.py b/homeassistant/components/recorder/queries.py index 8ca7bef2691..2e4b588a0b0 100644 --- a/homeassistant/components/recorder/queries.py +++ b/homeassistant/components/recorder/queries.py @@ -637,15 +637,6 @@ def find_states_to_purge( ) -def find_oldest_state() -> StatementLambdaElement: - """Find the last_updated_ts of the oldest state.""" - return lambda_stmt( - lambda: select(States.last_updated_ts).where( - States.state_id.in_(select(func.min(States.state_id))) - ) - ) - - def find_short_term_statistics_to_purge( purge_before: datetime, max_bind_vars: int ) -> StatementLambdaElement: diff --git a/homeassistant/components/recorder/table_managers/states.py b/homeassistant/components/recorder/table_managers/states.py index fafcfa0ea61..d5cef759c54 100644 --- a/homeassistant/components/recorder/table_managers/states.py +++ b/homeassistant/components/recorder/table_managers/states.py @@ -2,15 +2,7 @@ from __future__ import annotations -from collections.abc import Sequence -from typing import Any, cast - -from sqlalchemy.engine.row import Row -from sqlalchemy.orm.session import Session - from ..db_schema import States -from ..queries import find_oldest_state -from ..util import execute_stmt_lambda_element class StatesManager: @@ -21,12 +13,6 @@ class StatesManager: self._pending: dict[str, States] = {} self._last_committed_id: dict[str, int] = {} self._last_reported: dict[int, float] = {} - self._oldest_ts: float | None = None - - @property - def oldest_ts(self) -> float | None: - """Return the oldest timestamp.""" - return self._oldest_ts def pop_pending(self, entity_id: str) -> States | None: """Pop a pending state. @@ -58,8 +44,6 @@ class StatesManager: recorder thread. """ self._pending[entity_id] = state - if self._oldest_ts is None: - self._oldest_ts = state.last_updated_ts def update_pending_last_reported( self, state_id: int, last_reported_timestamp: float @@ -90,22 +74,6 @@ class StatesManager: """ self._last_committed_id.clear() self._pending.clear() - self._oldest_ts = None - - def load_from_db(self, session: Session) -> None: - """Update the cache. - - Must run in the recorder thread. 
- """ - result = cast( - Sequence[Row[Any]], - execute_stmt_lambda_element(session, find_oldest_state()), - ) - if not result: - ts = None - else: - ts = result[0].last_updated_ts - self._oldest_ts = ts def evict_purged_state_ids(self, purged_state_ids: set[int]) -> None: """Evict purged states from the committed states. diff --git a/homeassistant/components/recorder/tasks.py b/homeassistant/components/recorder/tasks.py index fa10c12aa68..783f0a80b8e 100644 --- a/homeassistant/components/recorder/tasks.py +++ b/homeassistant/components/recorder/tasks.py @@ -120,6 +120,8 @@ class PurgeTask(RecorderTask): if purge.purge_old_data( instance, self.purge_before, self.repack, self.apply_filter ): + with instance.get_session() as session: + instance.recorder_runs_manager.load_from_db(session) # We always need to do the db cleanups after a purge # is finished to ensure the WAL checkpoint and other # tasks happen after a vacuum. diff --git a/tests/components/recorder/test_purge.py b/tests/components/recorder/test_purge.py index c3ff5027b70..ea764b14401 100644 --- a/tests/components/recorder/test_purge.py +++ b/tests/components/recorder/test_purge.py @@ -112,9 +112,6 @@ async def test_purge_big_database(hass: HomeAssistant, recorder_mock: Recorder) async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test deleting old states.""" - assert recorder_mock.states_manager.oldest_ts is None - oldest_ts = recorder_mock.states_manager.oldest_ts - await _add_test_states(hass) # make sure we start with 6 states @@ -130,10 +127,6 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> events = session.query(Events).filter(Events.event_type == "state_changed") assert events.count() == 0 - assert recorder_mock.states_manager.oldest_ts != oldest_ts - assert recorder_mock.states_manager.oldest_ts == states[0].last_updated_ts - oldest_ts = recorder_mock.states_manager.oldest_ts - assert "test.recorder2" in recorder_mock.states_manager._last_committed_id purge_before = dt_util.utcnow() - timedelta(days=4) @@ -147,8 +140,6 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> repack=False, ) assert not finished - # states_manager.oldest_ts is not updated until after the purge is complete - assert recorder_mock.states_manager.oldest_ts == oldest_ts with session_scope(hass=hass) as session: states = session.query(States) @@ -171,8 +162,6 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> finished = purge_old_data(recorder_mock, purge_before, repack=False) assert finished - # states_manager.oldest_ts should now be updated - assert recorder_mock.states_manager.oldest_ts != oldest_ts with session_scope(hass=hass) as session: states = session.query(States) @@ -180,10 +169,6 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> assert states.count() == 2 assert state_attributes.count() == 1 - assert recorder_mock.states_manager.oldest_ts != oldest_ts - assert recorder_mock.states_manager.oldest_ts == states[0].last_updated_ts - oldest_ts = recorder_mock.states_manager.oldest_ts - assert "test.recorder2" in recorder_mock.states_manager._last_committed_id # run purge_old_data again @@ -196,8 +181,6 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> repack=False, ) assert not finished - # states_manager.oldest_ts is not updated until after the purge is complete - assert recorder_mock.states_manager.oldest_ts == oldest_ts with 
session_scope(hass=hass) as session: assert states.count() == 0 From 16ad2d52c7bd9ece9a202f236644d92fc0cbe013 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Sun, 15 Dec 2024 13:07:10 +0100 Subject: [PATCH 0684/1198] Improve MQTT json color_temp validation (#133174) * Improve MQTT json color_temp validation * Revert unrelated changes and assert on logs * Typo --- homeassistant/components/mqtt/light/schema_json.py | 2 +- tests/components/mqtt/test_light_json.py | 8 +++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/mqtt/light/schema_json.py b/homeassistant/components/mqtt/light/schema_json.py index 5901967610a..5880a684ec0 100644 --- a/homeassistant/components/mqtt/light/schema_json.py +++ b/homeassistant/components/mqtt/light/schema_json.py @@ -490,7 +490,7 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity): ) except KeyError: pass - except ValueError: + except (TypeError, ValueError): _LOGGER.warning( "Invalid color temp value '%s' received for entity %s", values["color_temp"], diff --git a/tests/components/mqtt/test_light_json.py b/tests/components/mqtt/test_light_json.py index b1031bec342..c6032678a47 100644 --- a/tests/components/mqtt/test_light_json.py +++ b/tests/components/mqtt/test_light_json.py @@ -2185,7 +2185,9 @@ async def test_white_scale( ], ) async def test_invalid_values( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, ) -> None: """Test that invalid color/brightness/etc. values are ignored.""" await mqtt_mock_entry() @@ -2287,6 +2289,10 @@ async def test_invalid_values( async_fire_mqtt_message( hass, "test_light_rgb", '{"state":"ON", "color_temp": "badValue"}' ) + assert ( + "Invalid color temp value 'badValue' received for entity light.test" + in caplog.text + ) # Color temperature should not have changed state = hass.states.get("light.test") From c2ee020eee3dde7c532124b74dd9891cb07d6ae1 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Sun, 15 Dec 2024 13:14:32 +0100 Subject: [PATCH 0685/1198] Update quality scale documentation rules in IronOS integration (#133245) --- .../components/iron_os/quality_scale.yaml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/iron_os/quality_scale.yaml b/homeassistant/components/iron_os/quality_scale.yaml index a379e7965b3..5ede3d6971d 100644 --- a/homeassistant/components/iron_os/quality_scale.yaml +++ b/homeassistant/components/iron_os/quality_scale.yaml @@ -12,9 +12,9 @@ rules: docs-actions: status: done comment: Integration does register actions aside from entity actions - docs-high-level-description: todo - docs-installation-instructions: todo - docs-removal-instructions: todo + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done entity-event-setup: status: exempt comment: Integration does not register events. @@ -52,13 +52,13 @@ rules: status: exempt comment: Device is not connected to an ip network. Other information from discovery is immutable and does not require updating. 
discovery: done - docs-data-update: todo - docs-examples: todo - docs-known-limitations: todo + docs-data-update: done + docs-examples: done + docs-known-limitations: done docs-supported-devices: done docs-supported-functions: done - docs-troubleshooting: todo - docs-use-cases: todo + docs-troubleshooting: done + docs-use-cases: done dynamic-devices: status: exempt comment: Only one device per config entry. New devices are set up as new entries. From b13a54f605dbf1c1c164d2e9140de81e4ad0ead7 Mon Sep 17 00:00:00 2001 From: Dan Raper Date: Sun, 15 Dec 2024 13:22:21 +0000 Subject: [PATCH 0686/1198] Add button platform to Ohme (#133267) * Add button platform and reauth flow * CI fixes * Test comment change * Remove reauth from this PR * Move is_supported_fn to OhmeEntityDescription * Set parallel updates to 1 * Add coordinator refresh to button press * Add exception handling to button async_press --- homeassistant/components/ohme/button.py | 77 ++++++++++++++++++ homeassistant/components/ohme/const.py | 2 +- homeassistant/components/ohme/entity.py | 12 +++ homeassistant/components/ohme/icons.json | 5 ++ .../components/ohme/quality_scale.yaml | 5 +- homeassistant/components/ohme/sensor.py | 5 +- homeassistant/components/ohme/strings.json | 5 ++ .../ohme/snapshots/test_button.ambr | 47 +++++++++++ tests/components/ohme/test_button.py | 79 +++++++++++++++++++ 9 files changed, 229 insertions(+), 8 deletions(-) create mode 100644 homeassistant/components/ohme/button.py create mode 100644 tests/components/ohme/snapshots/test_button.ambr create mode 100644 tests/components/ohme/test_button.py diff --git a/homeassistant/components/ohme/button.py b/homeassistant/components/ohme/button.py new file mode 100644 index 00000000000..21792770bb4 --- /dev/null +++ b/homeassistant/components/ohme/button.py @@ -0,0 +1,77 @@ +"""Platform for button.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass + +from ohme import ApiException, ChargerStatus, OhmeApiClient + +from homeassistant.components.button import ButtonEntity, ButtonEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import OhmeConfigEntry +from .const import DOMAIN +from .entity import OhmeEntity, OhmeEntityDescription + +PARALLEL_UPDATES = 1 + + +@dataclass(frozen=True, kw_only=True) +class OhmeButtonDescription(OhmeEntityDescription, ButtonEntityDescription): + """Class describing Ohme button entities.""" + + press_fn: Callable[[OhmeApiClient], Awaitable[None]] + available_fn: Callable[[OhmeApiClient], bool] + + +BUTTON_DESCRIPTIONS = [ + OhmeButtonDescription( + key="approve", + translation_key="approve", + press_fn=lambda client: client.async_approve_charge(), + is_supported_fn=lambda client: client.is_capable("pluginsRequireApprovalMode"), + available_fn=lambda client: client.status is ChargerStatus.PENDING_APPROVAL, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: OhmeConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up buttons.""" + coordinator = config_entry.runtime_data.charge_session_coordinator + + async_add_entities( + OhmeButton(coordinator, description) + for description in BUTTON_DESCRIPTIONS + if description.is_supported_fn(coordinator.client) + ) + + +class OhmeButton(OhmeEntity, ButtonEntity): + """Generic button for Ohme.""" + + entity_description: OhmeButtonDescription + + async def async_press(self) -> None: + """Handle the button press.""" + try: + await self.entity_description.press_fn(self.coordinator.client) + except ApiException as e: + raise HomeAssistantError( + translation_key="api_failed", translation_domain=DOMAIN + ) from e + await self.coordinator.async_request_refresh() + + @property + def available(self) -> bool: + """Is entity available.""" + + return super().available and self.entity_description.available_fn( + self.coordinator.client + ) diff --git a/homeassistant/components/ohme/const.py b/homeassistant/components/ohme/const.py index adc5ddfd61b..b44262ad509 100644 --- a/homeassistant/components/ohme/const.py +++ b/homeassistant/components/ohme/const.py @@ -3,4 +3,4 @@ from homeassistant.const import Platform DOMAIN = "ohme" -PLATFORMS = [Platform.SENSOR] +PLATFORMS = [Platform.BUTTON, Platform.SENSOR] diff --git a/homeassistant/components/ohme/entity.py b/homeassistant/components/ohme/entity.py index 2c662f7fccb..6a7d0ea16e4 100644 --- a/homeassistant/components/ohme/entity.py +++ b/homeassistant/components/ohme/entity.py @@ -1,5 +1,10 @@ """Base class for entities.""" +from collections.abc import Callable +from dataclasses import dataclass + +from ohme import OhmeApiClient + from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -8,6 +13,13 @@ from .const import DOMAIN from .coordinator import OhmeBaseCoordinator +@dataclass(frozen=True) +class OhmeEntityDescription(EntityDescription): + """Class describing Ohme entities.""" + + is_supported_fn: Callable[[OhmeApiClient], bool] = lambda _: True + + class OhmeEntity(CoordinatorEntity[OhmeBaseCoordinator]): """Base class for all Ohme entities.""" diff --git a/homeassistant/components/ohme/icons.json b/homeassistant/components/ohme/icons.json index 228907b3dbe..d5bf3fa1187 100644 --- a/homeassistant/components/ohme/icons.json +++ b/homeassistant/components/ohme/icons.json @@ -1,5 +1,10 @@ { "entity": { + "button": { + "approve": { + "default": "mdi:check-decagram" + } + }, "sensor": { "status": { "default": "mdi:car", diff --git a/homeassistant/components/ohme/quality_scale.yaml 
b/homeassistant/components/ohme/quality_scale.yaml index cffc9eb7b82..15697cb11a3 100644 --- a/homeassistant/components/ohme/quality_scale.yaml +++ b/homeassistant/components/ohme/quality_scale.yaml @@ -29,10 +29,7 @@ rules: unique-config-entry: done # Silver - action-exceptions: - status: exempt - comment: | - This integration has no custom actions and read-only platform only. + action-exceptions: done config-entry-unloading: done docs-configuration-parameters: status: exempt diff --git a/homeassistant/components/ohme/sensor.py b/homeassistant/components/ohme/sensor.py index d4abaf85b1f..6d111cf7af6 100644 --- a/homeassistant/components/ohme/sensor.py +++ b/homeassistant/components/ohme/sensor.py @@ -18,17 +18,16 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import OhmeConfigEntry -from .entity import OhmeEntity +from .entity import OhmeEntity, OhmeEntityDescription PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) -class OhmeSensorDescription(SensorEntityDescription): +class OhmeSensorDescription(OhmeEntityDescription, SensorEntityDescription): """Class describing Ohme sensor entities.""" value_fn: Callable[[OhmeApiClient], str | int | float] - is_supported_fn: Callable[[OhmeApiClient], bool] = lambda _: True SENSOR_CHARGE_SESSION = [ diff --git a/homeassistant/components/ohme/strings.json b/homeassistant/components/ohme/strings.json index 06231ed5cf4..42e0a60b83e 100644 --- a/homeassistant/components/ohme/strings.json +++ b/homeassistant/components/ohme/strings.json @@ -22,6 +22,11 @@ } }, "entity": { + "button": { + "approve": { + "name": "Approve charge" + } + }, "sensor": { "status": { "name": "Status", diff --git a/tests/components/ohme/snapshots/test_button.ambr b/tests/components/ohme/snapshots/test_button.ambr new file mode 100644 index 00000000000..32de16208f4 --- /dev/null +++ b/tests/components/ohme/snapshots/test_button.ambr @@ -0,0 +1,47 @@ +# serializer version: 1 +# name: test_buttons[button.ohme_home_pro_approve_charge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.ohme_home_pro_approve_charge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Approve charge', + 'platform': 'ohme', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'approve', + 'unique_id': 'chargerid_approve', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[button.ohme_home_pro_approve_charge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Ohme Home Pro Approve charge', + }), + 'context': , + 'entity_id': 'button.ohme_home_pro_approve_charge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- diff --git a/tests/components/ohme/test_button.py b/tests/components/ohme/test_button.py new file mode 100644 index 00000000000..1728563b2e9 --- /dev/null +++ b/tests/components/ohme/test_button.py @@ -0,0 +1,79 @@ +"""Tests for sensors.""" + +from datetime import timedelta +from unittest.mock import MagicMock, patch + +from freezegun.api import FrozenDateTimeFactory +from ohme import ChargerStatus +from syrupy import 
SnapshotAssertion + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_UNAVAILABLE, + STATE_UNKNOWN, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +async def test_buttons( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test the Ohme buttons.""" + with patch("homeassistant.components.ohme.PLATFORMS", [Platform.BUTTON]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_button_available( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test that button shows as unavailable when a charge is not pending approval.""" + mock_client.status = ChargerStatus.PENDING_APPROVAL + await setup_integration(hass, mock_config_entry) + + state = hass.states.get("button.ohme_home_pro_approve_charge") + assert state.state == STATE_UNKNOWN + + mock_client.status = ChargerStatus.PLUGGED_IN + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("button.ohme_home_pro_approve_charge") + assert state.state == STATE_UNAVAILABLE + + +async def test_button_press( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test the button press action.""" + mock_client.status = ChargerStatus.PENDING_APPROVAL + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: "button.ohme_home_pro_approve_charge", + }, + blocking=True, + ) + + assert len(mock_client.async_approve_charge.mock_calls) == 1 From b4b6067e8ee3ec660b893cba734c0f83aa89d211 Mon Sep 17 00:00:00 2001 From: Richard Kroegel <42204099+rikroe@users.noreply.github.com> Date: Sun, 15 Dec 2024 14:41:35 +0100 Subject: [PATCH 0687/1198] Use typed BMWConfigEntry (#133272) --- homeassistant/components/bmw_connected_drive/__init__.py | 7 +++---- .../components/bmw_connected_drive/config_flow.py | 4 ++-- .../components/bmw_connected_drive/coordinator.py | 2 +- 3 files changed, 6 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/bmw_connected_drive/__init__.py b/homeassistant/components/bmw_connected_drive/__init__.py index 5ec678b9c95..7b6fb4119db 100644 --- a/homeassistant/components/bmw_connected_drive/__init__.py +++ b/homeassistant/components/bmw_connected_drive/__init__.py @@ -6,7 +6,6 @@ import logging import voluptuous as vol -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_DEVICE_ID, CONF_ENTITY_ID, CONF_NAME, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import ( @@ -50,7 +49,7 @@ SERVICE_UPDATE_STATE = "update_state" @callback def _async_migrate_options_from_data_if_missing( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: BMWConfigEntry ) -> None: data = dict(entry.data) options = dict(entry.options) @@ -116,7 +115,7 @@ async def _async_migrate_entries( return True -async def 
async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: BMWConfigEntry) -> bool: """Set up BMW Connected Drive from a config entry.""" _async_migrate_options_from_data_if_missing(hass, entry) @@ -164,7 +163,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: BMWConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms( diff --git a/homeassistant/components/bmw_connected_drive/config_flow.py b/homeassistant/components/bmw_connected_drive/config_flow.py index 95fec101c9d..04fb3842dfa 100644 --- a/homeassistant/components/bmw_connected_drive/config_flow.py +++ b/homeassistant/components/bmw_connected_drive/config_flow.py @@ -18,7 +18,6 @@ import voluptuous as vol from homeassistant.config_entries import ( SOURCE_REAUTH, SOURCE_RECONFIGURE, - ConfigEntry, ConfigFlow, ConfigFlowResult, OptionsFlow, @@ -39,6 +38,7 @@ from .const import ( CONF_READ_ONLY, CONF_REFRESH_TOKEN, ) +from .coordinator import BMWConfigEntry DATA_SCHEMA = vol.Schema( { @@ -224,7 +224,7 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: BMWConfigEntry, ) -> BMWOptionsFlow: """Return a MyBMW option flow.""" return BMWOptionsFlow() diff --git a/homeassistant/components/bmw_connected_drive/coordinator.py b/homeassistant/components/bmw_connected_drive/coordinator.py index 815bf3393e4..b54d9245bbd 100644 --- a/homeassistant/components/bmw_connected_drive/coordinator.py +++ b/homeassistant/components/bmw_connected_drive/coordinator.py @@ -42,7 +42,7 @@ class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]): account: MyBMWAccount config_entry: BMWConfigEntry - def __init__(self, hass: HomeAssistant, *, config_entry: ConfigEntry) -> None: + def __init__(self, hass: HomeAssistant, *, config_entry: BMWConfigEntry) -> None: """Initialize account-wide BMW data updater.""" self.account = MyBMWAccount( config_entry.data[CONF_USERNAME], From 95babbef21296faf157f28dd4a10da4398282220 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Sun, 15 Dec 2024 17:39:25 +0100 Subject: [PATCH 0688/1198] Fix two typos in KEF strings (#133294) --- homeassistant/components/kef/strings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/kef/strings.json b/homeassistant/components/kef/strings.json index e5ffff68162..c8aa644333a 100644 --- a/homeassistant/components/kef/strings.json +++ b/homeassistant/components/kef/strings.json @@ -22,14 +22,14 @@ }, "high_pass": { "name": "High pass", - "description": "High-pass mode\"." + "description": "High-pass mode." }, "sub_polarity": { "name": "Subwoofer polarity", "description": "Sub polarity." }, "bass_extension": { - "name": "Base extension", + "name": "Bass extension", "description": "Bass extension." 
} } From 51422a4502d4e63c388f9332f000f291e6d0283e Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sun, 15 Dec 2024 17:41:43 +0100 Subject: [PATCH 0689/1198] Bump pynordpool 0.2.3 (#133277) --- homeassistant/components/nordpool/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/nordpool/manifest.json b/homeassistant/components/nordpool/manifest.json index bf093eb3ee9..b3a18eb040a 100644 --- a/homeassistant/components/nordpool/manifest.json +++ b/homeassistant/components/nordpool/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "cloud_polling", "loggers": ["pynordpool"], - "requirements": ["pynordpool==0.2.2"], + "requirements": ["pynordpool==0.2.3"], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index e4b9787c641..cfa3763ce0e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2115,7 +2115,7 @@ pynetio==0.1.9.1 pynobo==1.8.1 # homeassistant.components.nordpool -pynordpool==0.2.2 +pynordpool==0.2.3 # homeassistant.components.nuki pynuki==1.6.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 58f6d599825..d269c63d097 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1714,7 +1714,7 @@ pynetgear==0.10.10 pynobo==1.8.1 # homeassistant.components.nordpool -pynordpool==0.2.2 +pynordpool==0.2.3 # homeassistant.components.nuki pynuki==1.6.3 From 042d4cd39b77511fe76ed7de12055ae721012914 Mon Sep 17 00:00:00 2001 From: Conor Eager Date: Mon, 16 Dec 2024 05:43:21 +1300 Subject: [PATCH 0690/1198] Bump starlink-grpc-core to 1.2.1 to fix missing ping (#133183) --- homeassistant/components/starlink/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/starlink/manifest.json b/homeassistant/components/starlink/manifest.json index 070cbf1b44c..15bad3ebc2e 100644 --- a/homeassistant/components/starlink/manifest.json +++ b/homeassistant/components/starlink/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/starlink", "iot_class": "local_polling", - "requirements": ["starlink-grpc-core==1.2.0"] + "requirements": ["starlink-grpc-core==1.2.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index cfa3763ce0e..cd2b0c04544 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2747,7 +2747,7 @@ starline==0.1.5 starlingbank==3.2 # homeassistant.components.starlink -starlink-grpc-core==1.2.0 +starlink-grpc-core==1.2.2 # homeassistant.components.statsd statsd==3.2.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d269c63d097..6101fe6e41e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2202,7 +2202,7 @@ srpenergy==1.3.6 starline==0.1.5 # homeassistant.components.starlink -starlink-grpc-core==1.2.0 +starlink-grpc-core==1.2.2 # homeassistant.components.statsd statsd==3.2.1 From f069f340a3c0215cf455b07abb43fe707316ae2b Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sun, 15 Dec 2024 08:53:36 -0800 Subject: [PATCH 0691/1198] Explicitly set `PARALLEL_UPDATES` for Google Tasks (#133296) --- homeassistant/components/google_tasks/quality_scale.yaml | 2 +- homeassistant/components/google_tasks/todo.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/google_tasks/quality_scale.yaml 
b/homeassistant/components/google_tasks/quality_scale.yaml index 94c81d0b7f8..0cecb88484f 100644 --- a/homeassistant/components/google_tasks/quality_scale.yaml +++ b/homeassistant/components/google_tasks/quality_scale.yaml @@ -42,7 +42,7 @@ rules: action-exceptions: done docs-installation-parameters: todo integration-owner: done - parallel-updates: todo + parallel-updates: done test-coverage: status: todo comment: Test coverage for __init__.py is not above 95% yet diff --git a/homeassistant/components/google_tasks/todo.py b/homeassistant/components/google_tasks/todo.py index d749adbfb2b..9a44b91b529 100644 --- a/homeassistant/components/google_tasks/todo.py +++ b/homeassistant/components/google_tasks/todo.py @@ -19,6 +19,7 @@ from homeassistant.util import dt as dt_util from .coordinator import TaskUpdateCoordinator from .types import GoogleTasksConfigEntry +PARALLEL_UPDATES = 0 SCAN_INTERVAL = timedelta(minutes=15) TODO_STATUS_MAP = { From 2a49378f4cb3e808bee83d959aaff9755da044cb Mon Sep 17 00:00:00 2001 From: Tomer Shemesh Date: Sun, 15 Dec 2024 12:27:17 -0500 Subject: [PATCH 0692/1198] Refactor Onkyo tests to patch underlying pyeiscp library (#132653) * Refactor Onkyo tests to patch underlying pyeiscp library instead of home assistant methods * limit test patches to specific component, move atches into conftest * use patch.multiple and restrict patches to specific component * use side effect instead of mocking method --- tests/components/onkyo/__init__.py | 10 + tests/components/onkyo/conftest.py | 68 ++++- tests/components/onkyo/test_config_flow.py | 273 +++++++++------------ 3 files changed, 179 insertions(+), 172 deletions(-) diff --git a/tests/components/onkyo/__init__.py b/tests/components/onkyo/__init__.py index 8900f189aea..064075d109e 100644 --- a/tests/components/onkyo/__init__.py +++ b/tests/components/onkyo/__init__.py @@ -19,6 +19,16 @@ def create_receiver_info(id: int) -> ReceiverInfo: ) +def create_connection(id: int) -> Mock: + """Create an mock connection object for testing.""" + connection = Mock() + connection.host = f"host {id}" + connection.port = 0 + connection.name = f"type {id}" + connection.identifier = f"id{id}" + return connection + + def create_config_entry_from_info(info: ReceiverInfo) -> MockConfigEntry: """Create a config entry from receiver info.""" data = {CONF_HOST: info.host} diff --git a/tests/components/onkyo/conftest.py b/tests/components/onkyo/conftest.py index c37966e3bae..abbe39dd966 100644 --- a/tests/components/onkyo/conftest.py +++ b/tests/components/onkyo/conftest.py @@ -1,25 +1,16 @@ """Configure tests for the Onkyo integration.""" -from collections.abc import Generator -from unittest.mock import AsyncMock, patch +from unittest.mock import patch import pytest from homeassistant.components.onkyo.const import DOMAIN +from . 
import create_connection + from tests.common import MockConfigEntry -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.onkyo.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - yield mock_setup_entry - - @pytest.fixture(name="config_entry") def mock_config_entry() -> MockConfigEntry: """Create Onkyo entry in Home Assistant.""" @@ -28,3 +19,56 @@ def mock_config_entry() -> MockConfigEntry: title="Onkyo", data={}, ) + + +@pytest.fixture(autouse=True) +def patch_timeouts(): + """Patch timeouts to avoid tests waiting.""" + with patch.multiple( + "homeassistant.components.onkyo.receiver", + DEVICE_INTERVIEW_TIMEOUT=0, + DEVICE_DISCOVERY_TIMEOUT=0, + ): + yield + + +@pytest.fixture +async def default_mock_discovery(): + """Mock discovery with a single device.""" + + async def mock_discover(host=None, discovery_callback=None, timeout=0): + await discovery_callback(create_connection(1)) + + with patch( + "homeassistant.components.onkyo.receiver.pyeiscp.Connection.discover", + new=mock_discover, + ): + yield + + +@pytest.fixture +async def stub_mock_discovery(): + """Mock discovery with no devices.""" + + async def mock_discover(host=None, discovery_callback=None, timeout=0): + pass + + with patch( + "homeassistant.components.onkyo.receiver.pyeiscp.Connection.discover", + new=mock_discover, + ): + yield + + +@pytest.fixture +async def empty_mock_discovery(): + """Mock discovery with an empty connection.""" + + async def mock_discover(host=None, discovery_callback=None, timeout=0): + await discovery_callback(None) + + with patch( + "homeassistant.components.onkyo.receiver.pyeiscp.Connection.discover", + new=mock_discover, + ): + yield diff --git a/tests/components/onkyo/test_config_flow.py b/tests/components/onkyo/test_config_flow.py index a9d6f072559..1ee0bfdf9c5 100644 --- a/tests/components/onkyo/test_config_flow.py +++ b/tests/components/onkyo/test_config_flow.py @@ -20,12 +20,13 @@ from homeassistant.data_entry_flow import FlowResultType, InvalidData from . 
import ( create_config_entry_from_info, + create_connection, create_empty_config_entry, create_receiver_info, setup_integration, ) -from tests.common import Mock, MockConfigEntry +from tests.common import MockConfigEntry async def test_user_initial_menu(hass: HomeAssistant) -> None: @@ -40,9 +41,8 @@ async def test_user_initial_menu(hass: HomeAssistant) -> None: assert not set(init_result["menu_options"]) ^ {"manual", "eiscp_discovery"} -async def test_manual_valid_host(hass: HomeAssistant) -> None: +async def test_manual_valid_host(hass: HomeAssistant, default_mock_discovery) -> None: """Test valid host entered.""" - init_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, @@ -53,30 +53,17 @@ async def test_manual_valid_host(hass: HomeAssistant) -> None: {"next_step_id": "manual"}, ) - mock_info = Mock() - mock_info.identifier = "mock_id" - mock_info.host = "mock_host" - mock_info.model_name = "mock_model" + select_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "host 1"}, + ) - with patch( - "homeassistant.components.onkyo.config_flow.async_interview", - return_value=mock_info, - ): - select_result = await hass.config_entries.flow.async_configure( - form_result["flow_id"], - user_input={CONF_HOST: "sample-host-name"}, - ) - - assert select_result["step_id"] == "configure_receiver" - assert ( - select_result["description_placeholders"]["name"] - == "mock_model (mock_host)" - ) + assert select_result["step_id"] == "configure_receiver" + assert select_result["description_placeholders"]["name"] == "type 1 (host 1)" -async def test_manual_invalid_host(hass: HomeAssistant) -> None: +async def test_manual_invalid_host(hass: HomeAssistant, stub_mock_discovery) -> None: """Test invalid host entered.""" - init_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, @@ -87,19 +74,18 @@ async def test_manual_invalid_host(hass: HomeAssistant) -> None: {"next_step_id": "manual"}, ) - with patch( - "homeassistant.components.onkyo.config_flow.async_interview", return_value=None - ): - host_result = await hass.config_entries.flow.async_configure( - form_result["flow_id"], - user_input={CONF_HOST: "sample-host-name"}, - ) + host_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "sample-host-name"}, + ) assert host_result["step_id"] == "manual" assert host_result["errors"]["base"] == "cannot_connect" -async def test_manual_valid_host_unexpected_error(hass: HomeAssistant) -> None: +async def test_manual_valid_host_unexpected_error( + hass: HomeAssistant, empty_mock_discovery +) -> None: """Test valid host entered.""" init_result = await hass.config_entries.flow.async_init( @@ -112,55 +98,49 @@ async def test_manual_valid_host_unexpected_error(hass: HomeAssistant) -> None: {"next_step_id": "manual"}, ) - with patch( - "homeassistant.components.onkyo.config_flow.async_interview", - side_effect=Exception(), - ): - host_result = await hass.config_entries.flow.async_configure( - form_result["flow_id"], - user_input={CONF_HOST: "sample-host-name"}, - ) + host_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "sample-host-name"}, + ) assert host_result["step_id"] == "manual" assert host_result["errors"]["base"] == "unknown" -async def test_discovery_and_no_devices_discovered(hass: HomeAssistant) -> None: +async def test_discovery_and_no_devices_discovered( + hass: 
HomeAssistant, stub_mock_discovery +) -> None: """Test initial menu.""" init_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, ) - with patch( - "homeassistant.components.onkyo.config_flow.async_discover", return_value=[] - ): - form_result = await hass.config_entries.flow.async_configure( - init_result["flow_id"], - {"next_step_id": "eiscp_discovery"}, - ) + form_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + {"next_step_id": "eiscp_discovery"}, + ) - assert form_result["type"] is FlowResultType.ABORT - assert form_result["reason"] == "no_devices_found" + assert form_result["type"] is FlowResultType.ABORT + assert form_result["reason"] == "no_devices_found" -async def test_discovery_with_exception(hass: HomeAssistant) -> None: +async def test_discovery_with_exception( + hass: HomeAssistant, empty_mock_discovery +) -> None: """Test discovery which throws an unexpected exception.""" init_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, ) - with patch( - "homeassistant.components.onkyo.config_flow.async_discover", - side_effect=Exception(), - ): - form_result = await hass.config_entries.flow.async_configure( - init_result["flow_id"], - {"next_step_id": "eiscp_discovery"}, - ) - assert form_result["type"] is FlowResultType.ABORT - assert form_result["reason"] == "unknown" + form_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + {"next_step_id": "eiscp_discovery"}, + ) + + assert form_result["type"] is FlowResultType.ABORT + assert form_result["reason"] == "unknown" async def test_discovery_with_new_and_existing_found(hass: HomeAssistant) -> None: @@ -170,13 +150,12 @@ async def test_discovery_with_new_and_existing_found(hass: HomeAssistant) -> Non context={"source": SOURCE_USER}, ) - infos = [create_receiver_info(1), create_receiver_info(2)] + async def mock_discover(discovery_callback, timeout): + await discovery_callback(create_connection(1)) + await discovery_callback(create_connection(2)) with ( - patch( - "homeassistant.components.onkyo.config_flow.async_discover", - return_value=infos, - ), + patch("pyeiscp.Connection.discover", new=mock_discover), # Fake it like the first entry was already added patch.object(OnkyoConfigFlow, "_async_current_ids", return_value=["id1"]), ): @@ -185,12 +164,12 @@ async def test_discovery_with_new_and_existing_found(hass: HomeAssistant) -> Non {"next_step_id": "eiscp_discovery"}, ) - assert form_result["type"] is FlowResultType.FORM + assert form_result["type"] is FlowResultType.FORM - assert form_result["data_schema"] is not None - schema = form_result["data_schema"].schema - container = schema["device"].container - assert container == {"id2": "type 2 (host 2)"} + assert form_result["data_schema"] is not None + schema = form_result["data_schema"].schema + container = schema["device"].container + assert container == {"id2": "type 2 (host 2)"} async def test_discovery_with_one_selected(hass: HomeAssistant) -> None: @@ -200,14 +179,11 @@ async def test_discovery_with_one_selected(hass: HomeAssistant) -> None: context={"source": SOURCE_USER}, ) - infos = [create_receiver_info(42), create_receiver_info(0)] + async def mock_discover(discovery_callback, timeout): + await discovery_callback(create_connection(42)) + await discovery_callback(create_connection(0)) - with ( - patch( - "homeassistant.components.onkyo.config_flow.async_discover", - return_value=infos, - ), - ): + with 
patch("pyeiscp.Connection.discover", new=mock_discover): form_result = await hass.config_entries.flow.async_configure( init_result["flow_id"], {"next_step_id": "eiscp_discovery"}, @@ -218,11 +194,13 @@ async def test_discovery_with_one_selected(hass: HomeAssistant) -> None: user_input={"device": "id42"}, ) - assert select_result["step_id"] == "configure_receiver" - assert select_result["description_placeholders"]["name"] == "type 42 (host 42)" + assert select_result["step_id"] == "configure_receiver" + assert select_result["description_placeholders"]["name"] == "type 42 (host 42)" -async def test_configure_empty_source_list(hass: HomeAssistant) -> None: +async def test_configure_empty_source_list( + hass: HomeAssistant, default_mock_discovery +) -> None: """Test receiver configuration with no sources set.""" init_result = await hass.config_entries.flow.async_init( @@ -235,29 +213,22 @@ async def test_configure_empty_source_list(hass: HomeAssistant) -> None: {"next_step_id": "manual"}, ) - mock_info = Mock() - mock_info.identifier = "mock_id" + select_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "sample-host-name"}, + ) - with patch( - "homeassistant.components.onkyo.config_flow.async_interview", - return_value=mock_info, - ): - select_result = await hass.config_entries.flow.async_configure( - form_result["flow_id"], - user_input={CONF_HOST: "sample-host-name"}, - ) + configure_result = await hass.config_entries.flow.async_configure( + select_result["flow_id"], + user_input={"volume_resolution": 200, "input_sources": []}, + ) - configure_result = await hass.config_entries.flow.async_configure( - select_result["flow_id"], - user_input={"volume_resolution": 200, "input_sources": []}, - ) - - assert configure_result["errors"] == { - "input_sources": "empty_input_source_list" - } + assert configure_result["errors"] == {"input_sources": "empty_input_source_list"} -async def test_configure_no_resolution(hass: HomeAssistant) -> None: +async def test_configure_no_resolution( + hass: HomeAssistant, default_mock_discovery +) -> None: """Test receiver configure with no resolution set.""" init_result = await hass.config_entries.flow.async_init( @@ -270,26 +241,21 @@ async def test_configure_no_resolution(hass: HomeAssistant) -> None: {"next_step_id": "manual"}, ) - mock_info = Mock() - mock_info.identifier = "mock_id" + select_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "sample-host-name"}, + ) - with patch( - "homeassistant.components.onkyo.config_flow.async_interview", - return_value=mock_info, - ): - select_result = await hass.config_entries.flow.async_configure( - form_result["flow_id"], - user_input={CONF_HOST: "sample-host-name"}, + with pytest.raises(InvalidData): + await hass.config_entries.flow.async_configure( + select_result["flow_id"], + user_input={"input_sources": ["TV"]}, ) - with pytest.raises(InvalidData): - await hass.config_entries.flow.async_configure( - select_result["flow_id"], - user_input={"input_sources": ["TV"]}, - ) - -async def test_configure_resolution_set(hass: HomeAssistant) -> None: +async def test_configure_resolution_set( + hass: HomeAssistant, default_mock_discovery +) -> None: """Test receiver configure with specified resolution.""" init_result = await hass.config_entries.flow.async_init( @@ -302,16 +268,10 @@ async def test_configure_resolution_set(hass: HomeAssistant) -> None: {"next_step_id": "manual"}, ) - receiver_info = 
create_receiver_info(1) - - with patch( - "homeassistant.components.onkyo.config_flow.async_interview", - return_value=receiver_info, - ): - select_result = await hass.config_entries.flow.async_configure( - form_result["flow_id"], - user_input={CONF_HOST: "sample-host-name"}, - ) + select_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "sample-host-name"}, + ) configure_result = await hass.config_entries.flow.async_configure( select_result["flow_id"], @@ -322,7 +282,9 @@ async def test_configure_resolution_set(hass: HomeAssistant) -> None: assert configure_result["options"]["volume_resolution"] == 200 -async def test_configure_invalid_resolution_set(hass: HomeAssistant) -> None: +async def test_configure_invalid_resolution_set( + hass: HomeAssistant, default_mock_discovery +) -> None: """Test receiver configure with invalid resolution.""" init_result = await hass.config_entries.flow.async_init( @@ -335,26 +297,19 @@ async def test_configure_invalid_resolution_set(hass: HomeAssistant) -> None: {"next_step_id": "manual"}, ) - mock_info = Mock() - mock_info.identifier = "mock_id" + select_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "sample-host-name"}, + ) - with patch( - "homeassistant.components.onkyo.config_flow.async_interview", - return_value=mock_info, - ): - select_result = await hass.config_entries.flow.async_configure( - form_result["flow_id"], - user_input={CONF_HOST: "sample-host-name"}, + with pytest.raises(InvalidData): + await hass.config_entries.flow.async_configure( + select_result["flow_id"], + user_input={"volume_resolution": 42, "input_sources": ["TV"]}, ) - with pytest.raises(InvalidData): - await hass.config_entries.flow.async_configure( - select_result["flow_id"], - user_input={"volume_resolution": 42, "input_sources": ["TV"]}, - ) - -async def test_reconfigure(hass: HomeAssistant) -> None: +async def test_reconfigure(hass: HomeAssistant, default_mock_discovery) -> None: """Test the reconfigure config flow.""" receiver_info = create_receiver_info(1) config_entry = create_config_entry_from_info(receiver_info) @@ -368,14 +323,10 @@ async def test_reconfigure(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["step_id"] == "manual" - with patch( - "homeassistant.components.onkyo.config_flow.async_interview", - return_value=receiver_info, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={"host": receiver_info.host} - ) - await hass.async_block_till_done() + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"host": receiver_info.host} + ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM assert result2["step_id"] == "configure_receiver" @@ -403,14 +354,18 @@ async def test_reconfigure_new_device(hass: HomeAssistant) -> None: result = await config_entry.start_reconfigure_flow(hass) - receiver_info_2 = create_receiver_info(2) + mock_connection = create_connection(2) + + # Create mock discover that calls callback immediately + async def mock_discover(host, discovery_callback, timeout): + await discovery_callback(mock_connection) with patch( - "homeassistant.components.onkyo.config_flow.async_interview", - return_value=receiver_info_2, + "homeassistant.components.onkyo.receiver.pyeiscp.Connection.discover", + new=mock_discover, ): result2 = await hass.config_entries.flow.async_configure( - 
result["flow_id"], user_input={"host": receiver_info_2.host} + result["flow_id"], user_input={"host": mock_connection.host} ) await hass.async_block_till_done() @@ -455,12 +410,10 @@ async def test_import_fail( error: str, ) -> None: """Test import flow failed.""" - with ( - patch( - "homeassistant.components.onkyo.config_flow.async_interview", - return_value=None, - side_effect=exception, - ), + + with patch( + "homeassistant.components.onkyo.receiver.pyeiscp.Connection.discover", + side_effect=exception, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=user_input From e9515111323194e9c83f21d856fa8a3d647c0450 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Sun, 15 Dec 2024 19:26:46 +0100 Subject: [PATCH 0693/1198] Allow load_verify_locations with only cadata passed (#133299) --- homeassistant/block_async_io.py | 8 +++++++- tests/test_block_async_io.py | 6 ++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/homeassistant/block_async_io.py b/homeassistant/block_async_io.py index 7a68b2515e9..767716dbe27 100644 --- a/homeassistant/block_async_io.py +++ b/homeassistant/block_async_io.py @@ -50,6 +50,12 @@ def _check_sleep_call_allowed(mapped_args: dict[str, Any]) -> bool: return False +def _check_load_verify_locations_call_allowed(mapped_args: dict[str, Any]) -> bool: + # If only cadata is passed, we can ignore it + kwargs = mapped_args.get("kwargs") + return bool(kwargs and len(kwargs) == 1 and "cadata" in kwargs) + + @dataclass(slots=True, frozen=True) class BlockingCall: """Class to hold information about a blocking call.""" @@ -158,7 +164,7 @@ _BLOCKING_CALLS: tuple[BlockingCall, ...] = ( original_func=SSLContext.load_verify_locations, object=SSLContext, function="load_verify_locations", - check_allowed=None, + check_allowed=_check_load_verify_locations_call_allowed, strict=False, strict_core=False, skip_for_tests=True, diff --git a/tests/test_block_async_io.py b/tests/test_block_async_io.py index dc2b096f595..dd23d4e9709 100644 --- a/tests/test_block_async_io.py +++ b/tests/test_block_async_io.py @@ -429,6 +429,12 @@ async def test_protect_loop_load_verify_locations( context.load_verify_locations("/dev/null") assert "Detected blocking call to load_verify_locations" in caplog.text + # ignore with only cadata + caplog.clear() + with pytest.raises(ssl.SSLError): + context.load_verify_locations(cadata="xxx") + assert "Detected blocking call to load_verify_locations" not in caplog.text + async def test_protect_loop_load_cert_chain( hass: HomeAssistant, caplog: pytest.LogCaptureFixture From 6d6445bfcffa2ca474c379d2e9a66564a99cff1e Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sun, 15 Dec 2024 19:28:10 +0100 Subject: [PATCH 0694/1198] Update quality scale for Nord Pool (#133282) --- homeassistant/components/nordpool/quality_scale.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/nordpool/quality_scale.yaml b/homeassistant/components/nordpool/quality_scale.yaml index 79d5ac0ecea..dada1115715 100644 --- a/homeassistant/components/nordpool/quality_scale.yaml +++ b/homeassistant/components/nordpool/quality_scale.yaml @@ -86,7 +86,7 @@ rules: docs-supported-functions: done docs-data-update: done docs-known-limitations: done - docs-troubleshooting: todo + docs-troubleshooting: done docs-examples: done # Platinum From e81add5a065741bc9c61a7bc0fefbf1acdc1c9fd Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 15 Dec 2024 12:28:29 -0600 Subject: [PATCH 0695/1198] Set code_arm_required to False for homekit_controller (#133284) --- .../components/homekit_controller/alarm_control_panel.py | 1 + tests/components/homekit_controller/snapshots/test_init.ambr | 4 ++-- .../components/homekit_controller/test_alarm_control_panel.py | 2 ++ 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/homekit_controller/alarm_control_panel.py b/homeassistant/components/homekit_controller/alarm_control_panel.py index 3cb80f2c817..b17f122dfa5 100644 --- a/homeassistant/components/homekit_controller/alarm_control_panel.py +++ b/homeassistant/components/homekit_controller/alarm_control_panel.py @@ -69,6 +69,7 @@ class HomeKitAlarmControlPanelEntity(HomeKitEntity, AlarmControlPanelEntity): | AlarmControlPanelEntityFeature.ARM_AWAY | AlarmControlPanelEntityFeature.ARM_NIGHT ) + _attr_code_arm_required = False def get_characteristic_types(self) -> list[str]: """Define the homekit characteristics the entity cares about.""" diff --git a/tests/components/homekit_controller/snapshots/test_init.ambr b/tests/components/homekit_controller/snapshots/test_init.ambr index b96da507adf..2bd5e7faf75 100644 --- a/tests/components/homekit_controller/snapshots/test_init.ambr +++ b/tests/components/homekit_controller/snapshots/test_init.ambr @@ -1474,7 +1474,7 @@ 'state': dict({ 'attributes': dict({ 'changed_by': None, - 'code_arm_required': True, + 'code_arm_required': False, 'code_format': None, 'friendly_name': 'Aqara-Hub-E1-00A0 Security System', 'supported_features': , @@ -1848,7 +1848,7 @@ 'state': dict({ 'attributes': dict({ 'changed_by': None, - 'code_arm_required': True, + 'code_arm_required': False, 'code_format': None, 'friendly_name': 'Aqara Hub-1563 Security System', 'supported_features': , diff --git a/tests/components/homekit_controller/test_alarm_control_panel.py b/tests/components/homekit_controller/test_alarm_control_panel.py index 1e9f023fc46..3ab9dc82e41 100644 --- a/tests/components/homekit_controller/test_alarm_control_panel.py +++ b/tests/components/homekit_controller/test_alarm_control_panel.py @@ -6,6 +6,7 @@ from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes +from homeassistant.components.alarm_control_panel import ATTR_CODE_ARM_REQUIRED from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -106,6 +107,7 @@ async def test_switch_read_alarm_state( state = await helper.poll_and_get_state() assert state.state == "armed_home" assert state.attributes["battery_level"] == 50 + assert state.attributes[ATTR_CODE_ARM_REQUIRED] is False await helper.async_update( ServicesTypes.SECURITY_SYSTEM, From 9e8a158c891b424c7df0c70a3c4a737c90e2fb26 Mon Sep 17 00:00:00 2001 From: Bouwe Westerdijk <11290930+bouwew@users.noreply.github.com> Date: Sun, 15 Dec 2024 19:35:36 +0100 Subject: [PATCH 0696/1198] Bump plugwise to v1.6.4 and adapt (#133293) --- homeassistant/components/plugwise/climate.py | 10 ---------- homeassistant/components/plugwise/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../fixtures/anna_heatpump_heating/all_data.json | 1 + .../plugwise/fixtures/legacy_anna/all_data.json | 1 + .../plugwise/fixtures/m_adam_cooling/all_data.json | 4 ++-- .../plugwise/fixtures/m_adam_jip/all_data.json | 1 - .../fixtures/m_anna_heatpump_cooling/all_data.json | 1 + 
.../fixtures/m_anna_heatpump_idle/all_data.json | 1 + 10 files changed, 9 insertions(+), 16 deletions(-) diff --git a/homeassistant/components/plugwise/climate.py b/homeassistant/components/plugwise/climate.py index 3cf536eb445..3caed1e7bc2 100644 --- a/homeassistant/components/plugwise/climate.py +++ b/homeassistant/components/plugwise/climate.py @@ -188,19 +188,9 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): """Return the current running hvac operation if supported.""" # Keep track of the previous action-mode self._previous_action_mode(self.coordinator) - - # Adam provides the hvac_action for each thermostat if (action := self.device.get("control_state")) is not None: return HVACAction(action) - # Anna - heater: str = self._gateway["heater_id"] - heater_data = self._devices[heater] - if heater_data["binary_sensors"]["heating_state"]: - return HVACAction.HEATING - if heater_data["binary_sensors"].get("cooling_state", False): - return HVACAction.COOLING - return HVACAction.IDLE @property diff --git a/homeassistant/components/plugwise/manifest.json b/homeassistant/components/plugwise/manifest.json index 60de4496779..80f5be974e1 100644 --- a/homeassistant/components/plugwise/manifest.json +++ b/homeassistant/components/plugwise/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["plugwise"], - "requirements": ["plugwise==1.6.3"], + "requirements": ["plugwise==1.6.4"], "zeroconf": ["_plugwise._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index cd2b0c04544..9ffc6a8f16e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1632,7 +1632,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==1.6.3 +plugwise==1.6.4 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 6101fe6e41e..25c4167a0bf 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1345,7 +1345,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==1.6.3 +plugwise==1.6.4 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 diff --git a/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json b/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json index 5fc2a114b2f..3a54c3fb9a2 100644 --- a/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json +++ b/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json @@ -62,6 +62,7 @@ "active_preset": "home", "available_schedules": ["standaard", "off"], "climate_mode": "auto", + "control_state": "heating", "dev_class": "thermostat", "firmware": "2018-02-08T11:15:53+01:00", "hardware": "6539-1301-5002", diff --git a/tests/components/plugwise/fixtures/legacy_anna/all_data.json b/tests/components/plugwise/fixtures/legacy_anna/all_data.json index 2cb439950af..9275b82cde9 100644 --- a/tests/components/plugwise/fixtures/legacy_anna/all_data.json +++ b/tests/components/plugwise/fixtures/legacy_anna/all_data.json @@ -37,6 +37,7 @@ "0d266432d64443e283b5d708ae98b455": { "active_preset": "home", "climate_mode": "heat", + "control_state": "heating", "dev_class": "thermostat", "firmware": "2017-03-13T11:54:58+01:00", "hardware": "6539-1301-500", diff --git a/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json b/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json index c5afd68bed5..af6d4b83380 100644 --- 
a/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json @@ -176,8 +176,8 @@ "Weekschema", "off" ], - "climate_mode": "cool", - "control_state": "idle", + "climate_mode": "auto", + "control_state": "cooling", "dev_class": "climate", "model": "ThermoZone", "name": "Bathroom", diff --git a/tests/components/plugwise/fixtures/m_adam_jip/all_data.json b/tests/components/plugwise/fixtures/m_adam_jip/all_data.json index 1ca9e77010f..1a3ef66c147 100644 --- a/tests/components/plugwise/fixtures/m_adam_jip/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_jip/all_data.json @@ -3,7 +3,6 @@ "06aecb3d00354375924f50c47af36bd2": { "active_preset": "no_frost", "climate_mode": "off", - "control_state": "idle", "dev_class": "climate", "model": "ThermoZone", "name": "Slaapkamer", diff --git a/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json b/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json index 74f20379d68..eaa42facf10 100644 --- a/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json +++ b/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json @@ -62,6 +62,7 @@ "active_preset": "home", "available_schedules": ["standaard", "off"], "climate_mode": "auto", + "control_state": "cooling", "dev_class": "thermostat", "firmware": "2018-02-08T11:15:53+01:00", "hardware": "6539-1301-5002", diff --git a/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json b/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json index 3b1e9bf8cac..52645b0f317 100644 --- a/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json +++ b/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json @@ -62,6 +62,7 @@ "active_preset": "home", "available_schedules": ["standaard", "off"], "climate_mode": "auto", + "control_state": "idle", "dev_class": "thermostat", "firmware": "2018-02-08T11:15:53+01:00", "hardware": "6539-1301-5002", From 544ebcf310a0663c62373faca0bfabcc2a50b83a Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Sun, 15 Dec 2024 19:35:50 +0100 Subject: [PATCH 0697/1198] Fix typo "configurered" in MQTT (#133295) --- homeassistant/components/mqtt/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/mqtt/strings.json b/homeassistant/components/mqtt/strings.json index c062c111487..3b337c05d2a 100644 --- a/homeassistant/components/mqtt/strings.json +++ b/homeassistant/components/mqtt/strings.json @@ -115,7 +115,7 @@ "bad_ws_headers": "Supply valid HTTP headers as a JSON object", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "invalid_inclusion": "The client certificate and private key must be configurered together" + "invalid_inclusion": "The client certificate and private key must be configured together" } }, "device_automation": { From be6ed05aa220c47d37bd54f1af21759cff8b49e2 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Sun, 15 Dec 2024 19:40:51 +0100 Subject: [PATCH 0698/1198] Improve Fronius tests (#132872) --- tests/components/fronius/__init__.py | 27 +- .../fronius/snapshots/test_sensor.ambr | 9024 +++++++++++++++++ tests/components/fronius/test_config_flow.py | 184 +- tests/components/fronius/test_coordinator.py | 12 +- tests/components/fronius/test_init.py | 24 +- tests/components/fronius/test_sensor.py | 260 +- 6 files changed, 9132 
insertions(+), 399 deletions(-) create mode 100644 tests/components/fronius/snapshots/test_sensor.ambr diff --git a/tests/components/fronius/__init__.py b/tests/components/fronius/__init__.py index 57b22490ed0..8445e6b6a79 100644 --- a/tests/components/fronius/__init__.py +++ b/tests/components/fronius/__init__.py @@ -3,20 +3,16 @@ from __future__ import annotations from collections.abc import Callable -from datetime import timedelta import json from typing import Any -from freezegun.api import FrozenDateTimeFactory - from homeassistant.components.fronius.const import DOMAIN from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er from homeassistant.helpers.typing import UNDEFINED, UndefinedType -from tests.common import MockConfigEntry, async_fire_time_changed, load_fixture +from tests.common import MockConfigEntry, load_fixture from tests.test_util.aiohttp import AiohttpClientMocker MOCK_HOST = "http://fronius" @@ -115,24 +111,3 @@ def mock_responses( f"{host}/solar_api/v1/GetOhmPilotRealtimeData.cgi?Scope=System", text=_load(f"{fixture_set}/GetOhmPilotRealtimeData.json", "fronius"), ) - - -async def enable_all_entities( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - config_entry_id: str, - time_till_next_update: timedelta, -) -> None: - """Enable all entities for a config entry and fast forward time to receive data.""" - registry = er.async_get(hass) - entities = er.async_entries_for_config_entry(registry, config_entry_id) - for entry in [ - entry - for entry in entities - if entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION - ]: - registry.async_update_entity(entry.entity_id, disabled_by=None) - await hass.async_block_till_done() - freezer.tick(time_till_next_update) - async_fire_time_changed(hass) - await hass.async_block_till_done() diff --git a/tests/components/fronius/snapshots/test_sensor.ambr b/tests/components/fronius/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..700c09da2f6 --- /dev/null +++ b/tests/components/fronius/snapshots/test_sensor.ambr @@ -0,0 +1,9024 @@ +# serializer version: 1 +# name: test_gen24[sensor.inverter_name_ac_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_ac_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac', + 'unique_id': '12345678-current_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_ac_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Inverter name AC current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_ac_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.1589', + }) +# --- +# name: test_gen24[sensor.inverter_name_ac_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': 
dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_ac_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_ac', + 'unique_id': '12345678-power_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_ac_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter name AC power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_ac_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '37.3204', + }) +# --- +# name: test_gen24[sensor.inverter_name_ac_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_ac_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac', + 'unique_id': '12345678-voltage_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_ac_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Inverter name AC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_ac_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '234.9168', + }) +# --- +# name: test_gen24[sensor.inverter_name_dc_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_dc_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_dc', + 'unique_id': '12345678-current_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_dc_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Inverter name DC current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_dc_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0783', + }) +# --- +# name: test_gen24[sensor.inverter_name_dc_current_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': 
dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_dc_current_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC current 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_dc_2', + 'unique_id': '12345678-current_dc_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_dc_current_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Inverter name DC current 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_dc_current_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0754', + }) +# --- +# name: test_gen24[sensor.inverter_name_dc_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_dc_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_dc', + 'unique_id': '12345678-voltage_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_dc_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Inverter name DC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_dc_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '411.3811', + }) +# --- +# name: test_gen24[sensor.inverter_name_dc_voltage_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_dc_voltage_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC voltage 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_dc_2', + 'unique_id': '12345678-voltage_dc_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_dc_voltage_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Inverter name DC voltage 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_dc_voltage_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '403.4312', + }) +# --- +# name: test_gen24[sensor.inverter_name_error_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + 
}), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.inverter_name_error_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Error code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'error_code', + 'unique_id': '12345678-error_code', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.inverter_name_error_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inverter name Error code', + }), + 'context': , + 'entity_id': 'sensor.inverter_name_error_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_gen24[sensor.inverter_name_frequency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_frequency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Frequency', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'frequency_ac', + 'unique_id': '12345678-frequency_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_frequency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Inverter name Frequency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_frequency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '49.9917', + }) +# --- +# name: test_gen24[sensor.inverter_name_inverter_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.inverter_name_inverter_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Inverter state', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'inverter_state', + 'unique_id': '12345678-inverter_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.inverter_name_inverter_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inverter name Inverter state', + }), + 'context': , + 'entity_id': 'sensor.inverter_name_inverter_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Running', + }) +# --- +# name: test_gen24[sensor.inverter_name_status_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': , + 'entity_id': 'sensor.inverter_name_status_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Status code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_code', + 'unique_id': '12345678-status_code', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.inverter_name_status_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inverter name Status code', + }), + 'context': , + 'entity_id': 'sensor.inverter_name_status_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7', + }) +# --- +# name: test_gen24[sensor.inverter_name_status_message-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'startup', + 'running', + 'standby', + 'bootloading', + 'error', + 'idle', + 'ready', + 'sleeping', + 'unknown', + 'invalid', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.inverter_name_status_message', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Status message', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_message', + 'unique_id': '12345678-status_message', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.inverter_name_status_message-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Inverter name Status message', + 'options': list([ + 'startup', + 'running', + 'standby', + 'bootloading', + 'error', + 'idle', + 'ready', + 'sleeping', + 'unknown', + 'invalid', + ]), + }), + 'context': , + 'entity_id': 'sensor.inverter_name_status_message', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'running', + }) +# --- +# name: test_gen24[sensor.inverter_name_total_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_total_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_total', + 'unique_id': '12345678-energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_total_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Inverter name Total energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_total_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1530193.42', + }) +# --- +# name: 
test_gen24[sensor.smart_meter_ts_65a_3_apparent_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_apparent', + 'unique_id': '1234567890-power_apparent', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_apparent_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Smart Meter TS 65A-3 Apparent power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '868.0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_apparent_power_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_apparent_phase_1', + 'unique_id': '1234567890-power_apparent_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_apparent_power_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Smart Meter TS 65A-3 Apparent power phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '243.3', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_apparent_power_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_apparent_phase_2', + 'unique_id': '1234567890-power_apparent_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_apparent_power_phase_2-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Smart Meter TS 65A-3 Apparent power phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '323.4', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_apparent_power_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_apparent_phase_3', + 'unique_id': '1234567890-power_apparent_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_apparent_power_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Smart Meter TS 65A-3 Apparent power phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '301.2', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_current_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac_phase_1', + 'unique_id': '1234567890-current_ac_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_current_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Smart Meter TS 65A-3 Current phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.145', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_current_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 
'original_name': 'Current phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac_phase_2', + 'unique_id': '1234567890-current_ac_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_current_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Smart Meter TS 65A-3 Current phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.33', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_current_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac_phase_3', + 'unique_id': '1234567890-current_ac_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_current_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Smart Meter TS 65A-3 Current phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.825', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_frequency_phase_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_frequency_phase_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Frequency phase average', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'frequency_phase_average', + 'unique_id': '1234567890-frequency_phase_average', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_frequency_phase_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Smart Meter TS 65A-3 Frequency phase average', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_frequency_phase_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '49.9', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_meter_location-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_meter_location', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Meter location', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_location', + 'unique_id': '1234567890-meter_location', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_meter_location-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Smart Meter TS 65A-3 Meter location', + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_meter_location', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_meter_location_description-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'feed_in', + 'consumption_path', + 'external_generator', + 'external_battery', + 'subload', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_meter_location_description', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Meter location description', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_location_description', + 'unique_id': '1234567890-meter_location_description', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_meter_location_description-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Smart Meter TS 65A-3 Meter location description', + 'options': list([ + 'feed_in', + 'consumption_path', + 'external_generator', + 'external_battery', + 'subload', + ]), + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_meter_location_description', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'feed_in', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_power_factor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power factor', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_factor', + 'unique_id': '1234567890-power_factor', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_power_factor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Smart Meter TS 65A-3 Power factor', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': '0.828', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_power_factor_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power factor phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_factor_phase_1', + 'unique_id': '1234567890-power_factor_phase_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_power_factor_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Smart Meter TS 65A-3 Power factor phase 1', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.441', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_power_factor_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power factor phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_factor_phase_2', + 'unique_id': '1234567890-power_factor_phase_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_power_factor_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Smart Meter TS 65A-3 Power factor phase 2', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.934', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_power_factor_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power factor phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_factor_phase_3', + 'unique_id': '1234567890-power_factor_phase_3', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_gen24[sensor.smart_meter_ts_65a_3_power_factor_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Smart Meter TS 65A-3 Power factor phase 3', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.832', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_energy_consumed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_energy_consumed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reactive energy consumed', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_reactive_ac_consumed', + 'unique_id': '1234567890-energy_reactive_ac_consumed', + 'unit_of_measurement': 'varh', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_energy_consumed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Smart Meter TS 65A-3 Reactive energy consumed', + 'state_class': , + 'unit_of_measurement': 'varh', + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_energy_consumed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '88221.0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_energy_produced-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_energy_produced', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reactive energy produced', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_reactive_ac_produced', + 'unique_id': '1234567890-energy_reactive_ac_produced', + 'unit_of_measurement': 'varh', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_energy_produced-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Smart Meter TS 65A-3 Reactive energy produced', + 'state_class': , + 'unit_of_measurement': 'varh', + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_energy_produced', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1989125.0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': 
set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reactive power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_reactive', + 'unique_id': '1234567890-power_reactive', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'reactive_power', + 'friendly_name': 'Smart Meter TS 65A-3 Reactive power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-517.4', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_power_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reactive power phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_reactive_phase_1', + 'unique_id': '1234567890-power_reactive_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_power_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'reactive_power', + 'friendly_name': 'Smart Meter TS 65A-3 Reactive power phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-218.6', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_power_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reactive power phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_reactive_phase_2', + 'unique_id': '1234567890-power_reactive_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_power_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'reactive_power', + 'friendly_name': 'Smart Meter TS 65A-3 Reactive power phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-132.8', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_power_phase_3-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reactive power phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_reactive_phase_3', + 'unique_id': '1234567890-power_reactive_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_power_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'reactive_power', + 'friendly_name': 'Smart Meter TS 65A-3 Reactive power phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-166.0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_energy_consumed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_consumed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real energy consumed', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_consumed', + 'unique_id': '1234567890-energy_real_consumed', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_energy_consumed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Smart Meter TS 65A-3 Real energy consumed', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_consumed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2013105.0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_energy_minus-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_minus', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real energy minus', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_ac_minus', + 'unique_id': '1234567890-energy_real_ac_minus', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_energy_minus-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Smart Meter TS 
65A-3 Real energy minus', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_minus', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3863340.0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_energy_plus-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_plus', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real energy plus', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_ac_plus', + 'unique_id': '1234567890-energy_real_ac_plus', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_energy_plus-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Smart Meter TS 65A-3 Real energy plus', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_plus', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2013105.0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_energy_produced-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_produced', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real energy produced', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_produced', + 'unique_id': '1234567890-energy_real_produced', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_energy_produced-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Smart Meter TS 65A-3 Real energy produced', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_produced', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3863340.0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'power_real', + 'unique_id': '1234567890-power_real', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Smart Meter TS 65A-3 Real power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '653.1', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_power_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real_phase_1', + 'unique_id': '1234567890-power_real_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_power_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Smart Meter TS 65A-3 Real power phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '106.8', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_power_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real_phase_2', + 'unique_id': '1234567890-power_real_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_power_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Smart Meter TS 65A-3 Real power phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '294.9', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_power_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 
'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real_phase_3', + 'unique_id': '1234567890-power_real_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_power_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Smart Meter TS 65A-3 Real power phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '251.3', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_1', + 'unique_id': '1234567890-voltage_ac_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '235.9', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 1-2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_to_phase_12', + 'unique_id': '1234567890-voltage_ac_phase_to_phase_12', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 1-2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '408.7', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 
'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_2', + 'unique_id': '1234567890-voltage_ac_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '236.1', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_2_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_2_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 2-3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_to_phase_23', + 'unique_id': '1234567890-voltage_ac_phase_to_phase_23', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_2_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 2-3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_2_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '409.6', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_3', + 'unique_id': '1234567890-voltage_ac_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.smart_meter_ts_65a_3_voltage_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '236.9', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_3_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_3_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 3-1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_to_phase_31', + 'unique_id': '1234567890-voltage_ac_phase_to_phase_31', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_3_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 3-1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_3_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '409.4', + }) +# --- +# name: test_gen24[sensor.solarnet_meter_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.solarnet_meter_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Meter mode', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_mode', + 'unique_id': 'solar_net_123.4567890-power_flow-meter_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.solarnet_meter_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Meter mode', + }), + 'context': , + 'entity_id': 'sensor.solarnet_meter_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'meter', + }) +# --- +# name: test_gen24[sensor.solarnet_power_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power grid', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid', + 'unique_id': 'solar_net_123.4567890-power_flow-power_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.solarnet_power_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid', + 'state_class': , + 'unit_of_measurement': , + 
}), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '658.4', + }) +# --- +# name: test_gen24[sensor.solarnet_power_grid_export-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid_export', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power grid export', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid_export', + 'unique_id': 'solar_net_123.4567890-power_flow-power_grid_export', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.solarnet_power_grid_export-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid export', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid_export', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_gen24[sensor.solarnet_power_grid_import-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid_import', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power grid import', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid_import', + 'unique_id': 'solar_net_123.4567890-power_flow-power_grid_import', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.solarnet_power_grid_import-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid import', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid_import', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '658.4', + }) +# --- +# name: test_gen24[sensor.solarnet_power_load-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load', + 'unique_id': 'solar_net_123.4567890-power_flow-power_load', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.solarnet_power_load-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'device_class': 'power', + 'friendly_name': 'SolarNet Power load', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-695.6827', + }) +# --- +# name: test_gen24[sensor.solarnet_power_load_consumed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load_consumed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load consumed', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load_consumed', + 'unique_id': 'solar_net_123.4567890-power_flow-power_load_consumed', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.solarnet_power_load_consumed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power load consumed', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load_consumed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '695.6827', + }) +# --- +# name: test_gen24[sensor.solarnet_power_load_generated-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load_generated', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load generated', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load_generated', + 'unique_id': 'solar_net_123.4567890-power_flow-power_load_generated', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.solarnet_power_load_generated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power load generated', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load_generated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_gen24[sensor.solarnet_power_photovoltaics-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_photovoltaics', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power photovoltaics', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_photovoltaics', + 'unique_id': 
'solar_net_123.4567890-power_flow-power_photovoltaics', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.solarnet_power_photovoltaics-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power photovoltaics', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_photovoltaics', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '62.9481', + }) +# --- +# name: test_gen24[sensor.solarnet_relative_autonomy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_relative_autonomy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Relative autonomy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relative_autonomy', + 'unique_id': 'solar_net_123.4567890-power_flow-relative_autonomy', + 'unit_of_measurement': '%', + }) +# --- +# name: test_gen24[sensor.solarnet_relative_autonomy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Relative autonomy', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.solarnet_relative_autonomy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.3592', + }) +# --- +# name: test_gen24[sensor.solarnet_relative_self_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_relative_self_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Relative self consumption', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relative_self_consumption', + 'unique_id': 'solar_net_123.4567890-power_flow-relative_self_consumption', + 'unit_of_measurement': '%', + }) +# --- +# name: test_gen24[sensor.solarnet_relative_self_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Relative self consumption', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.solarnet_relative_self_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100.0', + }) +# --- +# name: test_gen24[sensor.solarnet_total_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_total_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 
'original_icon': None, + 'original_name': 'Total energy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_total', + 'unique_id': 'solar_net_123.4567890-power_flow-energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.solarnet_total_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'SolarNet Total energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_total_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1530193.42', + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_dc_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.byd_battery_box_premium_hv_dc_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_dc', + 'unique_id': 'P030T020Z2001234567 -current_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_dc_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'BYD Battery-Box Premium HV DC current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.byd_battery_box_premium_hv_dc_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_dc_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.byd_battery_box_premium_hv_dc_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_dc', + 'unique_id': 'P030T020Z2001234567 -voltage_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_dc_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'BYD Battery-Box Premium HV DC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.byd_battery_box_premium_hv_dc_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_designed_capacity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 
'entity_id': 'sensor.byd_battery_box_premium_hv_designed_capacity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Designed capacity', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'capacity_designed', + 'unique_id': 'P030T020Z2001234567 -capacity_designed', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_designed_capacity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'BYD Battery-Box Premium HV Designed capacity', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.byd_battery_box_premium_hv_designed_capacity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16588', + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_maximum_capacity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.byd_battery_box_premium_hv_maximum_capacity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Maximum capacity', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'capacity_maximum', + 'unique_id': 'P030T020Z2001234567 -capacity_maximum', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_maximum_capacity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'BYD Battery-Box Premium HV Maximum capacity', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.byd_battery_box_premium_hv_maximum_capacity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16588', + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_state_of_charge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.byd_battery_box_premium_hv_state_of_charge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'State of charge', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'state_of_charge', + 'unique_id': 'P030T020Z2001234567 -state_of_charge', + 'unit_of_measurement': '%', + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_state_of_charge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'BYD Battery-Box Premium HV State of charge', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.byd_battery_box_premium_hv_state_of_charge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.6', + }) +# --- +# name: 
test_gen24_storage[sensor.byd_battery_box_premium_hv_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.byd_battery_box_premium_hv_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temperature_cell', + 'unique_id': 'P030T020Z2001234567 -temperature_cell', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'BYD Battery-Box Premium HV Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.byd_battery_box_premium_hv_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '21.5', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_ac_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gen24_storage_ac_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac', + 'unique_id': '12345678-current_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_ac_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Gen24 Storage AC current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_ac_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.1087', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_ac_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gen24_storage_ac_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_ac', + 'unique_id': '12345678-power_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_ac_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Gen24 Storage AC power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.gen24_storage_ac_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '250.9093', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_ac_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gen24_storage_ac_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac', + 'unique_id': '12345678-voltage_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_ac_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Gen24 Storage AC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_ac_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '227.354', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_dc_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gen24_storage_dc_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_dc', + 'unique_id': '12345678-current_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_dc_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Gen24 Storage DC current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_dc_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3952', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_dc_current_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gen24_storage_dc_current_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC current 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_dc_2', + 'unique_id': '12345678-current_dc_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_dc_current_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Gen24 Storage DC current 2', + 
'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_dc_current_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3564', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_dc_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gen24_storage_dc_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_dc', + 'unique_id': '12345678-voltage_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_dc_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Gen24 Storage DC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_dc_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '419.1009', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_dc_voltage_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gen24_storage_dc_voltage_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC voltage 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_dc_2', + 'unique_id': '12345678-voltage_dc_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_dc_voltage_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Gen24 Storage DC voltage 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_dc_voltage_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '318.8103', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_error_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.gen24_storage_error_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Error code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'error_code', + 'unique_id': '12345678-error_code', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_error_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 
'Gen24 Storage Error code', + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_error_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_frequency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gen24_storage_frequency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Frequency', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'frequency_ac', + 'unique_id': '12345678-frequency_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_frequency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Gen24 Storage Frequency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_frequency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '49.9816', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_inverter_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.gen24_storage_inverter_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Inverter state', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'inverter_state', + 'unique_id': '12345678-inverter_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_inverter_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gen24 Storage Inverter state', + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_inverter_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Running', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_status_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.gen24_storage_status_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Status code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_code', + 'unique_id': '12345678-status_code', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_status_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gen24 Storage Status code', + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_status_code', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_status_message-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'startup', + 'running', + 'standby', + 'bootloading', + 'error', + 'idle', + 'ready', + 'sleeping', + 'unknown', + 'invalid', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.gen24_storage_status_message', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Status message', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_message', + 'unique_id': '12345678-status_message', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_status_message-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gen24 Storage Status message', + 'options': list([ + 'startup', + 'running', + 'standby', + 'bootloading', + 'error', + 'idle', + 'ready', + 'sleeping', + 'unknown', + 'invalid', + ]), + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_status_message', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'running', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_total_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gen24_storage_total_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_total', + 'unique_id': '12345678-energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_total_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Gen24 Storage Total energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_total_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7512794.0117', + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_energy_consumed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ohmpilot_energy_consumed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy consumed', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_ac_consumed', + 'unique_id': 
'23456789-energy_real_ac_consumed', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_energy_consumed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Ohmpilot Energy consumed', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ohmpilot_energy_consumed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1233295.0', + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ohmpilot_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real_ac', + 'unique_id': '23456789-power_real_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Ohmpilot Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ohmpilot_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_state_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.ohmpilot_state_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'State code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'state_code', + 'unique_id': '23456789-state_code', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_state_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Ohmpilot State code', + }), + 'context': , + 'entity_id': 'sensor.ohmpilot_state_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_state_message-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'up_and_running', + 'keep_minimum_temperature', + 'legionella_protection', + 'critical_fault', + 'fault', + 'boost_mode', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.ohmpilot_state_message', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'State message', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'state_message', + 'unique_id': '23456789-state_message', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_state_message-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Ohmpilot State message', + 'options': list([ + 'up_and_running', + 'keep_minimum_temperature', + 'legionella_protection', + 'critical_fault', + 'fault', + 'boost_mode', + ]), + }), + 'context': , + 'entity_id': 'sensor.ohmpilot_state_message', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'up_and_running', + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ohmpilot_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temperature_channel_1', + 'unique_id': '23456789-temperature_channel_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Ohmpilot Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ohmpilot_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '38.9', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_apparent_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_apparent', + 'unique_id': '1234567890-power_apparent', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_apparent_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Smart Meter TS 65A-3 Apparent power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '821.9', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_apparent_power_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_1', + 'has_entity_name': True, + 'hidden_by': 
None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_apparent_phase_1', + 'unique_id': '1234567890-power_apparent_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_apparent_power_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Smart Meter TS 65A-3 Apparent power phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '319.5', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_apparent_power_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_apparent_phase_2', + 'unique_id': '1234567890-power_apparent_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_apparent_power_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Smart Meter TS 65A-3 Apparent power phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '383.9', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_apparent_power_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_apparent_phase_3', + 'unique_id': '1234567890-power_apparent_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_apparent_power_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Smart Meter TS 65A-3 Apparent power phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '118.4', 
+ }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_current_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac_phase_1', + 'unique_id': '1234567890-current_ac_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_current_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Smart Meter TS 65A-3 Current phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.701', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_current_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac_phase_2', + 'unique_id': '1234567890-current_ac_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_current_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Smart Meter TS 65A-3 Current phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.832', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_current_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac_phase_3', + 'unique_id': '1234567890-current_ac_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_current_phase_3-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Smart Meter TS 65A-3 Current phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.645', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_frequency_phase_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_frequency_phase_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Frequency phase average', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'frequency_phase_average', + 'unique_id': '1234567890-frequency_phase_average', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_frequency_phase_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Smart Meter TS 65A-3 Frequency phase average', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_frequency_phase_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '49.9', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_meter_location-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_meter_location', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Meter location', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_location', + 'unique_id': '1234567890-meter_location', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_meter_location-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Smart Meter TS 65A-3 Meter location', + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_meter_location', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_meter_location_description-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'feed_in', + 'consumption_path', + 'external_generator', + 'external_battery', + 'subload', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_meter_location_description', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, 
+ 'original_name': 'Meter location description', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_location_description', + 'unique_id': '1234567890-meter_location_description', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_meter_location_description-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Smart Meter TS 65A-3 Meter location description', + 'options': list([ + 'feed_in', + 'consumption_path', + 'external_generator', + 'external_battery', + 'subload', + ]), + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_meter_location_description', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'feed_in', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_power_factor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power factor', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_factor', + 'unique_id': '1234567890-power_factor', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_power_factor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Smart Meter TS 65A-3 Power factor', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.698', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_power_factor_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power factor phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_factor_phase_1', + 'unique_id': '1234567890-power_factor_phase_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_power_factor_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Smart Meter TS 65A-3 Power factor phase 1', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.995', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_power_factor_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 
'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power factor phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_factor_phase_2', + 'unique_id': '1234567890-power_factor_phase_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_power_factor_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Smart Meter TS 65A-3 Power factor phase 2', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.389', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_power_factor_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power factor phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_factor_phase_3', + 'unique_id': '1234567890-power_factor_phase_3', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_power_factor_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Smart Meter TS 65A-3 Power factor phase 3', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.163', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_energy_consumed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_energy_consumed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reactive energy consumed', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_reactive_ac_consumed', + 'unique_id': '1234567890-energy_reactive_ac_consumed', + 'unit_of_measurement': 'varh', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_energy_consumed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Smart Meter TS 65A-3 Reactive energy consumed', + 'state_class': , + 
'unit_of_measurement': 'varh', + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_energy_consumed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5482.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_energy_produced-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_energy_produced', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reactive energy produced', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_reactive_ac_produced', + 'unique_id': '1234567890-energy_reactive_ac_produced', + 'unit_of_measurement': 'varh', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_energy_produced-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Smart Meter TS 65A-3 Reactive energy produced', + 'state_class': , + 'unit_of_measurement': 'varh', + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_energy_produced', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3266105.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reactive power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_reactive', + 'unique_id': '1234567890-power_reactive', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'reactive_power', + 'friendly_name': 'Smart Meter TS 65A-3 Reactive power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-501.5', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_power_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reactive power phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, 
+ 'supported_features': 0, + 'translation_key': 'power_reactive_phase_1', + 'unique_id': '1234567890-power_reactive_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_power_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'reactive_power', + 'friendly_name': 'Smart Meter TS 65A-3 Reactive power phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-31.3', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_power_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reactive power phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_reactive_phase_2', + 'unique_id': '1234567890-power_reactive_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_power_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'reactive_power', + 'friendly_name': 'Smart Meter TS 65A-3 Reactive power phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-353.4', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_power_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reactive power phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_reactive_phase_3', + 'unique_id': '1234567890-power_reactive_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_power_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'reactive_power', + 'friendly_name': 'Smart Meter TS 65A-3 Reactive power phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-116.7', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_energy_consumed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_consumed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real energy consumed', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_consumed', + 'unique_id': '1234567890-energy_real_consumed', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_energy_consumed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Smart Meter TS 65A-3 Real energy consumed', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_consumed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1247204.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_energy_minus-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_minus', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real energy minus', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_ac_minus', + 'unique_id': '1234567890-energy_real_ac_minus', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_energy_minus-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Smart Meter TS 65A-3 Real energy minus', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_minus', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1705128.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_energy_plus-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_plus', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real energy plus', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_ac_plus', + 'unique_id': '1234567890-energy_real_ac_plus', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_energy_plus-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Smart Meter TS 65A-3 Real energy plus', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.smart_meter_ts_65a_3_real_energy_plus', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1247204.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_energy_produced-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_produced', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real energy produced', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_produced', + 'unique_id': '1234567890-energy_real_produced', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_energy_produced-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Smart Meter TS 65A-3 Real energy produced', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_produced', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1705128.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real', + 'unique_id': '1234567890-power_real', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Smart Meter TS 65A-3 Real power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '487.7', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_power_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real_phase_1', + 'unique_id': '1234567890-power_real_phase_1', + 'unit_of_measurement': , + }) 
+# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_power_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Smart Meter TS 65A-3 Real power phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '317.9', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_power_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real_phase_2', + 'unique_id': '1234567890-power_real_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_power_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Smart Meter TS 65A-3 Real power phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '150.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_power_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real_phase_3', + 'unique_id': '1234567890-power_real_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_power_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Smart Meter TS 65A-3 Real power phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '19.6', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), 
+ 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_1', + 'unique_id': '1234567890-voltage_ac_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '229.4', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 1-2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_to_phase_12', + 'unique_id': '1234567890-voltage_ac_phase_to_phase_12', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 1-2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '396.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_2', + 'unique_id': '1234567890-voltage_ac_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '225.6', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_2_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_2_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 2-3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_to_phase_23', + 'unique_id': '1234567890-voltage_ac_phase_to_phase_23', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_2_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 2-3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_2_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '393.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_3', + 'unique_id': '1234567890-voltage_ac_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '228.3', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_3_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_3_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 3-1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_to_phase_31', + 'unique_id': '1234567890-voltage_ac_phase_to_phase_31', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_3_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 3-1', + 'state_class': , 
+ 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_3_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '394.3', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_meter_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.solarnet_meter_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Meter mode', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_mode', + 'unique_id': 'solar_net_12345678-power_flow-meter_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.solarnet_meter_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Meter mode', + }), + 'context': , + 'entity_id': 'sensor.solarnet_meter_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'bidirectional', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power battery', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_battery', + 'unique_id': 'solar_net_12345678-power_flow-power_battery', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power battery', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.1591', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_battery_charge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_battery_charge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power battery charge', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_battery_charge', + 'unique_id': 'solar_net_12345678-power_flow-power_battery_charge', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_battery_charge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 
'friendly_name': 'SolarNet Power battery charge', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_battery_charge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_battery_discharge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_battery_discharge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power battery discharge', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_battery_discharge', + 'unique_id': 'solar_net_12345678-power_flow-power_battery_discharge', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_battery_discharge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power battery discharge', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_battery_discharge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.1591', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power grid', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid', + 'unique_id': 'solar_net_12345678-power_flow-power_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2274.9', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_grid_export-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid_export', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power grid export', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid_export', + 'unique_id': 
'solar_net_12345678-power_flow-power_grid_export', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_grid_export-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid export', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid_export', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_grid_import-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid_import', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power grid import', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid_import', + 'unique_id': 'solar_net_12345678-power_flow-power_grid_import', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_grid_import-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid import', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid_import', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2274.9', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_load-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load', + 'unique_id': 'solar_net_12345678-power_flow-power_load', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_load-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power load', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-2459.3092', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_load_consumed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load_consumed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power 
load consumed', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load_consumed', + 'unique_id': 'solar_net_12345678-power_flow-power_load_consumed', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_load_consumed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power load consumed', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load_consumed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2459.3092', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_load_generated-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load_generated', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load generated', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load_generated', + 'unique_id': 'solar_net_12345678-power_flow-power_load_generated', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_load_generated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power load generated', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load_generated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_photovoltaics-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_photovoltaics', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power photovoltaics', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_photovoltaics', + 'unique_id': 'solar_net_12345678-power_flow-power_photovoltaics', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_photovoltaics-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power photovoltaics', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_photovoltaics', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '216.4328', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_relative_autonomy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.solarnet_relative_autonomy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Relative autonomy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relative_autonomy', + 'unique_id': 'solar_net_12345678-power_flow-relative_autonomy', + 'unit_of_measurement': '%', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_relative_autonomy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Relative autonomy', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.solarnet_relative_autonomy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7.4984', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_relative_self_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_relative_self_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Relative self consumption', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relative_self_consumption', + 'unique_id': 'solar_net_12345678-power_flow-relative_self_consumption', + 'unit_of_measurement': '%', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_relative_self_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Relative self consumption', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.solarnet_relative_self_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100.0', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_total_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_total_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_total', + 'unique_id': 'solar_net_12345678-power_flow-energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_total_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'SolarNet Total energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_total_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7512664.4042', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_ac_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_ac_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac', + 'unique_id': '234567-current_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_ac_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Primo 3.0-1 AC current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_ac_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.32', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_ac_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_ac_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_ac', + 'unique_id': '234567-power_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_ac_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Primo 3.0-1 AC power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_ac_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '296', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_ac_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_ac_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac', + 'unique_id': '234567-voltage_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_ac_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Primo 3.0-1 AC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_ac_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '223.6', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_dc_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 
'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_dc_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_dc', + 'unique_id': '234567-current_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_dc_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Primo 3.0-1 DC current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_dc_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.97', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_dc_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_dc_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_dc', + 'unique_id': '234567-voltage_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_dc_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Primo 3.0-1 DC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_dc_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '329.5', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_energy_day-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_energy_day', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy day', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_day', + 'unique_id': '234567-energy_day', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_energy_day-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Primo 3.0-1 Energy day', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_energy_day', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '14237', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_energy_year-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 
, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_energy_year', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy year', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_year', + 'unique_id': '234567-energy_year', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_energy_year-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Primo 3.0-1 Energy year', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_energy_year', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3596193.25', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_error_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_3_0_1_error_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Error code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'error_code', + 'unique_id': '234567-error_code', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_error_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Primo 3.0-1 Error code', + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_error_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_frequency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_frequency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Frequency', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'frequency_ac', + 'unique_id': '234567-frequency_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_frequency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Primo 3.0-1 Frequency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_frequency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60.01', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_led_color-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': , + 'entity_id': 'sensor.primo_3_0_1_led_color', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'LED color', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'led_color', + 'unique_id': '234567-led_color', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_led_color-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Primo 3.0-1 LED color', + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_led_color', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_led_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_3_0_1_led_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'LED state', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'led_state', + 'unique_id': '234567-led_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_led_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Primo 3.0-1 LED state', + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_led_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_status_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_3_0_1_status_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Status code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_code', + 'unique_id': '234567-status_code', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_status_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Primo 3.0-1 Status code', + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_status_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_status_message-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'startup', + 'running', + 'standby', + 'bootloading', + 'error', + 'idle', + 'ready', + 'sleeping', + 'unknown', + 'invalid', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_3_0_1_status_message', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': 
set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Status message', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_message', + 'unique_id': '234567-status_message', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_status_message-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Primo 3.0-1 Status message', + 'options': list([ + 'startup', + 'running', + 'standby', + 'bootloading', + 'error', + 'idle', + 'ready', + 'sleeping', + 'unknown', + 'invalid', + ]), + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_status_message', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'running', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_total_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_total_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_total', + 'unique_id': '234567-energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_total_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Primo 3.0-1 Total energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_total_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5796010', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_ac_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_5_0_1_ac_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac', + 'unique_id': '123456-current_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_ac_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Primo 5.0-1 AC current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_ac_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.85', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_ac_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, 
+ 'entity_id': 'sensor.primo_5_0_1_ac_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_ac', + 'unique_id': '123456-power_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_ac_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Primo 5.0-1 AC power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_ac_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '862', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_ac_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_5_0_1_ac_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac', + 'unique_id': '123456-voltage_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_ac_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Primo 5.0-1 AC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_ac_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '223.9', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_dc_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_5_0_1_dc_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_dc', + 'unique_id': '123456-current_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_dc_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Primo 5.0-1 DC current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_dc_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.23', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_dc_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.primo_5_0_1_dc_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_dc', + 'unique_id': '123456-voltage_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_dc_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Primo 5.0-1 DC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_dc_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '452.3', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_energy_day-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_5_0_1_energy_day', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy day', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_day', + 'unique_id': '123456-energy_day', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_energy_day-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Primo 5.0-1 Energy day', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_energy_day', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '22504', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_energy_year-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_5_0_1_energy_year', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy year', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_year', + 'unique_id': '123456-energy_year', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_energy_year-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Primo 5.0-1 Energy year', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_energy_year', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7532755.5', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_error_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_5_0_1_error_code', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Error code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'error_code', + 'unique_id': '123456-error_code', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_error_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Primo 5.0-1 Error code', + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_error_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_frequency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_5_0_1_frequency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Frequency', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'frequency_ac', + 'unique_id': '123456-frequency_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_frequency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Primo 5.0-1 Frequency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_frequency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_led_color-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_5_0_1_led_color', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'LED color', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'led_color', + 'unique_id': '123456-led_color', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_led_color-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Primo 5.0-1 LED color', + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_led_color', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_led_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_5_0_1_led_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'LED state', + 
'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'led_state', + 'unique_id': '123456-led_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_led_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Primo 5.0-1 LED state', + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_led_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_status_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_5_0_1_status_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Status code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_code', + 'unique_id': '123456-status_code', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_status_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Primo 5.0-1 Status code', + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_status_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_status_message-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'startup', + 'running', + 'standby', + 'bootloading', + 'error', + 'idle', + 'ready', + 'sleeping', + 'unknown', + 'invalid', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_5_0_1_status_message', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Status message', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_message', + 'unique_id': '123456-status_message', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_status_message-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Primo 5.0-1 Status message', + 'options': list([ + 'startup', + 'running', + 'standby', + 'bootloading', + 'error', + 'idle', + 'ready', + 'sleeping', + 'unknown', + 'invalid', + ]), + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_status_message', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'running', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_total_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_5_0_1_total_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 
}), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_total', + 'unique_id': '123456-energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_total_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Primo 5.0-1 Total energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_total_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '17114940', + }) +# --- +# name: test_primo_s0[sensor.s0_meter_at_inverter_1_meter_location-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.s0_meter_at_inverter_1_meter_location', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Meter location', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_location', + 'unique_id': 'solar_net_123.4567890:S0 Meter at inverter 1-meter_location', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.s0_meter_at_inverter_1_meter_location-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'S0 Meter at inverter 1 Meter location', + }), + 'context': , + 'entity_id': 'sensor.s0_meter_at_inverter_1_meter_location', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_primo_s0[sensor.s0_meter_at_inverter_1_meter_location_description-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'feed_in', + 'consumption_path', + 'external_generator', + 'external_battery', + 'subload', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.s0_meter_at_inverter_1_meter_location_description', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Meter location description', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_location_description', + 'unique_id': 'solar_net_123.4567890:S0 Meter at inverter 1-meter_location_description', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.s0_meter_at_inverter_1_meter_location_description-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'S0 Meter at inverter 1 Meter location description', + 'options': list([ + 'feed_in', + 'consumption_path', + 'external_generator', + 'external_battery', + 'subload', + ]), + }), + 'context': , + 'entity_id': 'sensor.s0_meter_at_inverter_1_meter_location_description', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'consumption_path', + }) +# --- +# name: test_primo_s0[sensor.s0_meter_at_inverter_1_real_power-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.s0_meter_at_inverter_1_real_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real', + 'unique_id': 'solar_net_123.4567890:S0 Meter at inverter 1-power_real', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.s0_meter_at_inverter_1_real_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'S0 Meter at inverter 1 Real power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.s0_meter_at_inverter_1_real_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-2216.7487', + }) +# --- +# name: test_primo_s0[sensor.solarnet_co2_factor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_co2_factor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CO₂ factor', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'co2_factor', + 'unique_id': '123.4567890-co2_factor', + 'unit_of_measurement': 'kg/kWh', + }) +# --- +# name: test_primo_s0[sensor.solarnet_co2_factor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet CO₂ factor', + 'state_class': , + 'unit_of_measurement': 'kg/kWh', + }), + 'context': , + 'entity_id': 'sensor.solarnet_co2_factor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.53', + }) +# --- +# name: test_primo_s0[sensor.solarnet_energy_day-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_energy_day', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy day', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_day', + 'unique_id': 'solar_net_123.4567890-power_flow-energy_day', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_energy_day-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'SolarNet Energy day', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_energy_day', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '36724', + }) +# --- 
+# name: test_primo_s0[sensor.solarnet_energy_year-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_energy_year', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy year', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_year', + 'unique_id': 'solar_net_123.4567890-power_flow-energy_year', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_energy_year-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'SolarNet Energy year', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_energy_year', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11128933.25', + }) +# --- +# name: test_primo_s0[sensor.solarnet_grid_export_tariff-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_grid_export_tariff', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Grid export tariff', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cash_factor', + 'unique_id': '123.4567890-cash_factor', + 'unit_of_measurement': 'BRL/kWh', + }) +# --- +# name: test_primo_s0[sensor.solarnet_grid_export_tariff-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Grid export tariff', + 'state_class': , + 'unit_of_measurement': 'BRL/kWh', + }), + 'context': , + 'entity_id': 'sensor.solarnet_grid_export_tariff', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_primo_s0[sensor.solarnet_grid_import_tariff-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_grid_import_tariff', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Grid import tariff', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'delivery_factor', + 'unique_id': '123.4567890-delivery_factor', + 'unit_of_measurement': 'BRL/kWh', + }) +# --- +# name: test_primo_s0[sensor.solarnet_grid_import_tariff-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Grid import tariff', + 'state_class': , + 'unit_of_measurement': 'BRL/kWh', + }), + 'context': , + 'entity_id': 'sensor.solarnet_grid_import_tariff', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_primo_s0[sensor.solarnet_meter_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.solarnet_meter_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Meter mode', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_mode', + 'unique_id': 'solar_net_123.4567890-power_flow-meter_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.solarnet_meter_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Meter mode', + }), + 'context': , + 'entity_id': 'sensor.solarnet_meter_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'vague-meter', + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power grid', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid', + 'unique_id': 'solar_net_123.4567890-power_flow-power_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '384.9349', + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_grid_export-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid_export', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power grid export', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid_export', + 'unique_id': 'solar_net_123.4567890-power_flow-power_grid_export', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_grid_export-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid export', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid_export', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_grid_import-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid_import', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power grid import', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid_import', + 'unique_id': 'solar_net_123.4567890-power_flow-power_grid_import', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_grid_import-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid import', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid_import', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '384.9349', + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_load-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load', + 'unique_id': 'solar_net_123.4567890-power_flow-power_load', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_load-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power load', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-2218.9349', + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_load_consumed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load_consumed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load consumed', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load_consumed', + 'unique_id': 'solar_net_123.4567890-power_flow-power_load_consumed', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_load_consumed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power load 
consumed', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load_consumed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2218.9349', + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_load_generated-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load_generated', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load generated', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load_generated', + 'unique_id': 'solar_net_123.4567890-power_flow-power_load_generated', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_load_generated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power load generated', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load_generated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_photovoltaics-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_photovoltaics', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power photovoltaics', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_photovoltaics', + 'unique_id': 'solar_net_123.4567890-power_flow-power_photovoltaics', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_photovoltaics-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power photovoltaics', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_photovoltaics', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1834', + }) +# --- +# name: test_primo_s0[sensor.solarnet_relative_autonomy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_relative_autonomy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Relative autonomy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relative_autonomy', + 'unique_id': 
'solar_net_123.4567890-power_flow-relative_autonomy', + 'unit_of_measurement': '%', + }) +# --- +# name: test_primo_s0[sensor.solarnet_relative_autonomy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Relative autonomy', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.solarnet_relative_autonomy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '82.6523', + }) +# --- +# name: test_primo_s0[sensor.solarnet_relative_self_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_relative_self_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Relative self consumption', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relative_self_consumption', + 'unique_id': 'solar_net_123.4567890-power_flow-relative_self_consumption', + 'unit_of_measurement': '%', + }) +# --- +# name: test_primo_s0[sensor.solarnet_relative_self_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Relative self consumption', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.solarnet_relative_self_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_primo_s0[sensor.solarnet_total_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_total_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_total', + 'unique_id': 'solar_net_123.4567890-power_flow-energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_total_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'SolarNet Total energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_total_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '22910919.5', + }) +# --- diff --git a/tests/components/fronius/test_config_flow.py b/tests/components/fronius/test_config_flow.py index ed90e266b81..933b8fad8ef 100644 --- a/tests/components/fronius/test_config_flow.py +++ b/tests/components/fronius/test_config_flow.py @@ -44,43 +44,62 @@ MOCK_DHCP_DATA = DhcpServiceInfo( ) -async def test_form_with_logger(hass: HomeAssistant) -> None: - """Test we get the form.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert not 
result["errors"] - - with ( - patch( - "pyfronius.Fronius.current_logger_info", - return_value=LOGGER_INFO_RETURN_VALUE, - ), - patch( - "homeassistant.components.fronius.async_setup_entry", - return_value=True, - ) as mock_setup_entry, +async def assert_finish_flow_with_logger(hass: HomeAssistant, flow_id: str) -> None: + """Assert finishing the flow with a logger device.""" + with patch( + "pyfronius.Fronius.current_logger_info", + return_value=LOGGER_INFO_RETURN_VALUE, ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], + result = await hass.config_entries.flow.async_configure( + flow_id, { "host": "10.9.8.1", }, ) await hass.async_block_till_done() - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "SolarNet Datalogger at 10.9.8.1" - assert result2["data"] == { + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "SolarNet Datalogger at 10.9.8.1" + assert result["data"] == { "host": "10.9.8.1", "is_logger": True, } - assert len(mock_setup_entry.mock_calls) == 1 + assert result["result"].unique_id == "123.4567" + + +async def assert_abort_flow_with_logger( + hass: HomeAssistant, flow_id: str, reason: str +) -> config_entries.ConfigFlowResult: + """Assert the flow was aborted when a logger device responded.""" + with patch( + "pyfronius.Fronius.current_logger_info", + return_value=LOGGER_INFO_RETURN_VALUE, + ): + result = await hass.config_entries.flow.async_configure( + flow_id, + { + "host": "10.9.8.1", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == reason + return result + + +async def test_form_with_logger(hass: HomeAssistant) -> None: + """Test the basic flow with a logger device.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + await assert_finish_flow_with_logger(hass, result["flow_id"]) async def test_form_with_inverter(hass: HomeAssistant) -> None: - """Test we get the form.""" + """Test the basic flow with a Gen24 device.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -96,10 +115,6 @@ async def test_form_with_inverter(hass: HomeAssistant) -> None: "pyfronius.Fronius.inverter_info", return_value=INVERTER_INFO_RETURN_VALUE, ), - patch( - "homeassistant.components.fronius.async_setup_entry", - return_value=True, - ) as mock_setup_entry, ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -115,7 +130,7 @@ async def test_form_with_inverter(hass: HomeAssistant) -> None: "host": "10.9.1.1", "is_logger": False, } - assert len(mock_setup_entry.mock_calls) == 1 + assert result2["result"].unique_id == "1234567" @pytest.mark.parametrize( @@ -154,6 +169,7 @@ async def test_form_cannot_connect( assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "cannot_connect"} + await assert_finish_flow_with_logger(hass, result2["flow_id"]) async def test_form_unexpected(hass: HomeAssistant) -> None: @@ -175,13 +191,14 @@ async def test_form_unexpected(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "unknown"} + await assert_finish_flow_with_logger(hass, result2["flow_id"]) async def test_form_already_existing(hass: HomeAssistant) -> None: """Test existing entry.""" MockConfigEntry( domain=DOMAIN, 
- unique_id="123.4567", + unique_id=LOGGER_INFO_RETURN_VALUE["unique_identifier"]["value"], data={CONF_HOST: "10.9.8.1", "is_logger": True}, ).add_to_hass(hass) assert len(hass.config_entries.async_entries(DOMAIN)) == 1 @@ -189,20 +206,9 @@ async def test_form_already_existing(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - with patch( - "pyfronius.Fronius.current_logger_info", - return_value=LOGGER_INFO_RETURN_VALUE, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "host": "10.9.8.1", - }, - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "already_configured" + await assert_abort_flow_with_logger( + hass, result["flow_id"], reason="already_configured" + ) async def test_config_flow_already_configured( @@ -273,6 +279,7 @@ async def test_dhcp(hass: HomeAssistant, aioclient_mock: AiohttpClientMocker) -> "host": MOCK_DHCP_DATA.ip, "is_logger": True, } + assert result["result"].unique_id == "123.4567" async def test_dhcp_already_configured( @@ -345,10 +352,6 @@ async def test_reconfigure(hass: HomeAssistant) -> None: "pyfronius.Fronius.inverter_info", return_value=INVERTER_INFO_RETURN_VALUE, ), - patch( - "homeassistant.components.fronius.async_setup_entry", - return_value=True, - ) as mock_setup_entry, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -364,14 +367,13 @@ async def test_reconfigure(hass: HomeAssistant) -> None: "host": new_host, "is_logger": False, } - assert len(mock_setup_entry.mock_calls) == 1 async def test_reconfigure_cannot_connect(hass: HomeAssistant) -> None: """Test we handle cannot connect error.""" entry = MockConfigEntry( domain=DOMAIN, - unique_id="123.4567890", + unique_id=LOGGER_INFO_RETURN_VALUE["unique_identifier"]["value"], data={ CONF_HOST: "10.1.2.3", "is_logger": True, @@ -401,12 +403,16 @@ async def test_reconfigure_cannot_connect(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "cannot_connect"} + await assert_abort_flow_with_logger( + hass, result2["flow_id"], reason="reconfigure_successful" + ) + async def test_reconfigure_unexpected(hass: HomeAssistant) -> None: """Test we handle unexpected error.""" entry = MockConfigEntry( domain=DOMAIN, - unique_id="123.4567890", + unique_id=LOGGER_INFO_RETURN_VALUE["unique_identifier"]["value"], data={ CONF_HOST: "10.1.2.3", "is_logger": True, @@ -430,12 +436,16 @@ async def test_reconfigure_unexpected(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "unknown"} + await assert_abort_flow_with_logger( + hass, result2["flow_id"], reason="reconfigure_successful" + ) -async def test_reconfigure_already_configured(hass: HomeAssistant) -> None: - """Test reconfiguring an entry.""" + +async def test_reconfigure_to_different_device(hass: HomeAssistant) -> None: + """Test reconfiguring an entry to a different device.""" entry = MockConfigEntry( domain=DOMAIN, - unique_id="123.4567890", + unique_id="999.9999999", data={ CONF_HOST: "10.1.2.3", "is_logger": True, @@ -447,68 +457,6 @@ async def test_reconfigure_already_configured(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" - with ( - patch( - "pyfronius.Fronius.current_logger_info", - return_value=LOGGER_INFO_RETURN_VALUE, - ), - patch( - 
"pyfronius.Fronius.inverter_info", - return_value=INVERTER_INFO_RETURN_VALUE, - ), - patch( - "homeassistant.components.fronius.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - "host": "10.1.2.3", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "unique_id_mismatch" - assert len(mock_setup_entry.mock_calls) == 0 - - -async def test_reconfigure_already_existing(hass: HomeAssistant) -> None: - """Test reconfiguring entry to already existing device.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id="123.4567890", - data={ - CONF_HOST: "10.1.2.3", - "is_logger": True, - }, + await assert_abort_flow_with_logger( + hass, result["flow_id"], reason="unique_id_mismatch" ) - entry.add_to_hass(hass) - - entry_2_uid = "222.2222222" - entry_2 = MockConfigEntry( - domain=DOMAIN, - unique_id=entry_2_uid, - data={ - CONF_HOST: "10.2.2.2", - "is_logger": True, - }, - ) - entry_2.add_to_hass(hass) - - result = await entry.start_reconfigure_flow(hass) - with patch( - "pyfronius.Fronius.current_logger_info", - return_value={"unique_identifier": {"value": entry_2_uid}}, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "host": "10.1.1.1", - }, - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "unique_id_mismatch" diff --git a/tests/components/fronius/test_coordinator.py b/tests/components/fronius/test_coordinator.py index 13a08bbe70e..fab2d509767 100644 --- a/tests/components/fronius/test_coordinator.py +++ b/tests/components/fronius/test_coordinator.py @@ -29,7 +29,7 @@ async def test_adaptive_update_interval( mock_inverter_data.reset_mock() freezer.tick(FroniusInverterUpdateCoordinator.default_interval) - async_fire_time_changed(hass, None) + async_fire_time_changed(hass) await hass.async_block_till_done() mock_inverter_data.assert_called_once() mock_inverter_data.reset_mock() @@ -38,13 +38,13 @@ async def test_adaptive_update_interval( # first 3 bad requests at default interval - 4th has different interval for _ in range(3): freezer.tick(FroniusInverterUpdateCoordinator.default_interval) - async_fire_time_changed(hass, None) + async_fire_time_changed(hass) await hass.async_block_till_done() assert mock_inverter_data.call_count == 3 mock_inverter_data.reset_mock() freezer.tick(FroniusInverterUpdateCoordinator.error_interval) - async_fire_time_changed(hass, None) + async_fire_time_changed(hass) await hass.async_block_till_done() assert mock_inverter_data.call_count == 1 mock_inverter_data.reset_mock() @@ -52,13 +52,13 @@ async def test_adaptive_update_interval( mock_inverter_data.side_effect = None # next successful request resets to default interval freezer.tick(FroniusInverterUpdateCoordinator.error_interval) - async_fire_time_changed(hass, None) + async_fire_time_changed(hass) await hass.async_block_till_done() mock_inverter_data.assert_called_once() mock_inverter_data.reset_mock() freezer.tick(FroniusInverterUpdateCoordinator.default_interval) - async_fire_time_changed(hass, None) + async_fire_time_changed(hass) await hass.async_block_till_done() mock_inverter_data.assert_called_once() mock_inverter_data.reset_mock() @@ -68,7 +68,7 @@ async def test_adaptive_update_interval( # first 3 requests at default interval - 4th has different interval for _ in range(3): 
freezer.tick(FroniusInverterUpdateCoordinator.default_interval) - async_fire_time_changed(hass, None) + async_fire_time_changed(hass) await hass.async_block_till_done() # BadStatusError does 3 silent retries for inverter endpoint * 3 request intervals = 9 assert mock_inverter_data.call_count == 9 diff --git a/tests/components/fronius/test_init.py b/tests/components/fronius/test_init.py index 9d570785073..a950ed4e296 100644 --- a/tests/components/fronius/test_init.py +++ b/tests/components/fronius/test_init.py @@ -3,6 +3,7 @@ from datetime import timedelta from unittest.mock import patch +from freezegun.api import FrozenDateTimeFactory from pyfronius import FroniusError from homeassistant.components.fronius.const import DOMAIN, SOLAR_NET_RESCAN_TIMER @@ -10,7 +11,6 @@ from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component -from homeassistant.util import dt as dt_util from . import mock_responses, setup_fronius_integration @@ -66,6 +66,7 @@ async def test_inverter_night_rescan( hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, + freezer: FrozenDateTimeFactory, ) -> None: """Test dynamic adding of an inverter discovered automatically after a Home Assistant reboot during the night.""" mock_responses(aioclient_mock, fixture_set="igplus_v2", night=True) @@ -78,9 +79,8 @@ async def test_inverter_night_rescan( # Switch to daytime mock_responses(aioclient_mock, fixture_set="igplus_v2", night=False) - async_fire_time_changed( - hass, dt_util.utcnow() + timedelta(minutes=SOLAR_NET_RESCAN_TIMER) - ) + freezer.tick(timedelta(minutes=SOLAR_NET_RESCAN_TIMER)) + async_fire_time_changed(hass) await hass.async_block_till_done() # We expect our inverter to be present now @@ -88,9 +88,8 @@ async def test_inverter_night_rescan( assert inverter_1.manufacturer == "Fronius" # After another re-scan we still only expect this inverter - async_fire_time_changed( - hass, dt_util.utcnow() + timedelta(minutes=SOLAR_NET_RESCAN_TIMER * 2) - ) + freezer.tick(timedelta(minutes=SOLAR_NET_RESCAN_TIMER)) + async_fire_time_changed(hass) await hass.async_block_till_done() inverter_1 = device_registry.async_get_device(identifiers={(DOMAIN, "203200")}) assert inverter_1.manufacturer == "Fronius" @@ -100,6 +99,7 @@ async def test_inverter_rescan_interruption( hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, + freezer: FrozenDateTimeFactory, ) -> None: """Test interruption of re-scan during runtime to process further.""" mock_responses(aioclient_mock, fixture_set="igplus_v2", night=True) @@ -115,9 +115,8 @@ async def test_inverter_rescan_interruption( "pyfronius.Fronius.inverter_info", side_effect=FroniusError, ): - async_fire_time_changed( - hass, dt_util.utcnow() + timedelta(minutes=SOLAR_NET_RESCAN_TIMER) - ) + freezer.tick(timedelta(minutes=SOLAR_NET_RESCAN_TIMER)) + async_fire_time_changed(hass) await hass.async_block_till_done() # No increase of devices expected because of a FroniusError @@ -132,9 +131,8 @@ async def test_inverter_rescan_interruption( # Next re-scan will pick up the new inverter. Expect 2 devices now. 
mock_responses(aioclient_mock, fixture_set="igplus_v2", night=False) - async_fire_time_changed( - hass, dt_util.utcnow() + timedelta(minutes=SOLAR_NET_RESCAN_TIMER * 2) - ) + freezer.tick(timedelta(minutes=SOLAR_NET_RESCAN_TIMER)) + async_fire_time_changed(hass) await hass.async_block_till_done() assert ( diff --git a/tests/components/fronius/test_sensor.py b/tests/components/fronius/test_sensor.py index 04c25ce26f2..b5d051d56ca 100644 --- a/tests/components/fronius/test_sensor.py +++ b/tests/components/fronius/test_sensor.py @@ -2,27 +2,29 @@ from freezegun.api import FrozenDateTimeFactory import pytest +from syrupy import SnapshotAssertion from homeassistant.components.fronius.const import DOMAIN from homeassistant.components.fronius.coordinator import ( FroniusInverterUpdateCoordinator, - FroniusMeterUpdateCoordinator, FroniusPowerFlowUpdateCoordinator, ) from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr +from homeassistant.helpers import device_registry as dr, entity_registry as er -from . import enable_all_entities, mock_responses, setup_fronius_integration +from . import mock_responses, setup_fronius_integration -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_symo_inverter( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, freezer: FrozenDateTimeFactory, + snapshot: SnapshotAssertion, ) -> None: """Test Fronius Symo inverter entities.""" @@ -32,15 +34,8 @@ async def test_symo_inverter( # Init at night mock_responses(aioclient_mock, night=True) - config_entry = await setup_fronius_integration(hass) + await setup_fronius_integration(hass) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 22 - await enable_all_entities( - hass, - freezer, - config_entry.entry_id, - FroniusInverterUpdateCoordinator.default_interval, - ) assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 58 assert_state("sensor.symo_20_dc_current", 0) assert_state("sensor.symo_20_energy_day", 10828) @@ -54,13 +49,6 @@ async def test_symo_inverter( freezer.tick(FroniusInverterUpdateCoordinator.default_interval) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 62 - await enable_all_entities( - hass, - freezer, - config_entry.entry_id, - FroniusInverterUpdateCoordinator.default_interval, - ) assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 64 # 4 additional AC entities assert_state("sensor.symo_20_dc_current", 2.19) @@ -104,6 +92,7 @@ async def test_symo_logger( assert_state("sensor.solarnet_grid_import_tariff", 0.15) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_symo_meter( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -117,15 +106,8 @@ async def test_symo_meter( assert state.state == str(expected_state) mock_responses(aioclient_mock) - config_entry = await setup_fronius_integration(hass) + await setup_fronius_integration(hass) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 26 - await enable_all_entities( - hass, - freezer, - config_entry.entry_id, - FroniusMeterUpdateCoordinator.default_interval, - ) assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 64 # states are rounded to 4 
decimals assert_state("sensor.smart_meter_63a_current_phase_1", 7.755) @@ -206,6 +188,7 @@ async def test_symo_meter_forged( ) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_symo_power_flow( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -220,15 +203,8 @@ async def test_symo_power_flow( # First test at night mock_responses(aioclient_mock, night=True) - config_entry = await setup_fronius_integration(hass) + await setup_fronius_integration(hass) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 22 - await enable_all_entities( - hass, - freezer, - config_entry.entry_id, - FroniusInverterUpdateCoordinator.default_interval, - ) assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 58 # states are rounded to 4 decimals assert_state("sensor.solarnet_energy_day", 10828) @@ -277,10 +253,13 @@ async def test_symo_power_flow( assert_state("sensor.solarnet_relative_self_consumption", 0) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_gen24( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, freezer: FrozenDateTimeFactory, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test Fronius Gen24 inverter entities.""" @@ -292,72 +271,10 @@ async def test_gen24( mock_responses(aioclient_mock, fixture_set="gen24") config_entry = await setup_fronius_integration(hass, is_logger=False) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 24 - await enable_all_entities( - hass, - freezer, - config_entry.entry_id, - FroniusMeterUpdateCoordinator.default_interval, - ) assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 58 - # inverter 1 - assert_state("sensor.inverter_name_ac_current", 0.1589) - assert_state("sensor.inverter_name_dc_current_2", 0.0754) - assert_state("sensor.inverter_name_status_code", 7) - assert_state("sensor.inverter_name_status_message", "running") - assert_state("sensor.inverter_name_dc_current", 0.0783) - assert_state("sensor.inverter_name_dc_voltage_2", 403.4312) - assert_state("sensor.inverter_name_ac_power", 37.3204) - assert_state("sensor.inverter_name_error_code", 0) - assert_state("sensor.inverter_name_dc_voltage", 411.3811) - assert_state("sensor.inverter_name_total_energy", 1530193.42) - assert_state("sensor.inverter_name_inverter_state", "Running") - assert_state("sensor.inverter_name_ac_voltage", 234.9168) - assert_state("sensor.inverter_name_frequency", 49.9917) - # meter - assert_state("sensor.smart_meter_ts_65a_3_real_energy_produced", 3863340.0) - assert_state("sensor.smart_meter_ts_65a_3_real_energy_consumed", 2013105.0) - assert_state("sensor.smart_meter_ts_65a_3_real_power", 653.1) - assert_state("sensor.smart_meter_ts_65a_3_frequency_phase_average", 49.9) - assert_state("sensor.smart_meter_ts_65a_3_meter_location", 0) - assert_state("sensor.smart_meter_ts_65a_3_meter_location_description", "feed_in") - assert_state("sensor.smart_meter_ts_65a_3_power_factor", 0.828) - assert_state("sensor.smart_meter_ts_65a_3_reactive_energy_consumed", 88221.0) - assert_state("sensor.smart_meter_ts_65a_3_real_energy_minus", 3863340.0) - assert_state("sensor.smart_meter_ts_65a_3_current_phase_2", 2.33) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_1", 235.9) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_1_2", 408.7) - assert_state("sensor.smart_meter_ts_65a_3_real_power_phase_2", 294.9) - assert_state("sensor.smart_meter_ts_65a_3_real_energy_plus", 2013105.0) - 
assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_2", 236.1) - assert_state("sensor.smart_meter_ts_65a_3_reactive_energy_produced", 1989125.0) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_3", 236.9) - assert_state("sensor.smart_meter_ts_65a_3_power_factor_phase_1", 0.441) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_2_3", 409.6) - assert_state("sensor.smart_meter_ts_65a_3_current_phase_3", 1.825) - assert_state("sensor.smart_meter_ts_65a_3_power_factor_phase_3", 0.832) - assert_state("sensor.smart_meter_ts_65a_3_apparent_power_phase_1", 243.3) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_3_1", 409.4) - assert_state("sensor.smart_meter_ts_65a_3_apparent_power_phase_2", 323.4) - assert_state("sensor.smart_meter_ts_65a_3_apparent_power_phase_3", 301.2) - assert_state("sensor.smart_meter_ts_65a_3_real_power_phase_1", 106.8) - assert_state("sensor.smart_meter_ts_65a_3_power_factor_phase_2", 0.934) - assert_state("sensor.smart_meter_ts_65a_3_real_power_phase_3", 251.3) - assert_state("sensor.smart_meter_ts_65a_3_reactive_power_phase_1", -218.6) - assert_state("sensor.smart_meter_ts_65a_3_reactive_power_phase_2", -132.8) - assert_state("sensor.smart_meter_ts_65a_3_reactive_power_phase_3", -166.0) - assert_state("sensor.smart_meter_ts_65a_3_apparent_power", 868.0) - assert_state("sensor.smart_meter_ts_65a_3_reactive_power", -517.4) - assert_state("sensor.smart_meter_ts_65a_3_current_phase_1", 1.145) - # power_flow - assert_state("sensor.solarnet_power_grid", 658.4) - assert_state("sensor.solarnet_relative_self_consumption", 100.0) - assert_state("sensor.solarnet_power_photovoltaics", 62.9481) - assert_state("sensor.solarnet_power_load", -695.6827) - assert_state("sensor.solarnet_meter_mode", "meter") - assert_state("sensor.solarnet_relative_autonomy", 5.3592) - assert_state("sensor.solarnet_total_energy", 1530193.42) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + assert_state("sensor.inverter_name_total_energy", 1530193.42) # Gen24 devices may report 0 for total energy while doing firmware updates. # This should yield "unknown" state instead of 0. 
mock_responses( @@ -375,11 +292,14 @@ async def test_gen24( assert_state("sensor.inverter_name_total_energy", "unknown") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_gen24_storage( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, device_registry: dr.DeviceRegistry, freezer: FrozenDateTimeFactory, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test Fronius Gen24 inverter with BYD battery and Ohmpilot entities.""" @@ -393,87 +313,8 @@ async def test_gen24_storage( hass, is_logger=False, unique_id="12345678" ) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 37 - await enable_all_entities( - hass, - freezer, - config_entry.entry_id, - FroniusMeterUpdateCoordinator.default_interval, - ) assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 72 - # inverter 1 - assert_state("sensor.gen24_storage_dc_current", 0.3952) - assert_state("sensor.gen24_storage_dc_voltage_2", 318.8103) - assert_state("sensor.gen24_storage_dc_current_2", 0.3564) - assert_state("sensor.gen24_storage_ac_current", 1.1087) - assert_state("sensor.gen24_storage_ac_power", 250.9093) - assert_state("sensor.gen24_storage_error_code", 0) - assert_state("sensor.gen24_storage_status_code", 7) - assert_state("sensor.gen24_storage_status_message", "running") - assert_state("sensor.gen24_storage_total_energy", 7512794.0117) - assert_state("sensor.gen24_storage_inverter_state", "Running") - assert_state("sensor.gen24_storage_dc_voltage", 419.1009) - assert_state("sensor.gen24_storage_ac_voltage", 227.354) - assert_state("sensor.gen24_storage_frequency", 49.9816) - # meter - assert_state("sensor.smart_meter_ts_65a_3_real_energy_produced", 1705128.0) - assert_state("sensor.smart_meter_ts_65a_3_real_power", 487.7) - assert_state("sensor.smart_meter_ts_65a_3_power_factor", 0.698) - assert_state("sensor.smart_meter_ts_65a_3_real_energy_consumed", 1247204.0) - assert_state("sensor.smart_meter_ts_65a_3_frequency_phase_average", 49.9) - assert_state("sensor.smart_meter_ts_65a_3_meter_location", 0) - assert_state("sensor.smart_meter_ts_65a_3_meter_location_description", "feed_in") - assert_state("sensor.smart_meter_ts_65a_3_reactive_power", -501.5) - assert_state("sensor.smart_meter_ts_65a_3_reactive_energy_produced", 3266105.0) - assert_state("sensor.smart_meter_ts_65a_3_real_power_phase_3", 19.6) - assert_state("sensor.smart_meter_ts_65a_3_current_phase_3", 0.645) - assert_state("sensor.smart_meter_ts_65a_3_real_energy_minus", 1705128.0) - assert_state("sensor.smart_meter_ts_65a_3_apparent_power_phase_2", 383.9) - assert_state("sensor.smart_meter_ts_65a_3_current_phase_1", 1.701) - assert_state("sensor.smart_meter_ts_65a_3_current_phase_2", 1.832) - assert_state("sensor.smart_meter_ts_65a_3_apparent_power_phase_1", 319.5) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_1", 229.4) - assert_state("sensor.smart_meter_ts_65a_3_real_power_phase_2", 150.0) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_3_1", 394.3) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_2", 225.6) - assert_state("sensor.smart_meter_ts_65a_3_reactive_energy_consumed", 5482.0) - assert_state("sensor.smart_meter_ts_65a_3_real_energy_plus", 1247204.0) - assert_state("sensor.smart_meter_ts_65a_3_power_factor_phase_1", 0.995) - assert_state("sensor.smart_meter_ts_65a_3_power_factor_phase_3", 0.163) - assert_state("sensor.smart_meter_ts_65a_3_power_factor_phase_2", 0.389) - assert_state("sensor.smart_meter_ts_65a_3_reactive_power_phase_1", 
-31.3) - assert_state("sensor.smart_meter_ts_65a_3_reactive_power_phase_3", -116.7) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_1_2", 396.0) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_2_3", 393.0) - assert_state("sensor.smart_meter_ts_65a_3_reactive_power_phase_2", -353.4) - assert_state("sensor.smart_meter_ts_65a_3_real_power_phase_1", 317.9) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_3", 228.3) - assert_state("sensor.smart_meter_ts_65a_3_apparent_power", 821.9) - assert_state("sensor.smart_meter_ts_65a_3_apparent_power_phase_3", 118.4) - # ohmpilot - assert_state("sensor.ohmpilot_energy_consumed", 1233295.0) - assert_state("sensor.ohmpilot_power", 0.0) - assert_state("sensor.ohmpilot_temperature", 38.9) - assert_state("sensor.ohmpilot_state_code", 0.0) - assert_state("sensor.ohmpilot_state_message", "up_and_running") - # power_flow - assert_state("sensor.solarnet_power_grid", 2274.9) - assert_state("sensor.solarnet_power_battery", 0.1591) - assert_state("sensor.solarnet_power_battery_charge", 0) - assert_state("sensor.solarnet_power_battery_discharge", 0.1591) - assert_state("sensor.solarnet_power_load", -2459.3092) - assert_state("sensor.solarnet_relative_self_consumption", 100.0) - assert_state("sensor.solarnet_power_photovoltaics", 216.4328) - assert_state("sensor.solarnet_relative_autonomy", 7.4984) - assert_state("sensor.solarnet_meter_mode", "bidirectional") - assert_state("sensor.solarnet_total_energy", 7512664.4042) - # storage - assert_state("sensor.byd_battery_box_premium_hv_dc_current", 0.0) - assert_state("sensor.byd_battery_box_premium_hv_state_of_charge", 4.6) - assert_state("sensor.byd_battery_box_premium_hv_maximum_capacity", 16588) - assert_state("sensor.byd_battery_box_premium_hv_temperature", 21.5) - assert_state("sensor.byd_battery_box_premium_hv_designed_capacity", 16588) - assert_state("sensor.byd_battery_box_premium_hv_dc_voltage", 0.0) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Devices solar_net = device_registry.async_get_device( @@ -507,11 +348,14 @@ async def test_gen24_storage( assert storage.name == "BYD Battery-Box Premium HV" +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_primo_s0( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, device_registry: dr.DeviceRegistry, freezer: FrozenDateTimeFactory, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test Fronius Primo dual inverter with S0 meter entities.""" @@ -523,64 +367,8 @@ async def test_primo_s0( mock_responses(aioclient_mock, fixture_set="primo_s0", inverter_ids=[1, 2]) config_entry = await setup_fronius_integration(hass, is_logger=True) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 31 - await enable_all_entities( - hass, - freezer, - config_entry.entry_id, - FroniusMeterUpdateCoordinator.default_interval, - ) assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 47 - # logger - assert_state("sensor.solarnet_grid_export_tariff", 1) - assert_state("sensor.solarnet_co2_factor", 0.53) - assert_state("sensor.solarnet_grid_import_tariff", 1) - # inverter 1 - assert_state("sensor.primo_5_0_1_total_energy", 17114940) - assert_state("sensor.primo_5_0_1_energy_day", 22504) - assert_state("sensor.primo_5_0_1_dc_voltage", 452.3) - assert_state("sensor.primo_5_0_1_ac_power", 862) - assert_state("sensor.primo_5_0_1_error_code", 0) - assert_state("sensor.primo_5_0_1_dc_current", 4.23) - 
assert_state("sensor.primo_5_0_1_status_code", 7) - assert_state("sensor.primo_5_0_1_status_message", "running") - assert_state("sensor.primo_5_0_1_energy_year", 7532755.5) - assert_state("sensor.primo_5_0_1_ac_current", 3.85) - assert_state("sensor.primo_5_0_1_ac_voltage", 223.9) - assert_state("sensor.primo_5_0_1_frequency", 60) - assert_state("sensor.primo_5_0_1_led_color", 2) - assert_state("sensor.primo_5_0_1_led_state", 0) - # inverter 2 - assert_state("sensor.primo_3_0_1_total_energy", 5796010) - assert_state("sensor.primo_3_0_1_energy_day", 14237) - assert_state("sensor.primo_3_0_1_dc_voltage", 329.5) - assert_state("sensor.primo_3_0_1_ac_power", 296) - assert_state("sensor.primo_3_0_1_error_code", 0) - assert_state("sensor.primo_3_0_1_dc_current", 0.97) - assert_state("sensor.primo_3_0_1_status_code", 7) - assert_state("sensor.primo_3_0_1_status_message", "running") - assert_state("sensor.primo_3_0_1_energy_year", 3596193.25) - assert_state("sensor.primo_3_0_1_ac_current", 1.32) - assert_state("sensor.primo_3_0_1_ac_voltage", 223.6) - assert_state("sensor.primo_3_0_1_frequency", 60.01) - assert_state("sensor.primo_3_0_1_led_color", 2) - assert_state("sensor.primo_3_0_1_led_state", 0) - # meter - assert_state("sensor.s0_meter_at_inverter_1_meter_location", 1) - assert_state( - "sensor.s0_meter_at_inverter_1_meter_location_description", "consumption_path" - ) - assert_state("sensor.s0_meter_at_inverter_1_real_power", -2216.7487) - # power_flow - assert_state("sensor.solarnet_power_load", -2218.9349) - assert_state("sensor.solarnet_meter_mode", "vague-meter") - assert_state("sensor.solarnet_power_photovoltaics", 1834) - assert_state("sensor.solarnet_power_grid", 384.9349) - assert_state("sensor.solarnet_relative_self_consumption", 100) - assert_state("sensor.solarnet_relative_autonomy", 82.6523) - assert_state("sensor.solarnet_total_energy", 22910919.5) - assert_state("sensor.solarnet_energy_day", 36724) - assert_state("sensor.solarnet_energy_year", 11128933.25) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Devices solar_net = device_registry.async_get_device( From 6ca5f3e82874d155c2a0cb4c34459d109bd9fa9c Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sun, 15 Dec 2024 10:42:22 -0800 Subject: [PATCH 0699/1198] Mark Google Tasks `test-before-setup` quality scale rule as `done` (#133298) --- homeassistant/components/google_tasks/quality_scale.yaml | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/homeassistant/components/google_tasks/quality_scale.yaml b/homeassistant/components/google_tasks/quality_scale.yaml index 0cecb88484f..671b744d080 100644 --- a/homeassistant/components/google_tasks/quality_scale.yaml +++ b/homeassistant/components/google_tasks/quality_scale.yaml @@ -20,12 +20,7 @@ rules: entity-unique-id: done docs-installation-instructions: done docs-removal-instructions: todo - test-before-setup: - status: todo - comment: | - The integration refreshes the access token, but does not poll the API. The - setup can be changed to request the list of todo lists in setup instead - of during platform setup. 
+ test-before-setup: done docs-high-level-description: done config-flow-test-coverage: done docs-actions: From 2003fc7ae0ffc336e94933a65915ca026b5d8145 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Sun, 15 Dec 2024 19:42:54 +0100 Subject: [PATCH 0700/1198] Adjust MQTT tests not to assert on deprecated color_temp attribute (#133198) --- tests/components/mqtt/test_light.py | 28 +++++++-------- tests/components/mqtt/test_light_json.py | 38 ++++++++++---------- tests/components/mqtt/test_light_template.py | 20 +++++------ 3 files changed, 43 insertions(+), 43 deletions(-) diff --git a/tests/components/mqtt/test_light.py b/tests/components/mqtt/test_light.py index ed4b16e3d0c..dbca09e803c 100644 --- a/tests/components/mqtt/test_light.py +++ b/tests/components/mqtt/test_light.py @@ -270,7 +270,7 @@ async def test_no_color_brightness_color_temp_hs_white_xy_if_no_topics( assert state.state == STATE_UNKNOWN assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None assert state.attributes.get("rgbw_color") is None @@ -285,7 +285,7 @@ async def test_no_color_brightness_color_temp_hs_white_xy_if_no_topics( assert state.state == STATE_ON assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None assert state.attributes.get("rgbw_color") is None @@ -350,7 +350,7 @@ async def test_controlling_state_via_topic( assert state.state == STATE_UNKNOWN assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None @@ -366,7 +366,7 @@ async def test_controlling_state_via_topic( assert state.state == STATE_ON assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None @@ -649,7 +649,7 @@ async def test_invalid_state_via_topic( assert state.attributes.get("rgbw_color") is None assert state.attributes.get("rgbww_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("xy_color") is None @@ -665,7 +665,7 @@ async def test_invalid_state_via_topic( assert state.state == STATE_ON assert state.attributes.get("rgb_color") == (255, 255, 255) assert state.attributes.get("brightness") == 255 - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") == "none" assert state.attributes.get("hs_color") == (0, 0) assert 
state.attributes.get("xy_color") == (0.323, 0.329) @@ -723,14 +723,14 @@ async def test_invalid_state_via_topic( assert state.state == STATE_ON assert state.attributes.get("rgb_color") == (255, 255, 251) assert state.attributes.get("brightness") == 255 - assert state.attributes.get("color_temp") == 153 + assert state.attributes.get("color_temp_kelvin") == 6535 assert state.attributes.get("effect") == "none" assert state.attributes.get("hs_color") == (54.768, 1.6) assert state.attributes.get("xy_color") == (0.325, 0.333) async_fire_mqtt_message(hass, "test_light_rgb/color_temp/status", "") light_state = hass.states.get("light.test") - assert light_state.attributes["color_temp"] == 153 + assert light_state.attributes["color_temp_kelvin"] == 6535 @pytest.mark.parametrize( @@ -939,7 +939,7 @@ async def test_controlling_state_via_topic_with_templates( hass, "test_light_rgb/color_temp/status", '{"hello": "300"}' ) state = hass.states.get("light.test") - assert state.attributes.get("color_temp") == 300 + assert state.attributes.get("color_temp_kelvin") == 3333 assert state.attributes.get(light.ATTR_COLOR_MODE) == "color_temp" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes @@ -1160,7 +1160,7 @@ async def test_sending_mqtt_commands_and_optimistic( state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("brightness") == 60 - assert state.attributes.get("color_temp") == 125 + assert state.attributes.get("color_temp_kelvin") == 8000 assert state.attributes.get(light.ATTR_COLOR_MODE) == "color_temp" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes @@ -2103,7 +2103,7 @@ async def test_explicit_color_mode( assert state.state == STATE_UNKNOWN assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None @@ -2119,7 +2119,7 @@ async def test_explicit_color_mode( assert state.state == STATE_ON assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None @@ -2248,7 +2248,7 @@ async def test_explicit_color_mode_templated( state = hass.states.get("light.test") assert state.state == STATE_UNKNOWN assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("hs_color") is None assert state.attributes.get(light.ATTR_COLOR_MODE) is None assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes @@ -2258,7 +2258,7 @@ async def test_explicit_color_mode_templated( state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("hs_color") is None assert state.attributes.get(light.ATTR_COLOR_MODE) == "unknown" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes diff --git 
a/tests/components/mqtt/test_light_json.py b/tests/components/mqtt/test_light_json.py index c6032678a47..988cce85653 100644 --- a/tests/components/mqtt/test_light_json.py +++ b/tests/components/mqtt/test_light_json.py @@ -456,7 +456,7 @@ async def test_turn_on_with_unknown_color_mode_optimistic( state = hass.states.get("light.test") assert state.attributes.get("color_mode") == light.ColorMode.UNKNOWN assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.state == STATE_ON # Turn on the light with brightness or color_temp attributes @@ -466,7 +466,7 @@ async def test_turn_on_with_unknown_color_mode_optimistic( state = hass.states.get("light.test") assert state.attributes.get("color_mode") == light.ColorMode.COLOR_TEMP assert state.attributes.get("brightness") == 50 - assert state.attributes.get("color_temp") == 192 + assert state.attributes.get("color_temp_kelvin") == 5208 assert state.state == STATE_ON @@ -571,7 +571,7 @@ async def test_no_color_brightness_color_temp_if_no_topics( assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == expected_features assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("xy_color") is None assert state.attributes.get("hs_color") is None @@ -582,7 +582,7 @@ async def test_no_color_brightness_color_temp_if_no_topics( assert state.state == STATE_ON assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("xy_color") is None assert state.attributes.get("hs_color") is None @@ -636,7 +636,7 @@ async def test_controlling_state_via_topic( assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == expected_features assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("xy_color") is None assert state.attributes.get("hs_color") is None @@ -657,7 +657,7 @@ async def test_controlling_state_via_topic( assert state.state == STATE_ON assert state.attributes.get("rgb_color") == (255, 255, 255) assert state.attributes.get("brightness") == 255 - assert state.attributes.get("color_temp") is None # rgb color has priority + assert state.attributes.get("color_temp_kelvin") is None # rgb color has priority assert state.attributes.get("effect") == "colorloop" assert state.attributes.get("xy_color") == (0.323, 0.329) assert state.attributes.get("hs_color") == (0.0, 0.0) @@ -681,7 +681,7 @@ async def test_controlling_state_via_topic( 249, ) # temp converted to color assert state.attributes.get("brightness") == 255 - assert state.attributes.get("color_temp") == 155 + assert state.attributes.get("color_temp_kelvin") == 6451 assert state.attributes.get("effect") == "colorloop" assert state.attributes.get("xy_color") == (0.328, 0.333) # temp converted to color assert state.attributes.get("hs_color") == (44.098, 2.43) # temp converted to color @@ -798,7 +798,7 @@ async def 
test_controlling_state_via_topic2( assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == expected_features assert state.attributes.get("brightness") is None assert state.attributes.get("color_mode") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None @@ -824,7 +824,7 @@ async def test_controlling_state_via_topic2( assert state.state == STATE_ON assert state.attributes.get("brightness") == 255 assert state.attributes.get("color_mode") == "rgbww" - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") == "colorloop" assert state.attributes.get("hs_color") == (20.552, 70.98) assert state.attributes.get("rgb_color") == (255, 136, 74) @@ -890,7 +890,7 @@ async def test_controlling_state_via_topic2( ) state = hass.states.get("light.test") assert state.attributes.get("color_mode") == "color_temp" - assert state.attributes.get("color_temp") == 155 + assert state.attributes.get("color_temp_kelvin") == 6451 # White async_fire_mqtt_message( @@ -969,7 +969,7 @@ async def test_controlling_the_state_with_legacy_color_handling( assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == expected_features assert state.attributes.get("brightness") is None assert state.attributes.get("color_mode") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None @@ -994,7 +994,7 @@ async def test_controlling_the_state_with_legacy_color_handling( assert state.state == STATE_ON assert state.attributes.get("brightness") == 255 assert state.attributes.get("color_mode") == "hs" - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") == (15.765, 100.0) assert state.attributes.get("rgb_color") == (255, 67, 0) @@ -1016,7 +1016,7 @@ async def test_controlling_the_state_with_legacy_color_handling( assert state.state == STATE_ON assert state.attributes.get("brightness") == 255 assert state.attributes.get("color_mode") == "color_temp" - assert state.attributes.get("color_temp") == 353 + assert state.attributes.get("color_temp_kelvin") == 2832 assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") == (28.125, 61.661) assert state.attributes.get("rgb_color") == (255, 171, 98) @@ -1099,7 +1099,7 @@ async def test_sending_mqtt_commands_and_optimistic( state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("color_mode") == light.ColorMode.COLOR_TEMP - assert state.attributes.get("color_temp") == 90 + assert state.attributes.get("color_temp_kelvin") == 11111 await common.async_turn_off(hass, "light.test") @@ -1227,7 +1227,7 @@ async def test_sending_mqtt_commands_and_optimistic2( assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == expected_features assert state.attributes.get("brightness") == 95 assert state.attributes.get("color_mode") == "rgb" - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") == "random" assert 
state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None @@ -2200,7 +2200,7 @@ async def test_invalid_values( assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == expected_features assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert not state.attributes.get(ATTR_ASSUMED_STATE) # Turn on the light @@ -2218,7 +2218,7 @@ async def test_invalid_values( assert state.state == STATE_ON assert state.attributes.get("rgb_color") == (255, 255, 255) assert state.attributes.get("brightness") == 255 - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None # Empty color value async_fire_mqtt_message( hass, @@ -2283,7 +2283,7 @@ async def test_invalid_values( ) state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get("color_temp") == 100 + assert state.attributes.get("color_temp_kelvin") == 10000 # Bad color temperature async_fire_mqtt_message( @@ -2297,7 +2297,7 @@ async def test_invalid_values( # Color temperature should not have changed state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get("color_temp") == 100 + assert state.attributes.get("color_temp_kelvin") == 10000 @pytest.mark.parametrize("hass_config", [DEFAULT_CONFIG]) diff --git a/tests/components/mqtt/test_light_template.py b/tests/components/mqtt/test_light_template.py index 5ffff578b5b..4d2b93ff159 100644 --- a/tests/components/mqtt/test_light_template.py +++ b/tests/components/mqtt/test_light_template.py @@ -252,7 +252,7 @@ async def test_state_change_via_topic( assert state.state == STATE_UNKNOWN assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "test_light_rgb", "on") @@ -261,7 +261,7 @@ async def test_state_change_via_topic( assert state.state == STATE_ON assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None async_fire_mqtt_message(hass, "test_light_rgb", "off") @@ -316,7 +316,7 @@ async def test_state_brightness_color_effect_temp_change_via_topic( assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None assert state.attributes.get("effect") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert not state.attributes.get(ATTR_ASSUMED_STATE) # turn on the light @@ -326,7 +326,7 @@ async def test_state_brightness_color_effect_temp_change_via_topic( assert state.state == STATE_ON assert state.attributes.get("rgb_color") == (255, 128, 64) assert state.attributes.get("brightness") == 255 - assert state.attributes.get("color_temp") is None # rgb color has priority + assert state.attributes.get("color_temp_kelvin") is None # rgb color has priority assert state.attributes.get("effect") is None # turn on the light @@ -340,7 +340,7 @@ async def test_state_brightness_color_effect_temp_change_via_topic( 255, ) # temp converted to color assert state.attributes.get("brightness") == 255 - assert 
state.attributes.get("color_temp") == 145 + assert state.attributes.get("color_temp_kelvin") == 6896 assert state.attributes.get("effect") is None assert state.attributes.get("xy_color") == (0.317, 0.317) # temp converted to color assert state.attributes.get("hs_color") == ( @@ -472,7 +472,7 @@ async def test_sending_mqtt_commands_and_optimistic( mqtt_mock.async_publish.reset_mock() state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get("color_temp") == 70 + assert state.attributes.get("color_temp_kelvin") == 14285 # Set full brightness await common.async_turn_on(hass, "light.test", brightness=255) @@ -848,7 +848,7 @@ async def test_invalid_values( assert state.state == STATE_UNKNOWN assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert not state.attributes.get(ATTR_ASSUMED_STATE) @@ -858,7 +858,7 @@ async def test_invalid_values( state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("brightness") == 255 - assert state.attributes.get("color_temp") is None # hs_color has priority + assert state.attributes.get("color_temp_kelvin") is None # hs_color has priority assert state.attributes.get("rgb_color") == (255, 255, 255) assert state.attributes.get("effect") == "rainbow" @@ -887,14 +887,14 @@ async def test_invalid_values( async_fire_mqtt_message(hass, "test_light_rgb", "on,,215,None-None-None") state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get("color_temp") == 215 + assert state.attributes.get("color_temp_kelvin") == 4651 # bad color temp values async_fire_mqtt_message(hass, "test_light_rgb", "on,,off,") # color temp should not have changed state = hass.states.get("light.test") - assert state.attributes.get("color_temp") == 215 + assert state.attributes.get("color_temp_kelvin") == 4651 # bad effect value async_fire_mqtt_message(hass, "test_light_rgb", "on,255,a-b-c,white") From 81c12db6cd5cb772ea2579e56d5c319fdab8eb15 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Sun, 15 Dec 2024 20:19:56 +0100 Subject: [PATCH 0701/1198] Fix missing Fronius data_description translation for reconfigure flow (#133304) --- homeassistant/components/fronius/strings.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/homeassistant/components/fronius/strings.json b/homeassistant/components/fronius/strings.json index 9a2b498f28c..51cb087efc2 100644 --- a/homeassistant/components/fronius/strings.json +++ b/homeassistant/components/fronius/strings.json @@ -18,6 +18,9 @@ "description": "Update your configuration information for {device}.", "data": { "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "[%key:component::fronius::config::step::user::data_description::host%]" } } }, From b77e42e8f3482a772fe84833d23dc9c985fbf6c3 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sun, 15 Dec 2024 11:23:56 -0800 Subject: [PATCH 0702/1198] Increase test coverage for google tasks init (#133252) --- .../components/google_tasks/quality_scale.yaml | 8 ++------ tests/components/google_tasks/test_init.py | 17 ++++++++++++++--- 2 files changed, 16 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/google_tasks/quality_scale.yaml b/homeassistant/components/google_tasks/quality_scale.yaml index 671b744d080..79d216709e5 100644 --- 
a/homeassistant/components/google_tasks/quality_scale.yaml +++ b/homeassistant/components/google_tasks/quality_scale.yaml @@ -31,16 +31,12 @@ rules: # Silver log-when-unavailable: done config-entry-unloading: done - reauthentication-flow: - status: todo - comment: Missing a test that reauthenticates with the wrong account + reauthentication-flow: done action-exceptions: done docs-installation-parameters: todo integration-owner: done parallel-updates: done - test-coverage: - status: todo - comment: Test coverage for __init__.py is not above 95% yet + test-coverage: done docs-configuration-parameters: todo entity-unavailable: done diff --git a/tests/components/google_tasks/test_init.py b/tests/components/google_tasks/test_init.py index 4bb2bd1eed7..9ad8c887a66 100644 --- a/tests/components/google_tasks/test_init.py +++ b/tests/components/google_tasks/test_init.py @@ -6,6 +6,7 @@ from http import HTTPStatus import time from unittest.mock import Mock +from aiohttp import ClientError from httplib2 import Response import pytest @@ -72,20 +73,28 @@ async def test_expired_token_refresh_success( @pytest.mark.parametrize( - ("expires_at", "status", "expected_state"), + ("expires_at", "status", "exc", "expected_state"), [ ( time.time() - 3600, http.HTTPStatus.UNAUTHORIZED, + None, ConfigEntryState.SETUP_ERROR, ), ( time.time() - 3600, http.HTTPStatus.INTERNAL_SERVER_ERROR, + None, + ConfigEntryState.SETUP_RETRY, + ), + ( + time.time() - 3600, + None, + ClientError("error"), ConfigEntryState.SETUP_RETRY, ), ], - ids=["unauthorized", "internal_server_error"], + ids=["unauthorized", "internal_server_error", "client_error"], ) async def test_expired_token_refresh_failure( hass: HomeAssistant, @@ -93,7 +102,8 @@ async def test_expired_token_refresh_failure( aioclient_mock: AiohttpClientMocker, config_entry: MockConfigEntry, setup_credentials: None, - status: http.HTTPStatus, + status: http.HTTPStatus | None, + exc: Exception | None, expected_state: ConfigEntryState, ) -> None: """Test failure while refreshing token with a transient error.""" @@ -102,6 +112,7 @@ async def test_expired_token_refresh_failure( aioclient_mock.post( OAUTH2_TOKEN, status=status, + exc=exc, ) await integration_setup() From 5cc8d9e10509a699c00922fd05aad47739ca3492 Mon Sep 17 00:00:00 2001 From: Simone Chemelli Date: Sun, 15 Dec 2024 14:27:19 -0500 Subject: [PATCH 0703/1198] Full test coverage for Vodafone Station button platform (#133281) --- tests/components/vodafone_station/const.py | 6 +- .../vodafone_station/test_button.py | 56 +++++++++++++++++++ 2 files changed, 60 insertions(+), 2 deletions(-) create mode 100644 tests/components/vodafone_station/test_button.py diff --git a/tests/components/vodafone_station/const.py b/tests/components/vodafone_station/const.py index 9adf32b339d..fc6bbd01398 100644 --- a/tests/components/vodafone_station/const.py +++ b/tests/components/vodafone_station/const.py @@ -29,11 +29,13 @@ DEVICE_DATA_QUERY = { mac="xx:xx:xx:xx:xx:xx", type="laptop", wifi="2.4G", - ) + ), } +SERIAL = "m123456789" + SENSOR_DATA_QUERY = { - "sys_serial_number": "M123456789", + "sys_serial_number": SERIAL, "sys_firmware_version": "XF6_4.0.05.04", "sys_bootloader_version": "0220", "sys_hardware_version": "RHG3006 v1", diff --git a/tests/components/vodafone_station/test_button.py b/tests/components/vodafone_station/test_button.py new file mode 100644 index 00000000000..8b9b0753caa --- /dev/null +++ b/tests/components/vodafone_station/test_button.py @@ -0,0 +1,56 @@ +"""Tests for Vodafone Station button platform.""" + +from 
unittest.mock import patch + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.vodafone_station.const import DOMAIN +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_registry import EntityRegistry + +from .const import DEVICE_DATA_QUERY, MOCK_USER_DATA, SENSOR_DATA_QUERY, SERIAL + +from tests.common import MockConfigEntry + + +async def test_button(hass: HomeAssistant, entity_registry: EntityRegistry) -> None: + """Test device restart button.""" + + entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) + entry.add_to_hass(hass) + + with ( + patch("aiovodafone.api.VodafoneStationSercommApi.login"), + patch( + "aiovodafone.api.VodafoneStationSercommApi.get_devices_data", + return_value=DEVICE_DATA_QUERY, + ), + patch( + "aiovodafone.api.VodafoneStationSercommApi.get_sensor_data", + return_value=SENSOR_DATA_QUERY, + ), + patch( + "aiovodafone.api.VodafoneStationSercommApi.restart_router", + ) as mock_router_restart, + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + entity_id = f"button.vodafone_station_{SERIAL}_restart" + + # restart button + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_UNKNOWN + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == f"{SERIAL}_reboot" + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + assert mock_router_restart.call_count == 1 From 89387760d3b6eb46e0c8001b87ff0eb1564758b0 Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Sun, 15 Dec 2024 20:44:28 +0100 Subject: [PATCH 0704/1198] Cleanup tests for tedee (#133306) --- tests/components/tedee/__init__.py | 13 + tests/components/tedee/conftest.py | 6 +- .../tedee/snapshots/test_binary_sensor.ambr | 278 +++++++++++++++--- .../components/tedee/snapshots/test_init.ambr | 32 ++ .../components/tedee/snapshots/test_lock.ambr | 173 ++++++----- .../tedee/snapshots/test_sensor.ambr | 140 +++++++-- tests/components/tedee/test_binary_sensor.py | 19 +- tests/components/tedee/test_init.py | 52 ++-- tests/components/tedee/test_lock.py | 54 ++-- tests/components/tedee/test_sensor.py | 21 +- 10 files changed, 567 insertions(+), 221 deletions(-) diff --git a/tests/components/tedee/__init__.py b/tests/components/tedee/__init__.py index a72b1fbdd6a..0bff030d2df 100644 --- a/tests/components/tedee/__init__.py +++ b/tests/components/tedee/__init__.py @@ -1 +1,14 @@ """Add tests for Tedee components.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Set up the Tedee integration for testing.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/tedee/conftest.py b/tests/components/tedee/conftest.py index 8e028cb5300..d659560ee61 100644 --- a/tests/components/tedee/conftest.py +++ b/tests/components/tedee/conftest.py @@ -14,6 +14,8 @@ from homeassistant.components.tedee.const import CONF_LOCAL_ACCESS_TOKEN, DOMAIN from homeassistant.const import CONF_HOST, CONF_WEBHOOK_ID from homeassistant.core import HomeAssistant +from .
import setup_integration + from tests.common import MockConfigEntry, load_fixture WEBHOOK_ID = "bq33efxmdi3vxy55q2wbnudbra7iv8mjrq9x0gea33g4zqtd87093pwveg8xcb33" @@ -84,8 +86,6 @@ async def init_integration( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_tedee: MagicMock ) -> MockConfigEntry: """Set up the Tedee integration for testing.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) return mock_config_entry diff --git a/tests/components/tedee/snapshots/test_binary_sensor.ambr b/tests/components/tedee/snapshots/test_binary_sensor.ambr index 385e4ac9bc1..e3238dacda1 100644 --- a/tests/components/tedee/snapshots/test_binary_sensor.ambr +++ b/tests/components/tedee/snapshots/test_binary_sensor.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_binary_sensors[entry-charging] +# name: test_binary_sensors[binary_sensor.lock_1a2b_charging-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -32,7 +32,21 @@ 'unit_of_measurement': None, }) # --- -# name: test_binary_sensors[entry-lock_uncalibrated] +# name: test_binary_sensors[binary_sensor.lock_1a2b_charging-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery_charging', + 'friendly_name': 'Lock-1A2B Charging', + }), + 'context': , + 'entity_id': 'binary_sensor.lock_1a2b_charging', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_1a2b_lock_uncalibrated-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -65,7 +79,21 @@ 'unit_of_measurement': None, }) # --- -# name: test_binary_sensors[entry-pullspring_enabled] +# name: test_binary_sensors[binary_sensor.lock_1a2b_lock_uncalibrated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Lock-1A2B Lock uncalibrated', + }), + 'context': , + 'entity_id': 'binary_sensor.lock_1a2b_lock_uncalibrated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_1a2b_pullspring_enabled-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -98,7 +126,20 @@ 'unit_of_measurement': None, }) # --- -# name: test_binary_sensors[entry-semi_locked] +# name: test_binary_sensors[binary_sensor.lock_1a2b_pullspring_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Lock-1A2B Pullspring enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.lock_1a2b_pullspring_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_1a2b_semi_locked-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -131,48 +172,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_binary_sensors[state-charging] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery_charging', - 'friendly_name': 'Lock-1A2B Charging', - }), - 'context': , - 'entity_id': 'binary_sensor.lock_1a2b_charging', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[state-lock_uncalibrated] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'Lock-1A2B Lock uncalibrated', - }), - 'context': , - 'entity_id': 'binary_sensor.lock_1a2b_lock_uncalibrated', - 'last_changed': , - 
'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[state-pullspring_enabled] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Lock-1A2B Pullspring enabled', - }), - 'context': , - 'entity_id': 'binary_sensor.lock_1a2b_pullspring_enabled', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[state-semi_locked] +# name: test_binary_sensors[binary_sensor.lock_1a2b_semi_locked-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Lock-1A2B Semi locked', @@ -185,3 +185,189 @@ 'state': 'off', }) # --- +# name: test_binary_sensors[binary_sensor.lock_2c3d_charging-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.lock_2c3d_charging', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charging', + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '98765-charging', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_2c3d_charging-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery_charging', + 'friendly_name': 'Lock-2C3D Charging', + }), + 'context': , + 'entity_id': 'binary_sensor.lock_2c3d_charging', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_2c3d_lock_uncalibrated-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.lock_2c3d_lock_uncalibrated', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Lock uncalibrated', + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'uncalibrated', + 'unique_id': '98765-uncalibrated', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_2c3d_lock_uncalibrated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Lock-2C3D Lock uncalibrated', + }), + 'context': , + 'entity_id': 'binary_sensor.lock_2c3d_lock_uncalibrated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_2c3d_pullspring_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.lock_2c3d_pullspring_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 
'original_name': 'Pullspring enabled', + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pullspring_enabled', + 'unique_id': '98765-pullspring_enabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_2c3d_pullspring_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Lock-2C3D Pullspring enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.lock_2c3d_pullspring_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_2c3d_semi_locked-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.lock_2c3d_semi_locked', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Semi locked', + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'semi_locked', + 'unique_id': '98765-semi_locked', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_2c3d_semi_locked-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Lock-2C3D Semi locked', + }), + 'context': , + 'entity_id': 'binary_sensor.lock_2c3d_semi_locked', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/tedee/snapshots/test_init.ambr b/tests/components/tedee/snapshots/test_init.ambr index 20d6bfcdc2a..af559f561b2 100644 --- a/tests/components/tedee/snapshots/test_init.ambr +++ b/tests/components/tedee/snapshots/test_init.ambr @@ -31,3 +31,35 @@ 'via_device_id': None, }) # --- +# name: test_lock_device + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'tedee', + '12345', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Tedee', + 'model': 'Tedee PRO', + 'model_id': 'Tedee PRO', + 'name': 'Lock-1A2B', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': , + }) +# --- diff --git a/tests/components/tedee/snapshots/test_lock.ambr b/tests/components/tedee/snapshots/test_lock.ambr index 3eba6f3f0af..cca988663d2 100644 --- a/tests/components/tedee/snapshots/test_lock.ambr +++ b/tests/components/tedee/snapshots/test_lock.ambr @@ -1,83 +1,4 @@ # serializer version: 1 -# name: test_lock - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Lock-1A2B', - 'supported_features': , - }), - 'context': , - 'entity_id': 'lock.lock_1a2b', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unlocked', - }) -# --- -# name: test_lock.1 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'lock', - 'entity_category': None, - 'entity_id': 'lock.lock_1a2b', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': 
None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'tedee', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '12345-lock', - 'unit_of_measurement': None, - }) -# --- -# name: test_lock.2 - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'tedee', - '12345', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Tedee', - 'model': 'Tedee PRO', - 'model_id': 'Tedee PRO', - 'name': 'Lock-1A2B', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': , - }) -# --- # name: test_lock_without_pullspring StateSnapshot({ 'attributes': ReadOnlyDict({ @@ -157,3 +78,97 @@ 'via_device_id': , }) # --- +# name: test_locks[lock.lock_1a2b-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'lock', + 'entity_category': None, + 'entity_id': 'lock.lock_1a2b', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '12345-lock', + 'unit_of_measurement': None, + }) +# --- +# name: test_locks[lock.lock_1a2b-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Lock-1A2B', + 'supported_features': , + }), + 'context': , + 'entity_id': 'lock.lock_1a2b', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unlocked', + }) +# --- +# name: test_locks[lock.lock_2c3d-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'lock', + 'entity_category': None, + 'entity_id': 'lock.lock_2c3d', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '98765-lock', + 'unit_of_measurement': None, + }) +# --- +# name: test_locks[lock.lock_2c3d-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Lock-2C3D', + 'supported_features': , + }), + 'context': , + 'entity_id': 'lock.lock_2c3d', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unlocked', + }) +# --- diff --git a/tests/components/tedee/snapshots/test_sensor.ambr b/tests/components/tedee/snapshots/test_sensor.ambr index d5f4c8361c3..297fe9b0d37 100644 --- a/tests/components/tedee/snapshots/test_sensor.ambr +++ b/tests/components/tedee/snapshots/test_sensor.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_sensors[entry-battery] +# name: test_sensors[sensor.lock_1a2b_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -34,7 +34,23 @@ 
'unit_of_measurement': '%', }) # --- -# name: test_sensors[entry-pullspring_duration] +# name: test_sensors[sensor.lock_1a2b_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Lock-1A2B Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.lock_1a2b_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '70', + }) +# --- +# name: test_sensors[sensor.lock_1a2b_pullspring_duration-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -69,23 +85,7 @@ 'unit_of_measurement': , }) # --- -# name: test_sensors[state-battery] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Lock-1A2B Battery', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.lock_1a2b_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '70', - }) -# --- -# name: test_sensors[state-pullspring_duration] +# name: test_sensors[sensor.lock_1a2b_pullspring_duration-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', @@ -101,3 +101,105 @@ 'state': '2', }) # --- +# name: test_sensors[sensor.lock_2c3d_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.lock_2c3d_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '98765-battery_sensor', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.lock_2c3d_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Lock-2C3D Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.lock_2c3d_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '70', + }) +# --- +# name: test_sensors[sensor.lock_2c3d_pullspring_duration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.lock_2c3d_pullspring_duration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pullspring duration', + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pullspring_duration', + 'unique_id': '98765-pullspring_duration', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.lock_2c3d_pullspring_duration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Lock-2C3D Pullspring duration', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.lock_2c3d_pullspring_duration', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '0', + }) +# --- diff --git a/tests/components/tedee/test_binary_sensor.py b/tests/components/tedee/test_binary_sensor.py index dfe70e7a2ea..ccfd12440ea 100644 --- a/tests/components/tedee/test_binary_sensor.py +++ b/tests/components/tedee/test_binary_sensor.py @@ -1,19 +1,20 @@ """Tests for the Tedee Binary Sensors.""" from datetime import timedelta -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch from aiotedee import TedeeLock from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from tests.common import async_fire_time_changed +from . import setup_integration -pytestmark = pytest.mark.usefixtures("init_integration") +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform BINARY_SENSORS = ("charging", "semi_locked", "pullspring_enabled", "lock_uncalibrated") @@ -22,21 +23,19 @@ BINARY_SENSORS = ("charging", "semi_locked", "pullspring_enabled", "lock_uncalib async def test_binary_sensors( hass: HomeAssistant, mock_tedee: MagicMock, + mock_config_entry: MockConfigEntry, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, ) -> None: """Test tedee binary sensor.""" - for key in BINARY_SENSORS: - state = hass.states.get(f"binary_sensor.lock_1a2b_{key}") - assert state - assert state == snapshot(name=f"state-{key}") + with patch("homeassistant.components.tedee.PLATFORMS", [Platform.BINARY_SENSOR]): + await setup_integration(hass, mock_config_entry) - entry = entity_registry.async_get(state.entity_id) - assert entry - assert entry == snapshot(name=f"entry-{key}") + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) @pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.usefixtures("init_integration") async def test_new_binary_sensors( hass: HomeAssistant, mock_tedee: MagicMock, diff --git a/tests/components/tedee/test_init.py b/tests/components/tedee/test_init.py index 63701bb1788..71bf5262f00 100644 --- a/tests/components/tedee/test_init.py +++ b/tests/components/tedee/test_init.py @@ -20,6 +20,7 @@ from homeassistant.const import CONF_HOST, CONF_WEBHOOK_ID, EVENT_HOMEASSISTANT_ from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr +from . 
import setup_integration from .conftest import WEBHOOK_ID from tests.common import MockConfigEntry @@ -32,9 +33,7 @@ async def test_load_unload_config_entry( mock_tedee: MagicMock, ) -> None: """Test loading and unloading the integration.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) assert mock_config_entry.state is ConfigEntryState.LOADED @@ -56,9 +55,7 @@ async def test_config_entry_not_ready( """Test the Tedee configuration entry not ready.""" mock_tedee.get_locks.side_effect = side_effect - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) assert len(mock_tedee.get_locks.mock_calls) == 1 assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY @@ -70,9 +67,7 @@ async def test_cleanup_on_shutdown( mock_tedee: MagicMock, ) -> None: """Test the webhook is cleaned up on shutdown.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) assert mock_config_entry.state is ConfigEntryState.LOADED @@ -88,9 +83,7 @@ async def test_webhook_cleanup_errors( caplog: pytest.LogCaptureFixture, ) -> None: """Test the webhook is cleaned up on shutdown.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) assert mock_config_entry.state is ConfigEntryState.LOADED @@ -110,9 +103,7 @@ async def test_webhook_registration_errors( ) -> None: """Test the webhook is cleaned up on shutdown.""" mock_tedee.register_webhook.side_effect = TedeeWebhookException("") - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) assert mock_config_entry.state is ConfigEntryState.LOADED @@ -128,9 +119,7 @@ async def test_webhook_registration_cleanup_errors( ) -> None: """Test the errors during webhook cleanup during registration.""" mock_tedee.cleanup_webhooks_by_host.side_effect = TedeeWebhookException("") - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) assert mock_config_entry.state is ConfigEntryState.LOADED @@ -138,6 +127,21 @@ async def test_webhook_registration_cleanup_errors( assert "Failed to cleanup Tedee webhooks by host:" in caplog.text +async def test_lock_device( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_tedee: MagicMock, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Ensure the lock device is registered.""" + await setup_integration(hass, mock_config_entry) + + device = device_registry.async_get_device({(mock_config_entry.domain, "12345")}) + assert device + assert device == snapshot + + async def test_bridge_device( hass: HomeAssistant, mock_config_entry: MockConfigEntry, @@ -146,9 +150,7 @@ async def test_bridge_device( snapshot: SnapshotAssertion, ) -> None: """Ensure the bridge device is registered.""" - mock_config_entry.add_to_hass(hass) - await 
hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) device = device_registry.async_get_device( {(mock_config_entry.domain, mock_tedee.get_local_bridge.return_value.serial)} @@ -192,9 +194,7 @@ async def test_webhook_post( ) -> None: """Test webhook callback.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) client = await hass_client_no_auth() webhook_url = async_generate_url(hass, WEBHOOK_ID) @@ -241,9 +241,7 @@ async def test_migration( "homeassistant.components.tedee.webhook_generate_id", return_value=WEBHOOK_ID, ): - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) assert mock_config_entry.version == 1 assert mock_config_entry.minor_version == 2 diff --git a/tests/components/tedee/test_lock.py b/tests/components/tedee/test_lock.py index d84acb212ea..e0fe9673a46 100644 --- a/tests/components/tedee/test_lock.py +++ b/tests/components/tedee/test_lock.py @@ -1,7 +1,7 @@ """Tests for tedee lock.""" from datetime import timedelta -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch from urllib.parse import urlparse from aiotedee import TedeeLock, TedeeLockState @@ -22,43 +22,44 @@ from homeassistant.components.lock import ( LockState, ) from homeassistant.components.webhook import async_generate_url -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_UNAVAILABLE, + STATE_UNKNOWN, + Platform, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceNotSupported from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component +from . 
import setup_integration from .conftest import WEBHOOK_ID -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform from tests.typing import ClientSessionGenerator -pytestmark = pytest.mark.usefixtures("init_integration") - -async def test_lock( +async def test_locks( hass: HomeAssistant, mock_tedee: MagicMock, - device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, +) -> None: + """Test tedee locks.""" + with patch("homeassistant.components.tedee.PLATFORMS", [Platform.LOCK]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.usefixtures("init_integration") +async def test_lock_service_calls( + hass: HomeAssistant, + mock_tedee: MagicMock, ) -> None: """Test the tedee lock.""" - mock_tedee.lock.return_value = None - mock_tedee.unlock.return_value = None - mock_tedee.open.return_value = None - - state = hass.states.get("lock.lock_1a2b") - assert state - assert state == snapshot - - entry = entity_registry.async_get(state.entity_id) - assert entry - assert entry == snapshot - assert entry.device_id - - device = device_registry.async_get(entry.device_id) - assert device == snapshot await hass.services.async_call( LOCK_DOMAIN, @@ -106,6 +107,7 @@ async def test_lock( assert state.state == LockState.UNLOCKING +@pytest.mark.usefixtures("init_integration") async def test_lock_without_pullspring( hass: HomeAssistant, mock_tedee: MagicMock, @@ -116,9 +118,6 @@ async def test_lock_without_pullspring( """Test the tedee lock without pullspring.""" # Fetch translations await async_setup_component(hass, "homeassistant", {}) - mock_tedee.lock.return_value = None - mock_tedee.unlock.return_value = None - mock_tedee.open.return_value = None state = hass.states.get("lock.lock_2c3d") assert state @@ -149,6 +148,7 @@ async def test_lock_without_pullspring( assert len(mock_tedee.open.mock_calls) == 0 +@pytest.mark.usefixtures("init_integration") async def test_lock_errors( hass: HomeAssistant, mock_tedee: MagicMock, @@ -191,6 +191,7 @@ async def test_lock_errors( assert exc_info.value.translation_key == "open_failed" +@pytest.mark.usefixtures("init_integration") @pytest.mark.parametrize( "side_effect", [ @@ -217,6 +218,7 @@ async def test_update_failed( assert state.state == STATE_UNAVAILABLE +@pytest.mark.usefixtures("init_integration") async def test_cleanup_removed_locks( hass: HomeAssistant, mock_tedee: MagicMock, @@ -247,6 +249,7 @@ async def test_cleanup_removed_locks( assert "Lock-1A2B" not in locks +@pytest.mark.usefixtures("init_integration") async def test_new_lock( hass: HomeAssistant, mock_tedee: MagicMock, @@ -275,6 +278,7 @@ async def test_new_lock( assert state +@pytest.mark.usefixtures("init_integration") @pytest.mark.parametrize( ("lib_state", "expected_state"), [ diff --git a/tests/components/tedee/test_sensor.py b/tests/components/tedee/test_sensor.py index ddbcd5086af..3c03d340100 100644 --- a/tests/components/tedee/test_sensor.py +++ b/tests/components/tedee/test_sensor.py @@ -1,20 +1,20 @@ """Tests for the Tedee Sensors.""" from datetime import timedelta -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch from aiotedee import TedeeLock from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion +from homeassistant.const import Platform 
from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from tests.common import async_fire_time_changed - -pytestmark = pytest.mark.usefixtures("init_integration") +from . import setup_integration +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform SENSORS = ( "battery", @@ -25,21 +25,18 @@ SENSORS = ( async def test_sensors( hass: HomeAssistant, mock_tedee: MagicMock, + mock_config_entry: MockConfigEntry, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, ) -> None: """Test tedee sensors.""" - for key in SENSORS: - state = hass.states.get(f"sensor.lock_1a2b_{key}") - assert state - assert state == snapshot(name=f"state-{key}") + with patch("homeassistant.components.tedee.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) - entry = entity_registry.async_get(state.entity_id) - assert entry - assert entry.device_id - assert entry == snapshot(name=f"entry-{key}") + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) +@pytest.mark.usefixtures("init_integration") async def test_new_sensors( hass: HomeAssistant, mock_tedee: MagicMock, From 0030a970a19bbb430861a39bc3cd853bd0ff26bc Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Sun, 15 Dec 2024 21:31:18 +0100 Subject: [PATCH 0705/1198] Split coordinator in lamarzocco (#133208) --- .../components/lamarzocco/__init__.py | 34 +++-- .../components/lamarzocco/binary_sensor.py | 2 +- homeassistant/components/lamarzocco/button.py | 2 +- .../components/lamarzocco/calendar.py | 2 +- .../components/lamarzocco/coordinator.py | 130 +++++++++--------- .../components/lamarzocco/diagnostics.py | 2 +- homeassistant/components/lamarzocco/number.py | 2 +- homeassistant/components/lamarzocco/select.py | 2 +- homeassistant/components/lamarzocco/sensor.py | 56 +++++--- homeassistant/components/lamarzocco/switch.py | 2 +- homeassistant/components/lamarzocco/update.py | 2 +- tests/components/lamarzocco/conftest.py | 2 +- tests/components/lamarzocco/test_init.py | 4 +- 13 files changed, 138 insertions(+), 104 deletions(-) diff --git a/homeassistant/components/lamarzocco/__init__.py b/homeassistant/components/lamarzocco/__init__.py index b3021ef1543..d20616e1940 100644 --- a/homeassistant/components/lamarzocco/__init__.py +++ b/homeassistant/components/lamarzocco/__init__.py @@ -7,6 +7,7 @@ from pylamarzocco.clients.bluetooth import LaMarzoccoBluetoothClient from pylamarzocco.clients.cloud import LaMarzoccoCloudClient from pylamarzocco.clients.local import LaMarzoccoLocalClient from pylamarzocco.const import BT_MODEL_PREFIXES, FirmwareType +from pylamarzocco.devices.machine import LaMarzoccoMachine from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful from homeassistant.components.bluetooth import async_discovered_service_info @@ -25,7 +26,13 @@ from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.aiohttp_client import async_create_clientsession from .const import CONF_USE_BLUETOOTH, DOMAIN -from .coordinator import LaMarzoccoConfigEntry, LaMarzoccoUpdateCoordinator +from .coordinator import ( + LaMarzoccoConfigEntry, + LaMarzoccoConfigUpdateCoordinator, + LaMarzoccoFirmwareUpdateCoordinator, + LaMarzoccoRuntimeData, + LaMarzoccoStatisticsUpdateCoordinator, +) PLATFORMS = [ Platform.BINARY_SENSOR, @@ -99,18 +106,29 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) - address_or_ble_device=entry.data[CONF_MAC], ) - coordinator = 
LaMarzoccoUpdateCoordinator( - hass=hass, - entry=entry, - local_client=local_client, + device = LaMarzoccoMachine( + model=entry.data[CONF_MODEL], + serial_number=entry.unique_id, + name=entry.data[CONF_NAME], cloud_client=cloud_client, + local_client=local_client, bluetooth_client=bluetooth_client, ) - await coordinator.async_config_entry_first_refresh() - entry.runtime_data = coordinator + coordinators = LaMarzoccoRuntimeData( + LaMarzoccoConfigUpdateCoordinator(hass, entry, device, local_client), + LaMarzoccoFirmwareUpdateCoordinator(hass, entry, device), + LaMarzoccoStatisticsUpdateCoordinator(hass, entry, device), + ) - gateway_version = coordinator.device.firmware[FirmwareType.GATEWAY].current_version + # API does not like concurrent requests, so no asyncio.gather here + await coordinators.config_coordinator.async_config_entry_first_refresh() + await coordinators.firmware_coordinator.async_config_entry_first_refresh() + await coordinators.statistics_coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinators + + gateway_version = device.firmware[FirmwareType.GATEWAY].current_version if version.parse(gateway_version) < version.parse("v3.4-rc5"): # incompatible gateway firmware, create an issue ir.async_create_issue( diff --git a/homeassistant/components/lamarzocco/binary_sensor.py b/homeassistant/components/lamarzocco/binary_sensor.py index 0e11c54d896..3d11992e7c1 100644 --- a/homeassistant/components/lamarzocco/binary_sensor.py +++ b/homeassistant/components/lamarzocco/binary_sensor.py @@ -64,7 +64,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up binary sensor entities.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.config_coordinator async_add_entities( LaMarzoccoBinarySensorEntity(coordinator, description) diff --git a/homeassistant/components/lamarzocco/button.py b/homeassistant/components/lamarzocco/button.py index dabf01d817d..22e92f656ff 100644 --- a/homeassistant/components/lamarzocco/button.py +++ b/homeassistant/components/lamarzocco/button.py @@ -57,7 +57,7 @@ async def async_setup_entry( ) -> None: """Set up button entities.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.config_coordinator async_add_entities( LaMarzoccoButtonEntity(coordinator, description) for description in ENTITIES diff --git a/homeassistant/components/lamarzocco/calendar.py b/homeassistant/components/lamarzocco/calendar.py index 46bfe875c9f..1dcc7c324ac 100644 --- a/homeassistant/components/lamarzocco/calendar.py +++ b/homeassistant/components/lamarzocco/calendar.py @@ -36,7 +36,7 @@ async def async_setup_entry( ) -> None: """Set up switch entities and services.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.config_coordinator async_add_entities( LaMarzoccoCalendarEntity(coordinator, CALENDAR_KEY, wake_up_sleep_entry) for wake_up_sleep_entry in coordinator.device.config.wake_up_sleep_entries.values() diff --git a/homeassistant/components/lamarzocco/coordinator.py b/homeassistant/components/lamarzocco/coordinator.py index 1281b11db02..aca84fc4660 100644 --- a/homeassistant/components/lamarzocco/coordinator.py +++ b/homeassistant/components/lamarzocco/coordinator.py @@ -2,20 +2,18 @@ from __future__ import annotations -from collections.abc import Callable, Coroutine +from abc import abstractmethod +from dataclasses import dataclass from datetime import timedelta import logging -from time import time from typing import Any -from 
pylamarzocco.clients.bluetooth import LaMarzoccoBluetoothClient -from pylamarzocco.clients.cloud import LaMarzoccoCloudClient from pylamarzocco.clients.local import LaMarzoccoLocalClient from pylamarzocco.devices.machine import LaMarzoccoMachine from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_MODEL, CONF_NAME, EVENT_HOMEASSISTANT_STOP +from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -23,26 +21,35 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda from .const import DOMAIN SCAN_INTERVAL = timedelta(seconds=30) -FIRMWARE_UPDATE_INTERVAL = 3600 -STATISTICS_UPDATE_INTERVAL = 300 - +FIRMWARE_UPDATE_INTERVAL = timedelta(hours=1) +STATISTICS_UPDATE_INTERVAL = timedelta(minutes=5) _LOGGER = logging.getLogger(__name__) -type LaMarzoccoConfigEntry = ConfigEntry[LaMarzoccoUpdateCoordinator] + +@dataclass +class LaMarzoccoRuntimeData: + """Runtime data for La Marzocco.""" + + config_coordinator: LaMarzoccoConfigUpdateCoordinator + firmware_coordinator: LaMarzoccoFirmwareUpdateCoordinator + statistics_coordinator: LaMarzoccoStatisticsUpdateCoordinator + + +type LaMarzoccoConfigEntry = ConfigEntry[LaMarzoccoRuntimeData] class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]): - """Class to handle fetching data from the La Marzocco API centrally.""" + """Base class for La Marzocco coordinators.""" + _default_update_interval = SCAN_INTERVAL config_entry: LaMarzoccoConfigEntry def __init__( self, hass: HomeAssistant, entry: LaMarzoccoConfigEntry, - cloud_client: LaMarzoccoCloudClient, - local_client: LaMarzoccoLocalClient | None, - bluetooth_client: LaMarzoccoBluetoothClient | None, + device: LaMarzoccoMachine, + local_client: LaMarzoccoLocalClient | None = None, ) -> None: """Initialize coordinator.""" super().__init__( @@ -50,24 +57,35 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]): _LOGGER, config_entry=entry, name=DOMAIN, - update_interval=SCAN_INTERVAL, + update_interval=self._default_update_interval, ) + self.device = device self.local_connection_configured = local_client is not None - - assert self.config_entry.unique_id - self.device = LaMarzoccoMachine( - model=self.config_entry.data[CONF_MODEL], - serial_number=self.config_entry.unique_id, - name=self.config_entry.data[CONF_NAME], - cloud_client=cloud_client, - local_client=local_client, - bluetooth_client=bluetooth_client, - ) - - self._last_firmware_data_update: float | None = None - self._last_statistics_data_update: float | None = None self._local_client = local_client + async def _async_update_data(self) -> None: + """Do the data update.""" + try: + await self._internal_async_update_data() + except AuthFail as ex: + _LOGGER.debug("Authentication failed", exc_info=True) + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, translation_key="authentication_failed" + ) from ex + except RequestNotSuccessful as ex: + _LOGGER.debug(ex, exc_info=True) + raise UpdateFailed( + translation_domain=DOMAIN, translation_key="api_error" + ) from ex + + @abstractmethod + async def _internal_async_update_data(self) -> None: + """Actual data update logic.""" + + +class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator): + """Class to handle fetching data from the La Marzocco 
API centrally.""" + async def _async_setup(self) -> None: """Set up the coordinator.""" if self._local_client is not None: @@ -96,41 +114,29 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]): ) self.config_entry.async_on_unload(websocket_close) - async def _async_update_data(self) -> None: + async def _internal_async_update_data(self) -> None: """Fetch data from API endpoint.""" - await self._async_handle_request(self.device.get_config) - - if ( - self._last_firmware_data_update is None - or (self._last_firmware_data_update + FIRMWARE_UPDATE_INTERVAL) < time() - ): - await self._async_handle_request(self.device.get_firmware) - self._last_firmware_data_update = time() - - if ( - self._last_statistics_data_update is None - or (self._last_statistics_data_update + STATISTICS_UPDATE_INTERVAL) < time() - ): - await self._async_handle_request(self.device.get_statistics) - self._last_statistics_data_update = time() - + await self.device.get_config() _LOGGER.debug("Current status: %s", str(self.device.config)) - async def _async_handle_request[**_P]( - self, - func: Callable[_P, Coroutine[None, None, None]], - *args: _P.args, - **kwargs: _P.kwargs, - ) -> None: - try: - await func(*args, **kwargs) - except AuthFail as ex: - _LOGGER.debug("Authentication failed", exc_info=True) - raise ConfigEntryAuthFailed( - translation_domain=DOMAIN, translation_key="authentication_failed" - ) from ex - except RequestNotSuccessful as ex: - _LOGGER.debug(ex, exc_info=True) - raise UpdateFailed( - translation_domain=DOMAIN, translation_key="api_error" - ) from ex + +class LaMarzoccoFirmwareUpdateCoordinator(LaMarzoccoUpdateCoordinator): + """Coordinator for La Marzocco firmware.""" + + _default_update_interval = FIRMWARE_UPDATE_INTERVAL + + async def _internal_async_update_data(self) -> None: + """Fetch data from API endpoint.""" + await self.device.get_firmware() + _LOGGER.debug("Current firmware: %s", str(self.device.firmware)) + + +class LaMarzoccoStatisticsUpdateCoordinator(LaMarzoccoUpdateCoordinator): + """Coordinator for La Marzocco statistics.""" + + _default_update_interval = STATISTICS_UPDATE_INTERVAL + + async def _internal_async_update_data(self) -> None: + """Fetch data from API endpoint.""" + await self.device.get_statistics() + _LOGGER.debug("Current statistics: %s", str(self.device.statistics)) diff --git a/homeassistant/components/lamarzocco/diagnostics.py b/homeassistant/components/lamarzocco/diagnostics.py index 43ae51ee192..204a8b7142a 100644 --- a/homeassistant/components/lamarzocco/diagnostics.py +++ b/homeassistant/components/lamarzocco/diagnostics.py @@ -31,7 +31,7 @@ async def async_get_config_entry_diagnostics( entry: LaMarzoccoConfigEntry, ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.config_coordinator device = coordinator.device # collect all data sources diagnostics_data = DiagnosticsData( diff --git a/homeassistant/components/lamarzocco/number.py b/homeassistant/components/lamarzocco/number.py index feeb7e4a282..a1389769194 100644 --- a/homeassistant/components/lamarzocco/number.py +++ b/homeassistant/components/lamarzocco/number.py @@ -210,7 +210,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up number entities.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.config_coordinator entities: list[NumberEntity] = [ LaMarzoccoNumberEntity(coordinator, description) for description in ENTITIES diff --git 
a/homeassistant/components/lamarzocco/select.py b/homeassistant/components/lamarzocco/select.py index e6b5f9a3d94..595c157b823 100644 --- a/homeassistant/components/lamarzocco/select.py +++ b/homeassistant/components/lamarzocco/select.py @@ -107,7 +107,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up select entities.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.config_coordinator async_add_entities( LaMarzoccoSelectEntity(coordinator, description) diff --git a/homeassistant/components/lamarzocco/sensor.py b/homeassistant/components/lamarzocco/sensor.py index 6dda6e69a02..8d57c1b8403 100644 --- a/homeassistant/components/lamarzocco/sensor.py +++ b/homeassistant/components/lamarzocco/sensor.py @@ -33,24 +33,6 @@ class LaMarzoccoSensorEntityDescription( ENTITIES: tuple[LaMarzoccoSensorEntityDescription, ...] = ( - LaMarzoccoSensorEntityDescription( - key="drink_stats_coffee", - translation_key="drink_stats_coffee", - native_unit_of_measurement="drinks", - state_class=SensorStateClass.TOTAL_INCREASING, - value_fn=lambda device: device.statistics.drink_stats.get(PhysicalKey.A, 0), - available_fn=lambda device: len(device.statistics.drink_stats) > 0, - entity_category=EntityCategory.DIAGNOSTIC, - ), - LaMarzoccoSensorEntityDescription( - key="drink_stats_flushing", - translation_key="drink_stats_flushing", - native_unit_of_measurement="drinks", - state_class=SensorStateClass.TOTAL_INCREASING, - value_fn=lambda device: device.statistics.total_flushes, - available_fn=lambda device: len(device.statistics.drink_stats) > 0, - entity_category=EntityCategory.DIAGNOSTIC, - ), LaMarzoccoSensorEntityDescription( key="shot_timer", translation_key="shot_timer", @@ -88,6 +70,27 @@ ENTITIES: tuple[LaMarzoccoSensorEntityDescription, ...] = ( ), ) +STATISTIC_ENTITIES: tuple[LaMarzoccoSensorEntityDescription, ...] 
= ( + LaMarzoccoSensorEntityDescription( + key="drink_stats_coffee", + translation_key="drink_stats_coffee", + native_unit_of_measurement="drinks", + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda device: device.statistics.drink_stats.get(PhysicalKey.A, 0), + available_fn=lambda device: len(device.statistics.drink_stats) > 0, + entity_category=EntityCategory.DIAGNOSTIC, + ), + LaMarzoccoSensorEntityDescription( + key="drink_stats_flushing", + translation_key="drink_stats_flushing", + native_unit_of_measurement="drinks", + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda device: device.statistics.total_flushes, + available_fn=lambda device: len(device.statistics.drink_stats) > 0, + entity_category=EntityCategory.DIAGNOSTIC, + ), +) + async def async_setup_entry( hass: HomeAssistant, @@ -95,14 +98,23 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up sensor entities.""" - coordinator = entry.runtime_data + config_coordinator = entry.runtime_data.config_coordinator - async_add_entities( - LaMarzoccoSensorEntity(coordinator, description) + entities = [ + LaMarzoccoSensorEntity(config_coordinator, description) for description in ENTITIES - if description.supported_fn(coordinator) + if description.supported_fn(config_coordinator) + ] + + statistics_coordinator = entry.runtime_data.statistics_coordinator + entities.extend( + LaMarzoccoSensorEntity(statistics_coordinator, description) + for description in STATISTIC_ENTITIES + if description.supported_fn(statistics_coordinator) ) + async_add_entities(entities) + class LaMarzoccoSensorEntity(LaMarzoccoEntity, SensorEntity): """Sensor representing espresso machine temperature data.""" diff --git a/homeassistant/components/lamarzocco/switch.py b/homeassistant/components/lamarzocco/switch.py index 263bb5dc6ec..54bd1ac2aed 100644 --- a/homeassistant/components/lamarzocco/switch.py +++ b/homeassistant/components/lamarzocco/switch.py @@ -68,7 +68,7 @@ async def async_setup_entry( ) -> None: """Set up switch entities and services.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.config_coordinator entities: list[SwitchEntity] = [] entities.extend( diff --git a/homeassistant/components/lamarzocco/update.py b/homeassistant/components/lamarzocco/update.py index ca182909042..0833ee6e249 100644 --- a/homeassistant/components/lamarzocco/update.py +++ b/homeassistant/components/lamarzocco/update.py @@ -59,7 +59,7 @@ async def async_setup_entry( ) -> None: """Create update entities.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.firmware_coordinator async_add_entities( LaMarzoccoUpdateEntity(coordinator, description) for description in ENTITIES diff --git a/tests/components/lamarzocco/conftest.py b/tests/components/lamarzocco/conftest.py index 0bd3fb2a737..997fa73604c 100644 --- a/tests/components/lamarzocco/conftest.py +++ b/tests/components/lamarzocco/conftest.py @@ -143,7 +143,7 @@ def mock_lamarzocco(device_fixture: MachineModel) -> Generator[MagicMock]: with ( patch( - "homeassistant.components.lamarzocco.coordinator.LaMarzoccoMachine", + "homeassistant.components.lamarzocco.LaMarzoccoMachine", autospec=True, ) as lamarzocco_mock, ): diff --git a/tests/components/lamarzocco/test_init.py b/tests/components/lamarzocco/test_init.py index 80c038c4948..446c8780b62 100644 --- a/tests/components/lamarzocco/test_init.py +++ b/tests/components/lamarzocco/test_init.py @@ -174,9 +174,7 @@ async def test_bluetooth_is_set_from_discovery( 
"homeassistant.components.lamarzocco.async_discovered_service_info", return_value=[service_info], ) as discovery, - patch( - "homeassistant.components.lamarzocco.coordinator.LaMarzoccoMachine" - ) as init_device, + patch("homeassistant.components.lamarzocco.LaMarzoccoMachine") as init_device, ): await async_init_integration(hass, mock_config_entry) discovery.assert_called_once() From e24dc3325905079d515439edf514a52ee7661f67 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Sun, 15 Dec 2024 15:45:50 -0500 Subject: [PATCH 0706/1198] Conversation: Use [] when we know key exists (#133305) --- homeassistant/components/conversation/http.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/conversation/http.py b/homeassistant/components/conversation/http.py index d9873c5cbce..8134ecb0eee 100644 --- a/homeassistant/components/conversation/http.py +++ b/homeassistant/components/conversation/http.py @@ -24,7 +24,7 @@ from .agent_manager import ( get_agent_manager, ) from .const import DATA_COMPONENT, DATA_DEFAULT_ENTITY -from .default_agent import METADATA_CUSTOM_FILE, METADATA_CUSTOM_SENTENCE, DefaultAgent +from .default_agent import METADATA_CUSTOM_FILE, METADATA_CUSTOM_SENTENCE from .entity import ConversationEntity from .models import ConversationInput @@ -162,8 +162,7 @@ async def websocket_list_sentences( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict ) -> None: """List custom registered sentences.""" - agent = hass.data.get(DATA_DEFAULT_ENTITY) - assert isinstance(agent, DefaultAgent) + agent = hass.data[DATA_DEFAULT_ENTITY] sentences = [] for trigger_data in agent.trigger_sentences: @@ -185,8 +184,7 @@ async def websocket_hass_agent_debug( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict ) -> None: """Return intents that would be matched by the default agent for a list of sentences.""" - agent = hass.data.get(DATA_DEFAULT_ENTITY) - assert isinstance(agent, DefaultAgent) + agent = hass.data[DATA_DEFAULT_ENTITY] # Return results for each sentence in the same order as the input. 
result_dicts: list[dict[str, Any] | None] = [] From 66dcd38701283e9e04d7eaa8257ad1d94448f6a6 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Mon, 16 Dec 2024 08:10:37 +0100 Subject: [PATCH 0707/1198] Update docker base image to 2024.12.1 (#133323) --- build.yaml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/build.yaml b/build.yaml index a8755bbbf5c..fafdd876f75 100644 --- a/build.yaml +++ b/build.yaml @@ -1,10 +1,10 @@ image: ghcr.io/home-assistant/{arch}-homeassistant build_from: - aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.11.0 - armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.11.0 - armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.11.0 - amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.11.0 - i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.11.0 + aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.12.1 + armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.12.1 + armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.12.1 + amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.12.1 + i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.12.1 codenotary: signer: notary@home-assistant.io base_image: notary@home-assistant.io From 909eb045cc0098749824d462c2876a50b88b32d5 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 16 Dec 2024 08:27:10 +0100 Subject: [PATCH 0708/1198] Set default min/max color temperature in abode lights (#133331) --- homeassistant/components/abode/light.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/homeassistant/components/abode/light.py b/homeassistant/components/abode/light.py index 9b21ee4eb74..e2d0a331f0a 100644 --- a/homeassistant/components/abode/light.py +++ b/homeassistant/components/abode/light.py @@ -11,6 +11,8 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, ColorMode, LightEntity, ) @@ -40,6 +42,8 @@ class AbodeLight(AbodeDevice, LightEntity): _device: Light _attr_name = None + _attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN + _attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN def turn_on(self, **kwargs: Any) -> None: """Turn on the light.""" From 5f2b1bd62282d0d55d1ad1e2c8ed00de30bacb15 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 16 Dec 2024 08:45:59 +0100 Subject: [PATCH 0709/1198] Set default min/max color temperature in demo lights (#133330) --- homeassistant/components/demo/light.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/homeassistant/components/demo/light.py b/homeassistant/components/demo/light.py index 8bb4e403c3d..ec98a056b3e 100644 --- a/homeassistant/components/demo/light.py +++ b/homeassistant/components/demo/light.py @@ -13,6 +13,8 @@ from homeassistant.components.light import ( ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, ATTR_WHITE, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, ColorMode, LightEntity, LightEntityFeature, @@ -100,6 +102,9 @@ class DemoLight(LightEntity): _attr_name = None _attr_should_poll = False + _attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN + _attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN + def __init__( self, unique_id: str, From 4566ebbb3dd016b35fb6204fa33601109b11f2cb Mon Sep 17 00:00:00 2001 From: Chris Talkington Date: Mon, 16 Dec 2024 01:51:01 -0600 Subject: [PATCH 0710/1198] Add reconfigure flow to Roku (#132986) * add reconfigure flow to 
roku * Update strings.json * aimplify * Apply suggestions from code review Co-authored-by: Josef Zweck * Update test_config_flow.py * Update config_flow.py * Update config_flow.py --------- Co-authored-by: Josef Zweck --- homeassistant/components/roku/config_flow.py | 43 +++++++++++-- homeassistant/components/roku/strings.json | 4 +- tests/components/roku/test_config_flow.py | 66 +++++++++++++++++++- 3 files changed, 103 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/roku/config_flow.py b/homeassistant/components/roku/config_flow.py index b92ff819701..bc0092d6953 100644 --- a/homeassistant/components/roku/config_flow.py +++ b/homeassistant/components/roku/config_flow.py @@ -10,7 +10,12 @@ from rokuecp import Roku, RokuError import voluptuous as vol from homeassistant.components import ssdp, zeroconf -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow +from homeassistant.config_entries import ( + SOURCE_RECONFIGURE, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import CONF_HOST, CONF_NAME from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -53,20 +58,38 @@ class RokuConfigFlow(ConfigFlow, domain=DOMAIN): self.discovery_info = {} @callback - def _show_form(self, errors: dict[str, Any] | None = None) -> ConfigFlowResult: + def _show_form( + self, + user_input: dict[str, Any] | None, + errors: dict[str, Any] | None = None, + ) -> ConfigFlowResult: """Show the form to the user.""" + suggested_values = user_input + if suggested_values is None and self.source == SOURCE_RECONFIGURE: + suggested_values = { + CONF_HOST: self._get_reconfigure_entry().data[CONF_HOST] + } + return self.async_show_form( step_id="user", - data_schema=DATA_SCHEMA, + data_schema=self.add_suggested_values_to_schema( + DATA_SCHEMA, suggested_values + ), errors=errors or {}, ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of the integration.""" + return await self.async_step_user(user_input) + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" if not user_input: - return self._show_form() + return self._show_form(user_input) errors = {} @@ -75,13 +98,21 @@ class RokuConfigFlow(ConfigFlow, domain=DOMAIN): except RokuError: _LOGGER.debug("Roku Error", exc_info=True) errors["base"] = ERROR_CANNOT_CONNECT - return self._show_form(errors) + return self._show_form(user_input, errors) except Exception: _LOGGER.exception("Unknown error trying to connect") return self.async_abort(reason=ERROR_UNKNOWN) await self.async_set_unique_id(info["serial_number"]) - self._abort_if_unique_id_configured(updates={CONF_HOST: user_input[CONF_HOST]}) + + if self.source == SOURCE_RECONFIGURE: + self._abort_if_unique_id_mismatch(reason="wrong_device") + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), + data_updates={CONF_HOST: user_input[CONF_HOST]}, + ) + + self._abort_if_unique_id_configured() return self.async_create_entry(title=info["title"], data=user_input) diff --git a/homeassistant/components/roku/strings.json b/homeassistant/components/roku/strings.json index 9d657be6d61..bd47585db1b 100644 --- a/homeassistant/components/roku/strings.json +++ b/homeassistant/components/roku/strings.json @@ -21,7 +21,9 @@ "abort": { "already_configured": 
"[%key:common::config_flow::abort::already_configured_device%]", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", - "unknown": "[%key:common::config_flow::error::unknown%]" + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "unknown": "[%key:common::config_flow::error::unknown%]", + "wrong_device": "This Roku device does not match the existing device id. Please make sure you entered the correct host information." } }, "options": { diff --git a/tests/components/roku/test_config_flow.py b/tests/components/roku/test_config_flow.py index 7144c77cad9..57ddf5d51a6 100644 --- a/tests/components/roku/test_config_flow.py +++ b/tests/components/roku/test_config_flow.py @@ -1,13 +1,18 @@ """Test the Roku config flow.""" import dataclasses -from unittest.mock import MagicMock +from unittest.mock import AsyncMock, MagicMock import pytest -from rokuecp import RokuConnectionError +from rokuecp import Device as RokuDevice, RokuConnectionError from homeassistant.components.roku.const import CONF_PLAY_MEDIA_APP_ID, DOMAIN -from homeassistant.config_entries import SOURCE_HOMEKIT, SOURCE_SSDP, SOURCE_USER +from homeassistant.config_entries import ( + SOURCE_HOMEKIT, + SOURCE_SSDP, + SOURCE_USER, + ConfigFlowResult, +) from homeassistant.const import CONF_HOST, CONF_NAME, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -23,6 +28,8 @@ from . import ( from tests.common import MockConfigEntry +RECONFIGURE_HOST = "192.168.1.190" + async def test_duplicate_error( hass: HomeAssistant, @@ -276,3 +283,56 @@ async def test_options_flow( assert result2.get("data") == { CONF_PLAY_MEDIA_APP_ID: "782875", } + + +async def _start_reconfigure_flow( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> ConfigFlowResult: + """Initialize a reconfigure flow.""" + mock_config_entry.add_to_hass(hass) + + reconfigure_result = await mock_config_entry.start_reconfigure_flow(hass) + + assert reconfigure_result["type"] is FlowResultType.FORM + assert reconfigure_result["step_id"] == "user" + + return await hass.config_entries.flow.async_configure( + reconfigure_result["flow_id"], + {CONF_HOST: RECONFIGURE_HOST}, + ) + + +async def test_reconfigure_flow( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, + mock_roku_config_flow: MagicMock, +) -> None: + """Test reconfigure flow.""" + result = await _start_reconfigure_flow(hass, mock_config_entry) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + entry = hass.config_entries.async_get_entry(mock_config_entry.entry_id) + assert entry + assert entry.data == { + CONF_HOST: RECONFIGURE_HOST, + } + + +async def test_reconfigure_unique_id_mismatch( + hass: HomeAssistant, + mock_device: RokuDevice, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, + mock_roku_config_flow: MagicMock, +) -> None: + """Ensure reconfigure flow aborts when the device changes.""" + mock_device.info.serial_number = "RECONFIG" + + result = await _start_reconfigure_flow(hass, mock_config_entry) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "wrong_device" From 22d03afb9b5c5142d4ac944b4903a1e6d13c9c82 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 16 Dec 2024 09:08:37 +0100 Subject: [PATCH 0711/1198] Set default min/max color temperature in wemo lights (#133338) --- 
homeassistant/components/wemo/light.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/homeassistant/components/wemo/light.py b/homeassistant/components/wemo/light.py index b39f4829605..6068cd3ff0b 100644 --- a/homeassistant/components/wemo/light.py +++ b/homeassistant/components/wemo/light.py @@ -11,6 +11,8 @@ from homeassistant.components.light import ( ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ATTR_TRANSITION, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, ColorMode, LightEntity, LightEntityFeature, @@ -77,6 +79,8 @@ def async_setup_bridge( class WemoLight(WemoEntity, LightEntity): """Representation of a WeMo light.""" + _attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN + _attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN _attr_supported_features = LightEntityFeature.TRANSITION def __init__(self, coordinator: DeviceCoordinator, light: BridgeLight) -> None: From 06f6869da5dfaf0fcfeda28231ac2b7ea64297b1 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 16 Dec 2024 09:47:49 +0100 Subject: [PATCH 0712/1198] Avoid string manipulations in hassio backup reader/writer (#133339) --- homeassistant/components/hassio/backup.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/hassio/backup.py b/homeassistant/components/hassio/backup.py index 53f3a226a09..e544a56a3c8 100644 --- a/homeassistant/components/hassio/backup.py +++ b/homeassistant/components/hassio/backup.py @@ -175,7 +175,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): hassio_agents: list[SupervisorBackupAgent] = [ cast(SupervisorBackupAgent, manager.backup_agents[agent_id]) for agent_id in agent_ids - if agent_id.startswith(DOMAIN) + if manager.backup_agents[agent_id].domain == DOMAIN ] locations = {agent.location for agent in hassio_agents} @@ -254,7 +254,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): hassio_agents: list[SupervisorBackupAgent] = [ cast(SupervisorBackupAgent, manager.backup_agents[agent_id]) for agent_id in agent_ids - if agent_id.startswith(DOMAIN) + if manager.backup_agents[agent_id].domain == DOMAIN ] locations = {agent.location for agent in hassio_agents} @@ -305,7 +305,8 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): else None ) - if not agent_id.startswith(DOMAIN): + manager = self._hass.data[DATA_MANAGER] + if manager.backup_agents[agent_id].domain != DOMAIN: # Download the backup to the supervisor. Supervisor will clean up the backup # two days after the restore is done. 
await self.async_receive_backup( From f2674f32623492d0b8a75d9293b456dc801997fb Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 16 Dec 2024 09:49:18 +0100 Subject: [PATCH 0713/1198] Set default min/max color temperature in deconz lights (#133333) --- homeassistant/components/deconz/light.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/homeassistant/components/deconz/light.py b/homeassistant/components/deconz/light.py index acfbff98297..b1df32efc31 100644 --- a/homeassistant/components/deconz/light.py +++ b/homeassistant/components/deconz/light.py @@ -18,6 +18,8 @@ from homeassistant.components.light import ( ATTR_HS_COLOR, ATTR_TRANSITION, ATTR_XY_COLOR, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, DOMAIN as LIGHT_DOMAIN, EFFECT_COLORLOOP, FLASH_LONG, @@ -191,6 +193,8 @@ class DeconzBaseLight[_LightDeviceT: Group | Light]( TYPE = LIGHT_DOMAIN _attr_color_mode = ColorMode.UNKNOWN + _attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN + _attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN def __init__(self, device: _LightDeviceT, hub: DeconzHub) -> None: """Set up light.""" From d78a24ba33b9ac8918ebe000849997a5fd77aef7 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Benecke Date: Mon, 16 Dec 2024 09:54:01 +0100 Subject: [PATCH 0714/1198] Use `ConfigEntry.runtime_data` in Twitch (#133337) * Use `ConfigEntry.runtime_data` in Twitch * Process code review * Process code review --- homeassistant/components/twitch/__init__.py | 14 ++++++-------- homeassistant/components/twitch/coordinator.py | 11 +++++++++-- homeassistant/components/twitch/sensor.py | 9 +++------ tests/components/twitch/__init__.py | 2 +- tests/components/twitch/test_sensor.py | 2 +- 5 files changed, 20 insertions(+), 18 deletions(-) diff --git a/homeassistant/components/twitch/__init__.py b/homeassistant/components/twitch/__init__.py index 6979a016447..22a1782f594 100644 --- a/homeassistant/components/twitch/__init__.py +++ b/homeassistant/components/twitch/__init__.py @@ -7,7 +7,6 @@ from typing import cast from aiohttp.client_exceptions import ClientError, ClientResponseError from twitchAPI.twitch import Twitch -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady @@ -17,11 +16,11 @@ from homeassistant.helpers.config_entry_oauth2_flow import ( async_get_config_entry_implementation, ) -from .const import DOMAIN, OAUTH_SCOPES, PLATFORMS -from .coordinator import TwitchCoordinator +from .const import OAUTH_SCOPES, PLATFORMS +from .coordinator import TwitchConfigEntry, TwitchCoordinator -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: TwitchConfigEntry) -> bool: """Set up Twitch from a config entry.""" implementation = cast( LocalOAuth2Implementation, @@ -47,18 +46,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: client.auto_refresh_auth = False await client.set_user_authentication(access_token, scope=OAUTH_SCOPES) - coordinator = TwitchCoordinator(hass, client, session) - + coordinator = TwitchCoordinator(hass, client, session, entry) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def 
async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: TwitchConfigEntry) -> bool: """Unload Twitch config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/twitch/coordinator.py b/homeassistant/components/twitch/coordinator.py index c34eeaa5325..c61e80bd2b8 100644 --- a/homeassistant/components/twitch/coordinator.py +++ b/homeassistant/components/twitch/coordinator.py @@ -15,6 +15,8 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda from .const import CONF_CHANNELS, DOMAIN, LOGGER, OAUTH_SCOPES +type TwitchConfigEntry = ConfigEntry[TwitchCoordinator] + def chunk_list(lst: list, chunk_size: int) -> list[list]: """Split a list into chunks of chunk_size.""" @@ -44,12 +46,16 @@ class TwitchUpdate: class TwitchCoordinator(DataUpdateCoordinator[dict[str, TwitchUpdate]]): """Class to manage fetching Twitch data.""" - config_entry: ConfigEntry + config_entry: TwitchConfigEntry users: list[TwitchUser] current_user: TwitchUser def __init__( - self, hass: HomeAssistant, twitch: Twitch, session: OAuth2Session + self, + hass: HomeAssistant, + twitch: Twitch, + session: OAuth2Session, + entry: TwitchConfigEntry, ) -> None: """Initialize the coordinator.""" self.twitch = twitch @@ -58,6 +64,7 @@ class TwitchCoordinator(DataUpdateCoordinator[dict[str, TwitchUpdate]]): LOGGER, name=DOMAIN, update_interval=timedelta(minutes=5), + config_entry=entry, ) self.session = session diff --git a/homeassistant/components/twitch/sensor.py b/homeassistant/components/twitch/sensor.py index f78d33ea461..b407eae0319 100644 --- a/homeassistant/components/twitch/sensor.py +++ b/homeassistant/components/twitch/sensor.py @@ -5,15 +5,12 @@ from __future__ import annotations from typing import Any from homeassistant.components.sensor import SensorDeviceClass, SensorEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . 
import TwitchCoordinator -from .const import DOMAIN -from .coordinator import TwitchUpdate +from .coordinator import TwitchConfigEntry, TwitchCoordinator, TwitchUpdate ATTR_GAME = "game" ATTR_TITLE = "title" @@ -34,11 +31,11 @@ PARALLEL_UPDATES = 1 async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: TwitchConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Initialize entries.""" - coordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities( TwitchSensor(coordinator, channel_id) for channel_id in coordinator.data diff --git a/tests/components/twitch/__init__.py b/tests/components/twitch/__init__.py index 2d70aaf9649..1887861f6e5 100644 --- a/tests/components/twitch/__init__.py +++ b/tests/components/twitch/__init__.py @@ -5,7 +5,7 @@ from typing import Any, Generic, TypeVar from twitchAPI.object.base import TwitchObject -from homeassistant.components.twitch import DOMAIN +from homeassistant.components.twitch.const import DOMAIN from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_json_array_fixture diff --git a/tests/components/twitch/test_sensor.py b/tests/components/twitch/test_sensor.py index 613c0919c49..c8cc009f3e1 100644 --- a/tests/components/twitch/test_sensor.py +++ b/tests/components/twitch/test_sensor.py @@ -7,7 +7,7 @@ from dateutil.tz import tzutc from twitchAPI.object.api import FollowedChannel, Stream, UserSubscription from twitchAPI.type import TwitchResourceNotFound -from homeassistant.components.twitch import DOMAIN +from homeassistant.components.twitch.const import DOMAIN from homeassistant.core import HomeAssistant from . import TwitchIterObject, get_generator_from_data, setup_integration From 9667a120309f566a85df8278ccd0da0bee1b926a Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 16 Dec 2024 10:32:57 +0100 Subject: [PATCH 0715/1198] Set default min/max color temperature in matter lights (#133340) --- homeassistant/components/matter/light.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/homeassistant/components/matter/light.py b/homeassistant/components/matter/light.py index 153e154e64e..c9d5c688f69 100644 --- a/homeassistant/components/matter/light.py +++ b/homeassistant/components/matter/light.py @@ -13,6 +13,8 @@ from homeassistant.components.light import ( ATTR_HS_COLOR, ATTR_TRANSITION, ATTR_XY_COLOR, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, ColorMode, LightEntity, LightEntityDescription, @@ -91,6 +93,8 @@ class MatterLight(MatterEntity, LightEntity): _supports_color_temperature = False _transitions_disabled = False _platform_translation_key = "light" + _attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN + _attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN async def _set_xy_color( self, xy_color: tuple[float, float], transition: float = 0.0 From d062171be3e5dfdaa310b5e4f4f16a72a3e265d6 Mon Sep 17 00:00:00 2001 From: jb101010-2 <168106462+jb101010-2@users.noreply.github.com> Date: Mon, 16 Dec 2024 12:19:21 +0100 Subject: [PATCH 0716/1198] Suez_water: mark reached bronze scale level (#133352) --- homeassistant/components/suez_water/manifest.json | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/suez_water/manifest.json b/homeassistant/components/suez_water/manifest.json index 7e720a86afd..f39411e8afa 100644 --- a/homeassistant/components/suez_water/manifest.json +++ b/homeassistant/components/suez_water/manifest.json @@ -6,5 +6,6 @@ "documentation": 
"https://www.home-assistant.io/integrations/suez_water", "iot_class": "cloud_polling", "loggers": ["pysuez", "regex"], + "quality_scale": "bronze", "requirements": ["pysuezV2==1.3.5"] } From 4b3893eadf2488d5c7507a03138e8b2bb91cfdfe Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 16 Dec 2024 12:26:29 +0100 Subject: [PATCH 0717/1198] Set default min/max color temperature in homekit_controller lights (#133334) --- .../components/homekit_controller/light.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/homekit_controller/light.py b/homeassistant/components/homekit_controller/light.py index d8c48d81333..26f10768aa0 100644 --- a/homeassistant/components/homekit_controller/light.py +++ b/homeassistant/components/homekit_controller/light.py @@ -12,6 +12,8 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, ColorMode, LightEntity, ) @@ -53,6 +55,9 @@ async def async_setup_entry( class HomeKitLight(HomeKitEntity, LightEntity): """Representation of a Homekit light.""" + _attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN + _attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN + @callback def _async_reconfigure(self) -> None: """Reconfigure entity.""" @@ -98,24 +103,24 @@ class HomeKitLight(HomeKitEntity, LightEntity): def max_color_temp_kelvin(self) -> int: """Return the coldest color_temp_kelvin that this light supports.""" if not self.service.has(CharacteristicsTypes.COLOR_TEMPERATURE): - return super().max_color_temp_kelvin + return DEFAULT_MAX_KELVIN min_value_mireds = self.service[CharacteristicsTypes.COLOR_TEMPERATURE].minValue return ( color_util.color_temperature_mired_to_kelvin(min_value_mireds) if min_value_mireds - else super().max_color_temp_kelvin + else DEFAULT_MAX_KELVIN ) @cached_property def min_color_temp_kelvin(self) -> int: """Return the warmest color_temp_kelvin that this light supports.""" if not self.service.has(CharacteristicsTypes.COLOR_TEMPERATURE): - return super().min_color_temp_kelvin + return DEFAULT_MIN_KELVIN max_value_mireds = self.service[CharacteristicsTypes.COLOR_TEMPERATURE].maxValue return ( color_util.color_temperature_mired_to_kelvin(max_value_mireds) if max_value_mireds - else super().min_color_temp_kelvin + else DEFAULT_MIN_KELVIN ) @property From cd2cc1d99fa362e8d2f67840e5224f3ceca15723 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 16 Dec 2024 13:10:15 +0100 Subject: [PATCH 0718/1198] Reduce false-positives in test-before-setup IQS check (#133349) --- .../test_before_setup.py | 41 ++++++++++++------- 1 file changed, 27 insertions(+), 14 deletions(-) diff --git a/script/hassfest/quality_scale_validation/test_before_setup.py b/script/hassfest/quality_scale_validation/test_before_setup.py index db737c99e37..5f21a9d2458 100644 --- a/script/hassfest/quality_scale_validation/test_before_setup.py +++ b/script/hassfest/quality_scale_validation/test_before_setup.py @@ -15,13 +15,31 @@ _VALID_EXCEPTIONS = { } -def _raises_exception(async_setup_entry_function: ast.AsyncFunctionDef) -> bool: - """Check that a valid exception is raised within `async_setup_entry`.""" - for node in ast.walk(async_setup_entry_function): - if isinstance(node, ast.Raise): - if isinstance(node.exc, ast.Name) and node.exc.id in _VALID_EXCEPTIONS: - return True - if isinstance(node.exc, ast.Call) and node.exc.func.id in _VALID_EXCEPTIONS: +def 
_get_exception_name(expression: ast.expr) -> str: + """Get the name of the exception being raised.""" + if isinstance(expression, ast.Name): + return expression.id + + if isinstance(expression, ast.Call): + return _get_exception_name(expression.func) + + if isinstance(expression, ast.Attribute): + return _get_exception_name(expression.value) + + raise AssertionError( + f"Raise is neither Attribute nor Call nor Name: {type(expression)}" + ) + + +def _raises_exception(integration: Integration) -> bool: + """Check that a valid exception is raised.""" + for module_file in integration.path.rglob("*.py"): + module = ast_parse_module(module_file) + for node in ast.walk(module): + if ( + isinstance(node, ast.Raise) + and _get_exception_name(node.exc) in _VALID_EXCEPTIONS + ): return True return False @@ -59,11 +77,6 @@ def validate( if not (async_setup_entry := _get_setup_entry_function(init)): return [f"Could not find `async_setup_entry` in {init_file}"] - if not ( - _raises_exception(async_setup_entry) or _calls_first_refresh(async_setup_entry) - ): - return [ - f"Integration does not raise one of {_VALID_EXCEPTIONS} " - f"in async_setup_entry ({init_file})" - ] + if not (_calls_first_refresh(async_setup_entry) or _raises_exception(integration)): + return [f"Integration does not raise one of {_VALID_EXCEPTIONS}"] return None From 739832691e16c078eb6f96ce16c2f05f9df1bf46 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ab=C3=ADlio=20Costa?= Date: Mon, 16 Dec 2024 12:14:01 +0000 Subject: [PATCH 0719/1198] Add Idasen Desk quality scale record (#132368) --- .../components/idasen_desk/quality_scale.yaml | 108 ++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 108 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/idasen_desk/quality_scale.yaml diff --git a/homeassistant/components/idasen_desk/quality_scale.yaml b/homeassistant/components/idasen_desk/quality_scale.yaml new file mode 100644 index 00000000000..28381f98a3e --- /dev/null +++ b/homeassistant/components/idasen_desk/quality_scale.yaml @@ -0,0 +1,108 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + appropriate-polling: + status: exempt + comment: | + This integration does not use polling. + brands: done + common-modules: + status: todo + comment: | + The cover and sensor entities could move common initialization to a base entity class. + config-flow-test-coverage: + status: todo + comment: | + - use mock_desk_api + - merge test_user_step_auth_failed, test_user_step_cannot_connect and test_user_step_unknown_exception. + config-flow: + status: todo + comment: | + Missing data description for user step. + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. + docs-high-level-description: todo + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: done + entity-unique-id: done + has-entity-name: done + runtime-data: todo + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: todo + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + This integration does not provide configuration parameters. + docs-installation-parameters: + status: exempt + comment: | + This integration does not provide installation parameters. 
+ entity-unavailable: done + integration-owner: done + log-when-unavailable: todo + parallel-updates: todo + reauthentication-flow: + status: exempt + comment: | + This integration does not require authentication. + test-coverage: + status: todo + comment: | + - remove the await hass.async_block_till_done() after service calls with blocking=True + - use constants (like SERVICE_PRESS and ATTR_ENTITY_ID) in the tests calling services + - rename test_buttons.py -> test_button.py + - rename test_sensors.py -> test_sensor.py + # Gold + devices: done + diagnostics: todo + discovery-update-info: + status: exempt + comment: | + This integration uses Bluetooth and addresses don't change. + discovery: done + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: todo + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: + status: exempt + comment: | + This integration doesn't have any cases where a reconfiguration is needed. + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. + stale-devices: + status: exempt + comment: | + This integration has a fixed single device. + + # Platinum + async-dependency: done + inject-websession: + status: exempt + comment: | + This integration doesn't use websession. + strict-typing: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 23721d31fec..e0992914626 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -516,7 +516,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "iaqualink", "ibeacon", "icloud", - "idasen_desk", "idteck_prox", "ifttt", "iglo", From 34911a78bd93a3c375f1d2afcbb80eea0de1f3b1 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Mon, 16 Dec 2024 13:17:38 +0100 Subject: [PATCH 0720/1198] Add Habitica quality scale record (#131429) Co-authored-by: Franck Nijhof Co-authored-by: Joost Lekkerkerker --- .../components/habitica/quality_scale.yaml | 84 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 84 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/habitica/quality_scale.yaml diff --git a/homeassistant/components/habitica/quality_scale.yaml b/homeassistant/components/habitica/quality_scale.yaml new file mode 100644 index 00000000000..cf54672bfed --- /dev/null +++ b/homeassistant/components/habitica/quality_scale.yaml @@ -0,0 +1,84 @@ +rules: + # Bronze + action-setup: done + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: + status: todo + comment: test already_configured, tests should finish with create_entry or abort, assert unique_id + config-flow: done + dependency-transparency: todo + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: No events are registered by the integration. 
+ entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: There is no options flow. + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: todo + reauthentication-flow: todo + test-coverage: todo + + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: exempt + comment: Integration represents a service + discovery: + status: exempt + comment: Integration represents a service + docs-data-update: done + docs-examples: done + docs-known-limitations: done + docs-supported-devices: + status: exempt + comment: No supportable devices. + docs-supported-functions: done + docs-troubleshooting: todo + docs-use-cases: done + dynamic-devices: + status: exempt + comment: | + Integration is a service, no devices that could be added at runtime. + Button entities for casting skills are created/removed dynamically if unlocked or on class change + entity-category: + status: done + comment: Default categories are appropriate for currently available entities. + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: + status: todo + comment: translations for UpdateFailed missing + icon-translations: done + reconfiguration-flow: todo + repair-issues: + status: done + comment: Used to inform of deprecated entities and actions. + stale-devices: + status: done + comment: Not applicable. Only one device per config entry. Removed together with the config entry. + + # Platinum + async-dependency: todo + inject-websession: done + strict-typing: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index e0992914626..604ce5e51ea 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -473,7 +473,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "gstreamer", "gtfs", "guardian", - "habitica", "harman_kardon_avr", "harmony", "hassio", From 836fd94a5633e7dd3a9879e6293e9878078a9a89 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Mon, 16 Dec 2024 13:31:13 +0100 Subject: [PATCH 0721/1198] Record current IQS state for LaMetric (#133040) --- .../components/lametric/quality_scale.yaml | 75 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 75 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/lametric/quality_scale.yaml diff --git a/homeassistant/components/lametric/quality_scale.yaml b/homeassistant/components/lametric/quality_scale.yaml new file mode 100644 index 00000000000..a8982bb938b --- /dev/null +++ b/homeassistant/components/lametric/quality_scale.yaml @@ -0,0 +1,75 @@ +rules: + # Bronze + action-setup: done + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: | + Entities of this integration does not explicitly subscribe to events. 
+  entity-unique-id: done
+  has-entity-name: done
+  runtime-data: todo
+  test-before-configure: done
+  test-before-setup: done
+  unique-config-entry: done
+
+  # Silver
+  action-exceptions: todo
+  config-entry-unloading: done
+  docs-configuration-parameters: todo
+  docs-installation-parameters: todo
+  entity-unavailable: done
+  integration-owner: done
+  log-when-unavailable: done
+  parallel-updates: todo
+  reauthentication-flow: done
+  test-coverage: done
+  # Gold
+  devices: done
+  diagnostics: done
+  discovery-update-info: done
+  discovery: done
+  docs-data-update: todo
+  docs-examples: todo
+  docs-known-limitations: todo
+  docs-supported-devices:
+    status: todo
+    comment: |
+      Devices are documented, but some are missing. For example, their pro
+      strip is supported as well.
+  docs-supported-functions: todo
+  docs-troubleshooting: todo
+  docs-use-cases: todo
+  dynamic-devices:
+    status: exempt
+    comment: |
+      This integration connects to a single device.
+  entity-category: done
+  entity-device-class: done
+  entity-disabled-by-default: done
+  entity-translations: todo
+  exception-translations: todo
+  icon-translations: done
+  reconfiguration-flow: todo
+  repair-issues:
+    status: exempt
+    comment: |
+      This integration does not raise any repairable issues.
+  stale-devices:
+    status: exempt
+    comment: |
+      This integration connects to a single device.
+
+  # Platinum
+  async-dependency: done
+  inject-websession: done
+  strict-typing: done
diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py
index 604ce5e51ea..43b4adc90e9 100644
--- a/script/hassfest/quality_scale.py
+++ b/script/hassfest/quality_scale.py
@@ -576,7 +576,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [
     "kwb",
     "lacrosse",
     "lacrosse_view",
-    "lametric",
     "landisgyr_heat_meter",
     "lannouncer",
     "lastfm",

From cc27c95bada7b7e8c0174b9027e9f0f324a87adc Mon Sep 17 00:00:00 2001
From: Guido Schmitz
Date: Mon, 16 Dec 2024 13:35:55 +0100
Subject: [PATCH 0722/1198] Use unique_id in devolo Home Network tests (#133147)

---
 tests/components/devolo_home_network/__init__.py        |  9 +++++++--
 .../snapshots/test_diagnostics.ambr                     |  2 +-
 .../components/devolo_home_network/test_config_flow.py  | 10 +++-------
 3 files changed, 11 insertions(+), 10 deletions(-)

diff --git a/tests/components/devolo_home_network/__init__.py b/tests/components/devolo_home_network/__init__.py
index 05ccbca0c56..f6d1c13299a 100644
--- a/tests/components/devolo_home_network/__init__.py
+++ b/tests/components/devolo_home_network/__init__.py
@@ -4,7 +4,7 @@ from homeassistant.components.devolo_home_network.const import DOMAIN
 from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD
 from homeassistant.core import HomeAssistant
 
-from .const import IP
+from .const import DISCOVERY_INFO, IP
 
 from tests.common import MockConfigEntry
 
@@ -15,7 +15,12 @@ def configure_integration(hass: HomeAssistant) -> MockConfigEntry:
         CONF_IP_ADDRESS: IP,
         CONF_PASSWORD: "test",
     }
-    entry = MockConfigEntry(domain=DOMAIN, data=config, entry_id="123456")
+    entry = MockConfigEntry(
+        domain=DOMAIN,
+        data=config,
+        entry_id="123456",
+        unique_id=DISCOVERY_INFO.properties["SN"],
+    )
     entry.add_to_hass(hass)
 
     return entry
diff --git a/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr b/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr
index 8fe6c7c2293..1288b7f3ef6 100644
--- a/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr
+++ b/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr
@@ -35,7 +35,7 @@
'subentries': list([ ]), 'title': 'Mock Title', - 'unique_id': None, + 'unique_id': '1234567890', 'version': 1, }), }) diff --git a/tests/components/devolo_home_network/test_config_flow.py b/tests/components/devolo_home_network/test_config_flow.py index 28e9059d588..92163b5cb95 100644 --- a/tests/components/devolo_home_network/test_config_flow.py +++ b/tests/components/devolo_home_network/test_config_flow.py @@ -29,8 +29,6 @@ from .const import ( ) from .mock import MockDevice -from tests.common import MockConfigEntry - async def test_form(hass: HomeAssistant, info: dict[str, Any]) -> None: """Test we get the form.""" @@ -125,6 +123,8 @@ async def test_zeroconf(hass: HomeAssistant) -> None: CONF_IP_ADDRESS: IP, CONF_PASSWORD: "", } + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["result"].unique_id == "1234567890" async def test_abort_zeroconf_wrong_device(hass: HomeAssistant) -> None: @@ -141,11 +141,7 @@ async def test_abort_zeroconf_wrong_device(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("info") async def test_abort_if_configured(hass: HomeAssistant) -> None: """Test we abort config flow if already configured.""" - serial_number = DISCOVERY_INFO.properties["SN"] - entry = MockConfigEntry( - domain=DOMAIN, unique_id=serial_number, data={CONF_IP_ADDRESS: IP} - ) - entry.add_to_hass(hass) + entry = configure_integration(hass) # Abort on concurrent user flow result = await hass.config_entries.flow.async_init( From 0a0f4827020e88a4804a23566d1b6ca45c6811d7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=85ke=20Strandberg?= Date: Mon, 16 Dec 2024 13:39:46 +0100 Subject: [PATCH 0723/1198] Update myuplink quality scale (#133083) Updated documentation --- homeassistant/components/myuplink/quality_scale.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/myuplink/quality_scale.yaml b/homeassistant/components/myuplink/quality_scale.yaml index ef64ce757f5..dbe771f7eb2 100644 --- a/homeassistant/components/myuplink/quality_scale.yaml +++ b/homeassistant/components/myuplink/quality_scale.yaml @@ -61,12 +61,12 @@ rules: comment: | Not possible to discover these devices. docs-data-update: done - docs-examples: todo + docs-examples: done docs-known-limitations: done - docs-supported-devices: todo + docs-supported-devices: done docs-supported-functions: todo docs-troubleshooting: done - docs-use-cases: todo + docs-use-cases: done dynamic-devices: todo entity-category: done entity-device-class: done From 38fdfba1693849792b6f75b06c6952c513a58f45 Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Mon, 16 Dec 2024 13:56:17 +0100 Subject: [PATCH 0724/1198] Velbus finish config-flow-test-coverage (#133149) Co-authored-by: Joost Lekkerkerker --- homeassistant/components/velbus/quality_scale.yaml | 5 +---- tests/components/velbus/test_config_flow.py | 8 +++++++- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/velbus/quality_scale.yaml b/homeassistant/components/velbus/quality_scale.yaml index 37e55fee19c..9a48e84da93 100644 --- a/homeassistant/components/velbus/quality_scale.yaml +++ b/homeassistant/components/velbus/quality_scale.yaml @@ -7,10 +7,7 @@ rules: This integration does not poll. 
brands: done
   common-modules: done
-  config-flow-test-coverage:
-    status: todo
-    comment: |
-      Split test_flow_usb from the test that tests already_configured, test_flow_usb should also assert the unique_id of the entry
+  config-flow-test-coverage: done
   config-flow:
     status: todo
     comment: |
diff --git a/tests/components/velbus/test_config_flow.py b/tests/components/velbus/test_config_flow.py
index 432fcea10db..5e81a3f8a36 100644
--- a/tests/components/velbus/test_config_flow.py
+++ b/tests/components/velbus/test_config_flow.py
@@ -156,12 +156,18 @@ async def test_flow_usb(hass: HomeAssistant) -> None:
         user_input={},
     )
     assert result
+    assert result["result"].unique_id == "0B1B:10CF_1234_Velleman_Velbus VMB1USB"
     assert result.get("type") is FlowResultType.CREATE_ENTRY
 
-    # test an already configured discovery
+
+@pytest.mark.usefixtures("controller")
+@patch("serial.tools.list_ports.comports", MagicMock(return_value=[com_port()]))
+async def test_flow_usb_if_already_setup(hass: HomeAssistant) -> None:
+    """Test we abort if Velbus USB discovery aborts in case it is already set up."""
     entry = MockConfigEntry(
         domain=DOMAIN,
         data={CONF_PORT: PORT_SERIAL},
+        unique_id="0B1B:10CF_1234_Velleman_Velbus VMB1USB",
     )
     entry.add_to_hass(hass)
     result = await hass.config_entries.flow.async_init(

From a953abf5c3ea000f52f934d711dfe47650645b95 Mon Sep 17 00:00:00 2001
From: Assaf Inbal
Date: Mon, 16 Dec 2024 15:00:06 +0200
Subject: [PATCH 0725/1198] Add reauth flow to Ituran (#132755)

---
 .../components/ituran/config_flow.py         | 36 ++++++++++++++--
 .../components/ituran/coordinator.py         |  4 +-
 .../components/ituran/quality_scale.yaml     |  2 +-
 homeassistant/components/ituran/strings.json | 11 +++--
 tests/components/ituran/test_config_flow.py  | 43 +++++++++++++++++++
 5 files changed, 86 insertions(+), 10 deletions(-)

diff --git a/homeassistant/components/ituran/config_flow.py b/homeassistant/components/ituran/config_flow.py
index 48e898a9d0a..9709e471503 100644
--- a/homeassistant/components/ituran/config_flow.py
+++ b/homeassistant/components/ituran/config_flow.py
@@ -2,6 +2,7 @@
 
 from __future__ import annotations
 
+from collections.abc import Mapping
 import logging
 from typing import Any
 
@@ -9,7 +10,7 @@ from pyituran import Ituran
 from pyituran.exceptions import IturanApiError, IturanAuthError
 import voluptuous as vol
 
-from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
+from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
 
 from .const import (
     CONF_ID_OR_PASSPORT,
@@ -43,11 +44,12 @@ class IturanConfigFlow(ConfigFlow, domain=DOMAIN):
     async def async_step_user(
         self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
-        """Handle the inial step."""
+        """Handle the initial step."""
         errors: dict[str, str] = {}
         if user_input is not None:
             await self.async_set_unique_id(user_input[CONF_ID_OR_PASSPORT])
-            self._abort_if_unique_id_configured()
+            if self.source != SOURCE_REAUTH:
+                self._abort_if_unique_id_configured()
 
             ituran = Ituran(
                 user_input[CONF_ID_OR_PASSPORT],
@@ -81,7 +83,7 @@ class IturanConfigFlow(ConfigFlow, domain=DOMAIN):
     async def async_step_otp(
         self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
-        """Handle the inial step."""
+        """Handle the OTP step."""
         errors: dict[str, str] = {}
         if user_input is not None:
             ituran = Ituran(
@@ -99,6 +101,10 @@ class IturanConfigFlow(ConfigFlow, domain=DOMAIN):
                 _LOGGER.exception("Unexpected exception")
                 errors["base"] = "unknown"
             else:
+                if self.source == SOURCE_REAUTH:
+                    return 
self.async_update_reload_and_abort( + self._get_reauth_entry(), data=self._user_info + ) return self.async_create_entry( title=f"Ituran {self._user_info[CONF_ID_OR_PASSPORT]}", data=self._user_info, @@ -107,3 +113,25 @@ class IturanConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="otp", data_schema=STEP_OTP_DATA_SCHEMA, errors=errors ) + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle configuration by re-auth.""" + self._user_info = dict(entry_data) + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reauth confirmation message.""" + if user_input is not None: + return await self.async_step_user(self._user_info) + + return self.async_show_form( + step_id="reauth_confirm", + data_schema=vol.Schema({}), + description_placeholders={ + "phone_number": self._user_info[CONF_PHONE_NUMBER] + }, + ) diff --git a/homeassistant/components/ituran/coordinator.py b/homeassistant/components/ituran/coordinator.py index 93d07b71267..cd0949eb4c2 100644 --- a/homeassistant/components/ituran/coordinator.py +++ b/homeassistant/components/ituran/coordinator.py @@ -7,7 +7,7 @@ from pyituran.exceptions import IturanApiError, IturanAuthError from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryError +from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers import device_registry as dr from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -54,7 +54,7 @@ class IturanDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Vehicle]]): translation_domain=DOMAIN, translation_key="api_error" ) from e except IturanAuthError as e: - raise ConfigEntryError( + raise ConfigEntryAuthFailed( translation_domain=DOMAIN, translation_key="auth_error" ) from e diff --git a/homeassistant/components/ituran/quality_scale.yaml b/homeassistant/components/ituran/quality_scale.yaml index 71f82aa1971..71d0d9698da 100644 --- a/homeassistant/components/ituran/quality_scale.yaml +++ b/homeassistant/components/ituran/quality_scale.yaml @@ -35,7 +35,7 @@ rules: status: exempt comment: | This integration does not provide additional actions. - reauthentication-flow: todo + reauthentication-flow: done parallel-updates: status: exempt comment: | diff --git a/homeassistant/components/ituran/strings.json b/homeassistant/components/ituran/strings.json index e9f785289b8..212dbd1b86a 100644 --- a/homeassistant/components/ituran/strings.json +++ b/homeassistant/components/ituran/strings.json @@ -7,7 +7,7 @@ "phone_number": "Mobile phone number" }, "data_description": { - "id_or_passport": "The goverment ID or passport number provided when registering with Ituran.", + "id_or_passport": "The government ID or passport number provided when registering with Ituran.", "phone_number": "The mobile phone number provided when registering with Ituran. A one-time password will be sent to this mobile number." } }, @@ -18,6 +18,10 @@ "data_description": { "otp": "A one-time-password sent as a text message to the mobile phone number provided before." } + }, + "reauth_confirm": { + "title": "[%key:common::config_flow::title::reauth%]", + "description": "A new one-time password will be sent to {phone_number}." 
} }, "error": { @@ -27,15 +31,16 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" } }, "exceptions": { "api_error": { - "message": "An error occured while communicating with the Ituran service." + "message": "An error occurred while communicating with the Ituran service." }, "auth_error": { - "message": "Failed authenticating with the Ituran service, please remove and re-add integration." + "message": "Failed authenticating with the Ituran service, please reauthenticate the integration." } } } diff --git a/tests/components/ituran/test_config_flow.py b/tests/components/ituran/test_config_flow.py index 0e0f6f63b9a..19253103ad7 100644 --- a/tests/components/ituran/test_config_flow.py +++ b/tests/components/ituran/test_config_flow.py @@ -16,8 +16,11 @@ from homeassistant.config_entries import SOURCE_USER, ConfigFlowResult from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from . import setup_integration from .const import MOCK_CONFIG_DATA +from tests.common import MockConfigEntry + async def __do_successful_user_step( hass: HomeAssistant, result: ConfigFlowResult, mock_ituran: AsyncMock @@ -209,3 +212,43 @@ async def test_already_authenticated( assert result["data"][CONF_PHONE_NUMBER] == MOCK_CONFIG_DATA[CONF_PHONE_NUMBER] assert result["data"][CONF_MOBILE_ID] == MOCK_CONFIG_DATA[CONF_MOBILE_ID] assert result["result"].unique_id == MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT] + + +async def test_reauth( + hass: HomeAssistant, + mock_ituran: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reauthenticating.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + result = await __do_successful_user_step(hass, result, mock_ituran) + await __do_successful_otp_step(hass, result, mock_ituran) + + await setup_integration(hass, mock_config_entry) + result = await mock_config_entry.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"] is None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "otp" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_OTP: "123456", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" From 6f278fb8560ffbb2d89e62ae0c266e9da3a939a3 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Mon, 16 Dec 2024 14:13:19 +0100 Subject: [PATCH 0726/1198] Remove custom "unknown" state from Fronius Enum sensor (#133361) --- homeassistant/components/fronius/const.py | 8 +++----- homeassistant/components/fronius/strings.json | 4 +--- .../fronius/snapshots/test_sensor.ambr | 16 ---------------- 3 files changed, 4 insertions(+), 24 deletions(-) diff --git a/homeassistant/components/fronius/const.py b/homeassistant/components/fronius/const.py index 083085270e0..273f1acab41 100644 --- a/homeassistant/components/fronius/const.py +++ b/homeassistant/components/fronius/const.py @@ -42,8 +42,6 @@ class InverterStatusCodeOption(StrEnum): IDLE = "idle" READY = "ready" SLEEPING = "sleeping" - UNKNOWN = "unknown" 
- INVALID = "invalid" _INVERTER_STATUS_CODES: Final[dict[int, InverterStatusCodeOption]] = { @@ -61,13 +59,13 @@ _INVERTER_STATUS_CODES: Final[dict[int, InverterStatusCodeOption]] = { 11: InverterStatusCodeOption.IDLE, 12: InverterStatusCodeOption.READY, 13: InverterStatusCodeOption.SLEEPING, - 255: InverterStatusCodeOption.UNKNOWN, + # 255: "Unknown" is handled by `None` state - same as the invalid codes. } -def get_inverter_status_message(code: StateType) -> InverterStatusCodeOption: +def get_inverter_status_message(code: StateType) -> InverterStatusCodeOption | None: """Return a status message for a given status code.""" - return _INVERTER_STATUS_CODES.get(code, InverterStatusCodeOption.INVALID) # type: ignore[arg-type] + return _INVERTER_STATUS_CODES.get(code) # type: ignore[arg-type] class MeterLocationCodeOption(StrEnum): diff --git a/homeassistant/components/fronius/strings.json b/homeassistant/components/fronius/strings.json index 51cb087efc2..e2740c76696 100644 --- a/homeassistant/components/fronius/strings.json +++ b/homeassistant/components/fronius/strings.json @@ -86,9 +86,7 @@ "error": "Error", "idle": "Idle", "ready": "Ready", - "sleeping": "Sleeping", - "unknown": "Unknown", - "invalid": "Invalid" + "sleeping": "Sleeping" } }, "led_state": { diff --git a/tests/components/fronius/snapshots/test_sensor.ambr b/tests/components/fronius/snapshots/test_sensor.ambr index 700c09da2f6..8f8c9d919fc 100644 --- a/tests/components/fronius/snapshots/test_sensor.ambr +++ b/tests/components/fronius/snapshots/test_sensor.ambr @@ -560,8 +560,6 @@ 'idle', 'ready', 'sleeping', - 'unknown', - 'invalid', ]), }), 'config_entry_id': , @@ -605,8 +603,6 @@ 'idle', 'ready', 'sleeping', - 'unknown', - 'invalid', ]), }), 'context': , @@ -3815,8 +3811,6 @@ 'idle', 'ready', 'sleeping', - 'unknown', - 'invalid', ]), }), 'config_entry_id': , @@ -3860,8 +3854,6 @@ 'idle', 'ready', 'sleeping', - 'unknown', - 'invalid', ]), }), 'context': , @@ -7234,8 +7226,6 @@ 'idle', 'ready', 'sleeping', - 'unknown', - 'invalid', ]), }), 'config_entry_id': , @@ -7279,8 +7269,6 @@ 'idle', 'ready', 'sleeping', - 'unknown', - 'invalid', ]), }), 'context': , @@ -7949,8 +7937,6 @@ 'idle', 'ready', 'sleeping', - 'unknown', - 'invalid', ]), }), 'config_entry_id': , @@ -7994,8 +7980,6 @@ 'idle', 'ready', 'sleeping', - 'unknown', - 'invalid', ]), }), 'context': , From a34992c0b517521b312f18812e431f5acedac664 Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Mon, 16 Dec 2024 15:13:50 +0100 Subject: [PATCH 0727/1198] Velbus add PARALLEL_UPDATES to all platforms (#133155) --- homeassistant/components/velbus/binary_sensor.py | 2 ++ homeassistant/components/velbus/button.py | 2 ++ homeassistant/components/velbus/climate.py | 2 ++ homeassistant/components/velbus/cover.py | 2 ++ homeassistant/components/velbus/light.py | 2 ++ homeassistant/components/velbus/quality_scale.yaml | 2 +- homeassistant/components/velbus/select.py | 2 ++ homeassistant/components/velbus/sensor.py | 2 ++ homeassistant/components/velbus/switch.py | 2 ++ 9 files changed, 17 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/velbus/binary_sensor.py b/homeassistant/components/velbus/binary_sensor.py index 584f28e394a..88dc994efe8 100644 --- a/homeassistant/components/velbus/binary_sensor.py +++ b/homeassistant/components/velbus/binary_sensor.py @@ -9,6 +9,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import VelbusConfigEntry from .entity import VelbusEntity +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/velbus/button.py b/homeassistant/components/velbus/button.py index 910ae59b69e..fc943159123 100644 --- a/homeassistant/components/velbus/button.py +++ b/homeassistant/components/velbus/button.py @@ -15,6 +15,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import VelbusConfigEntry from .entity import VelbusEntity, api_call +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/velbus/climate.py b/homeassistant/components/velbus/climate.py index e9128ef7de1..b2f3077ecee 100644 --- a/homeassistant/components/velbus/climate.py +++ b/homeassistant/components/velbus/climate.py @@ -20,6 +20,8 @@ from . import VelbusConfigEntry from .const import DOMAIN, PRESET_MODES from .entity import VelbusEntity, api_call +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/velbus/cover.py b/homeassistant/components/velbus/cover.py index 9257dd3f36f..2ddea37f2d6 100644 --- a/homeassistant/components/velbus/cover.py +++ b/homeassistant/components/velbus/cover.py @@ -17,6 +17,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import VelbusConfigEntry from .entity import VelbusEntity, api_call +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/velbus/light.py b/homeassistant/components/velbus/light.py index afe3104aa9a..1adf52a8198 100644 --- a/homeassistant/components/velbus/light.py +++ b/homeassistant/components/velbus/light.py @@ -28,6 +28,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import VelbusConfigEntry from .entity import VelbusEntity, api_call +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/velbus/quality_scale.yaml b/homeassistant/components/velbus/quality_scale.yaml index 9a48e84da93..477b6768e71 100644 --- a/homeassistant/components/velbus/quality_scale.yaml +++ b/homeassistant/components/velbus/quality_scale.yaml @@ -36,7 +36,7 @@ rules: entity-unavailable: todo integration-owner: done log-when-unavailable: done - parallel-updates: todo + parallel-updates: done reauthentication-flow: status: exempt comment: | diff --git a/homeassistant/components/velbus/select.py b/homeassistant/components/velbus/select.py index c0a0a5f532d..6c2dfe0a3b1 100644 --- a/homeassistant/components/velbus/select.py +++ b/homeassistant/components/velbus/select.py @@ -10,6 +10,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import VelbusConfigEntry from .entity import VelbusEntity, api_call +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/velbus/sensor.py b/homeassistant/components/velbus/sensor.py index 2c341ea851d..77833da3ee1 100644 --- a/homeassistant/components/velbus/sensor.py +++ b/homeassistant/components/velbus/sensor.py @@ -15,6 +15,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import VelbusConfigEntry from .entity import VelbusEntity +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/velbus/switch.py b/homeassistant/components/velbus/switch.py index dccb0a02ffa..8256e716d4f 100644 --- a/homeassistant/components/velbus/switch.py +++ b/homeassistant/components/velbus/switch.py @@ -11,6 +11,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import VelbusConfigEntry from .entity import VelbusEntity, api_call +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, From 14f4f8aeb59481776525663f75ddf4ec0f3a9cd3 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 16 Dec 2024 15:37:29 +0100 Subject: [PATCH 0728/1198] Update hassio backup agents on mount added or removed (#133344) * Update hassio backup agents on mount added or removed * Address review comments --- homeassistant/components/hassio/backup.py | 34 +++++++++++++ tests/components/conftest.py | 3 ++ tests/components/hassio/test_backup.py | 62 +++++++++++++++++++++++ 3 files changed, 99 insertions(+) diff --git a/homeassistant/components/hassio/backup.py b/homeassistant/components/hassio/backup.py index e544a56a3c8..0353255fe7b 100644 --- a/homeassistant/components/hassio/backup.py +++ b/homeassistant/components/hassio/backup.py @@ -4,6 +4,7 @@ from __future__ import annotations import asyncio from collections.abc import AsyncIterator, Callable, Coroutine, Mapping +import logging from pathlib import Path from typing import Any, cast @@ -32,6 +33,8 @@ from .const import DOMAIN, EVENT_SUPERVISOR_EVENT from .handler import get_supervisor_client LOCATION_CLOUD_BACKUP = ".cloud_backup" +MOUNT_JOBS = ("mount_manager_create_mount", "mount_manager_remove_mount") +_LOGGER = logging.getLogger(__name__) async def async_get_backup_agents( @@ -49,6 +52,37 @@ async def async_get_backup_agents( return agents +@callback +def async_register_backup_agents_listener( + hass: HomeAssistant, + *, + listener: Callable[[], None], + **kwargs: Any, +) -> Callable[[], None]: + """Register a listener to be called when agents are added or removed.""" + + @callback + def unsub() -> None: + """Unsubscribe from job events.""" + unsub_signal() + + @callback + def handle_signal(data: Mapping[str, Any]) -> None: + """Handle a job signal.""" + if ( + data.get("event") != "job" + or not (event_data := data.get("data")) + or event_data.get("name") not in MOUNT_JOBS + or event_data.get("done") is not True + ): + return + _LOGGER.debug("Mount added or removed %s, calling listener", data) + listener() + + unsub_signal = async_dispatcher_connect(hass, EVENT_SUPERVISOR_EVENT, handle_signal) + return unsub + + def _backup_details_to_agent_backup( details: supervisor_backups.BackupComplete, ) -> AgentBackup: diff --git a/tests/components/conftest.py b/tests/components/conftest.py index ac30d105299..3828cc5ff37 100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -514,11 +514,14 @@ def resolution_suggestions_for_issue_fixture(supervisor_client: AsyncMock) -> As @pytest.fixture(name="supervisor_client") def supervisor_client() -> Generator[AsyncMock]: """Mock the supervisor client.""" + mounts_info_mock = AsyncMock(spec_set=["mounts"]) + mounts_info_mock.mounts = [] supervisor_client = AsyncMock() supervisor_client.addons = AsyncMock() supervisor_client.discovery = AsyncMock() supervisor_client.homeassistant = AsyncMock() supervisor_client.host = AsyncMock() + supervisor_client.mounts.info.return_value = 
mounts_info_mock supervisor_client.os = AsyncMock() supervisor_client.resolution = AsyncMock() supervisor_client.supervisor = AsyncMock() diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py index 660753bd815..3e928bc996b 100644 --- a/tests/components/hassio/test_backup.py +++ b/tests/components/hassio/test_backup.py @@ -231,6 +231,68 @@ async def test_agent_delete_backup( supervisor_client.backups.remove_backup.assert_called_once_with(backup_id) +@pytest.mark.usefixtures("hassio_client") +@pytest.mark.parametrize( + ("event_data", "mount_info_calls"), + [ + ( + { + "event": "job", + "data": {"name": "mount_manager_create_mount", "done": True}, + }, + 1, + ), + ( + { + "event": "job", + "data": {"name": "mount_manager_create_mount", "done": False}, + }, + 0, + ), + ( + { + "event": "job", + "data": {"name": "mount_manager_remove_mount", "done": True}, + }, + 1, + ), + ( + { + "event": "job", + "data": {"name": "mount_manager_remove_mount", "done": False}, + }, + 0, + ), + ({"event": "job", "data": {"name": "other_job", "done": True}}, 0), + ( + { + "event": "other_event", + "data": {"name": "mount_manager_remove_mount", "done": True}, + }, + 0, + ), + ], +) +async def test_agents_notify_on_mount_added_removed( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, + event_data: dict[str, Any], + mount_info_calls: int, +) -> None: + """Test the listener is called when mounts are added or removed.""" + client = await hass_ws_client(hass) + assert supervisor_client.mounts.info.call_count == 1 + assert supervisor_client.mounts.info.call_args[0] == () + supervisor_client.mounts.info.reset_mock() + + await client.send_json_auto_id({"type": "supervisor/event", "data": event_data}) + response = await client.receive_json() + assert response["success"] + await hass.async_block_till_done() + assert supervisor_client.mounts.info.call_count == mount_info_calls + + @pytest.mark.usefixtures("hassio_client") async def test_reader_writer_create( hass: HomeAssistant, From 5adb7f4542ad116672e16580348fb9b14ea211b6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=85ke=20Strandberg?= Date: Mon, 16 Dec 2024 15:42:15 +0100 Subject: [PATCH 0729/1198] Translate exception messages in myUplink (#131626) * Translate exceptions * Add one more translation * Adding more translations * Make message easier to understand for end-user * Clarify message * Address review comments --- homeassistant/components/myuplink/__init__.py | 20 +++++++++++++++---- homeassistant/components/myuplink/number.py | 10 ++++++++-- .../components/myuplink/quality_scale.yaml | 4 +--- homeassistant/components/myuplink/select.py | 9 ++++++++- .../components/myuplink/strings.json | 20 +++++++++++++++++++ homeassistant/components/myuplink/switch.py | 8 ++++++-- 6 files changed, 59 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/myuplink/__init__.py b/homeassistant/components/myuplink/__init__.py index e833c5fcd8e..5ad114e973e 100644 --- a/homeassistant/components/myuplink/__init__.py +++ b/homeassistant/components/myuplink/__init__.py @@ -55,13 +55,25 @@ async def async_setup_entry( await auth.async_get_access_token() except ClientResponseError as err: if err.status in {HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN}: - raise ConfigEntryAuthFailed from err - raise ConfigEntryNotReady from err + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="config_entry_auth_failed", + ) from err + raise ConfigEntryNotReady( + 
translation_domain=DOMAIN, + translation_key="config_entry_not_ready", + ) from err except ClientError as err: - raise ConfigEntryNotReady from err + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="config_entry_not_ready", + ) from err if set(config_entry.data["token"]["scope"].split(" ")) != set(OAUTH2_SCOPES): - raise ConfigEntryAuthFailed("Incorrect OAuth2 scope") + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="incorrect_oauth2_scope", + ) # Setup MyUplinkAPI and coordinator for data fetch api = MyUplinkAPI(auth) diff --git a/homeassistant/components/myuplink/number.py b/homeassistant/components/myuplink/number.py index 3d336953396..e1cbd393947 100644 --- a/homeassistant/components/myuplink/number.py +++ b/homeassistant/components/myuplink/number.py @@ -10,7 +10,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import MyUplinkConfigEntry, MyUplinkDataCoordinator -from .const import F_SERIES +from .const import DOMAIN, F_SERIES from .entity import MyUplinkEntity from .helpers import find_matching_platform, skip_entity, transform_model_series @@ -137,7 +137,13 @@ class MyUplinkNumber(MyUplinkEntity, NumberEntity): ) except ClientError as err: raise HomeAssistantError( - f"Failed to set new value {value} for {self.point_id}/{self.entity_id}" + translation_domain=DOMAIN, + translation_key="set_number_error", + translation_placeholders={ + "entity": self.entity_id, + "point": self.point_id, + "value": str(value), + }, ) from err await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/myuplink/quality_scale.yaml b/homeassistant/components/myuplink/quality_scale.yaml index dbe771f7eb2..be0780a206c 100644 --- a/homeassistant/components/myuplink/quality_scale.yaml +++ b/homeassistant/components/myuplink/quality_scale.yaml @@ -78,9 +78,7 @@ rules: It is not feasible to use the API names as translation keys as they can change between firmware and API upgrades and the number of appliance models and firmware releases are huge. Entity names translations are therefore not implemented for the time being. - exception-translations: - status: todo - comment: PR pending review \#191937 + exception-translations: done icon-translations: done reconfiguration-flow: done repair-issues: diff --git a/homeassistant/components/myuplink/select.py b/homeassistant/components/myuplink/select.py index 96058b916b3..0074d1c75ff 100644 --- a/homeassistant/components/myuplink/select.py +++ b/homeassistant/components/myuplink/select.py @@ -12,6 +12,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import MyUplinkConfigEntry, MyUplinkDataCoordinator +from .const import DOMAIN from .entity import MyUplinkEntity from .helpers import find_matching_platform, skip_entity @@ -86,7 +87,13 @@ class MyUplinkSelect(MyUplinkEntity, SelectEntity): ) except ClientError as err: raise HomeAssistantError( - f"Failed to set new option {self.options_rev[option]} for {self.point_id}/{self.entity_id}" + translation_domain=DOMAIN, + translation_key="set_select_error", + translation_placeholders={ + "entity": self.entity_id, + "option": self.options_rev[option], + "point": self.point_id, + }, ) from err await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/myuplink/strings.json b/homeassistant/components/myuplink/strings.json index d3d2f198448..939aa2f17c8 100644 --- a/homeassistant/components/myuplink/strings.json +++ b/homeassistant/components/myuplink/strings.json @@ -42,5 +42,25 @@ "name": "Status" } } + }, + "exceptions": { + "config_entry_auth_failed": { + "message": "Error while logging in to the API. Please check your credentials." + }, + "config_entry_not_ready": { + "message": "Error while loading the integration." + }, + "incorrect_oauth2_scope": { + "message": "Stored permissions are invalid. Please login again to update permissions." + }, + "set_number_error": { + "message": "Failed to set new value {value} for {point}/{entity}." + }, + "set_select_error": { + "message": "Failed to set new option {option} for {point}/{entity}." + }, + "set_switch_error": { + "message": "Failed to set state for {entity}." + } } } diff --git a/homeassistant/components/myuplink/switch.py b/homeassistant/components/myuplink/switch.py index 75ba6bd7819..3addc7ce6a9 100644 --- a/homeassistant/components/myuplink/switch.py +++ b/homeassistant/components/myuplink/switch.py @@ -12,7 +12,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import MyUplinkConfigEntry, MyUplinkDataCoordinator -from .const import F_SERIES +from .const import DOMAIN, F_SERIES from .entity import MyUplinkEntity from .helpers import find_matching_platform, skip_entity, transform_model_series @@ -129,7 +129,11 @@ class MyUplinkDevicePointSwitch(MyUplinkEntity, SwitchEntity): ) except aiohttp.ClientError as err: raise HomeAssistantError( - f"Failed to set state for {self.entity_id}" + translation_domain=DOMAIN, + translation_key="set_switch_error", + translation_placeholders={ + "entity": self.entity_id, + }, ) from err await self.coordinator.async_request_refresh() From cefb4a4ccc37431f144781cabba23ad31d9d30bc Mon Sep 17 00:00:00 2001 From: Andrew Sayre <6730289+andrewsayre@users.noreply.github.com> Date: Mon, 16 Dec 2024 10:08:14 -0600 Subject: [PATCH 0730/1198] Add HEOS reconfigure flow (#133326) * Add reconfig flow * Add reconfigure tests * Mark reconfigure_flow done * Review feedback * Update tests to always end in terminal state * Correct test name and docstring --- homeassistant/components/heos/config_flow.py | 46 +++++++++--- .../components/heos/quality_scale.yaml | 2 +- homeassistant/components/heos/strings.json | 21 ++++-- tests/components/heos/conftest.py | 5 +- tests/components/heos/test_config_flow.py | 74 ++++++++++++++++++- 5 files changed, 129 insertions(+), 19 deletions(-) diff --git a/homeassistant/components/heos/config_flow.py b/homeassistant/components/heos/config_flow.py index e8a4dbf7b63..f861247d1a9 100644 --- a/homeassistant/components/heos/config_flow.py +++ b/homeassistant/components/heos/config_flow.py @@ -15,7 +15,20 @@ from .const import DOMAIN def format_title(host: str) -> str: """Format the title for config entries.""" - return f"Controller ({host})" + return f"HEOS System (via {host})" + + +async def _validate_host(host: str, errors: dict[str, str]) -> bool: + """Validate host is reachable, return True, otherwise populate errors and return False.""" + heos = Heos(host) + try: + await heos.connect() + except HeosError: + errors[CONF_HOST] = "cannot_connect" + return False + finally: + await heos.disconnect() + return True class HeosFlowHandler(ConfigFlow, domain=DOMAIN): @@ -47,23 +60,17 @@ class HeosFlowHandler(ConfigFlow, domain=DOMAIN): self.hass.data.setdefault(DOMAIN, {}) await self.async_set_unique_id(DOMAIN) # Try connecting to host if provided - errors = {} + errors: dict[str, str] = {} host = None if user_input is not None: host = user_input[CONF_HOST] # Map host from friendly name if in discovered hosts host = self.hass.data[DOMAIN].get(host, host) - heos = Heos(host) - try: - await heos.connect() - self.hass.data.pop(DOMAIN) + if await _validate_host(host, errors): + self.hass.data.pop(DOMAIN) # Remove discovery data return self.async_create_entry( title=format_title(host), data={CONF_HOST: host} ) - except HeosError: - errors[CONF_HOST] = "cannot_connect" - finally: - await heos.disconnect() # Return form host_type = ( @@ -74,3 +81,22 @@ class HeosFlowHandler(ConfigFlow, domain=DOMAIN): data_schema=vol.Schema({vol.Required(CONF_HOST, default=host): host_type}), errors=errors, ) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Allow reconfiguration of entry.""" + entry = self._get_reconfigure_entry() + host = entry.data[CONF_HOST] # Get current host value + errors: dict[str, str] = {} + if user_input is not None: + host = user_input[CONF_HOST] + if await _validate_host(host, errors): + return self.async_update_reload_and_abort( + entry, 
data_updates={CONF_HOST: host}
+                )
+        return self.async_show_form(
+            step_id="reconfigure",
+            data_schema=vol.Schema({vol.Required(CONF_HOST, default=host): str}),
+            errors=errors,
+        )
diff --git a/homeassistant/components/heos/quality_scale.yaml b/homeassistant/components/heos/quality_scale.yaml
index 861ca750780..39c25486e52 100644
--- a/homeassistant/components/heos/quality_scale.yaml
+++ b/homeassistant/components/heos/quality_scale.yaml
@@ -88,7 +88,7 @@ rules:
   entity-translations: done
   exception-translations: todo
   icon-translations: done
-  reconfiguration-flow: todo
+  reconfiguration-flow: done
   repair-issues: todo
   stale-devices: todo
   # Platinum
diff --git a/homeassistant/components/heos/strings.json b/homeassistant/components/heos/strings.json
index 20a8a2e978b..fe4fc63b449 100644
--- a/homeassistant/components/heos/strings.json
+++ b/homeassistant/components/heos/strings.json
@@ -2,13 +2,23 @@
   "config": {
     "step": {
       "user": {
-        "title": "Connect to Heos",
-        "description": "Please enter the host name or IP address of a Heos device (preferably one connected via wire to the network).",
+        "title": "Connect to HEOS",
+        "description": "Please enter the host name or IP address of a HEOS-capable product to access your HEOS System.",
         "data": {
           "host": "[%key:common::config_flow::data::host%]"
         },
         "data_description": {
-          "host": "The hostname or IP address of your HEOS device."
+          "host": "Host name or IP address of a HEOS-capable product (preferably one connected via wire to the network)."
+        }
+      },
+      "reconfigure": {
+        "title": "Reconfigure HEOS",
+        "description": "Change the host name or IP address of the HEOS-capable product used to access your HEOS System.",
+        "data": {
+          "host": "[%key:common::config_flow::data::host%]"
+        },
+        "data_description": {
+          "host": "[%key:component::heos::config::step::user::data_description::host%]"
         }
       }
     },
@@ -17,13 +27,14 @@
     },
     "abort": {
       "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
+      "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
       "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]"
     }
   },
   "services": {
     "sign_in": {
       "name": "Sign in",
-      "description": "Signs the controller in to a HEOS account.",
+      "description": "Signs in to a HEOS account.",
       "fields": {
         "username": {
           "name": "[%key:common::config_flow::data::username%]",
@@ -37,7 +48,7 @@
     },
     "sign_out": {
       "name": "Sign out",
-      "description": "Signs the controller out of the HEOS account."
+      "description": "Signs out of the HEOS account."
} } } diff --git a/tests/components/heos/conftest.py b/tests/components/heos/conftest.py index 95a388d87a8..9ea3341304a 100644 --- a/tests/components/heos/conftest.py +++ b/tests/components/heos/conftest.py @@ -27,7 +27,10 @@ from tests.common import MockConfigEntry def config_entry_fixture(): """Create a mock HEOS config entry.""" return MockConfigEntry( - domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, title="Controller (127.0.0.1)" + domain=DOMAIN, + data={CONF_HOST: "127.0.0.1"}, + title="HEOS System (via 127.0.0.1)", + unique_id=DOMAIN, ) diff --git a/tests/components/heos/test_config_flow.py b/tests/components/heos/test_config_flow.py index 464b62df157..38382a81794 100644 --- a/tests/components/heos/test_config_flow.py +++ b/tests/components/heos/test_config_flow.py @@ -54,7 +54,7 @@ async def test_create_entry_when_host_valid(hass: HomeAssistant, controller) -> ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["result"].unique_id == DOMAIN - assert result["title"] == "Controller (127.0.0.1)" + assert result["title"] == "HEOS System (via 127.0.0.1)" assert result["data"] == data assert controller.connect.call_count == 2 # Also called in async_setup_entry assert controller.disconnect.call_count == 1 @@ -73,7 +73,7 @@ async def test_create_entry_when_friendly_name_valid( assert result["type"] is FlowResultType.CREATE_ENTRY assert result["result"].unique_id == DOMAIN - assert result["title"] == "Controller (127.0.0.1)" + assert result["title"] == "HEOS System (via 127.0.0.1)" assert result["data"] == {CONF_HOST: "127.0.0.1"} assert controller.connect.call_count == 2 # Also called in async_setup_entry assert controller.disconnect.call_count == 1 @@ -120,3 +120,73 @@ async def test_discovery_flow_aborts_already_setup( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "single_instance_allowed" + + +async def test_reconfigure_validates_and_updates_config( + hass: HomeAssistant, config_entry, controller +) -> None: + """Test reconfigure validates host and successfully updates.""" + config_entry.add_to_hass(hass) + result = await config_entry.start_reconfigure_flow(hass) + assert config_entry.data[CONF_HOST] == "127.0.0.1" + + # Test reconfigure initially shows form with current host value. + host = next( + key.default() for key in result["data_schema"].schema if key == CONF_HOST + ) + assert host == "127.0.0.1" + assert result["errors"] == {} + assert result["step_id"] == "reconfigure" + assert result["type"] is FlowResultType.FORM + + # Test reconfigure successfully updates. 
+ result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "127.0.0.2"}, + ) + assert controller.connect.call_count == 2 # Also called when entry reloaded + assert controller.disconnect.call_count == 1 + assert config_entry.data == {CONF_HOST: "127.0.0.2"} + assert config_entry.unique_id == DOMAIN + assert result["reason"] == "reconfigure_successful" + assert result["type"] is FlowResultType.ABORT + + +async def test_reconfigure_cannot_connect_recovers( + hass: HomeAssistant, config_entry, controller +) -> None: + """Test reconfigure cannot connect and recovers.""" + controller.connect.side_effect = HeosError() + config_entry.add_to_hass(hass) + result = await config_entry.start_reconfigure_flow(hass) + assert config_entry.data[CONF_HOST] == "127.0.0.1" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "127.0.0.2"}, + ) + + assert controller.connect.call_count == 1 + assert controller.disconnect.call_count == 1 + host = next( + key.default() for key in result["data_schema"].schema if key == CONF_HOST + ) + assert host == "127.0.0.2" + assert result["errors"][CONF_HOST] == "cannot_connect" + assert result["step_id"] == "reconfigure" + assert result["type"] is FlowResultType.FORM + + # Test reconfigure recovers and successfully updates. + controller.connect.side_effect = None + controller.connect.reset_mock() + controller.disconnect.reset_mock() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "127.0.0.2"}, + ) + assert controller.connect.call_count == 2 # Also called when entry reloaded + assert controller.disconnect.call_count == 1 + assert config_entry.data == {CONF_HOST: "127.0.0.2"} + assert config_entry.unique_id == DOMAIN + assert result["reason"] == "reconfigure_successful" + assert result["type"] is FlowResultType.ABORT From 239767ee62a29950d4c3d694d3d237f73a08a5a1 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 16 Dec 2024 17:48:59 +0100 Subject: [PATCH 0731/1198] Set default min/max color temperature in mqtt lights (#133356) --- homeassistant/components/mqtt/light/schema_basic.py | 6 ++++-- homeassistant/components/mqtt/light/schema_json.py | 6 ++++-- homeassistant/components/mqtt/light/schema_template.py | 6 ++++-- 3 files changed, 12 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/mqtt/light/schema_basic.py b/homeassistant/components/mqtt/light/schema_basic.py index 635c552f37e..159a23d14d9 100644 --- a/homeassistant/components/mqtt/light/schema_basic.py +++ b/homeassistant/components/mqtt/light/schema_basic.py @@ -26,6 +26,8 @@ from homeassistant.components.light import ( ATTR_SUPPORTED_COLOR_MODES, ATTR_WHITE, ATTR_XY_COLOR, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, ENTITY_ID_FORMAT, ColorMode, LightEntity, @@ -264,12 +266,12 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): self._attr_min_color_temp_kelvin = ( color_util.color_temperature_mired_to_kelvin(max_mireds) if (max_mireds := config.get(CONF_MAX_MIREDS)) - else super().min_color_temp_kelvin + else DEFAULT_MIN_KELVIN ) self._attr_max_color_temp_kelvin = ( color_util.color_temperature_mired_to_kelvin(min_mireds) if (min_mireds := config.get(CONF_MIN_MIREDS)) - else super().max_color_temp_kelvin + else DEFAULT_MAX_KELVIN ) self._attr_effect_list = config.get(CONF_EFFECT_LIST) diff --git a/homeassistant/components/mqtt/light/schema_json.py b/homeassistant/components/mqtt/light/schema_json.py 
index 5880a684ec0..f6efdd3281d 100644 --- a/homeassistant/components/mqtt/light/schema_json.py +++ b/homeassistant/components/mqtt/light/schema_json.py @@ -22,6 +22,8 @@ from homeassistant.components.light import ( ATTR_TRANSITION, ATTR_WHITE, ATTR_XY_COLOR, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, DOMAIN as LIGHT_DOMAIN, ENTITY_ID_FORMAT, FLASH_LONG, @@ -276,12 +278,12 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity): self._attr_min_color_temp_kelvin = ( color_util.color_temperature_mired_to_kelvin(max_mireds) if (max_mireds := config.get(CONF_MAX_MIREDS)) - else super().min_color_temp_kelvin + else DEFAULT_MIN_KELVIN ) self._attr_max_color_temp_kelvin = ( color_util.color_temperature_mired_to_kelvin(min_mireds) if (min_mireds := config.get(CONF_MIN_MIREDS)) - else super().max_color_temp_kelvin + else DEFAULT_MAX_KELVIN ) self._attr_effect_list = config.get(CONF_EFFECT_LIST) diff --git a/homeassistant/components/mqtt/light/schema_template.py b/homeassistant/components/mqtt/light/schema_template.py index 7427d25533e..722bd864366 100644 --- a/homeassistant/components/mqtt/light/schema_template.py +++ b/homeassistant/components/mqtt/light/schema_template.py @@ -15,6 +15,8 @@ from homeassistant.components.light import ( ATTR_FLASH, ATTR_HS_COLOR, ATTR_TRANSITION, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, ENTITY_ID_FORMAT, ColorMode, LightEntity, @@ -129,12 +131,12 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity): self._attr_min_color_temp_kelvin = ( color_util.color_temperature_mired_to_kelvin(max_mireds) if (max_mireds := config.get(CONF_MAX_MIREDS)) - else super().min_color_temp_kelvin + else DEFAULT_MIN_KELVIN ) self._attr_max_color_temp_kelvin = ( color_util.color_temperature_mired_to_kelvin(min_mireds) if (min_mireds := config.get(CONF_MIN_MIREDS)) - else super().max_color_temp_kelvin + else DEFAULT_MAX_KELVIN ) self._attr_effect_list = config.get(CONF_EFFECT_LIST) From 77fb440ed414e10c5771a9ad66f13756334441e4 Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Mon, 16 Dec 2024 18:06:06 +0000 Subject: [PATCH 0732/1198] Bump `imgw-pib` to version 1.0.7 (#133364) --- homeassistant/components/imgw_pib/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/imgw_pib/manifest.json b/homeassistant/components/imgw_pib/manifest.json index b5c35f3f1eb..ce3bc14d37b 100644 --- a/homeassistant/components/imgw_pib/manifest.json +++ b/homeassistant/components/imgw_pib/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/imgw_pib", "iot_class": "cloud_polling", - "requirements": ["imgw_pib==1.0.6"] + "requirements": ["imgw_pib==1.0.7"] } diff --git a/requirements_all.txt b/requirements_all.txt index 9ffc6a8f16e..5eecf96d096 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1193,7 +1193,7 @@ iglo==1.2.7 ihcsdk==2.8.5 # homeassistant.components.imgw_pib -imgw_pib==1.0.6 +imgw_pib==1.0.7 # homeassistant.components.incomfort incomfort-client==0.6.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 25c4167a0bf..c10645dc293 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1007,7 +1007,7 @@ idasen-ha==2.6.2 ifaddr==0.2.0 # homeassistant.components.imgw_pib -imgw_pib==1.0.6 +imgw_pib==1.0.7 # homeassistant.components.incomfort incomfort-client==0.6.4 From 482ad6fbee4385eb06ea584be71e4190d06f0061 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Joakim=20S=C3=B8rensen?= Date: Mon, 16 Dec 2024 19:12:15 +0100 Subject: [PATCH 0733/1198] Increase backup upload timeout (#132990) --- homeassistant/components/cloud/backup.py | 5 +++-- tests/components/cloud/test_backup.py | 4 +++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/cloud/backup.py b/homeassistant/components/cloud/backup.py index 2c7cc9d7bd5..d394daa7dc5 100644 --- a/homeassistant/components/cloud/backup.py +++ b/homeassistant/components/cloud/backup.py @@ -7,7 +7,7 @@ from collections.abc import AsyncIterator, Callable, Coroutine import hashlib from typing import Any, Self -from aiohttp import ClientError, StreamReader +from aiohttp import ClientError, ClientTimeout, StreamReader from hass_nabucasa import Cloud, CloudError from hass_nabucasa.cloud_api import ( async_files_delete_file, @@ -151,9 +151,10 @@ class CloudBackupAgent(BackupAgent): details["url"], data=await open_stream(), headers=details["headers"] | {"content-length": str(backup.size)}, + timeout=ClientTimeout(connect=10.0, total=43200.0), # 43200s == 12h ) upload_status.raise_for_status() - except ClientError as err: + except (TimeoutError, ClientError) as err: raise BackupAgentError("Failed to upload backup") from err async def async_delete_backup( diff --git a/tests/components/cloud/test_backup.py b/tests/components/cloud/test_backup.py index d5dc8751d82..ac0ef1826de 100644 --- a/tests/components/cloud/test_backup.py +++ b/tests/components/cloud/test_backup.py @@ -372,6 +372,7 @@ async def test_agents_upload( assert f"Uploading backup {backup_id}" in caplog.text +@pytest.mark.parametrize("put_mock_kwargs", [{"status": 500}, {"exc": TimeoutError}]) @pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") async def test_agents_upload_fail_put( hass: HomeAssistant, @@ -379,6 +380,7 @@ async def test_agents_upload_fail_put( caplog: pytest.LogCaptureFixture, aioclient_mock: AiohttpClientMocker, mock_get_upload_details: Mock, + put_mock_kwargs: dict[str, Any], ) -> None: """Test agent upload backup fails.""" client = await hass_client() @@ -395,7 +397,7 @@ async def test_agents_upload_fail_put( protected=True, size=0.0, ) - aioclient_mock.put(mock_get_upload_details.return_value["url"], status=500) + aioclient_mock.put(mock_get_upload_details.return_value["url"], **put_mock_kwargs) with ( patch( From e6e9788ecda78d45a4ec5e7ff96ca4e3a7ebff06 Mon Sep 17 00:00:00 2001 From: Simon <80467011+sorgfresser@users.noreply.github.com> Date: Mon, 16 Dec 2024 18:18:09 +0000 Subject: [PATCH 0734/1198] Add quality scale to ElevenLabs (#133276) --- .../components/elevenlabs/__init__.py | 4 +- .../components/elevenlabs/config_flow.py | 12 +-- .../components/elevenlabs/quality_scale.yaml | 92 +++++++++++++++++++ homeassistant/components/elevenlabs/tts.py | 3 + script/hassfest/quality_scale.py | 1 - 5 files changed, 101 insertions(+), 11 deletions(-) create mode 100644 homeassistant/components/elevenlabs/quality_scale.yaml diff --git a/homeassistant/components/elevenlabs/__init__.py b/homeassistant/components/elevenlabs/__init__.py index db7a7f64c97..84b2b61b8ed 100644 --- a/homeassistant/components/elevenlabs/__init__.py +++ b/homeassistant/components/elevenlabs/__init__.py @@ -10,7 +10,7 @@ from elevenlabs.core import ApiError from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryError +from homeassistant.exceptions import 
ConfigEntryAuthFailed, ConfigEntryError from homeassistant.helpers.httpx_client import get_async_client from .const import CONF_MODEL @@ -49,7 +49,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: EleventLabsConfigEntry) try: model = await get_model_by_id(client, model_id) except ApiError as err: - raise ConfigEntryError("Auth failed") from err + raise ConfigEntryAuthFailed("Auth failed") from err if model is None or (not model.languages): raise ConfigEntryError("Model could not be resolved") diff --git a/homeassistant/components/elevenlabs/config_flow.py b/homeassistant/components/elevenlabs/config_flow.py index 55cdd3ea944..60df79d6eaa 100644 --- a/homeassistant/components/elevenlabs/config_flow.py +++ b/homeassistant/components/elevenlabs/config_flow.py @@ -9,12 +9,7 @@ from elevenlabs import AsyncElevenLabs from elevenlabs.core import ApiError import voluptuous as vol -from homeassistant.config_entries import ( - ConfigEntry, - ConfigFlow, - ConfigFlowResult, - OptionsFlow, -) +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.helpers.httpx_client import get_async_client @@ -24,6 +19,7 @@ from homeassistant.helpers.selector import ( SelectSelectorConfig, ) +from . import EleventLabsConfigEntry from .const import ( CONF_CONFIGURE_VOICE, CONF_MODEL, @@ -96,7 +92,7 @@ class ElevenLabsConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: EleventLabsConfigEntry, ) -> OptionsFlow: """Create the options flow.""" return ElevenLabsOptionsFlow(config_entry) @@ -105,7 +101,7 @@ class ElevenLabsConfigFlow(ConfigFlow, domain=DOMAIN): class ElevenLabsOptionsFlow(OptionsFlow): """ElevenLabs options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self, config_entry: EleventLabsConfigEntry) -> None: """Initialize options flow.""" self.api_key: str = config_entry.data[CONF_API_KEY] # id -> name diff --git a/homeassistant/components/elevenlabs/quality_scale.yaml b/homeassistant/components/elevenlabs/quality_scale.yaml new file mode 100644 index 00000000000..49f0d7518f5 --- /dev/null +++ b/homeassistant/components/elevenlabs/quality_scale.yaml @@ -0,0 +1,92 @@ +rules: + # Bronze + action-setup: + status: done + comment: > + Only entity services + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: + status: todo + comment: > + We should have every test end in either ABORT or CREATE_ENTRY. + test_invalid_api_key should assert the kind of error that is raised. + config-flow: done + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: > + Entities of this integration does not explicitly subscribe to events. + entity-unique-id: done + has-entity-name: todo + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: todo + # Silver + config-entry-unloading: done + log-when-unavailable: todo + entity-unavailable: + status: exempt + comment: > + There is no state in the TTS platform and we can't check poll if the TTS service is available. 
+ action-exceptions: done + reauthentication-flow: todo + parallel-updates: done + test-coverage: todo + integration-owner: done + docs-installation-parameters: todo + docs-configuration-parameters: todo + + # Gold + entity-translations: todo + entity-device-class: + status: exempt + comment: There is no device class for Text To Speech entities. + devices: done + entity-category: done + entity-disabled-by-default: todo + discovery: + status: exempt + comment: > + This is not possible because there is no physical device. + stale-devices: + status: exempt + comment: > + This is not possible because there is no physical device. + diagnostics: todo + exception-translations: todo + icon-translations: todo + reconfiguration-flow: + status: todo + comment: > + I imagine this could be useful if the default voice is deleted from voice lab. + dynamic-devices: + status: exempt + comment: | + This is not possible because there is no physical device. + discovery-update-info: + status: exempt + comment: > + This is not needed because there are no physical devices. + repair-issues: todo + docs-use-cases: done + docs-supported-devices: + status: exempt + comment: > + This integration does not support any devices. + docs-supported-functions: todo + docs-data-update: todo + docs-known-limitations: todo + docs-troubleshooting: todo + docs-examples: todo + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/elevenlabs/tts.py b/homeassistant/components/elevenlabs/tts.py index 8b016b6af8b..c96a7161b72 100644 --- a/homeassistant/components/elevenlabs/tts.py +++ b/homeassistant/components/elevenlabs/tts.py @@ -16,6 +16,7 @@ from homeassistant.components.tts import ( TtsAudioType, Voice, ) +from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo @@ -38,6 +39,7 @@ from .const import ( ) _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 def to_voice_settings(options: MappingProxyType[str, Any]) -> VoiceSettings: @@ -84,6 +86,7 @@ class ElevenLabsTTSEntity(TextToSpeechEntity): """The ElevenLabs API entity.""" _attr_supported_options = [ATTR_VOICE] + _attr_entity_category = EntityCategory.CONFIG def __init__( self, diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 43b4adc90e9..5ad3467dd79 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -338,7 +338,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "eight_sleep", "electrasmart", "electric_kiwi", - "elevenlabs", "eliqonline", "elkm1", "elmax", From 34ab3e033f186fe3e980587eab30c10fac0a1e88 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 16 Dec 2024 19:23:05 +0100 Subject: [PATCH 0735/1198] Remove support for live recorder data post migration of entity IDs (#133370) --- homeassistant/components/recorder/migration.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index ec9d290049f..b28ca4399c8 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -2738,14 +2738,13 @@ class EventIDPostMigration(BaseRunTimeMigration): return DataMigrationStatus(needs_migrate=False, migration_done=True) -class EntityIDPostMigration(BaseMigrationWithQuery, BaseRunTimeMigration): 
+class EntityIDPostMigration(BaseMigrationWithQuery, BaseOffLineMigration): """Migration to remove old entity_id strings from states. Introduced in HA Core 2023.4 by PR #89557. """ migration_id = "entity_id_post_migration" - task = MigrationTask index_to_drop = (TABLE_STATES, LEGACY_STATES_ENTITY_ID_LAST_UPDATED_INDEX) def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: @@ -2758,16 +2757,16 @@ class EntityIDPostMigration(BaseMigrationWithQuery, BaseRunTimeMigration): return has_used_states_entity_ids() -NON_LIVE_DATA_MIGRATORS = ( +NON_LIVE_DATA_MIGRATORS: tuple[type[BaseOffLineMigration], ...] = ( StatesContextIDMigration, # Introduced in HA Core 2023.4 EventsContextIDMigration, # Introduced in HA Core 2023.4 EventTypeIDMigration, # Introduced in HA Core 2023.4 by PR #89465 EntityIDMigration, # Introduced in HA Core 2023.4 by PR #89557 + EntityIDPostMigration, # Introduced in HA Core 2023.4 by PR #89557 ) -LIVE_DATA_MIGRATORS = ( +LIVE_DATA_MIGRATORS: tuple[type[BaseRunTimeMigration], ...] = ( EventIDPostMigration, # Introduced in HA Core 2023.4 by PR #89901 - EntityIDPostMigration, # Introduced in HA Core 2023.4 by PR #89557 ) From 6a54edce1991c60381fc21ad7d6a6bdfb2cef2b3 Mon Sep 17 00:00:00 2001 From: Alexandre CUER Date: Mon, 16 Dec 2024 19:26:47 +0100 Subject: [PATCH 0736/1198] Gives a friendly name to emoncms entities if unit is not specified (#133358) --- homeassistant/components/emoncms/sensor.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/emoncms/sensor.py b/homeassistant/components/emoncms/sensor.py index 9273c24c7dc..291ecad0bd3 100644 --- a/homeassistant/components/emoncms/sensor.py +++ b/homeassistant/components/emoncms/sensor.py @@ -317,7 +317,7 @@ async def async_setup_entry( EmonCmsSensor( coordinator, unique_id, - elem["unit"], + elem.get("unit"), name, idx, ) @@ -353,6 +353,7 @@ class EmonCmsSensor(CoordinatorEntity[EmoncmsCoordinator], SensorEntity): self.entity_description = description else: self._attr_native_unit_of_measurement = unit_of_measurement + self._attr_name = f"{name} {elem[FEED_NAME]}" self._update_attributes(elem) def _update_attributes(self, elem: dict[str, Any]) -> None: From 2da7a93139b868088924b0ba7e4632624d1f0ac1 Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Mon, 16 Dec 2024 20:53:17 +0100 Subject: [PATCH 0737/1198] Add switch platform to local_slide (#133369) --- .../components/slide_local/__init__.py | 2 +- .../components/slide_local/strings.json | 5 ++ .../components/slide_local/switch.py | 56 +++++++++++++++++ .../slide_local/snapshots/test_switch.ambr | 48 +++++++++++++++ tests/components/slide_local/test_switch.py | 61 +++++++++++++++++++ 5 files changed, 171 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/slide_local/switch.py create mode 100644 tests/components/slide_local/snapshots/test_switch.ambr create mode 100644 tests/components/slide_local/test_switch.py diff --git a/homeassistant/components/slide_local/__init__.py b/homeassistant/components/slide_local/__init__.py index 6f329477600..5b4867bf337 100644 --- a/homeassistant/components/slide_local/__init__.py +++ b/homeassistant/components/slide_local/__init__.py @@ -8,7 +8,7 @@ from homeassistant.core import HomeAssistant from .coordinator import SlideCoordinator -PLATFORMS = [Platform.BUTTON, Platform.COVER] +PLATFORMS = [Platform.BUTTON, Platform.COVER, Platform.SWITCH] type SlideConfigEntry = ConfigEntry[SlideCoordinator] diff --git 
a/homeassistant/components/slide_local/strings.json b/homeassistant/components/slide_local/strings.json index c593dea8ed7..24c03d2ff96 100644 --- a/homeassistant/components/slide_local/strings.json +++ b/homeassistant/components/slide_local/strings.json @@ -46,6 +46,11 @@ "calibrate": { "name": "Calibrate" } + }, + "switch": { + "touchgo": { + "name": "TouchGo" + } } }, "exceptions": { diff --git a/homeassistant/components/slide_local/switch.py b/homeassistant/components/slide_local/switch.py new file mode 100644 index 00000000000..6d357864c48 --- /dev/null +++ b/homeassistant/components/slide_local/switch.py @@ -0,0 +1,56 @@ +"""Support for Slide switch.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import SlideConfigEntry +from .coordinator import SlideCoordinator +from .entity import SlideEntity + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SlideConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up switch for Slide platform.""" + + coordinator = entry.runtime_data + + async_add_entities([SlideSwitch(coordinator)]) + + +class SlideSwitch(SlideEntity, SwitchEntity): + """Defines a Slide switch.""" + + _attr_entity_category = EntityCategory.CONFIG + _attr_translation_key = "touchgo" + _attr_device_class = SwitchDeviceClass.SWITCH + + def __init__(self, coordinator: SlideCoordinator) -> None: + """Initialize the slide switch.""" + super().__init__(coordinator) + self._attr_unique_id = f"{coordinator.data["mac"]}-touchgo" + + @property + def is_on(self) -> bool: + """Return if switch is on.""" + return self.coordinator.data["touch_go"] + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off touchgo.""" + await self.coordinator.slide.slide_set_touchgo(self.coordinator.host, False) + await self.coordinator.async_request_refresh() + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on touchgo.""" + await self.coordinator.slide.slide_set_touchgo(self.coordinator.host, True) + await self.coordinator.async_request_refresh() diff --git a/tests/components/slide_local/snapshots/test_switch.ambr b/tests/components/slide_local/snapshots/test_switch.ambr new file mode 100644 index 00000000000..e19467c283e --- /dev/null +++ b/tests/components/slide_local/snapshots/test_switch.ambr @@ -0,0 +1,48 @@ +# serializer version: 1 +# name: test_all_entities[switch.slide_bedroom_touchgo-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.slide_bedroom_touchgo', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'TouchGo', + 'platform': 'slide_local', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'touchgo', + 'unique_id': '1234567890ab-touchgo', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[switch.slide_bedroom_touchgo-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 
'slide bedroom TouchGo', + }), + 'context': , + 'entity_id': 'switch.slide_bedroom_touchgo', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/slide_local/test_switch.py b/tests/components/slide_local/test_switch.py new file mode 100644 index 00000000000..0ac9820ca10 --- /dev/null +++ b/tests/components/slide_local/test_switch.py @@ -0,0 +1,61 @@ +"""Tests for the Slide Local switch platform.""" + +from unittest.mock import AsyncMock + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TOGGLE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_platform + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_platform(hass, mock_config_entry, [Platform.SWITCH]) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.parametrize( + ("service"), + [ + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + SERVICE_TOGGLE, + ], +) +async def test_services( + hass: HomeAssistant, + service: str, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test switch.""" + await setup_platform(hass, mock_config_entry, [Platform.SWITCH]) + + await hass.services.async_call( + SWITCH_DOMAIN, + service, + { + ATTR_ENTITY_ID: "switch.slide_bedroom_touchgo", + }, + blocking=True, + ) + mock_slide_api.slide_set_touchgo.assert_called_once() From 40182fc197e22acc42976a5008c5b0de139d55ac Mon Sep 17 00:00:00 2001 From: G Johansson Date: Mon, 16 Dec 2024 21:35:55 +0100 Subject: [PATCH 0738/1198] Load sun via entity component (#132598) * Load sun via entity component * Remove unique id * Remove entity registry --- homeassistant/components/sun/__init__.py | 13 ++++++++++--- homeassistant/components/sun/entity.py | 13 ++++--------- 2 files changed, 14 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/sun/__init__.py b/homeassistant/components/sun/__init__.py index 8f6f3098ee8..f42f5450462 100644 --- a/homeassistant/components/sun/__init__.py +++ b/homeassistant/components/sun/__init__.py @@ -2,10 +2,13 @@ from __future__ import annotations +import logging + from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType # The sensor platform is pre-imported here to ensure @@ -23,6 +26,8 @@ from .entity import Sun, SunConfigEntry CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) +_LOGGER = logging.getLogger(__name__) + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Track the state of the sun.""" @@ -42,7 +47,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: SunConfigEntry) -> bool: """Set up from a config entry.""" - entry.runtime_data = sun = Sun(hass) + sun = Sun(hass) + component = 
EntityComponent[Sun](_LOGGER, DOMAIN, hass) + await component.async_add_entities([sun]) + entry.runtime_data = sun entry.async_on_unload(sun.remove_listeners) await hass.config_entries.async_forward_entry_setups(entry, [Platform.SENSOR]) return True @@ -53,6 +61,5 @@ async def async_unload_entry(hass: HomeAssistant, entry: SunConfigEntry) -> bool if unload_ok := await hass.config_entries.async_unload_platforms( entry, [Platform.SENSOR] ): - sun = entry.runtime_data - hass.states.async_remove(sun.entity_id) + await entry.runtime_data.async_remove() return unload_ok diff --git a/homeassistant/components/sun/entity.py b/homeassistant/components/sun/entity.py index 10d328afde7..925845c8b4d 100644 --- a/homeassistant/components/sun/entity.py +++ b/homeassistant/components/sun/entity.py @@ -100,9 +100,6 @@ class Sun(Entity): _attr_name = "Sun" entity_id = ENTITY_ID - # This entity is legacy and does not have a platform. - # We can't fix this easily without breaking changes. - _no_platform_reported = True location: Location elevation: Elevation @@ -122,18 +119,16 @@ class Sun(Entity): self.hass = hass self.phase: str | None = None - # This is normally done by async_internal_added_to_hass which is not called - # for sun because sun has no platform - self._state_info = { - "unrecorded_attributes": self._Entity__combined_unrecorded_attributes # type: ignore[attr-defined] - } - self._config_listener: CALLBACK_TYPE | None = None self._update_events_listener: CALLBACK_TYPE | None = None self._update_sun_position_listener: CALLBACK_TYPE | None = None self._config_listener = self.hass.bus.async_listen( EVENT_CORE_CONFIG_UPDATE, self.update_location ) + + async def async_added_to_hass(self) -> None: + """Update after entity has been added.""" + await super().async_added_to_hass() self.update_location(initial=True) @callback From 3a622218f45b8888f9aa9e1311000605c385793b Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Mon, 16 Dec 2024 21:47:31 +0100 Subject: [PATCH 0739/1198] Improvements to the LaMetric config flow tests (#133383) --- tests/components/lametric/test_config_flow.py | 330 +++++++++--------- tests/components/lametric/test_init.py | 2 +- 2 files changed, 166 insertions(+), 166 deletions(-) diff --git a/tests/components/lametric/test_config_flow.py b/tests/components/lametric/test_config_flow.py index 3fbe606c7f1..4a546122e30 100644 --- a/tests/components/lametric/test_config_flow.py +++ b/tests/components/lametric/test_config_flow.py @@ -55,25 +55,24 @@ async def test_full_cloud_import_flow_multiple_devices( DOMAIN, context={"source": SOURCE_USER} ) - assert result.get("type") is FlowResultType.MENU - assert result.get("step_id") == "choice_enter_manual_or_fetch_cloud" - assert result.get("menu_options") == ["pick_implementation", "manual_entry"] - flow_id = result["flow_id"] + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "choice_enter_manual_or_fetch_cloud" + assert result["menu_options"] == ["pick_implementation", "manual_entry"] - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "pick_implementation"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"next_step_id": "pick_implementation"} ) state = config_entry_oauth2_flow._encode_jwt( hass, { - "flow_id": flow_id, + "flow_id": result["flow_id"], "redirect_uri": "https://example.com/auth/external/callback", }, ) - assert result2.get("type") is FlowResultType.EXTERNAL_STEP - assert result2.get("url") == ( + assert 
result["type"] is FlowResultType.EXTERNAL_STEP + assert result["url"] == ( "https://developer.lametric.com/api/v2/oauth2/authorize" "?response_type=code&client_id=client" "&redirect_uri=https://example.com/auth/external/callback" @@ -96,24 +95,26 @@ async def test_full_cloud_import_flow_multiple_devices( }, ) - result3 = await hass.config_entries.flow.async_configure(flow_id) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result3.get("type") is FlowResultType.FORM - assert result3.get("step_id") == "cloud_select_device" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "cloud_select_device" - result4 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_DEVICE: "SA110405124500W00BS9"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_DEVICE: "SA110405124500W00BS9"} ) - assert result4.get("type") is FlowResultType.CREATE_ENTRY - assert result4.get("title") == "Frenck's LaMetric" - assert result4.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.title == "Frenck's LaMetric" + assert config_entry.unique_id == "SA110405124500W00BS9" + assert config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", CONF_MAC: "AA:BB:CC:DD:EE:FF", } - assert "result" in result4 - assert result4["result"].unique_id == "SA110405124500W00BS9" + assert not config_entry.options assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 1 @@ -135,25 +136,24 @@ async def test_full_cloud_import_flow_single_device( DOMAIN, context={"source": SOURCE_USER} ) - assert result.get("type") is FlowResultType.MENU - assert result.get("step_id") == "choice_enter_manual_or_fetch_cloud" - assert result.get("menu_options") == ["pick_implementation", "manual_entry"] - flow_id = result["flow_id"] + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "choice_enter_manual_or_fetch_cloud" + assert result["menu_options"] == ["pick_implementation", "manual_entry"] - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "pick_implementation"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"next_step_id": "pick_implementation"} ) state = config_entry_oauth2_flow._encode_jwt( hass, { - "flow_id": flow_id, + "flow_id": result["flow_id"], "redirect_uri": "https://example.com/auth/external/callback", }, ) - assert result2.get("type") is FlowResultType.EXTERNAL_STEP - assert result2.get("url") == ( + assert result["type"] is FlowResultType.EXTERNAL_STEP + assert result["url"] == ( "https://developer.lametric.com/api/v2/oauth2/authorize" "?response_type=code&client_id=client" "&redirect_uri=https://example.com/auth/external/callback" @@ -181,17 +181,19 @@ async def test_full_cloud_import_flow_single_device( mock_lametric_cloud.devices.return_value = [ mock_lametric_cloud.devices.return_value[0] ] - result3 = await hass.config_entries.flow.async_configure(flow_id) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result3.get("type") is FlowResultType.CREATE_ENTRY - assert result3.get("title") == "Frenck's LaMetric" - assert result3.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.title == "Frenck's LaMetric" + assert config_entry.unique_id == 
"SA110405124500W00BS9" + assert config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", CONF_MAC: "AA:BB:CC:DD:EE:FF", } - assert "result" in result3 - assert result3["result"].unique_id == "SA110405124500W00BS9" + assert not config_entry.options assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 1 @@ -209,31 +211,34 @@ async def test_full_manual( DOMAIN, context={"source": SOURCE_USER} ) - assert result.get("type") is FlowResultType.MENU - assert result.get("step_id") == "choice_enter_manual_or_fetch_cloud" - assert result.get("menu_options") == ["pick_implementation", "manual_entry"] - flow_id = result["flow_id"] + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "choice_enter_manual_or_fetch_cloud" + assert result["menu_options"] == ["pick_implementation", "manual_entry"] - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "manual_entry"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"next_step_id": "manual_entry"} ) - assert result2.get("type") is FlowResultType.FORM - assert result2.get("step_id") == "manual_entry" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "manual_entry" - result3 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key"}, ) - assert result3.get("type") is FlowResultType.CREATE_ENTRY - assert result3.get("title") == "Frenck's LaMetric" - assert result3.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + + assert config_entry.title == "Frenck's LaMetric" + assert config_entry.unique_id == "SA110405124500W00BS9" + assert config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", CONF_MAC: "AA:BB:CC:DD:EE:FF", } - assert "result" in result3 - assert result3["result"].unique_id == "SA110405124500W00BS9" + assert not config_entry.options assert len(mock_lametric.device.mock_calls) == 1 assert len(mock_lametric.notify.mock_calls) == 1 @@ -258,25 +263,24 @@ async def test_full_ssdp_with_cloud_import( DOMAIN, context={"source": SOURCE_SSDP}, data=SSDP_DISCOVERY_INFO ) - assert result.get("type") is FlowResultType.MENU - assert result.get("step_id") == "choice_enter_manual_or_fetch_cloud" - assert result.get("menu_options") == ["pick_implementation", "manual_entry"] - flow_id = result["flow_id"] + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "choice_enter_manual_or_fetch_cloud" + assert result["menu_options"] == ["pick_implementation", "manual_entry"] - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "pick_implementation"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"next_step_id": "pick_implementation"} ) state = config_entry_oauth2_flow._encode_jwt( hass, { - "flow_id": flow_id, + "flow_id": result["flow_id"], "redirect_uri": "https://example.com/auth/external/callback", }, ) - assert result2.get("type") is FlowResultType.EXTERNAL_STEP - assert result2.get("url") == ( + assert result["type"] is FlowResultType.EXTERNAL_STEP + assert result["url"] == ( "https://developer.lametric.com/api/v2/oauth2/authorize" 
"?response_type=code&client_id=client" "&redirect_uri=https://example.com/auth/external/callback" @@ -299,17 +303,18 @@ async def test_full_ssdp_with_cloud_import( }, ) - result3 = await hass.config_entries.flow.async_configure(flow_id) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result3.get("type") is FlowResultType.CREATE_ENTRY - assert result3.get("title") == "Frenck's LaMetric" - assert result3.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.title == "Frenck's LaMetric" + assert config_entry.unique_id == "SA110405124500W00BS9" + assert config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", CONF_MAC: "AA:BB:CC:DD:EE:FF", } - assert "result" in result3 - assert result3["result"].unique_id == "SA110405124500W00BS9" assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 1 @@ -327,31 +332,32 @@ async def test_full_ssdp_manual_entry( DOMAIN, context={"source": SOURCE_SSDP}, data=SSDP_DISCOVERY_INFO ) - assert result.get("type") is FlowResultType.MENU - assert result.get("step_id") == "choice_enter_manual_or_fetch_cloud" - assert result.get("menu_options") == ["pick_implementation", "manual_entry"] - flow_id = result["flow_id"] + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "choice_enter_manual_or_fetch_cloud" + assert result["menu_options"] == ["pick_implementation", "manual_entry"] - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "manual_entry"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"next_step_id": "manual_entry"} ) - assert result2.get("type") is FlowResultType.FORM - assert result2.get("step_id") == "manual_entry" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "manual_entry" - result3 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_API_KEY: "mock-api-key"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_API_KEY: "mock-api-key"} ) - assert result3.get("type") is FlowResultType.CREATE_ENTRY - assert result3.get("title") == "Frenck's LaMetric" - assert result3.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.title == "Frenck's LaMetric" + assert config_entry.unique_id == "SA110405124500W00BS9" + assert config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", CONF_MAC: "AA:BB:CC:DD:EE:FF", } - assert "result" in result3 - assert result3["result"].unique_id == "SA110405124500W00BS9" + assert not config_entry.options assert len(mock_lametric.device.mock_calls) == 1 assert len(mock_lametric.notify.mock_calls) == 1 @@ -385,8 +391,8 @@ async def test_ssdp_abort_invalid_discovery( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_SSDP}, data=data ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == reason + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == reason @pytest.mark.usefixtures("current_request_with_host") @@ -404,16 +410,15 @@ async def test_cloud_import_updates_existing_entry( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - flow_id = result["flow_id"] await hass.config_entries.flow.async_configure( - flow_id, 
user_input={"next_step_id": "pick_implementation"} + result["flow_id"], user_input={"next_step_id": "pick_implementation"} ) state = config_entry_oauth2_flow._encode_jwt( hass, { - "flow_id": flow_id, + "flow_id": result["flow_id"], "redirect_uri": "https://example.com/auth/external/callback", }, ) @@ -428,14 +433,14 @@ async def test_cloud_import_updates_existing_entry( "expires_in": 60, }, ) - await hass.config_entries.flow.async_configure(flow_id) + await hass.config_entries.flow.async_configure(result["flow_id"]) - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_DEVICE: "SA110405124500W00BS9"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_DEVICE: "SA110405124500W00BS9"} ) - assert result2.get("type") is FlowResultType.ABORT - assert result2.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" assert mock_config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", @@ -458,18 +463,18 @@ async def test_manual_updates_existing_entry( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - flow_id = result["flow_id"] await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "manual_entry"} + result["flow_id"], user_input={"next_step_id": "manual_entry"} ) - result3 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key"}, ) - assert result3.get("type") is FlowResultType.ABORT - assert result3.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" assert mock_config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", @@ -490,8 +495,8 @@ async def test_discovery_updates_existing_entry( DOMAIN, context={"source": SOURCE_SSDP}, data=SSDP_DISCOVERY_INFO ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" assert mock_config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-from-fixture", @@ -510,16 +515,15 @@ async def test_cloud_abort_no_devices( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - flow_id = result["flow_id"] await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "pick_implementation"} + result["flow_id"], user_input={"next_step_id": "pick_implementation"} ) state = config_entry_oauth2_flow._encode_jwt( hass, { - "flow_id": flow_id, + "flow_id": result["flow_id"], "redirect_uri": "https://example.com/auth/external/callback", }, ) @@ -537,10 +541,10 @@ async def test_cloud_abort_no_devices( # Stage there are no devices mock_lametric_cloud.devices.return_value = [] - result2 = await hass.config_entries.flow.async_configure(flow_id) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result2.get("type") is FlowResultType.ABORT - assert result2.get("reason") == "no_devices" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_devices" assert len(mock_lametric_cloud.devices.mock_calls) == 1 @@ -565,39 +569,42 @@ async def 
test_manual_errors( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - flow_id = result["flow_id"] await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "manual_entry"} + result["flow_id"], user_input={"next_step_id": "manual_entry"} ) mock_lametric.device.side_effect = side_effect - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key"}, ) - assert result2.get("type") is FlowResultType.FORM - assert result2.get("step_id") == "manual_entry" - assert result2.get("errors") == {"base": reason} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "manual_entry" + assert result["errors"] == {"base": reason} assert len(mock_lametric.device.mock_calls) == 1 assert len(mock_lametric.notify.mock_calls) == 0 assert len(mock_setup_entry.mock_calls) == 0 mock_lametric.device.side_effect = None - result3 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key"}, ) - assert result3.get("type") is FlowResultType.CREATE_ENTRY - assert result3.get("title") == "Frenck's LaMetric" - assert result3.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.title == "Frenck's LaMetric" + assert config_entry.unique_id == "SA110405124500W00BS9" + assert config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", CONF_MAC: "AA:BB:CC:DD:EE:FF", } - assert "result" in result3 - assert result3["result"].unique_id == "SA110405124500W00BS9" + assert not config_entry.options assert len(mock_lametric.device.mock_calls) == 2 assert len(mock_lametric.notify.mock_calls) == 1 @@ -628,16 +635,15 @@ async def test_cloud_errors( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - flow_id = result["flow_id"] await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "pick_implementation"} + result["flow_id"], user_input={"next_step_id": "pick_implementation"} ) state = config_entry_oauth2_flow._encode_jwt( hass, { - "flow_id": flow_id, + "flow_id": result["flow_id"], "redirect_uri": "https://example.com/auth/external/callback", }, ) @@ -652,16 +658,16 @@ async def test_cloud_errors( "expires_in": 60, }, ) - await hass.config_entries.flow.async_configure(flow_id) + await hass.config_entries.flow.async_configure(result["flow_id"]) mock_lametric.device.side_effect = side_effect - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_DEVICE: "SA110405124500W00BS9"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_DEVICE: "SA110405124500W00BS9"} ) - assert result2.get("type") is FlowResultType.FORM - assert result2.get("step_id") == "cloud_select_device" - assert result2.get("errors") == {"base": reason} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "cloud_select_device" + assert result["errors"] == {"base": reason} assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 1 @@ -669,19 
+675,21 @@ async def test_cloud_errors( assert len(mock_setup_entry.mock_calls) == 0 mock_lametric.device.side_effect = None - result3 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_DEVICE: "SA110405124500W00BS9"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_DEVICE: "SA110405124500W00BS9"} ) - assert result3.get("type") is FlowResultType.CREATE_ENTRY - assert result3.get("title") == "Frenck's LaMetric" - assert result3.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.title == "Frenck's LaMetric" + assert config_entry.unique_id == "SA110405124500W00BS9" + assert config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", CONF_MAC: "AA:BB:CC:DD:EE:FF", } - assert "result" in result3 - assert result3["result"].unique_id == "SA110405124500W00BS9" + assert not config_entry.options assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 2 @@ -706,8 +714,8 @@ async def test_dhcp_discovery_updates_entry( ), ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" assert mock_config_entry.data == { CONF_API_KEY: "mock-from-fixture", CONF_HOST: "127.0.0.42", @@ -732,8 +740,8 @@ async def test_dhcp_unknown_device( ), ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "unknown" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unknown" @pytest.mark.usefixtures("current_request_with_host", "mock_setup_entry") @@ -750,16 +758,14 @@ async def test_reauth_cloud_import( result = await mock_config_entry.start_reauth_flow(hass) - flow_id = result["flow_id"] - await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "pick_implementation"} + result["flow_id"], user_input={"next_step_id": "pick_implementation"} ) state = config_entry_oauth2_flow._encode_jwt( hass, { - "flow_id": flow_id, + "flow_id": result["flow_id"], "redirect_uri": "https://example.com/auth/external/callback", }, ) @@ -776,10 +782,10 @@ async def test_reauth_cloud_import( }, ) - result2 = await hass.config_entries.flow.async_configure(flow_id) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result2.get("type") is FlowResultType.ABORT - assert result2.get("reason") == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" assert mock_config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", @@ -806,16 +812,14 @@ async def test_reauth_cloud_abort_device_not_found( result = await mock_config_entry.start_reauth_flow(hass) - flow_id = result["flow_id"] - await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "pick_implementation"} + result["flow_id"], user_input={"next_step_id": "pick_implementation"} ) state = config_entry_oauth2_flow._encode_jwt( hass, { - "flow_id": flow_id, + "flow_id": result["flow_id"], "redirect_uri": "https://example.com/auth/external/callback", }, ) @@ -832,10 +836,10 @@ async def test_reauth_cloud_abort_device_not_found( }, ) - result2 = await hass.config_entries.flow.async_configure(flow_id) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result2.get("type") is 
FlowResultType.ABORT - assert result2.get("reason") == "reauth_device_not_found" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_device_not_found" assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 0 @@ -853,18 +857,16 @@ async def test_reauth_manual( result = await mock_config_entry.start_reauth_flow(hass) - flow_id = result["flow_id"] - await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "manual_entry"} + result["flow_id"], user_input={"next_step_id": "manual_entry"} ) - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_API_KEY: "mock-api-key"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_API_KEY: "mock-api-key"} ) - assert result2.get("type") is FlowResultType.ABORT - assert result2.get("reason") == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" assert mock_config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", @@ -887,18 +889,16 @@ async def test_reauth_manual_sky( result = await mock_config_entry.start_reauth_flow(hass) - flow_id = result["flow_id"] - await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "manual_entry"} + result["flow_id"], user_input={"next_step_id": "manual_entry"} ) - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_API_KEY: "mock-api-key"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_API_KEY: "mock-api-key"} ) - assert result2.get("type") is FlowResultType.ABORT - assert result2.get("reason") == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" assert mock_config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", diff --git a/tests/components/lametric/test_init.py b/tests/components/lametric/test_init.py index 7352721e992..2fd8219ea51 100644 --- a/tests/components/lametric/test_init.py +++ b/tests/components/lametric/test_init.py @@ -74,7 +74,7 @@ async def test_config_entry_authentication_failed( assert len(flows) == 1 flow = flows[0] - assert flow.get("step_id") == "choice_enter_manual_or_fetch_cloud" + assert flow["step_id"] == "choice_enter_manual_or_fetch_cloud" assert flow.get("handler") == DOMAIN assert "context" in flow From 308200781f16b7f4a75f45c8b7705361852e76d0 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Mon, 16 Dec 2024 14:49:15 -0600 Subject: [PATCH 0740/1198] Add required domain to vacuum intents (#133166) --- homeassistant/components/vacuum/intent.py | 2 ++ tests/components/vacuum/test_intent.py | 42 +++++++++++++++++++++++ 2 files changed, 44 insertions(+) diff --git a/homeassistant/components/vacuum/intent.py b/homeassistant/components/vacuum/intent.py index 8952c13875d..48340252b6e 100644 --- a/homeassistant/components/vacuum/intent.py +++ b/homeassistant/components/vacuum/intent.py @@ -18,6 +18,7 @@ async def async_setup_intents(hass: HomeAssistant) -> None: DOMAIN, SERVICE_START, description="Starts a vacuum", + required_domains={DOMAIN}, platforms={DOMAIN}, ), ) @@ -28,6 +29,7 @@ async def async_setup_intents(hass: HomeAssistant) -> None: DOMAIN, SERVICE_RETURN_TO_BASE, description="Returns a vacuum to base", + required_domains={DOMAIN}, platforms={DOMAIN}, ), ) diff --git a/tests/components/vacuum/test_intent.py 
b/tests/components/vacuum/test_intent.py index cf96d32ad49..9ede7dbc04e 100644 --- a/tests/components/vacuum/test_intent.py +++ b/tests/components/vacuum/test_intent.py @@ -37,6 +37,27 @@ async def test_start_vacuum_intent(hass: HomeAssistant) -> None: assert call.data == {"entity_id": entity_id} +async def test_start_vacuum_without_name(hass: HomeAssistant) -> None: + """Test starting a vacuum without specifying the name.""" + await vacuum_intent.async_setup_intents(hass) + + entity_id = f"{DOMAIN}.test_vacuum" + hass.states.async_set(entity_id, STATE_IDLE) + calls = async_mock_service(hass, DOMAIN, SERVICE_START) + + response = await intent.async_handle( + hass, "test", vacuum_intent.INTENT_VACUUM_START, {} + ) + await hass.async_block_till_done() + + assert response.response_type == intent.IntentResponseType.ACTION_DONE + assert len(calls) == 1 + call = calls[0] + assert call.domain == DOMAIN + assert call.service == SERVICE_START + assert call.data == {"entity_id": entity_id} + + async def test_stop_vacuum_intent(hass: HomeAssistant) -> None: """Test HassTurnOff intent for vacuums.""" await vacuum_intent.async_setup_intents(hass) @@ -59,3 +80,24 @@ async def test_stop_vacuum_intent(hass: HomeAssistant) -> None: assert call.domain == DOMAIN assert call.service == SERVICE_RETURN_TO_BASE assert call.data == {"entity_id": entity_id} + + +async def test_stop_vacuum_without_name(hass: HomeAssistant) -> None: + """Test stopping a vacuum without specifying the name.""" + await vacuum_intent.async_setup_intents(hass) + + entity_id = f"{DOMAIN}.test_vacuum" + hass.states.async_set(entity_id, STATE_IDLE) + calls = async_mock_service(hass, DOMAIN, SERVICE_RETURN_TO_BASE) + + response = await intent.async_handle( + hass, "test", vacuum_intent.INTENT_VACUUM_RETURN_TO_BASE, {} + ) + await hass.async_block_till_done() + + assert response.response_type == intent.IntentResponseType.ACTION_DONE + assert len(calls) == 1 + call = calls[0] + assert call.domain == DOMAIN + assert call.service == SERVICE_RETURN_TO_BASE + assert call.data == {"entity_id": entity_id} From 8c67819f507d823d1868d958e4d86b7bc37e125b Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Mon, 16 Dec 2024 22:40:00 +0100 Subject: [PATCH 0741/1198] Update axis to v64 (#133385) --- homeassistant/components/axis/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/axis/manifest.json b/homeassistant/components/axis/manifest.json index 7163437361a..9758af60178 100644 --- a/homeassistant/components/axis/manifest.json +++ b/homeassistant/components/axis/manifest.json @@ -29,7 +29,7 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["axis"], - "requirements": ["axis==63"], + "requirements": ["axis==64"], "ssdp": [ { "manufacturer": "AXIS" diff --git a/requirements_all.txt b/requirements_all.txt index 5eecf96d096..c4e9529c6c8 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -539,7 +539,7 @@ av==13.1.0 # avion==0.10 # homeassistant.components.axis -axis==63 +axis==64 # homeassistant.components.fujitsu_fglair ayla-iot-unofficial==1.4.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c10645dc293..056d7422195 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -488,7 +488,7 @@ automower-ble==0.2.0 av==13.1.0 # homeassistant.components.axis -axis==63 +axis==64 # homeassistant.components.fujitsu_fglair 
ayla-iot-unofficial==1.4.4 From 9cdc36681a30d537020d2c4fca2cac47f718b240 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Mon, 16 Dec 2024 23:01:24 +0100 Subject: [PATCH 0742/1198] Remove setup entry mock assert from LaMetric config flow (#133387) --- tests/components/lametric/conftest.py | 4 +-- tests/components/lametric/test_config_flow.py | 28 +++++-------------- 2 files changed, 9 insertions(+), 23 deletions(-) diff --git a/tests/components/lametric/conftest.py b/tests/components/lametric/conftest.py index c460834be6c..da86d1bc4de 100644 --- a/tests/components/lametric/conftest.py +++ b/tests/components/lametric/conftest.py @@ -49,8 +49,8 @@ def mock_setup_entry() -> Generator[AsyncMock]: """Mock setting up a config entry.""" with patch( "homeassistant.components.lametric.async_setup_entry", return_value=True - ) as mock_setup: - yield mock_setup + ): + yield @pytest.fixture diff --git a/tests/components/lametric/test_config_flow.py b/tests/components/lametric/test_config_flow.py index 4a546122e30..ccbbe005639 100644 --- a/tests/components/lametric/test_config_flow.py +++ b/tests/components/lametric/test_config_flow.py @@ -41,12 +41,11 @@ SSDP_DISCOVERY_INFO = SsdpServiceInfo( ) -@pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.usefixtures("current_request_with_host", "mock_setup_entry") async def test_full_cloud_import_flow_multiple_devices( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - mock_setup_entry: MagicMock, mock_lametric_cloud: MagicMock, mock_lametric: MagicMock, ) -> None: @@ -119,15 +118,13 @@ async def test_full_cloud_import_flow_multiple_devices( assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 1 assert len(mock_lametric.notify.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 -@pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.usefixtures("current_request_with_host", "mock_setup_entry") async def test_full_cloud_import_flow_single_device( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - mock_setup_entry: MagicMock, mock_lametric_cloud: MagicMock, mock_lametric: MagicMock, ) -> None: @@ -198,12 +195,11 @@ async def test_full_cloud_import_flow_single_device( assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 1 assert len(mock_lametric.notify.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 +@pytest.mark.usefixtures("mock_setup_entry") async def test_full_manual( hass: HomeAssistant, - mock_setup_entry: MagicMock, mock_lametric: MagicMock, ) -> None: """Check a full flow manual entry.""" @@ -246,15 +242,12 @@ async def test_full_manual( notification: Notification = mock_lametric.notify.mock_calls[0][2]["notification"] assert notification.model.sound == Sound(sound=NotificationSound.WIN) - assert len(mock_setup_entry.mock_calls) == 1 - -@pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.usefixtures("current_request_with_host", "mock_setup_entry") async def test_full_ssdp_with_cloud_import( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - mock_setup_entry: MagicMock, mock_lametric_cloud: MagicMock, mock_lametric: MagicMock, ) -> None: @@ -319,12 +312,11 @@ async def test_full_ssdp_with_cloud_import( assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 1 assert 
len(mock_lametric.notify.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 +@pytest.mark.usefixtures("mock_setup_entry") async def test_full_ssdp_manual_entry( hass: HomeAssistant, - mock_setup_entry: MagicMock, mock_lametric: MagicMock, ) -> None: """Check a full flow triggered by SSDP, with manual API key entry.""" @@ -361,7 +353,6 @@ async def test_full_ssdp_manual_entry( assert len(mock_lametric.device.mock_calls) == 1 assert len(mock_lametric.notify.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 @pytest.mark.parametrize( @@ -549,6 +540,7 @@ async def test_cloud_abort_no_devices( assert len(mock_lametric_cloud.devices.mock_calls) == 1 +@pytest.mark.usefixtures("mock_setup_entry") @pytest.mark.parametrize( ("side_effect", "reason"), [ @@ -561,7 +553,6 @@ async def test_cloud_abort_no_devices( async def test_manual_errors( hass: HomeAssistant, mock_lametric: MagicMock, - mock_setup_entry: MagicMock, side_effect: Exception, reason: str, ) -> None: @@ -586,7 +577,6 @@ async def test_manual_errors( assert len(mock_lametric.device.mock_calls) == 1 assert len(mock_lametric.notify.mock_calls) == 0 - assert len(mock_setup_entry.mock_calls) == 0 mock_lametric.device.side_effect = None result = await hass.config_entries.flow.async_configure( @@ -608,10 +598,9 @@ async def test_manual_errors( assert len(mock_lametric.device.mock_calls) == 2 assert len(mock_lametric.notify.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 -@pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.usefixtures("current_request_with_host", "mock_setup_entry") @pytest.mark.parametrize( ("side_effect", "reason"), [ @@ -625,7 +614,6 @@ async def test_cloud_errors( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - mock_setup_entry: MagicMock, mock_lametric_cloud: MagicMock, mock_lametric: MagicMock, side_effect: Exception, @@ -672,7 +660,6 @@ async def test_cloud_errors( assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 1 assert len(mock_lametric.notify.mock_calls) == 0 - assert len(mock_setup_entry.mock_calls) == 0 mock_lametric.device.side_effect = None result = await hass.config_entries.flow.async_configure( @@ -694,7 +681,6 @@ async def test_cloud_errors( assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 2 assert len(mock_lametric.notify.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 async def test_dhcp_discovery_updates_entry( From a374c7e4ca6bdf243a7b697fa68972b2582afea6 Mon Sep 17 00:00:00 2001 From: Dan Raper Date: Mon, 16 Dec 2024 22:54:33 +0000 Subject: [PATCH 0743/1198] Add reauth flow to Ohme (#133275) * Add reauth flow to ohme * Reuse config flow user step for reauth * Tidying up * Add common _validate_account method for reauth and user config flow steps * Add reauth fail test --- homeassistant/components/ohme/__init__.py | 4 +- homeassistant/components/ohme/config_flow.py | 68 +++++++++++++++-- homeassistant/components/ohme/manifest.json | 2 +- .../components/ohme/quality_scale.yaml | 2 +- homeassistant/components/ohme/strings.json | 13 +++- tests/components/ohme/test_config_flow.py | 74 +++++++++++++++++++ 6 files changed, 150 insertions(+), 13 deletions(-) diff --git a/homeassistant/components/ohme/__init__.py b/homeassistant/components/ohme/__init__.py index 8ca983cd72a..4dc75cb574c 100644 --- a/homeassistant/components/ohme/__init__.py +++ 
b/homeassistant/components/ohme/__init__.py @@ -7,7 +7,7 @@ from ohme import ApiException, AuthException, OhmeApiClient from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from .const import DOMAIN, PLATFORMS from .coordinator import OhmeAdvancedSettingsCoordinator, OhmeChargeSessionCoordinator @@ -36,7 +36,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: OhmeConfigEntry) -> bool translation_key="device_info_failed", translation_domain=DOMAIN ) except AuthException as e: - raise ConfigEntryError( + raise ConfigEntryAuthFailed( translation_key="auth_failed", translation_domain=DOMAIN ) from e except ApiException as e: diff --git a/homeassistant/components/ohme/config_flow.py b/homeassistant/components/ohme/config_flow.py index ea110f6df23..748ea558983 100644 --- a/homeassistant/components/ohme/config_flow.py +++ b/homeassistant/components/ohme/config_flow.py @@ -1,5 +1,6 @@ """Config flow for ohme integration.""" +from collections.abc import Mapping from typing import Any from ohme import ApiException, AuthException, OhmeApiClient @@ -32,6 +33,17 @@ USER_SCHEMA = vol.Schema( } ) +REAUTH_SCHEMA = vol.Schema( + { + vol.Required(CONF_PASSWORD): TextSelector( + TextSelectorConfig( + type=TextSelectorType.PASSWORD, + autocomplete="current-password", + ), + ), + } +) + class OhmeConfigFlow(ConfigFlow, domain=DOMAIN): """Config flow.""" @@ -46,14 +58,9 @@ class OhmeConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: self._async_abort_entries_match({CONF_EMAIL: user_input[CONF_EMAIL]}) - instance = OhmeApiClient(user_input[CONF_EMAIL], user_input[CONF_PASSWORD]) - try: - await instance.async_login() - except AuthException: - errors["base"] = "invalid_auth" - except ApiException: - errors["base"] = "unknown" - + errors = await self._validate_account( + user_input[CONF_EMAIL], user_input[CONF_PASSWORD] + ) if not errors: return self.async_create_entry( title=user_input[CONF_EMAIL], data=user_input @@ -62,3 +69,48 @@ class OhmeConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="user", data_schema=USER_SCHEMA, errors=errors ) + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle re-authentication.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle re-authentication confirmation.""" + errors: dict[str, str] = {} + reauth_entry = self._get_reauth_entry() + if user_input is not None: + errors = await self._validate_account( + reauth_entry.data[CONF_EMAIL], + user_input[CONF_PASSWORD], + ) + if not errors: + return self.async_update_reload_and_abort( + reauth_entry, + data_updates=user_input, + ) + return self.async_show_form( + step_id="reauth_confirm", + data_schema=REAUTH_SCHEMA, + description_placeholders={"email": reauth_entry.data[CONF_EMAIL]}, + errors=errors, + ) + + async def _validate_account(self, email: str, password: str) -> dict[str, str]: + """Validate Ohme account and return dict of errors.""" + errors: dict[str, str] = {} + client = OhmeApiClient( + email, + password, + ) + try: + await client.async_login() + except AuthException: + errors["base"] = "invalid_auth" + except ApiException: + errors["base"] = 
"unknown" + + return errors diff --git a/homeassistant/components/ohme/manifest.json b/homeassistant/components/ohme/manifest.json index 2d387ce9e8a..c9e1ccf9ac2 100644 --- a/homeassistant/components/ohme/manifest.json +++ b/homeassistant/components/ohme/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/ohme/", "integration_type": "device", "iot_class": "cloud_polling", - "quality_scale": "bronze", + "quality_scale": "silver", "requirements": ["ohme==1.1.1"] } diff --git a/homeassistant/components/ohme/quality_scale.yaml b/homeassistant/components/ohme/quality_scale.yaml index 15697cb11a3..7fc2f55e2f9 100644 --- a/homeassistant/components/ohme/quality_scale.yaml +++ b/homeassistant/components/ohme/quality_scale.yaml @@ -40,7 +40,7 @@ rules: integration-owner: done log-when-unavailable: done parallel-updates: done - reauthentication-flow: todo + reauthentication-flow: done test-coverage: done # Gold diff --git a/homeassistant/components/ohme/strings.json b/homeassistant/components/ohme/strings.json index 42e0a60b83e..125babc1901 100644 --- a/homeassistant/components/ohme/strings.json +++ b/homeassistant/components/ohme/strings.json @@ -11,6 +11,16 @@ "email": "Enter the email address associated with your Ohme account.", "password": "Enter the password for your Ohme account" } + }, + "reauth_confirm": { + "description": "Please update your password for {email}", + "title": "[%key:common::config_flow::title::reauth%]", + "data": { + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "Enter the password for your Ohme account" + } } }, "error": { @@ -18,7 +28,8 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "entity": { diff --git a/tests/components/ohme/test_config_flow.py b/tests/components/ohme/test_config_flow.py index b9d4a10a76e..bb7ecc00bdc 100644 --- a/tests/components/ohme/test_config_flow.py +++ b/tests/components/ohme/test_config_flow.py @@ -108,3 +108,77 @@ async def test_already_configured( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +async def test_reauth_form(hass: HomeAssistant, mock_client: MagicMock) -> None: + """Test reauth form.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_EMAIL: "test@example.com", + CONF_PASSWORD: "hunter1", + }, + ) + entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + assert not result["errors"] + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_PASSWORD: "hunter2"}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + + +@pytest.mark.parametrize( + ("test_exception", "expected_error"), + [(AuthException, "invalid_auth"), (ApiException, "unknown")], +) +async def test_reauth_fail( + hass: HomeAssistant, + mock_client: MagicMock, + test_exception: Exception, + expected_error: str, +) -> None: + """Test reauth errors.""" + + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_EMAIL: "test@example.com", + CONF_PASSWORD: "hunter1", + }, + ) + entry.add_to_hass(hass) + + # 
Initial form load + result = await entry.start_reauth_flow(hass) + + assert result["step_id"] == "reauth_confirm" + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + # Failed login + mock_client.async_login.side_effect = test_exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_PASSWORD: "hunter1"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": expected_error} + + # End with success + mock_client.async_login.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_PASSWORD: "hunter2"}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" From 73e3e91af25d9244ee3a3e5672f1a9ac8837df8d Mon Sep 17 00:00:00 2001 From: G Johansson Date: Mon, 16 Dec 2024 23:54:56 +0100 Subject: [PATCH 0744/1198] Nord Pool iqs platinum (#133389) --- homeassistant/components/nordpool/manifest.json | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/nordpool/manifest.json b/homeassistant/components/nordpool/manifest.json index b3a18eb040a..215494e10a0 100644 --- a/homeassistant/components/nordpool/manifest.json +++ b/homeassistant/components/nordpool/manifest.json @@ -7,6 +7,7 @@ "integration_type": "hub", "iot_class": "cloud_polling", "loggers": ["pynordpool"], + "quality_scale": "platinum", "requirements": ["pynordpool==0.2.3"], "single_config_entry": true } From 1512cd5fb7a52e11f594caf6723a78396cd749da Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ludovic=20BOU=C3=89?= Date: Tue, 17 Dec 2024 00:03:32 +0100 Subject: [PATCH 0745/1198] Add Matter battery replacement description (#132974) --- homeassistant/components/matter/icons.json | 3 + homeassistant/components/matter/sensor.py | 14 + homeassistant/components/matter/strings.json | 3 + .../matter/snapshots/test_sensor.ambr | 276 ++++++++++++++++++ tests/components/matter/test_sensor.py | 20 ++ 5 files changed, 316 insertions(+) diff --git a/homeassistant/components/matter/icons.json b/homeassistant/components/matter/icons.json index 32c9f057e47..adcdcd05137 100644 --- a/homeassistant/components/matter/icons.json +++ b/homeassistant/components/matter/icons.json @@ -43,6 +43,9 @@ "air_quality": { "default": "mdi:air-filter" }, + "bat_replacement_description": { + "default": "mdi:battery-sync" + }, "hepa_filter_condition": { "default": "mdi:filter-check" }, diff --git a/homeassistant/components/matter/sensor.py b/homeassistant/components/matter/sensor.py index b2a5da2aa71..d71cd52a0c6 100644 --- a/homeassistant/components/matter/sensor.py +++ b/homeassistant/components/matter/sensor.py @@ -231,6 +231,20 @@ DISCOVERY_SCHEMAS = [ entity_class=MatterSensor, required_attributes=(clusters.PowerSource.Attributes.BatVoltage,), ), + MatterDiscoverySchema( + platform=Platform.SENSOR, + entity_description=MatterSensorEntityDescription( + key="PowerSourceBatReplacementDescription", + translation_key="battery_replacement_description", + native_unit_of_measurement=None, + device_class=None, + entity_category=EntityCategory.DIAGNOSTIC, + ), + entity_class=MatterSensor, + required_attributes=( + clusters.PowerSource.Attributes.BatReplacementDescription, + ), + ), MatterDiscoverySchema( platform=Platform.SENSOR, entity_description=MatterSensorEntityDescription( diff --git a/homeassistant/components/matter/strings.json b/homeassistant/components/matter/strings.json index 69fa68765b3..ca15538997e 100644 --- 
a/homeassistant/components/matter/strings.json +++ b/homeassistant/components/matter/strings.json @@ -245,6 +245,9 @@ }, "valve_position": { "name": "Valve position" + }, + "battery_replacement_description": { + "name": "Battery type" } }, "switch": { diff --git a/tests/components/matter/snapshots/test_sensor.ambr b/tests/components/matter/snapshots/test_sensor.ambr index 44ad02d4b1e..60a3d33a130 100644 --- a/tests/components/matter/snapshots/test_sensor.ambr +++ b/tests/components/matter/snapshots/test_sensor.ambr @@ -1145,6 +1145,98 @@ 'state': '189.0', }) # --- +# name: test_sensors[door_lock][sensor.mock_door_lock_battery_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_door_lock_battery_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Battery type', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_replacement_description', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PowerSourceBatReplacementDescription-47-19', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[door_lock][sensor.mock_door_lock_battery_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Door Lock Battery type', + }), + 'context': , + 'entity_id': 'sensor.mock_door_lock_battery_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '', + }) +# --- +# name: test_sensors[door_lock_with_unbolt][sensor.mock_door_lock_battery_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_door_lock_battery_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Battery type', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_replacement_description', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PowerSourceBatReplacementDescription-47-19', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[door_lock_with_unbolt][sensor.mock_door_lock_battery_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Door Lock Battery type', + }), + 'context': , + 'entity_id': 'sensor.mock_door_lock_battery_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '', + }) +# --- # name: test_sensors[eve_contact_sensor][sensor.eve_door_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1196,6 +1288,52 @@ 'state': '100', }) # --- +# name: test_sensors[eve_contact_sensor][sensor.eve_door_battery_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 
'sensor.eve_door_battery_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Battery type', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_replacement_description', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PowerSourceBatReplacementDescription-47-19', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[eve_contact_sensor][sensor.eve_door_battery_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Eve Door Battery type', + }), + 'context': , + 'entity_id': 'sensor.eve_door_battery_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '', + }) +# --- # name: test_sensors[eve_contact_sensor][sensor.eve_door_voltage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1733,6 +1871,52 @@ 'state': '100', }) # --- +# name: test_sensors[eve_thermo][sensor.eve_thermo_battery_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.eve_thermo_battery_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Battery type', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_replacement_description', + 'unique_id': '00000000000004D2-0000000000000021-MatterNodeDevice-0-PowerSourceBatReplacementDescription-47-19', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[eve_thermo][sensor.eve_thermo_battery_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Eve Thermo Battery type', + }), + 'context': , + 'entity_id': 'sensor.eve_thermo_battery_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '', + }) +# --- # name: test_sensors[eve_thermo][sensor.eve_thermo_valve_position-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1882,6 +2066,52 @@ 'state': '100', }) # --- +# name: test_sensors[eve_weather_sensor][sensor.eve_weather_battery_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.eve_weather_battery_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Battery type', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_replacement_description', + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-0-PowerSourceBatReplacementDescription-47-19', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[eve_weather_sensor][sensor.eve_weather_battery_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Eve Weather Battery type', + }), + 'context': , + 'entity_id': 
'sensor.eve_weather_battery_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '', + }) +# --- # name: test_sensors[eve_weather_sensor][sensor.eve_weather_humidity-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2735,6 +2965,52 @@ 'state': '94', }) # --- +# name: test_sensors[smoke_detector][sensor.smoke_sensor_battery_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.smoke_sensor_battery_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Battery type', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_replacement_description', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PowerSourceBatReplacementDescription-47-19', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[smoke_detector][sensor.smoke_sensor_battery_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Smoke sensor Battery type', + }), + 'context': , + 'entity_id': 'sensor.smoke_sensor_battery_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'CR123A', + }) +# --- # name: test_sensors[smoke_detector][sensor.smoke_sensor_voltage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/matter/test_sensor.py b/tests/components/matter/test_sensor.py index 27eb7da2c71..3215ec58116 100644 --- a/tests/components/matter/test_sensor.py +++ b/tests/components/matter/test_sensor.py @@ -174,6 +174,26 @@ async def test_battery_sensor_voltage( assert entry.entity_category == EntityCategory.DIAGNOSTIC +@pytest.mark.parametrize("node_fixture", ["smoke_detector"]) +async def test_battery_sensor_description( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + matter_client: MagicMock, + matter_node: MatterNode, +) -> None: + """Test battery replacement description sensor.""" + state = hass.states.get("sensor.smoke_sensor_battery_type") + assert state + assert state.state == "CR123A" + + set_node_attribute(matter_node, 1, 47, 19, "CR2032") + await trigger_subscription_callback(hass, matter_client) + + state = hass.states.get("sensor.smoke_sensor_battery_type") + assert state + assert state.state == "CR2032" + + @pytest.mark.parametrize("node_fixture", ["eve_thermo"]) async def test_eve_thermo_sensor( hass: HomeAssistant, From 2d8e693cdbbc5877f130e5e3fdfea859ff08f4b5 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Tue, 17 Dec 2024 07:34:59 +0100 Subject: [PATCH 0746/1198] Update mypy-dev to 1.14.0a7 (#133390) --- homeassistant/components/image/__init__.py | 2 +- mypy.ini | 1 + requirements_test.txt | 2 +- script/hassfest/mypy_config.py | 1 + 4 files changed, 4 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/image/__init__.py b/homeassistant/components/image/__init__.py index dbb5962eabf..ea235127894 100644 --- a/homeassistant/components/image/__init__.py +++ b/homeassistant/components/image/__init__.py @@ -348,7 +348,7 @@ async def async_get_still_stream( # While this results in additional bandwidth usage, # given the low frequency of image updates, it is acceptable. 
frame.extend(frame) - await response.write(frame) + await response.write(frame) # type: ignore[arg-type] return True event = asyncio.Event() diff --git a/mypy.ini b/mypy.ini index e76bc97585c..15b96e0a802 100644 --- a/mypy.ini +++ b/mypy.ini @@ -10,6 +10,7 @@ show_error_codes = true follow_imports = normal local_partial_types = true strict_equality = true +strict_bytes = true no_implicit_optional = true warn_incomplete_stub = true warn_redundant_casts = true diff --git a/requirements_test.txt b/requirements_test.txt index 50e5957bf96..98a948cd56e 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -12,7 +12,7 @@ coverage==7.6.8 freezegun==1.5.1 license-expression==30.4.0 mock-open==1.4.0 -mypy-dev==1.14.0a6 +mypy-dev==1.14.0a7 pre-commit==4.0.0 pydantic==2.10.3 pylint==3.3.2 diff --git a/script/hassfest/mypy_config.py b/script/hassfest/mypy_config.py index 5767066c943..1d7f2b5ed88 100644 --- a/script/hassfest/mypy_config.py +++ b/script/hassfest/mypy_config.py @@ -47,6 +47,7 @@ GENERAL_SETTINGS: Final[dict[str, str]] = { # Enable some checks globally. "local_partial_types": "true", "strict_equality": "true", + "strict_bytes": "true", "no_implicit_optional": "true", "warn_incomplete_stub": "true", "warn_redundant_casts": "true", From fc9d32ef65402e77add31c40bc55bc1e664e6390 Mon Sep 17 00:00:00 2001 From: Vivien Chene Date: Tue, 17 Dec 2024 07:57:43 +0000 Subject: [PATCH 0747/1198] Fix issue when no data, where the integer sensor value is given a string (#132123) * Fix issue when no data, where the integer sensor value is given a string * Use None and not '0' --- homeassistant/components/irish_rail_transport/sensor.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/irish_rail_transport/sensor.py b/homeassistant/components/irish_rail_transport/sensor.py index 39bf39bcbe0..2765a14b7a3 100644 --- a/homeassistant/components/irish_rail_transport/sensor.py +++ b/homeassistant/components/irish_rail_transport/sensor.py @@ -194,9 +194,9 @@ class IrishRailTransportData: ATTR_STATION: self.station, ATTR_ORIGIN: "", ATTR_DESTINATION: dest, - ATTR_DUE_IN: "n/a", - ATTR_DUE_AT: "n/a", - ATTR_EXPECT_AT: "n/a", + ATTR_DUE_IN: None, + ATTR_DUE_AT: None, + ATTR_EXPECT_AT: None, ATTR_DIRECTION: direction, ATTR_STOPS_AT: stops_at, ATTR_TRAIN_TYPE: "", From 9ca9e787b238df3013e0a29d8a546bc7e9993629 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Tue, 17 Dec 2024 09:07:18 +0100 Subject: [PATCH 0748/1198] Add tests for Habitica integration (#131780) * Add tests for Habitica integration * update iqs --- .../components/habitica/quality_scale.yaml | 2 +- tests/components/habitica/fixtures/tasks.json | 50 +++++++++++++ tests/components/habitica/fixtures/user.json | 3 +- .../habitica/snapshots/test_calendar.ambr | 24 +++++-- .../habitica/snapshots/test_diagnostics.ambr | 61 ++++++++++++++++ .../habitica/snapshots/test_sensor.ambr | 41 ++++++++++- .../habitica/snapshots/test_todo.ambr | 9 ++- tests/components/habitica/test_button.py | 71 ++++++++++++++++++- tests/components/habitica/test_calendar.py | 15 +++- 9 files changed, 266 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/habitica/quality_scale.yaml b/homeassistant/components/habitica/quality_scale.yaml index cf54672bfed..9d505b85b8c 100644 --- a/homeassistant/components/habitica/quality_scale.yaml +++ b/homeassistant/components/habitica/quality_scale.yaml @@ -35,7 +35,7 @@ rules: log-when-unavailable: done parallel-updates: todo 
reauthentication-flow: todo - test-coverage: todo + test-coverage: done # Gold devices: done diff --git a/tests/components/habitica/fixtures/tasks.json b/tests/components/habitica/fixtures/tasks.json index 7784b9c7f49..a4942063612 100644 --- a/tests/components/habitica/fixtures/tasks.json +++ b/tests/components/habitica/fixtures/tasks.json @@ -532,6 +532,56 @@ "updatedAt": "2024-07-07T17:51:53.266Z", "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", "id": "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b" + }, + { + "repeat": { + "m": false, + "t": false, + "w": false, + "th": false, + "f": false, + "s": false, + "su": true + }, + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "_id": "6e53f1f5-a315-4edd-984d-8d762e4a08ef", + "frequency": "monthly", + "everyX": 1, + "streak": 1, + "nextDue": [ + "2024-12-14T23:00:00.000Z", + "2025-01-18T23:00:00.000Z", + "2025-02-15T23:00:00.000Z", + "2025-03-15T23:00:00.000Z", + "2025-04-19T23:00:00.000Z", + "2025-05-17T23:00:00.000Z" + ], + "yesterDaily": true, + "history": [], + "completed": false, + "collapseChecklist": false, + "type": "daily", + "text": "Arbeite an einem kreativen Projekt", + "notes": "Klicke um den Namen Deines aktuellen Projekts anzugeben & setze einen Terminplan!", + "tags": [], + "value": -0.9215181434950852, + "priority": 1, + "attribute": "str", + "byHabitica": false, + "startDate": "2024-09-20T23:00:00.000Z", + "daysOfMonth": [], + "weeksOfMonth": [3], + "checklist": [], + "reminders": [], + "createdAt": "2024-10-10T15:57:14.304Z", + "updatedAt": "2024-11-27T23:47:29.986Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "isDue": false, + "id": "6e53f1f5-a315-4edd-984d-8d762e4a08ef" } ], "notifications": [ diff --git a/tests/components/habitica/fixtures/user.json b/tests/components/habitica/fixtures/user.json index a498de910ef..ed41a306a03 100644 --- a/tests/components/habitica/fixtures/user.json +++ b/tests/components/habitica/fixtures/user.json @@ -55,7 +55,8 @@ "e97659e0-2c42-4599-a7bb-00282adc410d", "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", "f2c85972-1a19-4426-bc6d-ce3337b9d99f", - "2c6d136c-a1c3-4bef-b7c4-fa980784b1e1" + "2c6d136c-a1c3-4bef-b7c4-fa980784b1e1", + "6e53f1f5-a315-4edd-984d-8d762e4a08ef" ], "habits": ["1d147de6-5c02-4740-8e2f-71d3015a37f4"] }, diff --git a/tests/components/habitica/snapshots/test_calendar.ambr b/tests/components/habitica/snapshots/test_calendar.ambr index c2f9c8e83c9..5e010a33c84 100644 --- a/tests/components/habitica/snapshots/test_calendar.ambr +++ b/tests/components/habitica/snapshots/test_calendar.ambr @@ -1,5 +1,21 @@ # serializer version: 1 -# name: test_api_events[calendar.test_user_dailies] +# name: test_api_events[date range in the past-calendar.test_user_dailies] + list([ + ]) +# --- +# name: test_api_events[date range in the past-calendar.test_user_daily_reminders] + list([ + ]) +# --- +# name: test_api_events[date range in the past-calendar.test_user_to_do_reminders] + list([ + ]) +# --- +# name: test_api_events[date range in the past-calendar.test_user_to_do_s] + list([ + ]) +# --- +# name: test_api_events[default date range-calendar.test_user_dailies] list([ dict({ 'description': 'Klicke um Deinen Terminplan festzulegen!', @@ -577,7 +593,7 @@ }), ]) # --- -# name: test_api_events[calendar.test_user_daily_reminders] +# name: test_api_events[default date range-calendar.test_user_daily_reminders] list([ dict({ 'description': 'Klicke um Deinen Terminplan festzulegen!', @@ -819,7 +835,7 @@ }), ]) # --- -# name: 
test_api_events[calendar.test_user_to_do_reminders] +# name: test_api_events[default date range-calendar.test_user_to_do_reminders] list([ dict({ 'description': 'Strom- und Internetrechnungen rechtzeitig überweisen.', @@ -837,7 +853,7 @@ }), ]) # --- -# name: test_api_events[calendar.test_user_to_do_s] +# name: test_api_events[default date range-calendar.test_user_to_do_s] list([ dict({ 'description': 'Strom- und Internetrechnungen rechtzeitig überweisen.', diff --git a/tests/components/habitica/snapshots/test_diagnostics.ambr b/tests/components/habitica/snapshots/test_diagnostics.ambr index bb9371a4c68..0d5f07d9a6c 100644 --- a/tests/components/habitica/snapshots/test_diagnostics.ambr +++ b/tests/components/habitica/snapshots/test_diagnostics.ambr @@ -615,6 +615,66 @@ 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', 'value': 10, }), + dict({ + '_id': '6e53f1f5-a315-4edd-984d-8d762e4a08ef', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'checklist': list([ + ]), + 'collapseChecklist': False, + 'completed': False, + 'createdAt': '2024-10-10T15:57:14.304Z', + 'daysOfMonth': list([ + ]), + 'everyX': 1, + 'frequency': 'monthly', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'history': list([ + ]), + 'id': '6e53f1f5-a315-4edd-984d-8d762e4a08ef', + 'isDue': False, + 'nextDue': list([ + '2024-12-14T23:00:00.000Z', + '2025-01-18T23:00:00.000Z', + '2025-02-15T23:00:00.000Z', + '2025-03-15T23:00:00.000Z', + '2025-04-19T23:00:00.000Z', + '2025-05-17T23:00:00.000Z', + ]), + 'notes': 'Klicke um den Namen Deines aktuellen Projekts anzugeben & setze einen Terminplan!', + 'priority': 1, + 'reminders': list([ + ]), + 'repeat': dict({ + 'f': False, + 'm': False, + 's': False, + 'su': True, + 't': False, + 'th': False, + 'w': False, + }), + 'startDate': '2024-09-20T23:00:00.000Z', + 'streak': 1, + 'tags': list([ + ]), + 'text': 'Arbeite an einem kreativen Projekt', + 'type': 'daily', + 'updatedAt': '2024-11-27T23:47:29.986Z', + 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', + 'value': -0.9215181434950852, + 'weeksOfMonth': list([ + 3, + ]), + 'yesterDaily': True, + }), ]), 'user': dict({ 'api_user': 'test-api-user', @@ -695,6 +755,7 @@ '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + '6e53f1f5-a315-4edd-984d-8d762e4a08ef', ]), 'habits': list([ '1d147de6-5c02-4740-8e2f-71d3015a37f4', diff --git a/tests/components/habitica/snapshots/test_sensor.ambr b/tests/components/habitica/snapshots/test_sensor.ambr index 28dd7eb8c43..7e72d486276 100644 --- a/tests/components/habitica/snapshots/test_sensor.ambr +++ b/tests/components/habitica/snapshots/test_sensor.ambr @@ -226,6 +226,45 @@ 'value': -2.9663035443712333, 'yester_daily': True, }), + '6e53f1f5-a315-4edd-984d-8d762e4a08ef': dict({ + 'created_at': '2024-10-10T15:57:14.304Z', + 'every_x': 1, + 'frequency': 'monthly', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'next_due': list([ + '2024-12-14T23:00:00.000Z', + '2025-01-18T23:00:00.000Z', + '2025-02-15T23:00:00.000Z', + '2025-03-15T23:00:00.000Z', + '2025-04-19T23:00:00.000Z', + '2025-05-17T23:00:00.000Z', + ]), + 'notes': 'Klicke um den Namen Deines aktuellen Projekts anzugeben & setze einen Terminplan!', + 'priority': 1, + 'repeat': dict({ + 'f': False, + 'm': False, + 's': False, + 'su': True, + 't': False, + 'th': False, + 'w': False, + }), + 'start_date': '2024-09-20T23:00:00.000Z', + 'streak': 1, + 'text': 'Arbeite an 
einem kreativen Projekt', + 'type': 'daily', + 'value': -0.9215181434950852, + 'weeks_of_month': list([ + 3, + ]), + 'yester_daily': True, + }), 'f2c85972-1a19-4426-bc6d-ce3337b9d99f': dict({ 'created_at': '2024-07-07T17:51:53.266Z', 'every_x': 1, @@ -270,7 +309,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '3', + 'state': '4', }) # --- # name: test_sensors[sensor.test_user_display_name-entry] diff --git a/tests/components/habitica/snapshots/test_todo.ambr b/tests/components/habitica/snapshots/test_todo.ambr index 79eca9dbbb0..8c49cad5436 100644 --- a/tests/components/habitica/snapshots/test_todo.ambr +++ b/tests/components/habitica/snapshots/test_todo.ambr @@ -42,6 +42,13 @@ 'summary': 'Fitnessstudio besuchen', 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', }), + dict({ + 'description': 'Klicke um den Namen Deines aktuellen Projekts anzugeben & setze einen Terminplan!', + 'due': '2024-12-14', + 'status': 'needs_action', + 'summary': 'Arbeite an einem kreativen Projekt', + 'uid': '6e53f1f5-a315-4edd-984d-8d762e4a08ef', + }), ]), }), }) @@ -137,7 +144,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2', + 'state': '3', }) # --- # name: test_todos[todo.test_user_to_do_s-entry] diff --git a/tests/components/habitica/test_button.py b/tests/components/habitica/test_button.py index 979cefef923..09cc1c9d373 100644 --- a/tests/components/habitica/test_button.py +++ b/tests/components/habitica/test_button.py @@ -1,6 +1,7 @@ """Tests for Habitica button platform.""" from collections.abc import Generator +from datetime import timedelta from http import HTTPStatus import re from unittest.mock import patch @@ -15,10 +16,16 @@ from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import entity_registry as er +import homeassistant.util.dt as dt_util from .conftest import mock_called_with -from tests.common import MockConfigEntry, load_json_object_fixture, snapshot_platform +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_json_object_fixture, + snapshot_platform, +) from tests.test_util.aiohttp import AiohttpClientMocker @@ -340,3 +347,65 @@ async def test_button_unavailable( for entity_id in entity_ids: assert (state := hass.states.get(entity_id)) assert state.state == STATE_UNAVAILABLE + + +async def test_class_change( + hass: HomeAssistant, + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test removing and adding skills after class change.""" + mage_skills = [ + "button.test_user_chilling_frost", + "button.test_user_earthquake", + "button.test_user_ethereal_surge", + ] + healer_skills = [ + "button.test_user_healing_light", + "button.test_user_protective_aura", + "button.test_user_searing_brightness", + "button.test_user_blessing", + ] + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/user", + json=load_json_object_fixture("wizard_fixture.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + params={"type": "completedTodos"}, + json=load_json_object_fixture("completed_todos.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + json=load_json_object_fixture("tasks.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/content", + params={"language": "en"}, + 
json=load_json_object_fixture("content.json", DOMAIN), + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + for skill in mage_skills: + assert hass.states.get(skill) + + aioclient_mock._mocks.pop(0) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/user", + json=load_json_object_fixture("healer_fixture.json", DOMAIN), + ) + + async_fire_time_changed(hass, dt_util.now() + timedelta(seconds=60)) + await hass.async_block_till_done() + + for skill in mage_skills: + assert not hass.states.get(skill) + + for skill in healer_skills: + assert hass.states.get(skill) diff --git a/tests/components/habitica/test_calendar.py b/tests/components/habitica/test_calendar.py index a6cdb1a9306..ff3ffbeb80d 100644 --- a/tests/components/habitica/test_calendar.py +++ b/tests/components/habitica/test_calendar.py @@ -59,6 +59,17 @@ async def test_calendar_platform( "calendar.test_user_to_do_reminders", ], ) +@pytest.mark.parametrize( + ("start_date", "end_date"), + [ + ("2024-08-29", "2024-10-08"), + ("2023-08-01", "2023-08-02"), + ], + ids=[ + "default date range", + "date range in the past", + ], +) @pytest.mark.freeze_time("2024-09-20T22:00:00.000Z") @pytest.mark.usefixtures("mock_habitica") async def test_api_events( @@ -67,6 +78,8 @@ async def test_api_events( config_entry: MockConfigEntry, hass_client: ClientSessionGenerator, entity: str, + start_date: str, + end_date: str, ) -> None: """Test calendar event.""" @@ -76,7 +89,7 @@ async def test_api_events( client = await hass_client() response = await client.get( - f"/api/calendars/{entity}?start=2024-08-29&end=2024-10-08" + f"/api/calendars/{entity}?start={start_date}&end={end_date}" ) assert await response.json() == snapshot From ac6d7180949358d8f8708ae4a903312ca0bb739d Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Tue, 17 Dec 2024 09:37:46 +0100 Subject: [PATCH 0749/1198] Fix mqtt reconfigure flow (#133315) * FIx mqtt reconfigure flow * Follow up on code review --- homeassistant/components/mqtt/config_flow.py | 17 ++++------- tests/components/mqtt/test_config_flow.py | 32 +++++--------------- 2 files changed, 13 insertions(+), 36 deletions(-) diff --git a/homeassistant/components/mqtt/config_flow.py b/homeassistant/components/mqtt/config_flow.py index ad3f3d35457..0081246c705 100644 --- a/homeassistant/components/mqtt/config_flow.py +++ b/homeassistant/components/mqtt/config_flow.py @@ -470,7 +470,6 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): errors: dict[str, str] = {} fields: OrderedDict[Any, Any] = OrderedDict() validated_user_input: dict[str, Any] = {} - broker_config: dict[str, Any] = {} if is_reconfigure := (self.source == SOURCE_RECONFIGURE): reconfigure_entry = self._get_reconfigure_entry() if await async_get_broker_settings( @@ -482,29 +481,25 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): errors, ): if is_reconfigure: - broker_config.update( - update_password_from_user_input( - reconfigure_entry.data.get(CONF_PASSWORD), validated_user_input - ), + update_password_from_user_input( + reconfigure_entry.data.get(CONF_PASSWORD), validated_user_input ) - else: - broker_config = validated_user_input can_connect = await self.hass.async_add_executor_job( try_connection, - broker_config, + validated_user_input, ) if can_connect: if is_reconfigure: return self.async_update_reload_and_abort( reconfigure_entry, - data_updates=broker_config, + data=validated_user_input, ) validated_user_input[CONF_DISCOVERY] 
= DEFAULT_DISCOVERY return self.async_create_entry( - title=broker_config[CONF_BROKER], - data=broker_config, + title=validated_user_input[CONF_BROKER], + data=validated_user_input, ) errors["base"] = "cannot_connect" diff --git a/tests/components/mqtt/test_config_flow.py b/tests/components/mqtt/test_config_flow.py index fc1221956de..38dbda50cdd 100644 --- a/tests/components/mqtt/test_config_flow.py +++ b/tests/components/mqtt/test_config_flow.py @@ -2162,7 +2162,7 @@ async def test_setup_with_advanced_settings( async def test_change_websockets_transport_to_tcp( hass: HomeAssistant, mock_try_connection: MagicMock ) -> None: - """Test option flow setup with websockets transport settings.""" + """Test reconfiguration flow changing websockets transport settings.""" config_entry = MockConfigEntry(domain=mqtt.DOMAIN) config_entry.add_to_hass(hass) hass.config_entries.async_update_entry( @@ -2178,7 +2178,7 @@ async def test_change_websockets_transport_to_tcp( mock_try_connection.return_value = True - result = await hass.config_entries.options.async_init(config_entry.entry_id) + result = await config_entry.start_reconfigure_flow(hass, show_advanced_options=True) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "broker" assert result["data_schema"].schema["transport"] @@ -2186,7 +2186,7 @@ async def test_change_websockets_transport_to_tcp( assert result["data_schema"].schema["ws_headers"] # Change transport to tcp - result = await hass.config_entries.options.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ mqtt.CONF_BROKER: "test-broker", @@ -2196,25 +2196,14 @@ async def test_change_websockets_transport_to_tcp( mqtt.CONF_WS_PATH: "/some_path", }, ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "options" - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - mqtt.CONF_DISCOVERY: True, - mqtt.CONF_DISCOVERY_PREFIX: "homeassistant_test", - }, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" # Check config entry result assert config_entry.data == { mqtt.CONF_BROKER: "test-broker", CONF_PORT: 1234, mqtt.CONF_TRANSPORT: "tcp", - mqtt.CONF_DISCOVERY: True, - mqtt.CONF_DISCOVERY_PREFIX: "homeassistant_test", } @@ -2238,15 +2227,8 @@ async def test_reconfigure_flow_form( ) -> None: """Test reconfigure flow.""" await mqtt_mock_entry() - entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] - result = await hass.config_entries.flow.async_init( - mqtt.DOMAIN, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": entry.entry_id, - "show_advanced_options": True, - }, - ) + entry: MockConfigEntry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + result = await entry.start_reconfigure_flow(hass, show_advanced_options=True) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "broker" assert result["errors"] == {} From c0264f73b0cbf6e6d582c983e4e92583cb136c1b Mon Sep 17 00:00:00 2001 From: dotvav Date: Tue, 17 Dec 2024 10:17:50 +0100 Subject: [PATCH 0750/1198] Add palazzetti status sensor (#131348) * Add status sensor * Lower the case of strings keys * Make const Final * Fix typo * Fix typo * Merge similar statuses * Increase readability * Update snapshot --- homeassistant/components/palazzetti/const.py | 52 +++++++ homeassistant/components/palazzetti/sensor.py | 19 ++- 
.../components/palazzetti/strings.json | 36 +++++ tests/components/palazzetti/conftest.py | 1 + .../palazzetti/snapshots/test_sensor.ambr | 146 ++++++++++++++++++ 5 files changed, 253 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/palazzetti/const.py b/homeassistant/components/palazzetti/const.py index 4cb8b1f14a6..b2e27b2a6fd 100644 --- a/homeassistant/components/palazzetti/const.py +++ b/homeassistant/components/palazzetti/const.py @@ -4,6 +4,8 @@ from datetime import timedelta import logging from typing import Final +from homeassistant.helpers.typing import StateType + DOMAIN: Final = "palazzetti" PALAZZETTI: Final = "Palazzetti" LOGGER = logging.getLogger(__package__) @@ -17,3 +19,53 @@ FAN_SILENT: Final = "silent" FAN_HIGH: Final = "high" FAN_AUTO: Final = "auto" FAN_MODES: Final = [FAN_SILENT, "1", "2", "3", "4", "5", FAN_HIGH, FAN_AUTO] + +STATUS_TO_HA: Final[dict[StateType, str]] = { + 0: "off", + 1: "off_timer", + 2: "test_fire", + 3: "heatup", + 4: "fueling", + 5: "ign_test", + 6: "burning", + 7: "burning_mod", + 8: "unknown", + 9: "cool_fluid", + 10: "fire_stop", + 11: "clean_fire", + 12: "cooling", + 50: "cleanup", + 51: "ecomode", + 241: "chimney_alarm", + 243: "grate_error", + 244: "pellet_water_error", + 245: "t05_error", + 247: "hatch_door_open", + 248: "pressure_error", + 249: "main_probe_failure", + 250: "flue_probe_failure", + 252: "exhaust_temp_high", + 253: "pellet_finished", + 501: "off", + 502: "fueling", + 503: "ign_test", + 504: "burning", + 505: "firewood_finished", + 506: "cooling", + 507: "clean_fire", + 1000: "general_error", + 1001: "general_error", + 1239: "door_open", + 1240: "temp_too_high", + 1241: "cleaning_warning", + 1243: "fuel_error", + 1244: "pellet_water_error", + 1245: "t05_error", + 1247: "hatch_door_open", + 1248: "pressure_error", + 1249: "main_probe_failure", + 1250: "flue_probe_failure", + 1252: "exhaust_temp_high", + 1253: "pellet_finished", + 1508: "general_error", +} diff --git a/homeassistant/components/palazzetti/sensor.py b/homeassistant/components/palazzetti/sensor.py index ead2b236b17..11462201f4e 100644 --- a/homeassistant/components/palazzetti/sensor.py +++ b/homeassistant/components/palazzetti/sensor.py @@ -14,6 +14,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from . 
import PalazzettiConfigEntry +from .const import STATUS_TO_HA from .coordinator import PalazzettiDataUpdateCoordinator from .entity import PalazzettiEntity @@ -23,10 +24,19 @@ class PropertySensorEntityDescription(SensorEntityDescription): """Describes a Palazzetti sensor entity that is read from a `PalazzettiClient` property.""" client_property: str + property_map: dict[StateType, str] | None = None presence_flag: None | str = None PROPERTY_SENSOR_DESCRIPTIONS: list[PropertySensorEntityDescription] = [ + PropertySensorEntityDescription( + key="status", + device_class=SensorDeviceClass.ENUM, + translation_key="status", + client_property="status", + property_map=STATUS_TO_HA, + options=list(STATUS_TO_HA.values()), + ), PropertySensorEntityDescription( key="pellet_quantity", device_class=SensorDeviceClass.WEIGHT, @@ -103,4 +113,11 @@ class PalazzettiSensor(PalazzettiEntity, SensorEntity): def native_value(self) -> StateType: """Return the state value of the sensor.""" - return getattr(self.coordinator.client, self.entity_description.client_property) + raw_value = getattr( + self.coordinator.client, self.entity_description.client_property + ) + + if self.entity_description.property_map: + return self.entity_description.property_map[raw_value] + + return raw_value diff --git a/homeassistant/components/palazzetti/strings.json b/homeassistant/components/palazzetti/strings.json index 60c6e20c402..ad7bc498bd1 100644 --- a/homeassistant/components/palazzetti/strings.json +++ b/homeassistant/components/palazzetti/strings.json @@ -57,6 +57,42 @@ } }, "sensor": { + "status": { + "name": "Status", + "state": { + "off": "Off", + "off_timer": "Timer-regulated switch off", + "test_fire": "Ignition test", + "heatup": "Pellet feed", + "fueling": "Ignition", + "ign_test": "Fuel check", + "burning": "Operating", + "burning_mod": "Operating - Modulating", + "unknown": "Unknown", + "cool_fluid": "Stand-by", + "fire_stop": "Switch off", + "clean_fire": "Burn pot cleaning", + "cooling": "Cooling in progress", + "cleanup": "Final cleaning", + "ecomode": "Ecomode", + "chimney_alarm": "Chimney alarm", + "grate_error": "Grate error", + "pellet_water_error": "Pellet probe or return water error", + "t05_error": "T05 error disconnected or faulty probe", + "hatch_door_open": "Feed hatch or door open", + "pressure_error": "Safety pressure switch error", + "main_probe_failure": "Main probe failure", + "flue_probe_failure": "Flue gas probe failure", + "exhaust_temp_high": "Too high exhaust gas temperature", + "pellet_finished": "Pellets finished or ignition failed", + "firewood_finished": "Firewood finished", + "general_error": "General error", + "door_open": "Door open", + "temp_too_high": "Temperature too high", + "cleaning_warning": "Cleaning warning", + "fuel_error": "Fuel error" + } + }, "pellet_quantity": { "name": "Pellet quantity" }, diff --git a/tests/components/palazzetti/conftest.py b/tests/components/palazzetti/conftest.py index a9f76b259c3..fad535df914 100644 --- a/tests/components/palazzetti/conftest.py +++ b/tests/components/palazzetti/conftest.py @@ -66,6 +66,7 @@ def mock_palazzetti_client() -> Generator[AsyncMock]: mock_client.has_on_off_switch = True mock_client.has_pellet_level = False mock_client.connected = True + mock_client.status = 6 mock_client.is_heating = True mock_client.room_temperature = 18 mock_client.T1 = 21.5 diff --git a/tests/components/palazzetti/snapshots/test_sensor.ambr b/tests/components/palazzetti/snapshots/test_sensor.ambr index 107b818f195..aa98f3a4f59 100644 --- 
a/tests/components/palazzetti/snapshots/test_sensor.ambr +++ b/tests/components/palazzetti/snapshots/test_sensor.ambr @@ -305,6 +305,152 @@ 'state': '21.5', }) # --- +# name: test_all_entities[sensor.stove_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'off', + 'off_timer', + 'test_fire', + 'heatup', + 'fueling', + 'ign_test', + 'burning', + 'burning_mod', + 'unknown', + 'cool_fluid', + 'fire_stop', + 'clean_fire', + 'cooling', + 'cleanup', + 'ecomode', + 'chimney_alarm', + 'grate_error', + 'pellet_water_error', + 't05_error', + 'hatch_door_open', + 'pressure_error', + 'main_probe_failure', + 'flue_probe_failure', + 'exhaust_temp_high', + 'pellet_finished', + 'off', + 'fueling', + 'ign_test', + 'burning', + 'firewood_finished', + 'cooling', + 'clean_fire', + 'general_error', + 'general_error', + 'door_open', + 'temp_too_high', + 'cleaning_warning', + 'fuel_error', + 'pellet_water_error', + 't05_error', + 'hatch_door_open', + 'pressure_error', + 'main_probe_failure', + 'flue_probe_failure', + 'exhaust_temp_high', + 'pellet_finished', + 'general_error', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.stove_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Status', + 'platform': 'palazzetti', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status', + 'unique_id': '11:22:33:44:55:66-status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.stove_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Stove Status', + 'options': list([ + 'off', + 'off_timer', + 'test_fire', + 'heatup', + 'fueling', + 'ign_test', + 'burning', + 'burning_mod', + 'unknown', + 'cool_fluid', + 'fire_stop', + 'clean_fire', + 'cooling', + 'cleanup', + 'ecomode', + 'chimney_alarm', + 'grate_error', + 'pellet_water_error', + 't05_error', + 'hatch_door_open', + 'pressure_error', + 'main_probe_failure', + 'flue_probe_failure', + 'exhaust_temp_high', + 'pellet_finished', + 'off', + 'fueling', + 'ign_test', + 'burning', + 'firewood_finished', + 'cooling', + 'clean_fire', + 'general_error', + 'general_error', + 'door_open', + 'temp_too_high', + 'cleaning_warning', + 'fuel_error', + 'pellet_water_error', + 't05_error', + 'hatch_door_open', + 'pressure_error', + 'main_probe_failure', + 'flue_probe_failure', + 'exhaust_temp_high', + 'pellet_finished', + 'general_error', + ]), + }), + 'context': , + 'entity_id': 'sensor.stove_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'burning', + }) +# --- # name: test_all_entities[sensor.stove_tank_water_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ From d8e853941a3847db28c518f48df7331e403a9956 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Tue, 17 Dec 2024 11:10:38 +0100 Subject: [PATCH 0751/1198] Bump holidays to 0.63 (#133391) --- homeassistant/components/holiday/manifest.json | 2 +- homeassistant/components/workday/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/holiday/manifest.json 
b/homeassistant/components/holiday/manifest.json index 7edc140da11..33cae231595 100644 --- a/homeassistant/components/holiday/manifest.json +++ b/homeassistant/components/holiday/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/holiday", "iot_class": "local_polling", - "requirements": ["holidays==0.62", "babel==2.15.0"] + "requirements": ["holidays==0.63", "babel==2.15.0"] } diff --git a/homeassistant/components/workday/manifest.json b/homeassistant/components/workday/manifest.json index 842c6f1f1ad..de9cbe694d8 100644 --- a/homeassistant/components/workday/manifest.json +++ b/homeassistant/components/workday/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "loggers": ["holidays"], "quality_scale": "internal", - "requirements": ["holidays==0.62"] + "requirements": ["holidays==0.63"] } diff --git a/requirements_all.txt b/requirements_all.txt index c4e9529c6c8..f2ab0a938d9 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1131,7 +1131,7 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.62 +holidays==0.63 # homeassistant.components.frontend home-assistant-frontend==20241127.8 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 056d7422195..c6631388041 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -960,7 +960,7 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.62 +holidays==0.63 # homeassistant.components.frontend home-assistant-frontend==20241127.8 From 0dbd5bffe6014f7c5f613acf71962f1fc4b8548e Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 17 Dec 2024 11:26:51 +0100 Subject: [PATCH 0752/1198] Fix incorrect schema in config tests (#133404) --- tests/components/config/test_config_entries.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/components/config/test_config_entries.py b/tests/components/config/test_config_entries.py index 4d37f3c871b..0a1ffbe87b3 100644 --- a/tests/components/config/test_config_entries.py +++ b/tests/components/config/test_config_entries.py @@ -415,7 +415,7 @@ async def test_initialize_flow(hass: HomeAssistant, client: TestClient) -> None: return self.async_show_form( step_id="user", - data_schema=schema, + data_schema=vol.Schema(schema), description_placeholders={ "url": "https://example.com", "show_advanced_options": self.show_advanced_options, @@ -804,7 +804,7 @@ async def test_get_progress_flow(hass: HomeAssistant, client: TestClient) -> Non return self.async_show_form( step_id="user", - data_schema=schema, + data_schema=vol.Schema(schema), errors={"username": "Should be unique."}, ) @@ -842,7 +842,7 @@ async def test_get_progress_flow_unauth( return self.async_show_form( step_id="user", - data_schema=schema, + data_schema=vol.Schema(schema), errors={"username": "Should be unique."}, ) @@ -874,7 +874,7 @@ async def test_options_flow(hass: HomeAssistant, client: TestClient) -> None: schema[vol.Required("enabled")] = bool return self.async_show_form( step_id="user", - data_schema=schema, + data_schema=vol.Schema(schema), description_placeholders={"enabled": "Set to true to be true"}, ) From 084ef206955a428106c347cc6d60bf3b83a0c080 Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Tue, 17 Dec 2024 11:33:04 +0100 Subject: [PATCH 0753/1198] Add quality_scale.yaml to enphase_envoy (#132489) --- 
.../components/enphase_envoy/config_flow.py | 3 +- .../enphase_envoy/quality_scale.yaml | 124 ++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 3 files changed, 126 insertions(+), 2 deletions(-) create mode 100644 homeassistant/components/enphase_envoy/quality_scale.yaml diff --git a/homeassistant/components/enphase_envoy/config_flow.py b/homeassistant/components/enphase_envoy/config_flow.py index 23c769293c8..70ba3570e91 100644 --- a/homeassistant/components/enphase_envoy/config_flow.py +++ b/homeassistant/components/enphase_envoy/config_flow.py @@ -31,6 +31,7 @@ from .const import ( OPTION_DISABLE_KEEP_ALIVE, OPTION_DISABLE_KEEP_ALIVE_DEFAULT_VALUE, ) +from .coordinator import EnphaseConfigEntry _LOGGER = logging.getLogger(__name__) @@ -67,7 +68,7 @@ class EnphaseConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: EnphaseConfigEntry, ) -> EnvoyOptionsFlowHandler: """Options flow handler for Enphase_Envoy.""" return EnvoyOptionsFlowHandler() diff --git a/homeassistant/components/enphase_envoy/quality_scale.yaml b/homeassistant/components/enphase_envoy/quality_scale.yaml new file mode 100644 index 00000000000..c4077b8df67 --- /dev/null +++ b/homeassistant/components/enphase_envoy/quality_scale.yaml @@ -0,0 +1,124 @@ +rules: + # Bronze + action-setup: + status: done + comment: only actions implemented are platform native ones. + appropriate-polling: + status: done + comment: fixed 1 minute cycle based on Enphase Envoy device characteristics + brands: done + common-modules: + status: done + comment: | + In coordinator.py, you set self.entry = entry, while after the super constructor, + you can access the entry via self.config_entry (you would have to overwrite the + type to make sure you don't have to assert not None every time)done + config-flow-test-coverage: + status: todo + comment: | + - test_form is missing an assertion for the unique id of the resulting entry + - Let's also have test_user_no_serial_number assert the unique_id (as in, it can't be set to the serial_number since we dont have one, so let's assert what it will result in) + - Let's have every test result in either CREATE_ENTRY or ABORT (like test_form_invalid_auth or test_form_cannot_connect, they can be parametrized) + - test_zeroconf_token_firmware and test_zeroconf_pre_token_firmware can also be parametrized I think + - test_zero_conf_malformed_serial_property - with pytest.raises(KeyError) as ex:: + I don't believe this should be able to raise a KeyError Shouldn't we abort the flow? + test_reauth -> Let's also assert result before we start with the async_configure part + config-flow: + status: todo + comment: | + - async_step_zeroconf -> a config entry title is considered userland, + so if someone renamed their entry, it will be reverted back with the code at L146. 
+ - async_step_reaut L160: I believe that the unique is already set when starting a reauth flow + - The config flow is missing data descriptions for the other fields + dependency-transparency: done + docs-actions: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy/#actions + docs-high-level-description: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy + docs-installation-instructions: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy#prerequisites + docs-removal-instructions: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy#removing-the-integration + entity-event-setup: + status: done + comment: no events used. + entity-unique-id: done + has-entity-name: done + runtime-data: + status: done + comment: | + async_unload_entry- coordinator: EnphaseUpdateCoordinator = entry.runtime_data + You can remove the EnphaseUpdateCoordinator as the type can now be inferred thanks to the typed config entry + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: todo + comment: | + needs to raise appropriate error when exception occurs. + Pending https://github.com/pyenphase/pyenphase/pull/194 + config-entry-unloading: done + docs-configuration-parameters: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy#configuration + docs-installation-parameters: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy#required-manual-input + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: + status: done + comment: pending https://github.com/home-assistant/core/pull/132373 + reauthentication-flow: done + test-coverage: + status: todo + comment: | + - test_config_different_unique_id -> unique_id set to the mock config entry is an int, not a str + - Apart from the coverage, test_option_change_reload does not verify that the config entry is reloaded + + # Gold + devices: done + diagnostics: done + discovery-update-info: done + discovery: done + docs-data-update: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy#data-updates + docs-examples: + status: todo + comment: add blue-print examples, if any + docs-known-limitations: todo + docs-supported-devices: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy#supported-devices + docs-supported-functions: todo + docs-troubleshooting: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy#troubleshooting + docs-use-cases: todo + dynamic-devices: todo + entity-category: todo + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: + status: todo + comment: pending https://github.com/home-assistant/core/pull/132483 + icon-translations: todo + reconfiguration-flow: done + repair-issues: + status: exempt + comment: no general issues or repair.py + stale-devices: todo + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 5ad3467dd79..83335fa5c44 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -355,7 +355,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "energyzero", "enigma2", "enocean", - "enphase_envoy", "entur_public_transport", "environment_canada", "envisalink", 
From ce0117b2b82cda7900f18d781be2d6a8d0f807ed Mon Sep 17 00:00:00 2001 From: Jonas Fors Lellky Date: Tue, 17 Dec 2024 11:36:45 +0100 Subject: [PATCH 0754/1198] Fix fan setpoints for flexit_bacnet (#133388) --- .../components/flexit_bacnet/number.py | 52 ++++++++------ tests/components/flexit_bacnet/conftest.py | 20 +++--- .../flexit_bacnet/snapshots/test_number.ambr | 68 +++++++++---------- tests/components/flexit_bacnet/test_number.py | 8 +-- 4 files changed, 80 insertions(+), 68 deletions(-) diff --git a/homeassistant/components/flexit_bacnet/number.py b/homeassistant/components/flexit_bacnet/number.py index 6e6e2eea980..029ce896445 100644 --- a/homeassistant/components/flexit_bacnet/number.py +++ b/homeassistant/components/flexit_bacnet/number.py @@ -29,6 +29,8 @@ class FlexitNumberEntityDescription(NumberEntityDescription): """Describes a Flexit number entity.""" native_value_fn: Callable[[FlexitBACnet], float] + native_max_value_fn: Callable[[FlexitBACnet], int] + native_min_value_fn: Callable[[FlexitBACnet], int] set_native_value_fn: Callable[[FlexitBACnet], Callable[[int], Awaitable[None]]] @@ -37,121 +39,121 @@ NUMBERS: tuple[FlexitNumberEntityDescription, ...] = ( key="away_extract_fan_setpoint", translation_key="away_extract_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_extract_air_away, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_away, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda device: int(device.fan_setpoint_extract_air_home), + native_min_value_fn=lambda _: 30, ), FlexitNumberEntityDescription( key="away_supply_fan_setpoint", translation_key="away_supply_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_supply_air_away, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_away, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda device: int(device.fan_setpoint_supply_air_home), + native_min_value_fn=lambda _: 30, ), FlexitNumberEntityDescription( key="cooker_hood_extract_fan_setpoint", translation_key="cooker_hood_extract_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_extract_air_cooker, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_cooker, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda _: 30, ), FlexitNumberEntityDescription( key="cooker_hood_supply_fan_setpoint", translation_key="cooker_hood_supply_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_supply_air_cooker, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_cooker, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda _: 30, ), FlexitNumberEntityDescription( key="fireplace_extract_fan_setpoint", translation_key="fireplace_extract_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: 
device.fan_setpoint_extract_air_fire, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_fire, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda _: 30, ), FlexitNumberEntityDescription( key="fireplace_supply_fan_setpoint", translation_key="fireplace_supply_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_supply_air_fire, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_fire, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda _: 30, ), FlexitNumberEntityDescription( key="high_extract_fan_setpoint", translation_key="high_extract_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_extract_air_high, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_high, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda device: int(device.fan_setpoint_extract_air_home), ), FlexitNumberEntityDescription( key="high_supply_fan_setpoint", translation_key="high_supply_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_supply_air_high, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_high, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda device: int(device.fan_setpoint_supply_air_home), ), FlexitNumberEntityDescription( key="home_extract_fan_setpoint", translation_key="home_extract_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_extract_air_home, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_home, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda device: int(device.fan_setpoint_extract_air_away), ), FlexitNumberEntityDescription( key="home_supply_fan_setpoint", translation_key="home_supply_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_supply_air_home, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_home, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda device: int(device.fan_setpoint_supply_air_away), ), ) @@ -192,6 +194,16 @@ class FlexitNumber(FlexitEntity, NumberEntity): """Return the state of the number.""" return self.entity_description.native_value_fn(self.coordinator.device) + @property + def native_max_value(self) -> float: + """Return the native max value of the number.""" + return self.entity_description.native_max_value_fn(self.coordinator.device) + + @property + def native_min_value(self) -> float: + """Return the native min value of the number.""" + return self.entity_description.native_min_value_fn(self.coordinator.device) + async def async_set_native_value(self, value: float) -> None: """Update the current value.""" 
set_native_value_fn = self.entity_description.set_native_value_fn( diff --git a/tests/components/flexit_bacnet/conftest.py b/tests/components/flexit_bacnet/conftest.py index a6205bac506..6ce17261bfc 100644 --- a/tests/components/flexit_bacnet/conftest.py +++ b/tests/components/flexit_bacnet/conftest.py @@ -69,16 +69,16 @@ def mock_flexit_bacnet() -> Generator[AsyncMock]: flexit_bacnet.electric_heater = True # Mock fan setpoints - flexit_bacnet.fan_setpoint_extract_air_fire = 10 - flexit_bacnet.fan_setpoint_supply_air_fire = 20 - flexit_bacnet.fan_setpoint_extract_air_away = 30 - flexit_bacnet.fan_setpoint_supply_air_away = 40 - flexit_bacnet.fan_setpoint_extract_air_home = 50 - flexit_bacnet.fan_setpoint_supply_air_home = 60 - flexit_bacnet.fan_setpoint_extract_air_high = 70 - flexit_bacnet.fan_setpoint_supply_air_high = 80 - flexit_bacnet.fan_setpoint_extract_air_cooker = 90 - flexit_bacnet.fan_setpoint_supply_air_cooker = 100 + flexit_bacnet.fan_setpoint_extract_air_fire = 56 + flexit_bacnet.fan_setpoint_supply_air_fire = 77 + flexit_bacnet.fan_setpoint_extract_air_away = 40 + flexit_bacnet.fan_setpoint_supply_air_away = 42 + flexit_bacnet.fan_setpoint_extract_air_home = 70 + flexit_bacnet.fan_setpoint_supply_air_home = 74 + flexit_bacnet.fan_setpoint_extract_air_high = 100 + flexit_bacnet.fan_setpoint_supply_air_high = 100 + flexit_bacnet.fan_setpoint_extract_air_cooker = 50 + flexit_bacnet.fan_setpoint_supply_air_cooker = 70 yield flexit_bacnet diff --git a/tests/components/flexit_bacnet/snapshots/test_number.ambr b/tests/components/flexit_bacnet/snapshots/test_number.ambr index c4fb1e7c434..78eefd08345 100644 --- a/tests/components/flexit_bacnet/snapshots/test_number.ambr +++ b/tests/components/flexit_bacnet/snapshots/test_number.ambr @@ -5,8 +5,8 @@ }), 'area_id': None, 'capabilities': dict({ - 'max': 100, - 'min': 0, + 'max': 70, + 'min': 30, 'mode': , 'step': 1, }), @@ -42,8 +42,8 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Device Name Away extract fan setpoint', - 'max': 100, - 'min': 0, + 'max': 70, + 'min': 30, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -53,7 +53,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '30', + 'state': '40', }) # --- # name: test_numbers[number.device_name_away_supply_fan_setpoint-entry] @@ -62,8 +62,8 @@ }), 'area_id': None, 'capabilities': dict({ - 'max': 100, - 'min': 0, + 'max': 74, + 'min': 30, 'mode': , 'step': 1, }), @@ -99,8 +99,8 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Device Name Away supply fan setpoint', - 'max': 100, - 'min': 0, + 'max': 74, + 'min': 30, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -110,7 +110,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '40', + 'state': '42', }) # --- # name: test_numbers[number.device_name_cooker_hood_extract_fan_setpoint-entry] @@ -120,7 +120,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, }), @@ -157,7 +157,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name Cooker hood extract fan setpoint', 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -167,7 +167,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '90', + 'state': '50', }) # --- # name: test_numbers[number.device_name_cooker_hood_supply_fan_setpoint-entry] @@ -177,7 +177,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, }), @@ 
-214,7 +214,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name Cooker hood supply fan setpoint', 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -224,7 +224,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '100', + 'state': '70', }) # --- # name: test_numbers[number.device_name_fireplace_extract_fan_setpoint-entry] @@ -234,7 +234,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, }), @@ -271,7 +271,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name Fireplace extract fan setpoint', 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -281,7 +281,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '10', + 'state': '56', }) # --- # name: test_numbers[number.device_name_fireplace_supply_fan_setpoint-entry] @@ -291,7 +291,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, }), @@ -328,7 +328,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name Fireplace supply fan setpoint', 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -338,7 +338,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '20', + 'state': '77', }) # --- # name: test_numbers[number.device_name_high_extract_fan_setpoint-entry] @@ -348,7 +348,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 70, 'mode': , 'step': 1, }), @@ -385,7 +385,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name High extract fan setpoint', 'max': 100, - 'min': 0, + 'min': 70, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -395,7 +395,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '70', + 'state': '100', }) # --- # name: test_numbers[number.device_name_high_supply_fan_setpoint-entry] @@ -405,7 +405,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 74, 'mode': , 'step': 1, }), @@ -442,7 +442,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name High supply fan setpoint', 'max': 100, - 'min': 0, + 'min': 74, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -452,7 +452,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '80', + 'state': '100', }) # --- # name: test_numbers[number.device_name_home_extract_fan_setpoint-entry] @@ -462,7 +462,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 40, 'mode': , 'step': 1, }), @@ -499,7 +499,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name Home extract fan setpoint', 'max': 100, - 'min': 0, + 'min': 40, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -509,7 +509,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '50', + 'state': '70', }) # --- # name: test_numbers[number.device_name_home_supply_fan_setpoint-entry] @@ -519,7 +519,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 42, 'mode': , 'step': 1, }), @@ -556,7 +556,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name Home supply fan setpoint', 'max': 100, - 'min': 0, + 'min': 42, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -566,6 +566,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '60', + 'state': '74', }) # --- diff --git a/tests/components/flexit_bacnet/test_number.py b/tests/components/flexit_bacnet/test_number.py index ad49908fa96..f566b623f12 100644 --- 
a/tests/components/flexit_bacnet/test_number.py +++ b/tests/components/flexit_bacnet/test_number.py @@ -64,21 +64,21 @@ async def test_numbers_implementation( assert len(mocked_method.mock_calls) == 1 assert hass.states.get(ENTITY_ID).state == "60" - mock_flexit_bacnet.fan_setpoint_supply_air_fire = 10 + mock_flexit_bacnet.fan_setpoint_supply_air_fire = 40 await hass.services.async_call( NUMBER_DOMAIN, SERVICE_SET_VALUE, { ATTR_ENTITY_ID: ENTITY_ID, - ATTR_VALUE: 10, + ATTR_VALUE: 40, }, blocking=True, ) mocked_method = getattr(mock_flexit_bacnet, "set_fan_setpoint_supply_air_fire") assert len(mocked_method.mock_calls) == 2 - assert hass.states.get(ENTITY_ID).state == "10" + assert hass.states.get(ENTITY_ID).state == "40" # Error recovery, when setting the value mock_flexit_bacnet.set_fan_setpoint_supply_air_fire.side_effect = DecodingError @@ -89,7 +89,7 @@ async def test_numbers_implementation( SERVICE_SET_VALUE, { ATTR_ENTITY_ID: ENTITY_ID, - ATTR_VALUE: 10, + ATTR_VALUE: 40, }, blocking=True, ) From 991864b38c9b145a91763addef3ade6a2fc89708 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 17 Dec 2024 12:02:53 +0100 Subject: [PATCH 0755/1198] Fix schema translation checks for nested config-flow sections (#133392) --- tests/components/conftest.py | 94 +++++++++++++++++++++++++----------- 1 file changed, 66 insertions(+), 28 deletions(-) diff --git a/tests/components/conftest.py b/tests/components/conftest.py index 3828cc5ff37..e95147b8664 100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -19,6 +19,7 @@ from aiohasupervisor.models import ( StoreInfo, ) import pytest +import voluptuous as vol from homeassistant.components import repairs from homeassistant.config_entries import ( @@ -34,6 +35,7 @@ from homeassistant.data_entry_flow import ( FlowHandler, FlowManager, FlowResultType, + section, ) from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import issue_registry as ir @@ -644,6 +646,61 @@ def _get_integration_quality_scale_rule(integration: str, rule: str) -> str: return status if isinstance(status, str) else status["status"] +async def _check_step_or_section_translations( + hass: HomeAssistant, + translation_errors: dict[str, str], + category: str, + integration: str, + translation_prefix: str, + description_placeholders: dict[str, str], + data_schema: vol.Schema | None, +) -> None: + # neither title nor description are required + # - title defaults to integration name + # - description is optional + for header in ("title", "description"): + await _validate_translation( + hass, + translation_errors, + category, + integration, + f"{translation_prefix}.{header}", + description_placeholders, + translation_required=False, + ) + + if not data_schema: + return + + for data_key, data_value in data_schema.schema.items(): + if isinstance(data_value, section): + # check the nested section + await _check_step_or_section_translations( + hass, + translation_errors, + category, + integration, + f"{translation_prefix}.sections.{data_key}", + description_placeholders, + data_value.schema, + ) + return + iqs_config_flow = _get_integration_quality_scale_rule( + integration, "config-flow" + ) + # data and data_description are compulsory + for header in ("data", "data_description"): + await _validate_translation( + hass, + translation_errors, + category, + integration, + f"{translation_prefix}.{header}.{data_key}", + description_placeholders, + translation_required=(iqs_config_flow == "done"), + ) + + 
async def _check_config_flow_result_translations( manager: FlowManager, flow: FlowHandler, @@ -675,35 +732,16 @@ async def _check_config_flow_result_translations( setattr(flow, "__flow_seen_before", hasattr(flow, "__flow_seen_before")) if result["type"] is FlowResultType.FORM: - iqs_config_flow = _get_integration_quality_scale_rule( - integration, "config-flow" - ) if step_id := result.get("step_id"): - # neither title nor description are required - # - title defaults to integration name - # - description is optional - for header in ("title", "description"): - await _validate_translation( - flow.hass, - translation_errors, - category, - integration, - f"{key_prefix}step.{step_id}.{header}", - result["description_placeholders"], - translation_required=False, - ) - if iqs_config_flow == "done" and (data_schema := result["data_schema"]): - # data and data_description are compulsory - for data_key in data_schema.schema: - for header in ("data", "data_description"): - await _validate_translation( - flow.hass, - translation_errors, - category, - integration, - f"{key_prefix}step.{step_id}.{header}.{data_key}", - result["description_placeholders"], - ) + await _check_step_or_section_translations( + flow.hass, + translation_errors, + category, + integration, + f"{key_prefix}step.{step_id}", + result["description_placeholders"], + result["data_schema"], + ) if errors := result.get("errors"): for error in errors.values(): From 637614299ce5e649fc5e01fda1b607ade7c1165b Mon Sep 17 00:00:00 2001 From: G Johansson Date: Tue, 17 Dec 2024 12:41:18 +0100 Subject: [PATCH 0756/1198] Fix strptime in python_script (#133159) Co-authored-by: Erik Montnemery --- .../components/python_script/__init__.py | 17 +++++++++++++ tests/components/python_script/test_init.py | 24 +++++++++++++++++++ 2 files changed, 41 insertions(+) diff --git a/homeassistant/components/python_script/__init__.py b/homeassistant/components/python_script/__init__.py index 70e9c5b0d29..af773278029 100644 --- a/homeassistant/components/python_script/__init__.py +++ b/homeassistant/components/python_script/__init__.py @@ -1,5 +1,6 @@ """Component to allow running Python scripts.""" +from collections.abc import Mapping, Sequence import datetime import glob import logging @@ -7,6 +8,7 @@ from numbers import Number import operator import os import time +import types from typing import Any from RestrictedPython import ( @@ -167,6 +169,20 @@ IOPERATOR_TO_OPERATOR = { } +def guarded_import( + name: str, + globals: Mapping[str, object] | None = None, + locals: Mapping[str, object] | None = None, + fromlist: Sequence[str] = (), + level: int = 0, +) -> types.ModuleType: + """Guard imports.""" + # Allow import of _strptime needed by datetime.datetime.strptime + if name == "_strptime": + return __import__(name, globals, locals, fromlist, level) + raise ScriptError(f"Not allowed to import {name}") + + def guarded_inplacevar(op: str, target: Any, operand: Any) -> Any: """Implement augmented-assign (+=, -=, etc.) operators for restricted code. 
@@ -232,6 +248,7 @@ def execute(hass, filename, source, data=None, return_response=False): return getattr(obj, name, default) extra_builtins = { + "__import__": guarded_import, "datetime": datetime, "sorted": sorted, "time": TimeWrapper(), diff --git a/tests/components/python_script/test_init.py b/tests/components/python_script/test_init.py index c4dc00c448a..2d151b4b81e 100644 --- a/tests/components/python_script/test_init.py +++ b/tests/components/python_script/test_init.py @@ -688,3 +688,27 @@ async def test_prohibited_augmented_assignment_operations( hass.async_add_executor_job(execute, hass, "aug_assign_prohibited.py", case, {}) await hass.async_block_till_done(wait_background_tasks=True) assert error in caplog.text + + +async def test_import_allow_strptime( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test calling datetime.datetime.strptime works.""" + source = """ +test_date = datetime.datetime.strptime('2024-04-01', '%Y-%m-%d') +logger.info(f'Date {test_date}') + """ + hass.async_add_executor_job(execute, hass, "test.py", source, {}) + await hass.async_block_till_done(wait_background_tasks=True) + assert "Error executing script: Not allowed to import _strptime" not in caplog.text + assert "Date 2024-04-01 00:00:00" in caplog.text + + +async def test_no_other_imports_allowed( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test imports are not allowed.""" + source = "import sys" + hass.async_add_executor_job(execute, hass, "test.py", source, {}) + await hass.async_block_till_done(wait_background_tasks=True) + assert "Error executing script: Not allowed to import sys" in caplog.text From e61142c2c2ce88bcfca3b141a77635e5d681c653 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 17 Dec 2024 12:53:27 +0100 Subject: [PATCH 0757/1198] Check if requirement is typed in strict_typing IQS validation (#133415) * Check if requirement is typed in strict_typing IQS validation * Apply suggestions from code review * Apply suggestions from code review * Return a list * Adjust * Improve --- .../components/fritz/quality_scale.yaml | 5 +++- .../components/imap/quality_scale.yaml | 5 +++- .../components/mastodon/quality_scale.yaml | 5 +++- .../components/mqtt/quality_scale.yaml | 5 +++- .../components/stookwijzer/quality_scale.yaml | 5 +++- .../quality_scale_validation/strict_typing.py | 29 +++++++++++++++++++ 6 files changed, 49 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/fritz/quality_scale.yaml b/homeassistant/components/fritz/quality_scale.yaml index b832492cf9d..06c572f93a6 100644 --- a/homeassistant/components/fritz/quality_scale.yaml +++ b/homeassistant/components/fritz/quality_scale.yaml @@ -95,4 +95,7 @@ rules: comment: | the fritzconnection lib is not async and relies on requests changing this might need a bit more efforts to be spent - strict-typing: done + strict-typing: + status: todo + comment: | + Requirements 'fritzconnection==1.14.0' and 'xmltodict==0.13.0' appear untyped diff --git a/homeassistant/components/imap/quality_scale.yaml b/homeassistant/components/imap/quality_scale.yaml index 180aef93f91..1c75b527882 100644 --- a/homeassistant/components/imap/quality_scale.yaml +++ b/homeassistant/components/imap/quality_scale.yaml @@ -94,4 +94,7 @@ rules: status: exempt comment: | This integration does not use web sessions. 
- strict-typing: done + strict-typing: + status: todo + comment: | + Requirement 'aioimaplib==1.1.0' appears untyped diff --git a/homeassistant/components/mastodon/quality_scale.yaml b/homeassistant/components/mastodon/quality_scale.yaml index 315ef808701..86702095e95 100644 --- a/homeassistant/components/mastodon/quality_scale.yaml +++ b/homeassistant/components/mastodon/quality_scale.yaml @@ -93,4 +93,7 @@ rules: # Platinum async-dependency: todo inject-websession: todo - strict-typing: done + strict-typing: + status: todo + comment: | + Requirement 'Mastodon.py==1.8.1' appears untyped diff --git a/homeassistant/components/mqtt/quality_scale.yaml b/homeassistant/components/mqtt/quality_scale.yaml index f31d3e25d15..26ce8cb08dd 100644 --- a/homeassistant/components/mqtt/quality_scale.yaml +++ b/homeassistant/components/mqtt/quality_scale.yaml @@ -125,4 +125,7 @@ rules: status: exempt comment: | This integration does not use web sessions. - strict-typing: done + strict-typing: + status: todo + comment: | + Requirement 'paho-mqtt==1.6.1' appears untyped diff --git a/homeassistant/components/stookwijzer/quality_scale.yaml b/homeassistant/components/stookwijzer/quality_scale.yaml index 67fadc00b64..20e64efaa92 100644 --- a/homeassistant/components/stookwijzer/quality_scale.yaml +++ b/homeassistant/components/stookwijzer/quality_scale.yaml @@ -86,4 +86,7 @@ rules: # Platinum async-dependency: done inject-websession: done - strict-typing: done + strict-typing: + status: todo + comment: | + Requirement 'stookwijzer==1.5.1' appears untyped diff --git a/script/hassfest/quality_scale_validation/strict_typing.py b/script/hassfest/quality_scale_validation/strict_typing.py index a27ab752cf0..c1373032ff8 100644 --- a/script/hassfest/quality_scale_validation/strict_typing.py +++ b/script/hassfest/quality_scale_validation/strict_typing.py @@ -4,6 +4,7 @@ https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/s """ from functools import lru_cache +from importlib import metadata from pathlib import Path import re @@ -24,6 +25,29 @@ def _strict_typing_components(strict_typing_file: Path) -> set[str]: ) +def _check_requirements_are_typed(integration: Integration) -> list[str]: + """Check if all requirements are typed.""" + invalid_requirements = [] + for requirement in integration.requirements: + requirement_name, requirement_version = requirement.split("==") + # Remove any extras + requirement_name = requirement_name.split("[")[0] + try: + distribution = metadata.distribution(requirement_name) + except metadata.PackageNotFoundError: + # Package not installed locally + continue + if distribution.version != requirement_version: + # Version out of date locally + continue + + if not any(file for file in distribution.files if file.name == "py.typed"): + # no py.typed file + invalid_requirements.append(requirement) + + return invalid_requirements + + def validate( config: Config, integration: Integration, *, rules_done: set[str] ) -> list[str] | None: @@ -35,4 +59,9 @@ def validate( "Integration does not have strict typing enabled " "(is missing from .strict-typing)" ] + if untyped_requirements := _check_requirements_are_typed(integration): + return [ + f"Requirements {untyped_requirements} do not conform PEP 561 (https://peps.python.org/pep-0561/)", + "They should be typed and have a 'py.typed' file", + ] return None From a4588c80d56adef47ef511ebecdc310cc52a3211 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 17 Dec 2024 13:18:26 +0100 Subject: [PATCH 0758/1198] Bump 
aiohasupervisor to version 0.2.2b2 (#133417) * Bump aiohasupervisor to version 0.2.2b2 * Update test --- homeassistant/components/hassio/backup.py | 2 +- homeassistant/components/hassio/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/hassio/test_backup.py | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/hassio/backup.py b/homeassistant/components/hassio/backup.py index 0353255fe7b..34c0701fdc4 100644 --- a/homeassistant/components/hassio/backup.py +++ b/homeassistant/components/hassio/backup.py @@ -211,7 +211,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): for agent_id in agent_ids if manager.backup_agents[agent_id].domain == DOMAIN ] - locations = {agent.location for agent in hassio_agents} + locations = [agent.location for agent in hassio_agents] backup = await self._client.backups.partial_backup( supervisor_backups.PartialBackupOptions( diff --git a/homeassistant/components/hassio/manifest.json b/homeassistant/components/hassio/manifest.json index 8fe124e763c..70230701965 100644 --- a/homeassistant/components/hassio/manifest.json +++ b/homeassistant/components/hassio/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/hassio", "iot_class": "local_polling", "quality_scale": "internal", - "requirements": ["aiohasupervisor==0.2.2b0"], + "requirements": ["aiohasupervisor==0.2.2b2"], "single_config_entry": true } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 65a6890024f..add20ef0870 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -3,7 +3,7 @@ aiodhcpwatcher==1.0.2 aiodiscover==2.1.0 aiodns==3.2.0 -aiohasupervisor==0.2.2b0 +aiohasupervisor==0.2.2b2 aiohttp-fast-zlib==0.2.0 aiohttp==3.11.10 aiohttp_cors==0.7.0 diff --git a/pyproject.toml b/pyproject.toml index 2930d381d2a..91acea30b52 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ dependencies = [ # Integrations may depend on hassio integration without listing it to # change behavior based on presence of supervisor. 
Deprecated with #127228 # Lib can be removed with 2025.11 - "aiohasupervisor==0.2.2b0", + "aiohasupervisor==0.2.2b2", "aiohttp==3.11.10", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.2.0", diff --git a/requirements.txt b/requirements.txt index e80804569d3..e4346c3e517 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ # Home Assistant Core aiodns==3.2.0 -aiohasupervisor==0.2.2b0 +aiohasupervisor==0.2.2b2 aiohttp==3.11.10 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index f2ab0a938d9..2540a297334 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -261,7 +261,7 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.hassio -aiohasupervisor==0.2.2b0 +aiohasupervisor==0.2.2b2 # homeassistant.components.homekit_controller aiohomekit==3.2.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c6631388041..fe528899ad3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -246,7 +246,7 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.hassio -aiohasupervisor==0.2.2b0 +aiohasupervisor==0.2.2b2 # homeassistant.components.homekit_controller aiohomekit==3.2.7 diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py index 3e928bc996b..ab708438e51 100644 --- a/tests/components/hassio/test_backup.py +++ b/tests/components/hassio/test_backup.py @@ -332,7 +332,7 @@ async def test_reader_writer_create( folders=None, homeassistant_exclude_database=False, homeassistant=True, - location={None}, + location=[None], name="Test", password=None, ) From 89946348df69b607edc920d7e33b471c7169ec1f Mon Sep 17 00:00:00 2001 From: Cyrill Raccaud Date: Tue, 17 Dec 2024 13:54:07 +0100 Subject: [PATCH 0759/1198] Add reconfigure to Cookidoo integration (#133144) * add reconfigure * merge steps * comments --- .../components/cookidoo/config_flow.py | 75 +++++++-- .../components/cookidoo/quality_scale.yaml | 2 +- .../components/cookidoo/strings.json | 6 +- tests/components/cookidoo/test_config_flow.py | 158 ++++++++++++++++++ 4 files changed, 221 insertions(+), 20 deletions(-) diff --git a/homeassistant/components/cookidoo/config_flow.py b/homeassistant/components/cookidoo/config_flow.py index 58e99a70907..120ab162a6c 100644 --- a/homeassistant/components/cookidoo/config_flow.py +++ b/homeassistant/components/cookidoo/config_flow.py @@ -17,7 +17,12 @@ from cookidoo_api import ( ) import voluptuous as vol -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ( + SOURCE_RECONFIGURE, + SOURCE_USER, + ConfigFlow, + ConfigFlowResult, +) from homeassistant.const import CONF_COUNTRY, CONF_EMAIL, CONF_LANGUAGE, CONF_PASSWORD from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.selector import ( @@ -58,26 +63,43 @@ class CookidooConfigFlow(ConfigFlow, domain=DOMAIN): user_input: dict[str, Any] - async def async_step_user( - self, user_input: dict[str, Any] | None = None + async def async_step_reconfigure( + self, user_input: dict[str, Any] ) -> ConfigFlowResult: - """Handle the user step.""" + """Perform reconfigure upon an user action.""" + return await self.async_step_user(user_input) + + async def async_step_user( + self, + user_input: dict[str, Any] | None = None, + ) -> ConfigFlowResult: + """Handle the user step as well as serve for reconfiguration.""" errors: dict[str, str] = {} if user_input is not None and not ( errors := await 
self.validate_input(user_input) ): - self._async_abort_entries_match({CONF_EMAIL: user_input[CONF_EMAIL]}) + if self.source == SOURCE_USER: + self._async_abort_entries_match({CONF_EMAIL: user_input[CONF_EMAIL]}) self.user_input = user_input return await self.async_step_language() await self.generate_country_schema() + suggested_values: dict = {} + if self.source == SOURCE_RECONFIGURE: + reconfigure_entry = self._get_reconfigure_entry() + suggested_values = { + **suggested_values, + **reconfigure_entry.data, + } + if user_input is not None: + suggested_values = {**suggested_values, **user_input} return self.async_show_form( step_id="user", data_schema=self.add_suggested_values_to_schema( data_schema=vol.Schema( {**AUTH_DATA_SCHEMA, **self.COUNTRY_DATA_SCHEMA} ), - suggested_values=user_input, + suggested_values=suggested_values, ), description_placeholders={"cookidoo": "Cookidoo"}, errors=errors, @@ -92,8 +114,18 @@ class CookidooConfigFlow(ConfigFlow, domain=DOMAIN): if language_input is not None and not ( errors := await self.validate_input(self.user_input, language_input) ): - return self.async_create_entry( - title="Cookidoo", data={**self.user_input, **language_input} + if self.source == SOURCE_USER: + return self.async_create_entry( + title="Cookidoo", data={**self.user_input, **language_input} + ) + reconfigure_entry = self._get_reconfigure_entry() + return self.async_update_reload_and_abort( + reconfigure_entry, + data={ + **reconfigure_entry.data, + **self.user_input, + **language_input, + }, ) await self.generate_language_schema() @@ -169,24 +201,35 @@ class CookidooConfigFlow(ConfigFlow, domain=DOMAIN): async def validate_input( self, - user_input: Mapping[str, Any], - language_input: Mapping[str, Any] | None = None, + user_input: dict[str, Any], + language_input: dict[str, Any] | None = None, ) -> dict[str, str]: """Input Helper.""" errors: dict[str, str] = {} + data_input: dict[str, Any] = {} + + if self.source == SOURCE_RECONFIGURE: + reconfigure_entry = self._get_reconfigure_entry() + data_input = {**data_input, **reconfigure_entry.data} + data_input = {**data_input, **user_input} + if language_input: + data_input = {**data_input, **language_input} + else: + data_input[CONF_LANGUAGE] = ( + await get_localization_options(country=data_input[CONF_COUNTRY].lower()) + )[0] # Pick any language to test login + session = async_get_clientsession(self.hass) cookidoo = Cookidoo( session, CookidooConfig( - email=user_input[CONF_EMAIL], - password=user_input[CONF_PASSWORD], + email=data_input[CONF_EMAIL], + password=data_input[CONF_PASSWORD], localization=CookidooLocalizationConfig( - country_code=user_input[CONF_COUNTRY].lower(), - language=language_input[CONF_LANGUAGE] - if language_input - else "de-ch", + country_code=data_input[CONF_COUNTRY].lower(), + language=data_input[CONF_LANGUAGE], ), ), ) diff --git a/homeassistant/components/cookidoo/quality_scale.yaml b/homeassistant/components/cookidoo/quality_scale.yaml index 25069c87c46..95a35829079 100644 --- a/homeassistant/components/cookidoo/quality_scale.yaml +++ b/homeassistant/components/cookidoo/quality_scale.yaml @@ -66,7 +66,7 @@ rules: diagnostics: todo exception-translations: done icon-translations: done - reconfiguration-flow: todo + reconfiguration-flow: done dynamic-devices: status: exempt comment: No dynamic entities available diff --git a/homeassistant/components/cookidoo/strings.json b/homeassistant/components/cookidoo/strings.json index 19f709ddaf8..14344bed13d 100644 --- a/homeassistant/components/cookidoo/strings.json 
+++ b/homeassistant/components/cookidoo/strings.json @@ -2,7 +2,7 @@ "config": { "step": { "user": { - "title": "Login to {cookidoo}", + "title": "Setup {cookidoo}", "data": { "email": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]", @@ -11,11 +11,11 @@ "data_description": { "email": "Email used to access your {cookidoo} account.", "password": "Password used to access your {cookidoo} account.", - "country": "Pick your language for the {cookidoo} content." + "country": "Pick your country for the {cookidoo} content." } }, "language": { - "title": "Set language for {cookidoo}", + "title": "Setup {cookidoo}", "data": { "language": "[%key:common::config_flow::data::language%]" }, diff --git a/tests/components/cookidoo/test_config_flow.py b/tests/components/cookidoo/test_config_flow.py index cfdc284dbfe..0057bb3767e 100644 --- a/tests/components/cookidoo/test_config_flow.py +++ b/tests/components/cookidoo/test_config_flow.py @@ -16,6 +16,7 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from .conftest import COUNTRY, EMAIL, LANGUAGE, PASSWORD +from .test_init import setup_integration from tests.common import MockConfigEntry @@ -182,6 +183,163 @@ async def test_flow_user_init_data_already_configured( assert result["reason"] == "already_configured" +async def test_flow_reconfigure_success( + hass: HomeAssistant, + cookidoo_config_entry: AsyncMock, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test we get the reconfigure flow and create entry with success.""" + cookidoo_config_entry.add_to_hass(hass) + await setup_integration(hass, cookidoo_config_entry) + + result = await cookidoo_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["handler"] == "cookidoo" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={**MOCK_DATA_USER_STEP, CONF_COUNTRY: "DE"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "language" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_LANGUAGE: "de-DE"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert cookidoo_config_entry.data == { + **MOCK_DATA_USER_STEP, + CONF_COUNTRY: "DE", + CONF_LANGUAGE: "de-DE", + } + assert len(hass.config_entries.async_entries()) == 1 + + +@pytest.mark.parametrize( + ("raise_error", "text_error"), + [ + (CookidooRequestException(), "cannot_connect"), + (CookidooException(), "unknown"), + (IndexError(), "unknown"), + ], +) +async def test_flow_reconfigure_init_data_unknown_error_and_recover_on_step_1( + hass: HomeAssistant, + cookidoo_config_entry: AsyncMock, + mock_cookidoo_client: AsyncMock, + raise_error: Exception, + text_error: str, +) -> None: + """Test unknown errors.""" + mock_cookidoo_client.login.side_effect = raise_error + + cookidoo_config_entry.add_to_hass(hass) + await setup_integration(hass, cookidoo_config_entry) + + result = await cookidoo_config_entry.start_reconfigure_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["handler"] == "cookidoo" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={**MOCK_DATA_USER_STEP, CONF_COUNTRY: "DE"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"]["base"] == 
text_error + + # Recover + mock_cookidoo_client.login.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={**MOCK_DATA_USER_STEP, CONF_COUNTRY: "DE"}, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "language" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_LANGUAGE: "de-DE"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert cookidoo_config_entry.data == { + **MOCK_DATA_USER_STEP, + CONF_COUNTRY: "DE", + CONF_LANGUAGE: "de-DE", + } + assert len(hass.config_entries.async_entries()) == 1 + + +@pytest.mark.parametrize( + ("raise_error", "text_error"), + [ + (CookidooRequestException(), "cannot_connect"), + (CookidooException(), "unknown"), + (IndexError(), "unknown"), + ], +) +async def test_flow_reconfigure_init_data_unknown_error_and_recover_on_step_2( + hass: HomeAssistant, + cookidoo_config_entry: AsyncMock, + mock_cookidoo_client: AsyncMock, + raise_error: Exception, + text_error: str, +) -> None: + """Test unknown errors.""" + mock_cookidoo_client.get_additional_items.side_effect = raise_error + + cookidoo_config_entry.add_to_hass(hass) + await setup_integration(hass, cookidoo_config_entry) + + result = await cookidoo_config_entry.start_reconfigure_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["handler"] == "cookidoo" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={**MOCK_DATA_USER_STEP, CONF_COUNTRY: "DE"}, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "language" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_LANGUAGE: "de-DE"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"]["base"] == text_error + + # Recover + mock_cookidoo_client.get_additional_items.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_LANGUAGE: "de-DE"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert cookidoo_config_entry.data == { + **MOCK_DATA_USER_STEP, + CONF_COUNTRY: "DE", + CONF_LANGUAGE: "de-DE", + } + assert len(hass.config_entries.async_entries()) == 1 + + async def test_flow_reauth( hass: HomeAssistant, mock_cookidoo_client: AsyncMock, From 8b3cd41396942d1e644374425160b697fb6653a4 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 17 Dec 2024 13:55:04 +0100 Subject: [PATCH 0760/1198] Improve hassio backup agent test coverage (#133424) --- tests/components/hassio/test_backup.py | 380 ++++++++++++++++++++++--- 1 file changed, 334 insertions(+), 46 deletions(-) diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py index ab708438e51..9995425e6e1 100644 --- a/tests/components/hassio/test_backup.py +++ b/tests/components/hassio/test_backup.py @@ -1,13 +1,18 @@ """Test supervisor backup functionality.""" from collections.abc import AsyncGenerator, Generator +from dataclasses import replace from datetime import datetime from io import StringIO import os from typing import Any from unittest.mock import AsyncMock, patch -from aiohasupervisor.models import backups as supervisor_backups +from aiohasupervisor.models import ( + backups as supervisor_backups, + mounts as supervisor_mounts, +) 
+from aiohasupervisor.models.mounts import MountsInfo import pytest from homeassistant.components.backup import ( @@ -67,6 +72,94 @@ TEST_BACKUP_DETAILS = supervisor_backups.BackupComplete( type=TEST_BACKUP.type, ) +TEST_BACKUP_2 = supervisor_backups.Backup( + compressed=False, + content=supervisor_backups.BackupContent( + addons=["ssl"], + folders=["share"], + homeassistant=False, + ), + date=datetime.fromisoformat("1970-01-01T00:00:00Z"), + location=None, + locations={None}, + name="Test", + protected=False, + size=1.0, + size_bytes=1048576, + slug="abc123", + type=supervisor_backups.BackupType.PARTIAL, +) +TEST_BACKUP_DETAILS_2 = supervisor_backups.BackupComplete( + addons=[ + supervisor_backups.BackupAddon( + name="Terminal & SSH", + size=0.0, + slug="core_ssh", + version="9.14.0", + ) + ], + compressed=TEST_BACKUP_2.compressed, + date=TEST_BACKUP_2.date, + extra=None, + folders=["share"], + homeassistant_exclude_database=False, + homeassistant=None, + location=TEST_BACKUP_2.location, + locations=TEST_BACKUP_2.locations, + name=TEST_BACKUP_2.name, + protected=TEST_BACKUP_2.protected, + repositories=[], + size=TEST_BACKUP_2.size, + size_bytes=TEST_BACKUP_2.size_bytes, + slug=TEST_BACKUP_2.slug, + supervisor_version="2024.11.2", + type=TEST_BACKUP_2.type, +) + +TEST_BACKUP_3 = supervisor_backups.Backup( + compressed=False, + content=supervisor_backups.BackupContent( + addons=["ssl"], + folders=["share"], + homeassistant=True, + ), + date=datetime.fromisoformat("1970-01-01T00:00:00Z"), + location="share", + locations={"share"}, + name="Test", + protected=False, + size=1.0, + size_bytes=1048576, + slug="abc123", + type=supervisor_backups.BackupType.PARTIAL, +) +TEST_BACKUP_DETAILS_3 = supervisor_backups.BackupComplete( + addons=[ + supervisor_backups.BackupAddon( + name="Terminal & SSH", + size=0.0, + slug="core_ssh", + version="9.14.0", + ) + ], + compressed=TEST_BACKUP_3.compressed, + date=TEST_BACKUP_3.date, + extra=None, + folders=["share"], + homeassistant_exclude_database=False, + homeassistant=None, + location=TEST_BACKUP_3.location, + locations=TEST_BACKUP_3.locations, + name=TEST_BACKUP_3.name, + protected=TEST_BACKUP_3.protected, + repositories=[], + size=TEST_BACKUP_3.size, + size_bytes=TEST_BACKUP_3.size_bytes, + slug=TEST_BACKUP_3.slug, + supervisor_version="2024.11.2", + type=TEST_BACKUP_3.type, +) + @pytest.fixture(autouse=True) def fixture_supervisor_environ() -> Generator[None]: @@ -76,73 +169,160 @@ def fixture_supervisor_environ() -> Generator[None]: @pytest.fixture(autouse=True) -async def setup_integration( +async def hassio_enabled( hass: HomeAssistant, supervisor_client: AsyncMock ) -> AsyncGenerator[None]: - """Set up Backup integration.""" + """Enable hassio.""" with ( patch("homeassistant.components.backup.is_hassio", return_value=True), patch("homeassistant.components.backup.backup.is_hassio", return_value=True), ): - assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}}) - await hass.async_block_till_done() yield +@pytest.fixture +async def setup_integration( + hass: HomeAssistant, hassio_enabled: None, supervisor_client: AsyncMock +) -> AsyncGenerator[None]: + """Set up Backup integration.""" + assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}}) + await hass.async_block_till_done() + + @pytest.mark.usefixtures("hassio_client") +@pytest.mark.parametrize( + ("mounts", "expected_agents"), + [ + (MountsInfo(default_backup_mount=None, mounts=[]), ["hassio.local"]), + ( + MountsInfo( + default_backup_mount=None, + 
mounts=[ + supervisor_mounts.CIFSMountResponse( + share="test", + name="test", + read_only=False, + state=supervisor_mounts.MountState.ACTIVE, + user_path="test", + usage=supervisor_mounts.MountUsage.BACKUP, + server="test", + type=supervisor_mounts.MountType.CIFS, + ) + ], + ), + ["hassio.local", "hassio.test"], + ), + ( + MountsInfo( + default_backup_mount=None, + mounts=[ + supervisor_mounts.CIFSMountResponse( + share="test", + name="test", + read_only=False, + state=supervisor_mounts.MountState.ACTIVE, + user_path="test", + usage=supervisor_mounts.MountUsage.MEDIA, + server="test", + type=supervisor_mounts.MountType.CIFS, + ) + ], + ), + ["hassio.local"], + ), + ], +) async def test_agent_info( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, + mounts: MountsInfo, + expected_agents: list[str], ) -> None: """Test backup agent info.""" client = await hass_ws_client(hass) + supervisor_client.mounts.info.return_value = mounts + + assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}}) await client.send_json_auto_id({"type": "backup/agents/info"}) response = await client.receive_json() assert response["success"] assert response["result"] == { - "agents": [{"agent_id": "hassio.local"}], + "agents": [{"agent_id": agent_id} for agent_id in expected_agents], } -@pytest.mark.usefixtures("hassio_client") +@pytest.mark.usefixtures("hassio_client", "setup_integration") +@pytest.mark.parametrize( + ("backup", "backup_details", "expected_response"), + [ + ( + TEST_BACKUP, + TEST_BACKUP_DETAILS, + { + "addons": [ + {"name": "Terminal & SSH", "slug": "core_ssh", "version": "9.14.0"} + ], + "agent_ids": ["hassio.local"], + "backup_id": "abc123", + "database_included": True, + "date": "1970-01-01T00:00:00+00:00", + "failed_agent_ids": [], + "folders": ["share"], + "homeassistant_included": True, + "homeassistant_version": "2024.12.0", + "name": "Test", + "protected": False, + "size": 1048576, + "with_strategy_settings": False, + }, + ), + ( + TEST_BACKUP_2, + TEST_BACKUP_DETAILS_2, + { + "addons": [ + {"name": "Terminal & SSH", "slug": "core_ssh", "version": "9.14.0"} + ], + "agent_ids": ["hassio.local"], + "backup_id": "abc123", + "database_included": False, + "date": "1970-01-01T00:00:00+00:00", + "failed_agent_ids": [], + "folders": ["share"], + "homeassistant_included": False, + "homeassistant_version": None, + "name": "Test", + "protected": False, + "size": 1048576, + "with_strategy_settings": False, + }, + ), + ], +) async def test_agent_list_backups( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, supervisor_client: AsyncMock, + backup: supervisor_backups.Backup, + backup_details: supervisor_backups.BackupComplete, + expected_response: dict[str, Any], ) -> None: """Test agent list backups.""" client = await hass_ws_client(hass) - supervisor_client.backups.list.return_value = [TEST_BACKUP] - supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + supervisor_client.backups.list.return_value = [backup, TEST_BACKUP_3] + supervisor_client.backups.backup_info.return_value = backup_details await client.send_json_auto_id({"type": "backup/info"}) response = await client.receive_json() assert response["success"] - assert response["result"]["backups"] == [ - { - "addons": [ - {"name": "Terminal & SSH", "slug": "core_ssh", "version": "9.14.0"} - ], - "agent_ids": ["hassio.local"], - "backup_id": "abc123", - "database_included": True, - "date": "1970-01-01T00:00:00+00:00", - "failed_agent_ids": [], - "folders": ["share"], 
- "homeassistant_included": True, - "homeassistant_version": "2024.12.0", - "name": "Test", - "protected": False, - "size": 1048576, - "with_strategy_settings": False, - } - ] + assert response["result"]["backups"] == [expected_response] -@pytest.mark.usefixtures("hassio_client") +@pytest.mark.usefixtures("hassio_client", "setup_integration") async def test_agent_download( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -162,7 +342,26 @@ async def test_agent_download( assert await resp.content.read() == b"backup data" -@pytest.mark.usefixtures("hassio_client") +@pytest.mark.usefixtures("hassio_client", "setup_integration") +async def test_agent_download_unavailable_backup( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test agent download backup, when cloud user is logged in.""" + client = await hass_client() + backup_id = "abc123" + supervisor_client.backups.list.return_value = [TEST_BACKUP_3] + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS_3 + supervisor_client.backups.download_backup.return_value.__aiter__.return_value = ( + iter((b"backup data",)) + ) + + resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=hassio.local") + assert resp.status == 404 + + +@pytest.mark.usefixtures("hassio_client", "setup_integration") async def test_agent_upload( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -208,7 +407,7 @@ async def test_agent_upload( supervisor_client.backups.reload.assert_not_called() -@pytest.mark.usefixtures("hassio_client") +@pytest.mark.usefixtures("hassio_client", "setup_integration") async def test_agent_delete_backup( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, @@ -231,7 +430,7 @@ async def test_agent_delete_backup( supervisor_client.backups.remove_backup.assert_called_once_with(backup_id) -@pytest.mark.usefixtures("hassio_client") +@pytest.mark.usefixtures("hassio_client", "setup_integration") @pytest.mark.parametrize( ("event_data", "mount_info_calls"), [ @@ -293,11 +492,55 @@ async def test_agents_notify_on_mount_added_removed( assert supervisor_client.mounts.info.call_count == mount_info_calls -@pytest.mark.usefixtures("hassio_client") +DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions( + addons=None, + background=True, + compressed=True, + folders=None, + homeassistant_exclude_database=False, + homeassistant=True, + location=[None], + name="Test", + password=None, +) + + +@pytest.mark.usefixtures("hassio_client", "setup_integration") +@pytest.mark.parametrize( + ("extra_generate_options", "expected_supervisor_options"), + [ + ( + {}, + DEFAULT_BACKUP_OPTIONS, + ), + ( + {"include_addons": ["addon_1", "addon_2"]}, + replace(DEFAULT_BACKUP_OPTIONS, addons={"addon_1", "addon_2"}), + ), + ( + {"include_all_addons": True}, + DEFAULT_BACKUP_OPTIONS, + ), + ( + {"include_database": False}, + replace(DEFAULT_BACKUP_OPTIONS, homeassistant_exclude_database=True), + ), + ( + {"include_folders": ["media", "share"]}, + replace(DEFAULT_BACKUP_OPTIONS, folders={"media", "share"}), + ), + ( + {"include_folders": ["media"], "include_homeassistant": False}, + replace(DEFAULT_BACKUP_OPTIONS, folders={"media"}, homeassistant=False), + ), + ], +) async def test_reader_writer_create( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, supervisor_client: AsyncMock, + extra_generate_options: dict[str, Any], + expected_supervisor_options: supervisor_backups.PartialBackupOptions, ) -> None: """Test generating a backup.""" client = 
await hass_ws_client(hass) @@ -312,6 +555,7 @@ async def test_reader_writer_create( await client.send_json_auto_id( {"type": "backup/generate", "agent_ids": ["hassio.local"], "name": "Test"} + | extra_generate_options ) response = await client.receive_json() assert response["event"] == { @@ -325,17 +569,7 @@ async def test_reader_writer_create( assert response["result"] == {"backup_job_id": "abc123"} supervisor_client.backups.partial_backup.assert_called_once_with( - supervisor_backups.PartialBackupOptions( - addons=None, - background=True, - compressed=True, - folders=None, - homeassistant_exclude_database=False, - homeassistant=True, - location=[None], - name="Test", - password=None, - ) + expected_supervisor_options ) await client.send_json_auto_id( @@ -365,7 +599,61 @@ async def test_reader_writer_create( } -@pytest.mark.usefixtures("hassio_client") +@pytest.mark.usefixtures("hassio_client", "setup_integration") +@pytest.mark.parametrize( + ("extra_generate_options"), + [ + {"include_homeassistant": False}, + ], +) +async def test_reader_writer_create_wrong_parameters( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, + extra_generate_options: dict[str, Any], +) -> None: + """Test generating a backup.""" + client = await hass_ws_client(hass) + supervisor_client.backups.partial_backup.return_value.job_id = "abc123" + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + + await client.send_json_auto_id({"type": "backup/subscribe_events"}) + response = await client.receive_json() + assert response["event"] == {"manager_state": "idle"} + response = await client.receive_json() + assert response["success"] + + await client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": ["hassio.local"], "name": "Test"} + | extra_generate_options + ) + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "in_progress", + } + + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "failed", + } + + response = await client.receive_json() + assert response["event"] == { + "manager_state": "idle", + } + + response = await client.receive_json() + assert not response["success"] + assert response["error"] == {"code": "unknown_error", "message": "Unknown error"} + + supervisor_client.backups.partial_backup.assert_not_called() + + +@pytest.mark.usefixtures("hassio_client", "setup_integration") async def test_reader_writer_restore( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, @@ -437,7 +725,7 @@ async def test_reader_writer_restore( ), ], ) -@pytest.mark.usefixtures("hassio_client") +@pytest.mark.usefixtures("hassio_client", "setup_integration") async def test_reader_writer_restore_wrong_parameters( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, From 4adfd52dc0259d1a81c31cdffe9ed50cbef8c026 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 17 Dec 2024 15:08:03 +0100 Subject: [PATCH 0761/1198] Improve hassio backup agent test coverage (#133426) --- homeassistant/components/hassio/backup.py | 1 + tests/components/hassio/test_backup.py | 249 +++++++++++++++++++++- 2 files changed, 248 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/hassio/backup.py b/homeassistant/components/hassio/backup.py index 34c0701fdc4..5127c0326cc 100644 --- a/homeassistant/components/hassio/backup.py +++ b/homeassistant/components/hassio/backup.py @@ -173,6 
+173,7 @@ class SupervisorBackupAgent(BackupAgent): except SupervisorBadRequestError as err: if err.args[0] != "Backup does not exist": raise + _LOGGER.debug("Backup %s does not exist", backup_id) class SupervisorBackupReaderWriter(BackupReaderWriter): diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py index 9995425e6e1..5b3f6ff44a2 100644 --- a/tests/components/hassio/test_backup.py +++ b/tests/components/hassio/test_backup.py @@ -1,13 +1,20 @@ """Test supervisor backup functionality.""" -from collections.abc import AsyncGenerator, Generator +from collections.abc import ( + AsyncGenerator, + AsyncIterator, + Callable, + Coroutine, + Generator, +) from dataclasses import replace from datetime import datetime from io import StringIO import os from typing import Any -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, Mock, patch +from aiohasupervisor.exceptions import SupervisorBadRequestError from aiohasupervisor.models import ( backups as supervisor_backups, mounts as supervisor_mounts, @@ -19,13 +26,17 @@ from homeassistant.components.backup import ( DOMAIN as BACKUP_DOMAIN, AddonInfo, AgentBackup, + BackupAgent, + BackupAgentPlatformProtocol, Folder, ) +from homeassistant.components.hassio.backup import LOCATION_CLOUD_BACKUP from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from .test_init import MOCK_ENVIRON +from tests.common import mock_platform from tests.typing import ClientSessionGenerator, WebSocketGenerator TEST_BACKUP = supervisor_backups.Backup( @@ -189,6 +200,57 @@ async def setup_integration( await hass.async_block_till_done() +class BackupAgentTest(BackupAgent): + """Test backup agent.""" + + domain = "test" + + def __init__(self, name: str) -> None: + """Initialize the backup agent.""" + self.name = name + + async def async_download_backup( + self, backup_id: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + """Download a backup file.""" + return AsyncMock(spec_set=["__aiter__"]) + + async def async_upload_backup( + self, + *, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + backup: AgentBackup, + **kwargs: Any, + ) -> None: + """Upload a backup.""" + await open_stream() + + async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: + """List backups.""" + return [] + + async def async_get_backup( + self, backup_id: str, **kwargs: Any + ) -> AgentBackup | None: + """Return a backup.""" + return None + + async def async_delete_backup(self, backup_id: str, **kwargs: Any) -> None: + """Delete a backup file.""" + + +async def _setup_backup_platform( + hass: HomeAssistant, + *, + domain: str, + platform: BackupAgentPlatformProtocol, +) -> None: + """Set up a mock domain.""" + mock_platform(hass, f"{domain}.backup", platform) + assert await async_setup_component(hass, domain, {}) + await hass.async_block_till_done() + + @pytest.mark.usefixtures("hassio_client") @pytest.mark.parametrize( ("mounts", "expected_agents"), @@ -405,6 +467,8 @@ async def test_agent_upload( assert resp.status == 201 supervisor_client.backups.reload.assert_not_called() + supervisor_client.backups.download_backup.assert_not_called() + supervisor_client.backups.remove_backup.assert_not_called() @pytest.mark.usefixtures("hassio_client", "setup_integration") @@ -430,6 +494,50 @@ async def test_agent_delete_backup( supervisor_client.backups.remove_backup.assert_called_once_with(backup_id) +@pytest.mark.usefixtures("hassio_client", "setup_integration") 
+@pytest.mark.parametrize( + ("remove_side_effect", "expected_response"), + [ + ( + SupervisorBadRequestError("blah"), + { + "success": False, + "error": {"code": "unknown_error", "message": "Unknown error"}, + }, + ), + ( + SupervisorBadRequestError("Backup does not exist"), + { + "success": True, + "result": {"agent_errors": {}}, + }, + ), + ], +) +async def test_agent_delete_with_error( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, + remove_side_effect: Exception, + expected_response: dict[str, Any], +) -> None: + """Test agent delete backup.""" + client = await hass_ws_client(hass) + backup_id = "abc123" + + supervisor_client.backups.remove_backup.side_effect = remove_side_effect + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": backup_id, + } + ) + response = await client.receive_json() + + assert response == {"id": 1, "type": "result"} | expected_response + supervisor_client.backups.remove_backup.assert_called_once_with(backup_id) + + @pytest.mark.usefixtures("hassio_client", "setup_integration") @pytest.mark.parametrize( ("event_data", "mount_info_calls"), @@ -598,6 +706,84 @@ async def test_reader_writer_create( "state": "completed", } + supervisor_client.backups.download_backup.assert_not_called() + supervisor_client.backups.remove_backup.assert_not_called() + + +@pytest.mark.usefixtures("hassio_client", "setup_integration") +async def test_reader_writer_create_remote_backup( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test generating a backup which will be uploaded to a remote agent.""" + client = await hass_ws_client(hass) + supervisor_client.backups.partial_backup.return_value.job_id = "abc123" + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + + remote_agent = BackupAgentTest("remote") + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) + + await client.send_json_auto_id({"type": "backup/subscribe_events"}) + response = await client.receive_json() + assert response["event"] == {"manager_state": "idle"} + response = await client.receive_json() + assert response["success"] + + await client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": ["test.remote"], "name": "Test"} + ) + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "in_progress", + } + + response = await client.receive_json() + assert response["success"] + assert response["result"] == {"backup_job_id": "abc123"} + + supervisor_client.backups.partial_backup.assert_called_once_with( + replace(DEFAULT_BACKUP_OPTIONS, location=LOCATION_CLOUD_BACKUP), + ) + + await client.send_json_auto_id( + { + "type": "supervisor/event", + "data": { + "event": "job", + "data": {"done": True, "uuid": "abc123", "reference": "test_slug"}, + }, + } + ) + response = await client.receive_json() + assert response["success"] + + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": "upload_to_agents", + "state": "in_progress", + } + + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "completed", + } + + supervisor_client.backups.download_backup.assert_called_once_with("test_slug") + 
supervisor_client.backups.remove_backup.assert_called_once_with("test_slug") + @pytest.mark.usefixtures("hassio_client", "setup_integration") @pytest.mark.parametrize( @@ -653,6 +839,65 @@ async def test_reader_writer_create_wrong_parameters( supervisor_client.backups.partial_backup.assert_not_called() +@pytest.mark.usefixtures("hassio_client", "setup_integration") +async def test_agent_receive_remote_backup( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test receiving a backup which will be uploaded to a remote agent.""" + client = await hass_client() + backup_id = "test-backup" + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + supervisor_client.backups.upload_backup.return_value = "test_slug" + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=False, + size=0.0, + ) + + remote_agent = BackupAgentTest("remote") + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) + + supervisor_client.backups.reload.assert_not_called() + with ( + patch("pathlib.Path.mkdir"), + patch("pathlib.Path.open"), + patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + ) as fetch_backup, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + patch("shutil.copy"), + ): + fetch_backup.return_value = test_backup + resp = await client.post( + "/api/backup/upload?agent_id=test.remote", + data={"file": StringIO("test")}, + ) + + assert resp.status == 201 + + supervisor_client.backups.download_backup.assert_called_once_with("test_slug") + supervisor_client.backups.remove_backup.assert_called_once_with("test_slug") + + @pytest.mark.usefixtures("hassio_client", "setup_integration") async def test_reader_writer_restore( hass: HomeAssistant, From 9cc5f7ff843cea9d4ac254ea8b17d9a646767ebb Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Tue, 17 Dec 2024 15:41:34 +0100 Subject: [PATCH 0762/1198] Mark lamarzocco as platinum quality (#131609) --- homeassistant/components/lamarzocco/manifest.json | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/lamarzocco/manifest.json b/homeassistant/components/lamarzocco/manifest.json index 0d2111a2026..7505843850c 100644 --- a/homeassistant/components/lamarzocco/manifest.json +++ b/homeassistant/components/lamarzocco/manifest.json @@ -36,5 +36,6 @@ "integration_type": "device", "iot_class": "cloud_polling", "loggers": ["pylamarzocco"], + "quality_scale": "platinum", "requirements": ["pylamarzocco==1.4.0"] } From a9f6982ac0814a3733088b7364981bc7f184deec Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Tue, 17 Dec 2024 15:45:16 +0100 Subject: [PATCH 0763/1198] Mark acaia as platinum quality (#131723) Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- homeassistant/components/acaia/manifest.json | 1 + homeassistant/components/acaia/quality_scale.yaml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/acaia/manifest.json b/homeassistant/components/acaia/manifest.json index c1f1fdd7a81..36551e9c695 100644 --- a/homeassistant/components/acaia/manifest.json +++ 
b/homeassistant/components/acaia/manifest.json @@ -25,5 +25,6 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["aioacaia"], + "quality_scale": "platinum", "requirements": ["aioacaia==0.1.11"] } diff --git a/homeassistant/components/acaia/quality_scale.yaml b/homeassistant/components/acaia/quality_scale.yaml index 9f9f8da8d5d..62573e38799 100644 --- a/homeassistant/components/acaia/quality_scale.yaml +++ b/homeassistant/components/acaia/quality_scale.yaml @@ -16,7 +16,7 @@ rules: No custom actions are defined. docs-high-level-description: done docs-installation-instructions: done - docs-removal-instructions: todo + docs-removal-instructions: done entity-event-setup: status: exempt comment: | From 5b1c5bf9f6aa742493b1b6f6f559fda0e45519b2 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 17 Dec 2024 16:34:48 +0100 Subject: [PATCH 0764/1198] Record current IQS scale for Tailwind (#133158) Co-authored-by: Joost Lekkerkerker --- .../components/tailwind/quality_scale.yaml | 76 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 76 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/tailwind/quality_scale.yaml diff --git a/homeassistant/components/tailwind/quality_scale.yaml b/homeassistant/components/tailwind/quality_scale.yaml new file mode 100644 index 00000000000..90c5d0d5837 --- /dev/null +++ b/homeassistant/components/tailwind/quality_scale.yaml @@ -0,0 +1,76 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: Integration does not register custom actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: todo + docs-high-level-description: todo + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: | + Entities of this integration does not explicitly subscribe to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: todo + reauthentication-flow: done + test-coverage: done + # Gold + devices: done + diagnostics: done + discovery-update-info: done + discovery: done + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: done + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + This integration connects to a single device. + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: + status: exempt + comment: | + The coordinator needs translation when the update failed. + icon-translations: done + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration does not raise any repairable issues. + stale-devices: + status: exempt + comment: | + This integration connects to a single device. 
+ + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 83335fa5c44..23320632a1a 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -1006,7 +1006,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "systemmonitor", "tado", "tailscale", - "tailwind", "tami4", "tank_utility", "tankerkoenig", From a14aca31e534f998bcc7e55976e1b9c7d9c6ffba Mon Sep 17 00:00:00 2001 From: Krisjanis Lejejs Date: Tue, 17 Dec 2024 15:44:50 +0000 Subject: [PATCH 0765/1198] Add MFA login flow support for cloud component (#132497) * Add MFA login flow support for cloud component * Add tests for cloud MFA login * Update code to reflect used package changes * Update code to use underlying package changes * Remove unused change * Fix login required parameters * Fix parameter validation * Use cv.has_at_least_one_key for param validation --------- Co-authored-by: Martin Hjelmare --- homeassistant/components/cloud/const.py | 2 + homeassistant/components/cloud/http_api.py | 58 ++++++++- tests/components/cloud/test_http_api.py | 129 ++++++++++++++++++++- 3 files changed, 186 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/cloud/const.py b/homeassistant/components/cloud/const.py index 4392bf94827..65d239f2b10 100644 --- a/homeassistant/components/cloud/const.py +++ b/homeassistant/components/cloud/const.py @@ -88,3 +88,5 @@ DISPATCHER_REMOTE_UPDATE: SignalType[Any] = SignalType("cloud_remote_update") STT_ENTITY_UNIQUE_ID = "cloud-speech-to-text" TTS_ENTITY_UNIQUE_ID = "cloud-text-to-speech" + +LOGIN_MFA_TIMEOUT = 60 diff --git a/homeassistant/components/cloud/http_api.py b/homeassistant/components/cloud/http_api.py index 4f2ad0ddcf7..2f49d261792 100644 --- a/homeassistant/components/cloud/http_api.py +++ b/homeassistant/components/cloud/http_api.py @@ -9,6 +9,7 @@ import dataclasses from functools import wraps from http import HTTPStatus import logging +import time from typing import Any, Concatenate import aiohttp @@ -31,6 +32,7 @@ from homeassistant.components.http.data_validator import RequestDataValidator from homeassistant.const import CLOUD_NEVER_EXPOSED_ENTITIES from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.util.location import async_detect_location_info @@ -39,6 +41,7 @@ from .assist_pipeline import async_create_cloud_pipeline from .client import CloudClient from .const import ( DATA_CLOUD, + LOGIN_MFA_TIMEOUT, PREF_ALEXA_REPORT_STATE, PREF_DISABLE_2FA, PREF_ENABLE_ALEXA, @@ -69,6 +72,10 @@ _CLOUD_ERRORS: dict[type[Exception], tuple[HTTPStatus, str]] = { } +class MFAExpiredOrNotStarted(auth.CloudError): + """Multi-factor authentication expired, or not started.""" + + @callback def async_setup(hass: HomeAssistant) -> None: """Initialize the HTTP API.""" @@ -101,6 +108,11 @@ def async_setup(hass: HomeAssistant) -> None: _CLOUD_ERRORS.update( { + auth.InvalidTotpCode: (HTTPStatus.BAD_REQUEST, "Invalid TOTP code."), + auth.MFARequired: ( + HTTPStatus.UNAUTHORIZED, + "Multi-factor authentication required.", + ), auth.UserNotFound: (HTTPStatus.BAD_REQUEST, "User does not exist."), auth.UserNotConfirmed: (HTTPStatus.BAD_REQUEST, "Email not confirmed."), auth.UserExists: ( @@ -112,6 +124,10 @@ def async_setup(hass: HomeAssistant) -> None: 
HTTPStatus.BAD_REQUEST, "Password change required.", ), + MFAExpiredOrNotStarted: ( + HTTPStatus.BAD_REQUEST, + "Multi-factor authentication expired, or not started. Please try again.", + ), } ) @@ -206,19 +222,57 @@ class GoogleActionsSyncView(HomeAssistantView): class CloudLoginView(HomeAssistantView): """Login to Home Assistant cloud.""" + _mfa_tokens: dict[str, str] = {} + _mfa_tokens_set_time: float = 0 + url = "/api/cloud/login" name = "api:cloud:login" @require_admin @_handle_cloud_errors @RequestDataValidator( - vol.Schema({vol.Required("email"): str, vol.Required("password"): str}) + vol.Schema( + vol.All( + { + vol.Required("email"): str, + vol.Exclusive("password", "login"): str, + vol.Exclusive("code", "login"): str, + }, + cv.has_at_least_one_key("password", "code"), + ) + ) ) async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response: """Handle login request.""" hass = request.app[KEY_HASS] cloud = hass.data[DATA_CLOUD] - await cloud.login(data["email"], data["password"]) + + try: + email = data["email"] + password = data.get("password") + code = data.get("code") + + if email and password: + await cloud.login(email, password) + + else: + if ( + not self._mfa_tokens + or time.time() - self._mfa_tokens_set_time > LOGIN_MFA_TIMEOUT + ): + raise MFAExpiredOrNotStarted + + # Voluptuous should ensure that code is not None because password is + assert code is not None + + await cloud.login_verify_totp(email, code, self._mfa_tokens) + self._mfa_tokens = {} + self._mfa_tokens_set_time = 0 + + except auth.MFARequired as mfa_err: + self._mfa_tokens = mfa_err.mfa_tokens + self._mfa_tokens_set_time = time.time() + raise if "assist_pipeline" in hass.config.components: new_cloud_pipeline_id = await async_create_cloud_pipeline(hass) diff --git a/tests/components/cloud/test_http_api.py b/tests/components/cloud/test_http_api.py index 216fc77db48..b35cc03ac73 100644 --- a/tests/components/cloud/test_http_api.py +++ b/tests/components/cloud/test_http_api.py @@ -8,7 +8,12 @@ from unittest.mock import AsyncMock, MagicMock, Mock, patch import aiohttp from hass_nabucasa import thingtalk -from hass_nabucasa.auth import Unauthenticated, UnknownError +from hass_nabucasa.auth import ( + InvalidTotpCode, + MFARequired, + Unauthenticated, + UnknownError, +) from hass_nabucasa.const import STATE_CONNECTED from hass_nabucasa.voice import TTS_VOICES import pytest @@ -378,6 +383,128 @@ async def test_login_view_invalid_credentials( assert req.status == HTTPStatus.UNAUTHORIZED +async def test_login_view_mfa_required( + cloud: MagicMock, + setup_cloud: None, + hass_client: ClientSessionGenerator, +) -> None: + """Test logging in when MFA is required.""" + cloud_client = await hass_client() + cloud.login.side_effect = MFARequired(mfa_tokens={"session": "tokens"}) + + req = await cloud_client.post( + "/api/cloud/login", json={"email": "my_username", "password": "my_password"} + ) + + assert req.status == HTTPStatus.UNAUTHORIZED + res = await req.json() + assert res["code"] == "mfarequired" + + +async def test_login_view_mfa_required_tokens_missing( + cloud: MagicMock, + setup_cloud: None, + hass_client: ClientSessionGenerator, +) -> None: + """Test logging in when MFA is required, code is provided, but session tokens are missing.""" + cloud_client = await hass_client() + cloud.login.side_effect = MFARequired(mfa_tokens={}) + + # Login with password and get MFA required error + req = await cloud_client.post( + "/api/cloud/login", json={"email": "my_username", "password": "my_password"} + ) + 
+ assert req.status == HTTPStatus.UNAUTHORIZED + res = await req.json() + assert res["code"] == "mfarequired" + + # Login with TOTP code and get MFA expired error + req = await cloud_client.post( + "/api/cloud/login", + json={"email": "my_username", "code": "123346"}, + ) + + assert req.status == HTTPStatus.BAD_REQUEST + res = await req.json() + assert res["code"] == "mfaexpiredornotstarted" + + +async def test_login_view_mfa_password_and_totp_provided( + cloud: MagicMock, + setup_cloud: None, + hass_client: ClientSessionGenerator, +) -> None: + """Test logging in when password and TOTP code provided at once.""" + cloud_client = await hass_client() + + req = await cloud_client.post( + "/api/cloud/login", + json={"email": "my_username", "password": "my_password", "code": "123346"}, + ) + + assert req.status == HTTPStatus.BAD_REQUEST + + +async def test_login_view_invalid_totp_code( + cloud: MagicMock, + setup_cloud: None, + hass_client: ClientSessionGenerator, +) -> None: + """Test logging in when MFA is required and invalid code is provided.""" + cloud_client = await hass_client() + cloud.login.side_effect = MFARequired(mfa_tokens={"session": "tokens"}) + cloud.login_verify_totp.side_effect = InvalidTotpCode + + # Login with password and get MFA required error + req = await cloud_client.post( + "/api/cloud/login", json={"email": "my_username", "password": "my_password"} + ) + + assert req.status == HTTPStatus.UNAUTHORIZED + res = await req.json() + assert res["code"] == "mfarequired" + + # Login with TOTP code and get invalid TOTP code error + req = await cloud_client.post( + "/api/cloud/login", + json={"email": "my_username", "code": "123346"}, + ) + + assert req.status == HTTPStatus.BAD_REQUEST + res = await req.json() + assert res["code"] == "invalidtotpcode" + + +async def test_login_view_valid_totp_provided( + cloud: MagicMock, + setup_cloud: None, + hass_client: ClientSessionGenerator, +) -> None: + """Test logging in with valid TOTP code.""" + cloud_client = await hass_client() + cloud.login.side_effect = MFARequired(mfa_tokens={"session": "tokens"}) + + # Login with password and get MFA required error + req = await cloud_client.post( + "/api/cloud/login", json={"email": "my_username", "password": "my_password"} + ) + + assert req.status == HTTPStatus.UNAUTHORIZED + res = await req.json() + assert res["code"] == "mfarequired" + + # Login with TOTP code and get success response + req = await cloud_client.post( + "/api/cloud/login", + json={"email": "my_username", "code": "123346"}, + ) + + assert req.status == HTTPStatus.OK + result = await req.json() + assert result == {"success": True, "cloud_pipeline": None} + + async def test_login_view_unknown_error( cloud: MagicMock, setup_cloud: None, From d9fb5a758232f3da6c0a86a6eb3fa684adabc22d Mon Sep 17 00:00:00 2001 From: Jan-Philipp Benecke Date: Tue, 17 Dec 2024 17:10:04 +0100 Subject: [PATCH 0766/1198] Record current IQS state for SABnzbd (#131656) * Record current IQS state for SAbnzbd * Convert review comments to IQS comments --- .../components/sabnzbd/quality_scale.yaml | 96 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 96 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/sabnzbd/quality_scale.yaml diff --git a/homeassistant/components/sabnzbd/quality_scale.yaml b/homeassistant/components/sabnzbd/quality_scale.yaml new file mode 100644 index 00000000000..c3fea2427ce --- /dev/null +++ b/homeassistant/components/sabnzbd/quality_scale.yaml @@ -0,0 +1,96 @@ +rules: + # Bronze + 
action-setup: + status: todo + comment: | + Do not remove services when all config entries are removed. + appropriate-polling: done + brands: done + common-modules: + status: todo + comment: | + const.py has unused variables. + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + The integration has deprecated the actions, thus the documentation has been removed. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: | + Entities of this integration do not explicitly subscribe to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: todo + comment: | + Raise ServiceValidationError in async_get_entry_for_service_call. + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + The integration does not provide any additional options. + docs-installation-parameters: todo + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: todo + reauthentication-flow: todo + test-coverage: + status: todo + comment: | + Coverage for loading and unloading config entries is missing. + + # Gold + devices: done + diagnostics: todo + discovery-update-info: + status: exempt + comment: | + This integration cannot be discovered. + discovery: + status: exempt + comment: | + This integration cannot be discovered. + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: + status: todo + comment: | + Describe the state of the sensor and make it an enum sensor. + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + The integration connects to a single service per configuration entry. + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: done + repair-issues: done + stale-devices: + status: exempt + comment: | + This integration connects to a single service per configuration entry.
+ + # Platinum + async-dependency: done + inject-websession: done + strict-typing: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 23320632a1a..88e450409b4 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -887,7 +887,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "ruuvi_gateway", "ruuvitag_ble", "rympro", - "sabnzbd", "saj", "samsungtv", "sanix", From 44a86f537ff7f5d1f48bfc518a4c6d89de4c3ff4 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Tue, 17 Dec 2024 17:12:11 +0100 Subject: [PATCH 0767/1198] Add quality scale for Fronius (#131770) --- .../components/fronius/quality_scale.yaml | 89 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 89 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/fronius/quality_scale.yaml diff --git a/homeassistant/components/fronius/quality_scale.yaml b/homeassistant/components/fronius/quality_scale.yaml new file mode 100644 index 00000000000..2c4b892475b --- /dev/null +++ b/homeassistant/components/fronius/quality_scale.yaml @@ -0,0 +1,89 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + appropriate-polling: done + brands: done + common-modules: + status: done + comment: | + Single platform only, so no entity.py file. + CoordinatorEntity is used. + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + This integration does not subscribe to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + # Silver + action-exceptions: + status: exempt + comment: | + This integration does not provide additional actions. + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + This integration does not provide configuration options. + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: + status: done + comment: | + Coordinators are used and asyncio.Lock mutex across them ensure proper + rate limiting. Platforms are read-only. + reauthentication-flow: + status: exempt + comment: | + This integration doesn't require authentication. + test-coverage: done + # Gold + devices: done + diagnostics: done + discovery-update-info: done + discovery: done + docs-data-update: done + docs-examples: done + docs-known-limitations: done + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: done + docs-use-cases: done + dynamic-devices: done + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: done + icon-translations: done + reconfiguration-flow: done + repair-issues: + status: exempt + comment: | + This integration doesn't have any known user-repairable issues. + stale-devices: done + # Platinum + async-dependency: done + inject-websession: done + strict-typing: + status: todo + comment: | + The pyfronius library isn't strictly typed and doesn't export type information. 
diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 88e450409b4..4e5cee2d16d 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -416,7 +416,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "freedompro", "fritzbox", "fritzbox_callmonitor", - "fronius", "frontier_silicon", "fujitsu_fglair", "fujitsu_hvac", From 25a63863cb1f1bdeb042d2883754ea89be2d692e Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 17 Dec 2024 17:21:13 +0100 Subject: [PATCH 0768/1198] Adapt hassio backup agent to supervisor changes (#133428) --- homeassistant/components/hassio/backup.py | 45 ++++++++++++++++++++--- tests/components/hassio/test_backup.py | 37 ++++++++++++++++--- 2 files changed, 70 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/hassio/backup.py b/homeassistant/components/hassio/backup.py index 5127c0326cc..4bc6dff44d2 100644 --- a/homeassistant/components/hassio/backup.py +++ b/homeassistant/components/hassio/backup.py @@ -8,7 +8,10 @@ import logging from pathlib import Path from typing import Any, cast -from aiohasupervisor.exceptions import SupervisorBadRequestError +from aiohasupervisor.exceptions import ( + SupervisorBadRequestError, + SupervisorNotFoundError, +) from aiohasupervisor.models import ( backups as supervisor_backups, mounts as supervisor_mounts, @@ -130,7 +133,10 @@ class SupervisorBackupAgent(BackupAgent): **kwargs: Any, ) -> AsyncIterator[bytes]: """Download a backup file.""" - return await self._client.backups.download_backup(backup_id) + return await self._client.backups.download_backup( + backup_id, + options=supervisor_backups.DownloadBackupOptions(location=self.location), + ) async def async_upload_backup( self, @@ -169,11 +175,18 @@ class SupervisorBackupAgent(BackupAgent): async def async_delete_backup(self, backup_id: str, **kwargs: Any) -> None: """Remove a backup.""" try: - await self._client.backups.remove_backup(backup_id) + await self._client.backups.remove_backup( + backup_id, + options=supervisor_backups.RemoveBackupOptions( + location={self.location} + ), + ) except SupervisorBadRequestError as err: if err.args[0] != "Backup does not exist": raise _LOGGER.debug("Backup %s does not exist", backup_id) + except SupervisorNotFoundError: + _LOGGER.debug("Backup %s does not exist", backup_id) class SupervisorBackupReaderWriter(BackupReaderWriter): @@ -200,7 +213,11 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): """Create a backup.""" manager = self._hass.data[DATA_MANAGER] - include_addons_set = set(include_addons) if include_addons else None + include_addons_set: supervisor_backups.AddonSet | set[str] | None = None + if include_all_addons: + include_addons_set = supervisor_backups.AddonSet.ALL + elif include_addons: + include_addons_set = set(include_addons) include_folders_set = ( {supervisor_backups.Folder(folder) for folder in include_folders} if include_folders @@ -266,7 +283,12 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): async def remove_backup() -> None: if not remove_after_upload: return - await self._client.backups.remove_backup(backup_id) + await self._client.backups.remove_backup( + backup_id, + options=supervisor_backups.RemoveBackupOptions( + location={LOCATION_CLOUD_BACKUP} + ), + ) details = await self._client.backups.backup_info(backup_id) @@ -306,7 +328,12 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): async def remove_backup() -> None: if locations: return - await self._client.backups.remove_backup(backup_id) + await 
self._client.backups.remove_backup( + backup_id, + options=supervisor_backups.RemoveBackupOptions( + location={LOCATION_CLOUD_BACKUP} + ), + ) details = await self._client.backups.backup_info(backup_id) @@ -341,6 +368,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): ) manager = self._hass.data[DATA_MANAGER] + restore_location: str | None if manager.backup_agents[agent_id].domain != DOMAIN: # Download the backup to the supervisor. Supervisor will clean up the backup # two days after the restore is done. @@ -349,6 +377,10 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): stream=await open_stream(), suggested_filename=f"{backup_id}.tar", ) + restore_location = LOCATION_CLOUD_BACKUP + else: + agent = cast(SupervisorBackupAgent, manager.backup_agents[agent_id]) + restore_location = agent.location job = await self._client.backups.partial_restore( backup_id, @@ -358,6 +390,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): homeassistant=restore_homeassistant, password=password, background=True, + location=restore_location, ), ) diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py index 5b3f6ff44a2..75cc049f7b5 100644 --- a/tests/components/hassio/test_backup.py +++ b/tests/components/hassio/test_backup.py @@ -14,7 +14,10 @@ import os from typing import Any from unittest.mock import AsyncMock, Mock, patch -from aiohasupervisor.exceptions import SupervisorBadRequestError +from aiohasupervisor.exceptions import ( + SupervisorBadRequestError, + SupervisorNotFoundError, +) from aiohasupervisor.models import ( backups as supervisor_backups, mounts as supervisor_mounts, @@ -403,6 +406,10 @@ async def test_agent_download( assert resp.status == 200 assert await resp.content.read() == b"backup data" + supervisor_client.backups.download_backup.assert_called_once_with( + "abc123", options=supervisor_backups.DownloadBackupOptions(location=None) + ) + @pytest.mark.usefixtures("hassio_client", "setup_integration") async def test_agent_download_unavailable_backup( @@ -491,7 +498,9 @@ async def test_agent_delete_backup( assert response["success"] assert response["result"] == {"agent_errors": {}} - supervisor_client.backups.remove_backup.assert_called_once_with(backup_id) + supervisor_client.backups.remove_backup.assert_called_once_with( + backup_id, options=supervisor_backups.RemoveBackupOptions(location={None}) + ) @pytest.mark.usefixtures("hassio_client", "setup_integration") @@ -512,6 +521,13 @@ async def test_agent_delete_backup( "result": {"agent_errors": {}}, }, ), + ( + SupervisorNotFoundError(), + { + "success": True, + "result": {"agent_errors": {}}, + }, + ), ], ) async def test_agent_delete_with_error( @@ -535,7 +551,9 @@ async def test_agent_delete_with_error( response = await client.receive_json() assert response == {"id": 1, "type": "result"} | expected_response - supervisor_client.backups.remove_backup.assert_called_once_with(backup_id) + supervisor_client.backups.remove_backup.assert_called_once_with( + backup_id, options=supervisor_backups.RemoveBackupOptions(location={None}) + ) @pytest.mark.usefixtures("hassio_client", "setup_integration") @@ -627,7 +645,7 @@ DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions( ), ( {"include_all_addons": True}, - DEFAULT_BACKUP_OPTIONS, + replace(DEFAULT_BACKUP_OPTIONS, addons="all"), ), ( {"include_database": False}, @@ -782,7 +800,10 @@ async def test_reader_writer_create_remote_backup( } supervisor_client.backups.download_backup.assert_called_once_with("test_slug") - 
supervisor_client.backups.remove_backup.assert_called_once_with("test_slug") + supervisor_client.backups.remove_backup.assert_called_once_with( + "test_slug", + options=supervisor_backups.RemoveBackupOptions({LOCATION_CLOUD_BACKUP}), + ) @pytest.mark.usefixtures("hassio_client", "setup_integration") @@ -895,7 +916,10 @@ async def test_agent_receive_remote_backup( assert resp.status == 201 supervisor_client.backups.download_backup.assert_called_once_with("test_slug") - supervisor_client.backups.remove_backup.assert_called_once_with("test_slug") + supervisor_client.backups.remove_backup.assert_called_once_with( + "test_slug", + options=supervisor_backups.RemoveBackupOptions({LOCATION_CLOUD_BACKUP}), + ) @pytest.mark.usefixtures("hassio_client", "setup_integration") @@ -933,6 +957,7 @@ async def test_reader_writer_restore( background=True, folders=None, homeassistant=True, + location=None, password=None, ), ) From 3341e3d95b41d37a635a5b3f13b19d158e4d3a05 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Tue, 17 Dec 2024 17:43:56 +0100 Subject: [PATCH 0769/1198] Fix two occurrences of "HomeAssistant" adding the missing space (#133435) --- homeassistant/components/roon/strings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/roon/strings.json b/homeassistant/components/roon/strings.json index 85cb53b9010..463f0431891 100644 --- a/homeassistant/components/roon/strings.json +++ b/homeassistant/components/roon/strings.json @@ -10,8 +10,8 @@ } }, "link": { - "title": "Authorize HomeAssistant in Roon", - "description": "You must authorize Home Assistant in Roon. After you select **Submit**, go to the Roon Core application, open **Settings** and enable HomeAssistant on the **Extensions** tab." + "title": "Authorize Home Assistant in Roon", + "description": "You must authorize Home Assistant in Roon. After you select **Submit**, go to the Roon Core application, open **Settings** and enable Home Assistant on the **Extensions** tab." } }, "error": { From 89eda9e068870c3b33ad6d9368090bac5a0bd511 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 17 Dec 2024 17:47:17 +0100 Subject: [PATCH 0770/1198] Don't raise when removing non-existing cloud backup (#133429) --- homeassistant/components/cloud/backup.py | 2 +- tests/components/cloud/test_backup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/cloud/backup.py b/homeassistant/components/cloud/backup.py index d394daa7dc5..e826c229321 100644 --- a/homeassistant/components/cloud/backup.py +++ b/homeassistant/components/cloud/backup.py @@ -167,7 +167,7 @@ class CloudBackupAgent(BackupAgent): :param backup_id: The ID of the backup that was returned in async_list_backups. 
""" if not await self.async_get_backup(backup_id): - raise BackupAgentError("Backup not found") + return try: await async_files_delete_file( diff --git a/tests/components/cloud/test_backup.py b/tests/components/cloud/test_backup.py index ac0ef1826de..5e607bbc70b 100644 --- a/tests/components/cloud/test_backup.py +++ b/tests/components/cloud/test_backup.py @@ -570,4 +570,4 @@ async def test_agents_delete_not_found( response = await client.receive_json() assert response["success"] - assert response["result"] == {"agent_errors": {"cloud.cloud": "Backup not found"}} + assert response["result"] == {"agent_errors": {}} From 1de8d63a63c2ca973ca54339402ab1f5bb0a0986 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Tue, 17 Dec 2024 17:48:18 +0100 Subject: [PATCH 0771/1198] Remove three duplicated space characters in strings.json (#133436) --- homeassistant/components/smartthings/strings.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/smartthings/strings.json b/homeassistant/components/smartthings/strings.json index 7fbf966fa89..de94e5adfcd 100644 --- a/homeassistant/components/smartthings/strings.json +++ b/homeassistant/components/smartthings/strings.json @@ -7,14 +7,14 @@ }, "pat": { "title": "Enter Personal Access Token", - "description": "Please enter a SmartThings [Personal Access Token]({token_url}) that has been created per the [instructions]({component_url}). This will be used to create the Home Assistant integration within your SmartThings account.", + "description": "Please enter a SmartThings [Personal Access Token]({token_url}) that has been created per the [instructions]({component_url}). This will be used to create the Home Assistant integration within your SmartThings account.", "data": { "access_token": "[%key:common::config_flow::data::access_token%]" } }, "select_location": { "title": "Select Location", - "description": "Please select the SmartThings Location you wish to add to Home Assistant. We will then open a new window and ask you to login and authorize installation of the Home Assistant integration into the selected location.", + "description": "Please select the SmartThings Location you wish to add to Home Assistant. We will then open a new window and ask you to login and authorize installation of the Home Assistant integration into the selected location.", "data": { "location_id": "[%key:common::config_flow::data::location%]" } }, "authorize": { "title": "Authorize Home Assistant" } @@ -27,7 +27,7 @@ "token_invalid_format": "The token must be in the UID/GUID format", "token_unauthorized": "The token is invalid or no longer authorized.", "token_forbidden": "The token does not have the required OAuth scopes.", - "app_setup_error": "Unable to set up the SmartApp. Please try again.", + "app_setup_error": "Unable to set up the SmartApp. Please try again.", "webhook_error": "SmartThings could not validate the webhook URL. Please ensure the webhook URL is reachable from the internet and try again." 
} } From da85c497bf76f60cfaa44ab26dded8fe007107b0 Mon Sep 17 00:00:00 2001 From: DrBlokmeister <57352628+DrBlokmeister@users.noreply.github.com> Date: Tue, 17 Dec 2024 17:48:54 +0100 Subject: [PATCH 0772/1198] Add transmission download path to events + add_torrent service (#121371) Co-authored-by: Erik Montnemery Co-authored-by: Joost Lekkerkerker --- .../components/transmission/__init__.py | 18 ++++++++++++++-- .../components/transmission/const.py | 1 + .../components/transmission/coordinator.py | 21 ++++++++++++++++--- .../components/transmission/services.yaml | 5 +++++ .../components/transmission/strings.json | 4 ++++ 5 files changed, 44 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/transmission/__init__.py b/homeassistant/components/transmission/__init__.py index 1c108831acf..1a8ffdea0c2 100644 --- a/homeassistant/components/transmission/__init__.py +++ b/homeassistant/components/transmission/__init__.py @@ -42,6 +42,7 @@ from homeassistant.helpers.typing import ConfigType from .const import ( ATTR_DELETE_DATA, + ATTR_DOWNLOAD_PATH, ATTR_TORRENT, CONF_ENTRY_ID, DEFAULT_DELETE_DATA, @@ -82,7 +83,12 @@ SERVICE_BASE_SCHEMA = vol.Schema( ) SERVICE_ADD_TORRENT_SCHEMA = vol.All( - SERVICE_BASE_SCHEMA.extend({vol.Required(ATTR_TORRENT): cv.string}), + SERVICE_BASE_SCHEMA.extend( + { + vol.Required(ATTR_TORRENT): cv.string, + vol.Optional(ATTR_DOWNLOAD_PATH, default=None): cv.string, + } + ), ) @@ -213,10 +219,18 @@ def setup_hass_services(hass: HomeAssistant) -> None: entry_id: str = service.data[CONF_ENTRY_ID] coordinator = _get_coordinator_from_service_data(hass, entry_id) torrent: str = service.data[ATTR_TORRENT] + download_path: str | None = service.data.get(ATTR_DOWNLOAD_PATH) if torrent.startswith( ("http", "ftp:", "magnet:") ) or hass.config.is_allowed_path(torrent): - await hass.async_add_executor_job(coordinator.api.add_torrent, torrent) + if download_path: + await hass.async_add_executor_job( + partial( + coordinator.api.add_torrent, torrent, download_dir=download_path + ) + ) + else: + await hass.async_add_executor_job(coordinator.api.add_torrent, torrent) await coordinator.async_request_refresh() else: _LOGGER.warning("Could not add torrent: unsupported type or no permission") diff --git a/homeassistant/components/transmission/const.py b/homeassistant/components/transmission/const.py index 120918b24a2..c232f26cefd 100644 --- a/homeassistant/components/transmission/const.py +++ b/homeassistant/components/transmission/const.py @@ -40,6 +40,7 @@ STATE_ATTR_TORRENT_INFO = "torrent_info" ATTR_DELETE_DATA = "delete_data" ATTR_TORRENT = "torrent" +ATTR_DOWNLOAD_PATH = "download_path" SERVICE_ADD_TORRENT = "add_torrent" SERVICE_REMOVE_TORRENT = "remove_torrent" diff --git a/homeassistant/components/transmission/coordinator.py b/homeassistant/components/transmission/coordinator.py index e0930bd9e9e..b998ab6fbdd 100644 --- a/homeassistant/components/transmission/coordinator.py +++ b/homeassistant/components/transmission/coordinator.py @@ -102,7 +102,12 @@ class TransmissionDataUpdateCoordinator(DataUpdateCoordinator[SessionStats]): for torrent in current_completed_torrents: if torrent.id not in old_completed_torrents: self.hass.bus.fire( - EVENT_DOWNLOADED_TORRENT, {"name": torrent.name, "id": torrent.id} + EVENT_DOWNLOADED_TORRENT, + { + "name": torrent.name, + "id": torrent.id, + "download_path": torrent.download_dir, + }, ) self._completed_torrents = current_completed_torrents @@ -118,7 +123,12 @@ class 
TransmissionDataUpdateCoordinator(DataUpdateCoordinator[SessionStats]): for torrent in current_started_torrents: if torrent.id not in old_started_torrents: self.hass.bus.fire( - EVENT_STARTED_TORRENT, {"name": torrent.name, "id": torrent.id} + EVENT_STARTED_TORRENT, + { + "name": torrent.name, + "id": torrent.id, + "download_path": torrent.download_dir, + }, ) self._started_torrents = current_started_torrents @@ -130,7 +140,12 @@ class TransmissionDataUpdateCoordinator(DataUpdateCoordinator[SessionStats]): for torrent in self._all_torrents: if torrent.id not in current_torrents: self.hass.bus.fire( - EVENT_REMOVED_TORRENT, {"name": torrent.name, "id": torrent.id} + EVENT_REMOVED_TORRENT, + { + "name": torrent.name, + "id": torrent.id, + "download_path": torrent.download_dir, + }, ) self._all_torrents = self.torrents.copy() diff --git a/homeassistant/components/transmission/services.yaml b/homeassistant/components/transmission/services.yaml index 2d61bda442f..8f9aadd5009 100644 --- a/homeassistant/components/transmission/services.yaml +++ b/homeassistant/components/transmission/services.yaml @@ -9,6 +9,11 @@ add_torrent: example: http://releases.ubuntu.com/19.04/ubuntu-19.04-desktop-amd64.iso.torrent selector: text: + download_path: + required: false + example: "/path/to/download/directory" + selector: + text: remove_torrent: fields: diff --git a/homeassistant/components/transmission/strings.json b/homeassistant/components/transmission/strings.json index 578bc262589..aabc5827a88 100644 --- a/homeassistant/components/transmission/strings.json +++ b/homeassistant/components/transmission/strings.json @@ -101,6 +101,10 @@ "torrent": { "name": "Torrent", "description": "URL, magnet link or Base64 encoded file." + }, + "download_path": { + "name": "Download path", + "description": "Optional path to specify where the torrent should be downloaded. If not specified, the default download directory is used." 
} } }, From 98d50206900695d5108852b0c3c2340dff5ddb90 Mon Sep 17 00:00:00 2001 From: Kevin Stillhammer Date: Tue, 17 Dec 2024 18:00:23 +0100 Subject: [PATCH 0773/1198] Support units and filters in async_get_travel_times_service for waze_travel_time (#130776) --- .../components/waze_travel_time/__init__.py | 37 ++++++++++++++++++- .../components/waze_travel_time/sensor.py | 15 +------- .../components/waze_travel_time/services.yaml | 10 +++++ .../components/waze_travel_time/strings.json | 8 ++++ .../components/waze_travel_time/test_init.py | 10 ++--- 5 files changed, 59 insertions(+), 21 deletions(-) diff --git a/homeassistant/components/waze_travel_time/__init__.py b/homeassistant/components/waze_travel_time/__init__.py index 1abcf9d391d..34f22c9218f 100644 --- a/homeassistant/components/waze_travel_time/__init__.py +++ b/homeassistant/components/waze_travel_time/__init__.py @@ -3,12 +3,13 @@ import asyncio from collections.abc import Collection import logging +from typing import Literal from pywaze.route_calculator import CalcRoutesResponse, WazeRouteCalculator, WRCError import voluptuous as vol from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_REGION, Platform +from homeassistant.const import CONF_REGION, Platform, UnitOfLength from homeassistant.core import ( HomeAssistant, ServiceCall, @@ -22,7 +23,10 @@ from homeassistant.helpers.selector import ( SelectSelectorConfig, SelectSelectorMode, TextSelector, + TextSelectorConfig, + TextSelectorType, ) +from homeassistant.util.unit_conversion import DistanceConverter from .const import ( CONF_AVOID_FERRIES, @@ -38,6 +42,7 @@ from .const import ( DEFAULT_FILTER, DEFAULT_VEHICLE_TYPE, DOMAIN, + IMPERIAL_UNITS, METRIC_UNITS, REGIONS, SEMAPHORE, @@ -80,6 +85,18 @@ SERVICE_GET_TRAVEL_TIMES_SCHEMA = vol.Schema( vol.Optional(CONF_AVOID_TOLL_ROADS, default=False): BooleanSelector(), vol.Optional(CONF_AVOID_SUBSCRIPTION_ROADS, default=False): BooleanSelector(), vol.Optional(CONF_AVOID_FERRIES, default=False): BooleanSelector(), + vol.Optional(CONF_INCL_FILTER): TextSelector( + TextSelectorConfig( + type=TextSelectorType.TEXT, + multiple=True, + ), + ), + vol.Optional(CONF_EXCL_FILTER): TextSelector( + TextSelectorConfig( + type=TextSelectorType.TEXT, + multiple=True, + ), + ), } ) @@ -107,6 +124,9 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b avoid_subscription_roads=service.data[CONF_AVOID_SUBSCRIPTION_ROADS], avoid_ferries=service.data[CONF_AVOID_FERRIES], realtime=service.data[CONF_REALTIME], + units=service.data[CONF_UNITS], + incl_filters=service.data.get(CONF_INCL_FILTER, DEFAULT_FILTER), + excl_filters=service.data.get(CONF_EXCL_FILTER, DEFAULT_FILTER), ) return {"routes": [vars(route) for route in response]} if response else None @@ -129,6 +149,7 @@ async def async_get_travel_times( avoid_subscription_roads: bool, avoid_ferries: bool, realtime: bool, + units: Literal["metric", "imperial"] = "metric", incl_filters: Collection[str] | None = None, excl_filters: Collection[str] | None = None, ) -> list[CalcRoutesResponse] | None: @@ -194,6 +215,20 @@ async def async_get_travel_times( route for route in incl_routes if not should_exclude_route(route) ] + if units == IMPERIAL_UNITS: + filtered_routes = [ + CalcRoutesResponse( + name=route.name, + distance=DistanceConverter.convert( + route.distance, UnitOfLength.KILOMETERS, UnitOfLength.MILES + ), + duration=route.duration, + street_names=route.street_names, + ) + for route in filtered_routes + if route.distance is not None 
+ ] + if len(filtered_routes) < 1: _LOGGER.warning("No routes found") return None diff --git a/homeassistant/components/waze_travel_time/sensor.py b/homeassistant/components/waze_travel_time/sensor.py index c2d3ee12cf8..a216a02f61e 100644 --- a/homeassistant/components/waze_travel_time/sensor.py +++ b/homeassistant/components/waze_travel_time/sensor.py @@ -20,7 +20,6 @@ from homeassistant.const import ( CONF_NAME, CONF_REGION, EVENT_HOMEASSISTANT_STARTED, - UnitOfLength, UnitOfTime, ) from homeassistant.core import CoreState, HomeAssistant @@ -28,7 +27,6 @@ from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.httpx_client import get_async_client from homeassistant.helpers.location import find_coordinates -from homeassistant.util.unit_conversion import DistanceConverter from . import async_get_travel_times from .const import ( @@ -44,7 +42,6 @@ from .const import ( CONF_VEHICLE_TYPE, DEFAULT_NAME, DOMAIN, - IMPERIAL_UNITS, SEMAPHORE, ) @@ -201,6 +198,7 @@ class WazeTravelTimeData: avoid_subscription_roads, avoid_ferries, realtime, + self.config_entry.options[CONF_UNITS], incl_filter, excl_filter, ) @@ -211,14 +209,5 @@ class WazeTravelTimeData: return self.duration = route.duration - distance = route.distance - - if self.config_entry.options[CONF_UNITS] == IMPERIAL_UNITS: - # Convert to miles. - self.distance = DistanceConverter.convert( - distance, UnitOfLength.KILOMETERS, UnitOfLength.MILES - ) - else: - self.distance = distance - + self.distance = route.distance self.route = route.name diff --git a/homeassistant/components/waze_travel_time/services.yaml b/homeassistant/components/waze_travel_time/services.yaml index 7fba565dd47..fd5f2e9adea 100644 --- a/homeassistant/components/waze_travel_time/services.yaml +++ b/homeassistant/components/waze_travel_time/services.yaml @@ -55,3 +55,13 @@ get_travel_times: required: false selector: boolean: + incl_filter: + required: false + selector: + text: + multiple: true + excl_filter: + required: false + selector: + text: + multiple: true diff --git a/homeassistant/components/waze_travel_time/strings.json b/homeassistant/components/waze_travel_time/strings.json index f053f033307..cca1789bf7e 100644 --- a/homeassistant/components/waze_travel_time/strings.json +++ b/homeassistant/components/waze_travel_time/strings.json @@ -101,6 +101,14 @@ "avoid_subscription_roads": { "name": "[%key:component::waze_travel_time::options::step::init::data::avoid_subscription_roads%]", "description": "Whether to avoid subscription roads." + }, + "incl_filter": { + "name": "[%key:component::waze_travel_time::options::step::init::data::incl_filter%]", + "description": "Exact streetname which must be part of the selected route." + }, + "excl_filter": { + "name": "[%key:component::waze_travel_time::options::step::init::data::excl_filter%]", + "description": "Exact streetname which must NOT be part of the selected route." 
} } } diff --git a/tests/components/waze_travel_time/test_init.py b/tests/components/waze_travel_time/test_init.py index 9c59278ff99..89bccc00985 100644 --- a/tests/components/waze_travel_time/test_init.py +++ b/tests/components/waze_travel_time/test_init.py @@ -44,6 +44,8 @@ async def test_service_get_travel_times(hass: HomeAssistant) -> None: "destination": "location2", "vehicle_type": "car", "region": "us", + "units": "imperial", + "incl_filter": ["IncludeThis"], }, blocking=True, return_response=True, @@ -51,17 +53,11 @@ async def test_service_get_travel_times(hass: HomeAssistant) -> None: assert response_data == { "routes": [ { - "distance": 300, + "distance": pytest.approx(186.4113), "duration": 150, "name": "E1337 - Teststreet", "street_names": ["E1337", "IncludeThis", "Teststreet"], }, - { - "distance": 500, - "duration": 600, - "name": "E0815 - Otherstreet", - "street_names": ["E0815", "ExcludeThis", "Otherstreet"], - }, ] } From b5f6734197a83d93e00bec080edcf42126961370 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 17 Dec 2024 19:23:54 +0100 Subject: [PATCH 0774/1198] Simplify modern_forms config flow (part 2) (#130494) --- .../components/modern_forms/config_flow.py | 67 +++++++++---------- 1 file changed, 30 insertions(+), 37 deletions(-) diff --git a/homeassistant/components/modern_forms/config_flow.py b/homeassistant/components/modern_forms/config_flow.py index 6799dbf97d3..3c217b5747f 100644 --- a/homeassistant/components/modern_forms/config_flow.py +++ b/homeassistant/components/modern_forms/config_flow.py @@ -22,7 +22,7 @@ class ModernFormsFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - host: str | None = None + host: str mac: str | None = None name: str @@ -30,7 +30,13 @@ class ModernFormsFlowHandler(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle setup by user for Modern Forms integration.""" - return await self._handle_config_flow(user_input) + if user_input is None: + return self.async_show_form( + step_id="user", + data_schema=USER_SCHEMA, + ) + self.host = user_input[CONF_HOST] + return await self._handle_config_flow() async def async_step_zeroconf( self, discovery_info: zeroconf.ZeroconfServiceInfo @@ -44,40 +50,26 @@ class ModernFormsFlowHandler(ConfigFlow, domain=DOMAIN): self.mac = discovery_info.properties.get(CONF_MAC) self.name = name - # Prepare configuration flow - return await self._handle_config_flow({}, True) + # Loop through self._handle_config_flow to ensure we load the + # MAC if it is missing, and abort if already configured + return await self._handle_config_flow(True) async def async_step_zeroconf_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initiated by zeroconf.""" - return await self._handle_config_flow(user_input) + return await self._handle_config_flow() async def _handle_config_flow( - self, user_input: dict[str, Any] | None = None, prepare: bool = False + self, initial_zeroconf: bool = False ) -> ConfigFlowResult: """Config flow handler for ModernForms.""" - # Request user input, unless we are preparing discovery flow - if user_input is None: - user_input = {} - if not prepare: - if self.source == SOURCE_ZEROCONF: - return self.async_show_form( - step_id="zeroconf_confirm", - description_placeholders={"name": self.name}, - ) - return self.async_show_form( - step_id="user", - data_schema=USER_SCHEMA, - ) - - if self.source == SOURCE_ZEROCONF: - user_input[CONF_HOST] = self.host - 
user_input[CONF_MAC] = self.mac - - if user_input.get(CONF_MAC) is None or not prepare: + if self.mac is None or not initial_zeroconf: + # User flow + # Or zeroconf without MAC + # Or zeroconf with MAC, but need to ensure device is still available session = async_get_clientsession(self.hass) - device = ModernFormsDevice(user_input[CONF_HOST], session=session) + device = ModernFormsDevice(self.host, session=session) try: device = await device.update() except ModernFormsConnectionError: @@ -88,20 +80,21 @@ class ModernFormsFlowHandler(ConfigFlow, domain=DOMAIN): data_schema=USER_SCHEMA, errors={"base": "cannot_connect"}, ) - user_input[CONF_MAC] = device.info.mac_address + self.mac = device.info.mac_address + if self.source != SOURCE_ZEROCONF: + self.name = device.info.device_name # Check if already configured - await self.async_set_unique_id(user_input[CONF_MAC]) - self._abort_if_unique_id_configured(updates={CONF_HOST: user_input[CONF_HOST]}) + await self.async_set_unique_id(self.mac) + self._abort_if_unique_id_configured(updates={CONF_HOST: self.host}) - title = device.info.device_name - if self.source == SOURCE_ZEROCONF: - title = self.name - - if prepare: - return await self.async_step_zeroconf_confirm() + if initial_zeroconf: + return self.async_show_form( + step_id="zeroconf_confirm", + description_placeholders={"name": self.name}, + ) return self.async_create_entry( - title=title, - data={CONF_HOST: user_input[CONF_HOST], CONF_MAC: user_input[CONF_MAC]}, + title=self.name, + data={CONF_HOST: self.host, CONF_MAC: self.mac}, ) From af1222e97ba00eb1ebcec2049c25b77a70e064a1 Mon Sep 17 00:00:00 2001 From: Artur Pragacz <49985303+arturpragacz@users.noreply.github.com> Date: Tue, 17 Dec 2024 19:31:25 +0100 Subject: [PATCH 0775/1198] Distinct sources per zone in Onkyo (#130547) --- .../components/onkyo/media_player.py | 70 +++++++++++++------ homeassistant/components/onkyo/strings.json | 5 ++ 2 files changed, 53 insertions(+), 22 deletions(-) diff --git a/homeassistant/components/onkyo/media_player.py b/homeassistant/components/onkyo/media_player.py index 24d63c0d9e4..76194672bb7 100644 --- a/homeassistant/components/onkyo/media_player.py +++ b/homeassistant/components/onkyo/media_player.py @@ -3,6 +3,7 @@ from __future__ import annotations import asyncio +from functools import cache import logging from typing import Any, Literal @@ -19,6 +20,7 @@ from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import CONF_HOST, CONF_NAME from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback from homeassistant.data_entry_flow import FlowResultType +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import config_validation as cv, entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue @@ -128,10 +130,17 @@ VIDEO_INFORMATION_MAPPING = [ ] ISSUE_URL_PLACEHOLDER = "/config/integrations/dashboard/add?domain=onkyo" -type InputLibValue = str | tuple[str, ...] +type LibValue = str | tuple[str, ...] 
-def _input_lib_cmds(zone: str) -> dict[InputSource, InputLibValue]: +def _get_single_lib_value(value: LibValue) -> str: + if isinstance(value, str): + return value + return value[0] + + +@cache +def _input_source_lib_mappings(zone: str) -> dict[InputSource, LibValue]: match zone: case "main": cmds = PYEISCP_COMMANDS["main"]["SLI"] @@ -142,7 +151,7 @@ def _input_lib_cmds(zone: str) -> dict[InputSource, InputLibValue]: case "zone4": cmds = PYEISCP_COMMANDS["zone4"]["SL4"] - result: dict[InputSource, InputLibValue] = {} + result: dict[InputSource, LibValue] = {} for k, v in cmds["values"].items(): try: source = InputSource(k) @@ -153,6 +162,11 @@ def _input_lib_cmds(zone: str) -> dict[InputSource, InputLibValue]: return result +@cache +def _rev_input_source_lib_mappings(zone: str) -> dict[LibValue, InputSource]: + return {value: key for key, value in _input_source_lib_mappings(zone).items()} + + async def async_setup_platform( hass: HomeAssistant, config: ConfigType, @@ -164,7 +178,7 @@ async def async_setup_platform( source_mapping: dict[str, InputSource] = {} for zone in ZONES: - for source, source_lib in _input_lib_cmds(zone).items(): + for source, source_lib in _input_source_lib_mappings(zone).items(): if isinstance(source_lib, str): source_mapping.setdefault(source_lib, source) else: @@ -353,14 +367,18 @@ class OnkyoMediaPlayer(MediaPlayerEntity): self._volume_resolution = volume_resolution self._max_volume = max_volume - self._name_mapping = sources - self._reverse_name_mapping = {value: key for key, value in sources.items()} - self._lib_mapping = _input_lib_cmds(zone) - self._reverse_lib_mapping = { - value: key for key, value in self._lib_mapping.items() + self._source_lib_mapping = _input_source_lib_mappings(zone) + self._rev_source_lib_mapping = _rev_input_source_lib_mappings(zone) + self._source_mapping = { + key: value + for key, value in sources.items() + if key in self._source_lib_mapping + } + self._rev_source_mapping = { + value: key for key, value in self._source_mapping.items() } - self._attr_source_list = list(sources.values()) + self._attr_source_list = list(self._rev_source_mapping) self._attr_extra_state_attributes = {} async def async_added_to_hass(self) -> None: @@ -429,12 +447,18 @@ class OnkyoMediaPlayer(MediaPlayerEntity): async def async_select_source(self, source: str) -> None: """Select input source.""" - if self.source_list and source in self.source_list: - source_lib = self._lib_mapping[self._reverse_name_mapping[source]] - if isinstance(source_lib, str): - source_lib_single = source_lib - else: - source_lib_single = source_lib[0] + if not self.source_list or source not in self.source_list: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_source", + translation_placeholders={ + "invalid_source": source, + "entity_id": self.entity_id, + }, + ) + + source_lib = self._source_lib_mapping[self._rev_source_mapping[source]] + source_lib_single = _get_single_lib_value(source_lib) self._update_receiver( "input-selector" if self._zone == "main" else "selector", source_lib_single ) @@ -448,7 +472,7 @@ class OnkyoMediaPlayer(MediaPlayerEntity): ) -> None: """Play radio station by preset number.""" if self.source is not None: - source = self._reverse_name_mapping[self.source] + source = self._rev_source_mapping[self.source] if media_type.lower() == "radio" and source in DEFAULT_PLAYABLE_SOURCES: self._update_receiver("preset", media_id) @@ -520,15 +544,17 @@ class OnkyoMediaPlayer(MediaPlayerEntity): self.async_write_ha_state() 
@callback - def _parse_source(self, source_lib: InputLibValue) -> None: - source = self._reverse_lib_mapping[source_lib] - if source in self._name_mapping: - self._attr_source = self._name_mapping[source] + def _parse_source(self, source_lib: LibValue) -> None: + source = self._rev_source_lib_mapping[source_lib] + if source in self._source_mapping: + self._attr_source = self._source_mapping[source] return source_meaning = source.value_meaning _LOGGER.error( - 'Input source "%s" not in source list: %s', source_meaning, self.entity_id + 'Input source "%s" is invalid for entity: %s', + source_meaning, + self.entity_id, ) self._attr_source = source_meaning diff --git a/homeassistant/components/onkyo/strings.json b/homeassistant/components/onkyo/strings.json index 95ca1199a36..849171c7161 100644 --- a/homeassistant/components/onkyo/strings.json +++ b/homeassistant/components/onkyo/strings.json @@ -69,5 +69,10 @@ "title": "The Onkyo YAML configuration import failed", "description": "Configuring Onkyo using YAML is being removed but there was a connection error when importing your YAML configuration for host {host}.\n\nEnsure the connection to the receiver works and restart Home Assistant to try again or remove the Onkyo YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." } + }, + "exceptions": { + "invalid_source": { + "message": "Cannot select input source \"{invalid_source}\" for entity: {entity_id}." + } } } From 633433709f0cf7744c2ec62e0a5cfcce68f5c120 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 17 Dec 2024 20:00:02 +0100 Subject: [PATCH 0776/1198] Clean up backups after manual backup (#133434) * Clean up backups after manual backup * Address review comments --- homeassistant/components/backup/config.py | 40 ++-- homeassistant/components/backup/manager.py | 6 +- tests/components/backup/conftest.py | 4 +- tests/components/backup/test_websocket.py | 261 +++++++++++++++++++++ 4 files changed, 289 insertions(+), 22 deletions(-) diff --git a/homeassistant/components/backup/config.py b/homeassistant/components/backup/config.py index 26ce691a4cc..ef21dc81ee5 100644 --- a/homeassistant/components/backup/config.py +++ b/homeassistant/components/backup/config.py @@ -323,25 +323,6 @@ class BackupSchedule: # and handled in the future LOGGER.exception("Unexpected error creating automatic backup") - # delete old backups more numerous than copies - - def _backups_filter( - backups: dict[str, ManagerBackup], - ) -> dict[str, ManagerBackup]: - """Return oldest backups more numerous than copies to delete.""" - # we need to check here since we await before - # this filter is applied - if config_data.retention.copies is None: - return {} - return dict( - sorted( - backups.items(), - key=lambda backup_item: backup_item[1].date, - )[: len(backups) - config_data.retention.copies] - ) - - await _delete_filtered_backups(manager, _backups_filter) - manager.remove_next_backup_event = async_track_point_in_time( manager.hass, _create_backup, next_time ) @@ -469,3 +450,24 @@ async def _delete_filtered_backups( "Error deleting old copies: %s", agent_errors, ) + + +async def delete_backups_exceeding_configured_count(manager: BackupManager) -> None: + """Delete backups exceeding the configured retention count.""" + + def _backups_filter( + backups: dict[str, ManagerBackup], + ) -> dict[str, ManagerBackup]: + """Return oldest backups more numerous than copies to delete.""" + # we need to check here since we await before + # this filter is applied + if 
manager.config.data.retention.copies is None: + return {} + return dict( + sorted( + backups.items(), + key=lambda backup_item: backup_item[1].date, + )[: len(backups) - manager.config.data.retention.copies] + ) + + await _delete_filtered_backups(manager, _backups_filter) diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index 66977e568e4..d6abc299317 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -33,7 +33,7 @@ from .agent import ( BackupAgentPlatformProtocol, LocalBackupAgent, ) -from .config import BackupConfig +from .config import BackupConfig, delete_backups_exceeding_configured_count from .const import ( BUF_SIZE, DATA_MANAGER, @@ -750,6 +750,10 @@ class BackupManager: self.known_backups.add( written_backup.backup, agent_errors, with_strategy_settings ) + + # delete old backups more numerous than copies + await delete_backups_exceeding_configured_count(self) + self.async_on_backup_event( CreateBackupEvent(stage=None, state=CreateBackupState.COMPLETED) ) diff --git a/tests/components/backup/conftest.py b/tests/components/backup/conftest.py index 13f2537db47..ee855fb70f2 100644 --- a/tests/components/backup/conftest.py +++ b/tests/components/backup/conftest.py @@ -9,7 +9,7 @@ from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest -from homeassistant.components.backup.manager import WrittenBackup +from homeassistant.components.backup.manager import NewBackup, WrittenBackup from homeassistant.core import HomeAssistant from .common import TEST_BACKUP_PATH_ABC123 @@ -76,7 +76,7 @@ def mock_create_backup() -> Generator[AsyncMock]: with patch( "homeassistant.components.backup.CoreBackupReaderWriter.async_create_backup" ) as mock_create_backup: - mock_create_backup.return_value = (MagicMock(), fut) + mock_create_backup.return_value = (NewBackup(backup_job_id="abc123"), fut) yield mock_create_backup diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index 4a94689c19e..665512eca97 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -1637,6 +1637,267 @@ async def test_config_retention_copies_logic( ) +@pytest.mark.parametrize( + ("backup_command", "backup_time"), + [ + ( + {"type": "backup/generate_with_strategy_settings"}, + "2024-11-11T12:00:00+01:00", + ), + ( + {"type": "backup/generate", "agent_ids": ["test.test-agent"]}, + None, + ), + ], +) +@pytest.mark.parametrize( + ( + "config_command", + "backups", + "get_backups_agent_errors", + "delete_backup_agent_errors", + "backup_calls", + "get_backups_calls", + "delete_calls", + "delete_args_list", + ), + [ + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": None, "days": None}, + "schedule": "never", + }, + { + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), + }, + {}, + {}, + 1, + 1, # we get backups even if backup retention copies is None + 0, + [], + ), + ( + { + "type": "backup/config/update", + "create_backup": 
{"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 3, "days": None}, + "schedule": "never", + }, + { + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), + }, + {}, + {}, + 1, + 1, + 0, + [], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 3, "days": None}, + "schedule": "never", + }, + { + "backup-1": MagicMock( + date="2024-11-09T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-5": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), + }, + {}, + {}, + 1, + 1, + 1, + [call("backup-1")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 2, "days": None}, + "schedule": "never", + }, + { + "backup-1": MagicMock( + date="2024-11-09T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-5": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), + }, + {}, + {}, + 1, + 1, + 2, + [call("backup-1"), call("backup-2")], + ), + ], +) +async def test_config_retention_copies_logic_manual_backup( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, + hass_storage: dict[str, Any], + create_backup: AsyncMock, + delete_backup: AsyncMock, + get_backups: AsyncMock, + config_command: dict[str, Any], + backup_command: dict[str, Any], + backups: dict[str, Any], + get_backups_agent_errors: dict[str, Exception], + delete_backup_agent_errors: dict[str, Exception], + backup_time: str, + backup_calls: int, + get_backups_calls: int, + delete_calls: int, + delete_args_list: Any, +) -> None: + """Test config backup retention copies logic for manual backup.""" + client = await hass_ws_client(hass) + storage_data = { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": ["test-addon"], + "include_all_addons": False, + "include_database": True, + "include_folders": ["media"], + "name": "test-name", + "password": "test-password", + }, + "retention": {"copies": None, "days": None}, + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + "schedule": {"state": "daily"}, + }, + } + hass_storage[DOMAIN] = { + "data": storage_data, + "key": DOMAIN, + 
"version": 1, + } + get_backups.return_value = (backups, get_backups_agent_errors) + delete_backup.return_value = delete_backup_agent_errors + await hass.config.async_set_time_zone("Europe/Amsterdam") + freezer.move_to("2024-11-11 12:00:00+01:00") + + await setup_backup_integration(hass, remote_agents=["test-agent"]) + await hass.async_block_till_done() + + await client.send_json_auto_id(config_command) + result = await client.receive_json() + assert result["success"] + + # Create a manual backup + await client.send_json_auto_id(backup_command) + result = await client.receive_json() + assert result["success"] + + # Wait for backup creation to complete + await hass.async_block_till_done() + + assert create_backup.call_count == backup_calls + assert get_backups.call_count == get_backups_calls + assert delete_backup.call_count == delete_calls + assert delete_backup.call_args_list == delete_args_list + async_fire_time_changed(hass, fire_all=True) # flush out storage save + await hass.async_block_till_done() + assert ( + hass_storage[DOMAIN]["data"]["config"]["last_attempted_strategy_backup"] + == backup_time + ) + assert ( + hass_storage[DOMAIN]["data"]["config"]["last_completed_strategy_backup"] + == backup_time + ) + + @pytest.mark.parametrize( ( "command", From d22668a1662beef164fa769be836f106d37263a6 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 17 Dec 2024 20:02:12 +0100 Subject: [PATCH 0777/1198] Don't run recorder data migration on new databases (#133412) * Don't run recorder data migration on new databases * Add tests --- homeassistant/components/recorder/core.py | 1 + .../components/recorder/migration.py | 107 +++++++-- tests/components/recorder/test_init.py | 9 +- tests/components/recorder/test_migrate.py | 2 + ..._migration_run_time_migrations_remember.py | 205 ++++++++++++++++-- .../components/recorder/test_v32_migration.py | 53 ++++- 6 files changed, 330 insertions(+), 47 deletions(-) diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py index 76cf0a7c05e..9d9b70586a6 100644 --- a/homeassistant/components/recorder/core.py +++ b/homeassistant/components/recorder/core.py @@ -970,6 +970,7 @@ class Recorder(threading.Thread): # which does not need migration or repair. 
new_schema_status = migration.SchemaValidationStatus( current_version=SCHEMA_VERSION, + initial_version=SCHEMA_VERSION, migration_needed=False, non_live_data_migration_needed=False, schema_errors=set(), diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index b28ca4399c8..74e3b08f51c 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -180,7 +180,27 @@ def raise_if_exception_missing_str(ex: Exception, match_substrs: Iterable[str]) raise ex -def _get_schema_version(session: Session) -> int | None: +def _get_initial_schema_version(session: Session) -> int | None: + """Get the schema version the database was created with.""" + res = ( + session.query(SchemaChanges.schema_version) + .order_by(SchemaChanges.change_id.asc()) + .first() + ) + return getattr(res, "schema_version", None) + + +def get_initial_schema_version(session_maker: Callable[[], Session]) -> int | None: + """Get the schema version the database was created with.""" + try: + with session_scope(session=session_maker(), read_only=True) as session: + return _get_initial_schema_version(session) + except Exception: + _LOGGER.exception("Error when determining DB schema version") + return None + + +def _get_current_schema_version(session: Session) -> int | None: """Get the schema version.""" res = ( session.query(SchemaChanges.schema_version) @@ -190,11 +210,11 @@ def _get_schema_version(session: Session) -> int | None: return getattr(res, "schema_version", None) -def get_schema_version(session_maker: Callable[[], Session]) -> int | None: +def get_current_schema_version(session_maker: Callable[[], Session]) -> int | None: """Get the schema version.""" try: with session_scope(session=session_maker(), read_only=True) as session: - return _get_schema_version(session) + return _get_current_schema_version(session) except Exception: _LOGGER.exception("Error when determining DB schema version") return None @@ -205,6 +225,7 @@ class SchemaValidationStatus: """Store schema validation status.""" current_version: int + initial_version: int migration_needed: bool non_live_data_migration_needed: bool schema_errors: set[str] @@ -227,8 +248,9 @@ def validate_db_schema( """ schema_errors: set[str] = set() - current_version = get_schema_version(session_maker) - if current_version is None: + current_version = get_current_schema_version(session_maker) + initial_version = get_initial_schema_version(session_maker) + if current_version is None or initial_version is None: return None if is_current := _schema_is_current(current_version): @@ -238,11 +260,15 @@ def validate_db_schema( schema_migration_needed = not is_current _non_live_data_migration_needed = non_live_data_migration_needed( - instance, session_maker, current_version + instance, + session_maker, + initial_schema_version=initial_version, + start_schema_version=current_version, ) return SchemaValidationStatus( current_version=current_version, + initial_version=initial_version, non_live_data_migration_needed=_non_live_data_migration_needed, migration_needed=schema_migration_needed or _non_live_data_migration_needed, schema_errors=schema_errors, @@ -377,17 +403,26 @@ def _get_migration_changes(session: Session) -> dict[str, int]: def non_live_data_migration_needed( instance: Recorder, session_maker: Callable[[], Session], - schema_version: int, + *, + initial_schema_version: int, + start_schema_version: int, ) -> bool: """Return True if non-live data migration is needed. 
+ :param initial_schema_version: The schema version the database was created with. + :param start_schema_version: The schema version when starting the migration. + This must only be called if database schema is current. """ migration_needed = False with session_scope(session=session_maker()) as session: migration_changes = _get_migration_changes(session) for migrator_cls in NON_LIVE_DATA_MIGRATORS: - migrator = migrator_cls(schema_version, migration_changes) + migrator = migrator_cls( + initial_schema_version=initial_schema_version, + start_schema_version=start_schema_version, + migration_changes=migration_changes, + ) migration_needed |= migrator.needs_migrate(instance, session) return migration_needed @@ -406,7 +441,11 @@ def migrate_data_non_live( migration_changes = _get_migration_changes(session) for migrator_cls in NON_LIVE_DATA_MIGRATORS: - migrator = migrator_cls(schema_status.start_version, migration_changes) + migrator = migrator_cls( + initial_schema_version=schema_status.initial_version, + start_schema_version=schema_status.start_version, + migration_changes=migration_changes, + ) migrator.migrate_all(instance, session_maker) @@ -423,7 +462,11 @@ def migrate_data_live( migration_changes = _get_migration_changes(session) for migrator_cls in LIVE_DATA_MIGRATORS: - migrator = migrator_cls(schema_status.start_version, migration_changes) + migrator = migrator_cls( + initial_schema_version=schema_status.initial_version, + start_schema_version=schema_status.start_version, + migration_changes=migration_changes, + ) migrator.queue_migration(instance, session) @@ -2233,7 +2276,7 @@ def initialize_database(session_maker: Callable[[], Session]) -> bool: """Initialize a new database.""" try: with session_scope(session=session_maker(), read_only=True) as session: - if _get_schema_version(session) is not None: + if _get_current_schema_version(session) is not None: return True with session_scope(session=session_maker()) as session: @@ -2277,13 +2320,25 @@ class BaseMigration(ABC): """Base class for migrations.""" index_to_drop: tuple[str, str] | None = None - required_schema_version = 0 + required_schema_version = 0 # Schema version required to run migration queries + max_initial_schema_version: int # Skip migration if db created after this version migration_version = 1 migration_id: str - def __init__(self, schema_version: int, migration_changes: dict[str, int]) -> None: - """Initialize a new BaseRunTimeMigration.""" - self.schema_version = schema_version + def __init__( + self, + *, + initial_schema_version: int, + start_schema_version: int, + migration_changes: dict[str, int], + ) -> None: + """Initialize a new BaseRunTimeMigration. + + :param initial_schema_version: The schema version the database was created with. + :param start_schema_version: The schema version when starting the migration. + """ + self.initial_schema_version = initial_schema_version + self.start_schema_version = start_schema_version self.migration_changes = migration_changes @abstractmethod @@ -2324,7 +2379,15 @@ class BaseMigration(ABC): mark the migration as done in the database if its not already marked as done. 
""" - if self.schema_version < self.required_schema_version: + if self.initial_schema_version > self.max_initial_schema_version: + _LOGGER.debug( + "Data migration '%s' not needed, database created with version %s " + "after migrator was added", + self.migration_id, + self.initial_schema_version, + ) + return False + if self.start_schema_version < self.required_schema_version: # Schema is too old, we must have to migrate _LOGGER.info( "Data migration '%s' needed, schema too old", self.migration_id @@ -2426,6 +2489,7 @@ class StatesContextIDMigration(BaseMigrationWithQuery, BaseOffLineMigration): """Migration to migrate states context_ids to binary format.""" required_schema_version = CONTEXT_ID_AS_BINARY_SCHEMA_VERSION + max_initial_schema_version = CONTEXT_ID_AS_BINARY_SCHEMA_VERSION - 1 migration_id = "state_context_id_as_binary" migration_version = 2 index_to_drop = ("states", "ix_states_context_id") @@ -2469,6 +2533,7 @@ class EventsContextIDMigration(BaseMigrationWithQuery, BaseOffLineMigration): """Migration to migrate events context_ids to binary format.""" required_schema_version = CONTEXT_ID_AS_BINARY_SCHEMA_VERSION + max_initial_schema_version = CONTEXT_ID_AS_BINARY_SCHEMA_VERSION - 1 migration_id = "event_context_id_as_binary" migration_version = 2 index_to_drop = ("events", "ix_events_context_id") @@ -2512,6 +2577,7 @@ class EventTypeIDMigration(BaseMigrationWithQuery, BaseOffLineMigration): """Migration to migrate event_type to event_type_ids.""" required_schema_version = EVENT_TYPE_IDS_SCHEMA_VERSION + max_initial_schema_version = EVENT_TYPE_IDS_SCHEMA_VERSION - 1 migration_id = "event_type_id_migration" def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: @@ -2581,6 +2647,7 @@ class EntityIDMigration(BaseMigrationWithQuery, BaseOffLineMigration): """Migration to migrate entity_ids to states_meta.""" required_schema_version = STATES_META_SCHEMA_VERSION + max_initial_schema_version = STATES_META_SCHEMA_VERSION - 1 migration_id = "entity_id_migration" def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: @@ -2660,6 +2727,7 @@ class EventIDPostMigration(BaseRunTimeMigration): """Migration to remove old event_id index from states.""" migration_id = "event_id_post_migration" + max_initial_schema_version = LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION - 1 task = MigrationTask migration_version = 2 @@ -2728,7 +2796,7 @@ class EventIDPostMigration(BaseRunTimeMigration): self, instance: Recorder, session: Session ) -> DataMigrationStatus: """Return if the migration needs to run.""" - if self.schema_version <= LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION: + if self.start_schema_version <= LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION: return DataMigrationStatus(needs_migrate=False, migration_done=False) if get_index_by_name( session, TABLE_STATES, LEGACY_STATES_EVENT_ID_INDEX @@ -2745,6 +2813,7 @@ class EntityIDPostMigration(BaseMigrationWithQuery, BaseOffLineMigration): """ migration_id = "entity_id_post_migration" + max_initial_schema_version = STATES_META_SCHEMA_VERSION - 1 index_to_drop = (TABLE_STATES, LEGACY_STATES_ENTITY_ID_LAST_UPDATED_INDEX) def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: @@ -2758,8 +2827,8 @@ class EntityIDPostMigration(BaseMigrationWithQuery, BaseOffLineMigration): NON_LIVE_DATA_MIGRATORS: tuple[type[BaseOffLineMigration], ...] 
= ( - StatesContextIDMigration, # Introduced in HA Core 2023.4 - EventsContextIDMigration, # Introduced in HA Core 2023.4 + StatesContextIDMigration, # Introduced in HA Core 2023.4 by PR #88942 + EventsContextIDMigration, # Introduced in HA Core 2023.4 by PR #88942 EventTypeIDMigration, # Introduced in HA Core 2023.4 by PR #89465 EntityIDMigration, # Introduced in HA Core 2023.4 by PR #89557 EntityIDPostMigration, # Introduced in HA Core 2023.4 by PR #89557 diff --git a/tests/components/recorder/test_init.py b/tests/components/recorder/test_init.py index d16712e0c70..7e5abf1b514 100644 --- a/tests/components/recorder/test_init.py +++ b/tests/components/recorder/test_init.py @@ -964,12 +964,17 @@ async def test_recorder_setup_failure(hass: HomeAssistant) -> None: hass.stop() -async def test_recorder_validate_schema_failure(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + "function_to_patch", ["_get_current_schema_version", "_get_initial_schema_version"] +) +async def test_recorder_validate_schema_failure( + hass: HomeAssistant, function_to_patch: str +) -> None: """Test some exceptions.""" recorder_helper.async_initialize_recorder(hass) with ( patch( - "homeassistant.components.recorder.migration._get_schema_version" + f"homeassistant.components.recorder.migration.{function_to_patch}" ) as inspect_schema_version, patch("homeassistant.components.recorder.core.time.sleep"), ): diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index 14978bee5a9..462db70496a 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -97,6 +97,7 @@ async def test_schema_update_calls( session_maker, migration.SchemaValidationStatus( current_version=0, + initial_version=0, migration_needed=True, non_live_data_migration_needed=True, schema_errors=set(), @@ -111,6 +112,7 @@ async def test_schema_update_calls( session_maker, migration.SchemaValidationStatus( current_version=42, + initial_version=0, migration_needed=True, non_live_data_migration_needed=True, schema_errors=set(), diff --git a/tests/components/recorder/test_migration_run_time_migrations_remember.py b/tests/components/recorder/test_migration_run_time_migrations_remember.py index 7a333b0a2f5..fa14570bc6b 100644 --- a/tests/components/recorder/test_migration_run_time_migrations_remember.py +++ b/tests/components/recorder/test_migration_run_time_migrations_remember.py @@ -1,8 +1,9 @@ """Test run time migrations are remembered in the migration_changes table.""" +from collections.abc import Callable import importlib import sys -from unittest.mock import patch +from unittest.mock import Mock, patch import pytest from sqlalchemy import create_engine @@ -10,6 +11,7 @@ from sqlalchemy.orm import Session from homeassistant.components import recorder from homeassistant.components.recorder import core, migration, statistics +from homeassistant.components.recorder.db_schema import SCHEMA_VERSION from homeassistant.components.recorder.migration import MigrationTask from homeassistant.components.recorder.queries import get_migration_changes from homeassistant.components.recorder.util import ( @@ -25,7 +27,8 @@ from tests.common import async_test_home_assistant from tests.typing import RecorderInstanceGenerator CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine" -SCHEMA_MODULE = "tests.components.recorder.db_schema_32" +SCHEMA_MODULE_32 = "tests.components.recorder.db_schema_32" +SCHEMA_MODULE_CURRENT = 
"homeassistant.components.recorder.db_schema" @pytest.fixture @@ -46,26 +49,190 @@ def _get_migration_id(hass: HomeAssistant) -> dict[str, int]: return dict(execute_stmt_lambda_element(session, get_migration_changes())) -def _create_engine_test(*args, **kwargs): +def _create_engine_test( + schema_module: str, *, initial_version: int | None = None +) -> Callable: """Test version of create_engine that initializes with old schema. This simulates an existing db with the old schema. """ - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] - engine = create_engine(*args, **kwargs) - old_db_schema.Base.metadata.create_all(engine) - with Session(engine) as session: - session.add( - recorder.db_schema.StatisticsRuns(start=statistics.get_start_time()) - ) - session.add( - recorder.db_schema.SchemaChanges( - schema_version=old_db_schema.SCHEMA_VERSION + + def _create_engine_test(*args, **kwargs): + """Test version of create_engine that initializes with old schema. + + This simulates an existing db with the old schema. + """ + importlib.import_module(schema_module) + old_db_schema = sys.modules[schema_module] + engine = create_engine(*args, **kwargs) + old_db_schema.Base.metadata.create_all(engine) + with Session(engine) as session: + session.add( + recorder.db_schema.StatisticsRuns(start=statistics.get_start_time()) ) + if initial_version is not None: + session.add( + recorder.db_schema.SchemaChanges(schema_version=initial_version) + ) + session.add( + recorder.db_schema.SchemaChanges( + schema_version=old_db_schema.SCHEMA_VERSION + ) + ) + session.commit() + return engine + + return _create_engine_test + + +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage +@pytest.mark.parametrize( + ("initial_version", "expected_migrator_calls"), + [ + ( + 27, + { + "state_context_id_as_binary": 1, + "event_context_id_as_binary": 1, + "event_type_id_migration": 1, + "entity_id_migration": 1, + "event_id_post_migration": 1, + "entity_id_post_migration": 1, + }, + ), + ( + 28, + { + "state_context_id_as_binary": 1, + "event_context_id_as_binary": 1, + "event_type_id_migration": 1, + "entity_id_migration": 1, + "event_id_post_migration": 0, + "entity_id_post_migration": 1, + }, + ), + ( + 36, + { + "state_context_id_as_binary": 0, + "event_context_id_as_binary": 0, + "event_type_id_migration": 1, + "entity_id_migration": 1, + "event_id_post_migration": 0, + "entity_id_post_migration": 1, + }, + ), + ( + 37, + { + "state_context_id_as_binary": 0, + "event_context_id_as_binary": 0, + "event_type_id_migration": 0, + "entity_id_migration": 1, + "event_id_post_migration": 0, + "entity_id_post_migration": 1, + }, + ), + ( + 38, + { + "state_context_id_as_binary": 0, + "event_context_id_as_binary": 0, + "event_type_id_migration": 0, + "entity_id_migration": 0, + "event_id_post_migration": 0, + "entity_id_post_migration": 0, + }, + ), + ( + SCHEMA_VERSION, + { + "state_context_id_as_binary": 0, + "event_context_id_as_binary": 0, + "event_type_id_migration": 0, + "entity_id_migration": 0, + "event_id_post_migration": 0, + "entity_id_post_migration": 0, + }, + ), + ], +) +async def test_data_migrator_new_database( + async_test_recorder: RecorderInstanceGenerator, + initial_version: int, + expected_migrator_calls: dict[str, int], +) -> None: + """Test that the data migrators are not executed on a new database.""" + config = {recorder.CONF_COMMIT_INTERVAL: 1} + + def needs_migrate_mock() -> Mock: + return Mock( + spec_set=[], + 
return_value=migration.DataMigrationStatus( + needs_migrate=False, migration_done=True + ), ) - session.commit() - return engine + + migrator_mocks = { + "state_context_id_as_binary": needs_migrate_mock(), + "event_context_id_as_binary": needs_migrate_mock(), + "event_type_id_migration": needs_migrate_mock(), + "entity_id_migration": needs_migrate_mock(), + "event_id_post_migration": needs_migrate_mock(), + "entity_id_post_migration": needs_migrate_mock(), + } + + with ( + patch.object( + migration.StatesContextIDMigration, + "needs_migrate_impl", + side_effect=migrator_mocks["state_context_id_as_binary"], + ), + patch.object( + migration.EventsContextIDMigration, + "needs_migrate_impl", + side_effect=migrator_mocks["event_context_id_as_binary"], + ), + patch.object( + migration.EventTypeIDMigration, + "needs_migrate_impl", + side_effect=migrator_mocks["event_type_id_migration"], + ), + patch.object( + migration.EntityIDMigration, + "needs_migrate_impl", + side_effect=migrator_mocks["entity_id_migration"], + ), + patch.object( + migration.EventIDPostMigration, + "needs_migrate_impl", + side_effect=migrator_mocks["event_id_post_migration"], + ), + patch.object( + migration.EntityIDPostMigration, + "needs_migrate_impl", + side_effect=migrator_mocks["entity_id_post_migration"], + ), + patch( + CREATE_ENGINE_TARGET, + new=_create_engine_test( + SCHEMA_MODULE_CURRENT, initial_version=initial_version + ), + ), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass, config), + ): + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await _async_wait_migration_done(hass) + hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) + await hass.async_block_till_done() + await hass.async_stop() + + for migrator, mock in migrator_mocks.items(): + assert len(mock.mock_calls) == expected_migrator_calls[migrator] @pytest.mark.parametrize("enable_migrate_state_context_ids", [True]) @@ -84,8 +251,8 @@ async def test_migration_changes_prevent_trying_to_migrate_again( """ config = {recorder.CONF_COMMIT_INTERVAL: 1} - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] # Start with db schema that needs migration (version 32) with ( @@ -98,7 +265,7 @@ async def test_migration_changes_prevent_trying_to_migrate_again( patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), patch.object(core, "StateAttributes", old_db_schema.StateAttributes), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), ): async with ( async_test_home_assistant() as hass, diff --git a/tests/components/recorder/test_v32_migration.py b/tests/components/recorder/test_v32_migration.py index d59486b61f0..21f7037c370 100644 --- a/tests/components/recorder/test_v32_migration.py +++ b/tests/components/recorder/test_v32_migration.py @@ -30,7 +30,9 @@ SCHEMA_MODULE_30 = "tests.components.recorder.db_schema_30" SCHEMA_MODULE_32 = "tests.components.recorder.db_schema_32" -def _create_engine_test(schema_module: str) -> Callable: +def _create_engine_test( + schema_module: str, *, initial_version: int | None = None +) -> Callable: """Test version of create_engine that initializes with old schema. This simulates an existing db with the old schema. 
@@ -49,6 +51,10 @@ def _create_engine_test(schema_module: str) -> Callable: session.add( recorder.db_schema.StatisticsRuns(start=statistics.get_start_time()) ) + if initial_version is not None: + session.add( + recorder.db_schema.SchemaChanges(schema_version=initial_version) + ) session.add( recorder.db_schema.SchemaChanges( schema_version=old_db_schema.SCHEMA_VERSION @@ -70,7 +76,10 @@ async def test_migrate_times( async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, ) -> None: - """Test we can migrate times in the events and states tables.""" + """Test we can migrate times in the events and states tables. + + Also tests entity id post migration. + """ importlib.import_module(SCHEMA_MODULE_30) old_db_schema = sys.modules[SCHEMA_MODULE_30] now = dt_util.utcnow() @@ -122,7 +131,13 @@ async def test_migrate_times( patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_30)), + patch( + CREATE_ENGINE_TARGET, + new=_create_engine_test( + SCHEMA_MODULE_30, + initial_version=27, # Set to 27 for the entity id post migration to run + ), + ), ): async with ( async_test_home_assistant() as hass, @@ -274,7 +289,13 @@ async def test_migrate_can_resume_entity_id_post_migration( patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), + patch( + CREATE_ENGINE_TARGET, + new=_create_engine_test( + SCHEMA_MODULE_32, + initial_version=27, # Set to 27 for the entity id post migration to run + ), + ), ): async with ( async_test_home_assistant() as hass, @@ -394,7 +415,13 @@ async def test_migrate_can_resume_ix_states_event_id_removed( patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), + patch( + CREATE_ENGINE_TARGET, + new=_create_engine_test( + SCHEMA_MODULE_32, + initial_version=27, # Set to 27 for the entity id post migration to run + ), + ), ): async with ( async_test_home_assistant() as hass, @@ -527,7 +554,13 @@ async def test_out_of_disk_space_while_rebuild_states_table( patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), + patch( + CREATE_ENGINE_TARGET, + new=_create_engine_test( + SCHEMA_MODULE_32, + initial_version=27, # Set to 27 for the entity id post migration to run + ), + ), ): async with ( async_test_home_assistant() as hass, @@ -705,7 +738,13 @@ async def test_out_of_disk_space_while_removing_foreign_key( patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), + patch( + CREATE_ENGINE_TARGET, + new=_create_engine_test( + SCHEMA_MODULE_32, + initial_version=27, # Set to 27 for the entity id post migration to run + ), + ), ): async with ( async_test_home_assistant() as hass, From e9e8228f07bac1a12cb6e256a5f9c9d13a5357fa Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Tue, 17 
Dec 2024 20:18:16 +0100 Subject: [PATCH 0778/1198] Improve empty state handling for SomfyThermostat in Overkiz (#131700) --- .../overkiz/climate/somfy_thermostat.py | 21 +++++++++---------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/overkiz/climate/somfy_thermostat.py b/homeassistant/components/overkiz/climate/somfy_thermostat.py index 66a04af4e7a..d2aa1658302 100644 --- a/homeassistant/components/overkiz/climate/somfy_thermostat.py +++ b/homeassistant/components/overkiz/climate/somfy_thermostat.py @@ -57,10 +57,7 @@ class SomfyThermostat(OverkizEntity, ClimateEntity): _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_supported_features = ( - ClimateEntityFeature.PRESET_MODE - | ClimateEntityFeature.TARGET_TEMPERATURE - | ClimateEntityFeature.TURN_OFF - | ClimateEntityFeature.TURN_ON + ClimateEntityFeature.PRESET_MODE | ClimateEntityFeature.TARGET_TEMPERATURE ) _attr_hvac_modes = [*HVAC_MODES_TO_OVERKIZ] _attr_preset_modes = [*PRESET_MODES_TO_OVERKIZ] @@ -82,11 +79,12 @@ class SomfyThermostat(OverkizEntity, ClimateEntity): @property def hvac_mode(self) -> HVACMode: """Return hvac operation ie. heat, cool mode.""" - return OVERKIZ_TO_HVAC_MODES[ - cast( - str, self.executor.select_state(OverkizState.CORE_DEROGATION_ACTIVATION) - ) - ] + if derogation_activation := self.executor.select_state( + OverkizState.CORE_DEROGATION_ACTIVATION + ): + return OVERKIZ_TO_HVAC_MODES[cast(str, derogation_activation)] + + return HVACMode.AUTO @property def preset_mode(self) -> str: @@ -96,9 +94,10 @@ class SomfyThermostat(OverkizEntity, ClimateEntity): else: state_key = OverkizState.SOMFY_THERMOSTAT_DEROGATION_HEATING_MODE - state = cast(str, self.executor.select_state(state_key)) + if state := self.executor.select_state(state_key): + return OVERKIZ_TO_PRESET_MODES[OverkizCommandParam(cast(str, state))] - return OVERKIZ_TO_PRESET_MODES[OverkizCommandParam(state)] + return PRESET_NONE @property def current_temperature(self) -> float | None: From d785c4b0b1eb6a8a8c57cb80c06d4a367e4bcc7c Mon Sep 17 00:00:00 2001 From: G Johansson Date: Tue, 17 Dec 2024 20:20:26 +0100 Subject: [PATCH 0779/1198] Add optional category in OptionsFlow to holiday (#129514) --- homeassistant/components/holiday/__init__.py | 11 +- homeassistant/components/holiday/calendar.py | 24 ++- .../components/holiday/config_flow.py | 178 +++++++++++++----- homeassistant/components/holiday/const.py | 1 + homeassistant/components/holiday/strings.json | 54 +++++- tests/components/holiday/test_config_flow.py | 151 +++++++++++++-- 6 files changed, 350 insertions(+), 69 deletions(-) diff --git a/homeassistant/components/holiday/__init__.py b/homeassistant/components/holiday/__init__.py index c9a58f29215..b364f2c67a4 100644 --- a/homeassistant/components/holiday/__init__.py +++ b/homeassistant/components/holiday/__init__.py @@ -11,7 +11,7 @@ from homeassistant.const import CONF_COUNTRY, Platform from homeassistant.core import HomeAssistant from homeassistant.setup import SetupPhases, async_pause_setup -from .const import CONF_PROVINCE +from .const import CONF_CATEGORIES, CONF_PROVINCE PLATFORMS: list[Platform] = [Platform.CALENDAR] @@ -20,6 +20,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Holiday from a config entry.""" country: str = entry.data[CONF_COUNTRY] province: str | None = entry.data.get(CONF_PROVINCE) + categories: list[str] | None = entry.options.get(CONF_CATEGORIES) # We only import here to ensure that that its not imported later # in the 
event loop since the platforms will call country_holidays @@ -29,14 +30,20 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: # the holidays library and it is not thread safe to import it in parallel # https://github.com/python/cpython/issues/83065 await hass.async_add_import_executor_job( - partial(country_holidays, country, subdiv=province) + partial(country_holidays, country, subdiv=province, categories=categories) ) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + entry.async_on_unload(entry.add_update_listener(update_listener)) return True +async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: + """Handle options update.""" + await hass.config_entries.async_reload(entry.entry_id) + + async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/holiday/calendar.py b/homeassistant/components/holiday/calendar.py index 6a336870857..6dccd972164 100644 --- a/homeassistant/components/holiday/calendar.py +++ b/homeassistant/components/holiday/calendar.py @@ -4,7 +4,7 @@ from __future__ import annotations from datetime import datetime, timedelta -from holidays import HolidayBase, country_holidays +from holidays import PUBLIC, HolidayBase, country_holidays from homeassistant.components.calendar import CalendarEntity, CalendarEvent from homeassistant.config_entries import ConfigEntry @@ -15,18 +15,27 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_track_point_in_utc_time from homeassistant.util import dt as dt_util -from .const import CONF_PROVINCE, DOMAIN +from .const import CONF_CATEGORIES, CONF_PROVINCE, DOMAIN def _get_obj_holidays_and_language( - country: str, province: str | None, language: str + country: str, + province: str | None, + language: str, + selected_categories: list[str] | None, ) -> tuple[HolidayBase, str]: """Get the object for the requested country and year.""" + if selected_categories is None: + categories = [PUBLIC] + else: + categories = [PUBLIC, *selected_categories] + obj_holidays = country_holidays( country, subdiv=province, years={dt_util.now().year, dt_util.now().year + 1}, language=language, + categories=categories, ) if language == "en": for lang in obj_holidays.supported_languages: @@ -36,6 +45,7 @@ def _get_obj_holidays_and_language( subdiv=province, years={dt_util.now().year, dt_util.now().year + 1}, language=lang, + categories=categories, ) language = lang break @@ -49,6 +59,7 @@ def _get_obj_holidays_and_language( subdiv=province, years={dt_util.now().year, dt_util.now().year + 1}, language=default_language, + categories=categories, ) language = default_language @@ -63,10 +74,11 @@ async def async_setup_entry( """Set up the Holiday Calendar config entry.""" country: str = config_entry.data[CONF_COUNTRY] province: str | None = config_entry.data.get(CONF_PROVINCE) + categories: list[str] | None = config_entry.options.get(CONF_CATEGORIES) language = hass.config.language obj_holidays, language = await hass.async_add_executor_job( - _get_obj_holidays_and_language, country, province, language + _get_obj_holidays_and_language, country, province, language, categories ) async_add_entities( @@ -76,6 +88,7 @@ async def async_setup_entry( country, province, language, + categories, obj_holidays, config_entry.entry_id, ) @@ -99,6 +112,7 @@ class 
HolidayCalendarEntity(CalendarEntity): country: str, province: str | None, language: str, + categories: list[str] | None, obj_holidays: HolidayBase, unique_id: str, ) -> None: @@ -107,6 +121,7 @@ class HolidayCalendarEntity(CalendarEntity): self._province = province self._location = name self._language = language + self._categories = categories self._attr_unique_id = unique_id self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, unique_id)}, @@ -172,6 +187,7 @@ class HolidayCalendarEntity(CalendarEntity): subdiv=self._province, years=list({start_date.year, end_date.year}), language=self._language, + categories=self._categories, ) event_list: list[CalendarEvent] = [] diff --git a/homeassistant/components/holiday/config_flow.py b/homeassistant/components/holiday/config_flow.py index 27b13e34851..00a71351ca7 100644 --- a/homeassistant/components/holiday/config_flow.py +++ b/homeassistant/components/holiday/config_flow.py @@ -5,11 +5,17 @@ from __future__ import annotations from typing import Any from babel import Locale, UnknownLocaleError -from holidays import list_supported_countries +from holidays import PUBLIC, country_holidays, list_supported_countries import voluptuous as vol -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import CONF_COUNTRY +from homeassistant.core import callback from homeassistant.helpers.selector import ( CountrySelector, CountrySelectorConfig, @@ -17,12 +23,47 @@ from homeassistant.helpers.selector import ( SelectSelectorConfig, SelectSelectorMode, ) +from homeassistant.util import dt as dt_util -from .const import CONF_PROVINCE, DOMAIN +from .const import CONF_CATEGORIES, CONF_PROVINCE, DOMAIN SUPPORTED_COUNTRIES = list_supported_countries(include_aliases=False) +def get_optional_categories(country: str) -> list[str]: + """Return the country categories. + + public holidays are always included so they + don't need to be presented to the user. 
+ """ + country_data = country_holidays(country, years=dt_util.utcnow().year) + return [ + category for category in country_data.supported_categories if category != PUBLIC + ] + + +def get_options_schema(country: str) -> vol.Schema: + """Return the options schema.""" + schema = {} + if provinces := SUPPORTED_COUNTRIES[country]: + schema[vol.Optional(CONF_PROVINCE)] = SelectSelector( + SelectSelectorConfig( + options=provinces, + mode=SelectSelectorMode.DROPDOWN, + ) + ) + if categories := get_optional_categories(country): + schema[vol.Optional(CONF_CATEGORIES)] = SelectSelector( + SelectSelectorConfig( + options=categories, + multiple=True, + mode=SelectSelectorMode.DROPDOWN, + translation_key="categories", + ) + ) + return vol.Schema(schema) + + class HolidayConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Holiday.""" @@ -32,6 +73,12 @@ class HolidayConfigFlow(ConfigFlow, domain=DOMAIN): """Initialize the config flow.""" self.data: dict[str, Any] = {} + @staticmethod + @callback + def async_get_options_flow(config_entry: ConfigEntry) -> HolidayOptionsFlowHandler: + """Get the options flow for this handler.""" + return HolidayOptionsFlowHandler() + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -41,8 +88,11 @@ class HolidayConfigFlow(ConfigFlow, domain=DOMAIN): selected_country = user_input[CONF_COUNTRY] - if SUPPORTED_COUNTRIES[selected_country]: - return await self.async_step_province() + options_schema = await self.hass.async_add_executor_job( + get_options_schema, selected_country + ) + if options_schema.schema: + return await self.async_step_options() self._async_abort_entries_match({CONF_COUNTRY: user_input[CONF_COUNTRY]}) @@ -67,24 +117,22 @@ class HolidayConfigFlow(ConfigFlow, domain=DOMAIN): } ) - return self.async_show_form(step_id="user", data_schema=user_schema) + return self.async_show_form(data_schema=user_schema) - async def async_step_province( + async def async_step_options( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: - """Handle the province step.""" + """Handle the options step.""" if user_input is not None: - combined_input: dict[str, Any] = {**self.data, **user_input} + country = self.data[CONF_COUNTRY] + data = {CONF_COUNTRY: country} + options: dict[str, Any] | None = None + if province := user_input.get(CONF_PROVINCE): + data[CONF_PROVINCE] = province + if categories := user_input.get(CONF_CATEGORIES): + options = {CONF_CATEGORIES: categories} - country = combined_input[CONF_COUNTRY] - province = combined_input.get(CONF_PROVINCE) - - self._async_abort_entries_match( - { - CONF_COUNTRY: country, - CONF_PROVINCE: province, - } - ) + self._async_abort_entries_match({**data, **(options or {})}) try: locale = Locale.parse(self.hass.config.language, sep="-") @@ -95,38 +143,33 @@ class HolidayConfigFlow(ConfigFlow, domain=DOMAIN): province_str = f", {province}" if province else "" name = f"{locale.territories[country]}{province_str}" - return self.async_create_entry(title=name, data=combined_input) + return self.async_create_entry(title=name, data=data, options=options) - province_schema = vol.Schema( - { - vol.Optional(CONF_PROVINCE): SelectSelector( - SelectSelectorConfig( - options=SUPPORTED_COUNTRIES[self.data[CONF_COUNTRY]], - mode=SelectSelectorMode.DROPDOWN, - ) - ), - } + options_schema = await self.hass.async_add_executor_job( + get_options_schema, self.data[CONF_COUNTRY] + ) + return self.async_show_form( + step_id="options", + data_schema=options_schema, + 
description_placeholders={CONF_COUNTRY: self.data[CONF_COUNTRY]}, ) - - return self.async_show_form(step_id="province", data_schema=province_schema) async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: - """Handle the re-configuration of a province.""" + """Handle the re-configuration of the options.""" reconfigure_entry = self._get_reconfigure_entry() + if user_input is not None: - combined_input: dict[str, Any] = {**reconfigure_entry.data, **user_input} + country = reconfigure_entry.data[CONF_COUNTRY] + data = {CONF_COUNTRY: country} + options: dict[str, Any] | None = None + if province := user_input.get(CONF_PROVINCE): + data[CONF_PROVINCE] = province + if categories := user_input.get(CONF_CATEGORIES): + options = {CONF_CATEGORIES: categories} - country = combined_input[CONF_COUNTRY] - province = combined_input.get(CONF_PROVINCE) - - self._async_abort_entries_match( - { - CONF_COUNTRY: country, - CONF_PROVINCE: province, - } - ) + self._async_abort_entries_match({**data, **(options or {})}) try: locale = Locale.parse(self.hass.config.language, sep="-") @@ -137,21 +180,60 @@ class HolidayConfigFlow(ConfigFlow, domain=DOMAIN): province_str = f", {province}" if province else "" name = f"{locale.territories[country]}{province_str}" + if options: + return self.async_update_reload_and_abort( + reconfigure_entry, title=name, data=data, options=options + ) return self.async_update_reload_and_abort( - reconfigure_entry, title=name, data=combined_input + reconfigure_entry, title=name, data=data ) - province_schema = vol.Schema( + options_schema = await self.hass.async_add_executor_job( + get_options_schema, reconfigure_entry.data[CONF_COUNTRY] + ) + + return self.async_show_form( + data_schema=options_schema, + description_placeholders={ + CONF_COUNTRY: reconfigure_entry.data[CONF_COUNTRY] + }, + ) + + +class HolidayOptionsFlowHandler(OptionsFlow): + """Handle Holiday options.""" + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Manage Holiday options.""" + if user_input is not None: + return self.async_create_entry(data=user_input) + + categories = await self.hass.async_add_executor_job( + get_optional_categories, self.config_entry.data[CONF_COUNTRY] + ) + if not categories: + return self.async_abort(reason="no_categories") + + schema = vol.Schema( { - vol.Optional(CONF_PROVINCE): SelectSelector( + vol.Optional(CONF_CATEGORIES): SelectSelector( SelectSelectorConfig( - options=SUPPORTED_COUNTRIES[ - reconfigure_entry.data[CONF_COUNTRY] - ], + options=categories, + multiple=True, mode=SelectSelectorMode.DROPDOWN, + translation_key="categories", ) ) } ) - return self.async_show_form(step_id="reconfigure", data_schema=province_schema) + return self.async_show_form( + data_schema=self.add_suggested_values_to_schema( + schema, self.config_entry.options + ), + description_placeholders={ + CONF_COUNTRY: self.config_entry.data[CONF_COUNTRY] + }, + ) diff --git a/homeassistant/components/holiday/const.py b/homeassistant/components/holiday/const.py index ed283f82412..6a28ae1ffec 100644 --- a/homeassistant/components/holiday/const.py +++ b/homeassistant/components/holiday/const.py @@ -5,3 +5,4 @@ from typing import Final DOMAIN: Final = "holiday" CONF_PROVINCE: Final = "province" +CONF_CATEGORIES: Final = "categories" diff --git a/homeassistant/components/holiday/strings.json b/homeassistant/components/holiday/strings.json index ae4930ecdb4..d464f9e8bfd 100644 --- 
a/homeassistant/components/holiday/strings.json +++ b/homeassistant/components/holiday/strings.json @@ -2,7 +2,7 @@ "title": "Holiday", "config": { "abort": { - "already_configured": "Already configured. Only a single configuration for country/province combination possible.", + "already_configured": "Already configured. Only a single configuration for country/province/categories combination is possible.", "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" }, "step": { @@ -11,16 +11,62 @@ "country": "Country" } }, - "province": { + "options": { "data": { - "province": "Province" + "province": "Province", + "categories": "Categories" + }, + "data_description": { + "province": "Optionally choose a province / subdivision of {country}", + "categories": "Optionally choose additional holiday categories, public holidays are already included" } }, "reconfigure": { "data": { - "province": "[%key:component::holiday::config::step::province::data::province%]" + "province": "[%key:component::holiday::config::step::options::data::province%]", + "categories": "[%key:component::holiday::config::step::options::data::categories%]" + }, + "data_description": { + "province": "[%key:component::holiday::config::step::options::data_description::province%]", + "categories": "[%key:component::holiday::config::step::options::data_description::categories%]" } } } + }, + "options": { + "abort": { + "already_configured": "[%key:component::holiday::config::abort::already_configured%]", + "no_categories": "The country has no additional categories to configure." + }, + "step": { + "init": { + "data": { + "categories": "[%key:component::holiday::config::step::options::data::categories%]" + }, + "data_description": { + "categories": "[%key:component::holiday::config::step::options::data_description::categories%]" + } + } + } + }, + "selector": { + "device_class": { + "options": { + "armed_forces": "Armed forces", + "bank": "Bank", + "catholic": "Catholic", + "chinese": "Chinese", + "christian": "Christian", + "government": "Government", + "half_day": "Half day", + "hebrew": "Hebrew", + "hindu": "Hindu", + "islamic": "Islamic", + "optional": "Optional", + "school": "School", + "unofficial": "Unofficial", + "workday": "Workday" + } + } } } diff --git a/tests/components/holiday/test_config_flow.py b/tests/components/holiday/test_config_flow.py index 466dbaffd8b..f561c4a4b9f 100644 --- a/tests/components/holiday/test_config_flow.py +++ b/tests/components/holiday/test_config_flow.py @@ -1,19 +1,25 @@ """Test the Holiday config flow.""" +from datetime import datetime from unittest.mock import AsyncMock +from freezegun.api import FrozenDateTimeFactory +from holidays import UNOFFICIAL import pytest from homeassistant import config_entries -from homeassistant.components.holiday.const import CONF_PROVINCE, DOMAIN -from homeassistant.const import CONF_COUNTRY +from homeassistant.components.holiday.const import ( + CONF_CATEGORIES, + CONF_PROVINCE, + DOMAIN, +) +from homeassistant.const import CONF_COUNTRY, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.util import dt as dt_util from tests.common import MockConfigEntry -pytestmark = pytest.mark.usefixtures("mock_setup_entry") - async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: """Test we get the form.""" @@ -49,6 +55,7 @@ async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: assert 
len(mock_setup_entry.mock_calls) == 1 +@pytest.mark.usefixtures("mock_setup_entry") async def test_form_no_subdivision(hass: HomeAssistant) -> None: """Test we get the forms correctly without subdivision.""" result = await hass.config_entries.flow.async_init( @@ -71,6 +78,7 @@ async def test_form_no_subdivision(hass: HomeAssistant) -> None: } +@pytest.mark.usefixtures("mock_setup_entry") async def test_form_translated_title(hass: HomeAssistant) -> None: """Test the title gets translated.""" hass.config.language = "de" @@ -90,6 +98,7 @@ async def test_form_translated_title(hass: HomeAssistant) -> None: assert result2["title"] == "Schweden" +@pytest.mark.usefixtures("mock_setup_entry") async def test_single_combination_country_province(hass: HomeAssistant) -> None: """Test that configuring more than one instance is rejected.""" data_de = { @@ -129,6 +138,7 @@ async def test_single_combination_country_province(hass: HomeAssistant) -> None: assert result_de_step2["reason"] == "already_configured" +@pytest.mark.usefixtures("mock_setup_entry") async def test_form_babel_unresolved_language(hass: HomeAssistant) -> None: """Test the config flow if using not babel supported language.""" hass.config.language = "en-XX" @@ -175,6 +185,7 @@ async def test_form_babel_unresolved_language(hass: HomeAssistant) -> None: } +@pytest.mark.usefixtures("mock_setup_entry") async def test_form_babel_replace_dash_with_underscore(hass: HomeAssistant) -> None: """Test the config flow if using language with dash.""" hass.config.language = "en-GB" @@ -221,7 +232,8 @@ async def test_form_babel_replace_dash_with_underscore(hass: HomeAssistant) -> N } -async def test_reconfigure(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: +@pytest.mark.usefixtures("mock_setup_entry") +async def test_reconfigure(hass: HomeAssistant) -> None: """Test reconfigure flow.""" entry = MockConfigEntry( domain=DOMAIN, @@ -248,9 +260,38 @@ async def test_reconfigure(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> assert entry.data == {"country": "DE", "province": "NW"} -async def test_reconfigure_incorrect_language( - hass: HomeAssistant, mock_setup_entry: AsyncMock -) -> None: +@pytest.mark.usefixtures("mock_setup_entry") +async def test_reconfigure_with_categories(hass: HomeAssistant) -> None: + """Test reconfigure flow with categories.""" + entry = MockConfigEntry( + domain=DOMAIN, + title="Unites States, TX", + data={"country": "US", "province": "TX"}, + ) + entry.add_to_hass(hass) + + result = await entry.start_reconfigure_flow(hass) + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_PROVINCE: "AL", + CONF_CATEGORIES: [UNOFFICIAL], + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + entry = hass.config_entries.async_get_entry(entry.entry_id) + assert entry.title == "United States, AL" + assert entry.data == {CONF_COUNTRY: "US", CONF_PROVINCE: "AL"} + assert entry.options == {CONF_CATEGORIES: ["unofficial"]} + + +@pytest.mark.usefixtures("mock_setup_entry") +async def test_reconfigure_incorrect_language(hass: HomeAssistant) -> None: """Test reconfigure flow default to English.""" hass.config.language = "en-XX" @@ -279,9 +320,8 @@ async def test_reconfigure_incorrect_language( assert entry.data == {"country": "DE", "province": "NW"} -async def test_reconfigure_entry_exists( - hass: HomeAssistant, mock_setup_entry: AsyncMock -) -> None: 
+@pytest.mark.usefixtures("mock_setup_entry") +async def test_reconfigure_entry_exists(hass: HomeAssistant) -> None: """Test reconfigure flow stops if other entry already exist.""" entry = MockConfigEntry( domain=DOMAIN, @@ -312,3 +352,92 @@ async def test_reconfigure_entry_exists( entry = hass.config_entries.async_get_entry(entry.entry_id) assert entry.title == "Germany, BW" assert entry.data == {"country": "DE", "province": "BW"} + + +async def test_form_with_options( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the flow with configuring options.""" + await hass.config.async_set_time_zone("America/Chicago") + zone = await dt_util.async_get_time_zone("America/Chicago") + # Oct 31st is a Friday. Unofficial holiday as Halloween + freezer.move_to(datetime(2024, 10, 31, 12, 0, 0, tzinfo=zone)) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_COUNTRY: "US", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_PROVINCE: "TX", + CONF_CATEGORIES: [UNOFFICIAL], + }, + ) + await hass.async_block_till_done(wait_background_tasks=True) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "United States, TX" + assert result["data"] == { + CONF_COUNTRY: "US", + CONF_PROVINCE: "TX", + } + assert result["options"] == { + CONF_CATEGORIES: ["unofficial"], + } + + state = hass.states.get("calendar.united_states_tx") + assert state + assert state.state == STATE_ON + + entries = hass.config_entries.async_entries(DOMAIN) + entry = entries[0] + result = await hass.config_entries.options.async_init(entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + {CONF_CATEGORIES: []}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONF_CATEGORIES: [], + } + + state = hass.states.get("calendar.united_states_tx") + assert state + assert state.state == STATE_OFF + + +@pytest.mark.usefixtures("mock_setup_entry") +async def test_options_abort_no_categories(hass: HomeAssistant) -> None: + """Test the options flow abort if no categories to select.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + data={CONF_COUNTRY: "SE"}, + title="Sweden", + ) + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_categories" From a7ba63bf86d75a1f3b7c5907cfa08f087c851edf Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Tue, 17 Dec 2024 20:22:07 +0100 Subject: [PATCH 0780/1198] Add missing CozyTouch servers to ConfigFlow expection handler in Overkiz (#131696) --- homeassistant/components/overkiz/config_flow.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/overkiz/config_flow.py b/homeassistant/components/overkiz/config_flow.py index 471a13d0de2..af7e277d928 100644 --- a/homeassistant/components/overkiz/config_flow.py +++ 
b/homeassistant/components/overkiz/config_flow.py @@ -151,9 +151,11 @@ class OverkizConfigFlow(ConfigFlow, domain=DOMAIN): except BadCredentialsException as exception: # If authentication with CozyTouch auth server is valid, but token is invalid # for Overkiz API server, the hardware is not supported. - if user_input[CONF_HUB] == Server.ATLANTIC_COZYTOUCH and not isinstance( - exception, CozyTouchBadCredentialsException - ): + if user_input[CONF_HUB] in { + Server.ATLANTIC_COZYTOUCH, + Server.SAUTER_COZYTOUCH, + Server.THERMOR_COZYTOUCH, + } and not isinstance(exception, CozyTouchBadCredentialsException): description_placeholders["unsupported_device"] = "CozyTouch" errors["base"] = "unsupported_hardware" else: From 8bbbbb00d5df3acb0650df86b9b4f7f974d6d1ec Mon Sep 17 00:00:00 2001 From: Richard Kroegel <42204099+rikroe@users.noreply.github.com> Date: Tue, 17 Dec 2024 20:43:09 +0100 Subject: [PATCH 0781/1198] Limit unique_id migration to platform for BMW (#131582) --- .../bmw_connected_drive/__init__.py | 38 +++++++++++-------- .../bmw_connected_drive/test_init.py | 27 ++++++++++++- 2 files changed, 48 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/bmw_connected_drive/__init__.py b/homeassistant/components/bmw_connected_drive/__init__.py index 7b6fb4119db..05fa3e3cab0 100644 --- a/homeassistant/components/bmw_connected_drive/__init__.py +++ b/homeassistant/components/bmw_connected_drive/__init__.py @@ -73,23 +73,29 @@ async def _async_migrate_entries( @callback def update_unique_id(entry: er.RegistryEntry) -> dict[str, str] | None: replacements = { - "charging_level_hv": "fuel_and_battery.remaining_battery_percent", - "fuel_percent": "fuel_and_battery.remaining_fuel_percent", - "ac_current_limit": "charging_profile.ac_current_limit", - "charging_start_time": "fuel_and_battery.charging_start_time", - "charging_end_time": "fuel_and_battery.charging_end_time", - "charging_status": "fuel_and_battery.charging_status", - "charging_target": "fuel_and_battery.charging_target", - "remaining_battery_percent": "fuel_and_battery.remaining_battery_percent", - "remaining_range_total": "fuel_and_battery.remaining_range_total", - "remaining_range_electric": "fuel_and_battery.remaining_range_electric", - "remaining_range_fuel": "fuel_and_battery.remaining_range_fuel", - "remaining_fuel": "fuel_and_battery.remaining_fuel", - "remaining_fuel_percent": "fuel_and_battery.remaining_fuel_percent", - "activity": "climate.activity", + Platform.SENSOR.value: { + "charging_level_hv": "fuel_and_battery.remaining_battery_percent", + "fuel_percent": "fuel_and_battery.remaining_fuel_percent", + "ac_current_limit": "charging_profile.ac_current_limit", + "charging_start_time": "fuel_and_battery.charging_start_time", + "charging_end_time": "fuel_and_battery.charging_end_time", + "charging_status": "fuel_and_battery.charging_status", + "charging_target": "fuel_and_battery.charging_target", + "remaining_battery_percent": "fuel_and_battery.remaining_battery_percent", + "remaining_range_total": "fuel_and_battery.remaining_range_total", + "remaining_range_electric": "fuel_and_battery.remaining_range_electric", + "remaining_range_fuel": "fuel_and_battery.remaining_range_fuel", + "remaining_fuel": "fuel_and_battery.remaining_fuel", + "remaining_fuel_percent": "fuel_and_battery.remaining_fuel_percent", + "activity": "climate.activity", + } } - if (key := entry.unique_id.split("-")[-1]) in replacements: - new_unique_id = entry.unique_id.replace(key, replacements[key]) + if (key := entry.unique_id.split("-")[-1]) 
in replacements.get( + entry.domain, [] + ): + new_unique_id = entry.unique_id.replace( + key, replacements[entry.domain][key] + ) _LOGGER.debug( "Migrating entity '%s' unique_id from '%s' to '%s'", entry.entity_id, diff --git a/tests/components/bmw_connected_drive/test_init.py b/tests/components/bmw_connected_drive/test_init.py index e523b2b3d02..8507cacc376 100644 --- a/tests/components/bmw_connected_drive/test_init.py +++ b/tests/components/bmw_connected_drive/test_init.py @@ -10,7 +10,7 @@ from homeassistant.components.bmw_connected_drive.const import ( CONF_READ_ONLY, DOMAIN as BMW_DOMAIN, ) -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -18,6 +18,9 @@ from . import FIXTURE_CONFIG_ENTRY from tests.common import MockConfigEntry +BINARY_SENSOR_DOMAIN = Platform.BINARY_SENSOR.value +SENSOR_DOMAIN = Platform.SENSOR.value + VIN = "WBYYYYYYYYYYYYYYY" VEHICLE_NAME = "i3 (+ REX)" VEHICLE_NAME_SLUG = "i3_rex" @@ -109,6 +112,28 @@ async def test_migrate_options_from_data(hass: HomeAssistant) -> None: f"{VIN}-mileage", f"{VIN}-mileage", ), + ( + { + "domain": SENSOR_DOMAIN, + "platform": BMW_DOMAIN, + "unique_id": f"{VIN}-charging_status", + "suggested_object_id": f"{VEHICLE_NAME} Charging Status", + "disabled_by": None, + }, + f"{VIN}-charging_status", + f"{VIN}-fuel_and_battery.charging_status", + ), + ( + { + "domain": BINARY_SENSOR_DOMAIN, + "platform": BMW_DOMAIN, + "unique_id": f"{VIN}-charging_status", + "suggested_object_id": f"{VEHICLE_NAME} Charging Status", + "disabled_by": None, + }, + f"{VIN}-charging_status", + f"{VIN}-charging_status", + ), ], ) async def test_migrate_unique_ids( From 5e5bebd7eb5d9b183697d3402ca0cffb19f7dbaf Mon Sep 17 00:00:00 2001 From: Jan-Philipp Benecke Date: Tue, 17 Dec 2024 20:43:53 +0100 Subject: [PATCH 0782/1198] Remove unused constants from SABnzbd (#133445) --- homeassistant/components/sabnzbd/const.py | 4 ---- homeassistant/components/sabnzbd/quality_scale.yaml | 5 +---- 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/homeassistant/components/sabnzbd/const.py b/homeassistant/components/sabnzbd/const.py index 991490f5716..f05b3f19e98 100644 --- a/homeassistant/components/sabnzbd/const.py +++ b/homeassistant/components/sabnzbd/const.py @@ -1,15 +1,11 @@ """Constants for the Sabnzbd component.""" DOMAIN = "sabnzbd" -DATA_SABNZBD = "sabnzbd" ATTR_SPEED = "speed" ATTR_API_KEY = "api_key" -DEFAULT_HOST = "localhost" -DEFAULT_PORT = 8080 DEFAULT_SPEED_LIMIT = "100" -DEFAULT_SSL = False SERVICE_PAUSE = "pause" SERVICE_RESUME = "resume" diff --git a/homeassistant/components/sabnzbd/quality_scale.yaml b/homeassistant/components/sabnzbd/quality_scale.yaml index c3fea2427ce..f5bae1c692b 100644 --- a/homeassistant/components/sabnzbd/quality_scale.yaml +++ b/homeassistant/components/sabnzbd/quality_scale.yaml @@ -6,10 +6,7 @@ rules: Do not remove services when all config entries are removed. appropriate-polling: done brands: done - common-modules: - status: todo - comment: | - const.py has unused variables. 
+ common-modules: done config-flow-test-coverage: done config-flow: done dependency-transparency: done From c9ca1f63eacacc265931411eac0c607b6707455d Mon Sep 17 00:00:00 2001 From: Klaas Schoute Date: Tue, 17 Dec 2024 20:44:24 +0100 Subject: [PATCH 0783/1198] Allow only single instance of energyzero integration (#133443) --- .../components/energyzero/manifest.json | 4 +++- homeassistant/generated/integrations.json | 5 +++-- tests/components/energyzero/test_config_flow.py | 17 +++++++++++++++++ 3 files changed, 23 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/energyzero/manifest.json b/homeassistant/components/energyzero/manifest.json index bb867e88d85..b647faebe1d 100644 --- a/homeassistant/components/energyzero/manifest.json +++ b/homeassistant/components/energyzero/manifest.json @@ -4,6 +4,8 @@ "codeowners": ["@klaasnicolaas"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/energyzero", + "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["energyzero==2.1.1"] + "requirements": ["energyzero==2.1.1"], + "single_config_entry": true } diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index a94962b458b..7cb7a5a1aef 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -1653,9 +1653,10 @@ }, "energyzero": { "name": "EnergyZero", - "integration_type": "hub", + "integration_type": "service", "config_flow": true, - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "single_config_entry": true }, "enigma2": { "name": "Enigma2 (OpenWebif)", diff --git a/tests/components/energyzero/test_config_flow.py b/tests/components/energyzero/test_config_flow.py index a9fe8534fd5..4c4e831e448 100644 --- a/tests/components/energyzero/test_config_flow.py +++ b/tests/components/energyzero/test_config_flow.py @@ -9,6 +9,8 @@ from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from tests.common import MockConfigEntry + async def test_full_user_flow( hass: HomeAssistant, @@ -33,3 +35,18 @@ async def test_full_user_flow( assert result2 == snapshot assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_single_instance( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test abort when setting up a duplicate entry.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "single_instance_allowed" From 9c26654db7a0d9de95c33f2da532b9535558b8bc Mon Sep 17 00:00:00 2001 From: Louis Christ Date: Tue, 17 Dec 2024 20:44:38 +0100 Subject: [PATCH 0784/1198] Use entity services in bluesound integration (#129266) --- .../components/bluesound/__init__.py | 2 - .../components/bluesound/manifest.json | 2 +- .../components/bluesound/media_player.py | 191 +++++++++++------- .../components/bluesound/services.py | 68 ------- homeassistant/components/bluesound/utils.py | 13 ++ requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/bluesound/conftest.py | 8 +- .../components/bluesound/test_media_player.py | 18 +- 9 files changed, 142 insertions(+), 164 deletions(-) delete mode 100644 homeassistant/components/bluesound/services.py diff --git a/homeassistant/components/bluesound/__init__.py 
b/homeassistant/components/bluesound/__init__.py index 82fe9b00d57..b3facc0b8ac 100644 --- a/homeassistant/components/bluesound/__init__.py +++ b/homeassistant/components/bluesound/__init__.py @@ -14,7 +14,6 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.typing import ConfigType from .const import DOMAIN -from .services import setup_services CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) @@ -36,7 +35,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Bluesound.""" if DOMAIN not in hass.data: hass.data[DOMAIN] = [] - setup_services(hass) return True diff --git a/homeassistant/components/bluesound/manifest.json b/homeassistant/components/bluesound/manifest.json index 462112a8b78..151c1512b74 100644 --- a/homeassistant/components/bluesound/manifest.json +++ b/homeassistant/components/bluesound/manifest.json @@ -6,7 +6,7 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/bluesound", "iot_class": "local_polling", - "requirements": ["pyblu==1.0.4"], + "requirements": ["pyblu==2.0.0"], "zeroconf": [ { "type": "_musc._tcp.local." diff --git a/homeassistant/components/bluesound/media_player.py b/homeassistant/components/bluesound/media_player.py index 38ef78fad3a..4882d543617 100644 --- a/homeassistant/components/bluesound/media_player.py +++ b/homeassistant/components/bluesound/media_player.py @@ -28,18 +28,26 @@ from homeassistant.const import CONF_HOST, CONF_HOSTS, CONF_NAME, CONF_PORT from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import config_validation as cv, issue_registry as ir +from homeassistant.helpers import ( + config_validation as cv, + entity_platform, + issue_registry as ir, +) from homeassistant.helpers.device_registry import ( CONNECTION_NETWORK_MAC, DeviceInfo, format_mac, ) +from homeassistant.helpers.dispatcher import ( + async_dispatcher_connect, + async_dispatcher_send, +) from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType import homeassistant.util.dt as dt_util from .const import ATTR_BLUESOUND_GROUP, ATTR_MASTER, DOMAIN, INTEGRATION_TITLE -from .utils import format_unique_id +from .utils import dispatcher_join_signal, dispatcher_unjoin_signal, format_unique_id if TYPE_CHECKING: from . 
import BluesoundConfigEntry @@ -51,6 +59,11 @@ SCAN_INTERVAL = timedelta(minutes=15) DATA_BLUESOUND = DOMAIN DEFAULT_PORT = 11000 +SERVICE_CLEAR_TIMER = "clear_sleep_timer" +SERVICE_JOIN = "join" +SERVICE_SET_TIMER = "set_sleep_timer" +SERVICE_UNJOIN = "unjoin" + NODE_OFFLINE_CHECK_TIMEOUT = 180 NODE_RETRY_INITIATION = timedelta(minutes=3) @@ -130,6 +143,18 @@ async def async_setup_entry( config_entry.runtime_data.sync_status, ) + platform = entity_platform.async_get_current_platform() + platform.async_register_entity_service( + SERVICE_SET_TIMER, None, "async_increase_timer" + ) + platform.async_register_entity_service( + SERVICE_CLEAR_TIMER, None, "async_clear_timer" + ) + platform.async_register_entity_service( + SERVICE_JOIN, {vol.Required(ATTR_MASTER): cv.entity_id}, "async_join" + ) + platform.async_register_entity_service(SERVICE_UNJOIN, None, "async_unjoin") + hass.data[DATA_BLUESOUND].append(bluesound_player) async_add_entities([bluesound_player], update_before_add=True) @@ -175,13 +200,12 @@ class BluesoundPlayer(MediaPlayerEntity): self._status: Status | None = None self._inputs: list[Input] = [] self._presets: list[Preset] = [] - self._muted = False - self._master: BluesoundPlayer | None = None - self._is_master = False self._group_name: str | None = None self._group_list: list[str] = [] self._bluesound_device_name = sync_status.name self._player = player + self._is_leader = False + self._leader: BluesoundPlayer | None = None self._attr_unique_id = format_unique_id(sync_status.mac, port) # there should always be one player with the default port per mac @@ -250,6 +274,22 @@ class BluesoundPlayer(MediaPlayerEntity): name=f"bluesound.poll_sync_status_loop_{self.host}:{self.port}", ) + assert self._sync_status.id is not None + self.async_on_remove( + async_dispatcher_connect( + self.hass, + dispatcher_join_signal(self.entity_id), + self.async_add_follower, + ) + ) + self.async_on_remove( + async_dispatcher_connect( + self.hass, + dispatcher_unjoin_signal(self._sync_status.id), + self.async_remove_follower, + ) + ) + async def async_will_remove_from_hass(self) -> None: """Stop the polling task.""" await super().async_will_remove_from_hass() @@ -317,25 +357,25 @@ class BluesoundPlayer(MediaPlayerEntity): self._group_list = self.rebuild_bluesound_group() - if sync_status.master is not None: - self._is_master = False - master_id = f"{sync_status.master.ip}:{sync_status.master.port}" - master_device = [ + if sync_status.leader is not None: + self._is_leader = False + leader_id = f"{sync_status.leader.ip}:{sync_status.leader.port}" + leader_device = [ device for device in self.hass.data[DATA_BLUESOUND] - if device.id == master_id + if device.id == leader_id ] - if master_device and master_id != self.id: - self._master = master_device[0] + if leader_device and leader_id != self.id: + self._leader = leader_device[0] else: - self._master = None - _LOGGER.error("Master not found %s", master_id) + self._leader = None + _LOGGER.error("Leader not found %s", leader_id) else: - if self._master is not None: - self._master = None - slaves = self._sync_status.slaves - self._is_master = slaves is not None + if self._leader is not None: + self._leader = None + followers = self._sync_status.followers + self._is_leader = followers is not None self.async_write_ha_state() @@ -355,7 +395,7 @@ class BluesoundPlayer(MediaPlayerEntity): if self._status is None: return MediaPlayerState.OFF - if self.is_grouped and not self.is_master: + if self.is_grouped and not self.is_leader: return MediaPlayerState.IDLE 
match self._status.state: @@ -369,7 +409,7 @@ class BluesoundPlayer(MediaPlayerEntity): @property def media_title(self) -> str | None: """Title of current playing media.""" - if self._status is None or (self.is_grouped and not self.is_master): + if self._status is None or (self.is_grouped and not self.is_leader): return None return self._status.name @@ -380,7 +420,7 @@ class BluesoundPlayer(MediaPlayerEntity): if self._status is None: return None - if self.is_grouped and not self.is_master: + if self.is_grouped and not self.is_leader: return self._group_name return self._status.artist @@ -388,7 +428,7 @@ class BluesoundPlayer(MediaPlayerEntity): @property def media_album_name(self) -> str | None: """Artist of current playing media (Music track only).""" - if self._status is None or (self.is_grouped and not self.is_master): + if self._status is None or (self.is_grouped and not self.is_leader): return None return self._status.album @@ -396,7 +436,7 @@ class BluesoundPlayer(MediaPlayerEntity): @property def media_image_url(self) -> str | None: """Image url of current playing media.""" - if self._status is None or (self.is_grouped and not self.is_master): + if self._status is None or (self.is_grouped and not self.is_leader): return None url = self._status.image @@ -411,7 +451,7 @@ class BluesoundPlayer(MediaPlayerEntity): @property def media_position(self) -> int | None: """Position of current playing media in seconds.""" - if self._status is None or (self.is_grouped and not self.is_master): + if self._status is None or (self.is_grouped and not self.is_leader): return None mediastate = self.state @@ -430,7 +470,7 @@ class BluesoundPlayer(MediaPlayerEntity): @property def media_duration(self) -> int | None: """Duration of current playing media in seconds.""" - if self._status is None or (self.is_grouped and not self.is_master): + if self._status is None or (self.is_grouped and not self.is_leader): return None duration = self._status.total_seconds @@ -489,7 +529,7 @@ class BluesoundPlayer(MediaPlayerEntity): @property def source_list(self) -> list[str] | None: """List of available input sources.""" - if self._status is None or (self.is_grouped and not self.is_master): + if self._status is None or (self.is_grouped and not self.is_leader): return None sources = [x.text for x in self._inputs] @@ -500,7 +540,7 @@ class BluesoundPlayer(MediaPlayerEntity): @property def source(self) -> str | None: """Name of the current input source.""" - if self._status is None or (self.is_grouped and not self.is_master): + if self._status is None or (self.is_grouped and not self.is_leader): return None if self._status.input_id is not None: @@ -520,7 +560,7 @@ class BluesoundPlayer(MediaPlayerEntity): if self._status is None: return MediaPlayerEntityFeature(0) - if self.is_grouped and not self.is_master: + if self.is_grouped and not self.is_leader: return ( MediaPlayerEntityFeature.VOLUME_STEP | MediaPlayerEntityFeature.VOLUME_SET @@ -560,14 +600,17 @@ class BluesoundPlayer(MediaPlayerEntity): return supported @property - def is_master(self) -> bool: - """Return true if player is a coordinator.""" - return self._is_master + def is_leader(self) -> bool: + """Return true if player is leader of a group.""" + return self._sync_status.followers is not None @property def is_grouped(self) -> bool: - """Return true if player is a coordinator.""" - return self._master is not None or self._is_master + """Return true if player is member or leader of a group.""" + return ( + self._sync_status.followers is not None + or 
self._sync_status.leader is not None + ) @property def shuffle(self) -> bool: @@ -580,25 +623,25 @@ class BluesoundPlayer(MediaPlayerEntity): async def async_join(self, master: str) -> None: """Join the player to a group.""" - master_device = [ - device - for device in self.hass.data[DATA_BLUESOUND] - if device.entity_id == master - ] + if master == self.entity_id: + raise ServiceValidationError("Cannot join player to itself") - if len(master_device) > 0: - if self.id == master_device[0].id: - raise ServiceValidationError("Cannot join player to itself") + _LOGGER.debug("Trying to join player: %s", self.id) + async_dispatcher_send( + self.hass, dispatcher_join_signal(master), self.host, self.port + ) - _LOGGER.debug( - "Trying to join player: %s to master: %s", - self.id, - master_device[0].id, - ) + async def async_unjoin(self) -> None: + """Unjoin the player from a group.""" + if self._sync_status.leader is None: + return - await master_device[0].async_add_slave(self) - else: - _LOGGER.error("Master not found %s", master_device) + leader_id = f"{self._sync_status.leader.ip}:{self._sync_status.leader.port}" + + _LOGGER.debug("Trying to unjoin player: %s", self.id) + async_dispatcher_send( + self.hass, dispatcher_unjoin_signal(leader_id), self.host, self.port + ) @property def extra_state_attributes(self) -> dict[str, Any] | None: @@ -607,31 +650,31 @@ class BluesoundPlayer(MediaPlayerEntity): if self._group_list: attributes = {ATTR_BLUESOUND_GROUP: self._group_list} - attributes[ATTR_MASTER] = self._is_master + attributes[ATTR_MASTER] = self.is_leader return attributes def rebuild_bluesound_group(self) -> list[str]: """Rebuild the list of entities in speaker group.""" - if self.sync_status.master is None and self.sync_status.slaves is None: + if self.sync_status.leader is None and self.sync_status.followers is None: return [] player_entities: list[BluesoundPlayer] = self.hass.data[DATA_BLUESOUND] leader_sync_status: SyncStatus | None = None - if self.sync_status.master is None: + if self.sync_status.leader is None: leader_sync_status = self.sync_status else: - required_id = f"{self.sync_status.master.ip}:{self.sync_status.master.port}" + required_id = f"{self.sync_status.leader.ip}:{self.sync_status.leader.port}" for x in player_entities: if x.sync_status.id == required_id: leader_sync_status = x.sync_status break - if leader_sync_status is None or leader_sync_status.slaves is None: + if leader_sync_status is None or leader_sync_status.followers is None: return [] - follower_ids = [f"{x.ip}:{x.port}" for x in leader_sync_status.slaves] + follower_ids = [f"{x.ip}:{x.port}" for x in leader_sync_status.followers] follower_names = [ x.sync_status.name for x in player_entities @@ -640,21 +683,13 @@ class BluesoundPlayer(MediaPlayerEntity): follower_names.insert(0, leader_sync_status.name) return follower_names - async def async_unjoin(self) -> None: - """Unjoin the player from a group.""" - if self._master is None: - return + async def async_add_follower(self, host: str, port: int) -> None: + """Add follower to leader.""" + await self._player.add_follower(host, port) - _LOGGER.debug("Trying to unjoin player: %s", self.id) - await self._master.async_remove_slave(self) - - async def async_add_slave(self, slave_device: BluesoundPlayer) -> None: - """Add slave to master.""" - await self._player.add_slave(slave_device.host, slave_device.port) - - async def async_remove_slave(self, slave_device: BluesoundPlayer) -> None: - """Remove slave to master.""" - await 
self._player.remove_slave(slave_device.host, slave_device.port) + async def async_remove_follower(self, host: str, port: int) -> None: + """Remove follower to leader.""" + await self._player.remove_follower(host, port) async def async_increase_timer(self) -> int: """Increase sleep time on player.""" @@ -672,7 +707,7 @@ class BluesoundPlayer(MediaPlayerEntity): async def async_select_source(self, source: str) -> None: """Select input source.""" - if self.is_grouped and not self.is_master: + if self.is_grouped and not self.is_leader: return # presets and inputs might have the same name; presets have priority @@ -691,49 +726,49 @@ class BluesoundPlayer(MediaPlayerEntity): async def async_clear_playlist(self) -> None: """Clear players playlist.""" - if self.is_grouped and not self.is_master: + if self.is_grouped and not self.is_leader: return await self._player.clear() async def async_media_next_track(self) -> None: """Send media_next command to media player.""" - if self.is_grouped and not self.is_master: + if self.is_grouped and not self.is_leader: return await self._player.skip() async def async_media_previous_track(self) -> None: """Send media_previous command to media player.""" - if self.is_grouped and not self.is_master: + if self.is_grouped and not self.is_leader: return await self._player.back() async def async_media_play(self) -> None: """Send media_play command to media player.""" - if self.is_grouped and not self.is_master: + if self.is_grouped and not self.is_leader: return await self._player.play() async def async_media_pause(self) -> None: """Send media_pause command to media player.""" - if self.is_grouped and not self.is_master: + if self.is_grouped and not self.is_leader: return await self._player.pause() async def async_media_stop(self) -> None: """Send stop command.""" - if self.is_grouped and not self.is_master: + if self.is_grouped and not self.is_leader: return await self._player.stop() async def async_media_seek(self, position: float) -> None: """Send media_seek command to media player.""" - if self.is_grouped and not self.is_master: + if self.is_grouped and not self.is_leader: return await self._player.play(seek=int(position)) @@ -742,7 +777,7 @@ class BluesoundPlayer(MediaPlayerEntity): self, media_type: MediaType | str, media_id: str, **kwargs: Any ) -> None: """Send the play_media command to the media player.""" - if self.is_grouped and not self.is_master: + if self.is_grouped and not self.is_leader: return if media_source.is_media_source_id(media_id): diff --git a/homeassistant/components/bluesound/services.py b/homeassistant/components/bluesound/services.py deleted file mode 100644 index 06a507420f8..00000000000 --- a/homeassistant/components/bluesound/services.py +++ /dev/null @@ -1,68 +0,0 @@ -"""Support for Bluesound devices.""" - -from __future__ import annotations - -from typing import NamedTuple - -import voluptuous as vol - -from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant, ServiceCall -from homeassistant.helpers import config_validation as cv - -from .const import ATTR_MASTER, DOMAIN - -SERVICE_CLEAR_TIMER = "clear_sleep_timer" -SERVICE_JOIN = "join" -SERVICE_SET_TIMER = "set_sleep_timer" -SERVICE_UNJOIN = "unjoin" - -BS_SCHEMA = vol.Schema({vol.Optional(ATTR_ENTITY_ID): cv.entity_ids}) - -BS_JOIN_SCHEMA = BS_SCHEMA.extend({vol.Required(ATTR_MASTER): cv.entity_id}) - - -class ServiceMethodDetails(NamedTuple): - """Details for SERVICE_TO_METHOD mapping.""" - - method: str - schema: vol.Schema - - 
-SERVICE_TO_METHOD = { - SERVICE_JOIN: ServiceMethodDetails(method="async_join", schema=BS_JOIN_SCHEMA), - SERVICE_UNJOIN: ServiceMethodDetails(method="async_unjoin", schema=BS_SCHEMA), - SERVICE_SET_TIMER: ServiceMethodDetails( - method="async_increase_timer", schema=BS_SCHEMA - ), - SERVICE_CLEAR_TIMER: ServiceMethodDetails( - method="async_clear_timer", schema=BS_SCHEMA - ), -} - - -def setup_services(hass: HomeAssistant) -> None: - """Set up services for Bluesound component.""" - - async def async_service_handler(service: ServiceCall) -> None: - """Map services to method of Bluesound devices.""" - if not (method := SERVICE_TO_METHOD.get(service.service)): - return - - params = { - key: value for key, value in service.data.items() if key != ATTR_ENTITY_ID - } - if entity_ids := service.data.get(ATTR_ENTITY_ID): - target_players = [ - player for player in hass.data[DOMAIN] if player.entity_id in entity_ids - ] - else: - target_players = hass.data[DOMAIN] - - for player in target_players: - await getattr(player, method.method)(**params) - - for service, method in SERVICE_TO_METHOD.items(): - hass.services.async_register( - DOMAIN, service, async_service_handler, schema=method.schema - ) diff --git a/homeassistant/components/bluesound/utils.py b/homeassistant/components/bluesound/utils.py index 89a6fd1e787..5df5b32de95 100644 --- a/homeassistant/components/bluesound/utils.py +++ b/homeassistant/components/bluesound/utils.py @@ -6,3 +6,16 @@ from homeassistant.helpers.device_registry import format_mac def format_unique_id(mac: str, port: int) -> str: """Generate a unique ID based on the MAC address and port number.""" return f"{format_mac(mac)}-{port}" + + +def dispatcher_join_signal(entity_id: str) -> str: + """Join an entity ID with a signal.""" + return f"bluesound_join_{entity_id}" + + +def dispatcher_unjoin_signal(leader_id: str) -> str: + """Unjoin an entity ID with a signal. + + Id is ip_address:port. This can be obtained from sync_status.id. 
+ """ + return f"bluesound_unjoin_{leader_id}" diff --git a/requirements_all.txt b/requirements_all.txt index 2540a297334..2bcbf0535c6 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1803,7 +1803,7 @@ pybbox==0.0.5-alpha pyblackbird==0.6 # homeassistant.components.bluesound -pyblu==1.0.4 +pyblu==2.0.0 # homeassistant.components.neato pybotvac==0.0.25 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index fe528899ad3..9cdb1039503 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1477,7 +1477,7 @@ pybalboa==1.0.2 pyblackbird==0.6 # homeassistant.components.bluesound -pyblu==1.0.4 +pyblu==2.0.0 # homeassistant.components.neato pybotvac==0.0.25 diff --git a/tests/components/bluesound/conftest.py b/tests/components/bluesound/conftest.py index b4ee61dee57..717c9f61850 100644 --- a/tests/components/bluesound/conftest.py +++ b/tests/components/bluesound/conftest.py @@ -81,11 +81,11 @@ class PlayerMockData: volume_db=0.5, volume=50, group=None, - master=None, - slaves=None, + leader=None, + followers=None, zone=None, - zone_master=None, - zone_slave=None, + zone_leader=None, + zone_follower=None, mute_volume_db=None, mute_volume=None, ) diff --git a/tests/components/bluesound/test_media_player.py b/tests/components/bluesound/test_media_player.py index 217225628f2..a43696a0a7f 100644 --- a/tests/components/bluesound/test_media_player.py +++ b/tests/components/bluesound/test_media_player.py @@ -11,7 +11,7 @@ from syrupy.filters import props from homeassistant.components.bluesound import DOMAIN as BLUESOUND_DOMAIN from homeassistant.components.bluesound.const import ATTR_MASTER -from homeassistant.components.bluesound.services import ( +from homeassistant.components.bluesound.media_player import ( SERVICE_CLEAR_TIMER, SERVICE_JOIN, SERVICE_SET_TIMER, @@ -259,7 +259,7 @@ async def test_join( blocking=True, ) - player_mocks.player_data_secondary.player.add_slave.assert_called_once_with( + player_mocks.player_data_secondary.player.add_follower.assert_called_once_with( "1.1.1.1", 11000 ) @@ -273,7 +273,7 @@ async def test_unjoin( """Test the unjoin action.""" updated_sync_status = dataclasses.replace( player_mocks.player_data.sync_status_long_polling_mock.get(), - master=PairedPlayer("2.2.2.2", 11000), + leader=PairedPlayer("2.2.2.2", 11000), ) player_mocks.player_data.sync_status_long_polling_mock.set(updated_sync_status) @@ -287,7 +287,7 @@ async def test_unjoin( blocking=True, ) - player_mocks.player_data_secondary.player.remove_slave.assert_called_once_with( + player_mocks.player_data_secondary.player.remove_follower.assert_called_once_with( "1.1.1.1", 11000 ) @@ -297,7 +297,7 @@ async def test_attr_master( setup_config_entry: None, player_mocks: PlayerMocks, ) -> None: - """Test the media player master.""" + """Test the media player leader.""" attr_master = hass.states.get("media_player.player_name1111").attributes[ ATTR_MASTER ] @@ -305,7 +305,7 @@ async def test_attr_master( updated_sync_status = dataclasses.replace( player_mocks.player_data.sync_status_long_polling_mock.get(), - slaves=[PairedPlayer("2.2.2.2", 11000)], + followers=[PairedPlayer("2.2.2.2", 11000)], ) player_mocks.player_data.sync_status_long_polling_mock.set(updated_sync_status) @@ -333,7 +333,7 @@ async def test_attr_bluesound_group( updated_sync_status = dataclasses.replace( player_mocks.player_data.sync_status_long_polling_mock.get(), - slaves=[PairedPlayer("2.2.2.2", 11000)], + followers=[PairedPlayer("2.2.2.2", 11000)], ) 
player_mocks.player_data.sync_status_long_polling_mock.set(updated_sync_status) @@ -361,7 +361,7 @@ async def test_attr_bluesound_group_for_follower( updated_sync_status = dataclasses.replace( player_mocks.player_data.sync_status_long_polling_mock.get(), - slaves=[PairedPlayer("2.2.2.2", 11000)], + followers=[PairedPlayer("2.2.2.2", 11000)], ) player_mocks.player_data.sync_status_long_polling_mock.set(updated_sync_status) @@ -370,7 +370,7 @@ async def test_attr_bluesound_group_for_follower( updated_sync_status = dataclasses.replace( player_mocks.player_data_secondary.sync_status_long_polling_mock.get(), - master=PairedPlayer("1.1.1.1", 11000), + leader=PairedPlayer("1.1.1.1", 11000), ) player_mocks.player_data_secondary.sync_status_long_polling_mock.set( updated_sync_status From 935bf3fb112e609f50ba7cdecf54b9f27820acc8 Mon Sep 17 00:00:00 2001 From: jimmyd-be <34766203+jimmyd-be@users.noreply.github.com> Date: Tue, 17 Dec 2024 20:49:42 +0100 Subject: [PATCH 0785/1198] Bump renson-endura-delta to 1.7.2 (#129491) --- homeassistant/components/renson/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/renson/manifest.json b/homeassistant/components/renson/manifest.json index fa94207748e..fcc482959f2 100644 --- a/homeassistant/components/renson/manifest.json +++ b/homeassistant/components/renson/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/renson", "iot_class": "local_polling", - "requirements": ["renson-endura-delta==1.7.1"] + "requirements": ["renson-endura-delta==1.7.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 2bcbf0535c6..37504e5ec41 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2566,7 +2566,7 @@ regenmaschine==2024.03.0 renault-api==0.2.8 # homeassistant.components.renson -renson-endura-delta==1.7.1 +renson-endura-delta==1.7.2 # homeassistant.components.reolink reolink-aio==0.11.5 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 9cdb1039503..55bb0e6ac1f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2063,7 +2063,7 @@ regenmaschine==2024.03.0 renault-api==0.2.8 # homeassistant.components.renson -renson-endura-delta==1.7.1 +renson-endura-delta==1.7.2 # homeassistant.components.reolink reolink-aio==0.11.5 From b124ebeb1f58b83f7a1ec54398ac13854ed43268 Mon Sep 17 00:00:00 2001 From: benjamin-dcs <78026082+benjamin-dcs@users.noreply.github.com> Date: Tue, 17 Dec 2024 20:54:30 +0100 Subject: [PATCH 0786/1198] Differentiate File integration entries by prefixing the title with the platform instead (#131016) Differentiate File integration entries by prefixes the title with the platform --- homeassistant/components/file/config_flow.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/file/config_flow.py b/homeassistant/components/file/config_flow.py index 992635d05fd..1c4fdbe5c84 100644 --- a/homeassistant/components/file/config_flow.py +++ b/homeassistant/components/file/config_flow.py @@ -32,7 +32,7 @@ from homeassistant.helpers.selector import ( TextSelectorType, ) -from .const import CONF_TIMESTAMP, DEFAULT_NAME, DOMAIN +from .const import CONF_TIMESTAMP, DOMAIN BOOLEAN_SELECTOR = BooleanSelector(BooleanSelectorConfig()) TEMPLATE_SELECTOR = TemplateSelector(TemplateSelectorConfig()) @@ -105,7 +105,7 @@ class FileConfigFlowHandler(ConfigFlow, domain=DOMAIN): if not await 
self.validate_file_path(user_input[CONF_FILE_PATH]): errors[CONF_FILE_PATH] = "not_allowed" else: - title = f"{DEFAULT_NAME} [{user_input[CONF_FILE_PATH]}]" + title = f"{platform.capitalize()} [{user_input[CONF_FILE_PATH]}]" data = deepcopy(user_input) options = {} for key, value in user_input.items(): From 5014f305bf06ce11977e4bc8167ee501565fe3e3 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Benecke Date: Tue, 17 Dec 2024 20:57:04 +0100 Subject: [PATCH 0787/1198] Mark docs-removal-instructions for SABnzbd as done (#133446) --- homeassistant/components/sabnzbd/quality_scale.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/sabnzbd/quality_scale.yaml b/homeassistant/components/sabnzbd/quality_scale.yaml index f5bae1c692b..5539359d977 100644 --- a/homeassistant/components/sabnzbd/quality_scale.yaml +++ b/homeassistant/components/sabnzbd/quality_scale.yaml @@ -16,7 +16,7 @@ rules: The integration has deprecated the actions, thus the documentation has been removed. docs-high-level-description: done docs-installation-instructions: done - docs-removal-instructions: todo + docs-removal-instructions: done entity-event-setup: status: exempt comment: | From 21c3bf48f93d49703bcdfd73c53136f520b3aca6 Mon Sep 17 00:00:00 2001 From: Klaas Schoute Date: Tue, 17 Dec 2024 21:02:39 +0100 Subject: [PATCH 0788/1198] Allow only single instance of easyenergy integration (#133447) --- .../components/easyenergy/manifest.json | 4 +++- homeassistant/generated/integrations.json | 5 +++-- tests/components/easyenergy/test_config_flow.py | 17 +++++++++++++++++ 3 files changed, 23 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/easyenergy/manifest.json b/homeassistant/components/easyenergy/manifest.json index 25432196169..5cecb1d49f6 100644 --- a/homeassistant/components/easyenergy/manifest.json +++ b/homeassistant/components/easyenergy/manifest.json @@ -4,6 +4,8 @@ "codeowners": ["@klaasnicolaas"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/easyenergy", + "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["easyenergy==2.1.2"] + "requirements": ["easyenergy==2.1.2"], + "single_config_entry": true } diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 7cb7a5a1aef..5fc09fcd70f 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -1441,9 +1441,10 @@ }, "easyenergy": { "name": "easyEnergy", - "integration_type": "hub", + "integration_type": "service", "config_flow": true, - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "single_config_entry": true }, "ebox": { "name": "EBox", diff --git a/tests/components/easyenergy/test_config_flow.py b/tests/components/easyenergy/test_config_flow.py index da7048793b3..44bc8421126 100644 --- a/tests/components/easyenergy/test_config_flow.py +++ b/tests/components/easyenergy/test_config_flow.py @@ -7,6 +7,8 @@ from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from tests.common import MockConfigEntry + async def test_full_user_flow( hass: HomeAssistant, @@ -31,3 +33,18 @@ async def test_full_user_flow( assert result2.get("data") == {} assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_single_instance( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test abort when setting up a duplicate entry.""" + 
mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "single_instance_allowed" From eae25023e78a718836c561959fa5b6712a51e2b3 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Benecke Date: Tue, 17 Dec 2024 21:27:41 +0100 Subject: [PATCH 0789/1198] Do not remove services when last config entry is unloaded in SABnzbd (#133449) --- homeassistant/components/sabnzbd/__init__.py | 16 +--------------- .../components/sabnzbd/quality_scale.yaml | 5 +---- 2 files changed, 2 insertions(+), 19 deletions(-) diff --git a/homeassistant/components/sabnzbd/__init__.py b/homeassistant/components/sabnzbd/__init__.py index 2e3d6dd613c..fee459340f3 100644 --- a/homeassistant/components/sabnzbd/__init__.py +++ b/homeassistant/components/sabnzbd/__init__.py @@ -8,7 +8,6 @@ from typing import Any import voluptuous as vol -from homeassistant.config_entries import ConfigEntryState from homeassistant.const import Platform from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError @@ -165,17 +164,4 @@ async def async_setup_entry(hass: HomeAssistant, entry: SabnzbdConfigEntry) -> b async def async_unload_entry(hass: HomeAssistant, entry: SabnzbdConfigEntry) -> bool: """Unload a Sabnzbd config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - loaded_entries = [ - entry - for entry in hass.config_entries.async_entries(DOMAIN) - if entry.state == ConfigEntryState.LOADED - ] - if len(loaded_entries) == 1: - # If this is the last loaded instance of Sabnzbd, deregister any services - # defined during integration setup: - for service_name in SERVICES: - hass.services.async_remove(DOMAIN, service_name) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/sabnzbd/quality_scale.yaml b/homeassistant/components/sabnzbd/quality_scale.yaml index 5539359d977..ef4e72b4936 100644 --- a/homeassistant/components/sabnzbd/quality_scale.yaml +++ b/homeassistant/components/sabnzbd/quality_scale.yaml @@ -1,9 +1,6 @@ rules: # Bronze - action-setup: - status: todo - comment: | - Do not remove services when all config entries are removed. 
+ action-setup: done appropriate-polling: done brands: done common-modules: done From f8cd6204ca5d3722b8b60d97fb5fd2355907d008 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Tue, 17 Dec 2024 21:30:49 +0100 Subject: [PATCH 0790/1198] Fix reconfigure in Nord Pool (#133431) --- homeassistant/components/nordpool/config_flow.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/nordpool/config_flow.py b/homeassistant/components/nordpool/config_flow.py index 1d75d825e47..b3b807badad 100644 --- a/homeassistant/components/nordpool/config_flow.py +++ b/homeassistant/components/nordpool/config_flow.py @@ -99,10 +99,10 @@ class NordpoolConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the reconfiguration step.""" + reconfigure_entry = self._get_reconfigure_entry() errors: dict[str, str] = {} if user_input: errors = await test_api(self.hass, user_input) - reconfigure_entry = self._get_reconfigure_entry() if not errors: return self.async_update_reload_and_abort( reconfigure_entry, data_updates=user_input @@ -110,6 +110,8 @@ class NordpoolConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="reconfigure", - data_schema=DATA_SCHEMA, + data_schema=self.add_suggested_values_to_schema( + DATA_SCHEMA, user_input or reconfigure_entry.data + ), errors=errors, ) From 4c60e36f4f34590fd994fc4a2a23da8d61bfc944 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Tue, 17 Dec 2024 21:59:20 +0100 Subject: [PATCH 0791/1198] Add Get price service to Nord Pool (#130185) * Add get_price service to Nord Pool * Tests and fixes * Fixes * Not used fixtures * update qs * Fixes * docstring * Remove selector from strings * Mod service --- homeassistant/components/nordpool/__init__.py | 12 ++ homeassistant/components/nordpool/icons.json | 5 + .../components/nordpool/quality_scale.yaml | 15 +- homeassistant/components/nordpool/services.py | 129 ++++++++++++++ .../components/nordpool/services.yaml | 48 +++++ .../components/nordpool/strings.json | 39 ++++ .../nordpool/snapshots/test_services.ambr | 127 ++++++++++++++ tests/components/nordpool/test_services.py | 166 ++++++++++++++++++ 8 files changed, 529 insertions(+), 12 deletions(-) create mode 100644 homeassistant/components/nordpool/services.py create mode 100644 homeassistant/components/nordpool/services.yaml create mode 100644 tests/components/nordpool/snapshots/test_services.ambr create mode 100644 tests/components/nordpool/test_services.py diff --git a/homeassistant/components/nordpool/__init__.py b/homeassistant/components/nordpool/__init__.py index 82db98e2148..83f8edc8a8d 100644 --- a/homeassistant/components/nordpool/__init__.py +++ b/homeassistant/components/nordpool/__init__.py @@ -5,13 +5,25 @@ from __future__ import annotations from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.typing import ConfigType from homeassistant.util import dt as dt_util from .const import DOMAIN, PLATFORMS from .coordinator import NordPoolDataUpdateCoordinator +from .services import async_setup_services type NordPoolConfigEntry = ConfigEntry[NordPoolDataUpdateCoordinator] +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) + + +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the Nord Pool service.""" + + 
async_setup_services(hass) + return True + async def async_setup_entry(hass: HomeAssistant, entry: NordPoolConfigEntry) -> bool: """Set up Nord Pool from a config entry.""" diff --git a/homeassistant/components/nordpool/icons.json b/homeassistant/components/nordpool/icons.json index 85434a2d09b..5a1a3df3d92 100644 --- a/homeassistant/components/nordpool/icons.json +++ b/homeassistant/components/nordpool/icons.json @@ -38,5 +38,10 @@ "default": "mdi:cash-multiple" } } + }, + "services": { + "get_prices_for_date": { + "service": "mdi:cash-multiple" + } } } diff --git a/homeassistant/components/nordpool/quality_scale.yaml b/homeassistant/components/nordpool/quality_scale.yaml index dada1115715..9c5160d0ccb 100644 --- a/homeassistant/components/nordpool/quality_scale.yaml +++ b/homeassistant/components/nordpool/quality_scale.yaml @@ -14,27 +14,18 @@ rules: comment: | Entities doesn't subscribe to events. dependency-transparency: done - action-setup: - status: exempt - comment: | - This integration does not provide additional actions. + action-setup: done common-modules: done docs-high-level-description: done docs-installation-instructions: done docs-removal-instructions: done - docs-actions: - status: exempt - comment: | - This integration does not provide additional actions. + docs-actions: done brands: done # Silver config-entry-unloading: done log-when-unavailable: done entity-unavailable: done - action-exceptions: - status: exempt - comment: | - No actions. + action-exceptions: done reauthentication-flow: status: exempt comment: | diff --git a/homeassistant/components/nordpool/services.py b/homeassistant/components/nordpool/services.py new file mode 100644 index 00000000000..872bd5b1e6b --- /dev/null +++ b/homeassistant/components/nordpool/services.py @@ -0,0 +1,129 @@ +"""Services for Nord Pool integration.""" + +from __future__ import annotations + +from datetime import date, datetime +import logging +from typing import TYPE_CHECKING + +from pynordpool import ( + AREAS, + Currency, + NordPoolAuthenticationError, + NordPoolEmptyResponseError, + NordPoolError, +) +import voluptuous as vol + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_DATE +from homeassistant.core import ( + HomeAssistant, + ServiceCall, + ServiceResponse, + SupportsResponse, +) +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.selector import ConfigEntrySelector +from homeassistant.util import dt as dt_util +from homeassistant.util.json import JsonValueType + +if TYPE_CHECKING: + from . 
import NordPoolConfigEntry +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) +ATTR_CONFIG_ENTRY = "config_entry" +ATTR_AREAS = "areas" +ATTR_CURRENCY = "currency" + +SERVICE_GET_PRICES_FOR_DATE = "get_prices_for_date" +SERVICE_GET_PRICES_SCHEMA = vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(), + vol.Required(ATTR_DATE): cv.date, + vol.Optional(ATTR_AREAS): vol.All(vol.In(list(AREAS)), cv.ensure_list, [str]), + vol.Optional(ATTR_CURRENCY): vol.All( + cv.string, vol.In([currency.value for currency in Currency]) + ), + } +) + + +def get_config_entry(hass: HomeAssistant, entry_id: str) -> NordPoolConfigEntry: + """Return config entry.""" + if not (entry := hass.config_entries.async_get_entry(entry_id)): + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="entry_not_found", + ) + if entry.state is not ConfigEntryState.LOADED: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="entry_not_loaded", + ) + return entry + + +def async_setup_services(hass: HomeAssistant) -> None: + """Set up services for Nord Pool integration.""" + + async def get_prices_for_date(call: ServiceCall) -> ServiceResponse: + """Get price service.""" + entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY]) + asked_date: date = call.data[ATTR_DATE] + client = entry.runtime_data.client + + areas: list[str] = entry.data[ATTR_AREAS] + if _areas := call.data.get(ATTR_AREAS): + areas = _areas + + currency: str = entry.data[ATTR_CURRENCY] + if _currency := call.data.get(ATTR_CURRENCY): + currency = _currency + + areas = [area.upper() for area in areas] + currency = currency.upper() + + try: + price_data = await client.async_get_delivery_period( + datetime.combine(asked_date, dt_util.utcnow().time()), + Currency(currency), + areas, + ) + except NordPoolAuthenticationError as error: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="authentication_error", + ) from error + except NordPoolEmptyResponseError as error: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="empty_response", + ) from error + except NordPoolError as error: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="connection_error", + ) from error + + result: dict[str, JsonValueType] = {} + for area in areas: + result[area] = [ + { + "start": price_entry.start.isoformat(), + "end": price_entry.end.isoformat(), + "price": price_entry.entry[area], + } + for price_entry in price_data.entries + ] + return result + + hass.services.async_register( + DOMAIN, + SERVICE_GET_PRICES_FOR_DATE, + get_prices_for_date, + schema=SERVICE_GET_PRICES_SCHEMA, + supports_response=SupportsResponse.ONLY, + ) diff --git a/homeassistant/components/nordpool/services.yaml b/homeassistant/components/nordpool/services.yaml new file mode 100644 index 00000000000..dded8482c6f --- /dev/null +++ b/homeassistant/components/nordpool/services.yaml @@ -0,0 +1,48 @@ +get_prices_for_date: + fields: + config_entry: + required: true + selector: + config_entry: + integration: nordpool + date: + required: true + selector: + date: + areas: + selector: + select: + options: + - "EE" + - "LT" + - "LV" + - "AT" + - "BE" + - "FR" + - "GER" + - "NL" + - "PL" + - "DK1" + - "DK2" + - "FI" + - "NO1" + - "NO2" + - "NO3" + - "NO4" + - "NO5" + - "SE1" + - "SE2" + - "SE3" + - "SE4" + - "SYS" + mode: dropdown + currency: + selector: + select: + options: + - "DKK" + - "EUR" + - "NOK" + - "PLN" + - "SEK" + mode: dropdown diff --git 
a/homeassistant/components/nordpool/strings.json b/homeassistant/components/nordpool/strings.json index 96c22633c9e..d30898730b9 100644 --- a/homeassistant/components/nordpool/strings.json +++ b/homeassistant/components/nordpool/strings.json @@ -70,9 +70,48 @@ } } }, + "services": { + "get_prices_for_date": { + "name": "Get prices for date", + "description": "Retrieve the prices for a specific date.", + "fields": { + "config_entry": { + "name": "Select Nord Pool configuration entry", + "description": "Choose the configuration entry." + }, + "date": { + "name": "Date", + "description": "Only dates two months in the past and one day in the future is allowed." + }, + "areas": { + "name": "Areas", + "description": "One or multiple areas to get prices for. If left empty it will use the areas already configured." + }, + "currency": { + "name": "Currency", + "description": "Currency to get prices in. If left empty it will use the currency already configured." + } + } + } + }, "exceptions": { "initial_update_failed": { "message": "Initial update failed on startup with error {error}" + }, + "entry_not_found": { + "message": "The Nord Pool integration is not configured in Home Assistant." + }, + "entry_not_loaded": { + "message": "The Nord Pool integration is currently not loaded or disabled in Home Assistant." + }, + "authentication_error": { + "message": "There was an authentication error as you tried to retrieve data too far in the past." + }, + "empty_response": { + "message": "Nord Pool has not posted market prices for the provided date." + }, + "connection_error": { + "message": "There was a connection error connecting to the API. Try again later." } } } diff --git a/tests/components/nordpool/snapshots/test_services.ambr b/tests/components/nordpool/snapshots/test_services.ambr new file mode 100644 index 00000000000..6a57d7ecce9 --- /dev/null +++ b/tests/components/nordpool/snapshots/test_services.ambr @@ -0,0 +1,127 @@ +# serializer version: 1 +# name: test_service_call + dict({ + 'SE3': list([ + dict({ + 'end': '2024-11-05T00:00:00+00:00', + 'price': 250.73, + 'start': '2024-11-04T23:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T01:00:00+00:00', + 'price': 76.36, + 'start': '2024-11-05T00:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T02:00:00+00:00', + 'price': 73.92, + 'start': '2024-11-05T01:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T03:00:00+00:00', + 'price': 61.69, + 'start': '2024-11-05T02:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T04:00:00+00:00', + 'price': 64.6, + 'start': '2024-11-05T03:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T05:00:00+00:00', + 'price': 453.27, + 'start': '2024-11-05T04:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T06:00:00+00:00', + 'price': 996.28, + 'start': '2024-11-05T05:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T07:00:00+00:00', + 'price': 1406.14, + 'start': '2024-11-05T06:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T08:00:00+00:00', + 'price': 1346.54, + 'start': '2024-11-05T07:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T09:00:00+00:00', + 'price': 1150.28, + 'start': '2024-11-05T08:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T10:00:00+00:00', + 'price': 1031.32, + 'start': '2024-11-05T09:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T11:00:00+00:00', + 'price': 927.37, + 'start': '2024-11-05T10:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T12:00:00+00:00', + 'price': 925.05, + 'start': '2024-11-05T11:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T13:00:00+00:00', + 'price': 949.49, + 
'start': '2024-11-05T12:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T14:00:00+00:00', + 'price': 1042.03, + 'start': '2024-11-05T13:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T15:00:00+00:00', + 'price': 1258.89, + 'start': '2024-11-05T14:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T16:00:00+00:00', + 'price': 1816.45, + 'start': '2024-11-05T15:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T17:00:00+00:00', + 'price': 2512.65, + 'start': '2024-11-05T16:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T18:00:00+00:00', + 'price': 1819.83, + 'start': '2024-11-05T17:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T19:00:00+00:00', + 'price': 1011.77, + 'start': '2024-11-05T18:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T20:00:00+00:00', + 'price': 835.53, + 'start': '2024-11-05T19:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T21:00:00+00:00', + 'price': 796.19, + 'start': '2024-11-05T20:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T22:00:00+00:00', + 'price': 522.3, + 'start': '2024-11-05T21:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T23:00:00+00:00', + 'price': 289.14, + 'start': '2024-11-05T22:00:00+00:00', + }), + ]), + }) +# --- diff --git a/tests/components/nordpool/test_services.py b/tests/components/nordpool/test_services.py new file mode 100644 index 00000000000..224b4bc9981 --- /dev/null +++ b/tests/components/nordpool/test_services.py @@ -0,0 +1,166 @@ +"""Test services in Nord Pool.""" + +from unittest.mock import patch + +from pynordpool import ( + DeliveryPeriodData, + NordPoolAuthenticationError, + NordPoolEmptyResponseError, + NordPoolError, +) +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.nordpool.const import DOMAIN +from homeassistant.components.nordpool.services import ( + ATTR_AREAS, + ATTR_CONFIG_ENTRY, + ATTR_CURRENCY, + SERVICE_GET_PRICES_FOR_DATE, +) +from homeassistant.const import ATTR_DATE +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError + +from tests.common import MockConfigEntry + +TEST_SERVICE_DATA = { + ATTR_CONFIG_ENTRY: "to_replace", + ATTR_DATE: "2024-11-05", + ATTR_AREAS: "SE3", + ATTR_CURRENCY: "SEK", +} +TEST_SERVICE_DATA_USE_DEFAULTS = { + ATTR_CONFIG_ENTRY: "to_replace", + ATTR_DATE: "2024-11-05", +} + + +@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") +async def test_service_call( + hass: HomeAssistant, + load_int: MockConfigEntry, + get_data: DeliveryPeriodData, + snapshot: SnapshotAssertion, +) -> None: + """Test get_prices_for_date service call.""" + + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + return_value=get_data, + ), + ): + service_data = TEST_SERVICE_DATA.copy() + service_data[ATTR_CONFIG_ENTRY] = load_int.entry_id + response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_PRICES_FOR_DATE, + service_data, + blocking=True, + return_response=True, + ) + + assert response == snapshot + price_value = response["SE3"][0]["price"] + + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + return_value=get_data, + ), + ): + service_data = TEST_SERVICE_DATA_USE_DEFAULTS.copy() + service_data[ATTR_CONFIG_ENTRY] = load_int.entry_id + response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_PRICES_FOR_DATE, + service_data, + blocking=True, + return_response=True, + ) + + assert "SE3" in response + assert response["SE3"][0]["price"] == price_value + + 
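The test above drives the new `get_prices_for_date` action end to end. For orientation, here is a minimal sketch of how the same action could be called from a script once this change is installed; the config entry ID is a placeholder, and the optional `areas`/`currency` fields fall back to the values stored on the config entry, just as `TEST_SERVICE_DATA_USE_DEFAULTS` exercises below.

```
# Hypothetical script step; the config_entry ID is a placeholder (use the UI selector in practice).
- action: nordpool.get_prices_for_date
  data:
    config_entry: 0123abcd4567efgh  # placeholder entry ID
    date: "2024-11-05"
    areas: SE3       # optional; defaults to the entry's configured areas
    currency: SEK    # optional; defaults to the entry's configured currency
  response_variable: nordpool_prices
# nordpool_prices["SE3"] then holds a list of {start, end, price} entries, as in the snapshot fixture.
```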
+@pytest.mark.parametrize( + ("error", "key"), + [ + (NordPoolAuthenticationError, "authentication_error"), + (NordPoolEmptyResponseError, "empty_response"), + (NordPoolError, "connection_error"), + ], +) +@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") +async def test_service_call_failures( + hass: HomeAssistant, + load_int: MockConfigEntry, + error: Exception, + key: str, +) -> None: + """Test get_prices_for_date service call when it fails.""" + service_data = TEST_SERVICE_DATA.copy() + service_data[ATTR_CONFIG_ENTRY] = load_int.entry_id + + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + side_effect=error, + ), + pytest.raises(ServiceValidationError) as err, + ): + await hass.services.async_call( + DOMAIN, + SERVICE_GET_PRICES_FOR_DATE, + service_data, + blocking=True, + return_response=True, + ) + assert err.value.translation_key == key + + +@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") +async def test_service_call_config_entry_bad_state( + hass: HomeAssistant, + load_int: MockConfigEntry, + get_data: DeliveryPeriodData, +) -> None: + """Test get_prices_for_date service call when config entry bad state.""" + + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + return_value=get_data, + ), + pytest.raises(ServiceValidationError) as err, + ): + await hass.services.async_call( + DOMAIN, + SERVICE_GET_PRICES_FOR_DATE, + TEST_SERVICE_DATA, + blocking=True, + return_response=True, + ) + assert err.value.translation_key == "entry_not_found" + + service_data = TEST_SERVICE_DATA.copy() + service_data[ATTR_CONFIG_ENTRY] = load_int.entry_id + await hass.config_entries.async_unload(load_int.entry_id) + await hass.async_block_till_done() + + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + return_value=get_data, + ), + pytest.raises(ServiceValidationError) as err, + ): + await hass.services.async_call( + DOMAIN, + SERVICE_GET_PRICES_FOR_DATE, + service_data, + blocking=True, + return_response=True, + ) + assert err.value.translation_key == "entry_not_loaded" From e73512e11c9938be4c274f2e824cda7d9fac1306 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ab=C3=ADlio=20Costa?= Date: Tue, 17 Dec 2024 22:49:04 +0000 Subject: [PATCH 0792/1198] Add integration_type to Idasen Desk (#132486) * Add Idasen Desk quality scale record * Update wrong checks * Add integration_type to Idasen Desk --- homeassistant/components/idasen_desk/manifest.json | 1 + homeassistant/components/idasen_desk/quality_scale.yaml | 5 ++++- homeassistant/generated/integrations.json | 2 +- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/idasen_desk/manifest.json b/homeassistant/components/idasen_desk/manifest.json index 0f8c9eaafc9..2f53ec20e11 100644 --- a/homeassistant/components/idasen_desk/manifest.json +++ b/homeassistant/components/idasen_desk/manifest.json @@ -10,6 +10,7 @@ "config_flow": true, "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/idasen_desk", + "integration_type": "device", "iot_class": "local_push", "requirements": ["idasen-ha==2.6.2"] } diff --git a/homeassistant/components/idasen_desk/quality_scale.yaml b/homeassistant/components/idasen_desk/quality_scale.yaml index 28381f98a3e..1b9ec8cd810 100644 --- a/homeassistant/components/idasen_desk/quality_scale.yaml +++ b/homeassistant/components/idasen_desk/quality_scale.yaml @@ -79,7 +79,10 @@ 
rules: docs-supported-functions: todo docs-troubleshooting: todo docs-use-cases: todo - dynamic-devices: todo + dynamic-devices: + status: exempt + comment: | + This integration has one device per config entry. entity-category: done entity-device-class: done entity-disabled-by-default: done diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 5fc09fcd70f..bd3c9eb04f9 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -2818,7 +2818,7 @@ "name": "IKEA TR\u00c5DFRI" }, "idasen_desk": { - "integration_type": "hub", + "integration_type": "device", "config_flow": true, "iot_class": "local_push", "name": "IKEA Idasen Desk" From 9bff9c5e7bffc2239c9cf4d91d366214e8b42d92 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 17 Dec 2024 18:57:43 -0700 Subject: [PATCH 0793/1198] Ensure screenlogic retries if the protocol adapter is still booting (#133444) * Ensure screenlogic retries if the protocol adapter is still booting If the protocol adapter is still booting, it will disconnect and never retry ``` Traceback (most recent call last): File "/usr/src/homeassistant/homeassistant/config_entries.py", line 640, in __async_setup_with_context result = await component.async_setup_entry(hass, self) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/src/homeassistant/homeassistant/components/screenlogic/__init__.py", line 65, in async_setup_entry await gateway.async_connect(**connect_info) File "/usr/local/lib/python3.13/site-packages/screenlogicpy/gateway.py", line 142, in async_connect connectPkg = await async_connect_to_gateway( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ...<4 lines>... ) ^ File "/usr/local/lib/python3.13/site-packages/screenlogicpy/requests/login.py", line 107, in async_connect_to_gateway mac_address = await async_gateway_connect(transport, protocol, max_retries) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/local/lib/python3.13/site-packages/screenlogicpy/requests/login.py", line 77, in async_gateway_connect raise ScreenLogicConnectionError("Host unexpectedly disconnected.") screenlogicpy.const.common.ScreenLogicConnectionError: Host unexpectedly disconnected. 
``` * coverage --- .../components/screenlogic/__init__.py | 3 +- tests/components/screenlogic/test_init.py | 36 ++++++++++++++++++- 2 files changed, 37 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/screenlogic/__init__.py b/homeassistant/components/screenlogic/__init__.py index 6f58e9b3666..972837f7d75 100644 --- a/homeassistant/components/screenlogic/__init__.py +++ b/homeassistant/components/screenlogic/__init__.py @@ -4,6 +4,7 @@ import logging from typing import Any from screenlogicpy import ScreenLogicError, ScreenLogicGateway +from screenlogicpy.const.common import ScreenLogicConnectionError from screenlogicpy.const.data import SHARED_VALUES from homeassistant.config_entries import ConfigEntry @@ -64,7 +65,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ScreenLogicConfigEntry) try: await gateway.async_connect(**connect_info) await gateway.async_update() - except ScreenLogicError as ex: + except (ScreenLogicConnectionError, ScreenLogicError) as ex: raise ConfigEntryNotReady(ex.msg) from ex coordinator = ScreenlogicDataUpdateCoordinator( diff --git a/tests/components/screenlogic/test_init.py b/tests/components/screenlogic/test_init.py index 6416c93f779..f21a1118b4f 100644 --- a/tests/components/screenlogic/test_init.py +++ b/tests/components/screenlogic/test_init.py @@ -4,12 +4,14 @@ from dataclasses import dataclass from unittest.mock import DEFAULT, patch import pytest -from screenlogicpy import ScreenLogicGateway +from screenlogicpy import ScreenLogicError, ScreenLogicGateway +from screenlogicpy.const.common import ScreenLogicConnectionError from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.number import DOMAIN as NUMBER_DOMAIN from homeassistant.components.screenlogic import DOMAIN from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.util import slugify @@ -284,3 +286,35 @@ async def test_platform_setup( for entity_id in tested_entity_ids: assert hass.states.get(entity_id) is not None + + +@pytest.mark.parametrize( + "exception", + [ScreenLogicConnectionError, ScreenLogicError], +) +async def test_retry_on_connect_exception( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, exception: Exception +) -> None: + """Test setup retries on expected exceptions.""" + + def stub_connect(*args, **kwargs): + raise exception + + mock_config_entry.add_to_hass(hass) + + with ( + patch( + GATEWAY_DISCOVERY_IMPORT_PATH, + return_value={}, + ), + patch.multiple( + ScreenLogicGateway, + async_connect=stub_connect, + is_connected=False, + _async_connected_request=DEFAULT, + ), + ): + assert not await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY From dfdd83789ad858e60faa066aef7d6711cfbf2a9e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 08:05:39 +0100 Subject: [PATCH 0794/1198] Bump actions/upload-artifact from 4.4.3 to 4.5.0 (#133461) --- .github/workflows/builder.yml | 2 +- .github/workflows/ci.yaml | 22 +++++++++++----------- .github/workflows/wheels.yml | 6 +++--- 3 files changed, 15 insertions(+), 15 deletions(-) diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml 
index 8f419cca1da..20b1bd4c718 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -69,7 +69,7 @@ jobs: run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T - - name: Upload translations - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: translations path: translations.tar.gz diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 9d6f207382d..71924afecc8 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -537,7 +537,7 @@ jobs: python --version uv pip freeze >> pip_freeze.txt - name: Upload pip_freeze artifact - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: pip-freeze-${{ matrix.python-version }} path: pip_freeze.txt @@ -661,7 +661,7 @@ jobs: . venv/bin/activate python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json - name: Upload licenses - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: licenses-${{ github.run_number }}-${{ matrix.python-version }} path: licenses-${{ matrix.python-version }}.json @@ -877,7 +877,7 @@ jobs: . venv/bin/activate python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests - name: Upload pytest_buckets - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: pytest_buckets path: pytest_buckets.txt @@ -979,14 +979,14 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-full.conclusion == 'failure' - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml @@ -1106,7 +1106,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -1114,7 +1114,7 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -1236,7 +1236,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1244,7 +1244,7 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1378,14 +1378,14 @@ jobs: 2>&1 | tee pytest-${{ 
matrix.python-version }}-${{ matrix.group }}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index a36b3073aab..9ea9a557105 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -79,7 +79,7 @@ jobs: ) > .env_file - name: Upload env_file - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: env_file path: ./.env_file @@ -87,7 +87,7 @@ jobs: overwrite: true - name: Upload requirements_diff - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: requirements_diff path: ./requirements_diff.txt @@ -99,7 +99,7 @@ jobs: python -m script.gen_requirements_all ci - name: Upload requirements_all_wheels - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: requirements_all_wheels path: ./requirements_all_wheels_*.txt From c10473844fc63374b6e4cb55fc173e33ab113093 Mon Sep 17 00:00:00 2001 From: Assaf Inbal Date: Wed, 18 Dec 2024 09:36:42 +0200 Subject: [PATCH 0795/1198] Add sensors to Ituran integration (#133359) Add sensors to Ituran --- homeassistant/components/ituran/__init__.py | 1 + homeassistant/components/ituran/icons.json | 11 + .../components/ituran/quality_scale.yaml | 5 +- homeassistant/components/ituran/sensor.py | 119 +++++++ homeassistant/components/ituran/strings.json | 19 ++ tests/components/ituran/conftest.py | 6 +- .../ituran/snapshots/test_sensor.ambr | 297 ++++++++++++++++++ .../components/ituran/test_device_tracker.py | 7 +- tests/components/ituran/test_sensor.py | 76 +++++ 9 files changed, 533 insertions(+), 8 deletions(-) create mode 100644 homeassistant/components/ituran/sensor.py create mode 100644 tests/components/ituran/snapshots/test_sensor.ambr create mode 100644 tests/components/ituran/test_sensor.py diff --git a/homeassistant/components/ituran/__init__.py b/homeassistant/components/ituran/__init__.py index b0a26cf7db2..bf9cff238cd 100644 --- a/homeassistant/components/ituran/__init__.py +++ b/homeassistant/components/ituran/__init__.py @@ -9,6 +9,7 @@ from .coordinator import IturanConfigEntry, IturanDataUpdateCoordinator PLATFORMS: list[Platform] = [ Platform.DEVICE_TRACKER, + Platform.SENSOR, ] diff --git a/homeassistant/components/ituran/icons.json b/homeassistant/components/ituran/icons.json index a20ea5b7304..bd9182f1569 100644 --- a/homeassistant/components/ituran/icons.json +++ b/homeassistant/components/ituran/icons.json @@ -4,6 +4,17 @@ "car": { "default": "mdi:car" } + }, + "sensor": { + "address": { + "default": "mdi:map-marker" + }, + "battery_voltage": { + "default": "mdi:car-battery" + }, + "heading": { + "default": "mdi:compass" + } } } } diff --git a/homeassistant/components/ituran/quality_scale.yaml b/homeassistant/components/ituran/quality_scale.yaml index 71d0d9698da..cd7e17c3b12 100644 --- a/homeassistant/components/ituran/quality_scale.yaml +++ b/homeassistant/components/ituran/quality_scale.yaml @@ -55,10 +55,7 @@ rules: Only device_tracker 
platform. devices: done entity-category: todo - entity-disabled-by-default: - status: exempt - comment: | - No noisy entities + entity-disabled-by-default: done discovery: status: exempt comment: | diff --git a/homeassistant/components/ituran/sensor.py b/homeassistant/components/ituran/sensor.py new file mode 100644 index 00000000000..e962f5bd561 --- /dev/null +++ b/homeassistant/components/ituran/sensor.py @@ -0,0 +1,119 @@ +"""Sensors for Ituran vehicles.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from datetime import datetime + +from pyituran import Vehicle + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, +) +from homeassistant.const import ( + DEGREE, + UnitOfElectricPotential, + UnitOfLength, + UnitOfSpeed, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType + +from . import IturanConfigEntry +from .coordinator import IturanDataUpdateCoordinator +from .entity import IturanBaseEntity + + +@dataclass(frozen=True, kw_only=True) +class IturanSensorEntityDescription(SensorEntityDescription): + """Describes Ituran sensor entity.""" + + value_fn: Callable[[Vehicle], StateType | datetime] + + +SENSOR_TYPES: list[IturanSensorEntityDescription] = [ + IturanSensorEntityDescription( + key="address", + translation_key="address", + entity_registry_enabled_default=False, + value_fn=lambda vehicle: vehicle.address, + ), + IturanSensorEntityDescription( + key="battery_voltage", + translation_key="battery_voltage", + device_class=SensorDeviceClass.VOLTAGE, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + suggested_display_precision=0, + entity_registry_enabled_default=False, + value_fn=lambda vehicle: vehicle.battery_voltage, + ), + IturanSensorEntityDescription( + key="heading", + translation_key="heading", + native_unit_of_measurement=DEGREE, + suggested_display_precision=0, + entity_registry_enabled_default=False, + value_fn=lambda vehicle: vehicle.heading, + ), + IturanSensorEntityDescription( + key="last_update_from_vehicle", + translation_key="last_update_from_vehicle", + device_class=SensorDeviceClass.TIMESTAMP, + entity_registry_enabled_default=False, + value_fn=lambda vehicle: vehicle.last_update, + ), + IturanSensorEntityDescription( + key="mileage", + translation_key="mileage", + device_class=SensorDeviceClass.DISTANCE, + native_unit_of_measurement=UnitOfLength.KILOMETERS, + suggested_display_precision=2, + value_fn=lambda vehicle: vehicle.mileage, + ), + IturanSensorEntityDescription( + key="speed", + device_class=SensorDeviceClass.SPEED, + native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR, + suggested_display_precision=0, + value_fn=lambda vehicle: vehicle.speed, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: IturanConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Ituran sensors from config entry.""" + coordinator = config_entry.runtime_data + async_add_entities( + IturanSensor(coordinator, license_plate, description) + for description in SENSOR_TYPES + for license_plate in coordinator.data + ) + + +class IturanSensor(IturanBaseEntity, SensorEntity): + """Ituran device tracker.""" + + entity_description: IturanSensorEntityDescription + + def __init__( + self, + coordinator: IturanDataUpdateCoordinator, + license_plate: str, + description: 
IturanSensorEntityDescription, + ) -> None: + """Initialize the sensor.""" + super().__init__(coordinator, license_plate, description.key) + self.entity_description = description + + @property + def native_value(self) -> StateType | datetime: + """Return the state of the device.""" + return self.entity_description.value_fn(self.vehicle) diff --git a/homeassistant/components/ituran/strings.json b/homeassistant/components/ituran/strings.json index 212dbd1b86a..efc60ef454b 100644 --- a/homeassistant/components/ituran/strings.json +++ b/homeassistant/components/ituran/strings.json @@ -35,6 +35,25 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" } }, + "entity": { + "sensor": { + "address": { + "name": "Address" + }, + "battery_voltage": { + "name": "Battery voltage" + }, + "heading": { + "name": "Heading" + }, + "last_update_from_vehicle": { + "name": "Last update from vehicle" + }, + "mileage": { + "name": "Mileage" + } + } + }, "exceptions": { "api_error": { "message": "An error occurred while communicating with the Ituran service." diff --git a/tests/components/ituran/conftest.py b/tests/components/ituran/conftest.py index ef22c90591d..5093cc301a1 100644 --- a/tests/components/ituran/conftest.py +++ b/tests/components/ituran/conftest.py @@ -3,6 +3,7 @@ from collections.abc import Generator from datetime import datetime from unittest.mock import AsyncMock, PropertyMock, patch +from zoneinfo import ZoneInfo import pytest @@ -56,7 +57,10 @@ class MockVehicle: self.gps_coordinates = (25.0, -71.0) self.address = "Bermuda Triangle" self.heading = 150 - self.last_update = datetime(2024, 1, 1, 0, 0, 0) + self.last_update = datetime( + 2024, 1, 1, 0, 0, 0, tzinfo=ZoneInfo("Asia/Jerusalem") + ) + self.battery_voltage = 12.0 @pytest.fixture diff --git a/tests/components/ituran/snapshots/test_sensor.ambr b/tests/components/ituran/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..c1512de912f --- /dev/null +++ b/tests/components/ituran/snapshots/test_sensor.ambr @@ -0,0 +1,297 @@ +# serializer version: 1 +# name: test_sensor[sensor.mock_model_address-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_model_address', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Address', + 'platform': 'ituran', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'address', + 'unique_id': '12345678-address', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.mock_model_address-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'mock model Address', + }), + 'context': , + 'entity_id': 'sensor.mock_model_address', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Bermuda Triangle', + }) +# --- +# name: test_sensor[sensor.mock_model_battery_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_model_battery_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 
'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery voltage', + 'platform': 'ituran', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_voltage', + 'unique_id': '12345678-battery_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.mock_model_battery_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'mock model Battery voltage', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_model_battery_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12.0', + }) +# --- +# name: test_sensor[sensor.mock_model_heading-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_model_heading', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heading', + 'platform': 'ituran', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'heading', + 'unique_id': '12345678-heading', + 'unit_of_measurement': '°', + }) +# --- +# name: test_sensor[sensor.mock_model_heading-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'mock model Heading', + 'unit_of_measurement': '°', + }), + 'context': , + 'entity_id': 'sensor.mock_model_heading', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '150', + }) +# --- +# name: test_sensor[sensor.mock_model_last_update_from_vehicle-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_model_last_update_from_vehicle', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last update from vehicle', + 'platform': 'ituran', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_update_from_vehicle', + 'unique_id': '12345678-last_update_from_vehicle', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.mock_model_last_update_from_vehicle-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'mock model Last update from vehicle', + }), + 'context': , + 'entity_id': 'sensor.mock_model_last_update_from_vehicle', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2023-12-31T22:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.mock_model_mileage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_model_mileage', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Mileage', + 'platform': 'ituran', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mileage', + 'unique_id': '12345678-mileage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.mock_model_mileage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 'friendly_name': 'mock model Mileage', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_model_mileage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1000', + }) +# --- +# name: test_sensor[sensor.mock_model_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_model_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Speed', + 'platform': 'ituran', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345678-speed', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.mock_model_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speed', + 'friendly_name': 'mock model Speed', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_model_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- diff --git a/tests/components/ituran/test_device_tracker.py b/tests/components/ituran/test_device_tracker.py index 7bcb314cde7..4fe92154e91 100644 --- a/tests/components/ituran/test_device_tracker.py +++ b/tests/components/ituran/test_device_tracker.py @@ -1,13 +1,13 @@ """Test the Ituran device_tracker.""" -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory from pyituran.exceptions import IturanApiError from syrupy.assertion import SnapshotAssertion from homeassistant.components.ituran.const import UPDATE_INTERVAL -from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -24,7 +24,8 @@ async def test_device_tracker( mock_config_entry: MockConfigEntry, ) -> None: """Test state of device_tracker.""" - await setup_integration(hass, mock_config_entry) + with patch("homeassistant.components.ituran.PLATFORMS", [Platform.DEVICE_TRACKER]): + await setup_integration(hass, mock_config_entry) await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/ituran/test_sensor.py b/tests/components/ituran/test_sensor.py new file mode 100644 index 00000000000..a057f59b81f --- /dev/null +++ b/tests/components/ituran/test_sensor.py @@ -0,0 +1,76 @@ +"""Test the Ituran device_tracker.""" + +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +from 
pyituran.exceptions import IturanApiError +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.ituran.const import UPDATE_INTERVAL +from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_ituran: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test state of sensor.""" + with patch("homeassistant.components.ituran.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_availability( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_ituran: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test sensor is marked as unavailable when we can't reach the Ituran service.""" + entities = [ + "sensor.mock_model_address", + "sensor.mock_model_battery_voltage", + "sensor.mock_model_heading", + "sensor.mock_model_last_update_from_vehicle", + "sensor.mock_model_mileage", + "sensor.mock_model_speed", + ] + + await setup_integration(hass, mock_config_entry) + + for entity_id in entities: + state = hass.states.get(entity_id) + assert state + assert state.state != STATE_UNAVAILABLE + + mock_ituran.get_vehicles.side_effect = IturanApiError + freezer.tick(UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + for entity_id in entities: + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_UNAVAILABLE + + mock_ituran.get_vehicles.side_effect = None + freezer.tick(UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + for entity_id in entities: + state = hass.states.get(entity_id) + assert state + assert state.state != STATE_UNAVAILABLE From fab92d1cf84a5aa99766d443d2ad9be7baad0082 Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Wed, 18 Dec 2024 02:40:27 -0500 Subject: [PATCH 0796/1198] Add reconfigure flow to Russound RIO (#133091) * Add reconfigure flow to Russound RIO * Mark reconfiguration flow as done * Update * Update --- .../components/russound_rio/config_flow.py | 23 ++++++- .../russound_rio/quality_scale.yaml | 7 +- .../components/russound_rio/strings.json | 19 +++++- tests/components/russound_rio/conftest.py | 7 +- tests/components/russound_rio/const.py | 12 ++-- .../russound_rio/snapshots/test_init.ambr | 2 +- .../russound_rio/test_config_flow.py | 65 ++++++++++++++++++- tests/components/russound_rio/test_init.py | 4 +- 8 files changed, 121 insertions(+), 18 deletions(-) diff --git a/homeassistant/components/russound_rio/config_flow.py b/homeassistant/components/russound_rio/config_flow.py index 15d002b3f49..e5efd309a23 100644 --- a/homeassistant/components/russound_rio/config_flow.py +++ b/homeassistant/components/russound_rio/config_flow.py @@ -9,7 +9,11 @@ from typing import Any from aiorussound import RussoundClient, RussoundTcpConnectionHandler import voluptuous as vol -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from 
homeassistant.config_entries import ( + SOURCE_RECONFIGURE, + ConfigFlow, + ConfigFlowResult, +) from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.helpers import config_validation as cv @@ -50,6 +54,12 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" else: await self.async_set_unique_id(controller.mac_address) + if self.source == SOURCE_RECONFIGURE: + self._abort_if_unique_id_mismatch(reason="wrong_device") + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), + data_updates=user_input, + ) self._abort_if_unique_id_configured() data = {CONF_HOST: host, CONF_PORT: port} return self.async_create_entry( @@ -60,6 +70,17 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=DATA_SCHEMA, errors=errors ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of the integration.""" + if not user_input: + return self.async_show_form( + step_id="reconfigure", + data_schema=DATA_SCHEMA, + ) + return await self.async_step_user(user_input) + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Attempt to import the existing configuration.""" self._async_abort_entries_match({CONF_HOST: import_data[CONF_HOST]}) diff --git a/homeassistant/components/russound_rio/quality_scale.yaml b/homeassistant/components/russound_rio/quality_scale.yaml index 3a5e8f9adb7..63693ee6259 100644 --- a/homeassistant/components/russound_rio/quality_scale.yaml +++ b/homeassistant/components/russound_rio/quality_scale.yaml @@ -11,10 +11,7 @@ rules: brands: done common-modules: done config-flow-test-coverage: done - config-flow: - status: todo - comment: | - The data_description fields in translations are missing. + config-flow: done dependency-transparency: done docs-actions: status: exempt @@ -65,7 +62,7 @@ rules: diagnostics: done exception-translations: done icon-translations: todo - reconfiguration-flow: todo + reconfiguration-flow: done dynamic-devices: todo discovery-update-info: todo repair-issues: done diff --git a/homeassistant/components/russound_rio/strings.json b/homeassistant/components/russound_rio/strings.json index b8c29c08301..93544064e20 100644 --- a/homeassistant/components/russound_rio/strings.json +++ b/homeassistant/components/russound_rio/strings.json @@ -9,6 +9,21 @@ "host": "[%key:common::config_flow::data::host%]", "name": "[%key:common::config_flow::data::name%]", "port": "[%key:common::config_flow::data::port%]" + }, + "data_description": { + "host": "The IP address of the Russound controller.", + "port": "The port of the Russound controller." 
+ } + }, + "reconfigure": { + "description": "Reconfigure your Russound controller.", + "data": { + "host": "[%key:common::config_flow::data::host%]", + "port": "[%key:common::config_flow::data::port%]" + }, + "data_description": { + "host": "[%key:component::russound_rio::config::step::user::data_description::host%]", + "port": "[%key:component::russound_rio::config::step::user::data_description::port%]" } } }, @@ -17,7 +32,9 @@ }, "abort": { "cannot_connect": "[%key:component::russound_rio::common::error_cannot_connect%]", - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "wrong_device": "This Russound controller does not match the existing device id. Please make sure you entered the correct IP address." } }, "issues": { diff --git a/tests/components/russound_rio/conftest.py b/tests/components/russound_rio/conftest.py index 5522c1e6ea2..3321d4160b9 100644 --- a/tests/components/russound_rio/conftest.py +++ b/tests/components/russound_rio/conftest.py @@ -9,9 +9,10 @@ from aiorussound.util import controller_device_str, zone_device_str import pytest from homeassistant.components.russound_rio.const import DOMAIN +from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant -from .const import API_VERSION, HARDWARE_MAC, HOST, MOCK_CONFIG, MODEL, PORT +from .const import API_VERSION, HARDWARE_MAC, MOCK_CONFIG, MODEL from tests.common import MockConfigEntry, load_json_object_fixture @@ -68,7 +69,9 @@ def mock_russound_client() -> Generator[AsyncMock]: 1, "MCA-C5", client, controller_device_str(1), HARDWARE_MAC, None, zones ) } - client.connection_handler = RussoundTcpConnectionHandler(HOST, PORT) + client.connection_handler = RussoundTcpConnectionHandler( + MOCK_CONFIG[CONF_HOST], MOCK_CONFIG[CONF_PORT] + ) client.is_connected = Mock(return_value=True) client.unregister_state_update_callbacks.return_value = True client.rio_version = API_VERSION diff --git a/tests/components/russound_rio/const.py b/tests/components/russound_rio/const.py index 8f8ae7b59ea..18f75838525 100644 --- a/tests/components/russound_rio/const.py +++ b/tests/components/russound_rio/const.py @@ -3,16 +3,20 @@ from collections import namedtuple from homeassistant.components.media_player import DOMAIN as MP_DOMAIN +from homeassistant.const import CONF_HOST, CONF_PORT -HOST = "127.0.0.1" -PORT = 9621 MODEL = "MCA-C5" HARDWARE_MAC = "00:11:22:33:44:55" API_VERSION = "1.08.00" MOCK_CONFIG = { - "host": HOST, - "port": PORT, + CONF_HOST: "192.168.20.75", + CONF_PORT: 9621, +} + +MOCK_RECONFIGURATION_CONFIG = { + CONF_HOST: "192.168.20.70", + CONF_PORT: 9622, } _CONTROLLER = namedtuple("Controller", ["mac_address", "controller_type"]) # noqa: PYI024 diff --git a/tests/components/russound_rio/snapshots/test_init.ambr b/tests/components/russound_rio/snapshots/test_init.ambr index fcd59dd06f7..c92f06c4bc0 100644 --- a/tests/components/russound_rio/snapshots/test_init.ambr +++ b/tests/components/russound_rio/snapshots/test_init.ambr @@ -3,7 +3,7 @@ DeviceRegistryEntrySnapshot({ 'area_id': None, 'config_entries': , - 'configuration_url': 'http://127.0.0.1', + 'configuration_url': 'http://192.168.20.75', 'connections': set({ tuple( 'mac', diff --git a/tests/components/russound_rio/test_config_flow.py b/tests/components/russound_rio/test_config_flow.py index 28cbf7eda5e..7a3b7fac7da 
100644 --- a/tests/components/russound_rio/test_config_flow.py +++ b/tests/components/russound_rio/test_config_flow.py @@ -3,11 +3,12 @@ from unittest.mock import AsyncMock from homeassistant.components.russound_rio.const import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER, ConfigFlowResult +from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from .const import MOCK_CONFIG, MODEL +from .const import MOCK_CONFIG, MOCK_RECONFIGURATION_CONFIG, MODEL from tests.common import MockConfigEntry @@ -117,3 +118,63 @@ async def test_import_cannot_connect( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "cannot_connect" + + +async def _start_reconfigure_flow( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> ConfigFlowResult: + """Initialize a reconfigure flow.""" + mock_config_entry.add_to_hass(hass) + + reconfigure_result = await mock_config_entry.start_reconfigure_flow(hass) + + assert reconfigure_result["type"] is FlowResultType.FORM + assert reconfigure_result["step_id"] == "reconfigure" + + return reconfigure_result + + +async def test_reconfigure_flow( + hass: HomeAssistant, + mock_russound_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfigure flow.""" + + reconfigure_result = await _start_reconfigure_flow(hass, mock_config_entry) + + result = await hass.config_entries.flow.async_configure( + reconfigure_result["flow_id"], + MOCK_RECONFIGURATION_CONFIG, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + entry = hass.config_entries.async_get_entry(mock_config_entry.entry_id) + assert entry + assert entry.data == { + CONF_HOST: "192.168.20.70", + CONF_PORT: 9622, + } + + +async def test_reconfigure_unique_id_mismatch( + hass: HomeAssistant, + mock_russound_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Ensure reconfigure flow aborts when the device changes.""" + mock_russound_client.controllers[1].mac_address = "different_mac" + + reconfigure_result = await _start_reconfigure_flow(hass, mock_config_entry) + + result = await hass.config_entries.flow.async_configure( + reconfigure_result["flow_id"], + MOCK_RECONFIGURATION_CONFIG, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "wrong_device" diff --git a/tests/components/russound_rio/test_init.py b/tests/components/russound_rio/test_init.py index e7022fa6ac1..d654eea32bd 100644 --- a/tests/components/russound_rio/test_init.py +++ b/tests/components/russound_rio/test_init.py @@ -59,8 +59,8 @@ async def test_disconnect_reconnect_log( mock_russound_client.is_connected = Mock(return_value=False) await mock_state_update(mock_russound_client, CallbackType.CONNECTION) - assert "Disconnected from device at 127.0.0.1" in caplog.text + assert "Disconnected from device at 192.168.20.75" in caplog.text mock_russound_client.is_connected = Mock(return_value=True) await mock_state_update(mock_russound_client, CallbackType.CONNECTION) - assert "Reconnected to device at 127.0.0.1" in caplog.text + assert "Reconnected to device at 192.168.20.75" in caplog.text From 4c91d1b402a5cdd8c9251c0ee49ac4aa983e2bbd Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Wed, 18 
Dec 2024 08:48:37 +0100 Subject: [PATCH 0797/1198] Add support for ACB batteries to Enphase Envoy (#131298) * Add support for ACB batteries to Enphase Envoy * Add tests for ACB battery support in ENphase Envoy * make acb state sensordeviceclass ENUM * Capitalize strings and use common idle --- .../components/enphase_envoy/sensor.py | 145 + .../components/enphase_envoy/strings.json | 24 + tests/components/enphase_envoy/conftest.py | 6 + .../fixtures/envoy_acb_batt.json | 274 + .../enphase_envoy/snapshots/test_sensor.ambr | 4854 +++++++++++++++++ tests/components/enphase_envoy/test_sensor.py | 101 + 6 files changed, 5404 insertions(+) create mode 100644 tests/components/enphase_envoy/fixtures/envoy_acb_batt.json diff --git a/homeassistant/components/enphase_envoy/sensor.py b/homeassistant/components/enphase_envoy/sensor.py index fadbf191840..62ae5b621ac 100644 --- a/homeassistant/components/enphase_envoy/sensor.py +++ b/homeassistant/components/enphase_envoy/sensor.py @@ -10,6 +10,8 @@ from operator import attrgetter from typing import TYPE_CHECKING from pyenphase import ( + EnvoyACBPower, + EnvoyBatteryAggregate, EnvoyEncharge, EnvoyEnchargeAggregate, EnvoyEnchargePower, @@ -723,6 +725,78 @@ ENCHARGE_AGGREGATE_SENSORS = ( ) +@dataclass(frozen=True, kw_only=True) +class EnvoyAcbBatterySensorEntityDescription(SensorEntityDescription): + """Describes an Envoy ACB Battery sensor entity.""" + + value_fn: Callable[[EnvoyACBPower], int | str] + + +ACB_BATTERY_POWER_SENSORS = ( + EnvoyAcbBatterySensorEntityDescription( + key="acb_power", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + value_fn=attrgetter("power"), + ), + EnvoyAcbBatterySensorEntityDescription( + key="acb_soc", + native_unit_of_measurement=PERCENTAGE, + device_class=SensorDeviceClass.BATTERY, + value_fn=attrgetter("state_of_charge"), + ), + EnvoyAcbBatterySensorEntityDescription( + key="acb_battery_state", + translation_key="acb_battery_state", + device_class=SensorDeviceClass.ENUM, + options=["discharging", "idle", "charging", "full"], + value_fn=attrgetter("state"), + ), +) + +ACB_BATTERY_ENERGY_SENSORS = ( + EnvoyAcbBatterySensorEntityDescription( + key="acb_available_energy", + translation_key="acb_available_energy", + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + device_class=SensorDeviceClass.ENERGY_STORAGE, + value_fn=attrgetter("charge_wh"), + ), +) + + +@dataclass(frozen=True, kw_only=True) +class EnvoyAggregateBatterySensorEntityDescription(SensorEntityDescription): + """Describes an Envoy aggregate Ensemble and ACB Battery sensor entity.""" + + value_fn: Callable[[EnvoyBatteryAggregate], int] + + +AGGREGATE_BATTERY_SENSORS = ( + EnvoyAggregateBatterySensorEntityDescription( + key="aggregated_soc", + translation_key="aggregated_soc", + native_unit_of_measurement=PERCENTAGE, + device_class=SensorDeviceClass.BATTERY, + value_fn=attrgetter("state_of_charge"), + ), + EnvoyAggregateBatterySensorEntityDescription( + key="aggregated_available_energy", + translation_key="aggregated_available_energy", + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + device_class=SensorDeviceClass.ENERGY_STORAGE, + value_fn=attrgetter("available_energy"), + ), + EnvoyAggregateBatterySensorEntityDescription( + key="aggregated_max_battery_capacity", + translation_key="aggregated_max_capacity", + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + device_class=SensorDeviceClass.ENERGY_STORAGE, + value_fn=attrgetter("max_available_capacity"), + ), +) + + async def async_setup_entry( hass: 
HomeAssistant, config_entry: EnphaseConfigEntry, @@ -847,6 +921,20 @@ async def async_setup_entry( EnvoyEnpowerEntity(coordinator, description) for description in ENPOWER_SENSORS ) + if envoy_data.acb_power: + entities.extend( + EnvoyAcbBatteryPowerEntity(coordinator, description) + for description in ACB_BATTERY_POWER_SENSORS + ) + entities.extend( + EnvoyAcbBatteryEnergyEntity(coordinator, description) + for description in ACB_BATTERY_ENERGY_SENSORS + ) + if envoy_data.battery_aggregate: + entities.extend( + AggregateBatteryEntity(coordinator, description) + for description in AGGREGATE_BATTERY_SENSORS + ) async_add_entities(entities) @@ -1228,3 +1316,60 @@ class EnvoyEnpowerEntity(EnvoySensorBaseEntity): enpower = self.data.enpower assert enpower is not None return self.entity_description.value_fn(enpower) + + +class EnvoyAcbBatteryPowerEntity(EnvoySensorBaseEntity): + """Envoy ACB Battery power sensor entity.""" + + entity_description: EnvoyAcbBatterySensorEntityDescription + + def __init__( + self, + coordinator: EnphaseUpdateCoordinator, + description: EnvoyAcbBatterySensorEntityDescription, + ) -> None: + """Initialize ACB Battery entity.""" + super().__init__(coordinator, description) + acb_data = self.data.acb_power + assert acb_data is not None + self._attr_unique_id = f"{self.envoy_serial_num}_{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, f"{self.envoy_serial_num}_acb")}, + manufacturer="Enphase", + model="ACB", + name=f"ACB {self.envoy_serial_num}", + via_device=(DOMAIN, self.envoy_serial_num), + ) + + @property + def native_value(self) -> int | str | None: + """Return the state of the ACB Battery power sensors.""" + acb = self.data.acb_power + assert acb is not None + return self.entity_description.value_fn(acb) + + +class EnvoyAcbBatteryEnergyEntity(EnvoySystemSensorEntity): + """Envoy combined ACB and Ensemble Battery Aggregate energy sensor entity.""" + + entity_description: EnvoyAcbBatterySensorEntityDescription + + @property + def native_value(self) -> int | str: + """Return the state of the aggregate energy sensors.""" + acb = self.data.acb_power + assert acb is not None + return self.entity_description.value_fn(acb) + + +class AggregateBatteryEntity(EnvoySystemSensorEntity): + """Envoy combined ACB and Ensemble Battery Aggregate sensor entity.""" + + entity_description: EnvoyAggregateBatterySensorEntityDescription + + @property + def native_value(self) -> int: + """Return the state of the aggregate sensors.""" + battery_aggregate = self.data.battery_aggregate + assert battery_aggregate is not None + return self.entity_description.value_fn(battery_aggregate) diff --git a/homeassistant/components/enphase_envoy/strings.json b/homeassistant/components/enphase_envoy/strings.json index 2d91b3b0960..a338deb9638 100644 --- a/homeassistant/components/enphase_envoy/strings.json +++ b/homeassistant/components/enphase_envoy/strings.json @@ -337,6 +337,30 @@ }, "configured_reserve_soc": { "name": "Configured reserve battery level" + }, + "acb_battery_state": { + "name": "Battery state", + "state": { + "discharging": "Discharging", + "idle": "[%key:common::state::idle%]", + "charging": "Charging", + "full": "Full" + } + }, + "acb_available_energy": { + "name": "Available ACB battery energy" + }, + "acb_max_capacity": { + "name": "ACB Battery capacity" + }, + "aggregated_available_energy": { + "name": "Aggregated available battery energy" + }, + "aggregated_max_capacity": { + "name": "Aggregated Battery capacity" + }, + "aggregated_soc": { + "name": 
"Aggregated battery soc" } }, "switch": { diff --git a/tests/components/enphase_envoy/conftest.py b/tests/components/enphase_envoy/conftest.py index 541b6f96e19..b860d49aa6b 100644 --- a/tests/components/enphase_envoy/conftest.py +++ b/tests/components/enphase_envoy/conftest.py @@ -6,6 +6,8 @@ from unittest.mock import AsyncMock, Mock, patch import jwt from pyenphase import ( + EnvoyACBPower, + EnvoyBatteryAggregate, EnvoyData, EnvoyEncharge, EnvoyEnchargeAggregate, @@ -172,6 +174,8 @@ def _load_json_2_production_data( mocked_data.system_production_phases[sub_item] = EnvoySystemProduction( **item_data ) + if item := json_fixture["data"].get("acb_power"): + mocked_data.acb_power = EnvoyACBPower(**item) def _load_json_2_meter_data( @@ -245,6 +249,8 @@ def _load_json_2_encharge_enpower_data( mocked_data.dry_contact_settings[sub_item] = EnvoyDryContactSettings( **item_data ) + if item := json_fixture["data"].get("battery_aggregate"): + mocked_data.battery_aggregate = EnvoyBatteryAggregate(**item) def _load_json_2_raw_data(mocked_data: EnvoyData, json_fixture: dict[str, Any]) -> None: diff --git a/tests/components/enphase_envoy/fixtures/envoy_acb_batt.json b/tests/components/enphase_envoy/fixtures/envoy_acb_batt.json new file mode 100644 index 00000000000..618b40027b8 --- /dev/null +++ b/tests/components/enphase_envoy/fixtures/envoy_acb_batt.json @@ -0,0 +1,274 @@ +{ + "serial_number": "1234", + "firmware": "7.6.358", + "part_number": "800-00654-r08", + "envoy_model": "Envoy, phases: 3, phase mode: three, net-consumption CT, production CT", + "supported_features": 1759, + "phase_mode": "three", + "phase_count": 3, + "active_phase_count": 0, + "ct_meter_count": 2, + "consumption_meter_type": "net-consumption", + "production_meter_type": "production", + "storage_meter_type": null, + "data": { + "encharge_inventory": { + "123456": { + "admin_state": 6, + "admin_state_str": "ENCHG_STATE_READY", + "bmu_firmware_version": "2.1.16", + "comm_level_2_4_ghz": 4, + "comm_level_sub_ghz": 4, + "communicating": true, + "dc_switch_off": false, + "encharge_capacity": 3500, + "encharge_revision": 2, + "firmware_loaded_date": 1714736645, + "firmware_version": "2.6.6618_rel/22.11", + "installed_date": 1714736645, + "last_report_date": 1714804173, + "led_status": 17, + "max_cell_temp": 16, + "operating": true, + "part_number": "830-01760-r46", + "percent_full": 54, + "serial_number": "122327081322", + "temperature": 16, + "temperature_unit": "C", + "zigbee_dongle_fw_version": "100F" + } + }, + "encharge_power": { + "123456": { + "apparent_power_mva": 105, + "real_power_mw": 105, + "soc": 54 + } + }, + "encharge_aggregate": { + "available_energy": 1890, + "backup_reserve": 0, + "state_of_charge": 54, + "reserve_state_of_charge": 0, + "configured_reserve_state_of_charge": 0, + "max_available_capacity": 3500 + }, + "enpower": null, + "acb_power": { + "power": 260, + "charge_wh": 930, + "state_of_charge": 25, + "state": "discharging", + "batteries": 3 + }, + "battery_aggregate": { + "available_energy": 2820, + "state_of_charge": 39, + "max_available_capacity": 7220 + }, + "system_consumption": { + "watt_hours_lifetime": 1234, + "watt_hours_last_7_days": 1234, + "watt_hours_today": 1234, + "watts_now": 1234 + }, + "system_production": { + "watt_hours_lifetime": 1234, + "watt_hours_last_7_days": 1234, + "watt_hours_today": 1234, + "watts_now": 1234 + }, + "system_consumption_phases": null, + "system_production_phases": null, + "system_net_consumption": { + "watt_hours_lifetime": 4321, + "watt_hours_last_7_days": -1, + 
"watt_hours_today": -1, + "watts_now": 2341 + }, + "system_net_consumption_phases": null, + "ctmeter_production": { + "eid": "100000010", + "timestamp": 1708006110, + "energy_delivered": 11234, + "energy_received": 12345, + "active_power": 100, + "power_factor": 0.11, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": ["production-imbalance", "power-on-unused-phase"] + }, + "ctmeter_consumption": { + "eid": "100000020", + "timestamp": 1708006120, + "energy_delivered": 21234, + "energy_received": 22345, + "active_power": 101, + "power_factor": 0.21, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + }, + "ctmeter_storage": null, + "ctmeter_production_phases": { + "L1": { + "eid": "100000011", + "timestamp": 1708006111, + "energy_delivered": 112341, + "energy_received": 123451, + "active_power": 20, + "power_factor": 0.12, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": ["production-imbalance"] + }, + "L2": { + "eid": "100000012", + "timestamp": 1708006112, + "energy_delivered": 112342, + "energy_received": 123452, + "active_power": 30, + "power_factor": 0.13, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": ["power-on-unused-phase"] + }, + "L3": { + "eid": "100000013", + "timestamp": 1708006113, + "energy_delivered": 112343, + "energy_received": 123453, + "active_power": 50, + "power_factor": 0.14, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": [] + } + }, + "ctmeter_consumption_phases": { + "L1": { + "eid": "100000021", + "timestamp": 1708006121, + "energy_delivered": 212341, + "energy_received": 223451, + "active_power": 21, + "power_factor": 0.22, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + }, + "L2": { + "eid": "100000022", + "timestamp": 1708006122, + "energy_delivered": 212342, + "energy_received": 223452, + "active_power": 31, + "power_factor": 0.23, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + }, + "L3": { + "eid": "100000023", + "timestamp": 1708006123, + "energy_delivered": 212343, + "energy_received": 223453, + "active_power": 51, + "power_factor": 0.24, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + } + }, + "ctmeter_storage_phases": null, + "dry_contact_status": {}, + "dry_contact_settings": {}, + "inverters": { + "1": { + "serial_number": "1", + "last_report_date": 1, + "last_report_watts": 1, + "max_report_watts": 1 + } + }, + "tariff": { + "currency": { + "code": "EUR" + }, + "logger": "mylogger", + "date": "1714749724", + "storage_settings": { + "mode": "self-consumption", + "operation_mode_sub_type": "", + "reserved_soc": 0.0, + "very_low_soc": 5, + "charge_from_grid": true, + "date": "1714749724" + }, + "single_rate": { 
+ "rate": 0.0, + "sell": 0.0 + }, + "seasons": [ + { + "id": "all_year_long", + "start": "1/1", + "days": [ + { + "id": "all_days", + "days": "Mon,Tue,Wed,Thu,Fri,Sat,Sun", + "must_charge_start": 0, + "must_charge_duration": 0, + "must_charge_mode": "CP", + "enable_discharge_to_grid": false, + "periods": [ + { + "id": "period_1", + "start": 0, + "rate": 0.0 + } + ] + } + ], + "tiers": [] + } + ], + "seasons_sell": [] + }, + "raw": { + "varies_by": "firmware_version" + } + } +} diff --git a/tests/components/enphase_envoy/snapshots/test_sensor.ambr b/tests/components/enphase_envoy/snapshots/test_sensor.ambr index c43325a639d..d6a523a3e15 100644 --- a/tests/components/enphase_envoy/snapshots/test_sensor.ambr +++ b/tests/components/enphase_envoy/snapshots/test_sensor.ambr @@ -1838,6 +1838,4860 @@ 'state': '1970-01-01T00:00:01+00:00', }) # --- +# name: test_sensor[envoy_acb_batt][sensor.acb_1234_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.acb_1234_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234_acb_soc', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.acb_1234_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'ACB 1234 Battery', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.acb_1234_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '25', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.acb_1234_battery_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'discharging', + 'idle', + 'charging', + 'full', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.acb_1234_battery_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery state', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'acb_battery_state', + 'unique_id': '1234_acb_battery_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.acb_1234_battery_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'ACB 1234 Battery state', + 'options': list([ + 'discharging', + 'idle', + 'charging', + 'full', + ]), + }), + 'context': , + 'entity_id': 'sensor.acb_1234_battery_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'discharging', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.acb_1234_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, 
+ 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.acb_1234_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234_acb_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.acb_1234_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'ACB 1234 Power', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.acb_1234_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '260', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.encharge_123456_apparent_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.encharge_123456_apparent_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_apparent_power_mva', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.encharge_123456_apparent_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Encharge 123456 Apparent power', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.encharge_123456_apparent_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.105', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.encharge_123456_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.encharge_123456_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_soc', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.encharge_123456_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Encharge 123456 Battery', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.encharge_123456_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '54', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.encharge_123456_last_reported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 
'entity_id': 'sensor.encharge_123456_last_reported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last reported', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_reported', + 'unique_id': '123456_last_reported', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.encharge_123456_last_reported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Encharge 123456 Last reported', + }), + 'context': , + 'entity_id': 'sensor.encharge_123456_last_reported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-05-04T06:29:33+00:00', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.encharge_123456_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.encharge_123456_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_real_power_mw', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.encharge_123456_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Encharge 123456 Power', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.encharge_123456_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.105', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.encharge_123456_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.encharge_123456_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.encharge_123456_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Encharge 123456 Temperature', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.encharge_123456_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_aggregated_available_battery_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': None, + 'entity_id': 'sensor.envoy_1234_aggregated_available_battery_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Aggregated available battery energy', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'aggregated_available_energy', + 'unique_id': '1234_aggregated_available_energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_aggregated_available_battery_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy_storage', + 'friendly_name': 'Envoy 1234 Aggregated available battery energy', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_aggregated_available_battery_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2820', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_aggregated_battery_capacity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_aggregated_battery_capacity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Aggregated Battery capacity', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'aggregated_max_capacity', + 'unique_id': '1234_aggregated_max_battery_capacity', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_aggregated_battery_capacity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy_storage', + 'friendly_name': 'Envoy 1234 Aggregated Battery capacity', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_aggregated_battery_capacity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7220', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_aggregated_battery_soc-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_aggregated_battery_soc', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Aggregated battery soc', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'aggregated_soc', + 'unique_id': '1234_aggregated_soc', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_aggregated_battery_soc-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Envoy 1234 Aggregated battery soc', + 'icon': 'mdi:flash', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 
'sensor.envoy_1234_aggregated_battery_soc', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '39', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_available_acb_battery_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_available_acb_battery_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Available ACB battery energy', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'acb_available_energy', + 'unique_id': '1234_acb_available_energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_available_acb_battery_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy_storage', + 'friendly_name': 'Envoy 1234 Available ACB battery energy', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_available_acb_battery_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '930', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_available_battery_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_available_battery_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Available battery energy', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'available_energy', + 'unique_id': '1234_available_energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_available_battery_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Available battery energy', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_available_battery_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1890', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_balanced_net_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'balanced net power consumption', + 
'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balanced_net_consumption', + 'unique_id': '1234_balanced_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_balanced_net_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.341', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Battery', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234_battery_level', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Envoy 1234 Battery', + 'icon': 'mdi:flash', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '54', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_battery_capacity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_battery_capacity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Battery capacity', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'max_capacity', + 'unique_id': '1234_max_capacity', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_battery_capacity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Battery capacity', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_battery_capacity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3500', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_current_net_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', + 'has_entity_name': True, 
+ 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption', + 'unique_id': '1234_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_current_net_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.101', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_current_net_power_consumption_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '1234_net_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_current_net_power_consumption_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.021', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_current_net_power_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '1234_net_consumption_l2', + 
'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_current_net_power_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.031', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_current_net_power_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '1234_net_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_current_net_power_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.051', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_current_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption', + 'unique_id': '1234_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_current_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: 
test_sensor[envoy_acb_batt][sensor.envoy_1234_current_power_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production', + 'unique_id': '1234_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_current_power_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_energy_consumption_last_seven_days-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption', + 'unique_id': '1234_seven_days_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_energy_consumption_last_seven_days-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption last seven days', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_energy_consumption_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 
'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption', + 'unique_id': '1234_daily_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_energy_consumption_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption today', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_energy_production_last_seven_days-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production', + 'unique_id': '1234_seven_days_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_energy_production_last_seven_days-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production last seven days', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_energy_production_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production', + 'unique_id': '1234_daily_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_energy_production_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production today', 
+ 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency', + 'unique_id': '1234_frequency', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '1234_frequency_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency net consumption CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l2', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '1234_frequency_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency net consumption CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '1234_frequency_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency net consumption CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency', + 'unique_id': '1234_production_ct_frequency', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 
1234 Frequency production CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency_phase', + 'unique_id': '1234_production_ct_frequency_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency_phase', + 'unique_id': '1234_production_ct_frequency_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l3', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency_phase', + 'unique_id': '1234_production_ct_frequency_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime balanced net energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_balanced_net_consumption', + 'unique_id': '1234_lifetime_balanced_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.321', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption', + 'unique_id': 
'1234_lifetime_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.001234', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_energy_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production', + 'unique_id': '1234_lifetime_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_energy_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.001234', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption', + 'unique_id': '1234_lifetime_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.021234', + }) +# --- 
+# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_consumption_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '1234_lifetime_net_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_consumption_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.212341', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '1234_lifetime_net_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.212342', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l3', + 'has_entity_name': 
True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '1234_lifetime_net_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.212343', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production', + 'unique_id': '1234_lifetime_net_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.022345', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_production_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'lifetime_net_production_phase', + 'unique_id': '1234_lifetime_net_production_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_production_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.223451', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_production_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '1234_lifetime_net_production_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_production_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.223452', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_production_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '1234_lifetime_net_production_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_production_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.envoy_1234_lifetime_net_energy_production_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.223453', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags', + 'unique_id': '1234_net_consumption_ct_status_flags', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '1234_net_consumption_ct_status_flags_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l1', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 
'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '1234_net_consumption_ct_status_flags_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l2', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '1234_net_consumption_ct_status_flags_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l3', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags', + 'unique_id': '1234_production_ct_status_flags', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: 
test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '1234_production_ct_status_flags_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT l1', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '1234_production_ct_status_flags_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT l2', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'production_ct_status_flags_phase', + 'unique_id': '1234_production_ct_status_flags_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT l3', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status', + 'unique_id': '1234_net_consumption_ct_metering_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '1234_net_consumption_ct_metering_status_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT l1', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: 
test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '1234_net_consumption_ct_metering_status_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT l2', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '1234_net_consumption_ct_metering_status_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT l3', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 
'original_name': 'Metering status production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status', + 'unique_id': '1234_production_ct_metering_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '1234_production_ct_metering_status_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT l1', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '1234_production_ct_metering_status_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT l2', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '1234_production_ct_metering_status_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT l3', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_net_consumption_ct_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current', + 'unique_id': '1234_net_ct_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_net_consumption_ct_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_net_consumption_ct_current_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 
'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current_phase', + 'unique_id': '1234_net_ct_current_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_net_consumption_ct_current_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_net_consumption_ct_current_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current_phase', + 'unique_id': '1234_net_ct_current_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_net_consumption_ct_current_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_net_consumption_ct_current_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current_phase', + 'unique_id': '1234_net_ct_current_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_net_consumption_ct_current_l3-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor', + 'unique_id': '1234_net_ct_powerfactor', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.21', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor_phase', + 'unique_id': '1234_net_ct_powerfactor_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.22', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor_phase', + 'unique_id': '1234_net_ct_powerfactor_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.23', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor_phase', + 'unique_id': '1234_net_ct_powerfactor_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.24', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'powerfactor production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor', + 'unique_id': '1234_production_ct_powerfactor', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.11', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor_phase', + 'unique_id': '1234_production_ct_powerfactor_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.12', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor_phase', + 'unique_id': '1234_production_ct_powerfactor_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.13', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, 
+ 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor_phase', + 'unique_id': '1234_production_ct_powerfactor_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor production CT l3', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.14', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_production_ct_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current', + 'unique_id': '1234_production_ct_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_production_ct_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_production_ct_current_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current_phase', + 
'unique_id': '1234_production_ct_current_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_production_ct_current_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_production_ct_current_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current_phase', + 'unique_id': '1234_production_ct_current_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_production_ct_current_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_production_ct_current_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current_phase', + 'unique_id': '1234_production_ct_current_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_production_ct_current_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- +# name: 
test_sensor[envoy_acb_batt][sensor.envoy_1234_reserve_battery_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_reserve_battery_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Reserve battery energy', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reserve_energy', + 'unique_id': '1234_reserve_energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_reserve_battery_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Reserve battery energy', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_reserve_battery_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_reserve_battery_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_reserve_battery_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Reserve battery level', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reserve_soc', + 'unique_id': '1234_reserve_soc', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_reserve_battery_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Envoy 1234 Reserve battery level', + 'icon': 'mdi:flash', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_reserve_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage', + 'unique_id': '1234_voltage', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '1234_voltage_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '1234_voltage_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_net_consumption_ct_l3-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '1234_voltage_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage', + 'unique_id': '1234_production_ct_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 
'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage_phase', + 'unique_id': '1234_production_ct_voltage_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage_phase', + 'unique_id': '1234_production_ct_voltage_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage_phase', + 'unique_id': '1234_production_ct_voltage_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT l3', + 'icon': 'mdi:flash', + 
'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.inverter_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': None, + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.inverter_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter 1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.inverter_1_last_reported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1_last_reported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Last reported', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_reported', + 'unique_id': '1_last_reported', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.inverter_1_last_reported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Inverter 1 Last reported', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.inverter_1_last_reported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1970-01-01T00:00:01+00:00', + }) +# --- # name: test_sensor[envoy_eu_batt][sensor.encharge_123456_apparent_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/enphase_envoy/test_sensor.py b/tests/components/enphase_envoy/test_sensor.py index 784dfe54073..89f28c74514 100644 --- a/tests/components/enphase_envoy/test_sensor.py +++ b/tests/components/enphase_envoy/test_sensor.py @@ -31,6 +31,7 @@ from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_plat "envoy_metered_batt_relay", "envoy_nobatt_metered_3p", "envoy_tot_cons_metered", + "envoy_acb_batt", ], indirect=["mock_envoy"], ) @@ -65,6 +66,7 @@ PRODUCTION_NAMES: tuple[str, ...] = ( "envoy_metered_batt_relay", "envoy_nobatt_metered_3p", "envoy_tot_cons_metered", + "envoy_acb_batt", ], indirect=["mock_envoy"], ) @@ -154,6 +156,7 @@ CONSUMPTION_NAMES: tuple[str, ...] 
= ( "envoy_eu_batt", "envoy_metered_batt_relay", "envoy_nobatt_metered_3p", + "envoy_acb_batt", ], indirect=["mock_envoy"], ) @@ -197,6 +200,7 @@ NET_CONSUMPTION_NAMES: tuple[str, ...] = ( "envoy_metered_batt_relay", "envoy_nobatt_metered_3p", "envoy_tot_cons_metered", + "envoy_acb_batt", ], indirect=["mock_envoy"], ) @@ -803,6 +807,7 @@ async def test_sensor_inverter_disabled_by_integration( ("mock_envoy"), [ "envoy_metered_batt_relay", + "envoy_acb_batt", ], indirect=["mock_envoy"], ) @@ -873,6 +878,7 @@ async def test_sensor_encharge_enpower_data( ("mock_envoy"), [ "envoy_metered_batt_relay", + "envoy_acb_batt", ], indirect=["mock_envoy"], ) @@ -930,6 +936,101 @@ async def test_sensor_encharge_power_data( ) +ACB_POWER_INT_NAMES: tuple[str, ...] = ( + "power", + "battery", +) +ACB_POWER_STR_NAMES: tuple[str, ...] = ("battery_state",) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_acb_batt", + ], + indirect=["mock_envoy"], +) +async def test_sensor_acb_power_data( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_envoy: AsyncMock, +) -> None: + """Test enphase_envoy acb battery power entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.acb_{sn}" + + data = mock_envoy.data.acb_power + ACB_POWER_INT_TARGETS: tuple[int, ...] = ( + data.power, + data.state_of_charge, + ) + ACB_POWER_STR_TARGETS: tuple[int, ...] = (data.state,) + + for name, target in list( + zip(ACB_POWER_INT_NAMES, ACB_POWER_INT_TARGETS, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert int(entity_state.state) == target + + for name, target in list( + zip(ACB_POWER_STR_NAMES, ACB_POWER_STR_TARGETS, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert entity_state.state == target + + +AGGREGATED_BATTERY_NAMES: tuple[str, ...] = ( + "aggregated_battery_soc", + "aggregated_available_battery_energy", + "aggregated_battery_capacity", +) +AGGREGATED_ACB_BATTERY_NAMES: tuple[str, ...] = ("available_acb_battery_energy",) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_acb_batt", + ], + indirect=["mock_envoy"], +) +async def test_sensor_aggegated_battery_data( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_envoy: AsyncMock, +) -> None: + """Test enphase_envoy aggregated batteries entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + data = mock_envoy.data.battery_aggregate + AGGREGATED_TARGETS: tuple[int, ...] = ( + data.state_of_charge, + data.available_energy, + data.max_available_capacity, + ) + + for name, target in list( + zip(AGGREGATED_BATTERY_NAMES, AGGREGATED_TARGETS, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert int(entity_state.state) == target + + data = mock_envoy.data.acb_power + AGGREGATED_ACB_TARGETS: tuple[int, ...] 
= (data.charge_wh,) + for name, target in list( + zip(AGGREGATED_ACB_BATTERY_NAMES, AGGREGATED_ACB_TARGETS, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert int(entity_state.state) == target + + def integration_disabled_entities( entity_registry: er.EntityRegistry, config_entry: MockConfigEntry ) -> list[str]: From a449ca65be6013a2ec70dc7a9560b339e10b0404 Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Wed, 18 Dec 2024 03:33:17 -0500 Subject: [PATCH 0798/1198] Improve test coverage for Russound RIO (#133096) * Improve test coverage for Russound RIO * Update * Update --- .../russound_rio/quality_scale.yaml | 2 +- tests/components/russound_rio/conftest.py | 49 +++++-- tests/components/russound_rio/const.py | 5 - .../russound_rio/fixtures/get_zones.json | 54 +++++--- .../russound_rio/test_media_player.py | 126 ++++++++++++++++++ 5 files changed, 200 insertions(+), 36 deletions(-) diff --git a/homeassistant/components/russound_rio/quality_scale.yaml b/homeassistant/components/russound_rio/quality_scale.yaml index 63693ee6259..6edf439cae6 100644 --- a/homeassistant/components/russound_rio/quality_scale.yaml +++ b/homeassistant/components/russound_rio/quality_scale.yaml @@ -38,7 +38,7 @@ rules: comment: | This integration does not require authentication. parallel-updates: done - test-coverage: todo + test-coverage: done integration-owner: done docs-installation-parameters: todo docs-configuration-parameters: diff --git a/tests/components/russound_rio/conftest.py b/tests/components/russound_rio/conftest.py index 3321d4160b9..b9e6e89812a 100644 --- a/tests/components/russound_rio/conftest.py +++ b/tests/components/russound_rio/conftest.py @@ -47,27 +47,54 @@ def mock_russound_client() -> Generator[AsyncMock]: ), ): client = mock_client.return_value - zones = { - int(k): ZoneControlSurface.from_dict(v) - for k, v in load_json_object_fixture("get_zones.json", DOMAIN).items() + controller_zones = { + int(controller_id): { + int(zone_id): ZoneControlSurface.from_dict(zone) + for zone_id, zone in v["zones"].items() + } + for controller_id, v in load_json_object_fixture("get_zones.json", DOMAIN)[ + "controllers" + ].items() } client.sources = { int(k): Source.from_dict(v) for k, v in load_json_object_fixture("get_sources.json", DOMAIN).items() } client.state = load_json_object_fixture("get_state.json", DOMAIN) - for k, v in zones.items(): - v.device_str = zone_device_str(1, k) - v.fetch_current_source = Mock( - side_effect=lambda current_source=v.current_source: client.sources.get( - int(current_source) + for controller_id, zones in controller_zones.items(): + for zone_id, zone in zones.items(): + zone.device_str = zone_device_str(controller_id, zone_id) + zone.fetch_current_source = Mock( + side_effect=lambda current_source=zone.current_source: client.sources.get( + int(current_source) + ) ) - ) + zone.volume_up = AsyncMock() + zone.volume_down = AsyncMock() + zone.set_volume = AsyncMock() + zone.zone_on = AsyncMock() + zone.zone_off = AsyncMock() + zone.select_source = AsyncMock() client.controllers = { 1: Controller( - 1, "MCA-C5", client, controller_device_str(1), HARDWARE_MAC, None, zones - ) + 1, + MODEL, + client, + controller_device_str(1), + HARDWARE_MAC, + None, + controller_zones[1], + ), + 2: Controller( + 2, + MODEL, + client, + controller_device_str(2), + None, + None, + controller_zones[2], + ), } client.connection_handler = RussoundTcpConnectionHandler( MOCK_CONFIG[CONF_HOST], 
MOCK_CONFIG[CONF_PORT] diff --git a/tests/components/russound_rio/const.py b/tests/components/russound_rio/const.py index 18f75838525..8269e825e33 100644 --- a/tests/components/russound_rio/const.py +++ b/tests/components/russound_rio/const.py @@ -1,7 +1,5 @@ """Constants for russound_rio tests.""" -from collections import namedtuple - from homeassistant.components.media_player import DOMAIN as MP_DOMAIN from homeassistant.const import CONF_HOST, CONF_PORT @@ -19,9 +17,6 @@ MOCK_RECONFIGURATION_CONFIG = { CONF_PORT: 9622, } -_CONTROLLER = namedtuple("Controller", ["mac_address", "controller_type"]) # noqa: PYI024 -MOCK_CONTROLLERS = {1: _CONTROLLER(mac_address=HARDWARE_MAC, controller_type=MODEL)} - DEVICE_NAME = "mca_c5" NAME_ZONE_1 = "backyard" ENTITY_ID_ZONE_1 = f"{MP_DOMAIN}.{DEVICE_NAME}_{NAME_ZONE_1}" diff --git a/tests/components/russound_rio/fixtures/get_zones.json b/tests/components/russound_rio/fixtures/get_zones.json index 396310339b3..e1077944593 100644 --- a/tests/components/russound_rio/fixtures/get_zones.json +++ b/tests/components/russound_rio/fixtures/get_zones.json @@ -1,22 +1,38 @@ { - "1": { - "name": "Backyard", - "volume": "10", - "status": "ON", - "enabled": "True", - "current_source": "1" - }, - "2": { - "name": "Kitchen", - "volume": "50", - "status": "OFF", - "enabled": "True", - "current_source": "2" - }, - "3": { - "name": "Bedroom", - "volume": "10", - "status": "OFF", - "enabled": "False" + "controllers": { + "1": { + "zones": { + "1": { + "name": "Backyard", + "volume": "10", + "status": "ON", + "enabled": "True", + "current_source": "1" + }, + "2": { + "name": "Kitchen", + "volume": "50", + "status": "OFF", + "enabled": "True", + "current_source": "2" + }, + "3": { + "name": "Bedroom", + "volume": "10", + "status": "OFF", + "enabled": "False" + } + } + }, + "2": { + "zones": { + "9": { + "name": "Living Room", + "volume": "10", + "status": "OFF", + "enabled": "True" + } + } + } } } diff --git a/tests/components/russound_rio/test_media_player.py b/tests/components/russound_rio/test_media_player.py index c740ec4f39e..1ff87ee8b0e 100644 --- a/tests/components/russound_rio/test_media_player.py +++ b/tests/components/russound_rio/test_media_player.py @@ -2,10 +2,23 @@ from unittest.mock import AsyncMock +from aiorussound.exceptions import CommandError from aiorussound.models import PlayStatus import pytest +from homeassistant.components.media_player import ( + ATTR_INPUT_SOURCE, + ATTR_MEDIA_VOLUME_LEVEL, + DOMAIN as MP_DOMAIN, + SERVICE_SELECT_SOURCE, +) from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + SERVICE_VOLUME_DOWN, + SERVICE_VOLUME_SET, + SERVICE_VOLUME_UP, STATE_BUFFERING, STATE_IDLE, STATE_OFF, @@ -14,6 +27,7 @@ from homeassistant.const import ( STATE_PLAYING, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from . 
import mock_state_update, setup_integration from .const import ENTITY_ID_ZONE_1 @@ -50,3 +64,115 @@ async def test_entity_state( state = hass.states.get(ENTITY_ID_ZONE_1) assert state.state == media_player_state + + +async def test_media_volume( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_russound_client: AsyncMock, +) -> None: + """Test volume service.""" + await setup_integration(hass, mock_config_entry) + + # Test volume up + await hass.services.async_call( + MP_DOMAIN, + SERVICE_VOLUME_UP, + {ATTR_ENTITY_ID: ENTITY_ID_ZONE_1}, + blocking=True, + ) + + mock_russound_client.controllers[1].zones[1].volume_up.assert_called_once() + + # Test volume down + await hass.services.async_call( + MP_DOMAIN, + SERVICE_VOLUME_DOWN, + {ATTR_ENTITY_ID: ENTITY_ID_ZONE_1}, + blocking=True, + ) + + mock_russound_client.controllers[1].zones[1].volume_down.assert_called_once() + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_VOLUME_SET, + {ATTR_ENTITY_ID: ENTITY_ID_ZONE_1, ATTR_MEDIA_VOLUME_LEVEL: 0.30}, + blocking=True, + ) + + mock_russound_client.controllers[1].zones[1].set_volume.assert_called_once_with( + "15" + ) + + +@pytest.mark.parametrize( + ("source_name", "source_id"), + [ + ("Aux", 1), + ("Spotify", 2), + ], +) +async def test_source_service( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_russound_client: AsyncMock, + source_name: str, + source_id: int, +) -> None: + """Test source service.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_SELECT_SOURCE, + {ATTR_ENTITY_ID: ENTITY_ID_ZONE_1, ATTR_INPUT_SOURCE: source_name}, + blocking=True, + ) + + mock_russound_client.controllers[1].zones[1].select_source.assert_called_once_with( + source_id + ) + + +async def test_invalid_source_service( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_russound_client: AsyncMock, +) -> None: + """Test source service with invalid source ID.""" + await setup_integration(hass, mock_config_entry) + + mock_russound_client.controllers[1].zones[ + 1 + ].select_source.side_effect = CommandError + + with pytest.raises( + HomeAssistantError, + match="Error executing async_select_source on entity media_player.mca_c5_backyard", + ): + await hass.services.async_call( + MP_DOMAIN, + SERVICE_SELECT_SOURCE, + {ATTR_ENTITY_ID: ENTITY_ID_ZONE_1, ATTR_INPUT_SOURCE: "Aux"}, + blocking=True, + ) + + +async def test_power_service( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_russound_client: AsyncMock, +) -> None: + """Test power service.""" + await setup_integration(hass, mock_config_entry) + + data = {ATTR_ENTITY_ID: ENTITY_ID_ZONE_1} + + await hass.services.async_call(MP_DOMAIN, SERVICE_TURN_ON, data, blocking=True) + + mock_russound_client.controllers[1].zones[1].zone_on.assert_called_once() + + await hass.services.async_call(MP_DOMAIN, SERVICE_TURN_OFF, data, blocking=True) + + mock_russound_client.controllers[1].zones[1].zone_off.assert_called_once() From c1cf0e23b2c11d7de581f7d3bf9a59d241db4236 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Benecke Date: Wed, 18 Dec 2024 10:10:42 +0100 Subject: [PATCH 0799/1198] Lift SABnzbd to bronze quality scale (#133453) --- homeassistant/components/sabnzbd/manifest.json | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/sabnzbd/manifest.json b/homeassistant/components/sabnzbd/manifest.json index afc35a2340e..f1b8a17134b 100644 --- a/homeassistant/components/sabnzbd/manifest.json +++ 
b/homeassistant/components/sabnzbd/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/sabnzbd", "iot_class": "local_polling", "loggers": ["pysabnzbd"], + "quality_scale": "bronze", "requirements": ["pysabnzbd==1.1.1"] } From 413a578fdbc7986d005c53690a1aaca82a50acb5 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Wed, 18 Dec 2024 10:19:57 +0100 Subject: [PATCH 0800/1198] Bump pyOverkiz to 1.15.3 (#133458) --- homeassistant/components/overkiz/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/overkiz/manifest.json b/homeassistant/components/overkiz/manifest.json index 8c750aec6bd..9ab901d5005 100644 --- a/homeassistant/components/overkiz/manifest.json +++ b/homeassistant/components/overkiz/manifest.json @@ -20,7 +20,7 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["boto3", "botocore", "pyhumps", "pyoverkiz", "s3transfer"], - "requirements": ["pyoverkiz==1.15.0"], + "requirements": ["pyoverkiz==1.15.3"], "zeroconf": [ { "type": "_kizbox._tcp.local.", diff --git a/requirements_all.txt b/requirements_all.txt index 37504e5ec41..b01683cbf76 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2162,7 +2162,7 @@ pyotgw==2.2.2 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.15.0 +pyoverkiz==1.15.3 # homeassistant.components.onewire pyownet==0.10.0.post1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 55bb0e6ac1f..5b339e061b1 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1755,7 +1755,7 @@ pyotgw==2.2.2 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.15.0 +pyoverkiz==1.15.3 # homeassistant.components.onewire pyownet==0.10.0.post1 From 5fb5e933e2759d3e71b76deeab850fdd9986806f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ab=C3=ADlio=20Costa?= Date: Wed, 18 Dec 2024 09:20:14 +0000 Subject: [PATCH 0801/1198] Use a common base entity for Idasen Desk (#132496) Co-authored-by: Joost Lekkerkerker --- .../components/idasen_desk/__init__.py | 46 +++++-------------- .../components/idasen_desk/button.py | 35 ++++++-------- .../components/idasen_desk/coordinator.py | 10 ++-- homeassistant/components/idasen_desk/cover.py | 36 ++++----------- .../components/idasen_desk/entity.py | 34 ++++++++++++++ .../components/idasen_desk/quality_scale.yaml | 7 +-- .../components/idasen_desk/sensor.py | 32 +++---------- 7 files changed, 82 insertions(+), 118 deletions(-) create mode 100644 homeassistant/components/idasen_desk/entity.py diff --git a/homeassistant/components/idasen_desk/__init__.py b/homeassistant/components/idasen_desk/__init__.py index 56a377ac2df..1aacea91723 100644 --- a/homeassistant/components/idasen_desk/__init__.py +++ b/homeassistant/components/idasen_desk/__init__.py @@ -4,53 +4,31 @@ from __future__ import annotations import logging -from attr import dataclass from bleak.exc import BleakError from idasen_ha.errors import AuthFailedError from homeassistant.components import bluetooth from homeassistant.components.bluetooth.match import ADDRESS, BluetoothCallbackMatcher from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_NAME, - CONF_ADDRESS, - EVENT_HOMEASSISTANT_STOP, - Platform, -) +from homeassistant.const import CONF_ADDRESS, EVENT_HOMEASSISTANT_STOP, Platform from homeassistant.core import Event, HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady -from 
homeassistant.helpers import device_registry as dr -from homeassistant.helpers.device_registry import DeviceInfo -from .const import DOMAIN from .coordinator import IdasenDeskCoordinator PLATFORMS: list[Platform] = [Platform.BUTTON, Platform.COVER, Platform.SENSOR] _LOGGER = logging.getLogger(__name__) - -@dataclass -class DeskData: - """Data for the Idasen Desk integration.""" - - address: str - device_info: DeviceInfo - coordinator: IdasenDeskCoordinator +type IdasenDeskConfigEntry = ConfigEntry[IdasenDeskCoordinator] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: IdasenDeskConfigEntry) -> bool: """Set up IKEA Idasen from a config entry.""" address: str = entry.data[CONF_ADDRESS].upper() coordinator = IdasenDeskCoordinator(hass, _LOGGER, entry.title, address) - device_info = DeviceInfo( - name=entry.title, - connections={(dr.CONNECTION_BLUETOOTH, address)}, - ) - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = DeskData( - address, device_info, coordinator - ) + entry.runtime_data = coordinator try: if not await coordinator.async_connect(): @@ -89,18 +67,18 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def _async_update_listener( + hass: HomeAssistant, entry: IdasenDeskConfigEntry +) -> None: """Handle options update.""" - data: DeskData = hass.data[DOMAIN][entry.entry_id] - if entry.title != data.device_info[ATTR_NAME]: - await hass.config_entries.async_reload(entry.entry_id) + await hass.config_entries.async_reload(entry.entry_id) -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: IdasenDeskConfigEntry) -> bool: """Unload a config entry.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - data: DeskData = hass.data[DOMAIN].pop(entry.entry_id) - await data.coordinator.async_disconnect() - bluetooth.async_rediscover_address(hass, data.address) + coordinator = entry.runtime_data + await coordinator.async_disconnect() + bluetooth.async_rediscover_address(hass, coordinator.address) return unload_ok diff --git a/homeassistant/components/idasen_desk/button.py b/homeassistant/components/idasen_desk/button.py index 0de3125576d..cd7553da1ac 100644 --- a/homeassistant/components/idasen_desk/button.py +++ b/homeassistant/components/idasen_desk/button.py @@ -6,14 +6,12 @@ import logging from typing import Any, Final from homeassistant.components.button import ButtonEntity, ButtonEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DeskData, IdasenDeskCoordinator -from .const import DOMAIN +from . 
import IdasenDeskConfigEntry, IdasenDeskCoordinator +from .entity import IdasenDeskEntity _LOGGER = logging.getLogger(__name__) @@ -45,43 +43,38 @@ BUTTONS: Final = [ async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: IdasenDeskConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set buttons for device.""" - data: DeskData = hass.data[DOMAIN][entry.entry_id] - async_add_entities( - IdasenDeskButton(data.address, data.device_info, data.coordinator, button) - for button in BUTTONS - ) + coordinator = entry.runtime_data + async_add_entities(IdasenDeskButton(coordinator, button) for button in BUTTONS) -class IdasenDeskButton(ButtonEntity): +class IdasenDeskButton(IdasenDeskEntity, ButtonEntity): """Defines a IdasenDesk button.""" entity_description: IdasenDeskButtonDescription - _attr_has_entity_name = True def __init__( self, - address: str, - device_info: DeviceInfo, coordinator: IdasenDeskCoordinator, description: IdasenDeskButtonDescription, ) -> None: """Initialize the IdasenDesk button entity.""" + super().__init__(f"{description.key}-{coordinator.address}", coordinator) self.entity_description = description - self._attr_unique_id = f"{description.key}-{address}" - self._attr_device_info = device_info - self._address = address - self._coordinator = coordinator - async def async_press(self) -> None: """Triggers the IdasenDesk button press service.""" _LOGGER.debug( "Trigger %s for %s", self.entity_description.key, - self._address, + self.coordinator.address, ) - await self.entity_description.press_action(self._coordinator)() + await self.entity_description.press_action(self.coordinator)() + + @property + def available(self) -> bool: + """Connect/disconnect buttons should always be available.""" + return True diff --git a/homeassistant/components/idasen_desk/coordinator.py b/homeassistant/components/idasen_desk/coordinator.py index 0661f2dede1..a84027a26c0 100644 --- a/homeassistant/components/idasen_desk/coordinator.py +++ b/homeassistant/components/idasen_desk/coordinator.py @@ -26,20 +26,20 @@ class IdasenDeskCoordinator(DataUpdateCoordinator[int | None]): """Init IdasenDeskCoordinator.""" super().__init__(hass, logger, name=name) - self._address = address + self.address = address self._expected_connected = False self.desk = Desk(self.async_set_updated_data) async def async_connect(self) -> bool: """Connect to desk.""" - _LOGGER.debug("Trying to connect %s", self._address) + _LOGGER.debug("Trying to connect %s", self.address) self._expected_connected = True ble_device = bluetooth.async_ble_device_from_address( - self.hass, self._address, connectable=True + self.hass, self.address, connectable=True ) if ble_device is None: - _LOGGER.debug("No BLEDevice for %s", self._address) + _LOGGER.debug("No BLEDevice for %s", self.address) return False await self.desk.connect(ble_device) return True @@ -47,7 +47,7 @@ class IdasenDeskCoordinator(DataUpdateCoordinator[int | None]): async def async_disconnect(self) -> None: """Disconnect from desk.""" self._expected_connected = False - _LOGGER.debug("Disconnecting from %s", self._address) + _LOGGER.debug("Disconnecting from %s", self.address) await self.desk.disconnect() async def async_connect_if_expected(self) -> None: diff --git a/homeassistant/components/idasen_desk/cover.py b/homeassistant/components/idasen_desk/cover.py index eb6bf5523de..95474ea8750 100644 --- a/homeassistant/components/idasen_desk/cover.py +++ b/homeassistant/components/idasen_desk/cover.py @@ -12,30 +12,25 @@ from 
homeassistant.components.cover import ( CoverEntity, CoverEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . import DeskData, IdasenDeskCoordinator -from .const import DOMAIN +from . import IdasenDeskConfigEntry, IdasenDeskCoordinator +from .entity import IdasenDeskEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: IdasenDeskConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the cover platform for Idasen Desk.""" - data: DeskData = hass.data[DOMAIN][entry.entry_id] - async_add_entities( - [IdasenDeskCover(data.address, data.device_info, data.coordinator)] - ) + coordinator = entry.runtime_data + async_add_entities([IdasenDeskCover(coordinator)]) -class IdasenDeskCover(CoordinatorEntity[IdasenDeskCoordinator], CoverEntity): +class IdasenDeskCover(IdasenDeskEntity, CoverEntity): """Representation of Idasen Desk device.""" _attr_device_class = CoverDeviceClass.DAMPER @@ -45,29 +40,14 @@ class IdasenDeskCover(CoordinatorEntity[IdasenDeskCoordinator], CoverEntity): | CoverEntityFeature.STOP | CoverEntityFeature.SET_POSITION ) - _attr_has_entity_name = True _attr_name = None _attr_translation_key = "desk" - def __init__( - self, - address: str, - device_info: DeviceInfo, - coordinator: IdasenDeskCoordinator, - ) -> None: + def __init__(self, coordinator: IdasenDeskCoordinator) -> None: """Initialize an Idasen Desk cover.""" - super().__init__(coordinator) - self._desk = coordinator.desk - self._attr_unique_id = address - self._attr_device_info = device_info - + super().__init__(coordinator.address, coordinator) self._attr_current_cover_position = self._desk.height_percent - @property - def available(self) -> bool: - """Return True if entity is available.""" - return super().available and self._desk.is_connected is True - @property def is_closed(self) -> bool: """Return if the cover is closed.""" diff --git a/homeassistant/components/idasen_desk/entity.py b/homeassistant/components/idasen_desk/entity.py new file mode 100644 index 00000000000..bda7afd528c --- /dev/null +++ b/homeassistant/components/idasen_desk/entity.py @@ -0,0 +1,34 @@ +"""Base entity for Idasen Desk.""" + +from __future__ import annotations + +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from . 
import IdasenDeskCoordinator + + +class IdasenDeskEntity(CoordinatorEntity[IdasenDeskCoordinator]): + """IdasenDesk sensor.""" + + _attr_has_entity_name = True + + def __init__( + self, + unique_id: str, + coordinator: IdasenDeskCoordinator, + ) -> None: + """Initialize the IdasenDesk sensor entity.""" + super().__init__(coordinator) + + self._attr_unique_id = unique_id + self._attr_device_info = dr.DeviceInfo( + manufacturer="LINAK", + connections={(dr.CONNECTION_BLUETOOTH, coordinator.address)}, + ) + self._desk = coordinator.desk + + @property + def available(self) -> bool: + """Return True if entity is available.""" + return super().available and self._desk.is_connected is True diff --git a/homeassistant/components/idasen_desk/quality_scale.yaml b/homeassistant/components/idasen_desk/quality_scale.yaml index 1b9ec8cd810..1908178ec15 100644 --- a/homeassistant/components/idasen_desk/quality_scale.yaml +++ b/homeassistant/components/idasen_desk/quality_scale.yaml @@ -9,10 +9,7 @@ rules: comment: | This integration does not use polling. brands: done - common-modules: - status: todo - comment: | - The cover and sensor entities could move common initialization to a base entity class. + common-modules: done config-flow-test-coverage: status: todo comment: | @@ -33,7 +30,7 @@ rules: entity-event-setup: done entity-unique-id: done has-entity-name: done - runtime-data: todo + runtime-data: done test-before-configure: done test-before-setup: done unique-config-entry: done diff --git a/homeassistant/components/idasen_desk/sensor.py b/homeassistant/components/idasen_desk/sensor.py index 8ed85d21a34..d4f629b85a8 100644 --- a/homeassistant/components/idasen_desk/sensor.py +++ b/homeassistant/components/idasen_desk/sensor.py @@ -6,7 +6,6 @@ from collections.abc import Callable from dataclasses import dataclass from typing import Any -from homeassistant import config_entries from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, @@ -15,12 +14,10 @@ from homeassistant.components.sensor import ( ) from homeassistant.const import UnitOfLength from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . import DeskData, IdasenDeskCoordinator -from .const import DOMAIN +from . 
import IdasenDeskConfigEntry, IdasenDeskCoordinator +from .entity import IdasenDeskEntity @dataclass(frozen=True, kw_only=True) @@ -46,51 +43,36 @@ SENSORS = ( async def async_setup_entry( hass: HomeAssistant, - entry: config_entries.ConfigEntry, + entry: IdasenDeskConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Idasen Desk sensors.""" - data: DeskData = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities( - IdasenDeskSensor( - data.address, data.device_info, data.coordinator, sensor_description - ) + IdasenDeskSensor(coordinator, sensor_description) for sensor_description in SENSORS ) -class IdasenDeskSensor(CoordinatorEntity[IdasenDeskCoordinator], SensorEntity): +class IdasenDeskSensor(IdasenDeskEntity, SensorEntity): """IdasenDesk sensor.""" entity_description: IdasenDeskSensorDescription - _attr_has_entity_name = True def __init__( self, - address: str, - device_info: DeviceInfo, coordinator: IdasenDeskCoordinator, description: IdasenDeskSensorDescription, ) -> None: """Initialize the IdasenDesk sensor entity.""" - super().__init__(coordinator) + super().__init__(f"{description.key}-{coordinator.address}", coordinator) self.entity_description = description - self._attr_unique_id = f"{description.key}-{address}" - self._attr_device_info = device_info - self._address = address - self._desk = coordinator.desk - async def async_added_to_hass(self) -> None: """When entity is added to hass.""" await super().async_added_to_hass() self._update_native_value() - @property - def available(self) -> bool: - """Return True if entity is available.""" - return super().available and self._desk.is_connected is True - @callback def _handle_coordinator_update(self, *args: Any) -> None: """Handle data update.""" From 39d781905de5bdce7325092427fc81969b57d4e2 Mon Sep 17 00:00:00 2001 From: Tomer Shemesh Date: Wed, 18 Dec 2024 04:21:37 -0500 Subject: [PATCH 0802/1198] Add ssdp discovery to Onkyo (#131066) --- CODEOWNERS | 4 +- homeassistant/components/onkyo/config_flow.py | 45 ++++++ homeassistant/components/onkyo/manifest.json | 42 ++++- homeassistant/generated/ssdp.py | 38 +++++ tests/components/onkyo/test_config_flow.py | 147 ++++++++++++++++++ 5 files changed, 272 insertions(+), 4 deletions(-) diff --git a/CODEOWNERS b/CODEOWNERS index f1c6aa4aea5..8effcc49336 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1066,8 +1066,8 @@ build.json @home-assistant/supervisor /tests/components/ondilo_ico/ @JeromeHXP /homeassistant/components/onewire/ @garbled1 @epenet /tests/components/onewire/ @garbled1 @epenet -/homeassistant/components/onkyo/ @arturpragacz -/tests/components/onkyo/ @arturpragacz +/homeassistant/components/onkyo/ @arturpragacz @eclair4151 +/tests/components/onkyo/ @arturpragacz @eclair4151 /homeassistant/components/onvif/ @hunterjm /tests/components/onvif/ @hunterjm /homeassistant/components/open_meteo/ @frenck diff --git a/homeassistant/components/onkyo/config_flow.py b/homeassistant/components/onkyo/config_flow.py index a8ced6fae64..a484b3aaa04 100644 --- a/homeassistant/components/onkyo/config_flow.py +++ b/homeassistant/components/onkyo/config_flow.py @@ -4,7 +4,9 @@ import logging from typing import Any import voluptuous as vol +from yarl import URL +from homeassistant.components import ssdp from homeassistant.config_entries import ( SOURCE_RECONFIGURE, ConfigEntry, @@ -165,6 +167,49 @@ class OnkyoConfigFlow(ConfigFlow, domain=DOMAIN): ), ) + async def async_step_ssdp( + self, discovery_info: ssdp.SsdpServiceInfo + ) -> 
ConfigFlowResult: + """Handle flow initialized by SSDP discovery.""" + _LOGGER.debug("Config flow start ssdp: %s", discovery_info) + + if udn := discovery_info.ssdp_udn: + udn_parts = udn.split(":") + if len(udn_parts) == 2: + uuid = udn_parts[1] + last_uuid_section = uuid.split("-")[-1].upper() + await self.async_set_unique_id(last_uuid_section) + self._abort_if_unique_id_configured() + + if discovery_info.ssdp_location is None: + _LOGGER.error("SSDP location is None") + return self.async_abort(reason="unknown") + + host = URL(discovery_info.ssdp_location).host + + if host is None: + _LOGGER.error("SSDP host is None") + return self.async_abort(reason="unknown") + + try: + info = await async_interview(host) + except OSError: + _LOGGER.exception("Unexpected exception interviewing host %s", host) + return self.async_abort(reason="unknown") + + if info is None: + _LOGGER.debug("SSDP eiscp is None: %s", host) + return self.async_abort(reason="cannot_connect") + + await self.async_set_unique_id(info.identifier) + self._abort_if_unique_id_configured(updates={CONF_HOST: info.host}) + + self._receiver_info = info + + title_string = f"{info.model_name} ({info.host})" + self.context["title_placeholders"] = {"name": title_string} + return await self.async_step_configure_receiver() + async def async_step_configure_receiver( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/onkyo/manifest.json b/homeassistant/components/onkyo/manifest.json index 0e75404b3eb..6f37fb61b44 100644 --- a/homeassistant/components/onkyo/manifest.json +++ b/homeassistant/components/onkyo/manifest.json @@ -1,11 +1,49 @@ { "domain": "onkyo", "name": "Onkyo", - "codeowners": ["@arturpragacz"], + "codeowners": ["@arturpragacz", "@eclair4151"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/onkyo", "integration_type": "device", "iot_class": "local_push", "loggers": ["pyeiscp"], - "requirements": ["pyeiscp==0.0.7"] + "requirements": ["pyeiscp==0.0.7"], + "ssdp": [ + { + "manufacturer": "ONKYO", + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1" + }, + { + "manufacturer": "ONKYO", + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:2" + }, + { + "manufacturer": "ONKYO", + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:3" + }, + { + "manufacturer": "Onkyo & Pioneer Corporation", + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1" + }, + { + "manufacturer": "Onkyo & Pioneer Corporation", + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:2" + }, + { + "manufacturer": "Onkyo & Pioneer Corporation", + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:3" + }, + { + "manufacturer": "Pioneer", + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1" + }, + { + "manufacturer": "Pioneer", + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:2" + }, + { + "manufacturer": "Pioneer", + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:3" + } + ] } diff --git a/homeassistant/generated/ssdp.py b/homeassistant/generated/ssdp.py index 9ed65bab868..89d1aa30cb8 100644 --- a/homeassistant/generated/ssdp.py +++ b/homeassistant/generated/ssdp.py @@ -224,6 +224,44 @@ SSDP = { "manufacturer": "The OctoPrint Project", }, ], + "onkyo": [ + { + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1", + "manufacturer": "ONKYO", + }, + { + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:2", + "manufacturer": "ONKYO", + }, + { + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:3", 
+ "manufacturer": "ONKYO", + }, + { + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1", + "manufacturer": "Onkyo & Pioneer Corporation", + }, + { + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:2", + "manufacturer": "Onkyo & Pioneer Corporation", + }, + { + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:3", + "manufacturer": "Onkyo & Pioneer Corporation", + }, + { + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1", + "manufacturer": "Pioneer", + }, + { + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:2", + "manufacturer": "Pioneer", + }, + { + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:3", + "manufacturer": "Pioneer", + }, + ], "openhome": [ { "st": "urn:av-openhome-org:service:Product:1", diff --git a/tests/components/onkyo/test_config_flow.py b/tests/components/onkyo/test_config_flow.py index 1ee0bfdf9c5..f619127d9b9 100644 --- a/tests/components/onkyo/test_config_flow.py +++ b/tests/components/onkyo/test_config_flow.py @@ -6,6 +6,7 @@ from unittest.mock import patch import pytest from homeassistant import config_entries +from homeassistant.components import ssdp from homeassistant.components.onkyo import InputSource from homeassistant.components.onkyo.config_flow import OnkyoConfigFlow from homeassistant.components.onkyo.const import ( @@ -83,6 +84,35 @@ async def test_manual_invalid_host(hass: HomeAssistant, stub_mock_discovery) -> assert host_result["errors"]["base"] == "cannot_connect" +async def test_ssdp_discovery_already_configured( + hass: HomeAssistant, default_mock_discovery +) -> None: + """Test SSDP discovery with already configured device.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + data={CONF_HOST: "192.168.1.100"}, + unique_id="id1", + ) + config_entry.add_to_hass(hass) + + discovery_info = ssdp.SsdpServiceInfo( + ssdp_location="http://192.168.1.100:8080", + upnp={ssdp.ATTR_UPNP_FRIENDLY_NAME: "Onkyo Receiver"}, + ssdp_usn="uuid:mock_usn", + ssdp_udn="uuid:00000000-0000-0000-0000-000000000000", + ssdp_st="mock_st", + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_SSDP}, + data=discovery_info, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + async def test_manual_valid_host_unexpected_error( hass: HomeAssistant, empty_mock_discovery ) -> None: @@ -198,6 +228,123 @@ async def test_discovery_with_one_selected(hass: HomeAssistant) -> None: assert select_result["description_placeholders"]["name"] == "type 42 (host 42)" +async def test_ssdp_discovery_success( + hass: HomeAssistant, default_mock_discovery +) -> None: + """Test SSDP discovery with valid host.""" + discovery_info = ssdp.SsdpServiceInfo( + ssdp_location="http://192.168.1.100:8080", + upnp={ssdp.ATTR_UPNP_FRIENDLY_NAME: "Onkyo Receiver"}, + ssdp_usn="uuid:mock_usn", + ssdp_udn="uuid:00000000-0000-0000-0000-000000000000", + ssdp_st="mock_st", + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_SSDP}, + data=discovery_info, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "configure_receiver" + + select_result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"volume_resolution": 200, "input_sources": ["TV"]}, + ) + + assert select_result["type"] is FlowResultType.CREATE_ENTRY + assert select_result["data"]["host"] == "192.168.1.100" + assert select_result["result"].unique_id == "id1" + + 
+async def test_ssdp_discovery_host_info_error(hass: HomeAssistant) -> None: + """Test SSDP discovery with host info error.""" + discovery_info = ssdp.SsdpServiceInfo( + ssdp_location="http://192.168.1.100:8080", + upnp={ssdp.ATTR_UPNP_FRIENDLY_NAME: "Onkyo Receiver"}, + ssdp_usn="uuid:mock_usn", + ssdp_st="mock_st", + ) + + with patch( + "homeassistant.components.onkyo.receiver.pyeiscp.Connection.discover", + side_effect=OSError, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_SSDP}, + data=discovery_info, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unknown" + + +async def test_ssdp_discovery_host_none_info( + hass: HomeAssistant, stub_mock_discovery +) -> None: + """Test SSDP discovery with host info error.""" + discovery_info = ssdp.SsdpServiceInfo( + ssdp_location="http://192.168.1.100:8080", + upnp={ssdp.ATTR_UPNP_FRIENDLY_NAME: "Onkyo Receiver"}, + ssdp_usn="uuid:mock_usn", + ssdp_st="mock_st", + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_SSDP}, + data=discovery_info, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + + +async def test_ssdp_discovery_no_location( + hass: HomeAssistant, default_mock_discovery +) -> None: + """Test SSDP discovery with no location.""" + discovery_info = ssdp.SsdpServiceInfo( + ssdp_location=None, + upnp={ssdp.ATTR_UPNP_FRIENDLY_NAME: "Onkyo Receiver"}, + ssdp_usn="uuid:mock_usn", + ssdp_st="mock_st", + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_SSDP}, + data=discovery_info, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unknown" + + +async def test_ssdp_discovery_no_host( + hass: HomeAssistant, default_mock_discovery +) -> None: + """Test SSDP discovery with no host.""" + discovery_info = ssdp.SsdpServiceInfo( + ssdp_location="http://", + upnp={ssdp.ATTR_UPNP_FRIENDLY_NAME: "Onkyo Receiver"}, + ssdp_usn="uuid:mock_usn", + ssdp_st="mock_st", + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_SSDP}, + data=discovery_info, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unknown" + + async def test_configure_empty_source_list( hass: HomeAssistant, default_mock_discovery ) -> None: From a2be5a383c1bc0811828a92b2d600d0b0138e6b7 Mon Sep 17 00:00:00 2001 From: Ron Weikamp <15732230+ronweikamp@users.noreply.github.com> Date: Wed, 18 Dec 2024 10:41:46 +0100 Subject: [PATCH 0803/1198] Bugfix: also schedule time based integration when source is 0 (#133438) * Bugfix also schedule time based integration when source is 0 * Update tests/components/integration/test_sensor.py Co-authored-by: Diogo Gomes * Improve comment in test. Remove redundant assertion. 
--------- Co-authored-by: Diogo Gomes --- .../components/integration/sensor.py | 2 +- tests/components/integration/test_sensor.py | 33 +++++++++++++++++++ 2 files changed, 34 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/integration/sensor.py b/homeassistant/components/integration/sensor.py index a053e5cea5c..27aa74d0785 100644 --- a/homeassistant/components/integration/sensor.py +++ b/homeassistant/components/integration/sensor.py @@ -576,7 +576,7 @@ class IntegrationSensor(RestoreSensor): if ( self._max_sub_interval is not None and source_state is not None - and (source_state_dec := _decimal_state(source_state.state)) + and (source_state_dec := _decimal_state(source_state.state)) is not None ): @callback diff --git a/tests/components/integration/test_sensor.py b/tests/components/integration/test_sensor.py index 974c8bb8691..07390cd9571 100644 --- a/tests/components/integration/test_sensor.py +++ b/tests/components/integration/test_sensor.py @@ -843,6 +843,39 @@ async def test_on_valid_source_expect_update_on_time( assert float(state.state) < 1.8 +async def test_on_0_source_expect_0_and_update_when_source_gets_positive( + hass: HomeAssistant, +) -> None: + """Test whether time based integration updates the integral on a valid zero source.""" + start_time = dt_util.utcnow() + + with freeze_time(start_time) as freezer: + await _setup_integral_sensor(hass, max_sub_interval=DEFAULT_MAX_SUB_INTERVAL) + await _update_source_sensor(hass, 0) + await hass.async_block_till_done() + + # wait one minute and one second + freezer.tick(61) + async_fire_time_changed(hass, dt_util.now()) + await hass.async_block_till_done() + + state = hass.states.get("sensor.integration") + + assert condition.async_numeric_state(hass, state) is True + assert float(state.state) == 0 # integral is 0 after integration of 0 + + # wait one second and update state + freezer.tick(1) + async_fire_time_changed(hass, dt_util.now()) + await _update_source_sensor(hass, 100) + await hass.async_block_till_done() + + state = hass.states.get("sensor.integration") + + # approx 100*1/3600 (right method after 1 second since last integration) + assert 0.027 < float(state.state) < 0.029 + + async def test_on_unvailable_source_expect_no_update_on_time( hass: HomeAssistant, ) -> None: From 8b8c4099162b9cc5b5af984a89d89077e2099bc2 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 18 Dec 2024 10:44:19 +0100 Subject: [PATCH 0804/1198] Fix test-before-setup IQS check (#133467) --- .../hassfest/quality_scale_validation/test_before_setup.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/script/hassfest/quality_scale_validation/test_before_setup.py b/script/hassfest/quality_scale_validation/test_before_setup.py index 5f21a9d2458..1ac0d3d8e0b 100644 --- a/script/hassfest/quality_scale_validation/test_before_setup.py +++ b/script/hassfest/quality_scale_validation/test_before_setup.py @@ -17,13 +17,20 @@ _VALID_EXCEPTIONS = { def _get_exception_name(expression: ast.expr) -> str: """Get the name of the exception being raised.""" + if expression is None: + # Bare raise + return None + if isinstance(expression, ast.Name): + # Raise Exception return expression.id if isinstance(expression, ast.Call): + # Raise Exception() return _get_exception_name(expression.func) if isinstance(expression, ast.Attribute): + # Raise namespace.??? 
return _get_exception_name(expression.value) raise AssertionError( From a6520d2627f61df44b2aa15b1a72b2fa31c850dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=2E=20Diego=20Rodr=C3=ADguez=20Royo?= Date: Wed, 18 Dec 2024 10:52:45 +0100 Subject: [PATCH 0805/1198] Handle Home Connect error at diagnostics (#131644) --- .../components/home_connect/diagnostics.py | 9 +++- .../snapshots/test_diagnostics.ambr | 14 ++++++ .../home_connect/test_diagnostics.py | 48 +++++++++++++++++-- 3 files changed, 66 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/home_connect/diagnostics.py b/homeassistant/components/home_connect/diagnostics.py index d2505853d23..e095bc503ab 100644 --- a/homeassistant/components/home_connect/diagnostics.py +++ b/homeassistant/components/home_connect/diagnostics.py @@ -4,7 +4,7 @@ from __future__ import annotations from typing import Any -from homeconnect.api import HomeConnectAppliance +from homeconnect.api import HomeConnectAppliance, HomeConnectError from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntry @@ -14,9 +14,14 @@ from .api import HomeConnectDevice def _generate_appliance_diagnostics(appliance: HomeConnectAppliance) -> dict[str, Any]: + try: + programs = appliance.get_programs_available() + except HomeConnectError: + programs = None return { + "connected": appliance.connected, "status": appliance.status, - "programs": appliance.get_programs_available(), + "programs": programs, } diff --git a/tests/components/home_connect/snapshots/test_diagnostics.ambr b/tests/components/home_connect/snapshots/test_diagnostics.ambr index 99f10fe2847..f3131eac52f 100644 --- a/tests/components/home_connect/snapshots/test_diagnostics.ambr +++ b/tests/components/home_connect/snapshots/test_diagnostics.ambr @@ -2,6 +2,7 @@ # name: test_async_get_config_entry_diagnostics dict({ 'BOSCH-000000000-000000000000': dict({ + 'connected': True, 'programs': list([ ]), 'status': dict({ @@ -23,6 +24,7 @@ }), }), 'BOSCH-HCS000000-D00000000001': dict({ + 'connected': True, 'programs': list([ 'LaundryCare.WasherDryer.Program.Mix', 'LaundryCare.Washer.Option.Temperature', @@ -46,6 +48,7 @@ }), }), 'BOSCH-HCS000000-D00000000002': dict({ + 'connected': True, 'programs': list([ ]), 'status': dict({ @@ -67,6 +70,7 @@ }), }), 'BOSCH-HCS000000-D00000000003': dict({ + 'connected': True, 'programs': list([ ]), 'status': dict({ @@ -88,6 +92,7 @@ }), }), 'BOSCH-HCS000000-D00000000004': dict({ + 'connected': True, 'programs': list([ ]), 'status': dict({ @@ -144,6 +149,7 @@ }), }), 'BOSCH-HCS000000-D00000000005': dict({ + 'connected': True, 'programs': list([ ]), 'status': dict({ @@ -165,6 +171,7 @@ }), }), 'BOSCH-HCS000000-D00000000006': dict({ + 'connected': True, 'programs': list([ ]), 'status': dict({ @@ -186,6 +193,7 @@ }), }), 'BOSCH-HCS01OVN1-43E0065FE245': dict({ + 'connected': True, 'programs': list([ 'Cooking.Oven.Program.HeatingMode.HotAir', 'Cooking.Oven.Program.HeatingMode.TopBottomHeating', @@ -217,6 +225,7 @@ }), }), 'BOSCH-HCS04DYR1-831694AE3C5A': dict({ + 'connected': True, 'programs': list([ 'LaundryCare.Dryer.Program.Cotton', 'LaundryCare.Dryer.Program.Synthetic', @@ -241,6 +250,7 @@ }), }), 'BOSCH-HCS06COM1-D70390681C2C': dict({ + 'connected': True, 'programs': list([ 'ConsumerProducts.CoffeeMaker.Program.Beverage.Espresso', 'ConsumerProducts.CoffeeMaker.Program.Beverage.EspressoMacchiato', @@ -268,6 +278,7 @@ }), }), 'SIEMENS-HCS02DWH1-6BE58C26DCC1': dict({ + 'connected': True, 'programs': list([ 
'Dishcare.Dishwasher.Program.Auto1', 'Dishcare.Dishwasher.Program.Auto2', @@ -319,6 +330,7 @@ }), }), 'SIEMENS-HCS03WCH1-7BC6383CF794': dict({ + 'connected': True, 'programs': list([ 'LaundryCare.Washer.Program.Cotton', 'LaundryCare.Washer.Program.EasyCare', @@ -356,6 +368,7 @@ }), }), 'SIEMENS-HCS05FRF1-304F4F9E541D': dict({ + 'connected': True, 'programs': list([ ]), 'status': dict({ @@ -415,6 +428,7 @@ # --- # name: test_async_get_device_diagnostics dict({ + 'connected': True, 'programs': list([ 'Dishcare.Dishwasher.Program.Auto1', 'Dishcare.Dishwasher.Program.Auto2', diff --git a/tests/components/home_connect/test_diagnostics.py b/tests/components/home_connect/test_diagnostics.py index d0bc5e77735..f2db6e2b67a 100644 --- a/tests/components/home_connect/test_diagnostics.py +++ b/tests/components/home_connect/test_diagnostics.py @@ -1,8 +1,9 @@ """Test diagnostics for Home Connect.""" from collections.abc import Awaitable, Callable -from unittest.mock import MagicMock +from unittest.mock import MagicMock, Mock +from homeconnect.api import HomeConnectError import pytest from syrupy import SnapshotAssertion @@ -63,14 +64,13 @@ async def test_async_get_device_diagnostics( @pytest.mark.usefixtures("bypass_throttle") -async def test_async_device_diagnostics_exceptions( +async def test_async_device_diagnostics_not_found( hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], setup_credentials: None, get_appliances: MagicMock, device_registry: dr.DeviceRegistry, - snapshot: SnapshotAssertion, ) -> None: """Test device config entry diagnostics.""" get_appliances.side_effect = get_all_appliances @@ -85,3 +85,45 @@ async def test_async_device_diagnostics_exceptions( with pytest.raises(ValueError): await async_get_device_diagnostics(hass, config_entry, device) + + +@pytest.mark.parametrize( + ("api_error", "expected_connection_status"), + [ + (HomeConnectError(), "unknown"), + ( + HomeConnectError( + { + "key": "SDK.Error.HomeAppliance.Connection.Initialization.Failed", + } + ), + "offline", + ), + ], +) +@pytest.mark.usefixtures("bypass_throttle") +async def test_async_device_diagnostics_api_error( + api_error: HomeConnectError, + expected_connection_status: str, + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: MagicMock, + appliance: Mock, + device_registry: dr.DeviceRegistry, +) -> None: + """Test device config entry diagnostics.""" + appliance.get_programs_available.side_effect = api_error + get_appliances.return_value = [appliance] + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + device = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, appliance.haId)}, + ) + + diagnostics = await async_get_device_diagnostics(hass, config_entry, device) + assert diagnostics["programs"] is None From 90208d2eb1da153fd3ada4de5465bce4a70ef9d1 Mon Sep 17 00:00:00 2001 From: dotvav Date: Wed, 18 Dec 2024 10:58:25 +0100 Subject: [PATCH 0806/1198] Bump pypalazzetti to 0.1.15 (#133433) --- homeassistant/components/palazzetti/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/palazzetti/manifest.json b/homeassistant/components/palazzetti/manifest.json index 05a5d260b50..70e58507159 100644 --- 
a/homeassistant/components/palazzetti/manifest.json +++ b/homeassistant/components/palazzetti/manifest.json @@ -15,5 +15,5 @@ "documentation": "https://www.home-assistant.io/integrations/palazzetti", "integration_type": "device", "iot_class": "local_polling", - "requirements": ["pypalazzetti==0.1.14"] + "requirements": ["pypalazzetti==0.1.15"] } diff --git a/requirements_all.txt b/requirements_all.txt index b01683cbf76..a6316379d8f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2168,7 +2168,7 @@ pyoverkiz==1.15.3 pyownet==0.10.0.post1 # homeassistant.components.palazzetti -pypalazzetti==0.1.14 +pypalazzetti==0.1.15 # homeassistant.components.elv pypca==0.0.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5b339e061b1..33e7327568e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1761,7 +1761,7 @@ pyoverkiz==1.15.3 pyownet==0.10.0.post1 # homeassistant.components.palazzetti -pypalazzetti==0.1.14 +pypalazzetti==0.1.15 # homeassistant.components.lcn pypck==0.7.24 From 869a0d7abc6230e1b8d0609681b459947a2c3bed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Joakim=20S=C3=B8rensen?= Date: Wed, 18 Dec 2024 11:01:38 +0100 Subject: [PATCH 0807/1198] Add name to cloud connection info response (#133468) --- homeassistant/components/cloud/client.py | 1 + tests/components/cloud/test_client.py | 1 + 2 files changed, 2 insertions(+) diff --git a/homeassistant/components/cloud/client.py b/homeassistant/components/cloud/client.py index ee46fa42125..ea3d992e8f7 100644 --- a/homeassistant/components/cloud/client.py +++ b/homeassistant/components/cloud/client.py @@ -306,6 +306,7 @@ class CloudClient(Interface): }, "version": HA_VERSION, "instance_id": self.prefs.instance_id, + "name": self._hass.config.location_name, } async def async_alexa_message(self, payload: dict[Any, Any]) -> dict[Any, Any]: diff --git a/tests/components/cloud/test_client.py b/tests/components/cloud/test_client.py index 43eccc5ef9c..52457fe558c 100644 --- a/tests/components/cloud/test_client.py +++ b/tests/components/cloud/test_client.py @@ -441,6 +441,7 @@ async def test_cloud_connection_info(hass: HomeAssistant) -> None: assert response == { "instance_id": "12345678901234567890", + "name": "test home", "remote": { "alias": None, "can_enable": True, From fa0e54e658975b3559656828b6e2464414000f1a Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Wed, 18 Dec 2024 11:05:52 +0100 Subject: [PATCH 0808/1198] Don't raise Overkiz user flow unique_id check (#133471) --- homeassistant/components/overkiz/config_flow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/overkiz/config_flow.py b/homeassistant/components/overkiz/config_flow.py index af7e277d928..9a94c30d95d 100644 --- a/homeassistant/components/overkiz/config_flow.py +++ b/homeassistant/components/overkiz/config_flow.py @@ -76,7 +76,7 @@ class OverkizConfigFlow(ConfigFlow, domain=DOMAIN): for gateway in gateways: if is_overkiz_gateway(gateway.id): gateway_id = gateway.id - await self.async_set_unique_id(gateway_id) + await self.async_set_unique_id(gateway_id, raise_on_progress=False) return user_input From 05b0c56191aeb31b84361142790a6c1abbef9176 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Wed, 18 Dec 2024 11:22:22 +0100 Subject: [PATCH 0809/1198] Use enum instead of string for button entities key in Overkiz (#133472) --- homeassistant/components/overkiz/button.py | 24 ++++++++++++++-------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git 
a/homeassistant/components/overkiz/button.py b/homeassistant/components/overkiz/button.py index 5a1116aeeb5..fcaa0b743dc 100644 --- a/homeassistant/components/overkiz/button.py +++ b/homeassistant/components/overkiz/button.py @@ -28,41 +28,47 @@ class OverkizButtonDescription(ButtonEntityDescription): BUTTON_DESCRIPTIONS: list[OverkizButtonDescription] = [ # My Position (cover, light) OverkizButtonDescription( - key="my", + key=OverkizCommand.MY, name="My position", icon="mdi:star", ), # Identify OverkizButtonDescription( - key="identify", # startIdentify and identify are reversed... Swap this when fixed in API. + key=OverkizCommand.IDENTIFY, # startIdentify and identify are reversed... Swap this when fixed in API. name="Start identify", icon="mdi:human-greeting-variant", entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), OverkizButtonDescription( - key="stopIdentify", + key=OverkizCommand.STOP_IDENTIFY, name="Stop identify", icon="mdi:human-greeting-variant", entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), OverkizButtonDescription( - key="startIdentify", # startIdentify and identify are reversed... Swap this when fixed in API. + key=OverkizCommand.START_IDENTIFY, # startIdentify and identify are reversed... Swap this when fixed in API. name="Identify", icon="mdi:human-greeting-variant", entity_category=EntityCategory.DIAGNOSTIC, ), # RTDIndoorSiren / RTDOutdoorSiren - OverkizButtonDescription(key="dingDong", name="Ding dong", icon="mdi:bell-ring"), - OverkizButtonDescription(key="bip", name="Bip", icon="mdi:bell-ring"), OverkizButtonDescription( - key="fastBipSequence", name="Fast bip sequence", icon="mdi:bell-ring" + key=OverkizCommand.DING_DONG, name="Ding dong", icon="mdi:bell-ring" + ), + OverkizButtonDescription(key=OverkizCommand.BIP, name="Bip", icon="mdi:bell-ring"), + OverkizButtonDescription( + key=OverkizCommand.FAST_BIP_SEQUENCE, + name="Fast bip sequence", + icon="mdi:bell-ring", + ), + OverkizButtonDescription( + key=OverkizCommand.RING, name="Ring", icon="mdi:bell-ring" ), - OverkizButtonDescription(key="ring", name="Ring", icon="mdi:bell-ring"), # DynamicScreen (ogp:blind) uses goToAlias (id 1: favorite1) instead of 'my' OverkizButtonDescription( - key="goToAlias", + key=OverkizCommand.GO_TO_ALIAS, press_args="1", name="My position", icon="mdi:star", From 7730f423b38c621c2d2c1665328b0a9907fa9504 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Wed, 18 Dec 2024 11:22:32 +0100 Subject: [PATCH 0810/1198] Add identify device class in Overkiz (#133474) --- homeassistant/components/overkiz/button.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/overkiz/button.py b/homeassistant/components/overkiz/button.py index fcaa0b743dc..a39c236725a 100644 --- a/homeassistant/components/overkiz/button.py +++ b/homeassistant/components/overkiz/button.py @@ -7,7 +7,11 @@ from dataclasses import dataclass from pyoverkiz.enums import OverkizCommand from pyoverkiz.types import StateType as OverkizStateType -from homeassistant.components.button import ButtonEntity, ButtonEntityDescription +from homeassistant.components.button import ( + ButtonDeviceClass, + ButtonEntity, + ButtonEntityDescription, +) from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant @@ -52,6 +56,7 @@ BUTTON_DESCRIPTIONS: list[OverkizButtonDescription] = [ name="Identify", icon="mdi:human-greeting-variant", 
entity_category=EntityCategory.DIAGNOSTIC, + device_class=ButtonDeviceClass.IDENTIFY, ), # RTDIndoorSiren / RTDOutdoorSiren OverkizButtonDescription( From 992afc4cd37fcdf155602c212ea9349085ffa562 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 18 Dec 2024 11:27:07 +0100 Subject: [PATCH 0811/1198] Set the with_strategy_settings to None for unknown backups (#133466) --- homeassistant/components/backup/manager.py | 6 +-- .../backup/snapshots/test_backup.ambr | 2 +- .../backup/snapshots/test_websocket.ambr | 42 +++++++++---------- tests/components/cloud/test_backup.py | 4 +- tests/components/hassio/test_backup.py | 4 +- tests/components/kitchen_sink/test_backup.py | 2 +- 6 files changed, 30 insertions(+), 30 deletions(-) diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index d6abc299317..a9bce8cb03d 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -60,7 +60,7 @@ class ManagerBackup(AgentBackup): agent_ids: list[str] failed_agent_ids: list[str] - with_strategy_settings: bool + with_strategy_settings: bool | None @dataclass(frozen=True, kw_only=True, slots=True) @@ -448,7 +448,7 @@ class BackupManager: with_strategy_settings = known_backup.with_strategy_settings else: failed_agent_ids = [] - with_strategy_settings = False + with_strategy_settings = None backups[backup_id] = ManagerBackup( agent_ids=[], addons=agent_backup.addons, @@ -497,7 +497,7 @@ class BackupManager: with_strategy_settings = known_backup.with_strategy_settings else: failed_agent_ids = [] - with_strategy_settings = False + with_strategy_settings = None backup = ManagerBackup( agent_ids=[], addons=result.addons, diff --git a/tests/components/backup/snapshots/test_backup.ambr b/tests/components/backup/snapshots/test_backup.ambr index b350ff680ee..9ef865955fe 100644 --- a/tests/components/backup/snapshots/test_backup.ambr +++ b/tests/components/backup/snapshots/test_backup.ambr @@ -78,7 +78,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, diff --git a/tests/components/backup/snapshots/test_websocket.ambr b/tests/components/backup/snapshots/test_websocket.ambr index 8bd4e2817b2..f43a7ed7a2c 100644 --- a/tests/components/backup/snapshots/test_websocket.ambr +++ b/tests/components/backup/snapshots/test_websocket.ambr @@ -1539,7 +1539,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -1607,7 +1607,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -1660,7 +1660,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -1697,7 +1697,7 @@ 'name': 'Test 2', 'protected': False, 'size': 1, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -1745,7 +1745,7 @@ 'name': 'Test 2', 'protected': False, 'size': 1, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -1788,7 +1788,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 
'last_attempted_strategy_backup': None, @@ -1841,7 +1841,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -1895,7 +1895,7 @@ 'name': 'Test', 'protected': False, 'size': 13, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -2003,7 +2003,7 @@ 'name': 'Test', 'protected': False, 'size': 13, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -2056,7 +2056,7 @@ 'name': 'Test', 'protected': False, 'size': 13, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -2109,7 +2109,7 @@ 'name': 'Test', 'protected': False, 'size': 13, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -2216,7 +2216,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), }), 'success': True, @@ -2254,7 +2254,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), }), 'success': True, @@ -2305,7 +2305,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), }), 'success': True, @@ -2344,7 +2344,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), }), 'success': True, @@ -2607,7 +2607,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -2649,7 +2649,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -2692,7 +2692,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -2729,7 +2729,7 @@ 'name': 'Test 2', 'protected': False, 'size': 1, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), dict({ 'addons': list([ @@ -2756,7 +2756,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -2799,7 +2799,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, diff --git a/tests/components/cloud/test_backup.py b/tests/components/cloud/test_backup.py index 5e607bbc70b..bd8e80e0666 100644 --- a/tests/components/cloud/test_backup.py +++ b/tests/components/cloud/test_backup.py @@ -171,7 +171,7 @@ async def test_agents_list_backups( "size": 34519040, "agent_ids": ["cloud.cloud"], "failed_agent_ids": [], - "with_strategy_settings": False, + "with_strategy_settings": None, } ] @@ -218,7 +218,7 @@ async def test_agents_list_backups_fail_cloud( "size": 34519040, "agent_ids": ["cloud.cloud"], "failed_agent_ids": [], - "with_strategy_settings": False, + "with_strategy_settings": None, }, ), ( diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py index 75cc049f7b5..45aa28c19d6 100644 --- a/tests/components/hassio/test_backup.py +++ 
b/tests/components/hassio/test_backup.py @@ -341,7 +341,7 @@ async def test_agent_info( "name": "Test", "protected": False, "size": 1048576, - "with_strategy_settings": False, + "with_strategy_settings": None, }, ), ( @@ -362,7 +362,7 @@ async def test_agent_info( "name": "Test", "protected": False, "size": 1048576, - "with_strategy_settings": False, + "with_strategy_settings": None, }, ), ], diff --git a/tests/components/kitchen_sink/test_backup.py b/tests/components/kitchen_sink/test_backup.py index 6a738094ae6..81876b5c3d1 100644 --- a/tests/components/kitchen_sink/test_backup.py +++ b/tests/components/kitchen_sink/test_backup.py @@ -104,7 +104,7 @@ async def test_agents_list_backups( "name": "Kitchen sink syncer", "protected": False, "size": 1234, - "with_strategy_settings": False, + "with_strategy_settings": None, } ] From fc4100833e3fc3db42574e60dd35920672bce052 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Wed, 18 Dec 2024 11:43:04 +0100 Subject: [PATCH 0812/1198] Change device class from Volume to Volume Storage in Overkiz (#133473) Change device class from Volume to Volume Storage --- homeassistant/components/overkiz/sensor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/overkiz/sensor.py b/homeassistant/components/overkiz/sensor.py index 5c54a1bd383..184b4938fef 100644 --- a/homeassistant/components/overkiz/sensor.py +++ b/homeassistant/components/overkiz/sensor.py @@ -423,7 +423,7 @@ SENSOR_DESCRIPTIONS: list[OverkizSensorDescription] = [ OverkizSensorDescription( key=OverkizState.CORE_REMAINING_HOT_WATER, name="Warm water remaining", - device_class=SensorDeviceClass.VOLUME, + device_class=SensorDeviceClass.VOLUME_STORAGE, state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=UnitOfVolume.LITERS, ), From 3bb62565729310a258f52f9fc8977ee919088dd4 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Wed, 18 Dec 2024 11:48:10 +0100 Subject: [PATCH 0813/1198] Add test button for SmokeSensor in Overkiz (#133476) --- homeassistant/components/overkiz/button.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/overkiz/button.py b/homeassistant/components/overkiz/button.py index a39c236725a..c34be5cde84 100644 --- a/homeassistant/components/overkiz/button.py +++ b/homeassistant/components/overkiz/button.py @@ -4,7 +4,7 @@ from __future__ import annotations from dataclasses import dataclass -from pyoverkiz.enums import OverkizCommand +from pyoverkiz.enums import OverkizCommand, OverkizCommandParam from pyoverkiz.types import StateType as OverkizStateType from homeassistant.components.button import ( @@ -83,6 +83,14 @@ BUTTON_DESCRIPTIONS: list[OverkizButtonDescription] = [ name="Toggle", icon="mdi:sync", ), + # SmokeSensor + OverkizButtonDescription( + key=OverkizCommand.CHECK_EVENT_TRIGGER, + press_args=OverkizCommandParam.SHORT, + name="Test", + icon="mdi:smoke-detector", + entity_category=EntityCategory.DIAGNOSTIC, + ), ] SUPPORTED_COMMANDS = { From be25cb7aa7fd6048b3a60fe724eeabfc7507e9fb Mon Sep 17 00:00:00 2001 From: greyeee <62752780+greyeee@users.noreply.github.com> Date: Wed, 18 Dec 2024 20:19:45 +0800 Subject: [PATCH 0814/1198] Add support for SwitchBot Relay Switch 1 and Relay Switch 1PM (#132327) --- .../components/switchbot_cloud/__init__.py | 9 +- .../components/switchbot_cloud/sensor.py | 122 +++++++++++++----- .../components/switchbot_cloud/switch.py | 17 +++ .../components/switchbot_cloud/test_switch.py | 56 ++++++++ 4 files changed, 169 insertions(+), 
35 deletions(-) create mode 100644 tests/components/switchbot_cloud/test_switch.py diff --git a/homeassistant/components/switchbot_cloud/__init__.py b/homeassistant/components/switchbot_cloud/__init__.py index 625b4698301..827dce550ef 100644 --- a/homeassistant/components/switchbot_cloud/__init__.py +++ b/homeassistant/components/switchbot_cloud/__init__.py @@ -75,9 +75,11 @@ def make_device_data( ) if ( isinstance(device, Device) - and device.device_type.startswith("Plug") - or isinstance(device, Remote) - ): + and ( + device.device_type.startswith("Plug") + or device.device_type in ["Relay Switch 1PM", "Relay Switch 1"] + ) + ) or isinstance(device, Remote): devices_data.switches.append( prepare_device(hass, api, device, coordinators_by_id) ) @@ -88,6 +90,7 @@ def make_device_data( "Hub 2", "MeterPro", "MeterPro(CO2)", + "Relay Switch 1PM", ]: devices_data.sensors.append( prepare_device(hass, api, device, coordinators_by_id) diff --git a/homeassistant/components/switchbot_cloud/sensor.py b/homeassistant/components/switchbot_cloud/sensor.py index 90135ad96b3..ae912e914ba 100644 --- a/homeassistant/components/switchbot_cloud/sensor.py +++ b/homeassistant/components/switchbot_cloud/sensor.py @@ -12,6 +12,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONCENTRATION_PARTS_PER_MILLION, PERCENTAGE, + UnitOfElectricCurrent, + UnitOfElectricPotential, + UnitOfPower, UnitOfTemperature, ) from homeassistant.core import HomeAssistant, callback @@ -26,38 +29,97 @@ SENSOR_TYPE_TEMPERATURE = "temperature" SENSOR_TYPE_HUMIDITY = "humidity" SENSOR_TYPE_BATTERY = "battery" SENSOR_TYPE_CO2 = "CO2" +SENSOR_TYPE_POWER = "power" +SENSOR_TYPE_VOLTAGE = "voltage" +SENSOR_TYPE_CURRENT = "electricCurrent" -METER_PLUS_SENSOR_DESCRIPTIONS = ( - SensorEntityDescription( - key=SENSOR_TYPE_TEMPERATURE, - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - ), - SensorEntityDescription( - key=SENSOR_TYPE_HUMIDITY, - device_class=SensorDeviceClass.HUMIDITY, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=PERCENTAGE, - ), - SensorEntityDescription( - key=SENSOR_TYPE_BATTERY, - device_class=SensorDeviceClass.BATTERY, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=PERCENTAGE, - ), +TEMPERATURE_DESCRIPTION = SensorEntityDescription( + key=SENSOR_TYPE_TEMPERATURE, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, ) -METER_PRO_CO2_SENSOR_DESCRIPTIONS = ( - *METER_PLUS_SENSOR_DESCRIPTIONS, - SensorEntityDescription( - key=SENSOR_TYPE_CO2, - native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION, - state_class=SensorStateClass.MEASUREMENT, - device_class=SensorDeviceClass.CO2, - ), +HUMIDITY_DESCRIPTION = SensorEntityDescription( + key=SENSOR_TYPE_HUMIDITY, + device_class=SensorDeviceClass.HUMIDITY, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, ) +BATTERY_DESCRIPTION = SensorEntityDescription( + key=SENSOR_TYPE_BATTERY, + device_class=SensorDeviceClass.BATTERY, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, +) + +POWER_DESCRIPTION = SensorEntityDescription( + key=SENSOR_TYPE_POWER, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, +) + +VOLATGE_DESCRIPTION = SensorEntityDescription( + 
key=SENSOR_TYPE_VOLTAGE, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, +) + +CURRENT_DESCRIPTION = SensorEntityDescription( + key=SENSOR_TYPE_CURRENT, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricCurrent.MILLIAMPERE, +) + +CO2_DESCRIPTION = SensorEntityDescription( + key=SENSOR_TYPE_CO2, + device_class=SensorDeviceClass.CO2, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION, +) + +SENSOR_DESCRIPTIONS_BY_DEVICE_TYPES = { + "Meter": ( + TEMPERATURE_DESCRIPTION, + HUMIDITY_DESCRIPTION, + BATTERY_DESCRIPTION, + ), + "MeterPlus": ( + TEMPERATURE_DESCRIPTION, + HUMIDITY_DESCRIPTION, + BATTERY_DESCRIPTION, + ), + "WoIOSensor": ( + TEMPERATURE_DESCRIPTION, + HUMIDITY_DESCRIPTION, + BATTERY_DESCRIPTION, + ), + "Relay Switch 1PM": ( + POWER_DESCRIPTION, + VOLATGE_DESCRIPTION, + CURRENT_DESCRIPTION, + ), + "Hub 2": ( + TEMPERATURE_DESCRIPTION, + HUMIDITY_DESCRIPTION, + ), + "MeterPro": ( + TEMPERATURE_DESCRIPTION, + HUMIDITY_DESCRIPTION, + BATTERY_DESCRIPTION, + ), + "MeterPro(CO2)": ( + TEMPERATURE_DESCRIPTION, + HUMIDITY_DESCRIPTION, + BATTERY_DESCRIPTION, + CO2_DESCRIPTION, + ), +} + async def async_setup_entry( hass: HomeAssistant, @@ -70,11 +132,7 @@ async def async_setup_entry( async_add_entities( SwitchBotCloudSensor(data.api, device, coordinator, description) for device, coordinator in data.devices.sensors - for description in ( - METER_PRO_CO2_SENSOR_DESCRIPTIONS - if device.device_type == "MeterPro(CO2)" - else METER_PLUS_SENSOR_DESCRIPTIONS - ) + for description in SENSOR_DESCRIPTIONS_BY_DEVICE_TYPES[device.device_type] ) diff --git a/homeassistant/components/switchbot_cloud/switch.py b/homeassistant/components/switchbot_cloud/switch.py index c30e60086fa..281ebb9322e 100644 --- a/homeassistant/components/switchbot_cloud/switch.py +++ b/homeassistant/components/switchbot_cloud/switch.py @@ -69,6 +69,18 @@ class SwitchBotCloudPlugSwitch(SwitchBotCloudSwitch): _attr_device_class = SwitchDeviceClass.OUTLET +class SwitchBotCloudRelaySwitchSwitch(SwitchBotCloudSwitch): + """Representation of a SwitchBot relay switch.""" + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + if not self.coordinator.data: + return + self._attr_is_on = self.coordinator.data.get("switchStatus") == 1 + self.async_write_ha_state() + + @callback def _async_make_entity( api: SwitchBotAPI, device: Device | Remote, coordinator: SwitchBotCoordinator @@ -78,4 +90,9 @@ def _async_make_entity( return SwitchBotCloudRemoteSwitch(api, device, coordinator) if "Plug" in device.device_type: return SwitchBotCloudPlugSwitch(api, device, coordinator) + if device.device_type in [ + "Relay Switch 1PM", + "Relay Switch 1", + ]: + return SwitchBotCloudRelaySwitchSwitch(api, device, coordinator) raise NotImplementedError(f"Unsupported device type: {device.device_type}") diff --git a/tests/components/switchbot_cloud/test_switch.py b/tests/components/switchbot_cloud/test_switch.py new file mode 100644 index 00000000000..d4ef2c84549 --- /dev/null +++ b/tests/components/switchbot_cloud/test_switch.py @@ -0,0 +1,56 @@ +"""Test for the switchbot_cloud relay switch.""" + +from unittest.mock import patch + +from switchbot_api import Device + +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.components.switchbot_cloud 
import SwitchBotAPI +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_OFF, + STATE_ON, +) +from homeassistant.core import HomeAssistant + +from . import configure_integration + + +async def test_relay_switch( + hass: HomeAssistant, mock_list_devices, mock_get_status +) -> None: + """Test turn on and turn off.""" + mock_list_devices.return_value = [ + Device( + deviceId="relay-switch-id-1", + deviceName="relay-switch-1", + deviceType="Relay Switch 1", + hubDeviceId="test-hub-id", + ), + ] + + mock_get_status.return_value = {"switchStatus": 0} + + entry = configure_integration(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state is ConfigEntryState.LOADED + + entity_id = "switch.relay_switch_1" + assert hass.states.get(entity_id).state == STATE_OFF + + with patch.object(SwitchBotAPI, "send_command"): + await hass.services.async_call( + SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + assert hass.states.get(entity_id).state == STATE_ON + + with patch.object(SwitchBotAPI, "send_command"): + await hass.services.async_call( + SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + assert hass.states.get(entity_id).state == STATE_OFF From 2aba1d399b100cf310c638ef776e0390a806f913 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Wed, 18 Dec 2024 13:47:30 +0100 Subject: [PATCH 0815/1198] Rename test file to singular form (#133482) --- tests/components/atag/{test_sensors.py => test_sensor.py} | 0 tests/components/broadlink/{test_sensors.py => test_sensor.py} | 0 tests/components/cert_expiry/{test_sensors.py => test_sensor.py} | 0 tests/components/idasen_desk/{test_buttons.py => test_button.py} | 0 tests/components/idasen_desk/{test_sensors.py => test_sensor.py} | 0 .../{test_binary_sensors.ambr => test_binary_sensor.ambr} | 0 .../madvr/snapshots/{test_sensors.ambr => test_sensor.ambr} | 0 .../madvr/{test_binary_sensors.py => test_binary_sensor.py} | 0 tests/components/madvr/{test_sensors.py => test_sensor.py} | 0 .../{test_binary_sensors.ambr => test_binary_sensor.ambr} | 0 .../tesla_fleet/{test_binary_sensors.py => test_binary_sensor.py} | 0 .../{test_binary_sensors.ambr => test_binary_sensor.ambr} | 0 .../teslemetry/{test_binary_sensors.py => test_binary_sensor.py} | 0 .../{test_binary_sensors.ambr => test_binary_sensor.ambr} | 0 .../tessie/{test_binary_sensors.py => test_binary_sensor.py} | 0 15 files changed, 0 insertions(+), 0 deletions(-) rename tests/components/atag/{test_sensors.py => test_sensor.py} (100%) rename tests/components/broadlink/{test_sensors.py => test_sensor.py} (100%) rename tests/components/cert_expiry/{test_sensors.py => test_sensor.py} (100%) rename tests/components/idasen_desk/{test_buttons.py => test_button.py} (100%) rename tests/components/idasen_desk/{test_sensors.py => test_sensor.py} (100%) rename tests/components/madvr/snapshots/{test_binary_sensors.ambr => test_binary_sensor.ambr} (100%) rename tests/components/madvr/snapshots/{test_sensors.ambr => test_sensor.ambr} (100%) rename tests/components/madvr/{test_binary_sensors.py => test_binary_sensor.py} (100%) rename tests/components/madvr/{test_sensors.py => test_sensor.py} (100%) rename tests/components/tesla_fleet/snapshots/{test_binary_sensors.ambr => test_binary_sensor.ambr} (100%) rename tests/components/tesla_fleet/{test_binary_sensors.py => test_binary_sensor.py} (100%) 
rename tests/components/teslemetry/snapshots/{test_binary_sensors.ambr => test_binary_sensor.ambr} (100%) rename tests/components/teslemetry/{test_binary_sensors.py => test_binary_sensor.py} (100%) rename tests/components/tessie/snapshots/{test_binary_sensors.ambr => test_binary_sensor.ambr} (100%) rename tests/components/tessie/{test_binary_sensors.py => test_binary_sensor.py} (100%) diff --git a/tests/components/atag/test_sensors.py b/tests/components/atag/test_sensor.py similarity index 100% rename from tests/components/atag/test_sensors.py rename to tests/components/atag/test_sensor.py diff --git a/tests/components/broadlink/test_sensors.py b/tests/components/broadlink/test_sensor.py similarity index 100% rename from tests/components/broadlink/test_sensors.py rename to tests/components/broadlink/test_sensor.py diff --git a/tests/components/cert_expiry/test_sensors.py b/tests/components/cert_expiry/test_sensor.py similarity index 100% rename from tests/components/cert_expiry/test_sensors.py rename to tests/components/cert_expiry/test_sensor.py diff --git a/tests/components/idasen_desk/test_buttons.py b/tests/components/idasen_desk/test_button.py similarity index 100% rename from tests/components/idasen_desk/test_buttons.py rename to tests/components/idasen_desk/test_button.py diff --git a/tests/components/idasen_desk/test_sensors.py b/tests/components/idasen_desk/test_sensor.py similarity index 100% rename from tests/components/idasen_desk/test_sensors.py rename to tests/components/idasen_desk/test_sensor.py diff --git a/tests/components/madvr/snapshots/test_binary_sensors.ambr b/tests/components/madvr/snapshots/test_binary_sensor.ambr similarity index 100% rename from tests/components/madvr/snapshots/test_binary_sensors.ambr rename to tests/components/madvr/snapshots/test_binary_sensor.ambr diff --git a/tests/components/madvr/snapshots/test_sensors.ambr b/tests/components/madvr/snapshots/test_sensor.ambr similarity index 100% rename from tests/components/madvr/snapshots/test_sensors.ambr rename to tests/components/madvr/snapshots/test_sensor.ambr diff --git a/tests/components/madvr/test_binary_sensors.py b/tests/components/madvr/test_binary_sensor.py similarity index 100% rename from tests/components/madvr/test_binary_sensors.py rename to tests/components/madvr/test_binary_sensor.py diff --git a/tests/components/madvr/test_sensors.py b/tests/components/madvr/test_sensor.py similarity index 100% rename from tests/components/madvr/test_sensors.py rename to tests/components/madvr/test_sensor.py diff --git a/tests/components/tesla_fleet/snapshots/test_binary_sensors.ambr b/tests/components/tesla_fleet/snapshots/test_binary_sensor.ambr similarity index 100% rename from tests/components/tesla_fleet/snapshots/test_binary_sensors.ambr rename to tests/components/tesla_fleet/snapshots/test_binary_sensor.ambr diff --git a/tests/components/tesla_fleet/test_binary_sensors.py b/tests/components/tesla_fleet/test_binary_sensor.py similarity index 100% rename from tests/components/tesla_fleet/test_binary_sensors.py rename to tests/components/tesla_fleet/test_binary_sensor.py diff --git a/tests/components/teslemetry/snapshots/test_binary_sensors.ambr b/tests/components/teslemetry/snapshots/test_binary_sensor.ambr similarity index 100% rename from tests/components/teslemetry/snapshots/test_binary_sensors.ambr rename to tests/components/teslemetry/snapshots/test_binary_sensor.ambr diff --git a/tests/components/teslemetry/test_binary_sensors.py b/tests/components/teslemetry/test_binary_sensor.py similarity 
index 100% rename from tests/components/teslemetry/test_binary_sensors.py rename to tests/components/teslemetry/test_binary_sensor.py diff --git a/tests/components/tessie/snapshots/test_binary_sensors.ambr b/tests/components/tessie/snapshots/test_binary_sensor.ambr similarity index 100% rename from tests/components/tessie/snapshots/test_binary_sensors.ambr rename to tests/components/tessie/snapshots/test_binary_sensor.ambr diff --git a/tests/components/tessie/test_binary_sensors.py b/tests/components/tessie/test_binary_sensor.py similarity index 100% rename from tests/components/tessie/test_binary_sensors.py rename to tests/components/tessie/test_binary_sensor.py From ecb3bf79f32a2e25d141ff467e5958826ed9fc3a Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 18 Dec 2024 13:51:05 +0100 Subject: [PATCH 0816/1198] Revert "Add support for subentries to config entries" (#133470) Revert "Add support for subentries to config entries (#117355)" This reverts commit ad15786115673c5b3fe40ea2f5d61b4b896f433e. --- .../components/config/config_entries.py | 126 ---- homeassistant/config_entries.py | 315 +-------- homeassistant/helpers/data_entry_flow.py | 4 +- script/hassfest/translations.py | 9 - tests/common.py | 2 - .../aemet/snapshots/test_diagnostics.ambr | 2 - .../airly/snapshots/test_diagnostics.ambr | 2 - .../airnow/snapshots/test_diagnostics.ambr | 2 - .../airvisual/snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_diagnostics.ambr | 2 - .../airzone/snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_diagnostics.ambr | 2 - .../axis/snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_diagnostics.ambr | 2 - .../blink/snapshots/test_diagnostics.ambr | 2 - .../braviatv/snapshots/test_diagnostics.ambr | 2 - .../co2signal/snapshots/test_diagnostics.ambr | 2 - .../coinbase/snapshots/test_diagnostics.ambr | 2 - .../comelit/snapshots/test_diagnostics.ambr | 4 - .../components/config/test_config_entries.py | 469 ------------- .../deconz/snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_diagnostics.ambr | 2 - .../ecovacs/snapshots/test_diagnostics.ambr | 4 - .../snapshots/test_config_flow.ambr | 4 - .../snapshots/test_diagnostics.ambr | 6 - .../esphome/snapshots/test_diagnostics.ambr | 2 - tests/components/esphome/test_diagnostics.py | 1 - .../forecast_solar/snapshots/test_init.ambr | 2 - .../fritz/snapshots/test_diagnostics.ambr | 2 - .../fronius/snapshots/test_diagnostics.ambr | 2 - .../fyta/snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_config_flow.ambr | 8 - .../gios/snapshots/test_diagnostics.ambr | 2 - .../goodwe/snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_diagnostics.ambr | 2 - tests/components/guardian/test_diagnostics.py | 1 - .../snapshots/test_config_flow.ambr | 16 - .../snapshots/test_diagnostics.ambr | 2 - .../imgw_pib/snapshots/test_diagnostics.ambr | 2 - .../iqvia/snapshots/test_diagnostics.ambr | 2 - .../kostal_plenticore/test_diagnostics.py | 1 - .../snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_diagnostics.ambr | 2 - .../madvr/snapshots/test_diagnostics.ambr | 2 - .../melcloud/snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_diagnostics.ambr | 2 - .../netatmo/snapshots/test_diagnostics.ambr | 2 - .../nextdns/snapshots/test_diagnostics.ambr | 2 - .../nice_go/snapshots/test_diagnostics.ambr | 2 - tests/components/notion/test_diagnostics.py | 1 - 
.../onvif/snapshots/test_diagnostics.ambr | 2 - tests/components/openuv/test_diagnostics.py | 1 - .../p1_monitor/snapshots/test_init.ambr | 4 - .../snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_diagnostics.ambr | 2 - .../components/philips_js/test_config_flow.py | 1 - .../pi_hole/snapshots/test_diagnostics.ambr | 2 - .../proximity/snapshots/test_diagnostics.ambr | 2 - tests/components/ps4/test_init.py | 1 - .../components/purpleair/test_diagnostics.py | 1 - .../snapshots/test_diagnostics.ambr | 4 - .../snapshots/test_diagnostics.ambr | 4 - .../recollect_waste/test_diagnostics.py | 1 - .../ridwell/snapshots/test_diagnostics.ambr | 2 - .../components/samsungtv/test_diagnostics.py | 3 - .../snapshots/test_diagnostics.ambr | 2 - .../components/simplisafe/test_diagnostics.py | 1 - .../solarlog/snapshots/test_diagnostics.ambr | 2 - tests/components/subaru/test_config_flow.py | 2 - .../switcher_kis/test_diagnostics.py | 1 - .../snapshots/test_diagnostics.ambr | 4 - .../snapshots/test_diagnostics.ambr | 2 - .../tractive/snapshots/test_diagnostics.ambr | 2 - .../tuya/snapshots/test_config_flow.ambr | 8 - .../twinkly/snapshots/test_diagnostics.ambr | 2 - .../unifi/snapshots/test_diagnostics.ambr | 2 - .../uptime/snapshots/test_config_flow.ambr | 4 - .../snapshots/test_diagnostics.ambr | 2 - .../v2c/snapshots/test_diagnostics.ambr | 2 - .../vicare/snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_diagnostics.ambr | 2 - .../watttime/snapshots/test_diagnostics.ambr | 2 - .../webmin/snapshots/test_diagnostics.ambr | 2 - tests/components/webostv/test_diagnostics.py | 1 - .../whirlpool/snapshots/test_diagnostics.ambr | 2 - .../whois/snapshots/test_config_flow.ambr | 20 - .../workday/snapshots/test_diagnostics.ambr | 2 - .../wyoming/snapshots/test_config_flow.ambr | 12 - .../zha/snapshots/test_diagnostics.ambr | 2 - tests/snapshots/test_config_entries.ambr | 2 - tests/test_config_entries.py | 643 +----------------- 95 files changed, 33 insertions(+), 1774 deletions(-) diff --git a/homeassistant/components/config/config_entries.py b/homeassistant/components/config/config_entries.py index 5794819995d..da50f7e93a1 100644 --- a/homeassistant/components/config/config_entries.py +++ b/homeassistant/components/config/config_entries.py @@ -46,13 +46,6 @@ def async_setup(hass: HomeAssistant) -> bool: hass.http.register_view(OptionManagerFlowIndexView(hass.config_entries.options)) hass.http.register_view(OptionManagerFlowResourceView(hass.config_entries.options)) - hass.http.register_view( - SubentryManagerFlowIndexView(hass.config_entries.subentries) - ) - hass.http.register_view( - SubentryManagerFlowResourceView(hass.config_entries.subentries) - ) - websocket_api.async_register_command(hass, config_entries_get) websocket_api.async_register_command(hass, config_entry_disable) websocket_api.async_register_command(hass, config_entry_get_single) @@ -61,9 +54,6 @@ def async_setup(hass: HomeAssistant) -> bool: websocket_api.async_register_command(hass, config_entries_progress) websocket_api.async_register_command(hass, ignore_config_flow) - websocket_api.async_register_command(hass, config_subentry_delete) - websocket_api.async_register_command(hass, config_subentry_list) - return True @@ -295,63 +285,6 @@ class OptionManagerFlowResourceView( return await super().post(request, flow_id) -class SubentryManagerFlowIndexView( - FlowManagerIndexView[config_entries.ConfigSubentryFlowManager] -): - """View to create subentry flows.""" - - url = "/api/config/config_entries/subentries/flow" - name = 
"api:config:config_entries:subentries:flow" - - @require_admin( - error=Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT) - ) - @RequestDataValidator( - vol.Schema( - { - vol.Required("handler"): vol.All(vol.Coerce(tuple), (str, str)), - vol.Optional("show_advanced_options", default=False): cv.boolean, - }, - extra=vol.ALLOW_EXTRA, - ) - ) - async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response: - """Handle a POST request. - - handler in request is [entry_id, subentry_type]. - """ - return await super()._post_impl(request, data) - - def get_context(self, data: dict[str, Any]) -> dict[str, Any]: - """Return context.""" - context = super().get_context(data) - context["source"] = config_entries.SOURCE_USER - return context - - -class SubentryManagerFlowResourceView( - FlowManagerResourceView[config_entries.ConfigSubentryFlowManager] -): - """View to interact with the subentry flow manager.""" - - url = "/api/config/config_entries/subentries/flow/{flow_id}" - name = "api:config:config_entries:subentries:flow:resource" - - @require_admin( - error=Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT) - ) - async def get(self, request: web.Request, /, flow_id: str) -> web.Response: - """Get the current state of a data_entry_flow.""" - return await super().get(request, flow_id) - - @require_admin( - error=Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT) - ) - async def post(self, request: web.Request, flow_id: str) -> web.Response: - """Handle a POST request.""" - return await super().post(request, flow_id) - - @websocket_api.require_admin @websocket_api.websocket_command({"type": "config_entries/flow/progress"}) def config_entries_progress( @@ -655,62 +588,3 @@ async def _async_matching_config_entries_json_fragments( ) or (filter_is_not_helper and entry.domain not in integrations) ] - - -@websocket_api.require_admin -@websocket_api.websocket_command( - { - "type": "config_entries/subentries/list", - "entry_id": str, - } -) -@websocket_api.async_response -async def config_subentry_list( - hass: HomeAssistant, - connection: websocket_api.ActiveConnection, - msg: dict[str, Any], -) -> None: - """List subentries of a config entry.""" - entry = get_entry(hass, connection, msg["entry_id"], msg["id"]) - if entry is None: - return - - result = [ - { - "subentry_id": subentry.subentry_id, - "title": subentry.title, - "unique_id": subentry.unique_id, - } - for subentry_id, subentry in entry.subentries.items() - ] - connection.send_result(msg["id"], result) - - -@websocket_api.require_admin -@websocket_api.websocket_command( - { - "type": "config_entries/subentries/delete", - "entry_id": str, - "subentry_id": str, - } -) -@websocket_api.async_response -async def config_subentry_delete( - hass: HomeAssistant, - connection: websocket_api.ActiveConnection, - msg: dict[str, Any], -) -> None: - """Delete a subentry of a config entry.""" - entry = get_entry(hass, connection, msg["entry_id"], msg["id"]) - if entry is None: - return - - try: - hass.config_entries.async_remove_subentry(entry, msg["subentry_id"]) - except config_entries.UnknownSubEntry: - connection.send_error( - msg["id"], websocket_api.const.ERR_NOT_FOUND, "Config subentry not found" - ) - return - - connection.send_result(msg["id"]) diff --git a/homeassistant/config_entries.py b/homeassistant/config_entries.py index d34828f5e46..ade4cd855ca 100644 --- a/homeassistant/config_entries.py +++ b/homeassistant/config_entries.py @@ -15,7 +15,6 @@ from collections.abc 
import ( ) from contextvars import ContextVar from copy import deepcopy -from dataclasses import dataclass, field from datetime import datetime from enum import Enum, StrEnum import functools @@ -23,7 +22,7 @@ from functools import cache import logging from random import randint from types import MappingProxyType -from typing import TYPE_CHECKING, Any, Generic, Self, TypedDict, cast +from typing import TYPE_CHECKING, Any, Generic, Self, cast from async_interrupt import interrupt from propcache import cached_property @@ -129,7 +128,7 @@ HANDLERS: Registry[str, type[ConfigFlow]] = Registry() STORAGE_KEY = "core.config_entries" STORAGE_VERSION = 1 -STORAGE_VERSION_MINOR = 5 +STORAGE_VERSION_MINOR = 4 SAVE_DELAY = 1 @@ -257,10 +256,6 @@ class UnknownEntry(ConfigError): """Unknown entry specified.""" -class UnknownSubEntry(ConfigError): - """Unknown subentry specified.""" - - class OperationNotAllowed(ConfigError): """Raised when a config entry operation is not allowed.""" @@ -305,7 +300,6 @@ class ConfigFlowResult(FlowResult[ConfigFlowContext, str], total=False): minor_version: int options: Mapping[str, Any] - subentries: Iterable[ConfigSubentryData] version: int @@ -319,51 +313,6 @@ def _validate_item(*, disabled_by: ConfigEntryDisabler | Any | None = None) -> N ) -class ConfigSubentryData(TypedDict): - """Container for configuration subentry data. - - Returned by integrations, a subentry_id will be assigned automatically. - """ - - data: Mapping[str, Any] - title: str - unique_id: str | None - - -class ConfigSubentryDataWithId(ConfigSubentryData): - """Container for configuration subentry data. - - This type is used when loading existing subentries from storage. - """ - - subentry_id: str - - -class SubentryFlowResult(FlowResult[FlowContext, tuple[str, str]], total=False): - """Typed result dict for subentry flow.""" - - unique_id: str | None - - -@dataclass(frozen=True, kw_only=True) -class ConfigSubentry: - """Container for a configuration subentry.""" - - data: MappingProxyType[str, Any] - subentry_id: str = field(default_factory=ulid_util.ulid_now) - title: str - unique_id: str | None - - def as_dict(self) -> ConfigSubentryDataWithId: - """Return dictionary version of this subentry.""" - return { - "data": dict(self.data), - "subentry_id": self.subentry_id, - "title": self.title, - "unique_id": self.unique_id, - } - - class ConfigEntry(Generic[_DataT]): """Hold a configuration entry.""" @@ -373,7 +322,6 @@ class ConfigEntry(Generic[_DataT]): data: MappingProxyType[str, Any] runtime_data: _DataT options: MappingProxyType[str, Any] - subentries: MappingProxyType[str, ConfigSubentry] unique_id: str | None state: ConfigEntryState reason: str | None @@ -389,7 +337,6 @@ class ConfigEntry(Generic[_DataT]): supports_remove_device: bool | None _supports_options: bool | None _supports_reconfigure: bool | None - _supported_subentries: tuple[str, ...] 
| None update_listeners: list[UpdateListenerType] _async_cancel_retry_setup: Callable[[], Any] | None _on_unload: list[Callable[[], Coroutine[Any, Any, None] | None]] | None @@ -419,7 +366,6 @@ class ConfigEntry(Generic[_DataT]): pref_disable_polling: bool | None = None, source: str, state: ConfigEntryState = ConfigEntryState.NOT_LOADED, - subentries_data: Iterable[ConfigSubentryData | ConfigSubentryDataWithId] | None, title: str, unique_id: str | None, version: int, @@ -445,24 +391,6 @@ class ConfigEntry(Generic[_DataT]): # Entry options _setter(self, "options", MappingProxyType(options or {})) - # Subentries - subentries_data = subentries_data or () - subentries = {} - for subentry_data in subentries_data: - subentry_kwargs = {} - if "subentry_id" in subentry_data: - # If subentry_data has key "subentry_id", we're loading from storage - subentry_kwargs["subentry_id"] = subentry_data["subentry_id"] # type: ignore[typeddict-item] - subentry = ConfigSubentry( - data=MappingProxyType(subentry_data["data"]), - title=subentry_data["title"], - unique_id=subentry_data.get("unique_id"), - **subentry_kwargs, - ) - subentries[subentry.subentry_id] = subentry - - _setter(self, "subentries", MappingProxyType(subentries)) - # Entry system options if pref_disable_new_entities is None: pref_disable_new_entities = False @@ -499,9 +427,6 @@ class ConfigEntry(Generic[_DataT]): # Supports reconfigure _setter(self, "_supports_reconfigure", None) - # Supports subentries - _setter(self, "_supported_subentries", None) - # Listeners to call on update _setter(self, "update_listeners", []) @@ -574,18 +499,6 @@ class ConfigEntry(Generic[_DataT]): ) return self._supports_reconfigure or False - @property - def supported_subentries(self) -> tuple[str, ...]: - """Return supported subentries.""" - if self._supported_subentries is None and ( - handler := HANDLERS.get(self.domain) - ): - # work out sub entries supported by the handler - object.__setattr__( - self, "_supported_subentries", handler.async_supported_subentries(self) - ) - return self._supported_subentries or () - def clear_state_cache(self) -> None: """Clear cached properties that are included in as_json_fragment.""" self.__dict__.pop("as_json_fragment", None) @@ -605,14 +518,12 @@ class ConfigEntry(Generic[_DataT]): "supports_remove_device": self.supports_remove_device or False, "supports_unload": self.supports_unload or False, "supports_reconfigure": self.supports_reconfigure, - "supported_subentries": self.supported_subentries, "pref_disable_new_entities": self.pref_disable_new_entities, "pref_disable_polling": self.pref_disable_polling, "disabled_by": self.disabled_by, "reason": self.reason, "error_reason_translation_key": self.error_reason_translation_key, "error_reason_translation_placeholders": self.error_reason_translation_placeholders, - "num_subentries": len(self.subentries), } return json_fragment(json_bytes(json_repr)) @@ -1107,7 +1018,6 @@ class ConfigEntry(Generic[_DataT]): "pref_disable_new_entities": self.pref_disable_new_entities, "pref_disable_polling": self.pref_disable_polling, "source": self.source, - "subentries": [subentry.as_dict() for subentry in self.subentries.values()], "title": self.title, "unique_id": self.unique_id, "version": self.version, @@ -1593,7 +1503,6 @@ class ConfigEntriesFlowManager( minor_version=result["minor_version"], options=result["options"], source=flow.context["source"], - subentries_data=result["subentries"], title=result["title"], unique_id=flow.unique_id, version=result["version"], @@ -1884,11 +1793,6 @@ 
class ConfigEntryStore(storage.Store[dict[str, list[dict[str, Any]]]]): for entry in data["entries"]: entry["discovery_keys"] = {} - if old_minor_version < 5: - # Version 1.4 adds config subentries - for entry in data["entries"]: - entry.setdefault("subentries", entry.get("subentries", {})) - if old_major_version > 1: raise NotImplementedError return data @@ -1905,7 +1809,6 @@ class ConfigEntries: self.hass = hass self.flow = ConfigEntriesFlowManager(hass, self, hass_config) self.options = OptionsFlowManager(hass) - self.subentries = ConfigSubentryFlowManager(hass) self._hass_config = hass_config self._entries = ConfigEntryItems(hass) self._store = ConfigEntryStore(hass) @@ -2108,7 +2011,6 @@ class ConfigEntries: pref_disable_new_entities=entry["pref_disable_new_entities"], pref_disable_polling=entry["pref_disable_polling"], source=entry["source"], - subentries_data=entry["subentries"], title=entry["title"], unique_id=entry["unique_id"], version=entry["version"], @@ -2268,44 +2170,6 @@ class ConfigEntries: If the entry was changed, the update_listeners are fired and this function returns True - If the entry was not changed, the update_listeners are - not fired and this function returns False - """ - return self._async_update_entry( - entry, - data=data, - discovery_keys=discovery_keys, - minor_version=minor_version, - options=options, - pref_disable_new_entities=pref_disable_new_entities, - pref_disable_polling=pref_disable_polling, - title=title, - unique_id=unique_id, - version=version, - ) - - @callback - def _async_update_entry( - self, - entry: ConfigEntry, - *, - data: Mapping[str, Any] | UndefinedType = UNDEFINED, - discovery_keys: MappingProxyType[str, tuple[DiscoveryKey, ...]] - | UndefinedType = UNDEFINED, - minor_version: int | UndefinedType = UNDEFINED, - options: Mapping[str, Any] | UndefinedType = UNDEFINED, - pref_disable_new_entities: bool | UndefinedType = UNDEFINED, - pref_disable_polling: bool | UndefinedType = UNDEFINED, - subentries: dict[str, ConfigSubentry] | UndefinedType = UNDEFINED, - title: str | UndefinedType = UNDEFINED, - unique_id: str | None | UndefinedType = UNDEFINED, - version: int | UndefinedType = UNDEFINED, - ) -> bool: - """Update a config entry. 
- - If the entry was changed, the update_listeners are - fired and this function returns True - If the entry was not changed, the update_listeners are not fired and this function returns False """ @@ -2368,11 +2232,6 @@ class ConfigEntries: changed = True _setter(entry, "options", MappingProxyType(options)) - if subentries is not UNDEFINED: - if entry.subentries != subentries: - changed = True - _setter(entry, "subentries", MappingProxyType(subentries)) - if not changed: return False @@ -2390,37 +2249,6 @@ class ConfigEntries: self._async_dispatch(ConfigEntryChange.UPDATED, entry) return True - @callback - def async_add_subentry(self, entry: ConfigEntry, subentry: ConfigSubentry) -> bool: - """Add a subentry to a config entry.""" - self._raise_if_subentry_unique_id_exists(entry, subentry.unique_id) - - return self._async_update_entry( - entry, - subentries=entry.subentries | {subentry.subentry_id: subentry}, - ) - - @callback - def async_remove_subentry(self, entry: ConfigEntry, subentry_id: str) -> bool: - """Remove a subentry from a config entry.""" - subentries = dict(entry.subentries) - try: - subentries.pop(subentry_id) - except KeyError as err: - raise UnknownSubEntry from err - - return self._async_update_entry(entry, subentries=subentries) - - def _raise_if_subentry_unique_id_exists( - self, entry: ConfigEntry, unique_id: str | None - ) -> None: - """Raise if a subentry with the same unique_id exists.""" - if unique_id is None: - return - for existing_subentry in entry.subentries.values(): - if existing_subentry.unique_id == unique_id: - raise data_entry_flow.AbortFlow("already_configured") - @callback def _async_dispatch( self, change_type: ConfigEntryChange, entry: ConfigEntry @@ -2757,20 +2585,6 @@ class ConfigFlow(ConfigEntryBaseFlow): """Return options flow support for this handler.""" return cls.async_get_options_flow is not ConfigFlow.async_get_options_flow - @staticmethod - @callback - def async_get_subentry_flow( - config_entry: ConfigEntry, subentry_type: str - ) -> ConfigSubentryFlow: - """Get the subentry flow for this handler.""" - raise NotImplementedError - - @classmethod - @callback - def async_supported_subentries(cls, config_entry: ConfigEntry) -> tuple[str, ...]: - """Return subentries supported by this handler.""" - return () - @callback def _async_abort_entries_match( self, match_dict: dict[str, Any] | None = None @@ -3079,7 +2893,6 @@ class ConfigFlow(ConfigEntryBaseFlow): description: str | None = None, description_placeholders: Mapping[str, str] | None = None, options: Mapping[str, Any] | None = None, - subentries: Iterable[ConfigSubentryData] | None = None, ) -> ConfigFlowResult: """Finish config flow and create a config entry.""" if self.source in {SOURCE_REAUTH, SOURCE_RECONFIGURE}: @@ -3099,7 +2912,6 @@ class ConfigFlow(ConfigEntryBaseFlow): result["minor_version"] = self.MINOR_VERSION result["options"] = options or {} - result["subentries"] = subentries or () result["version"] = self.VERSION return result @@ -3214,126 +3026,17 @@ class ConfigFlow(ConfigEntryBaseFlow): ) -class _ConfigSubFlowManager: - """Mixin class for flow managers which manage flows tied to a config entry.""" +class OptionsFlowManager( + data_entry_flow.FlowManager[ConfigFlowContext, ConfigFlowResult] +): + """Flow to set options for a configuration entry.""" - hass: HomeAssistant + _flow_result = ConfigFlowResult def _async_get_config_entry(self, config_entry_id: str) -> ConfigEntry: """Return config entry or raise if not found.""" return 
self.hass.config_entries.async_get_known_entry(config_entry_id) - -class ConfigSubentryFlowManager( - data_entry_flow.FlowManager[FlowContext, SubentryFlowResult, tuple[str, str]], - _ConfigSubFlowManager, -): - """Manage all the config subentry flows that are in progress.""" - - _flow_result = SubentryFlowResult - - async def async_create_flow( - self, - handler_key: tuple[str, str], - *, - context: FlowContext | None = None, - data: dict[str, Any] | None = None, - ) -> ConfigSubentryFlow: - """Create a subentry flow for a config entry. - - The entry_id and flow.handler[0] is the same thing to map entry with flow. - """ - if not context or "source" not in context: - raise KeyError("Context not set or doesn't have a source set") - - entry_id, subentry_type = handler_key - entry = self._async_get_config_entry(entry_id) - handler = await _async_get_flow_handler(self.hass, entry.domain, {}) - if subentry_type not in handler.async_supported_subentries(entry): - raise data_entry_flow.UnknownHandler( - f"Config entry '{entry.domain}' does not support subentry '{subentry_type}'" - ) - subentry_flow = handler.async_get_subentry_flow(entry, subentry_type) - subentry_flow.init_step = context["source"] - return subentry_flow - - async def async_finish_flow( - self, - flow: data_entry_flow.FlowHandler[ - FlowContext, SubentryFlowResult, tuple[str, str] - ], - result: SubentryFlowResult, - ) -> SubentryFlowResult: - """Finish a subentry flow and add a new subentry to the configuration entry. - - The flow.handler[0] and entry_id is the same thing to map flow with entry. - """ - flow = cast(ConfigSubentryFlow, flow) - - if result["type"] != data_entry_flow.FlowResultType.CREATE_ENTRY: - return result - - entry_id = flow.handler[0] - entry = self.hass.config_entries.async_get_entry(entry_id) - if entry is None: - raise UnknownEntry(entry_id) - - unique_id = result.get("unique_id") - if unique_id is not None and not isinstance(unique_id, str): - raise HomeAssistantError("unique_id must be a string") - - self.hass.config_entries.async_add_subentry( - entry, - ConfigSubentry( - data=MappingProxyType(result["data"]), - title=result["title"], - unique_id=unique_id, - ), - ) - - result["result"] = True - return result - - -class ConfigSubentryFlow( - data_entry_flow.FlowHandler[FlowContext, SubentryFlowResult, tuple[str, str]] -): - """Base class for config subentry flows.""" - - _flow_result = SubentryFlowResult - handler: tuple[str, str] - - @callback - def async_create_entry( - self, - *, - title: str | None = None, - data: Mapping[str, Any], - description: str | None = None, - description_placeholders: Mapping[str, str] | None = None, - unique_id: str | None = None, - ) -> SubentryFlowResult: - """Finish config flow and create a config entry.""" - result = super().async_create_entry( - title=title, - data=data, - description=description, - description_placeholders=description_placeholders, - ) - - result["unique_id"] = unique_id - - return result - - -class OptionsFlowManager( - data_entry_flow.FlowManager[ConfigFlowContext, ConfigFlowResult], - _ConfigSubFlowManager, -): - """Manage all the config entry option flows that are in progress.""" - - _flow_result = ConfigFlowResult - async def async_create_flow( self, handler_key: str, @@ -3343,7 +3046,7 @@ class OptionsFlowManager( ) -> OptionsFlow: """Create an options flow for a config entry. - The entry_id and the flow.handler is the same thing to map entry with flow. + Entry_id and flow.handler is the same thing to map entry with flow. 
""" entry = self._async_get_config_entry(handler_key) handler = await _async_get_flow_handler(self.hass, entry.domain, {}) @@ -3359,7 +3062,7 @@ class OptionsFlowManager( This method is called when a flow step returns FlowResultType.ABORT or FlowResultType.CREATE_ENTRY. - The flow.handler and the entry_id is the same thing to map flow with entry. + Flow.handler and entry_id is the same thing to map flow with entry. """ flow = cast(OptionsFlow, flow) diff --git a/homeassistant/helpers/data_entry_flow.py b/homeassistant/helpers/data_entry_flow.py index e98061d50b7..adb2062a8ea 100644 --- a/homeassistant/helpers/data_entry_flow.py +++ b/homeassistant/helpers/data_entry_flow.py @@ -18,7 +18,7 @@ from . import config_validation as cv _FlowManagerT = TypeVar( "_FlowManagerT", - bound=data_entry_flow.FlowManager[Any, Any, Any], + bound=data_entry_flow.FlowManager[Any, Any], default=data_entry_flow.FlowManager, ) @@ -71,7 +71,7 @@ class FlowManagerIndexView(_BaseFlowManagerView[_FlowManagerT]): async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response: """Initialize a POST request. - Override `post` and call `_post_impl` in subclasses which need + Override `_post_impl` in subclasses which need to implement their own `RequestDataValidator` """ return await self._post_impl(request, data) diff --git a/script/hassfest/translations.py b/script/hassfest/translations.py index 078c649666d..2fb70b6e0be 100644 --- a/script/hassfest/translations.py +++ b/script/hassfest/translations.py @@ -285,15 +285,6 @@ def gen_strings_schema(config: Config, integration: Integration) -> vol.Schema: "user" if integration.integration_type == "helper" else None ), ), - vol.Optional("config_subentries"): cv.schema_with_slug_keys( - gen_data_entry_schema( - config=config, - integration=integration, - flow_title=REQUIRED, - require_step_title=False, - ), - slug_validator=vol.Any("_", cv.slug), - ), vol.Optional("options"): gen_data_entry_schema( config=config, integration=integration, diff --git a/tests/common.py b/tests/common.py index d2b0dff8faa..ac6f10b8c44 100644 --- a/tests/common.py +++ b/tests/common.py @@ -1000,7 +1000,6 @@ class MockConfigEntry(config_entries.ConfigEntry): reason=None, source=config_entries.SOURCE_USER, state=None, - subentries_data=None, title="Mock Title", unique_id=None, version=1, @@ -1017,7 +1016,6 @@ class MockConfigEntry(config_entries.ConfigEntry): "options": options or {}, "pref_disable_new_entities": pref_disable_new_entities, "pref_disable_polling": pref_disable_polling, - "subentries_data": subentries_data or (), "title": title, "unique_id": unique_id, "version": version, diff --git a/tests/components/aemet/snapshots/test_diagnostics.ambr b/tests/components/aemet/snapshots/test_diagnostics.ambr index 1e09a372352..54546507dfa 100644 --- a/tests/components/aemet/snapshots/test_diagnostics.ambr +++ b/tests/components/aemet/snapshots/test_diagnostics.ambr @@ -21,8 +21,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/airly/snapshots/test_diagnostics.ambr b/tests/components/airly/snapshots/test_diagnostics.ambr index 1c760eaec52..ec501b2fd7e 100644 --- a/tests/components/airly/snapshots/test_diagnostics.ambr +++ b/tests/components/airly/snapshots/test_diagnostics.ambr @@ -19,8 +19,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 
'Home', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/airnow/snapshots/test_diagnostics.ambr b/tests/components/airnow/snapshots/test_diagnostics.ambr index 73ba6a7123f..3dd4788dc61 100644 --- a/tests/components/airnow/snapshots/test_diagnostics.ambr +++ b/tests/components/airnow/snapshots/test_diagnostics.ambr @@ -35,8 +35,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 2, diff --git a/tests/components/airvisual/snapshots/test_diagnostics.ambr b/tests/components/airvisual/snapshots/test_diagnostics.ambr index 0dbdef1d508..606d6082351 100644 --- a/tests/components/airvisual/snapshots/test_diagnostics.ambr +++ b/tests/components/airvisual/snapshots/test_diagnostics.ambr @@ -47,8 +47,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 3, diff --git a/tests/components/airvisual_pro/snapshots/test_diagnostics.ambr b/tests/components/airvisual_pro/snapshots/test_diagnostics.ambr index 113db6e3b96..cb1d3a7aee7 100644 --- a/tests/components/airvisual_pro/snapshots/test_diagnostics.ambr +++ b/tests/components/airvisual_pro/snapshots/test_diagnostics.ambr @@ -101,8 +101,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': 'XXXXXXX', 'version': 1, diff --git a/tests/components/airzone/snapshots/test_diagnostics.ambr b/tests/components/airzone/snapshots/test_diagnostics.ambr index 39668e3d19f..fb4f6530b1e 100644 --- a/tests/components/airzone/snapshots/test_diagnostics.ambr +++ b/tests/components/airzone/snapshots/test_diagnostics.ambr @@ -287,8 +287,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr b/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr index 4bd7bfaccdd..c6ad36916bf 100644 --- a/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr +++ b/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr @@ -101,8 +101,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': 'installation1', 'version': 1, diff --git a/tests/components/ambient_station/snapshots/test_diagnostics.ambr b/tests/components/ambient_station/snapshots/test_diagnostics.ambr index 07db19101ab..2f90b09d39f 100644 --- a/tests/components/ambient_station/snapshots/test_diagnostics.ambr +++ b/tests/components/ambient_station/snapshots/test_diagnostics.ambr @@ -17,8 +17,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 2, diff --git a/tests/components/axis/snapshots/test_diagnostics.ambr b/tests/components/axis/snapshots/test_diagnostics.ambr index b475c796d2b..ebd0061f416 100644 --- a/tests/components/axis/snapshots/test_diagnostics.ambr +++ b/tests/components/axis/snapshots/test_diagnostics.ambr @@ -47,8 +47,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 
'version': 3, diff --git a/tests/components/bang_olufsen/snapshots/test_diagnostics.ambr b/tests/components/bang_olufsen/snapshots/test_diagnostics.ambr index d7f9a045921..e9540b5cec6 100644 --- a/tests/components/bang_olufsen/snapshots/test_diagnostics.ambr +++ b/tests/components/bang_olufsen/snapshots/test_diagnostics.ambr @@ -18,8 +18,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Beosound Balance-11111111', 'unique_id': '11111111', 'version': 1, diff --git a/tests/components/blink/snapshots/test_diagnostics.ambr b/tests/components/blink/snapshots/test_diagnostics.ambr index 54df2b48cdb..edc2879a66b 100644 --- a/tests/components/blink/snapshots/test_diagnostics.ambr +++ b/tests/components/blink/snapshots/test_diagnostics.ambr @@ -48,8 +48,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 3, diff --git a/tests/components/braviatv/snapshots/test_diagnostics.ambr b/tests/components/braviatv/snapshots/test_diagnostics.ambr index de76c00cd23..cd29c647df7 100644 --- a/tests/components/braviatv/snapshots/test_diagnostics.ambr +++ b/tests/components/braviatv/snapshots/test_diagnostics.ambr @@ -19,8 +19,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': 'very_unique_string', 'version': 1, diff --git a/tests/components/co2signal/snapshots/test_diagnostics.ambr b/tests/components/co2signal/snapshots/test_diagnostics.ambr index 4159c8ec1a1..9218e7343ec 100644 --- a/tests/components/co2signal/snapshots/test_diagnostics.ambr +++ b/tests/components/co2signal/snapshots/test_diagnostics.ambr @@ -17,8 +17,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/coinbase/snapshots/test_diagnostics.ambr b/tests/components/coinbase/snapshots/test_diagnostics.ambr index 3eab18fb9f3..51bd946f140 100644 --- a/tests/components/coinbase/snapshots/test_diagnostics.ambr +++ b/tests/components/coinbase/snapshots/test_diagnostics.ambr @@ -44,8 +44,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': None, 'version': 1, diff --git a/tests/components/comelit/snapshots/test_diagnostics.ambr b/tests/components/comelit/snapshots/test_diagnostics.ambr index 877f48a4611..58ce74035f9 100644 --- a/tests/components/comelit/snapshots/test_diagnostics.ambr +++ b/tests/components/comelit/snapshots/test_diagnostics.ambr @@ -71,8 +71,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, @@ -137,8 +135,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/config/test_config_entries.py b/tests/components/config/test_config_entries.py index 0a1ffbe87b3..ee000c5ada2 100644 --- a/tests/components/config/test_config_entries.py +++ b/tests/components/config/test_config_entries.py @@ -137,13 +137,11 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, 
"error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": core_ce.ConfigEntryState.NOT_LOADED.value, - "supported_subentries": [], "supports_options": True, "supports_reconfigure": False, "supports_remove_device": False, @@ -157,13 +155,11 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", "source": "bla2", "state": core_ce.ConfigEntryState.SETUP_ERROR.value, - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -177,13 +173,11 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": core_ce.ConfigEntryState.NOT_LOADED.value, - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -197,13 +191,11 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla4", "state": core_ce.ConfigEntryState.NOT_LOADED.value, - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -217,13 +209,11 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla5", "state": core_ce.ConfigEntryState.NOT_LOADED.value, - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -581,13 +571,11 @@ async def test_create_account(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": core_ce.SOURCE_USER, "state": core_ce.ConfigEntryState.LOADED.value, - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -598,7 +586,6 @@ async def test_create_account(hass: HomeAssistant, client: TestClient) -> None: "description_placeholders": None, "options": {}, "minor_version": 1, - "subentries": [], } @@ -667,13 +654,11 @@ async def test_two_step_flow(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": core_ce.SOURCE_USER, "state": core_ce.ConfigEntryState.LOADED.value, - "supported_subentries": [], 
"supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -684,7 +669,6 @@ async def test_two_step_flow(hass: HomeAssistant, client: TestClient) -> None: "description_placeholders": None, "options": {}, "minor_version": 1, - "subentries": [], } @@ -1104,273 +1088,6 @@ async def test_options_flow_with_invalid_data( assert data == {"errors": {"choices": "invalid is not a valid option"}} -async def test_subentry_flow(hass: HomeAssistant, client) -> None: - """Test we can start a subentry flow.""" - - class TestFlow(core_ce.ConfigFlow): - @staticmethod - @callback - def async_get_subentry_flow(config_entry, subentry_type: str): - class SubentryFlowHandler(core_ce.ConfigSubentryFlow): - async def async_step_init(self, user_input=None): - raise NotImplementedError - - async def async_step_user(self, user_input=None): - schema = OrderedDict() - schema[vol.Required("enabled")] = bool - return self.async_show_form( - step_id="user", - data_schema=schema, - description_placeholders={"enabled": "Set to true to be true"}, - ) - - return SubentryFlowHandler() - - @classmethod - @callback - def async_supported_subentries(cls, config_entry): - return ("test",) - - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - MockConfigEntry( - domain="test", - entry_id="test1", - source="bla", - ).add_to_hass(hass) - entry = hass.config_entries.async_entries()[0] - - with patch.dict(HANDLERS, {"test": TestFlow}): - url = "/api/config/config_entries/subentries/flow" - resp = await client.post(url, json={"handler": [entry.entry_id, "test"]}) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - data.pop("flow_id") - assert data == { - "type": "form", - "handler": ["test1", "test"], - "step_id": "user", - "data_schema": [{"name": "enabled", "required": True, "type": "boolean"}], - "description_placeholders": {"enabled": "Set to true to be true"}, - "errors": None, - "last_step": None, - "preview": None, - } - - -@pytest.mark.parametrize( - ("endpoint", "method"), - [ - ("/api/config/config_entries/subentries/flow", "post"), - ("/api/config/config_entries/subentries/flow/1", "get"), - ("/api/config/config_entries/subentries/flow/1", "post"), - ], -) -async def test_subentry_flow_unauth( - hass: HomeAssistant, client, hass_admin_user: MockUser, endpoint: str, method: str -) -> None: - """Test unauthorized on subentry flow.""" - - class TestFlow(core_ce.ConfigFlow): - @staticmethod - @callback - def async_get_subentry_flow(config_entry, subentry_type: str): - class SubentryFlowHandler(core_ce.ConfigSubentryFlow): - async def async_step_init(self, user_input=None): - schema = OrderedDict() - schema[vol.Required("enabled")] = bool - return self.async_show_form( - step_id="user", - data_schema=schema, - description_placeholders={"enabled": "Set to true to be true"}, - ) - - return SubentryFlowHandler() - - @classmethod - @callback - def async_supported_subentries(cls, config_entry): - return ("test",) - - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - MockConfigEntry( - domain="test", - entry_id="test1", - source="bla", - ).add_to_hass(hass) - entry = hass.config_entries.async_entries()[0] - - hass_admin_user.groups = [] - - with patch.dict(HANDLERS, {"test": TestFlow}): - resp = await getattr(client, method)(endpoint, json={"handler": entry.entry_id}) - - assert resp.status == HTTPStatus.UNAUTHORIZED - - -async def test_two_step_subentry_flow(hass: HomeAssistant, client) -> None: - 
"""Test we can finish a two step subentry flow.""" - mock_integration( - hass, MockModule("test", async_setup_entry=AsyncMock(return_value=True)) - ) - mock_platform(hass, "test.config_flow", None) - - class TestFlow(core_ce.ConfigFlow): - @staticmethod - @callback - def async_get_subentry_flow(config_entry, subentry_type: str): - class SubentryFlowHandler(core_ce.ConfigSubentryFlow): - async def async_step_user(self, user_input=None): - return await self.async_step_finish() - - async def async_step_finish(self, user_input=None): - if user_input: - return self.async_create_entry( - title="Mock title", data=user_input, unique_id="test" - ) - - return self.async_show_form( - step_id="finish", data_schema=vol.Schema({"enabled": bool}) - ) - - return SubentryFlowHandler() - - @classmethod - @callback - def async_supported_subentries(cls, config_entry): - return ("test",) - - MockConfigEntry( - domain="test", - entry_id="test1", - source="bla", - ).add_to_hass(hass) - entry = hass.config_entries.async_entries()[0] - - with patch.dict(HANDLERS, {"test": TestFlow}): - url = "/api/config/config_entries/subentries/flow" - resp = await client.post(url, json={"handler": [entry.entry_id, "test"]}) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - flow_id = data["flow_id"] - expected_data = { - "data_schema": [{"name": "enabled", "type": "boolean"}], - "description_placeholders": None, - "errors": None, - "flow_id": flow_id, - "handler": ["test1", "test"], - "last_step": None, - "preview": None, - "step_id": "finish", - "type": "form", - } - assert data == expected_data - - resp = await client.get(f"/api/config/config_entries/subentries/flow/{flow_id}") - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data == expected_data - - resp = await client.post( - f"/api/config/config_entries/subentries/flow/{flow_id}", - json={"enabled": True}, - ) - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data == { - "description_placeholders": None, - "description": None, - "flow_id": flow_id, - "handler": ["test1", "test"], - "title": "Mock title", - "type": "create_entry", - "unique_id": "test", - } - - -async def test_subentry_flow_with_invalid_data(hass: HomeAssistant, client) -> None: - """Test a subentry flow with invalid_data.""" - mock_integration( - hass, MockModule("test", async_setup_entry=AsyncMock(return_value=True)) - ) - mock_platform(hass, "test.config_flow", None) - - class TestFlow(core_ce.ConfigFlow): - @staticmethod - @callback - def async_get_subentry_flow(config_entry, subentry_type: str): - class SubentryFlowHandler(core_ce.ConfigSubentryFlow): - async def async_step_user(self, user_input=None): - return self.async_show_form( - step_id="finish", - data_schema=vol.Schema( - { - vol.Required( - "choices", default=["invalid", "valid"] - ): cv.multi_select({"valid": "Valid"}) - } - ), - ) - - async def async_step_finish(self, user_input=None): - return self.async_create_entry( - title="Enable disable", data=user_input - ) - - return SubentryFlowHandler() - - @classmethod - @callback - def async_supported_subentries(cls, config_entry): - return ("test",) - - MockConfigEntry( - domain="test", - entry_id="test1", - source="bla", - ).add_to_hass(hass) - entry = hass.config_entries.async_entries()[0] - - with patch.dict(HANDLERS, {"test": TestFlow}): - url = "/api/config/config_entries/subentries/flow" - resp = await client.post(url, json={"handler": [entry.entry_id, "test"]}) - - assert resp.status == HTTPStatus.OK - data = await 
resp.json() - flow_id = data.pop("flow_id") - assert data == { - "type": "form", - "handler": ["test1", "test"], - "step_id": "finish", - "data_schema": [ - { - "default": ["invalid", "valid"], - "name": "choices", - "options": {"valid": "Valid"}, - "required": True, - "type": "multi_select", - } - ], - "description_placeholders": None, - "errors": None, - "last_step": None, - "preview": None, - } - - with patch.dict(HANDLERS, {"test": TestFlow}): - resp = await client.post( - f"/api/config/config_entries/subentries/flow/{flow_id}", - json={"choices": ["valid", "invalid"]}, - ) - assert resp.status == HTTPStatus.BAD_REQUEST - data = await resp.json() - assert data == {"errors": {"choices": "invalid is not a valid option"}} - - @pytest.mark.usefixtures("freezer") async def test_get_single( hass: HomeAssistant, hass_ws_client: WebSocketGenerator @@ -1403,13 +1120,11 @@ async def test_get_single( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "user", "state": "loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1765,13 +1480,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1786,13 +1499,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", "source": "bla2", "state": "setup_error", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1807,13 +1518,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1828,13 +1537,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla4", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1849,13 +1556,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla5", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1881,13 +1586,11 @@ async def 
test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1912,13 +1615,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla4", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1933,13 +1634,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla5", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1964,13 +1663,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1985,13 +1682,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2022,13 +1717,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2043,13 +1736,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", "source": "bla2", "state": "setup_error", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2064,13 +1755,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2085,13 +1774,11 @@ async def 
test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla4", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2106,13 +1793,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla5", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2215,13 +1900,11 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": created, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2239,13 +1922,11 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": created, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", "source": "bla2", "state": "setup_error", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2263,13 +1944,11 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": created, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2293,13 +1972,11 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": modified, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2324,13 +2001,11 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": modified, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2354,13 +2029,11 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": entry.modified_at.timestamp(), - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2446,13 +2119,11 @@ async def 
test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": created, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2470,13 +2141,11 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": created, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2502,13 +2171,11 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": modified, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2530,13 +2197,11 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": modified, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2562,13 +2227,11 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": modified, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2592,13 +2255,11 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": entry.modified_at.timestamp(), - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2809,133 +2470,3 @@ async def test_does_not_support_reconfigure( response == '{"message":"Handler ConfigEntriesFlowManager doesn\'t support step reconfigure"}' ) - - -async def test_list_subentries( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test that we can list subentries.""" - assert await async_setup_component(hass, "config", {}) - ws_client = await hass_ws_client(hass) - - entry = MockConfigEntry( - domain="test", - state=core_ce.ConfigEntryState.LOADED, - subentries_data=[ - core_ce.ConfigSubentryData( - data={"test": "test"}, - subentry_id="mock_id", - title="Mock title", - unique_id="test", - ) - ], - ) - entry.add_to_hass(hass) - - assert entry.pref_disable_new_entities is False - assert entry.pref_disable_polling is False - - await ws_client.send_json_auto_id( - { - "type": "config_entries/subentries/list", - "entry_id": 
entry.entry_id, - } - ) - response = await ws_client.receive_json() - - assert response["success"] - assert response["result"] == [ - {"subentry_id": "mock_id", "title": "Mock title", "unique_id": "test"}, - ] - - # Try listing subentries for an unknown entry - await ws_client.send_json_auto_id( - { - "type": "config_entries/subentries/list", - "entry_id": "no_such_entry", - } - ) - response = await ws_client.receive_json() - - assert not response["success"] - assert response["error"] == { - "code": "not_found", - "message": "Config entry not found", - } - - -async def test_delete_subentry( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test that we can delete a subentry.""" - assert await async_setup_component(hass, "config", {}) - ws_client = await hass_ws_client(hass) - - entry = MockConfigEntry( - domain="test", - state=core_ce.ConfigEntryState.LOADED, - subentries_data=[ - core_ce.ConfigSubentryData( - data={"test": "test"}, subentry_id="mock_id", title="Mock title" - ) - ], - ) - entry.add_to_hass(hass) - - assert entry.pref_disable_new_entities is False - assert entry.pref_disable_polling is False - - await ws_client.send_json_auto_id( - { - "type": "config_entries/subentries/delete", - "entry_id": entry.entry_id, - "subentry_id": "mock_id", - } - ) - response = await ws_client.receive_json() - - assert response["success"] - assert response["result"] is None - - await ws_client.send_json_auto_id( - { - "type": "config_entries/subentries/list", - "entry_id": entry.entry_id, - } - ) - response = await ws_client.receive_json() - - assert response["success"] - assert response["result"] == [] - - # Try deleting the subentry again - await ws_client.send_json_auto_id( - { - "type": "config_entries/subentries/delete", - "entry_id": entry.entry_id, - "subentry_id": "mock_id", - } - ) - response = await ws_client.receive_json() - - assert not response["success"] - assert response["error"] == { - "code": "not_found", - "message": "Config subentry not found", - } - - # Try deleting subentry from an unknown entry - await ws_client.send_json_auto_id( - { - "type": "config_entries/subentries/delete", - "entry_id": "no_such_entry", - "subentry_id": "mock_id", - } - ) - response = await ws_client.receive_json() - - assert not response["success"] - assert response["error"] == { - "code": "not_found", - "message": "Config entry not found", - } diff --git a/tests/components/deconz/snapshots/test_diagnostics.ambr b/tests/components/deconz/snapshots/test_diagnostics.ambr index 20558b4bbbd..1ca674a4fbe 100644 --- a/tests/components/deconz/snapshots/test_diagnostics.ambr +++ b/tests/components/deconz/snapshots/test_diagnostics.ambr @@ -21,8 +21,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr b/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr index 0e507ca0b28..abedc128756 100644 --- a/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr +++ b/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr @@ -47,8 +47,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': '123456', 'version': 1, diff --git a/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr 
b/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr index 1288b7f3ef6..53940bf5119 100644 --- a/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr +++ b/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr @@ -32,8 +32,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': '1234567890', 'version': 1, diff --git a/tests/components/dsmr_reader/snapshots/test_diagnostics.ambr b/tests/components/dsmr_reader/snapshots/test_diagnostics.ambr index 0a46dd7f476..d407fe2dc5b 100644 --- a/tests/components/dsmr_reader/snapshots/test_diagnostics.ambr +++ b/tests/components/dsmr_reader/snapshots/test_diagnostics.ambr @@ -17,8 +17,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'dsmr_reader', 'unique_id': 'UNIQUE_TEST_ID', 'version': 1, diff --git a/tests/components/ecovacs/snapshots/test_diagnostics.ambr b/tests/components/ecovacs/snapshots/test_diagnostics.ambr index f9540e06038..38c8a9a5ab9 100644 --- a/tests/components/ecovacs/snapshots/test_diagnostics.ambr +++ b/tests/components/ecovacs/snapshots/test_diagnostics.ambr @@ -17,8 +17,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': None, 'version': 1, @@ -72,8 +70,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': None, 'version': 1, diff --git a/tests/components/energyzero/snapshots/test_config_flow.ambr b/tests/components/energyzero/snapshots/test_config_flow.ambr index 88b0af6dc7b..72e504c97c8 100644 --- a/tests/components/energyzero/snapshots/test_config_flow.ambr +++ b/tests/components/energyzero/snapshots/test_config_flow.ambr @@ -28,14 +28,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'EnergyZero', 'unique_id': 'energyzero', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'EnergyZero', 'type': , 'version': 1, diff --git a/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr b/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr index 3cacd3a8518..76835098f27 100644 --- a/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr +++ b/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr @@ -20,8 +20,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, @@ -456,8 +454,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, @@ -932,8 +928,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/esphome/snapshots/test_diagnostics.ambr b/tests/components/esphome/snapshots/test_diagnostics.ambr index 8f1711e829e..4f7ea679b20 100644 --- a/tests/components/esphome/snapshots/test_diagnostics.ambr +++ b/tests/components/esphome/snapshots/test_diagnostics.ambr @@ -20,8 +20,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': 
list([ - ]), 'title': 'ESPHome Device', 'unique_id': '11:22:33:44:55:aa', 'version': 1, diff --git a/tests/components/esphome/test_diagnostics.py b/tests/components/esphome/test_diagnostics.py index 0beeae71df3..832e7d6572f 100644 --- a/tests/components/esphome/test_diagnostics.py +++ b/tests/components/esphome/test_diagnostics.py @@ -79,7 +79,6 @@ async def test_diagnostics_with_bluetooth( "pref_disable_new_entities": False, "pref_disable_polling": False, "source": "user", - "subentries": [], "title": "Mock Title", "unique_id": "11:22:33:44:55:aa", "version": 1, diff --git a/tests/components/forecast_solar/snapshots/test_init.ambr b/tests/components/forecast_solar/snapshots/test_init.ambr index c0db54c2d4e..6ae4c2f6198 100644 --- a/tests/components/forecast_solar/snapshots/test_init.ambr +++ b/tests/components/forecast_solar/snapshots/test_init.ambr @@ -23,8 +23,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Green House', 'unique_id': 'unique', 'version': 2, diff --git a/tests/components/fritz/snapshots/test_diagnostics.ambr b/tests/components/fritz/snapshots/test_diagnostics.ambr index 9b5b8c9353a..53f7093a21b 100644 --- a/tests/components/fritz/snapshots/test_diagnostics.ambr +++ b/tests/components/fritz/snapshots/test_diagnostics.ambr @@ -61,8 +61,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/fronius/snapshots/test_diagnostics.ambr b/tests/components/fronius/snapshots/test_diagnostics.ambr index b112839835a..010de06e276 100644 --- a/tests/components/fronius/snapshots/test_diagnostics.ambr +++ b/tests/components/fronius/snapshots/test_diagnostics.ambr @@ -17,8 +17,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/fyta/snapshots/test_diagnostics.ambr b/tests/components/fyta/snapshots/test_diagnostics.ambr index f1792cb7535..eb19797e5b1 100644 --- a/tests/components/fyta/snapshots/test_diagnostics.ambr +++ b/tests/components/fyta/snapshots/test_diagnostics.ambr @@ -19,8 +19,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'fyta_user', 'unique_id': None, 'version': 1, diff --git a/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr b/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr index 10f23759fae..6d521b1f2c8 100644 --- a/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr +++ b/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr @@ -66,14 +66,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'bluetooth', - 'subentries': list([ - ]), 'title': 'Gardena Water Computer', 'unique_id': '00000000-0000-0000-0000-000000000001', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'Gardena Water Computer', 'type': , 'version': 1, @@ -227,14 +223,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Gardena Water Computer', 'unique_id': '00000000-0000-0000-0000-000000000001', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'Gardena Water Computer', 'type': , 'version': 1, diff --git a/tests/components/gios/snapshots/test_diagnostics.ambr 
b/tests/components/gios/snapshots/test_diagnostics.ambr index 890edc00482..71e0afdc495 100644 --- a/tests/components/gios/snapshots/test_diagnostics.ambr +++ b/tests/components/gios/snapshots/test_diagnostics.ambr @@ -17,8 +17,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Home', 'unique_id': '123', 'version': 1, diff --git a/tests/components/goodwe/snapshots/test_diagnostics.ambr b/tests/components/goodwe/snapshots/test_diagnostics.ambr index 40ed22195d5..f52e47688e8 100644 --- a/tests/components/goodwe/snapshots/test_diagnostics.ambr +++ b/tests/components/goodwe/snapshots/test_diagnostics.ambr @@ -17,8 +17,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/google_assistant/snapshots/test_diagnostics.ambr b/tests/components/google_assistant/snapshots/test_diagnostics.ambr index 1ecedbd1173..edbbdb1ba28 100644 --- a/tests/components/google_assistant/snapshots/test_diagnostics.ambr +++ b/tests/components/google_assistant/snapshots/test_diagnostics.ambr @@ -15,8 +15,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'import', - 'subentries': list([ - ]), 'title': '1234', 'unique_id': '1234', 'version': 1, diff --git a/tests/components/guardian/test_diagnostics.py b/tests/components/guardian/test_diagnostics.py index 4487d0b6ac6..faba2103000 100644 --- a/tests/components/guardian/test_diagnostics.py +++ b/tests/components/guardian/test_diagnostics.py @@ -42,7 +42,6 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, - "subentries": [], }, "data": { "valve_controller": { diff --git a/tests/components/homewizard/snapshots/test_config_flow.ambr b/tests/components/homewizard/snapshots/test_config_flow.ambr index 71e70f3a153..0a301fc3941 100644 --- a/tests/components/homewizard/snapshots/test_config_flow.ambr +++ b/tests/components/homewizard/snapshots/test_config_flow.ambr @@ -30,14 +30,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'zeroconf', - 'subentries': list([ - ]), 'title': 'P1 meter', 'unique_id': 'HWE-P1_5c2fafabcdef', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'P1 meter', 'type': , 'version': 1, @@ -78,14 +74,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'zeroconf', - 'subentries': list([ - ]), 'title': 'P1 meter', 'unique_id': 'HWE-P1_5c2fafabcdef', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'P1 meter', 'type': , 'version': 1, @@ -126,14 +118,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'zeroconf', - 'subentries': list([ - ]), 'title': 'Energy Socket', 'unique_id': 'HWE-SKT_5c2fafabcdef', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'Energy Socket', 'type': , 'version': 1, @@ -170,14 +158,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'P1 meter', 'unique_id': 'HWE-P1_5c2fafabcdef', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'P1 meter', 'type': , 'version': 1, diff --git a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr index 2dab82451a6..a4dc986c2f9 100644 --- a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr +++ 
b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr @@ -183,8 +183,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Husqvarna Automower of Erika Mustermann', 'unique_id': '123', 'version': 1, diff --git a/tests/components/imgw_pib/snapshots/test_diagnostics.ambr b/tests/components/imgw_pib/snapshots/test_diagnostics.ambr index f15fc706d7e..494980ba4ce 100644 --- a/tests/components/imgw_pib/snapshots/test_diagnostics.ambr +++ b/tests/components/imgw_pib/snapshots/test_diagnostics.ambr @@ -15,8 +15,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'River Name (Station Name)', 'unique_id': '123', 'version': 1, diff --git a/tests/components/iqvia/snapshots/test_diagnostics.ambr b/tests/components/iqvia/snapshots/test_diagnostics.ambr index 41cfedb0e29..f2fa656cb0f 100644 --- a/tests/components/iqvia/snapshots/test_diagnostics.ambr +++ b/tests/components/iqvia/snapshots/test_diagnostics.ambr @@ -358,8 +358,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/kostal_plenticore/test_diagnostics.py b/tests/components/kostal_plenticore/test_diagnostics.py index 3a99a7f681d..08f06684d9a 100644 --- a/tests/components/kostal_plenticore/test_diagnostics.py +++ b/tests/components/kostal_plenticore/test_diagnostics.py @@ -57,7 +57,6 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, - "subentries": [], }, "client": { "version": "api_version='0.2.0' hostname='scb' name='PUCK RESTful API' sw_version='01.16.05025'", diff --git a/tests/components/lacrosse_view/snapshots/test_diagnostics.ambr b/tests/components/lacrosse_view/snapshots/test_diagnostics.ambr index 640726e2355..201bbbc971e 100644 --- a/tests/components/lacrosse_view/snapshots/test_diagnostics.ambr +++ b/tests/components/lacrosse_view/snapshots/test_diagnostics.ambr @@ -25,8 +25,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr b/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr index db82f41eb73..c689d04949a 100644 --- a/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr +++ b/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr @@ -73,8 +73,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'test-site-name', 'unique_id': None, 'version': 1, diff --git a/tests/components/madvr/snapshots/test_diagnostics.ambr b/tests/components/madvr/snapshots/test_diagnostics.ambr index 92d0578dba8..3a281391860 100644 --- a/tests/components/madvr/snapshots/test_diagnostics.ambr +++ b/tests/components/madvr/snapshots/test_diagnostics.ambr @@ -17,8 +17,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'envy', 'unique_id': '00:11:22:33:44:55', 'version': 1, diff --git a/tests/components/melcloud/snapshots/test_diagnostics.ambr b/tests/components/melcloud/snapshots/test_diagnostics.ambr index 671f5afcc52..e6a432de07e 100644 --- 
a/tests/components/melcloud/snapshots/test_diagnostics.ambr +++ b/tests/components/melcloud/snapshots/test_diagnostics.ambr @@ -17,8 +17,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'melcloud', 'unique_id': 'UNIQUE_TEST_ID', 'version': 1, diff --git a/tests/components/modern_forms/snapshots/test_diagnostics.ambr b/tests/components/modern_forms/snapshots/test_diagnostics.ambr index 1b4090ca5a4..f8897a4a47f 100644 --- a/tests/components/modern_forms/snapshots/test_diagnostics.ambr +++ b/tests/components/modern_forms/snapshots/test_diagnostics.ambr @@ -16,8 +16,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': 'AA:BB:CC:DD:EE:FF', 'version': 1, diff --git a/tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr b/tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr index d042dc02ac3..5b4b169c0fe 100644 --- a/tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr +++ b/tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr @@ -28,8 +28,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/netatmo/snapshots/test_diagnostics.ambr b/tests/components/netatmo/snapshots/test_diagnostics.ambr index 4ea7e30bcf9..463556ec657 100644 --- a/tests/components/netatmo/snapshots/test_diagnostics.ambr +++ b/tests/components/netatmo/snapshots/test_diagnostics.ambr @@ -646,8 +646,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': 'netatmo', 'version': 1, diff --git a/tests/components/nextdns/snapshots/test_diagnostics.ambr b/tests/components/nextdns/snapshots/test_diagnostics.ambr index 23f42fee077..827d6aeb6e5 100644 --- a/tests/components/nextdns/snapshots/test_diagnostics.ambr +++ b/tests/components/nextdns/snapshots/test_diagnostics.ambr @@ -17,8 +17,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Fake Profile', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/nice_go/snapshots/test_diagnostics.ambr b/tests/components/nice_go/snapshots/test_diagnostics.ambr index b33726d2b72..f4ba363a421 100644 --- a/tests/components/nice_go/snapshots/test_diagnostics.ambr +++ b/tests/components/nice_go/snapshots/test_diagnostics.ambr @@ -60,8 +60,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/notion/test_diagnostics.py b/tests/components/notion/test_diagnostics.py index c1d1bd1bb2e..890ce2dfc4a 100644 --- a/tests/components/notion/test_diagnostics.py +++ b/tests/components/notion/test_diagnostics.py @@ -37,7 +37,6 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, - "subentries": [], }, "data": { "bridges": [ diff --git a/tests/components/onvif/snapshots/test_diagnostics.ambr b/tests/components/onvif/snapshots/test_diagnostics.ambr index c3938efcbb6..c8a9ff75d62 100644 --- a/tests/components/onvif/snapshots/test_diagnostics.ambr +++ b/tests/components/onvif/snapshots/test_diagnostics.ambr @@ -24,8 +24,6 @@ 
'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': 'aa:bb:cc:dd:ee:ff', 'version': 1, diff --git a/tests/components/openuv/test_diagnostics.py b/tests/components/openuv/test_diagnostics.py index 03b392b3e7b..61b68b5ad90 100644 --- a/tests/components/openuv/test_diagnostics.py +++ b/tests/components/openuv/test_diagnostics.py @@ -39,7 +39,6 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, - "subentries": [], }, "data": { "protection_window": { diff --git a/tests/components/p1_monitor/snapshots/test_init.ambr b/tests/components/p1_monitor/snapshots/test_init.ambr index 83684e153c9..d0a676fce1b 100644 --- a/tests/components/p1_monitor/snapshots/test_init.ambr +++ b/tests/components/p1_monitor/snapshots/test_init.ambr @@ -16,8 +16,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': 'unique_thingy', 'version': 2, @@ -40,8 +38,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': 'unique_thingy', 'version': 2, diff --git a/tests/components/pegel_online/snapshots/test_diagnostics.ambr b/tests/components/pegel_online/snapshots/test_diagnostics.ambr index d0fdc81acb4..1e55805f867 100644 --- a/tests/components/pegel_online/snapshots/test_diagnostics.ambr +++ b/tests/components/pegel_online/snapshots/test_diagnostics.ambr @@ -31,8 +31,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': '70272185-xxxx-xxxx-xxxx-43bea330dcae', 'version': 1, diff --git a/tests/components/philips_js/snapshots/test_diagnostics.ambr b/tests/components/philips_js/snapshots/test_diagnostics.ambr index 53db95f0534..4f7a6176634 100644 --- a/tests/components/philips_js/snapshots/test_diagnostics.ambr +++ b/tests/components/philips_js/snapshots/test_diagnostics.ambr @@ -94,8 +94,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/philips_js/test_config_flow.py b/tests/components/philips_js/test_config_flow.py index 4b8048a8ebe..80d05961813 100644 --- a/tests/components/philips_js/test_config_flow.py +++ b/tests/components/philips_js/test_config_flow.py @@ -155,7 +155,6 @@ async def test_pairing(hass: HomeAssistant, mock_tv_pairable, mock_setup_entry) "version": 1, "options": {}, "minor_version": 1, - "subentries": (), } await hass.async_block_till_done() diff --git a/tests/components/pi_hole/snapshots/test_diagnostics.ambr b/tests/components/pi_hole/snapshots/test_diagnostics.ambr index 2d6f6687d04..3094fcef24b 100644 --- a/tests/components/pi_hole/snapshots/test_diagnostics.ambr +++ b/tests/components/pi_hole/snapshots/test_diagnostics.ambr @@ -33,8 +33,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/proximity/snapshots/test_diagnostics.ambr b/tests/components/proximity/snapshots/test_diagnostics.ambr index 42ec74710f9..3d9673ffd90 100644 --- a/tests/components/proximity/snapshots/test_diagnostics.ambr +++ b/tests/components/proximity/snapshots/test_diagnostics.ambr 
@@ -102,8 +102,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'home', 'unique_id': 'proximity_home', 'version': 1, diff --git a/tests/components/ps4/test_init.py b/tests/components/ps4/test_init.py index 24d45fee5b9..d14f367b2bd 100644 --- a/tests/components/ps4/test_init.py +++ b/tests/components/ps4/test_init.py @@ -52,7 +52,6 @@ MOCK_FLOW_RESULT = { "title": "test_ps4", "data": MOCK_DATA, "options": {}, - "subentries": (), } MOCK_ENTRY_ID = "SomeID" diff --git a/tests/components/purpleair/test_diagnostics.py b/tests/components/purpleair/test_diagnostics.py index 6271a63d652..ae4b28567be 100644 --- a/tests/components/purpleair/test_diagnostics.py +++ b/tests/components/purpleair/test_diagnostics.py @@ -38,7 +38,6 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, - "subentries": [], }, "data": { "fields": [ diff --git a/tests/components/rainforest_raven/snapshots/test_diagnostics.ambr b/tests/components/rainforest_raven/snapshots/test_diagnostics.ambr index abf8e380916..e131bf3d952 100644 --- a/tests/components/rainforest_raven/snapshots/test_diagnostics.ambr +++ b/tests/components/rainforest_raven/snapshots/test_diagnostics.ambr @@ -17,8 +17,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, @@ -86,8 +84,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/rainmachine/snapshots/test_diagnostics.ambr b/tests/components/rainmachine/snapshots/test_diagnostics.ambr index 681805996f1..acd5fd165b4 100644 --- a/tests/components/rainmachine/snapshots/test_diagnostics.ambr +++ b/tests/components/rainmachine/snapshots/test_diagnostics.ambr @@ -1144,8 +1144,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 2, @@ -2277,8 +2275,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 2, diff --git a/tests/components/recollect_waste/test_diagnostics.py b/tests/components/recollect_waste/test_diagnostics.py index a57e289ec04..24c690bcb37 100644 --- a/tests/components/recollect_waste/test_diagnostics.py +++ b/tests/components/recollect_waste/test_diagnostics.py @@ -34,7 +34,6 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, - "subentries": [], }, "data": [ { diff --git a/tests/components/ridwell/snapshots/test_diagnostics.ambr b/tests/components/ridwell/snapshots/test_diagnostics.ambr index 4b4dda7227d..b03d87c7a89 100644 --- a/tests/components/ridwell/snapshots/test_diagnostics.ambr +++ b/tests/components/ridwell/snapshots/test_diagnostics.ambr @@ -44,8 +44,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 2, diff --git a/tests/components/samsungtv/test_diagnostics.py b/tests/components/samsungtv/test_diagnostics.py index e8e0b699a7e..0319d5dd8dd 100644 --- a/tests/components/samsungtv/test_diagnostics.py +++ b/tests/components/samsungtv/test_diagnostics.py @@ -51,7 +51,6 
@@ async def test_entry_diagnostics( "pref_disable_new_entities": False, "pref_disable_polling": False, "source": "user", - "subentries": [], "title": "Mock Title", "unique_id": "any", "version": 2, @@ -92,7 +91,6 @@ async def test_entry_diagnostics_encrypted( "pref_disable_new_entities": False, "pref_disable_polling": False, "source": "user", - "subentries": [], "title": "Mock Title", "unique_id": "any", "version": 2, @@ -132,7 +130,6 @@ async def test_entry_diagnostics_encrypte_offline( "pref_disable_new_entities": False, "pref_disable_polling": False, "source": "user", - "subentries": [], "title": "Mock Title", "unique_id": "any", "version": 2, diff --git a/tests/components/screenlogic/snapshots/test_diagnostics.ambr b/tests/components/screenlogic/snapshots/test_diagnostics.ambr index c7db7a33959..237d3eab257 100644 --- a/tests/components/screenlogic/snapshots/test_diagnostics.ambr +++ b/tests/components/screenlogic/snapshots/test_diagnostics.ambr @@ -18,8 +18,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Pentair: DD-EE-FF', 'unique_id': 'aa:bb:cc:dd:ee:ff', 'version': 1, diff --git a/tests/components/simplisafe/test_diagnostics.py b/tests/components/simplisafe/test_diagnostics.py index 13c1e28aa36..d5479f00b06 100644 --- a/tests/components/simplisafe/test_diagnostics.py +++ b/tests/components/simplisafe/test_diagnostics.py @@ -32,7 +32,6 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, - "subentries": [], }, "subscription_data": { "12345": { diff --git a/tests/components/solarlog/snapshots/test_diagnostics.ambr b/tests/components/solarlog/snapshots/test_diagnostics.ambr index 6aef72ebbd5..e0f1bc2623c 100644 --- a/tests/components/solarlog/snapshots/test_diagnostics.ambr +++ b/tests/components/solarlog/snapshots/test_diagnostics.ambr @@ -18,8 +18,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'solarlog', 'unique_id': None, 'version': 1, diff --git a/tests/components/subaru/test_config_flow.py b/tests/components/subaru/test_config_flow.py index 0b45546902b..6abc544c92a 100644 --- a/tests/components/subaru/test_config_flow.py +++ b/tests/components/subaru/test_config_flow.py @@ -136,7 +136,6 @@ async def test_user_form_pin_not_required( "data": deepcopy(TEST_CONFIG), "options": {}, "minor_version": 1, - "subentries": (), } expected["data"][CONF_PIN] = None @@ -342,7 +341,6 @@ async def test_pin_form_success(hass: HomeAssistant, pin_form) -> None: "data": TEST_CONFIG, "options": {}, "minor_version": 1, - "subentries": (), } result["data"][CONF_DEVICE_ID] = TEST_DEVICE_ID assert result == expected diff --git a/tests/components/switcher_kis/test_diagnostics.py b/tests/components/switcher_kis/test_diagnostics.py index f59958420c4..53572085f9b 100644 --- a/tests/components/switcher_kis/test_diagnostics.py +++ b/tests/components/switcher_kis/test_diagnostics.py @@ -69,6 +69,5 @@ async def test_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, - "subentries": [], }, } diff --git a/tests/components/systemmonitor/snapshots/test_diagnostics.ambr b/tests/components/systemmonitor/snapshots/test_diagnostics.ambr index afa508cc004..75d942fc601 100644 --- a/tests/components/systemmonitor/snapshots/test_diagnostics.ambr +++ b/tests/components/systemmonitor/snapshots/test_diagnostics.ambr @@ -56,8 +56,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 
'source': 'user', - 'subentries': list([ - ]), 'title': 'System Monitor', 'unique_id': None, 'version': 1, @@ -113,8 +111,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'System Monitor', 'unique_id': None, 'version': 1, diff --git a/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr b/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr index b5b33d7c246..3180c7c0b1d 100644 --- a/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr +++ b/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr @@ -37,8 +37,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/tractive/snapshots/test_diagnostics.ambr b/tests/components/tractive/snapshots/test_diagnostics.ambr index 3613f7e5997..11427a84801 100644 --- a/tests/components/tractive/snapshots/test_diagnostics.ambr +++ b/tests/components/tractive/snapshots/test_diagnostics.ambr @@ -17,8 +17,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': 'very_unique_string', 'version': 1, diff --git a/tests/components/tuya/snapshots/test_config_flow.ambr b/tests/components/tuya/snapshots/test_config_flow.ambr index 90d83d69814..a5a68a12a22 100644 --- a/tests/components/tuya/snapshots/test_config_flow.ambr +++ b/tests/components/tuya/snapshots/test_config_flow.ambr @@ -24,8 +24,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '12345', 'unique_id': '12345', 'version': 1, @@ -56,8 +54,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Old Tuya configuration entry', 'unique_id': '12345', 'version': 1, @@ -111,14 +107,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'mocked_username', 'unique_id': None, 'version': 1, }), - 'subentries': tuple( - ), 'title': 'mocked_username', 'type': , 'version': 1, diff --git a/tests/components/twinkly/snapshots/test_diagnostics.ambr b/tests/components/twinkly/snapshots/test_diagnostics.ambr index e52f76634fd..28ec98cf572 100644 --- a/tests/components/twinkly/snapshots/test_diagnostics.ambr +++ b/tests/components/twinkly/snapshots/test_diagnostics.ambr @@ -37,8 +37,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Twinkly', 'unique_id': '4c8fccf5-e08a-4173-92d5-49bf479252a2', 'version': 1, diff --git a/tests/components/unifi/snapshots/test_diagnostics.ambr b/tests/components/unifi/snapshots/test_diagnostics.ambr index aa7337be0ba..4ba90a00113 100644 --- a/tests/components/unifi/snapshots/test_diagnostics.ambr +++ b/tests/components/unifi/snapshots/test_diagnostics.ambr @@ -42,8 +42,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': '1', 'version': 1, diff --git a/tests/components/uptime/snapshots/test_config_flow.ambr b/tests/components/uptime/snapshots/test_config_flow.ambr index 93b1da60998..38312667375 100644 --- a/tests/components/uptime/snapshots/test_config_flow.ambr +++ b/tests/components/uptime/snapshots/test_config_flow.ambr @@ -27,14 
+27,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Uptime', 'unique_id': None, 'version': 1, }), - 'subentries': tuple( - ), 'title': 'Uptime', 'type': , 'version': 1, diff --git a/tests/components/utility_meter/snapshots/test_diagnostics.ambr b/tests/components/utility_meter/snapshots/test_diagnostics.ambr index ef235bba99d..6cdf121d7e3 100644 --- a/tests/components/utility_meter/snapshots/test_diagnostics.ambr +++ b/tests/components/utility_meter/snapshots/test_diagnostics.ambr @@ -25,8 +25,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Energy Bill', 'unique_id': None, 'version': 2, diff --git a/tests/components/v2c/snapshots/test_diagnostics.ambr b/tests/components/v2c/snapshots/test_diagnostics.ambr index 780a00acd64..96567b80c54 100644 --- a/tests/components/v2c/snapshots/test_diagnostics.ambr +++ b/tests/components/v2c/snapshots/test_diagnostics.ambr @@ -16,8 +16,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': 'ABC123', 'version': 1, diff --git a/tests/components/vicare/snapshots/test_diagnostics.ambr b/tests/components/vicare/snapshots/test_diagnostics.ambr index 0b1dcef5a29..ae9b05389c7 100644 --- a/tests/components/vicare/snapshots/test_diagnostics.ambr +++ b/tests/components/vicare/snapshots/test_diagnostics.ambr @@ -4731,8 +4731,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': 'ViCare', 'version': 1, diff --git a/tests/components/vodafone_station/snapshots/test_diagnostics.ambr b/tests/components/vodafone_station/snapshots/test_diagnostics.ambr index dd268f4ed1a..c258b14dc2d 100644 --- a/tests/components/vodafone_station/snapshots/test_diagnostics.ambr +++ b/tests/components/vodafone_station/snapshots/test_diagnostics.ambr @@ -35,8 +35,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/watttime/snapshots/test_diagnostics.ambr b/tests/components/watttime/snapshots/test_diagnostics.ambr index 3cc5e1d6f66..0c137acc36b 100644 --- a/tests/components/watttime/snapshots/test_diagnostics.ambr +++ b/tests/components/watttime/snapshots/test_diagnostics.ambr @@ -27,8 +27,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/webmin/snapshots/test_diagnostics.ambr b/tests/components/webmin/snapshots/test_diagnostics.ambr index c64fa212a98..8299b0eafba 100644 --- a/tests/components/webmin/snapshots/test_diagnostics.ambr +++ b/tests/components/webmin/snapshots/test_diagnostics.ambr @@ -253,8 +253,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': None, 'version': 1, diff --git a/tests/components/webostv/test_diagnostics.py b/tests/components/webostv/test_diagnostics.py index 7f54e940966..3d7cb00e021 100644 --- a/tests/components/webostv/test_diagnostics.py +++ b/tests/components/webostv/test_diagnostics.py @@ -61,6 +61,5 @@ async def test_diagnostics( "created_at": entry.created_at.isoformat(), 
"modified_at": entry.modified_at.isoformat(), "discovery_keys": {}, - "subentries": [], }, } diff --git a/tests/components/whirlpool/snapshots/test_diagnostics.ambr b/tests/components/whirlpool/snapshots/test_diagnostics.ambr index ee8abe04bf1..c60ce17b952 100644 --- a/tests/components/whirlpool/snapshots/test_diagnostics.ambr +++ b/tests/components/whirlpool/snapshots/test_diagnostics.ambr @@ -38,8 +38,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/whois/snapshots/test_config_flow.ambr b/tests/components/whois/snapshots/test_config_flow.ambr index 0d99b0596e3..937502d4d6c 100644 --- a/tests/components/whois/snapshots/test_config_flow.ambr +++ b/tests/components/whois/snapshots/test_config_flow.ambr @@ -30,14 +30,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Example.com', 'unique_id': 'example.com', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'Example.com', 'type': , 'version': 1, @@ -74,14 +70,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Example.com', 'unique_id': 'example.com', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'Example.com', 'type': , 'version': 1, @@ -118,14 +110,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Example.com', 'unique_id': 'example.com', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'Example.com', 'type': , 'version': 1, @@ -162,14 +150,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Example.com', 'unique_id': 'example.com', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'Example.com', 'type': , 'version': 1, @@ -206,14 +190,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Example.com', 'unique_id': 'example.com', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'Example.com', 'type': , 'version': 1, diff --git a/tests/components/workday/snapshots/test_diagnostics.ambr b/tests/components/workday/snapshots/test_diagnostics.ambr index e7331b911a8..f41b86b7f6d 100644 --- a/tests/components/workday/snapshots/test_diagnostics.ambr +++ b/tests/components/workday/snapshots/test_diagnostics.ambr @@ -40,8 +40,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/wyoming/snapshots/test_config_flow.ambr b/tests/components/wyoming/snapshots/test_config_flow.ambr index d288c531407..bdead0f2028 100644 --- a/tests/components/wyoming/snapshots/test_config_flow.ambr +++ b/tests/components/wyoming/snapshots/test_config_flow.ambr @@ -36,14 +36,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'hassio', - 'subentries': list([ - ]), 'title': 'Piper', 'unique_id': '1234', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'Piper', 'type': , 'version': 1, @@ -86,14 +82,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'hassio', - 'subentries': list([ - ]), 'title': 'Piper', 'unique_id': '1234', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'Piper', 
'type': , 'version': 1, @@ -135,14 +127,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'zeroconf', - 'subentries': list([ - ]), 'title': 'Test Satellite', 'unique_id': 'test_zeroconf_name._wyoming._tcp.local._Test Satellite', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'Test Satellite', 'type': , 'version': 1, diff --git a/tests/components/zha/snapshots/test_diagnostics.ambr b/tests/components/zha/snapshots/test_diagnostics.ambr index 08807f65d5d..f46a06e84b8 100644 --- a/tests/components/zha/snapshots/test_diagnostics.ambr +++ b/tests/components/zha/snapshots/test_diagnostics.ambr @@ -113,8 +113,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 4, diff --git a/tests/snapshots/test_config_entries.ambr b/tests/snapshots/test_config_entries.ambr index 08b532677f4..51e56f4874e 100644 --- a/tests/snapshots/test_config_entries.ambr +++ b/tests/snapshots/test_config_entries.ambr @@ -16,8 +16,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/test_config_entries.py b/tests/test_config_entries.py index 1ad152e8e42..aba85a35349 100644 --- a/tests/test_config_entries.py +++ b/tests/test_config_entries.py @@ -4,7 +4,6 @@ from __future__ import annotations import asyncio from collections.abc import Generator -from contextlib import AbstractContextManager, nullcontext as does_not_raise from datetime import timedelta import logging import re @@ -906,7 +905,7 @@ async def test_entries_excludes_ignore_and_disabled( async def test_saving_and_loading( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, hass_storage: dict[str, Any] + hass: HomeAssistant, freezer: FrozenDateTimeFactory ) -> None: """Test that we're saving and loading correctly.""" mock_integration( @@ -923,17 +922,7 @@ async def test_saving_and_loading( async def async_step_user(self, user_input=None): """Test user step.""" await self.async_set_unique_id("unique") - subentries = [ - config_entries.ConfigSubentryData( - data={"foo": "bar"}, title="subentry 1" - ), - config_entries.ConfigSubentryData( - data={"sun": "moon"}, title="subentry 2", unique_id="very_unique" - ), - ] - return self.async_create_entry( - title="Test Title", data={"token": "abcd"}, subentries=subentries - ) + return self.async_create_entry(title="Test Title", data={"token": "abcd"}) with mock_config_flow("test", TestFlow): await hass.config_entries.flow.async_init( @@ -982,98 +971,6 @@ async def test_saving_and_loading( # To execute the save await hass.async_block_till_done() - stored_data = hass_storage["core.config_entries"] - assert stored_data == { - "data": { - "entries": [ - { - "created_at": ANY, - "data": { - "token": "abcd", - }, - "disabled_by": None, - "discovery_keys": {}, - "domain": "test", - "entry_id": ANY, - "minor_version": 1, - "modified_at": ANY, - "options": {}, - "pref_disable_new_entities": True, - "pref_disable_polling": True, - "source": "user", - "subentries": [ - { - "data": {"foo": "bar"}, - "subentry_id": ANY, - "title": "subentry 1", - "unique_id": None, - }, - { - "data": {"sun": "moon"}, - "subentry_id": ANY, - "title": "subentry 2", - "unique_id": "very_unique", - }, - ], - "title": "Test Title", - "unique_id": "unique", - "version": 5, - }, - { - "created_at": ANY, - "data": { - "username": "bla", - }, - "disabled_by": None, - 
"discovery_keys": { - "test": [ - {"domain": "test", "key": "blah", "version": 1}, - ], - }, - "domain": "test", - "entry_id": ANY, - "minor_version": 1, - "modified_at": ANY, - "options": {}, - "pref_disable_new_entities": False, - "pref_disable_polling": False, - "source": "user", - "subentries": [], - "title": "Test 2 Title", - "unique_id": None, - "version": 3, - }, - { - "created_at": ANY, - "data": { - "username": "bla", - }, - "disabled_by": None, - "discovery_keys": { - "test": [ - {"domain": "test", "key": ["a", "b"], "version": 1}, - ], - }, - "domain": "test", - "entry_id": ANY, - "minor_version": 1, - "modified_at": ANY, - "options": {}, - "pref_disable_new_entities": False, - "pref_disable_polling": False, - "source": "user", - "subentries": [], - "title": "Test 2 Title", - "unique_id": None, - "version": 3, - }, - ], - }, - "key": "core.config_entries", - "minor_version": 5, - "version": 1, - } - # Now load written data in new config manager manager = config_entries.ConfigEntries(hass, {}) await manager.async_initialize() @@ -1086,25 +983,6 @@ async def test_saving_and_loading( ): assert orig.as_dict() == loaded.as_dict() - hass.config_entries.async_update_entry( - entry_1, - pref_disable_polling=False, - pref_disable_new_entities=False, - ) - - # To trigger the call_later - freezer.tick(1.0) - async_fire_time_changed(hass) - # To execute the save - await hass.async_block_till_done() - - # Assert no data is lost when storing again - expected_stored_data = stored_data - expected_stored_data["data"]["entries"][0]["modified_at"] = ANY - expected_stored_data["data"]["entries"][0]["pref_disable_new_entities"] = False - expected_stored_data["data"]["entries"][0]["pref_disable_polling"] = False - assert hass_storage["core.config_entries"] == expected_stored_data | {} - @freeze_time("2024-02-14 12:00:00") async def test_as_dict(snapshot: SnapshotAssertion) -> None: @@ -1538,42 +1416,6 @@ async def test_update_entry_options_and_trigger_listener( assert len(update_listener_calls) == 1 -async def test_update_subentry_and_trigger_listener( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test that we can update subentry and trigger listener.""" - entry = MockConfigEntry(domain="test", options={"first": True}) - entry.add_to_manager(manager) - update_listener_calls = [] - - subentry = config_entries.ConfigSubentry( - data={"test": "test"}, unique_id="test", title="Mock title" - ) - - async def update_listener( - hass: HomeAssistant, entry: config_entries.ConfigEntry - ) -> None: - """Test function.""" - assert entry.subentries == expected_subentries - update_listener_calls.append(None) - - entry.add_update_listener(update_listener) - - expected_subentries = {subentry.subentry_id: subentry} - assert manager.async_add_subentry(entry, subentry) is True - - await hass.async_block_till_done(wait_background_tasks=True) - assert entry.subentries == expected_subentries - assert len(update_listener_calls) == 1 - - expected_subentries = {} - assert manager.async_remove_subentry(entry, subentry.subentry_id) is True - - await hass.async_block_till_done(wait_background_tasks=True) - assert entry.subentries == expected_subentries - assert len(update_listener_calls) == 2 - - async def test_setup_raise_not_ready( hass: HomeAssistant, manager: config_entries.ConfigEntries, @@ -1900,456 +1742,20 @@ async def test_entry_options_unknown_config_entry( mock_integration(hass, MockModule("test")) mock_platform(hass, "test.config_flow", None) + class TestFlow: + """Test flow.""" + + 
@staticmethod + @callback + def async_get_options_flow(config_entry): + """Test options flow.""" + with pytest.raises(config_entries.UnknownEntry): await manager.options.async_create_flow( "blah", context={"source": "test"}, data=None ) -async def test_create_entry_subentries( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test a config entry being created with subentries.""" - - subentrydata = config_entries.ConfigSubentryData( - data={"test": "test"}, - title="Mock title", - unique_id="test", - ) - - async def mock_async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Mock setup.""" - hass.async_create_task( - hass.config_entries.flow.async_init( - "comp", - context={"source": config_entries.SOURCE_IMPORT}, - data={"data": "data", "subentry": subentrydata}, - ) - ) - return True - - async_setup_entry = AsyncMock(return_value=True) - mock_integration( - hass, - MockModule( - "comp", async_setup=mock_async_setup, async_setup_entry=async_setup_entry - ), - ) - mock_platform(hass, "comp.config_flow", None) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - VERSION = 1 - - async def async_step_import(self, user_input): - """Test import step creating entry, with subentry.""" - return self.async_create_entry( - title="title", - data={"example": user_input["data"]}, - subentries=[user_input["subentry"]], - ) - - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): - assert await async_setup_component(hass, "comp", {}) - - await hass.async_block_till_done() - - assert len(async_setup_entry.mock_calls) == 1 - - entries = hass.config_entries.async_entries("comp") - assert len(entries) == 1 - assert entries[0].supported_subentries == () - assert entries[0].data == {"example": "data"} - assert len(entries[0].subentries) == 1 - subentry_id = list(entries[0].subentries)[0] - subentry = config_entries.ConfigSubentry( - data=subentrydata["data"], - subentry_id=subentry_id, - title=subentrydata["title"], - unique_id="test", - ) - assert entries[0].subentries == {subentry_id: subentry} - - -async def test_entry_subentry( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test that we can add a subentry to an entry.""" - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - entry = MockConfigEntry(domain="test", data={"first": True}) - entry.add_to_manager(manager) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - @staticmethod - @callback - def async_get_subentry_flow(config_entry, subentry_type: str): - """Test subentry flow.""" - - class SubentryFlowHandler(data_entry_flow.FlowHandler): - """Test subentry flow handler.""" - - return SubentryFlowHandler() - - @classmethod - @callback - def async_supported_subentries( - cls, config_entry: ConfigEntry - ) -> tuple[str, ...]: - return ("test",) - - with mock_config_flow("test", TestFlow): - flow = await manager.subentries.async_create_flow( - (entry.entry_id, "test"), context={"source": "test"}, data=None - ) - - flow.handler = (entry.entry_id, "test") # Set to keep reference to config entry - - await manager.subentries.async_finish_flow( - flow, - { - "data": {"second": True}, - "title": "Mock title", - "type": data_entry_flow.FlowResultType.CREATE_ENTRY, - "unique_id": "test", - }, - ) - - assert entry.data == {"first": True} - assert entry.options == {} - subentry_id = list(entry.subentries)[0] - assert entry.subentries == { - subentry_id: config_entries.ConfigSubentry( - data={"second": True}, - 
subentry_id=subentry_id, - title="Mock title", - unique_id="test", - ) - } - assert entry.supported_subentries == ("test",) - - -async def test_entry_subentry_non_string( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test adding an invalid subentry to an entry.""" - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - entry = MockConfigEntry(domain="test", data={"first": True}) - entry.add_to_manager(manager) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - @staticmethod - @callback - def async_get_subentry_flow(config_entry, subentry_type: str): - """Test subentry flow.""" - - class SubentryFlowHandler(data_entry_flow.FlowHandler): - """Test subentry flow handler.""" - - return SubentryFlowHandler() - - @classmethod - @callback - def async_supported_subentries( - cls, config_entry: ConfigEntry - ) -> tuple[str, ...]: - return ("test",) - - with mock_config_flow("test", TestFlow): - flow = await manager.subentries.async_create_flow( - (entry.entry_id, "test"), context={"source": "test"}, data=None - ) - - flow.handler = (entry.entry_id, "test") # Set to keep reference to config entry - - with pytest.raises(HomeAssistantError): - await manager.subentries.async_finish_flow( - flow, - { - "data": {"second": True}, - "title": "Mock title", - "type": data_entry_flow.FlowResultType.CREATE_ENTRY, - "unique_id": 123, - }, - ) - - -@pytest.mark.parametrize("context", [None, {}, {"bla": "bleh"}]) -async def test_entry_subentry_no_context( - hass: HomeAssistant, manager: config_entries.ConfigEntries, context: dict | None -) -> None: - """Test starting a subentry flow without "source" in context.""" - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - entry = MockConfigEntry(domain="test", data={"first": True}) - entry.add_to_manager(manager) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - @staticmethod - @callback - def async_get_subentry_flow(config_entry, subentry_type: str): - """Test subentry flow.""" - - class SubentryFlowHandler(data_entry_flow.FlowHandler): - """Test subentry flow handler.""" - - return SubentryFlowHandler() - - @classmethod - @callback - def async_supported_subentries( - cls, config_entry: ConfigEntry - ) -> tuple[str, ...]: - return ("test",) - - with mock_config_flow("test", TestFlow), pytest.raises(KeyError): - await manager.subentries.async_create_flow( - (entry.entry_id, "test"), context=context, data=None - ) - - -@pytest.mark.parametrize( - ("unique_id", "expected_result"), - [(None, does_not_raise()), ("test", pytest.raises(HomeAssistantError))], -) -async def test_entry_subentry_duplicate( - hass: HomeAssistant, - manager: config_entries.ConfigEntries, - unique_id: str | None, - expected_result: AbstractContextManager, -) -> None: - """Test adding a duplicated subentry to an entry.""" - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - entry = MockConfigEntry( - domain="test", - data={"first": True}, - subentries_data=[ - config_entries.ConfigSubentryData( - data={}, - subentry_id="blabla", - title="Mock title", - unique_id=unique_id, - ) - ], - ) - entry.add_to_manager(manager) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - @staticmethod - @callback - def async_get_subentry_flow(config_entry, subentry_type: str): - """Test subentry flow.""" - - class SubentryFlowHandler(data_entry_flow.FlowHandler): - """Test subentry flow handler.""" - - return 
SubentryFlowHandler() - - @classmethod - @callback - def async_supported_subentries( - cls, config_entry: ConfigEntry - ) -> tuple[str, ...]: - return ("test",) - - with mock_config_flow("test", TestFlow): - flow = await manager.subentries.async_create_flow( - (entry.entry_id, "test"), context={"source": "test"}, data=None - ) - - flow.handler = (entry.entry_id, "test") # Set to keep reference to config entry - - with expected_result: - await manager.subentries.async_finish_flow( - flow, - { - "data": {"second": True}, - "title": "Mock title", - "type": data_entry_flow.FlowResultType.CREATE_ENTRY, - "unique_id": unique_id, - }, - ) - - -async def test_entry_subentry_abort( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test that we can abort subentry flow.""" - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - entry = MockConfigEntry(domain="test", data={"first": True}) - entry.add_to_manager(manager) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - @staticmethod - @callback - def async_get_subentry_flow(config_entry, subentry_type: str): - """Test subentry flow.""" - - class SubentryFlowHandler(data_entry_flow.FlowHandler): - """Test subentry flow handler.""" - - return SubentryFlowHandler() - - @classmethod - @callback - def async_supported_subentries( - cls, config_entry: ConfigEntry - ) -> tuple[str, ...]: - return ("test",) - - with mock_config_flow("test", TestFlow): - flow = await manager.subentries.async_create_flow( - (entry.entry_id, "test"), context={"source": "test"}, data=None - ) - - flow.handler = (entry.entry_id, "test") # Set to keep reference to config entry - - assert await manager.subentries.async_finish_flow( - flow, {"type": data_entry_flow.FlowResultType.ABORT, "reason": "test"} - ) - - -async def test_entry_subentry_unknown_config_entry( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test attempting to start a subentry flow for an unknown config entry.""" - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - - with pytest.raises(config_entries.UnknownEntry): - await manager.subentries.async_create_flow( - ("blah", "blah"), context={"source": "test"}, data=None - ) - - -async def test_entry_subentry_deleted_config_entry( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test attempting to finish a subentry flow for a deleted config entry.""" - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - entry = MockConfigEntry(domain="test", data={"first": True}) - entry.add_to_manager(manager) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - @staticmethod - @callback - def async_get_subentry_flow(config_entry, subentry_type: str): - """Test subentry flow.""" - - class SubentryFlowHandler(data_entry_flow.FlowHandler): - """Test subentry flow handler.""" - - return SubentryFlowHandler() - - @classmethod - @callback - def async_supported_subentries( - cls, config_entry: ConfigEntry - ) -> tuple[str, ...]: - return ("test",) - - with mock_config_flow("test", TestFlow): - flow = await manager.subentries.async_create_flow( - (entry.entry_id, "test"), context={"source": "test"}, data=None - ) - - flow.handler = (entry.entry_id, "test") # Set to keep reference to config entry - - await hass.config_entries.async_remove(entry.entry_id) - - with pytest.raises(config_entries.UnknownEntry): - await manager.subentries.async_finish_flow( 
- flow, - { - "data": {"second": True}, - "title": "Mock title", - "type": data_entry_flow.FlowResultType.CREATE_ENTRY, - "unique_id": "test", - }, - ) - - -async def test_entry_subentry_unsupported( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test attempting to start a subentry flow for a config entry without support.""" - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - entry = MockConfigEntry(domain="test", data={"first": True}) - entry.add_to_manager(manager) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - @staticmethod - @callback - def async_get_subentry_flow(config_entry, subentry_type: str): - """Test subentry flow.""" - - class SubentryFlowHandler(data_entry_flow.FlowHandler): - """Test subentry flow handler.""" - - return SubentryFlowHandler() - - @classmethod - @callback - def async_supported_subentries( - cls, config_entry: ConfigEntry - ) -> tuple[str, ...]: - return ("test",) - - with ( - mock_config_flow("test", TestFlow), - pytest.raises(data_entry_flow.UnknownHandler), - ): - await manager.subentries.async_create_flow( - ( - entry.entry_id, - "unknown", - ), - context={"source": "test"}, - data=None, - ) - - -async def test_entry_subentry_unsupported_subentry_type( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test attempting to start a subentry flow for a config entry without support.""" - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - entry = MockConfigEntry(domain="test", data={"first": True}) - entry.add_to_manager(manager) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - with ( - mock_config_flow("test", TestFlow), - pytest.raises(data_entry_flow.UnknownHandler), - ): - await manager.subentries.async_create_flow( - (entry.entry_id, "test"), context={"source": "test"}, data=None - ) - - async def test_entry_setup_succeed( hass: HomeAssistant, manager: config_entries.ConfigEntries ) -> None: @@ -4505,20 +3911,21 @@ async def test_updating_entry_with_and_without_changes( assert manager.async_update_entry(entry) is False - for change, expected_value in ( - ({"data": {"second": True, "third": 456}}, {"second": True, "third": 456}), - ({"data": {"second": True}}, {"second": True}), - ({"minor_version": 2}, 2), - ({"options": {"hello": True}}, {"hello": True}), - ({"pref_disable_new_entities": True}, True), - ({"pref_disable_polling": True}, True), - ({"title": "sometitle"}, "sometitle"), - ({"unique_id": "abcd1234"}, "abcd1234"), - ({"version": 2}, 2), + for change in ( + {"data": {"second": True, "third": 456}}, + {"data": {"second": True}}, + {"minor_version": 2}, + {"options": {"hello": True}}, + {"pref_disable_new_entities": True}, + {"pref_disable_polling": True}, + {"title": "sometitle"}, + {"unique_id": "abcd1234"}, + {"version": 2}, ): assert manager.async_update_entry(entry, **change) is True key = next(iter(change)) - assert getattr(entry, key) == expected_value + value = next(iter(change.values())) + assert getattr(entry, key) == value assert manager.async_update_entry(entry, **change) is False assert manager.async_entry_for_domain_unique_id("test", "abc123") is None @@ -6052,7 +5459,6 @@ async def test_unhashable_unique_id_fails( minor_version=1, options={}, source="test", - subentries_data=(), title="title", unique_id=unique_id, version=1, @@ -6088,7 +5494,6 @@ async def test_unhashable_unique_id_fails_on_update( minor_version=1, options={}, source="test", - 
subentries_data=(), title="title", unique_id="123", version=1, @@ -6119,7 +5524,6 @@ async def test_string_unique_id_no_warning( minor_version=1, options={}, source="test", - subentries_data=(), title="title", unique_id="123", version=1, @@ -6162,7 +5566,6 @@ async def test_hashable_unique_id( minor_version=1, options={}, source="test", - subentries_data=(), title="title", unique_id=unique_id, version=1, @@ -6197,7 +5600,6 @@ async def test_no_unique_id_no_warning( minor_version=1, options={}, source="test", - subentries_data=(), title="title", unique_id=None, version=1, @@ -7122,7 +6524,6 @@ async def test_migration_from_1_2( "pref_disable_new_entities": False, "pref_disable_polling": False, "source": "import", - "subentries": {}, "title": "Sun", "unique_id": None, "version": 1, From ca2c7280eb2649f1315e8cf104636f1eaf20f3a6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ab=C3=ADlio=20Costa?= Date: Wed, 18 Dec 2024 12:59:56 +0000 Subject: [PATCH 0817/1198] Remove uneeded logger param from Idasen Desk Coordinator (#133485) --- homeassistant/components/idasen_desk/__init__.py | 2 +- homeassistant/components/idasen_desk/coordinator.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/idasen_desk/__init__.py b/homeassistant/components/idasen_desk/__init__.py index 1aacea91723..671319e46eb 100644 --- a/homeassistant/components/idasen_desk/__init__.py +++ b/homeassistant/components/idasen_desk/__init__.py @@ -27,7 +27,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: IdasenDeskConfigEntry) - """Set up IKEA Idasen from a config entry.""" address: str = entry.data[CONF_ADDRESS].upper() - coordinator = IdasenDeskCoordinator(hass, _LOGGER, entry.title, address) + coordinator = IdasenDeskCoordinator(hass, entry.title, address) entry.runtime_data = coordinator try: diff --git a/homeassistant/components/idasen_desk/coordinator.py b/homeassistant/components/idasen_desk/coordinator.py index a84027a26c0..d9e90cfe5ea 100644 --- a/homeassistant/components/idasen_desk/coordinator.py +++ b/homeassistant/components/idasen_desk/coordinator.py @@ -19,13 +19,12 @@ class IdasenDeskCoordinator(DataUpdateCoordinator[int | None]): def __init__( self, hass: HomeAssistant, - logger: logging.Logger, name: str, address: str, ) -> None: """Init IdasenDeskCoordinator.""" - super().__init__(hass, logger, name=name) + super().__init__(hass, _LOGGER, name=name) self.address = address self._expected_connected = False From 4399d09820c20ea254100672ee5d4a3a40d276c8 Mon Sep 17 00:00:00 2001 From: Bas Brussee <68892092+basbruss@users.noreply.github.com> Date: Wed, 18 Dec 2024 14:02:08 +0100 Subject: [PATCH 0818/1198] Allow data description in sections (#128965) * Allow data description in sections * update format with ruff * Add data_description to kitchen_sink input section --------- Co-authored-by: Erik --- homeassistant/components/kitchen_sink/strings.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/homeassistant/components/kitchen_sink/strings.json b/homeassistant/components/kitchen_sink/strings.json index 63e27e04637..b8dcfdd8e69 100644 --- a/homeassistant/components/kitchen_sink/strings.json +++ b/homeassistant/components/kitchen_sink/strings.json @@ -21,6 +21,9 @@ "bool": "Optional boolean", "int": "Numeric input" }, + "data_description": { + "int": "A longer description for the numeric input" + }, "description": "This section allows input of some extra data", "name": "Collapsible section" } From c06bc537248aed95037b85ba15be129ac567af9d Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?J=2E=20Diego=20Rodr=C3=ADguez=20Royo?= Date: Wed, 18 Dec 2024 14:26:37 +0100 Subject: [PATCH 0819/1198] Deprecate Home Connect program switches (#131641) --- .../components/home_connect/strings.json | 4 + .../components/home_connect/switch.py | 57 ++++++++++++++ tests/components/home_connect/test_switch.py | 75 +++++++++++++++++++ 3 files changed, 136 insertions(+) diff --git a/homeassistant/components/home_connect/strings.json b/homeassistant/components/home_connect/strings.json index e70f2f28c65..f5c3cf69807 100644 --- a/homeassistant/components/home_connect/strings.json +++ b/homeassistant/components/home_connect/strings.json @@ -90,6 +90,10 @@ "deprecated_binary_common_door_sensor": { "title": "Deprecated binary door sensor detected in some automations or scripts", "description": "The binary door sensor `{entity}`, which is deprecated, is used in the following automations or scripts:\n{items}\n\nA sensor entity with additional possible states is available and should be used going forward; Please use it on the above automations or scripts to fix this issue." + }, + "deprecated_program_switch": { + "title": "Deprecated program switch detected in some automations or scripts", + "description": "Program switch are deprecated and {entity_id} is used in the following automations or scripts:\n{items}\n\nYou can use active program select entity to run the program without any additional option and get the current running program on the above automations or scripts to fix this issue." } }, "services": { diff --git a/homeassistant/components/home_connect/switch.py b/homeassistant/components/home_connect/switch.py index acb78e87db1..305077bfb86 100644 --- a/homeassistant/components/home_connect/switch.py +++ b/homeassistant/components/home_connect/switch.py @@ -6,10 +6,18 @@ from typing import Any from homeconnect.api import HomeConnectError +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.script import scripts_with_entity from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.issue_registry import ( + IssueSeverity, + async_create_issue, + async_delete_issue, +) from . 
import HomeConnectConfigEntry, get_dict_from_home_connect_error from .const import ( @@ -201,6 +209,55 @@ class HomeConnectProgramSwitch(HomeConnectEntity, SwitchEntity): self._attr_has_entity_name = False self.program_name = program_name + async def async_added_to_hass(self) -> None: + """Call when entity is added to hass.""" + await super().async_added_to_hass() + automations = automations_with_entity(self.hass, self.entity_id) + scripts = scripts_with_entity(self.hass, self.entity_id) + items = automations + scripts + if not items: + return + + entity_reg: er.EntityRegistry = er.async_get(self.hass) + entity_automations = [ + automation_entity + for automation_id in automations + if (automation_entity := entity_reg.async_get(automation_id)) + ] + entity_scripts = [ + script_entity + for script_id in scripts + if (script_entity := entity_reg.async_get(script_id)) + ] + + items_list = [ + f"- [{item.original_name}](/config/automation/edit/{item.unique_id})" + for item in entity_automations + ] + [ + f"- [{item.original_name}](/config/script/edit/{item.unique_id})" + for item in entity_scripts + ] + + async_create_issue( + self.hass, + DOMAIN, + f"deprecated_program_switch_{self.entity_id}", + breaks_in_ha_version="2025.6.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key="deprecated_program_switch", + translation_placeholders={ + "entity_id": self.entity_id, + "items": "\n".join(items_list), + }, + ) + + async def async_will_remove_from_hass(self) -> None: + """Call when entity will be removed from hass.""" + async_delete_issue( + self.hass, DOMAIN, f"deprecated_program_switch_{self.entity_id}" + ) + async def async_turn_on(self, **kwargs: Any) -> None: """Start the program.""" _LOGGER.debug("Tried to turn on program %s", self.program_name) diff --git a/tests/components/home_connect/test_switch.py b/tests/components/home_connect/test_switch.py index 3a89005dc59..a02cb553ece 100644 --- a/tests/components/home_connect/test_switch.py +++ b/tests/components/home_connect/test_switch.py @@ -6,6 +6,8 @@ from unittest.mock import MagicMock, Mock from homeconnect.api import HomeConnectAppliance, HomeConnectError import pytest +from homeassistant.components import automation, script +from homeassistant.components.automation import automations_with_entity from homeassistant.components.home_connect.const import ( ATTR_ALLOWED_VALUES, ATTR_CONSTRAINTS, @@ -16,8 +18,10 @@ from homeassistant.components.home_connect.const import ( BSH_POWER_ON, BSH_POWER_STANDBY, BSH_POWER_STATE, + DOMAIN, REFRIGERATION_SUPERMODEFREEZER, ) +from homeassistant.components.script import scripts_with_entity from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( @@ -30,6 +34,8 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +import homeassistant.helpers.issue_registry as ir +from homeassistant.setup import async_setup_component from .conftest import get_all_appliances @@ -506,3 +512,72 @@ async def test_power_switch_service_validation_errors( await hass.services.async_call( SWITCH_DOMAIN, service, {"entity_id": entity_id}, blocking=True ) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.usefixtures("bypass_throttle") +async def test_create_issue( + hass: HomeAssistant, + appliance: Mock, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + 
setup_credentials: None, + get_appliances: MagicMock, + issue_registry: ir.IssueRegistry, +) -> None: + """Test we create an issue when an automation or script is using a deprecated entity.""" + entity_id = "switch.washer_program_mix" + appliance.status.update(SETTINGS_STATUS) + appliance.get_programs_available.return_value = [PROGRAM] + get_appliances.return_value = [appliance] + issue_id = f"deprecated_program_switch_{entity_id}" + + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "alias": "test", + "trigger": {"platform": "state", "entity_id": entity_id}, + "action": { + "action": "automation.turn_on", + "target": { + "entity_id": "automation.test", + }, + }, + } + }, + ) + assert await async_setup_component( + hass, + script.DOMAIN, + { + script.DOMAIN: { + "test": { + "sequence": [ + { + "action": "switch.turn_on", + "entity_id": entity_id, + }, + ], + } + } + }, + ) + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + assert automations_with_entity(hass, entity_id)[0] == "automation.test" + assert scripts_with_entity(hass, entity_id)[0] == "script.test" + + assert len(issue_registry.issues) == 1 + assert issue_registry.async_get_issue(DOMAIN, issue_id) + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, issue_id) + assert len(issue_registry.issues) == 0 From a46a0ad2b4692b3a8dd6f0df07df721bc518f20d Mon Sep 17 00:00:00 2001 From: Andre Lengwenus Date: Wed, 18 Dec 2024 14:35:02 +0100 Subject: [PATCH 0820/1198] Add device_id parameter to LCN actions (service calls) (#129590) --- homeassistant/components/lcn/__init__.py | 2 + homeassistant/components/lcn/const.py | 1 + homeassistant/components/lcn/helpers.py | 11 +- homeassistant/components/lcn/services.py | 53 +++++- homeassistant/components/lcn/services.yaml | 118 +++++++++++-- homeassistant/components/lcn/strings.json | 89 ++++++++++ tests/components/lcn/test_services.py | 193 +++++++++++++++------ 7 files changed, 398 insertions(+), 69 deletions(-) diff --git a/homeassistant/components/lcn/__init__.py b/homeassistant/components/lcn/__init__.py index eb26ef48e4e..6dc6fb1ecc4 100644 --- a/homeassistant/components/lcn/__init__.py +++ b/homeassistant/components/lcn/__init__.py @@ -31,6 +31,7 @@ from .const import ( CONF_SK_NUM_TRIES, CONF_TRANSITION, CONNECTION, + DEVICE_CONNECTIONS, DOMAIN, PLATFORMS, ) @@ -102,6 +103,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b _LOGGER.debug('LCN connected to "%s"', config_entry.title) hass.data[DOMAIN][config_entry.entry_id] = { CONNECTION: lcn_connection, + DEVICE_CONNECTIONS: {}, ADD_ENTITIES_CALLBACKS: {}, } # Update config_entry with LCN device serials diff --git a/homeassistant/components/lcn/const.py b/homeassistant/components/lcn/const.py index 97aeeecd8b5..cee9da9be43 100644 --- a/homeassistant/components/lcn/const.py +++ b/homeassistant/components/lcn/const.py @@ -20,6 +20,7 @@ DEFAULT_NAME = "pchk" ADD_ENTITIES_CALLBACKS = "add_entities_callbacks" CONNECTION = "connection" +DEVICE_CONNECTIONS = "device_connections" CONF_HARDWARE_SERIAL = "hardware_serial" CONF_SOFTWARE_SERIAL = "software_serial" CONF_HARDWARE_TYPE = "hardware_type" diff --git a/homeassistant/components/lcn/helpers.py b/homeassistant/components/lcn/helpers.py index 6a9c63ea212..348305c775e 100644 
--- a/homeassistant/components/lcn/helpers.py +++ b/homeassistant/components/lcn/helpers.py @@ -38,6 +38,7 @@ from .const import ( CONF_SCENES, CONF_SOFTWARE_SERIAL, CONNECTION, + DEVICE_CONNECTIONS, DOMAIN, LED_PORTS, LOGICOP_PORTS, @@ -237,7 +238,7 @@ def register_lcn_address_devices( identifiers = {(DOMAIN, generate_unique_id(config_entry.entry_id, address))} if device_config[CONF_ADDRESS][2]: # is group - device_model = f"LCN group (g{address[0]:03d}{address[1]:03d})" + device_model = "LCN group" sw_version = None else: # is module hardware_type = device_config[CONF_HARDWARE_TYPE] @@ -245,10 +246,10 @@ def register_lcn_address_devices( hardware_name = pypck.lcn_defs.HARDWARE_DESCRIPTIONS[hardware_type] else: hardware_name = pypck.lcn_defs.HARDWARE_DESCRIPTIONS[-1] - device_model = f"{hardware_name} (m{address[0]:03d}{address[1]:03d})" + device_model = f"{hardware_name}" sw_version = f"{device_config[CONF_SOFTWARE_SERIAL]:06X}" - device_registry.async_get_or_create( + device_entry = device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, identifiers=identifiers, via_device=host_identifiers, @@ -258,6 +259,10 @@ def register_lcn_address_devices( model=device_model, ) + hass.data[DOMAIN][config_entry.entry_id][DEVICE_CONNECTIONS][ + device_entry.id + ] = get_device_connection(hass, address, config_entry) + async def async_update_device_config( device_connection: DeviceConnectionType, device_config: ConfigType diff --git a/homeassistant/components/lcn/services.py b/homeassistant/components/lcn/services.py index 92f5863c47e..a6c42de0487 100644 --- a/homeassistant/components/lcn/services.py +++ b/homeassistant/components/lcn/services.py @@ -8,12 +8,21 @@ import voluptuous as vol from homeassistant.const import ( CONF_ADDRESS, CONF_BRIGHTNESS, + CONF_DEVICE_ID, CONF_HOST, CONF_STATE, CONF_UNIT_OF_MEASUREMENT, ) -from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.core import ( + HomeAssistant, + ServiceCall, + ServiceResponse, + SupportsResponse, +) +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import device_registry as dr import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from .const import ( CONF_KEYS, @@ -30,6 +39,7 @@ from .const import ( CONF_TRANSITION, CONF_VALUE, CONF_VARIABLE, + DEVICE_CONNECTIONS, DOMAIN, LED_PORTS, LED_STATUS, @@ -53,7 +63,13 @@ from .helpers import ( class LcnServiceCall: """Parent class for all LCN service calls.""" - schema = vol.Schema({vol.Required(CONF_ADDRESS): is_address}) + schema = vol.Schema( + { + vol.Optional(CONF_DEVICE_ID): cv.string, + vol.Optional(CONF_ADDRESS): is_address, + } + ) + supports_response = SupportsResponse.NONE def __init__(self, hass: HomeAssistant) -> None: """Initialize service call.""" @@ -61,8 +77,37 @@ class LcnServiceCall: def get_device_connection(self, service: ServiceCall) -> DeviceConnectionType: """Get address connection object.""" - address, host_name = service.data[CONF_ADDRESS] + if CONF_DEVICE_ID not in service.data and CONF_ADDRESS not in service.data: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="no_device_identifier", + ) + if CONF_DEVICE_ID in service.data: + device_id = service.data[CONF_DEVICE_ID] + device_registry = dr.async_get(self.hass) + if not (device := device_registry.async_get(device_id)): + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_device_id", + 
translation_placeholders={"device_id": device_id}, + ) + + return self.hass.data[DOMAIN][device.primary_config_entry][ + DEVICE_CONNECTIONS + ][device_id] + + async_create_issue( + self.hass, + DOMAIN, + "deprecated_address_parameter", + breaks_in_ha_version="2025.6.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key="deprecated_address_parameter", + ) + + address, host_name = service.data[CONF_ADDRESS] for config_entry in self.hass.config_entries.async_entries(DOMAIN): if config_entry.data[CONF_HOST] == host_name: device_connection = get_device_connection( @@ -73,7 +118,7 @@ class LcnServiceCall: return device_connection raise ValueError("Invalid host name.") - async def async_call_service(self, service: ServiceCall) -> None: + async def async_call_service(self, service: ServiceCall) -> ServiceResponse: """Execute service call.""" raise NotImplementedError diff --git a/homeassistant/components/lcn/services.yaml b/homeassistant/components/lcn/services.yaml index d62a1e72d45..f58e79b9f40 100644 --- a/homeassistant/components/lcn/services.yaml +++ b/homeassistant/components/lcn/services.yaml @@ -2,8 +2,76 @@ output_abs: fields: + device_id: + example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2" + selector: &device_selector + device: + filter: + - integration: lcn + model: LCN group + - integration: lcn + model: UnknownModuleType + - integration: lcn + model: LCN-SW1.0 + - integration: lcn + model: LCN-SW1.1 + - integration: lcn + model: LCN-UP1.0 + - integration: lcn + model: LCN-UP2 + - integration: lcn + model: LCN-SW2 + - integration: lcn + model: LCN-UP-Profi1-Plus + - integration: lcn + model: LCN-DI12 + - integration: lcn + model: LCN-HU + - integration: lcn + model: LCN-SH + - integration: lcn + model: LCN-UP2 + - integration: lcn + model: LCN-UPP + - integration: lcn + model: LCN-SK + - integration: lcn + model: LCN-LD + - integration: lcn + model: LCN-SH-Plus + - integration: lcn + model: LCN-UPS + - integration: lcn + model: LCN_UPS24V + - integration: lcn + model: LCN-GTM + - integration: lcn + model: LCN-SHS + - integration: lcn + model: LCN-ESD + - integration: lcn + model: LCN-EB2 + - integration: lcn + model: LCN-MRS + - integration: lcn + model: LCN-EB11 + - integration: lcn + model: LCN-UMR + - integration: lcn + model: LCN-UPU + - integration: lcn + model: LCN-UMR24V + - integration: lcn + model: LCN-SHD + - integration: lcn + model: LCN-SHU + - integration: lcn + model: LCN-SR6 + - integration: lcn + model: LCN-UMF + - integration: lcn + model: LCN-WBH address: - required: true example: "myhome.s0.m7" selector: text: @@ -34,8 +102,10 @@ output_abs: output_rel: fields: + device_id: + example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2" + selector: *device_selector address: - required: true example: "myhome.s0.m7" selector: text: @@ -58,8 +128,10 @@ output_rel: output_toggle: fields: + device_id: + example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2" + selector: *device_selector address: - required: true example: "myhome.s0.m7" selector: text: @@ -83,8 +155,10 @@ output_toggle: relays: fields: + device_id: + example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2" + selector: *device_selector address: - required: true example: "myhome.s0.m7" selector: text: @@ -96,8 +170,10 @@ relays: led: fields: + device_id: + example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2" + selector: *device_selector address: - required: true example: "myhome.s0.m7" selector: text: @@ -130,8 +206,10 @@ led: var_abs: fields: + device_id: + example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2" + selector: *device_selector address: - 
required: true example: "myhome.s0.m7" selector: text: @@ -197,8 +275,10 @@ var_abs: var_reset: fields: + device_id: + example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2" + selector: *device_selector address: - required: true example: "myhome.s0.m7" selector: text: @@ -230,8 +310,10 @@ var_reset: var_rel: fields: + device_id: + example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2" + selector: *device_selector address: - required: true example: "myhome.s0.m7" selector: text: @@ -321,8 +403,10 @@ var_rel: lock_regulator: fields: + device_id: + example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2" + selector: *device_selector address: - required: true example: "myhome.s0.m7" selector: text: @@ -355,8 +439,10 @@ lock_regulator: send_keys: fields: + device_id: + example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2" + selector: *device_selector address: - required: true example: "myhome.s0.m7" selector: text: @@ -402,8 +488,10 @@ send_keys: lock_keys: fields: + device_id: + example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2" + selector: *device_selector address: - required: true example: "myhome.s0.m7" selector: text: @@ -445,8 +533,10 @@ lock_keys: dyn_text: fields: + device_id: + example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2" + selector: *device_selector address: - required: true example: "myhome.s0.m7" selector: text: @@ -464,8 +554,10 @@ dyn_text: pck: fields: + device_id: + example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2" + selector: *device_selector address: - required: true example: "myhome.s0.m7" selector: text: diff --git a/homeassistant/components/lcn/strings.json index 088a3654500..988c2a637fb 100644 --- a/homeassistant/components/lcn/strings.json +++ b/homeassistant/components/lcn/strings.json @@ -70,6 +70,10 @@ "deprecated_keylock_sensor": { "title": "Deprecated LCN key lock binary sensor", "description": "Your LCN key lock binary sensor entity `{entity}` is being used in automations or scripts. A key lock switch entity is available and should be used going forward.\n\nPlease adjust your automations or scripts to fix this issue." + }, + "deprecated_address_parameter": { + "title": "Deprecated 'address' parameter", + "description": "The 'address' parameter in the LCN service calls is deprecated. The 'device_id' parameter should be used going forward.\n\nPlease adjust your automations or scripts to fix this issue." + } }, "services": { @@ -77,6 +81,10 @@ "name": "Output absolute brightness", "description": "Sets absolute brightness of output port in percent.", "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "The device_id of the LCN module or group." + }, "address": { "name": "Address", "description": "Module address."
@@ -99,6 +107,10 @@ "name": "Output relative brightness", "description": "Sets relative brightness of output port in percent.", "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "[%key:component::lcn::services::output_abs::fields::device_id::description%]" + }, "address": { "name": "Address", "description": "[%key:component::lcn::services::output_abs::fields::address::description%]" @@ -117,6 +129,10 @@ "name": "Toggle output", "description": "Toggles output port.", "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "[%key:component::lcn::services::output_abs::fields::device_id::description%]" + }, "address": { "name": "Address", "description": "[%key:component::lcn::services::output_abs::fields::address::description%]" @@ -135,6 +151,10 @@ "name": "Relays", "description": "Sets the relays status.", "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "[%key:component::lcn::services::output_abs::fields::device_id::description%]" + }, "address": { "name": "Address", "description": "[%key:component::lcn::services::output_abs::fields::address::description%]" @@ -149,6 +169,10 @@ "name": "LED", "description": "Sets the led state.", "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "[%key:component::lcn::services::output_abs::fields::device_id::description%]" + }, "address": { "name": "Address", "description": "[%key:component::lcn::services::output_abs::fields::address::description%]" @@ -167,6 +191,10 @@ "name": "Set absolute variable", "description": "Sets absolute value of a variable or setpoint.", "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "[%key:component::lcn::services::output_abs::fields::device_id::description%]" + }, "address": { "name": "Address", "description": "[%key:component::lcn::services::output_abs::fields::address::description%]" @@ -189,6 +217,10 @@ "name": "Reset variable", "description": "Resets value of variable or setpoint.", "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "[%key:component::lcn::services::output_abs::fields::device_id::description%]" + }, "address": { "name": "Address", "description": "[%key:component::lcn::services::output_abs::fields::address::description%]" @@ -203,6 +235,10 @@ "name": "Shift variable", "description": "Shift value of a variable, setpoint or threshold.", "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "[%key:component::lcn::services::output_abs::fields::device_id::description%]" + }, "address": { "name": "Address", "description": "[%key:component::lcn::services::output_abs::fields::address::description%]" @@ -229,6 +265,10 @@ "name": "Lock regulator", "description": "Locks a regulator setpoint.", "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "[%key:component::lcn::services::output_abs::fields::device_id::description%]" + }, "address": { "name": "Address", "description": "[%key:component::lcn::services::output_abs::fields::address::description%]" @@ -247,6 +287,10 @@ "name": "Send keys", "description": "Sends keys (which executes bound commands).", "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "[%key:component::lcn::services::output_abs::fields::device_id::description%]" 
+ }, "address": { "name": "Address", "description": "[%key:component::lcn::services::output_abs::fields::address::description%]" @@ -273,6 +317,10 @@ "name": "Lock keys", "description": "Locks keys.", "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "[%key:component::lcn::services::output_abs::fields::device_id::description%]" + }, "address": { "name": "Address", "description": "[%key:component::lcn::services::output_abs::fields::address::description%]" @@ -299,6 +347,10 @@ "name": "Dynamic text", "description": "Sends dynamic text to LCN-GTxD displays.", "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "[%key:component::lcn::services::output_abs::fields::device_id::description%]" + }, "address": { "name": "Address", "description": "[%key:component::lcn::services::output_abs::fields::address::description%]" @@ -317,6 +369,10 @@ "name": "PCK", "description": "Sends arbitrary PCK command.", "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "[%key:component::lcn::services::output_abs::fields::device_id::description%]" + }, "address": { "name": "Address", "description": "[%key:component::lcn::services::output_abs::fields::address::description%]" @@ -326,6 +382,39 @@ "description": "PCK command (without address header)." } } + }, + "address_to_device_id": { + "name": "Address to device id", + "description": "Convert LCN address to device id.", + "fields": { + "id": { + "name": "Module or group id", + "description": "Target module or group id." + }, + "segment_id": { + "name": "Segment id", + "description": "Target segment id." + }, + "type": { + "name": "Type", + "description": "Target type." + }, + "host": { + "name": "Host name", + "description": "Host name as given in the integration panel." + } + } + } + }, + "exceptions": { + "no_device_identifier": { + "message": "No device identifier provided. Please provide the device id." + }, + "invalid_address": { + "message": "LCN device for given address has not been configured." + }, + "invalid_device_id": { + "message": "LCN device for given device id has not been configured." 
} } } diff --git a/tests/components/lcn/test_services.py b/tests/components/lcn/test_services.py index a4ea559cd72..cd97e3484e3 100644 --- a/tests/components/lcn/test_services.py +++ b/tests/components/lcn/test_services.py @@ -26,22 +26,37 @@ from homeassistant.components.lcn.services import LcnService from homeassistant.const import ( CONF_ADDRESS, CONF_BRIGHTNESS, + CONF_DEVICE_ID, CONF_STATE, CONF_UNIT_OF_MEASUREMENT, ) from homeassistant.core import HomeAssistant +import homeassistant.helpers.issue_registry as ir from homeassistant.setup import async_setup_component from .conftest import ( MockConfigEntry, MockModuleConnection, - MockPchkConnectionManager, + get_device, init_integration, ) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_output_abs(hass: HomeAssistant, entry: MockConfigEntry) -> None: +def device_config( + hass: HomeAssistant, entry: MockConfigEntry, config_type: str +) -> dict[str, str]: + """Return test device config depending on type.""" + if config_type == CONF_ADDRESS: + return {CONF_ADDRESS: "pchk.s0.m7"} + return {CONF_DEVICE_ID: get_device(hass, entry, (0, 7, False)).id} + + +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) +async def test_service_output_abs( + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, +) -> None: """Test output_abs service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -51,7 +66,7 @@ async def test_service_output_abs(hass: HomeAssistant, entry: MockConfigEntry) - DOMAIN, LcnService.OUTPUT_ABS, { - CONF_ADDRESS: "pchk.s0.m7", + **device_config(hass, entry, config_type), CONF_OUTPUT: "output1", CONF_BRIGHTNESS: 100, CONF_TRANSITION: 5, @@ -62,8 +77,12 @@ async def test_service_output_abs(hass: HomeAssistant, entry: MockConfigEntry) - dim_output.assert_awaited_with(0, 100, 9) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_output_rel(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) +async def test_service_output_rel( + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, +) -> None: """Test output_rel service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -73,7 +92,7 @@ async def test_service_output_rel(hass: HomeAssistant, entry: MockConfigEntry) - DOMAIN, LcnService.OUTPUT_REL, { - CONF_ADDRESS: "pchk.s0.m7", + **device_config(hass, entry, config_type), CONF_OUTPUT: "output1", CONF_BRIGHTNESS: 25, }, @@ -83,9 +102,11 @@ async def test_service_output_rel(hass: HomeAssistant, entry: MockConfigEntry) - rel_output.assert_awaited_with(0, 25) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) async def test_service_output_toggle( - hass: HomeAssistant, entry: MockConfigEntry + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, ) -> None: """Test output_toggle service.""" await async_setup_component(hass, "persistent_notification", {}) @@ -96,7 +117,7 @@ async def test_service_output_toggle( DOMAIN, LcnService.OUTPUT_TOGGLE, { - CONF_ADDRESS: "pchk.s0.m7", + **device_config(hass, entry, config_type), CONF_OUTPUT: "output1", CONF_TRANSITION: 5, }, @@ -106,8 +127,12 @@ async def test_service_output_toggle( toggle_output.assert_awaited_with(0, 9) 
-@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_relays(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) +async def test_service_relays( + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, +) -> None: """Test relays service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -116,7 +141,7 @@ async def test_service_relays(hass: HomeAssistant, entry: MockConfigEntry) -> No await hass.services.async_call( DOMAIN, LcnService.RELAYS, - {CONF_ADDRESS: "pchk.s0.m7", CONF_STATE: "0011TT--"}, + {**device_config(hass, entry, config_type), CONF_STATE: "0011TT--"}, blocking=True, ) @@ -126,8 +151,12 @@ async def test_service_relays(hass: HomeAssistant, entry: MockConfigEntry) -> No control_relays.assert_awaited_with(relay_states) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_led(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) +async def test_service_led( + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, +) -> None: """Test led service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -136,7 +165,11 @@ async def test_service_led(hass: HomeAssistant, entry: MockConfigEntry) -> None: await hass.services.async_call( DOMAIN, LcnService.LED, - {CONF_ADDRESS: "pchk.s0.m7", CONF_LED: "led6", CONF_STATE: "blink"}, + { + **device_config(hass, entry, config_type), + CONF_LED: "led6", + CONF_STATE: "blink", + }, blocking=True, ) @@ -146,8 +179,12 @@ async def test_service_led(hass: HomeAssistant, entry: MockConfigEntry) -> None: control_led.assert_awaited_with(led, led_state) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_var_abs(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) +async def test_service_var_abs( + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, +) -> None: """Test var_abs service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -157,7 +194,7 @@ async def test_service_var_abs(hass: HomeAssistant, entry: MockConfigEntry) -> N DOMAIN, LcnService.VAR_ABS, { - CONF_ADDRESS: "pchk.s0.m7", + **device_config(hass, entry, config_type), CONF_VARIABLE: "var1", CONF_VALUE: 75, CONF_UNIT_OF_MEASUREMENT: "%", @@ -170,8 +207,12 @@ async def test_service_var_abs(hass: HomeAssistant, entry: MockConfigEntry) -> N ) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_var_rel(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) +async def test_service_var_rel( + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, +) -> None: """Test var_rel service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -181,7 +222,7 @@ async def test_service_var_rel(hass: HomeAssistant, entry: MockConfigEntry) -> N DOMAIN, LcnService.VAR_REL, { - CONF_ADDRESS: "pchk.s0.m7", + **device_config(hass, entry, config_type), CONF_VARIABLE: "var1", CONF_VALUE: 10, CONF_UNIT_OF_MEASUREMENT: "%", 
@@ -198,8 +239,12 @@ async def test_service_var_rel(hass: HomeAssistant, entry: MockConfigEntry) -> N ) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_var_reset(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) +async def test_service_var_reset( + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, +) -> None: """Test var_reset service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -208,16 +253,18 @@ async def test_service_var_reset(hass: HomeAssistant, entry: MockConfigEntry) -> await hass.services.async_call( DOMAIN, LcnService.VAR_RESET, - {CONF_ADDRESS: "pchk.s0.m7", CONF_VARIABLE: "var1"}, + {**device_config(hass, entry, config_type), CONF_VARIABLE: "var1"}, blocking=True, ) var_reset.assert_awaited_with(pypck.lcn_defs.Var["VAR1"]) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) async def test_service_lock_regulator( - hass: HomeAssistant, entry: MockConfigEntry + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, ) -> None: """Test lock_regulator service.""" await async_setup_component(hass, "persistent_notification", {}) @@ -228,7 +275,7 @@ async def test_service_lock_regulator( DOMAIN, LcnService.LOCK_REGULATOR, { - CONF_ADDRESS: "pchk.s0.m7", + **device_config(hass, entry, config_type), CONF_SETPOINT: "r1varsetpoint", CONF_STATE: True, }, @@ -238,8 +285,12 @@ async def test_service_lock_regulator( lock_regulator.assert_awaited_with(0, True) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_send_keys(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) +async def test_service_send_keys( + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, +) -> None: """Test send_keys service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -248,7 +299,11 @@ async def test_service_send_keys(hass: HomeAssistant, entry: MockConfigEntry) -> await hass.services.async_call( DOMAIN, LcnService.SEND_KEYS, - {CONF_ADDRESS: "pchk.s0.m7", CONF_KEYS: "a1a5d8", CONF_STATE: "hit"}, + { + **device_config(hass, entry, config_type), + CONF_KEYS: "a1a5d8", + CONF_STATE: "hit", + }, blocking=True, ) @@ -260,9 +315,11 @@ async def test_service_send_keys(hass: HomeAssistant, entry: MockConfigEntry) -> send_keys.assert_awaited_with(keys, pypck.lcn_defs.SendKeyCommand["HIT"]) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) async def test_service_send_keys_hit_deferred( - hass: HomeAssistant, entry: MockConfigEntry + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, ) -> None: """Test send_keys (hit_deferred) service.""" await async_setup_component(hass, "persistent_notification", {}) @@ -281,7 +338,7 @@ async def test_service_send_keys_hit_deferred( DOMAIN, LcnService.SEND_KEYS, { - CONF_ADDRESS: "pchk.s0.m7", + **device_config(hass, entry, config_type), CONF_KEYS: "a1a5d8", CONF_TIME: 5, CONF_TIME_UNIT: "s", @@ -304,7 +361,7 @@ async def test_service_send_keys_hit_deferred( DOMAIN, LcnService.SEND_KEYS, { - CONF_ADDRESS: "pchk.s0.m7", + 
**device_config(hass, entry, config_type), CONF_KEYS: "a1a5d8", CONF_STATE: "make", CONF_TIME: 5, @@ -314,8 +371,12 @@ async def test_service_send_keys_hit_deferred( ) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_lock_keys(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) +async def test_service_lock_keys( + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, +) -> None: """Test lock_keys service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -324,7 +385,11 @@ async def test_service_lock_keys(hass: HomeAssistant, entry: MockConfigEntry) -> await hass.services.async_call( DOMAIN, LcnService.LOCK_KEYS, - {CONF_ADDRESS: "pchk.s0.m7", CONF_TABLE: "a", CONF_STATE: "0011TT--"}, + { + **device_config(hass, entry, config_type), + CONF_TABLE: "a", + CONF_STATE: "0011TT--", + }, blocking=True, ) @@ -334,9 +399,11 @@ async def test_service_lock_keys(hass: HomeAssistant, entry: MockConfigEntry) -> lock_keys.assert_awaited_with(0, lock_states) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) async def test_service_lock_keys_tab_a_temporary( - hass: HomeAssistant, entry: MockConfigEntry + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, ) -> None: """Test lock_keys (tab_a_temporary) service.""" await async_setup_component(hass, "persistent_notification", {}) @@ -350,7 +417,7 @@ async def test_service_lock_keys_tab_a_temporary( DOMAIN, LcnService.LOCK_KEYS, { - CONF_ADDRESS: "pchk.s0.m7", + **device_config(hass, entry, config_type), CONF_STATE: "0011TT--", CONF_TIME: 10, CONF_TIME_UNIT: "s", @@ -376,7 +443,7 @@ async def test_service_lock_keys_tab_a_temporary( DOMAIN, LcnService.LOCK_KEYS, { - CONF_ADDRESS: "pchk.s0.m7", + **device_config(hass, entry, config_type), CONF_TABLE: "b", CONF_STATE: "0011TT--", CONF_TIME: 10, @@ -386,8 +453,12 @@ async def test_service_lock_keys_tab_a_temporary( ) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_dyn_text(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) +async def test_service_dyn_text( + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, +) -> None: """Test dyn_text service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -396,15 +467,23 @@ async def test_service_dyn_text(hass: HomeAssistant, entry: MockConfigEntry) -> await hass.services.async_call( DOMAIN, LcnService.DYN_TEXT, - {CONF_ADDRESS: "pchk.s0.m7", CONF_ROW: 1, CONF_TEXT: "text in row 1"}, + { + **device_config(hass, entry, config_type), + CONF_ROW: 1, + CONF_TEXT: "text in row 1", + }, blocking=True, ) dyn_text.assert_awaited_with(0, "text in row 1") -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_pck(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) +async def test_service_pck( + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, +) -> None: """Test pck service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -413,14 +492,13 
@@ async def test_service_pck(hass: HomeAssistant, entry: MockConfigEntry) -> None: await hass.services.async_call( DOMAIN, LcnService.PCK, - {CONF_ADDRESS: "pchk.s0.m7", CONF_PCK: "PIN4"}, + {**device_config(hass, entry, config_type), CONF_PCK: "PIN4"}, blocking=True, ) pck.assert_awaited_with("PIN4") -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) async def test_service_called_with_invalid_host_id( hass: HomeAssistant, entry: MockConfigEntry ) -> None: @@ -437,3 +515,20 @@ async def test_service_called_with_invalid_host_id( ) pck.assert_not_awaited() + + +async def test_service_with_deprecated_address_parameter( + hass: HomeAssistant, entry: MockConfigEntry, issue_registry: ir.IssueRegistry +) -> None: + """Test service puts issue in registry if called with address parameter.""" + await async_setup_component(hass, "persistent_notification", {}) + await init_integration(hass, entry) + + await hass.services.async_call( + DOMAIN, + LcnService.PCK, + {CONF_ADDRESS: "pchk.s0.m7", CONF_PCK: "PIN4"}, + blocking=True, + ) + + assert issue_registry.async_get_issue(DOMAIN, "deprecated_address_parameter") From 971618399723472f17541130f260b5148f77f9af Mon Sep 17 00:00:00 2001 From: Guido Schmitz Date: Wed, 18 Dec 2024 14:38:29 +0100 Subject: [PATCH 0821/1198] Add entity translations to devolo Home Control (#132927) --- .../devolo_home_control/binary_sensor.py | 13 +--- .../components/devolo_home_control/sensor.py | 7 +- .../devolo_home_control/strings.json | 15 ++++ tests/components/devolo_home_control/mocks.py | 38 +++++++++- .../snapshots/test_binary_sensor.ambr | 4 +- .../snapshots/test_sensor.ambr | 74 ++++++++++++++++--- .../devolo_home_control/test_sensor.py | 56 ++++++++------ 7 files changed, 157 insertions(+), 50 deletions(-) diff --git a/homeassistant/components/devolo_home_control/binary_sensor.py b/homeassistant/components/devolo_home_control/binary_sensor.py index 449b1c7659f..d24033a80b9 100644 --- a/homeassistant/components/devolo_home_control/binary_sensor.py +++ b/homeassistant/components/devolo_home_control/binary_sensor.py @@ -81,14 +81,8 @@ class DevoloBinaryDeviceEntity(DevoloDeviceEntity, BinarySensorEntity): or self._binary_sensor_property.sensor_type ) - if device_instance.binary_sensor_property[element_uid].sub_type != "": - self._attr_name = device_instance.binary_sensor_property[ - element_uid - ].sub_type.capitalize() - else: - self._attr_name = device_instance.binary_sensor_property[ - element_uid - ].sensor_type.capitalize() + if device_instance.binary_sensor_property[element_uid].sub_type == "overload": + self._attr_translation_key = "overload" self._value = self._binary_sensor_property.state @@ -129,7 +123,8 @@ class DevoloRemoteControl(DevoloDeviceEntity, BinarySensorEntity): self._key = key self._attr_is_on = False - self._attr_name = f"Button {key}" + self._attr_translation_key = "button" + self._attr_translation_placeholders = {"key": str(key)} def _sync(self, message: tuple) -> None: """Update the binary sensor state.""" diff --git a/homeassistant/components/devolo_home_control/sensor.py b/homeassistant/components/devolo_home_control/sensor.py index 61a63419732..8d0a7f0313c 100644 --- a/homeassistant/components/devolo_home_control/sensor.py +++ b/homeassistant/components/devolo_home_control/sensor.py @@ -116,9 +116,11 @@ class DevoloGenericMultiLevelDeviceEntity(DevoloMultiLevelDeviceEntity): self._multi_level_sensor_property.sensor_type ) self._attr_native_unit_of_measurement = self._multi_level_sensor_property.unit - 
self._attr_name = self._multi_level_sensor_property.sensor_type.capitalize() self._value = self._multi_level_sensor_property.value + if self._multi_level_sensor_property.sensor_type == "light": + self._attr_translation_key = "brightness" + if element_uid.startswith("devolo.VoltageMultiLevelSensor:"): self._attr_entity_registry_enabled_default = False @@ -128,7 +130,6 @@ class DevoloBatteryEntity(DevoloMultiLevelDeviceEntity): _attr_entity_category = EntityCategory.DIAGNOSTIC _attr_native_unit_of_measurement = PERCENTAGE - _attr_name = "Battery level" _attr_device_class = SensorDeviceClass.BATTERY _attr_state_class = SensorStateClass.MEASUREMENT @@ -175,8 +176,6 @@ class DevoloConsumptionEntity(DevoloMultiLevelDeviceEntity): device_instance.consumption_property[element_uid], consumption ) - self._attr_name = f"{consumption.capitalize()} consumption" - @property def unique_id(self) -> str: """Return the unique ID of the entity. diff --git a/homeassistant/components/devolo_home_control/strings.json b/homeassistant/components/devolo_home_control/strings.json index 1eaf64564c2..be853e2d89d 100644 --- a/homeassistant/components/devolo_home_control/strings.json +++ b/homeassistant/components/devolo_home_control/strings.json @@ -30,5 +30,20 @@ } } } + }, + "entity": { + "binary_sensor": { + "button": { + "name": "Button {key}" + }, + "overload": { + "name": "Overload" + } + }, + "sensor": { + "brightness": { + "name": "Brightness" + } + } } } diff --git a/tests/components/devolo_home_control/mocks.py b/tests/components/devolo_home_control/mocks.py index 33c0a230e90..d611c73cf2c 100644 --- a/tests/components/devolo_home_control/mocks.py +++ b/tests/components/devolo_home_control/mocks.py @@ -70,6 +70,18 @@ class MultiLevelSensorPropertyMock(MultiLevelSensorProperty): self._logger = MagicMock() +class BrightnessSensorPropertyMock(MultiLevelSensorProperty): + """devolo Home Control brightness sensor mock.""" + + def __init__(self, **kwargs: Any) -> None: # pylint: disable=super-init-not-called + """Initialize the mock.""" + self.element_uid = "Test" + self.sensor_type = "light" + self._unit = "%" + self._value = 20 + self._logger = MagicMock() + + class MultiLevelSwitchPropertyMock(MultiLevelSwitchProperty): """devolo Home Control multi level switch mock.""" @@ -138,7 +150,18 @@ class BinarySensorMockOverload(DeviceMock): """Initialize the mock.""" super().__init__() self.binary_sensor_property = {"Overload": BinarySensorPropertyMock()} - self.binary_sensor_property["Overload"].sensor_type = "overload" + self.binary_sensor_property["Overload"].sub_type = "overload" + + +class BrightnessSensorMock(DeviceMock): + """devolo Home Control brightness sensor device mock.""" + + def __init__(self) -> None: + """Initialize the mock.""" + super().__init__() + self.multi_level_sensor_property = { + "devolo.MultiLevelSensor:Test": BrightnessSensorPropertyMock() + } class ClimateMock(DeviceMock): @@ -275,6 +298,19 @@ class HomeControlMockBinarySensor(HomeControlMock): self.publisher.unregister = MagicMock() +class HomeControlMockBrightness(HomeControlMock): + """devolo Home Control gateway mock with brightness devices.""" + + def __init__(self, **kwargs: Any) -> None: + """Initialize the mock.""" + super().__init__() + self.devices = { + "Test": BrightnessSensorMock(), + } + self.publisher = Publisher(self.devices.keys()) + self.publisher.unregister = MagicMock() + + class HomeControlMockClimate(HomeControlMock): """devolo Home Control gateway mock with climate devices.""" diff --git 
a/tests/components/devolo_home_control/snapshots/test_binary_sensor.ambr b/tests/components/devolo_home_control/snapshots/test_binary_sensor.ambr index 0980a550c7b..c5daed73b33 100644 --- a/tests/components/devolo_home_control/snapshots/test_binary_sensor.ambr +++ b/tests/components/devolo_home_control/snapshots/test_binary_sensor.ambr @@ -88,7 +88,7 @@ 'platform': 'devolo_home_control', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'overload', 'unique_id': 'Overload', 'unit_of_measurement': None, }) @@ -134,7 +134,7 @@ 'platform': 'devolo_home_control', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'button', 'unique_id': 'Test_1', 'unit_of_measurement': None, }) diff --git a/tests/components/devolo_home_control/snapshots/test_sensor.ambr b/tests/components/devolo_home_control/snapshots/test_sensor.ambr index 7f67c70f6ac..3c23385594a 100644 --- a/tests/components/devolo_home_control/snapshots/test_sensor.ambr +++ b/tests/components/devolo_home_control/snapshots/test_sensor.ambr @@ -3,12 +3,12 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'battery', - 'friendly_name': 'Test Battery level', + 'friendly_name': 'Test Battery', 'state_class': , 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 'sensor.test_battery_level', + 'entity_id': 'sensor.test_battery', 'last_changed': , 'last_reported': , 'last_updated': , @@ -29,7 +29,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': , - 'entity_id': 'sensor.test_battery_level', + 'entity_id': 'sensor.test_battery', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -41,7 +41,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Battery level', + 'original_name': 'Battery', 'platform': 'devolo_home_control', 'previous_unique_id': None, 'supported_features': 0, @@ -50,16 +50,66 @@ 'unit_of_measurement': '%', }) # --- +# name: test_brightness_sensor + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Brightness', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_brightness', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- +# name: test_brightness_sensor.1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_brightness', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Brightness', + 'platform': 'devolo_home_control', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'brightness', + 'unique_id': 'devolo.MultiLevelSensor:Test', + 'unit_of_measurement': '%', + }) +# --- # name: test_consumption_sensor StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Test Current consumption', + 'friendly_name': 'Test Power', 'state_class': , 'unit_of_measurement': 'W', }), 'context': , - 'entity_id': 'sensor.test_current_consumption', + 'entity_id': 'sensor.test_power', 'last_changed': , 'last_reported': , 'last_updated': , @@ -80,7 +130,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 
'entity_id': 'sensor.test_current_consumption', + 'entity_id': 'sensor.test_power', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -92,7 +142,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Current consumption', + 'original_name': 'Power', 'platform': 'devolo_home_control', 'previous_unique_id': None, 'supported_features': 0, @@ -105,12 +155,12 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'Test Total consumption', + 'friendly_name': 'Test Energy', 'state_class': , 'unit_of_measurement': 'kWh', }), 'context': , - 'entity_id': 'sensor.test_total_consumption', + 'entity_id': 'sensor.test_energy', 'last_changed': , 'last_reported': , 'last_updated': , @@ -131,7 +181,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.test_total_consumption', + 'entity_id': 'sensor.test_energy', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -143,7 +193,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Total consumption', + 'original_name': 'Energy', 'platform': 'devolo_home_control', 'previous_unique_id': None, 'supported_features': 0, diff --git a/tests/components/devolo_home_control/test_sensor.py b/tests/components/devolo_home_control/test_sensor.py index 08b53dae865..ba4c493c366 100644 --- a/tests/components/devolo_home_control/test_sensor.py +++ b/tests/components/devolo_home_control/test_sensor.py @@ -10,7 +10,30 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from . import configure_integration -from .mocks import HomeControlMock, HomeControlMockConsumption, HomeControlMockSensor +from .mocks import ( + HomeControlMock, + HomeControlMockBrightness, + HomeControlMockConsumption, + HomeControlMockSensor, +) + + +async def test_brightness_sensor( + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion +) -> None: + """Test setup of a brightness sensor device.""" + entry = configure_integration(hass) + test_gateway = HomeControlMockBrightness() + with patch( + "homeassistant.components.devolo_home_control.HomeControl", + side_effect=[test_gateway, HomeControlMock()], + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get(f"{SENSOR_DOMAIN}.test_brightness") + assert state == snapshot + assert entity_registry.async_get(f"{SENSOR_DOMAIN}.test_brightness") == snapshot async def test_temperature_sensor( @@ -45,14 +68,14 @@ async def test_battery_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{SENSOR_DOMAIN}.test_battery_level") + state = hass.states.get(f"{SENSOR_DOMAIN}.test_battery") assert state == snapshot - assert entity_registry.async_get(f"{SENSOR_DOMAIN}.test_battery_level") == snapshot + assert entity_registry.async_get(f"{SENSOR_DOMAIN}.test_battery") == snapshot # Emulate websocket message: value changed test_gateway.publisher.dispatch("Test", ("Test", 10, "battery_level")) await hass.async_block_till_done() - assert hass.states.get(f"{SENSOR_DOMAIN}.test_battery_level").state == "10" + assert hass.states.get(f"{SENSOR_DOMAIN}.test_battery").state == "10" async def test_consumption_sensor( @@ -68,37 +91,26 @@ async def test_consumption_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = 
hass.states.get(f"{SENSOR_DOMAIN}.test_current_consumption") + state = hass.states.get(f"{SENSOR_DOMAIN}.test_power") assert state == snapshot - assert ( - entity_registry.async_get(f"{SENSOR_DOMAIN}.test_current_consumption") - == snapshot - ) + assert entity_registry.async_get(f"{SENSOR_DOMAIN}.test_power") == snapshot - state = hass.states.get(f"{SENSOR_DOMAIN}.test_total_consumption") + state = hass.states.get(f"{SENSOR_DOMAIN}.test_energy") assert state == snapshot - assert ( - entity_registry.async_get(f"{SENSOR_DOMAIN}.test_total_consumption") == snapshot - ) + assert entity_registry.async_get(f"{SENSOR_DOMAIN}.test_energy") == snapshot # Emulate websocket message: value changed test_gateway.devices["Test"].consumption_property["devolo.Meter:Test"].total = 50.0 test_gateway.publisher.dispatch("Test", ("devolo.Meter:Test", 50.0)) await hass.async_block_till_done() - assert hass.states.get(f"{SENSOR_DOMAIN}.test_total_consumption").state == "50.0" + assert hass.states.get(f"{SENSOR_DOMAIN}.test_energy").state == "50.0" # Emulate websocket message: device went offline test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert ( - hass.states.get(f"{SENSOR_DOMAIN}.test_current_consumption").state - == STATE_UNAVAILABLE - ) - assert ( - hass.states.get(f"{SENSOR_DOMAIN}.test_total_consumption").state - == STATE_UNAVAILABLE - ) + assert hass.states.get(f"{SENSOR_DOMAIN}.test_power").state == STATE_UNAVAILABLE + assert hass.states.get(f"{SENSOR_DOMAIN}.test_energy").state == STATE_UNAVAILABLE async def test_voltage_sensor(hass: HomeAssistant) -> None: From 2d6d313e5cae60510c3e294110905b9d80ea5e5e Mon Sep 17 00:00:00 2001 From: Markus Jacobsen Date: Wed, 18 Dec 2024 14:50:12 +0100 Subject: [PATCH 0822/1198] Complete adding custom integration action sections support to hassfest (#132443) --- script/hassfest/services.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/script/hassfest/services.py b/script/hassfest/services.py index 8c9ab5c0c0b..3a0ebed76fe 100644 --- a/script/hassfest/services.py +++ b/script/hassfest/services.py @@ -77,6 +77,8 @@ CUSTOM_INTEGRATION_FIELD_SCHEMA = CORE_INTEGRATION_FIELD_SCHEMA.extend( CUSTOM_INTEGRATION_SECTION_SCHEMA = vol.Schema( { + vol.Optional("description"): str, + vol.Optional("name"): str, vol.Optional("collapsed"): bool, vol.Required("fields"): vol.Schema({str: CUSTOM_INTEGRATION_FIELD_SCHEMA}), } From 943b1d9f08ec451545a483e98cccec815ca72b59 Mon Sep 17 00:00:00 2001 From: adam-the-hero <132444842+adam-the-hero@users.noreply.github.com> Date: Wed, 18 Dec 2024 14:52:25 +0100 Subject: [PATCH 0823/1198] Add sensors platform to Watergate integration (#133015) --- .../components/watergate/__init__.py | 32 +- .../components/watergate/coordinator.py | 33 +- homeassistant/components/watergate/entity.py | 10 +- .../components/watergate/quality_scale.yaml | 1 + homeassistant/components/watergate/sensor.py | 214 ++++++++ .../components/watergate/strings.json | 33 ++ homeassistant/components/watergate/valve.py | 13 +- tests/components/watergate/conftest.py | 8 + tests/components/watergate/const.py | 19 +- .../watergate/snapshots/test_sensor.ambr | 506 ++++++++++++++++++ tests/components/watergate/test_sensor.py | 150 ++++++ 11 files changed, 1002 insertions(+), 17 deletions(-) create mode 100644 homeassistant/components/watergate/sensor.py create mode 100644 tests/components/watergate/snapshots/test_sensor.ambr create mode 100644 
tests/components/watergate/test_sensor.py diff --git a/homeassistant/components/watergate/__init__.py b/homeassistant/components/watergate/__init__.py index 1cf38876556..fa761110339 100644 --- a/homeassistant/components/watergate/__init__.py +++ b/homeassistant/components/watergate/__init__.py @@ -25,8 +25,13 @@ from .coordinator import WatergateDataCoordinator _LOGGER = logging.getLogger(__name__) +WEBHOOK_TELEMETRY_TYPE = "telemetry" +WEBHOOK_VALVE_TYPE = "valve" +WEBHOOK_WIFI_CHANGED_TYPE = "wifi-changed" +WEBHOOK_POWER_SUPPLY_CHANGED_TYPE = "power-supply-changed" PLATFORMS: list[Platform] = [ + Platform.SENSOR, Platform.VALVE, ] @@ -82,7 +87,6 @@ def get_webhook_handler( async def async_webhook_handler( hass: HomeAssistant, webhook_id: str, request: Request ) -> Response | None: - # Handle http post calls to the path. if not request.body_exists: return HomeAssistantView.json( result="No Body", status_code=HTTPStatus.BAD_REQUEST @@ -96,9 +100,29 @@ def get_webhook_handler( body_type = body.get("type") - coordinator_data = coordinator.data - if body_type == Platform.VALVE and coordinator_data: - coordinator_data.valve_state = data.state + if not (coordinator_data := coordinator.data): + pass + elif body_type == WEBHOOK_VALVE_TYPE: + coordinator_data.state.valve_state = data.state + elif body_type == WEBHOOK_TELEMETRY_TYPE: + errors = data.errors or {} + coordinator_data.telemetry.flow = ( + data.flow if "flow" not in errors else None + ) + coordinator_data.telemetry.pressure = ( + data.pressure if "pressure" not in errors else None + ) + coordinator_data.telemetry.water_temperature = ( + data.temperature if "temperature" not in errors else None + ) + elif body_type == WEBHOOK_WIFI_CHANGED_TYPE: + coordinator_data.networking.ip = data.ip + coordinator_data.networking.gateway = data.gateway + coordinator_data.networking.subnet = data.subnet + coordinator_data.networking.ssid = data.ssid + coordinator_data.networking.rssi = data.rssi + elif body_type == WEBHOOK_POWER_SUPPLY_CHANGED_TYPE: + coordinator_data.state.power_supply = data.supply coordinator.async_set_updated_data(coordinator_data) diff --git a/homeassistant/components/watergate/coordinator.py b/homeassistant/components/watergate/coordinator.py index c0b87feed30..1d83b7a3ccb 100644 --- a/homeassistant/components/watergate/coordinator.py +++ b/homeassistant/components/watergate/coordinator.py @@ -1,10 +1,11 @@ """Coordinator for Watergate API.""" +from dataclasses import dataclass from datetime import timedelta import logging from watergate_local_api import WatergateApiException, WatergateLocalApiClient -from watergate_local_api.models import DeviceState +from watergate_local_api.models import DeviceState, NetworkingData, TelemetryData from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -14,7 +15,16 @@ from .const import DOMAIN _LOGGER = logging.getLogger(__name__) -class WatergateDataCoordinator(DataUpdateCoordinator[DeviceState]): +@dataclass +class WatergateAgregatedRequests: + """Class to hold aggregated requests.""" + + state: DeviceState + telemetry: TelemetryData + networking: NetworkingData + + +class WatergateDataCoordinator(DataUpdateCoordinator[WatergateAgregatedRequests]): """Class to manage fetching watergate data.""" def __init__(self, hass: HomeAssistant, api: WatergateLocalApiClient) -> None: @@ -27,9 +37,22 @@ class WatergateDataCoordinator(DataUpdateCoordinator[DeviceState]): ) self.api = api - async def _async_update_data(self) -> 
DeviceState: + async def _async_update_data(self) -> WatergateAgregatedRequests: try: state = await self.api.async_get_device_state() + telemetry = await self.api.async_get_telemetry_data() + networking = await self.api.async_get_networking() except WatergateApiException as exc: - raise UpdateFailed from exc - return state + raise UpdateFailed(f"Sonic device is unavailable: {exc}") from exc + return WatergateAgregatedRequests(state, telemetry, networking) + + def async_set_updated_data(self, data: WatergateAgregatedRequests) -> None: + """Manually update data, notify listeners and DO NOT reset refresh interval.""" + + self.data = data + self.logger.debug( + "Manually updated %s data", + self.name, + ) + + self.async_update_listeners() diff --git a/homeassistant/components/watergate/entity.py b/homeassistant/components/watergate/entity.py index 977a7fbedb4..8f43643029f 100644 --- a/homeassistant/components/watergate/entity.py +++ b/homeassistant/components/watergate/entity.py @@ -20,11 +20,13 @@ class WatergateEntity(CoordinatorEntity[WatergateDataCoordinator]): """Initialize the entity.""" super().__init__(coordinator) self._api_client = coordinator.api - self._attr_unique_id = f"{coordinator.data.serial_number}.{entity_name}" + self._attr_unique_id = f"{coordinator.data.state.serial_number}.{entity_name}" self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, coordinator.data.serial_number)}, + identifiers={(DOMAIN, coordinator.data.state.serial_number)}, name="Sonic", - serial_number=coordinator.data.serial_number, + serial_number=coordinator.data.state.serial_number, manufacturer=MANUFACTURER, - sw_version=coordinator.data.firmware_version if coordinator.data else None, + sw_version=( + coordinator.data.state.firmware_version if coordinator.data else None + ), ) diff --git a/homeassistant/components/watergate/quality_scale.yaml b/homeassistant/components/watergate/quality_scale.yaml index c6027f6a548..b116eff970e 100644 --- a/homeassistant/components/watergate/quality_scale.yaml +++ b/homeassistant/components/watergate/quality_scale.yaml @@ -27,6 +27,7 @@ rules: test-before-configure: done test-before-setup: done unique-config-entry: done + # Silver config-entry-unloading: done log-when-unavailable: todo diff --git a/homeassistant/components/watergate/sensor.py b/homeassistant/components/watergate/sensor.py new file mode 100644 index 00000000000..82ac7cfea92 --- /dev/null +++ b/homeassistant/components/watergate/sensor.py @@ -0,0 +1,214 @@ +"""Support for Watergate sensors.""" + +from collections.abc import Callable +from dataclasses import dataclass +from datetime import datetime, timedelta +from enum import StrEnum +import logging + +from homeassistant.components.sensor import ( + HomeAssistant, + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import ( + SIGNAL_STRENGTH_DECIBELS_MILLIWATT, + EntityCategory, + UnitOfPressure, + UnitOfTemperature, + UnitOfTime, + UnitOfVolume, + UnitOfVolumeFlowRate, +) +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType +from homeassistant.util import dt as dt_util + +from . 
import WatergateConfigEntry +from .coordinator import WatergateAgregatedRequests, WatergateDataCoordinator +from .entity import WatergateEntity + +_LOGGER = logging.getLogger(__name__) + +PARALLEL_UPDATES = 0 + + +class PowerSupplyMode(StrEnum): + """Power supply mode.""" + + BATTERY = "battery" + EXTERNAL = "external" + BATTERY_EXTERNAL = "battery_external" + + +@dataclass(kw_only=True, frozen=True) +class WatergateSensorEntityDescription(SensorEntityDescription): + """Description for Watergate sensor entities.""" + + value_fn: Callable[ + [WatergateAgregatedRequests], + StateType | datetime | PowerSupplyMode, + ] + + +DESCRIPTIONS: list[WatergateSensorEntityDescription] = [ + WatergateSensorEntityDescription( + value_fn=lambda data: ( + data.state.water_meter.volume + if data.state and data.state.water_meter + else None + ), + translation_key="water_meter_volume", + key="water_meter_volume", + native_unit_of_measurement=UnitOfVolume.LITERS, + device_class=SensorDeviceClass.WATER, + state_class=SensorStateClass.TOTAL_INCREASING, + ), + WatergateSensorEntityDescription( + value_fn=lambda data: ( + data.state.water_meter.duration + if data.state and data.state.water_meter + else None + ), + translation_key="water_meter_duration", + key="water_meter_duration", + native_unit_of_measurement=UnitOfTime.MINUTES, + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL_INCREASING, + ), + WatergateSensorEntityDescription( + value_fn=lambda data: data.networking.rssi if data.networking else None, + key="rssi", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, + device_class=SensorDeviceClass.SIGNAL_STRENGTH, + state_class=SensorStateClass.MEASUREMENT, + ), + WatergateSensorEntityDescription( + value_fn=lambda data: ( + dt_util.as_utc( + dt_util.now() - timedelta(microseconds=data.networking.wifi_uptime) + ) + if data.networking + else None + ), + translation_key="wifi_up_since", + key="wifi_up_since", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + device_class=SensorDeviceClass.TIMESTAMP, + ), + WatergateSensorEntityDescription( + value_fn=lambda data: ( + dt_util.as_utc( + dt_util.now() - timedelta(microseconds=data.networking.mqtt_uptime) + ) + if data.networking + else None + ), + translation_key="mqtt_up_since", + key="mqtt_up_since", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + device_class=SensorDeviceClass.TIMESTAMP, + ), + WatergateSensorEntityDescription( + value_fn=lambda data: ( + data.telemetry.water_temperature if data.telemetry else None + ), + translation_key="water_temperature", + key="water_temperature", + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + ), + WatergateSensorEntityDescription( + value_fn=lambda data: data.telemetry.pressure if data.telemetry else None, + translation_key="water_pressure", + key="water_pressure", + native_unit_of_measurement=UnitOfPressure.MBAR, + device_class=SensorDeviceClass.PRESSURE, + state_class=SensorStateClass.MEASUREMENT, + ), + WatergateSensorEntityDescription( + value_fn=lambda data: ( + data.telemetry.flow / 1000 + if data.telemetry and data.telemetry.flow is not None + else None + ), + key="water_flow_rate", + native_unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_MINUTE, + device_class=SensorDeviceClass.VOLUME_FLOW_RATE, +
state_class=SensorStateClass.MEASUREMENT, + ), + WatergateSensorEntityDescription( + value_fn=lambda data: ( + dt_util.as_utc(dt_util.now() - timedelta(seconds=data.state.uptime)) + if data.state + else None + ), + translation_key="up_since", + key="up_since", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + device_class=SensorDeviceClass.TIMESTAMP, + ), + WatergateSensorEntityDescription( + value_fn=lambda data: ( + PowerSupplyMode(data.state.power_supply.replace("+", "_")) + if data.state + else None + ), + translation_key="power_supply_mode", + key="power_supply_mode", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + device_class=SensorDeviceClass.ENUM, + options=[member.value for member in PowerSupplyMode], + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: WatergateConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up all entries for Watergate Platform.""" + + coordinator = config_entry.runtime_data + + async_add_entities( + SonicSensor(coordinator, description) for description in DESCRIPTIONS + ) + + +class SonicSensor(WatergateEntity, SensorEntity): + """Define a Sonic Sensor entity.""" + + entity_description: WatergateSensorEntityDescription + + def __init__( + self, + coordinator: WatergateDataCoordinator, + entity_description: WatergateSensorEntityDescription, + ) -> None: + """Initialize the sensor.""" + super().__init__(coordinator, entity_description.key) + self.entity_description = entity_description + + @property + def available(self) -> bool: + """Return True if entity is available.""" + return ( + super().available + and self.entity_description.value_fn(self.coordinator.data) is not None + ) + + @property + def native_value(self) -> str | int | float | datetime | PowerSupplyMode | None: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/watergate/strings.json b/homeassistant/components/watergate/strings.json index 2a75c4d103d..c312525e420 100644 --- a/homeassistant/components/watergate/strings.json +++ b/homeassistant/components/watergate/strings.json @@ -17,5 +17,38 @@ "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" } + }, + "entity": { + "sensor": { + "water_meter_volume": { + "name": "Water meter volume" + }, + "water_meter_duration": { + "name": "Water meter duration" + }, + "wifi_up_since": { + "name": "Wi-Fi up since" + }, + "mqtt_up_since": { + "name": "MQTT up since" + }, + "water_temperature": { + "name": "Water temperature" + }, + "water_pressure": { + "name": "Water pressure" + }, + "up_since": { + "name": "Up since" + }, + "power_supply_mode": { + "name": "Power supply mode", + "state": { + "battery": "Battery", + "external": "Mains", + "battery_external": "Battery and mains" + } + } + } } } diff --git a/homeassistant/components/watergate/valve.py b/homeassistant/components/watergate/valve.py index aecaf3fbca9..556b53e1d3c 100644 --- a/homeassistant/components/watergate/valve.py +++ b/homeassistant/components/watergate/valve.py @@ -43,7 +43,9 @@ class SonicValve(WatergateEntity, ValveEntity): ) -> None: """Initialize the sensor.""" super().__init__(coordinator, ENTITY_NAME) - self._valve_state = coordinator.data.valve_state if coordinator.data else None + self._valve_state = ( + coordinator.data.state.valve_state if coordinator.data.state else None + ) @property def is_closed(self) -> bool: 
@@ -65,7 +67,9 @@ class SonicValve(WatergateEntity, ValveEntity): """Handle data update.""" self._attr_available = self.coordinator.data is not None self._valve_state = ( - self.coordinator.data.valve_state if self.coordinator.data else None + self.coordinator.data.state.valve_state + if self.coordinator.data.state + else None ) self.async_write_ha_state() @@ -80,3 +84,8 @@ class SonicValve(WatergateEntity, ValveEntity): await self._api_client.async_set_valve_state(ValveState.CLOSED) self._valve_state = ValveState.CLOSING self.async_write_ha_state() + + @property + def available(self) -> bool: + """Return True if entity is available.""" + return super().available and self.coordinator.data.state is not None diff --git a/tests/components/watergate/conftest.py b/tests/components/watergate/conftest.py index d29b90431a4..6d40a4b7152 100644 --- a/tests/components/watergate/conftest.py +++ b/tests/components/watergate/conftest.py @@ -9,7 +9,9 @@ from homeassistant.const import CONF_IP_ADDRESS from .const import ( DEFAULT_DEVICE_STATE, + DEFAULT_NETWORKING_STATE, DEFAULT_SERIAL_NUMBER, + DEFAULT_TELEMETRY_STATE, MOCK_CONFIG, MOCK_WEBHOOK_ID, ) @@ -35,6 +37,12 @@ def mock_watergate_client() -> Generator[AsyncMock]: mock_client_instance.async_get_device_state = AsyncMock( return_value=DEFAULT_DEVICE_STATE ) + mock_client_instance.async_get_networking = AsyncMock( + return_value=DEFAULT_NETWORKING_STATE + ) + mock_client_instance.async_get_telemetry_data = AsyncMock( + return_value=DEFAULT_TELEMETRY_STATE + ) yield mock_client_instance diff --git a/tests/components/watergate/const.py b/tests/components/watergate/const.py index 4297b3321ad..0f7cc12c14b 100644 --- a/tests/components/watergate/const.py +++ b/tests/components/watergate/const.py @@ -1,6 +1,7 @@ """Constants for the Watergate tests.""" -from watergate_local_api.models import DeviceState +from watergate_local_api.models import DeviceState, NetworkingData, TelemetryData +from watergate_local_api.models.water_meter import WaterMeter from homeassistant.const import CONF_IP_ADDRESS, CONF_NAME, CONF_WEBHOOK_ID @@ -22,6 +23,20 @@ DEFAULT_DEVICE_STATE = DeviceState( "battery", "1.0.0", 100, - {"volume": 1.2, "duration": 100}, + WaterMeter(1.2, 100), DEFAULT_SERIAL_NUMBER, ) + +DEFAULT_NETWORKING_STATE = NetworkingData( + True, + True, + "192.168.1.127", + "192.168.1.1", + "255.255.255.0", + "Sonic", + -50, + 2137, + 1910, +) + +DEFAULT_TELEMETRY_STATE = TelemetryData(0.0, 100, 28.32, None, []) diff --git a/tests/components/watergate/snapshots/test_sensor.ambr b/tests/components/watergate/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..a8969798105 --- /dev/null +++ b/tests/components/watergate/snapshots/test_sensor.ambr @@ -0,0 +1,506 @@ +# serializer version: 1 +# name: test_sensor[sensor.sonic_mqtt_up_since-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.sonic_mqtt_up_since', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'MQTT up since', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mqtt_up_since', + 'unique_id': 'a63182948ce2896a.mqtt_up_since', + 'unit_of_measurement': None, + }) +# --- +# 
name: test_sensor[sensor.sonic_mqtt_up_since-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Sonic MQTT up since', + }), + 'context': , + 'entity_id': 'sensor.sonic_mqtt_up_since', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-01-09T11:59:59+00:00', + }) +# --- +# name: test_sensor[sensor.sonic_power_supply_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'battery', + 'external', + 'battery_external', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.sonic_power_supply_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power supply mode', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_supply_mode', + 'unique_id': 'a63182948ce2896a.power_supply_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.sonic_power_supply_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Sonic Power supply mode', + 'options': list([ + 'battery', + 'external', + 'battery_external', + ]), + }), + 'context': , + 'entity_id': 'sensor.sonic_power_supply_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'battery', + }) +# --- +# name: test_sensor[sensor.sonic_signal_strength-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.sonic_signal_strength', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Signal strength', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'a63182948ce2896a.rssi', + 'unit_of_measurement': 'dBm', + }) +# --- +# name: test_sensor[sensor.sonic_signal_strength-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'signal_strength', + 'friendly_name': 'Sonic Signal strength', + 'state_class': , + 'unit_of_measurement': 'dBm', + }), + 'context': , + 'entity_id': 'sensor.sonic_signal_strength', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-50', + }) +# --- +# name: test_sensor[sensor.sonic_up_since-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.sonic_up_since', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Up since', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'up_since', + 'unique_id': 'a63182948ce2896a.up_since', + 
'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.sonic_up_since-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Sonic Up since', + }), + 'context': , + 'entity_id': 'sensor.sonic_up_since', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-01-09T11:58:20+00:00', + }) +# --- +# name: test_sensor[sensor.sonic_volume_flow_rate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sonic_volume_flow_rate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Volume flow rate', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'a63182948ce2896a.water_flow_rate', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sonic_volume_flow_rate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'volume_flow_rate', + 'friendly_name': 'Sonic Volume flow rate', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sonic_volume_flow_rate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensor[sensor.sonic_water_meter_duration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sonic_water_meter_duration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Water meter duration', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'water_meter_duration', + 'unique_id': 'a63182948ce2896a.water_meter_duration', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sonic_water_meter_duration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Sonic Water meter duration', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sonic_water_meter_duration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensor[sensor.sonic_water_meter_volume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sonic_water_meter_volume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Water meter volume', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'water_meter_volume', + 
'unique_id': 'a63182948ce2896a.water_meter_volume', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sonic_water_meter_volume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'water', + 'friendly_name': 'Sonic Water meter volume', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sonic_water_meter_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensor[sensor.sonic_water_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sonic_water_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Water pressure', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'water_pressure', + 'unique_id': 'a63182948ce2896a.water_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sonic_water_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Sonic Water pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sonic_water_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensor[sensor.sonic_water_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sonic_water_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Water temperature', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'water_temperature', + 'unique_id': 'a63182948ce2896a.water_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sonic_water_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Sonic Water temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sonic_water_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '28.32', + }) +# --- +# name: test_sensor[sensor.sonic_wi_fi_up_since-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.sonic_wi_fi_up_since', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wi-Fi up since', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': 'wifi_up_since', + 'unique_id': 'a63182948ce2896a.wifi_up_since', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.sonic_wi_fi_up_since-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Sonic Wi-Fi up since', + }), + 'context': , + 'entity_id': 'sensor.sonic_wi_fi_up_since', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-01-09T11:59:59+00:00', + }) +# --- diff --git a/tests/components/watergate/test_sensor.py b/tests/components/watergate/test_sensor.py new file mode 100644 index 00000000000..58632c7548b --- /dev/null +++ b/tests/components/watergate/test_sensor.py @@ -0,0 +1,150 @@ +"""Tests for the Watergate valve platform.""" + +from collections.abc import Generator + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import EntityCategory, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import init_integration +from .const import DEFAULT_NETWORKING_STATE, DEFAULT_TELEMETRY_STATE, MOCK_WEBHOOK_ID + +from tests.common import AsyncMock, MockConfigEntry, patch, snapshot_platform +from tests.typing import ClientSessionGenerator + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_entry: MockConfigEntry, + mock_watergate_client: Generator[AsyncMock], + freezer: FrozenDateTimeFactory, + snapshot: SnapshotAssertion, +) -> None: + """Test states of the sensor.""" + freezer.move_to("2021-01-09 12:00:00+00:00") + with patch("homeassistant.components.watergate.PLATFORMS", [Platform.SENSOR]): + await init_integration(hass, mock_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_entry.entry_id) + + +async def test_diagnostics_are_disabled_by_default( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_entry: MockConfigEntry, + mock_watergate_client: Generator[AsyncMock], +) -> None: + """Test if all diagnostic entities are disabled by default.""" + with patch("homeassistant.components.watergate.PLATFORMS", [Platform.SENSOR]): + await init_integration(hass, mock_entry) + + entries = [ + entry + for entry in entity_registry.entities.get_entries_for_config_entry_id( + mock_entry.entry_id + ) + if entry.entity_category == EntityCategory.DIAGNOSTIC + ] + + assert len(entries) == 5 + for entry in entries: + assert entry.disabled + + +async def test_telemetry_webhook( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + mock_entry: MockConfigEntry, + mock_watergate_client: Generator[AsyncMock], +) -> None: + """Test if water flow webhook is handled correctly.""" + await init_integration(hass, mock_entry) + + def assert_state(entity_id: str, expected_state: str): + state = hass.states.get(entity_id) + assert state.state == str(expected_state) + + assert_state("sensor.sonic_volume_flow_rate", DEFAULT_TELEMETRY_STATE.flow) + assert_state("sensor.sonic_water_pressure", DEFAULT_TELEMETRY_STATE.pressure) + assert_state( + "sensor.sonic_water_temperature", DEFAULT_TELEMETRY_STATE.water_temperature + ) + + telemetry_change_data = { + "type": "telemetry", + "data": {"flow": 2137, "pressure": 1910, "temperature": 20}, + } + client = await hass_client_no_auth() + await client.post(f"/api/webhook/{MOCK_WEBHOOK_ID}", json=telemetry_change_data) + + await 
hass.async_block_till_done() + + assert_state("sensor.sonic_volume_flow_rate", "2.137") + assert_state("sensor.sonic_water_pressure", "1910") + assert_state("sensor.sonic_water_temperature", "20") + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_wifi_webhook( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + mock_entry: MockConfigEntry, + mock_watergate_client: Generator[AsyncMock], +) -> None: + """Test if water flow webhook is handled correctly.""" + await init_integration(hass, mock_entry) + + def assert_state(entity_id: str, expected_state: str): + state = hass.states.get(entity_id) + assert state.state == str(expected_state) + + assert_state("sensor.sonic_signal_strength", DEFAULT_NETWORKING_STATE.rssi) + + wifi_change_data = { + "type": "wifi-changed", + "data": { + "ip": "192.168.2.137", + "gateway": "192.168.2.1", + "ssid": "Sonic 2", + "rssi": -70, + "subnet": "255.255.255.0", + }, + } + client = await hass_client_no_auth() + await client.post(f"/api/webhook/{MOCK_WEBHOOK_ID}", json=wifi_change_data) + + await hass.async_block_till_done() + + assert_state("sensor.sonic_signal_strength", "-70") + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_power_supply_webhook( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + mock_entry: MockConfigEntry, + mock_watergate_client: Generator[AsyncMock], +) -> None: + """Test if water flow webhook is handled correctly.""" + await init_integration(hass, mock_entry) + entity_id = "sensor.sonic_power_supply_mode" + registered_entity = hass.states.get(entity_id) + assert registered_entity + assert registered_entity.state == "battery" + + power_supply_change_data = { + "type": "power-supply-changed", + "data": {"supply": "external"}, + } + client = await hass_client_no_auth() + await client.post(f"/api/webhook/{MOCK_WEBHOOK_ID}", json=power_supply_change_data) + + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == "external" From 3132700492bea7ab9ba6c42ba0689ef18a6a55e6 Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Wed, 18 Dec 2024 14:02:44 +0000 Subject: [PATCH 0824/1198] Add ability to translate ENUM sensor states in Unifi integration (#131921) --- homeassistant/components/unifi/const.py | 25 ++- homeassistant/components/unifi/sensor.py | 4 +- homeassistant/components/unifi/strings.json | 20 +++ .../unifi/snapshots/test_sensor.ambr | 156 +++++++++--------- tests/components/unifi/test_sensor.py | 5 +- 5 files changed, 113 insertions(+), 97 deletions(-) diff --git a/homeassistant/components/unifi/const.py b/homeassistant/components/unifi/const.py index 2b16895a9a8..bbd03b070a4 100644 --- a/homeassistant/components/unifi/const.py +++ b/homeassistant/components/unifi/const.py @@ -50,17 +50,16 @@ DPI_SWITCH = "dpi" OUTLET_SWITCH = "outlet" DEVICE_STATES = { - DeviceState.DISCONNECTED: "Disconnected", - DeviceState.CONNECTED: "Connected", - DeviceState.PENDING: "Pending", - DeviceState.FIRMWARE_MISMATCH: "Firmware Mismatch", - DeviceState.UPGRADING: "Upgrading", - DeviceState.PROVISIONING: "Provisioning", - DeviceState.HEARTBEAT_MISSED: "Heartbeat Missed", - DeviceState.ADOPTING: "Adopting", - DeviceState.DELETING: "Deleting", - DeviceState.INFORM_ERROR: "Inform Error", - DeviceState.ADOPTION_FALIED: "Adoption Failed", - DeviceState.ISOLATED: "Isolated", - DeviceState.UNKNOWN: "Unknown", + DeviceState.DISCONNECTED: "disconnected", + DeviceState.CONNECTED: "connected", + DeviceState.PENDING: "pending", + 
DeviceState.FIRMWARE_MISMATCH: "firmware_mismatch", + DeviceState.UPGRADING: "upgrading", + DeviceState.PROVISIONING: "provisioning", + DeviceState.HEARTBEAT_MISSED: "heartbeat_missed", + DeviceState.ADOPTING: "adopting", + DeviceState.DELETING: "deleting", + DeviceState.INFORM_ERROR: "inform_error", + DeviceState.ADOPTION_FALIED: "adoption_failed", + DeviceState.ISOLATED: "isolated", } diff --git a/homeassistant/components/unifi/sensor.py b/homeassistant/components/unifi/sensor.py index 74d49db6e4e..194a8575174 100644 --- a/homeassistant/components/unifi/sensor.py +++ b/homeassistant/components/unifi/sensor.py @@ -205,9 +205,9 @@ def async_client_is_connected_fn(hub: UnifiHub, obj_id: str) -> bool: @callback -def async_device_state_value_fn(hub: UnifiHub, device: Device) -> str: +def async_device_state_value_fn(hub: UnifiHub, device: Device) -> str | None: """Retrieve the state of the device.""" - return DEVICE_STATES[device.state] + return DEVICE_STATES.get(device.state) @callback diff --git a/homeassistant/components/unifi/strings.json b/homeassistant/components/unifi/strings.json index 1c7317c4267..f9315318d1e 100644 --- a/homeassistant/components/unifi/strings.json +++ b/homeassistant/components/unifi/strings.json @@ -33,6 +33,26 @@ "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, + "entity": { + "sensor": { + "device_state": { + "state": { + "disconnected": "[%key:common::state::disconnected%]", + "connected": "[%key:common::state::connected%]", + "pending": "Pending", + "firmware_mismatch": "Firmware mismatch", + "upgrading": "Upgrading", + "provisioning": "Provisioning", + "heartbeat_missed": "Heartbeat missed", + "adopting": "Adopting", + "deleting": "Deleting", + "inform_error": "Inform error", + "adoption_failed": "Adoption failed", + "isolated": "Isolated" + } + } + } + }, "options": { "abort": { "integration_not_setup": "UniFi integration is not set up" diff --git a/tests/components/unifi/snapshots/test_sensor.ambr b/tests/components/unifi/snapshots/test_sensor.ambr index fc86a57a294..e14658b2b96 100644 --- a/tests/components/unifi/snapshots/test_sensor.ambr +++ b/tests/components/unifi/snapshots/test_sensor.ambr @@ -55,19 +55,18 @@ 'area_id': None, 'capabilities': dict({ 'options': list([ - 'Disconnected', - 'Connected', - 'Pending', - 'Firmware Mismatch', - 'Upgrading', - 'Provisioning', - 'Heartbeat Missed', - 'Adopting', - 'Deleting', - 'Inform Error', - 'Adoption Failed', - 'Isolated', - 'Unknown', + 'disconnected', + 'connected', + 'pending', + 'firmware_mismatch', + 'upgrading', + 'provisioning', + 'heartbeat_missed', + 'adopting', + 'deleting', + 'inform_error', + 'adoption_failed', + 'isolated', ]), }), 'config_entry_id': , @@ -103,19 +102,18 @@ 'device_class': 'enum', 'friendly_name': 'Device State', 'options': list([ - 'Disconnected', - 'Connected', - 'Pending', - 'Firmware Mismatch', - 'Upgrading', - 'Provisioning', - 'Heartbeat Missed', - 'Adopting', - 'Deleting', - 'Inform Error', - 'Adoption Failed', - 'Isolated', - 'Unknown', + 'disconnected', + 'connected', + 'pending', + 'firmware_mismatch', + 'upgrading', + 'provisioning', + 'heartbeat_missed', + 'adopting', + 'deleting', + 'inform_error', + 'adoption_failed', + 'isolated', ]), }), 'context': , @@ -123,7 +121,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'Connected', + 'state': 'connected', }) # --- # name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_temperature-entry] @@ -536,19 
+534,18 @@ 'area_id': None, 'capabilities': dict({ 'options': list([ - 'Disconnected', - 'Connected', - 'Pending', - 'Firmware Mismatch', - 'Upgrading', - 'Provisioning', - 'Heartbeat Missed', - 'Adopting', - 'Deleting', - 'Inform Error', - 'Adoption Failed', - 'Isolated', - 'Unknown', + 'disconnected', + 'connected', + 'pending', + 'firmware_mismatch', + 'upgrading', + 'provisioning', + 'heartbeat_missed', + 'adopting', + 'deleting', + 'inform_error', + 'adoption_failed', + 'isolated', ]), }), 'config_entry_id': , @@ -584,19 +581,18 @@ 'device_class': 'enum', 'friendly_name': 'Dummy USP-PDU-Pro State', 'options': list([ - 'Disconnected', - 'Connected', - 'Pending', - 'Firmware Mismatch', - 'Upgrading', - 'Provisioning', - 'Heartbeat Missed', - 'Adopting', - 'Deleting', - 'Inform Error', - 'Adoption Failed', - 'Isolated', - 'Unknown', + 'disconnected', + 'connected', + 'pending', + 'firmware_mismatch', + 'upgrading', + 'provisioning', + 'heartbeat_missed', + 'adopting', + 'deleting', + 'inform_error', + 'adoption_failed', + 'isolated', ]), }), 'context': , @@ -604,7 +600,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'Connected', + 'state': 'connected', }) # --- # name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_uptime-entry] @@ -1601,19 +1597,18 @@ 'area_id': None, 'capabilities': dict({ 'options': list([ - 'Disconnected', - 'Connected', - 'Pending', - 'Firmware Mismatch', - 'Upgrading', - 'Provisioning', - 'Heartbeat Missed', - 'Adopting', - 'Deleting', - 'Inform Error', - 'Adoption Failed', - 'Isolated', - 'Unknown', + 'disconnected', + 'connected', + 'pending', + 'firmware_mismatch', + 'upgrading', + 'provisioning', + 'heartbeat_missed', + 'adopting', + 'deleting', + 'inform_error', + 'adoption_failed', + 'isolated', ]), }), 'config_entry_id': , @@ -1649,19 +1644,18 @@ 'device_class': 'enum', 'friendly_name': 'mock-name State', 'options': list([ - 'Disconnected', - 'Connected', - 'Pending', - 'Firmware Mismatch', - 'Upgrading', - 'Provisioning', - 'Heartbeat Missed', - 'Adopting', - 'Deleting', - 'Inform Error', - 'Adoption Failed', - 'Isolated', - 'Unknown', + 'disconnected', + 'connected', + 'pending', + 'firmware_mismatch', + 'upgrading', + 'provisioning', + 'heartbeat_missed', + 'adopting', + 'deleting', + 'inform_error', + 'adoption_failed', + 'isolated', ]), }), 'context': , @@ -1669,7 +1663,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'Connected', + 'state': 'connected', }) # --- # name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_uptime-entry] diff --git a/tests/components/unifi/test_sensor.py b/tests/components/unifi/test_sensor.py index 3c94d12018d..5e47d263079 100644 --- a/tests/components/unifi/test_sensor.py +++ b/tests/components/unifi/test_sensor.py @@ -30,6 +30,7 @@ from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY from homeassistant.const import ( ATTR_DEVICE_CLASS, STATE_UNAVAILABLE, + STATE_UNKNOWN, EntityCategory, Platform, ) @@ -891,7 +892,9 @@ async def test_device_state( for i in list(map(int, DeviceState)): device["state"] = i mock_websocket_message(message=MessageKey.DEVICE, data=device) - assert hass.states.get("sensor.device_state").state == DEVICE_STATES[i] + assert hass.states.get("sensor.device_state").state == DEVICE_STATES.get( + i, STATE_UNKNOWN + ) @pytest.mark.parametrize( From fce6d6246f85928281369b28de7369e4c8317234 Mon Sep 17 00:00:00 
2001 From: Philip Baylas Date: Wed, 18 Dec 2024 14:07:03 +0000 Subject: [PATCH 0825/1198] Change log level of connection failure to info (#132625) Co-authored-by: Franck Nijhof --- homeassistant/components/plex/server.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/homeassistant/components/plex/server.py b/homeassistant/components/plex/server.py index 0716b3606af..eab1d086d4c 100644 --- a/homeassistant/components/plex/server.py +++ b/homeassistant/components/plex/server.py @@ -425,9 +425,7 @@ class PlexServer: client = resource.connect(timeout=3) _LOGGER.debug("Resource connection successful to plex.tv: %s", client) except NotFound: - _LOGGER.error( - "Resource connection failed to plex.tv: %s", resource.name - ) + _LOGGER.info("Resource connection failed to plex.tv: %s", resource.name) else: client.proxyThroughServer(value=False, server=self._plex_server) self._client_device_cache[client.machineIdentifier] = client From 1e075cdac757115db8b2d0ae0444ea4a39112eca Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Wed, 18 Dec 2024 15:21:17 +0100 Subject: [PATCH 0826/1198] Add diagnostics to slide_local (#133488) --- .../components/slide_local/diagnostics.py | 27 +++++++++++++ .../components/slide_local/quality_scale.yaml | 2 +- .../snapshots/test_diagnostics.ambr | 39 +++++++++++++++++++ .../slide_local/test_diagnostics.py | 34 ++++++++++++++++ 4 files changed, 101 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/slide_local/diagnostics.py create mode 100644 tests/components/slide_local/snapshots/test_diagnostics.ambr create mode 100644 tests/components/slide_local/test_diagnostics.py diff --git a/homeassistant/components/slide_local/diagnostics.py b/homeassistant/components/slide_local/diagnostics.py new file mode 100644 index 00000000000..2655cb5fada --- /dev/null +++ b/homeassistant/components/slide_local/diagnostics.py @@ -0,0 +1,27 @@ +"""Provides diagnostics for slide_local.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.const import CONF_PASSWORD +from homeassistant.core import HomeAssistant + +from . 
import SlideConfigEntry + +TO_REDACT = [ + CONF_PASSWORD, +] + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, config_entry: SlideConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + data = config_entry.runtime_data.data + + return { + "config_entry": async_redact_data(config_entry.as_dict(), TO_REDACT), + "slide_data": data, + } diff --git a/homeassistant/components/slide_local/quality_scale.yaml b/homeassistant/components/slide_local/quality_scale.yaml index 4eda62f6497..887b90b6b11 100644 --- a/homeassistant/components/slide_local/quality_scale.yaml +++ b/homeassistant/components/slide_local/quality_scale.yaml @@ -43,7 +43,7 @@ rules: entity-disabled-by-default: done discovery: done stale-devices: todo - diagnostics: todo + diagnostics: done exception-translations: done icon-translations: todo reconfiguration-flow: todo diff --git a/tests/components/slide_local/snapshots/test_diagnostics.ambr b/tests/components/slide_local/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..63dab3f5a66 --- /dev/null +++ b/tests/components/slide_local/snapshots/test_diagnostics.ambr @@ -0,0 +1,39 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'config_entry': dict({ + 'data': dict({ + 'api_version': 2, + 'host': '127.0.0.2', + 'mac': '12:34:56:78:90:ab', + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'slide_local', + 'entry_id': 'ce5f5431554d101905d31797e1232da8', + 'minor_version': 1, + 'options': dict({ + 'invert_position': False, + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': 'slide', + 'unique_id': '12:34:56:78:90:ab', + 'version': 1, + }), + 'slide_data': dict({ + 'board_rev': 1, + 'calib_time': 10239, + 'curtain_type': 0, + 'device_name': 'slide bedroom', + 'mac': '1234567890ab', + 'pos': 0, + 'slide_id': 'slide_1234567890ab', + 'state': 'open', + 'touch_go': True, + 'zone_name': 'bedroom', + }), + }) +# --- diff --git a/tests/components/slide_local/test_diagnostics.py b/tests/components/slide_local/test_diagnostics.py new file mode 100644 index 00000000000..3e11af378c5 --- /dev/null +++ b/tests/components/slide_local/test_diagnostics.py @@ -0,0 +1,34 @@ +"""Test slide_local diagnostics.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from . 
import setup_platform + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_config_entry: MockConfigEntry, + mock_slide_api: AsyncMock, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + await setup_platform( + hass, mock_config_entry, [Platform.BUTTON, Platform.COVER, Platform.SWITCH] + ) + + result = await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) + + assert result == snapshot(exclude=props("created_at", "modified_at")) From c9f1829c0bf0b0cad5427614b40f1bc4aadd2c4f Mon Sep 17 00:00:00 2001 From: mkmer Date: Wed, 18 Dec 2024 09:27:40 -0500 Subject: [PATCH 0827/1198] Add (de)humidifier platform to Honeywell (#132287) Co-authored-by: Joost Lekkerkerker --- .../components/honeywell/__init__.py | 2 +- .../components/honeywell/humidifier.py | 136 ++++++++++++++++++ .../components/honeywell/strings.json | 8 ++ tests/components/honeywell/__init__.py | 2 +- tests/components/honeywell/conftest.py | 23 ++- .../honeywell/snapshots/test_humidity.ambr | 39 +++++ tests/components/honeywell/test_climate.py | 2 +- tests/components/honeywell/test_humidity.py | 110 ++++++++++++++ 8 files changed, 318 insertions(+), 4 deletions(-) create mode 100644 homeassistant/components/honeywell/humidifier.py create mode 100644 tests/components/honeywell/snapshots/test_humidity.ambr create mode 100644 tests/components/honeywell/test_humidity.py diff --git a/homeassistant/components/honeywell/__init__.py b/homeassistant/components/honeywell/__init__.py index a8ee5975914..eb89ba2a681 100644 --- a/homeassistant/components/honeywell/__init__.py +++ b/homeassistant/components/honeywell/__init__.py @@ -22,7 +22,7 @@ from .const import ( ) UPDATE_LOOP_SLEEP_TIME = 5 -PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.SWITCH] +PLATFORMS = [Platform.CLIMATE, Platform.HUMIDIFIER, Platform.SENSOR, Platform.SWITCH] MIGRATE_OPTIONS_KEYS = {CONF_COOL_AWAY_TEMPERATURE, CONF_HEAT_AWAY_TEMPERATURE} diff --git a/homeassistant/components/honeywell/humidifier.py b/homeassistant/components/honeywell/humidifier.py new file mode 100644 index 00000000000..e94ba465c30 --- /dev/null +++ b/homeassistant/components/honeywell/humidifier.py @@ -0,0 +1,136 @@ +"""Support for Honeywell (de)humidifiers.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from typing import Any + +from aiosomecomfort.device import Device + +from homeassistant.components.humidifier import ( + HumidifierDeviceClass, + HumidifierEntity, + HumidifierEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import HoneywellConfigEntry +from .const import DOMAIN + +HUMIDIFIER_KEY = "humidifier" +DEHUMIDIFIER_KEY = "dehumidifier" + + +@dataclass(frozen=True, kw_only=True) +class HoneywellHumidifierEntityDescription(HumidifierEntityDescription): + """Describes a Honeywell humidifier entity.""" + + current_humidity: Callable[[Device], Any] + current_set_humidity: Callable[[Device], Any] + max_humidity: Callable[[Device], Any] + min_humidity: Callable[[Device], Any] + set_humidity: Callable[[Device, Any], Any] + mode: Callable[[Device], Any] + off: Callable[[Device], Any] + on: Callable[[Device], Any] + + +HUMIDIFIERS: dict[str, HoneywellHumidifierEntityDescription] = { + "Humidifier": HoneywellHumidifierEntityDescription( + key=HUMIDIFIER_KEY, + translation_key=HUMIDIFIER_KEY, + current_humidity=lambda device: device.current_humidity, + set_humidity=lambda device, humidity: device.set_humidifier_setpoint(humidity), + min_humidity=lambda device: device.humidifier_lower_limit, + max_humidity=lambda device: device.humidifier_upper_limit, + current_set_humidity=lambda device: device.humidifier_setpoint, + mode=lambda device: device.humidifier_mode, + off=lambda device: device.set_humidifier_off(), + on=lambda device: device.set_humidifier_auto(), + device_class=HumidifierDeviceClass.HUMIDIFIER, + ), + "Dehumidifier": HoneywellHumidifierEntityDescription( + key=DEHUMIDIFIER_KEY, + translation_key=DEHUMIDIFIER_KEY, + current_humidity=lambda device: device.current_humidity, + set_humidity=lambda device, humidity: device.set_dehumidifier_setpoint( + humidity + ), + min_humidity=lambda device: device.dehumidifier_lower_limit, + max_humidity=lambda device: device.dehumidifier_upper_limit, + current_set_humidity=lambda device: device.dehumidifier_setpoint, + mode=lambda device: device.dehumidifier_mode, + off=lambda device: device.set_dehumidifier_off(), + on=lambda device: device.set_dehumidifier_auto(), + device_class=HumidifierDeviceClass.DEHUMIDIFIER, + ), +} + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: HoneywellConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Honeywell (de)humidifier dynamically.""" + data = config_entry.runtime_data + entities: list = [] + for device in data.devices.values(): + if device.has_humidifier: + entities.append(HoneywellHumidifier(device, HUMIDIFIERS["Humidifier"])) + if device.has_dehumidifier: + entities.append(HoneywellHumidifier(device, HUMIDIFIERS["Dehumidifier"])) + + async_add_entities(entities) + + +class HoneywellHumidifier(HumidifierEntity): + """Representation of a Honeywell US (De)Humidifier.""" + + entity_description: HoneywellHumidifierEntityDescription + _attr_has_entity_name = True + + def __init__( + self, device: Device, description: HoneywellHumidifierEntityDescription + ) -> None: + """Initialize the (De)Humidifier.""" + self._device = device + self.entity_description = description + self._attr_unique_id = f"{device.deviceid}_{description.key}" + self._attr_min_humidity = description.min_humidity(device) + self._attr_max_humidity = description.max_humidity(device) + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, device.deviceid)}, + name=device.name, + manufacturer="Honeywell", + ) + + @property + def is_on(self) -> bool: + """Return the device is on or off.""" + return self.entity_description.mode(self._device) != 0 + + @property + def target_humidity(self) -> int | None: + """Return the humidity we try to reach.""" + return self.entity_description.current_set_humidity(self._device) 
+ + @property + def current_humidity(self) -> int | None: + """Return the current humidity.""" + return self.entity_description.current_humidity(self._device) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the device on.""" + await self.entity_description.on(self._device) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the device off.""" + await self.entity_description.off(self._device) + + async def async_set_humidity(self, humidity: int) -> None: + """Set new target humidity.""" + await self.entity_description.set_humidity(self._device, humidity) diff --git a/homeassistant/components/honeywell/strings.json b/homeassistant/components/honeywell/strings.json index a64f1a6fce0..2538e7101a1 100644 --- a/homeassistant/components/honeywell/strings.json +++ b/homeassistant/components/honeywell/strings.json @@ -61,6 +61,14 @@ } } } + }, + "humidifier": { + "humidifier": { + "name": "[%key:component::humidifier::title%]" + }, + "dehumidifier": { + "name": "[%key:component::humidifier::entity_component::dehumidifier::name%]" + } } }, "exceptions": { diff --git a/tests/components/honeywell/__init__.py b/tests/components/honeywell/__init__.py index 98fcaa551bf..94022667e0e 100644 --- a/tests/components/honeywell/__init__.py +++ b/tests/components/honeywell/__init__.py @@ -1,4 +1,4 @@ -"""Tests for honeywell component.""" +"""Tests for Honeywell component.""" from unittest.mock import MagicMock diff --git a/tests/components/honeywell/conftest.py b/tests/components/honeywell/conftest.py index e48664db9ae..dd3341aa75c 100644 --- a/tests/components/honeywell/conftest.py +++ b/tests/components/honeywell/conftest.py @@ -127,7 +127,16 @@ def device(): mock_device.refresh = AsyncMock() mock_device.heat_away_temp = HEATAWAY mock_device.cool_away_temp = COOLAWAY - + mock_device.has_humidifier = False + mock_device.has_dehumidifier = False + mock_device.humidifier_upper_limit = 60 + mock_device.humidifier_lower_limit = 10 + mock_device.humidifier_setpoint = 20 + mock_device.dehumidifier_mode = 1 + mock_device.dehumidifier_upper_limit = 55 + mock_device.dehumidifier_lower_limit = 15 + mock_device.dehumidifier_setpoint = 30 + mock_device.dehumidifier_mode = 1 mock_device.raw_dr_data = {"CoolSetpLimit": None, "HeatSetpLimit": None} return mock_device @@ -149,6 +158,8 @@ def device_with_outdoor_sensor(): mock_device.temperature_unit = "C" mock_device.outdoor_temperature = OUTDOORTEMP mock_device.outdoor_humidity = OUTDOORHUMIDITY + mock_device.has_humidifier = False + mock_device.has_dehumidifier = False mock_device.raw_ui_data = { "SwitchOffAllowed": True, "SwitchAutoAllowed": True, @@ -188,6 +199,16 @@ def another_device(): mock_device.mac_address = "macaddress1" mock_device.outdoor_temperature = None mock_device.outdoor_humidity = None + mock_device.has_humidifier = False + mock_device.has_dehumidifier = False + mock_device.humidifier_upper_limit = 60 + mock_device.humidifier_lower_limit = 10 + mock_device.humidifier_setpoint = 20 + mock_device.dehumidifier_mode = 1 + mock_device.dehumidifier_upper_limit = 55 + mock_device.dehumidifier_lower_limit = 15 + mock_device.dehumidifier_setpoint = 30 + mock_device.dehumidifier_mode = 1 mock_device.raw_ui_data = { "SwitchOffAllowed": True, "SwitchAutoAllowed": True, diff --git a/tests/components/honeywell/snapshots/test_humidity.ambr b/tests/components/honeywell/snapshots/test_humidity.ambr new file mode 100644 index 00000000000..369167b8c1e --- /dev/null +++ b/tests/components/honeywell/snapshots/test_humidity.ambr @@ -0,0 +1,39 
@@ +# serializer version: 1 +# name: test_static_attributes[dehumidifier] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_humidity': 50, + 'device_class': 'dehumidifier', + 'friendly_name': 'device1 Dehumidifier', + 'humidity': 30, + 'max_humidity': 55, + 'min_humidity': 15, + 'supported_features': , + }), + 'context': , + 'entity_id': 'humidifier.device1_dehumidifier', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_static_attributes[humidifier] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_humidity': 50, + 'device_class': 'humidifier', + 'friendly_name': 'device1 Humidifier', + 'humidity': 20, + 'max_humidity': 60, + 'min_humidity': 10, + 'supported_features': , + }), + 'context': , + 'entity_id': 'humidifier.device1_humidifier', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/honeywell/test_climate.py b/tests/components/honeywell/test_climate.py index 73c5ff33dbc..57cdfaa9a23 100644 --- a/tests/components/honeywell/test_climate.py +++ b/tests/components/honeywell/test_climate.py @@ -1,4 +1,4 @@ -"""Test the Whirlpool Sixth Sense climate domain.""" +"""Test the Honeywell climate domain.""" import datetime from unittest.mock import MagicMock diff --git a/tests/components/honeywell/test_humidity.py b/tests/components/honeywell/test_humidity.py new file mode 100644 index 00000000000..2e1f8cec6aa --- /dev/null +++ b/tests/components/honeywell/test_humidity.py @@ -0,0 +1,110 @@ +"""Test the Honeywell humidity domain.""" + +from unittest.mock import MagicMock + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.humidifier import ( + ATTR_HUMIDITY, + DOMAIN as HUMIDIFIER_DOMAIN, + SERVICE_SET_HUMIDITY, +) +from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import init_integration
+
+
+async def test_humidifier_service_calls(
+    hass: HomeAssistant, device: MagicMock, config_entry: MagicMock
+) -> None:
+    """Test the humidifier service calls."""
+    device.has_humidifier = True
+    await init_integration(hass, config_entry)
+    entity_id = f"humidifier.{device.name}_humidifier"
+    assert hass.states.get(f"humidifier.{device.name}_dehumidifier") is None
+
+    await hass.services.async_call(
+        HUMIDIFIER_DOMAIN,
+        SERVICE_TURN_ON,
+        {ATTR_ENTITY_ID: entity_id},
+        blocking=True,
+    )
+    device.set_humidifier_auto.assert_called_once()
+
+    await hass.services.async_call(
+        HUMIDIFIER_DOMAIN,
+        SERVICE_TURN_OFF,
+        {ATTR_ENTITY_ID: entity_id},
+        blocking=True,
+    )
+    device.set_humidifier_off.assert_called_once()
+
+    await hass.services.async_call(
+        HUMIDIFIER_DOMAIN,
+        SERVICE_SET_HUMIDITY,
+        {ATTR_ENTITY_ID: entity_id, ATTR_HUMIDITY: 40},
+        blocking=True,
+    )
+    device.set_humidifier_setpoint.assert_called_once_with(40)
+
+
+async def test_dehumidifier_service_calls(
+    hass: HomeAssistant, device: MagicMock, config_entry: MagicMock
+) -> None:
+    """Test the dehumidifier service calls."""
+    device.has_dehumidifier = True
+    await init_integration(hass, config_entry)
+    entity_id = f"humidifier.{device.name}_dehumidifier"
+    assert hass.states.get(f"humidifier.{device.name}_humidifier") is None
+
+    await hass.services.async_call(
+        HUMIDIFIER_DOMAIN,
+        SERVICE_TURN_ON,
+        {ATTR_ENTITY_ID: entity_id},
+        blocking=True,
+    )
+    device.set_dehumidifier_auto.assert_called_once()
+
+    await hass.services.async_call(
+        HUMIDIFIER_DOMAIN,
+        SERVICE_TURN_OFF,
+        {ATTR_ENTITY_ID: entity_id},
+        blocking=True,
+    )
+    device.set_dehumidifier_off.assert_called_once()
+
+    await hass.services.async_call(
+        HUMIDIFIER_DOMAIN,
+        SERVICE_SET_HUMIDITY,
+        {ATTR_ENTITY_ID: entity_id, ATTR_HUMIDITY: 40},
+        blocking=True,
+    )
+    device.set_dehumidifier_setpoint.assert_called_once_with(40)
+
+
+async def test_static_attributes(
+    hass: HomeAssistant,
+    entity_registry: er.EntityRegistry,
+    device: MagicMock,
+    config_entry: MagicMock,
+    snapshot: SnapshotAssertion,
+) -> None:
+    """Test static humidifier attributes."""
+    device.has_dehumidifier = True
+    device.has_humidifier = True
+    await init_integration(hass, config_entry)
+
+    entity_id_dehumidifier = f"humidifier.{device.name}_dehumidifier"
+    entity_id_humidifier = f"humidifier.{device.name}_humidifier"
+    entry = entity_registry.async_get(entity_id_dehumidifier)
+    assert entry
+
+    state = hass.states.get(entity_id_dehumidifier)
+
+    assert state == snapshot(name="dehumidifier")
+
+    state = hass.states.get(entity_id_humidifier)
+
+    assert state == snapshot(name="humidifier")

From d6c201de4aa1825d02369535305f6620aa63eed8 Mon Sep 17 00:00:00 2001
From: dontinelli <73341522+dontinelli@users.noreply.github.com>
Date: Wed, 18 Dec 2024 15:33:11 +0100
Subject: [PATCH 0828/1198] Add exceptions and translations for slide_local (#133490)

---
 .../components/slide_local/button.py          | 24 ++++++++++-
 .../components/slide_local/quality_scale.yaml |  4 +-
 .../components/slide_local/strings.json       |  6 +++
 .../components/slide_local/switch.py          | 43 +++++++++++++++++--
 tests/components/slide_local/test_button.py   | 42 ++++++++++++++++++
 tests/components/slide_local/test_switch.py   | 42 ++++++++++++++++++
 6 files changed, 153 insertions(+), 8 deletions(-)

diff --git a/homeassistant/components/slide_local/button.py b/homeassistant/components/slide_local/button.py
index 
9c285881116..795cd4f1c2e 100644 --- a/homeassistant/components/slide_local/button.py +++ b/homeassistant/components/slide_local/button.py @@ -2,16 +2,25 @@ from __future__ import annotations +from goslideapi.goslideapi import ( + AuthenticationFailed, + ClientConnectionError, + ClientTimeoutError, + DigestAuthCalcError, +) + from homeassistant.components.button import ButtonEntity from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import SlideConfigEntry +from .const import DOMAIN from .coordinator import SlideCoordinator from .entity import SlideEntity -PARALLEL_UPDATES = 0 +PARALLEL_UPDATES = 1 async def async_setup_entry( @@ -39,4 +48,15 @@ class SlideButton(SlideEntity, ButtonEntity): async def async_press(self) -> None: """Send out a calibrate command.""" - await self.coordinator.slide.slide_calibrate(self.coordinator.host) + try: + await self.coordinator.slide.slide_calibrate(self.coordinator.host) + except ( + ClientConnectionError, + AuthenticationFailed, + ClientTimeoutError, + DigestAuthCalcError, + ) as ex: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="calibration_error", + ) from ex diff --git a/homeassistant/components/slide_local/quality_scale.yaml b/homeassistant/components/slide_local/quality_scale.yaml index 887b90b6b11..4833f19e2b2 100644 --- a/homeassistant/components/slide_local/quality_scale.yaml +++ b/homeassistant/components/slide_local/quality_scale.yaml @@ -25,9 +25,7 @@ rules: config-entry-unloading: done log-when-unavailable: done entity-unavailable: done - action-exceptions: - status: exempt - comment: No custom action. + action-exceptions: done reauthentication-flow: todo parallel-updates: done test-coverage: todo diff --git a/homeassistant/components/slide_local/strings.json b/homeassistant/components/slide_local/strings.json index 24c03d2ff96..6aeda9f92fd 100644 --- a/homeassistant/components/slide_local/strings.json +++ b/homeassistant/components/slide_local/strings.json @@ -54,6 +54,12 @@ } }, "exceptions": { + "calibration_error": { + "message": "Error while sending the calibration request to the device." + }, + "touchgo_error": { + "message": "Error while sending the request setting Touch&Go to {state} to the device." + }, "update_error": { "message": "Error while updating data from the API." } diff --git a/homeassistant/components/slide_local/switch.py b/homeassistant/components/slide_local/switch.py index 6d357864c48..f1c33f9a76f 100644 --- a/homeassistant/components/slide_local/switch.py +++ b/homeassistant/components/slide_local/switch.py @@ -4,16 +4,25 @@ from __future__ import annotations from typing import Any +from goslideapi.goslideapi import ( + AuthenticationFailed, + ClientConnectionError, + ClientTimeoutError, + DigestAuthCalcError, +) + from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import SlideConfigEntry +from .const import DOMAIN from .coordinator import SlideCoordinator from .entity import SlideEntity -PARALLEL_UPDATES = 0 +PARALLEL_UPDATES = 1 async def async_setup_entry( @@ -47,10 +56,38 @@ class SlideSwitch(SlideEntity, SwitchEntity): async def async_turn_off(self, **kwargs: Any) -> None: """Turn off touchgo.""" - await self.coordinator.slide.slide_set_touchgo(self.coordinator.host, False) + try: + await self.coordinator.slide.slide_set_touchgo(self.coordinator.host, False) + except ( + ClientConnectionError, + AuthenticationFailed, + ClientTimeoutError, + DigestAuthCalcError, + ) as ex: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="touchgo_error", + translation_placeholders={ + "state": "off", + }, + ) from ex await self.coordinator.async_request_refresh() async def async_turn_on(self, **kwargs: Any) -> None: """Turn on touchgo.""" - await self.coordinator.slide.slide_set_touchgo(self.coordinator.host, True) + try: + await self.coordinator.slide.slide_set_touchgo(self.coordinator.host, True) + except ( + ClientConnectionError, + AuthenticationFailed, + ClientTimeoutError, + DigestAuthCalcError, + ) as ex: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="touchgo_error", + translation_placeholders={ + "state": "on", + }, + ) from ex await self.coordinator.async_request_refresh() diff --git a/tests/components/slide_local/test_button.py b/tests/components/slide_local/test_button.py index 646c8fd7ef3..c232affbb99 100644 --- a/tests/components/slide_local/test_button.py +++ b/tests/components/slide_local/test_button.py @@ -2,11 +2,19 @@ from unittest.mock import AsyncMock +from goslideapi.goslideapi import ( + AuthenticationFailed, + ClientConnectionError, + ClientTimeoutError, + DigestAuthCalcError, +) +import pytest from syrupy import SnapshotAssertion from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from . 
import setup_platform @@ -44,3 +52,37 @@ async def test_pressing_button( blocking=True, ) mock_slide_api.slide_calibrate.assert_called_once() + + +@pytest.mark.parametrize( + ("exception"), + [ + ClientConnectionError, + ClientTimeoutError, + AuthenticationFailed, + DigestAuthCalcError, + ], +) +async def test_pressing_button_exception( + hass: HomeAssistant, + exception: Exception, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test pressing button.""" + await setup_platform(hass, mock_config_entry, [Platform.BUTTON]) + + mock_slide_api.slide_calibrate.side_effect = exception + + with pytest.raises( + HomeAssistantError, + match="Error while sending the calibration request to the device", + ): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: "button.slide_bedroom_calibrate", + }, + blocking=True, + ) diff --git a/tests/components/slide_local/test_switch.py b/tests/components/slide_local/test_switch.py index 0ac9820ca10..9d0d8274aa5 100644 --- a/tests/components/slide_local/test_switch.py +++ b/tests/components/slide_local/test_switch.py @@ -2,6 +2,12 @@ from unittest.mock import AsyncMock +from goslideapi.goslideapi import ( + AuthenticationFailed, + ClientConnectionError, + ClientTimeoutError, + DigestAuthCalcError, +) import pytest from syrupy import SnapshotAssertion @@ -13,6 +19,7 @@ from homeassistant.components.switch import ( ) from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from . import setup_platform @@ -59,3 +66,38 @@ async def test_services( blocking=True, ) mock_slide_api.slide_set_touchgo.assert_called_once() + + +@pytest.mark.parametrize( + ("exception", "service"), + [ + (ClientConnectionError, SERVICE_TURN_OFF), + (ClientTimeoutError, SERVICE_TURN_ON), + (AuthenticationFailed, SERVICE_TURN_OFF), + (DigestAuthCalcError, SERVICE_TURN_ON), + ], +) +async def test_service_exception( + hass: HomeAssistant, + exception: Exception, + service: str, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test pressing button.""" + await setup_platform(hass, mock_config_entry, [Platform.SWITCH]) + + mock_slide_api.slide_set_touchgo.side_effect = exception + + with pytest.raises( + HomeAssistantError, + match=f"Error while sending the request setting Touch&Go to {service[5:]} to the device", + ): + await hass.services.async_call( + SWITCH_DOMAIN, + service, + { + ATTR_ENTITY_ID: "switch.slide_bedroom_touchgo", + }, + blocking=True, + ) From f46e764982f9dc0b67b564f10055cc0a510ddeba Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Wed, 18 Dec 2024 10:06:48 -0500 Subject: [PATCH 0829/1198] Update quality scale for Russound RIO (#133093) --- .../russound_rio/quality_scale.yaml | 22 ++++++++++++------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/russound_rio/quality_scale.yaml b/homeassistant/components/russound_rio/quality_scale.yaml index 6edf439cae6..bd511802467 100644 --- a/homeassistant/components/russound_rio/quality_scale.yaml +++ b/homeassistant/components/russound_rio/quality_scale.yaml @@ -18,8 +18,8 @@ rules: comment: | This integration does not provide additional actions. 
docs-high-level-description: done - docs-installation-instructions: todo - docs-removal-instructions: todo + docs-installation-instructions: done + docs-removal-instructions: done entity-event-setup: done entity-unique-id: done has-entity-name: done @@ -40,7 +40,7 @@ rules: parallel-updates: done test-coverage: done integration-owner: done - docs-installation-parameters: todo + docs-installation-parameters: done docs-configuration-parameters: status: exempt comment: | @@ -61,17 +61,23 @@ rules: stale-devices: todo diagnostics: done exception-translations: done - icon-translations: todo + icon-translations: + status: exempt + comment: | + There are no entities that require icons. reconfiguration-flow: done dynamic-devices: todo discovery-update-info: todo repair-issues: done - docs-use-cases: todo + docs-use-cases: done docs-supported-devices: done docs-supported-functions: todo - docs-data-update: todo - docs-known-limitations: todo - docs-troubleshooting: todo + docs-data-update: done + docs-known-limitations: + status: exempt + comment: | + There are no known limitations beyond the push API delay noted in Troubleshooting. + docs-troubleshooting: done docs-examples: todo # Platinum From 2564533dae2319a790c002e736ee163f8634a26c Mon Sep 17 00:00:00 2001 From: Luke Lashley Date: Wed, 18 Dec 2024 10:22:39 -0500 Subject: [PATCH 0830/1198] Update Roborock to 2.8.1 (#133492) --- homeassistant/components/roborock/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/roborock/manifest.json b/homeassistant/components/roborock/manifest.json index c305e4710fc..69d867aa164 100644 --- a/homeassistant/components/roborock/manifest.json +++ b/homeassistant/components/roborock/manifest.json @@ -7,7 +7,7 @@ "iot_class": "local_polling", "loggers": ["roborock"], "requirements": [ - "python-roborock==2.7.2", + "python-roborock==2.8.1", "vacuum-map-parser-roborock==0.1.2" ] } diff --git a/requirements_all.txt b/requirements_all.txt index a6316379d8f..6336205eed3 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2415,7 +2415,7 @@ python-rabbitair==0.0.8 python-ripple-api==0.0.3 # homeassistant.components.roborock -python-roborock==2.7.2 +python-roborock==2.8.1 # homeassistant.components.smarttub python-smarttub==0.0.38 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 33e7327568e..e8e131a5bd5 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1942,7 +1942,7 @@ python-picnic-api==1.1.0 python-rabbitair==0.0.8 # homeassistant.components.roborock -python-roborock==2.7.2 +python-roborock==2.8.1 # homeassistant.components.smarttub python-smarttub==0.0.38 From a1558213c49871a955a3dec440cc1984b143615e Mon Sep 17 00:00:00 2001 From: Joakim Plate Date: Wed, 18 Dec 2024 16:53:15 +0100 Subject: [PATCH 0831/1198] =?UTF-8?q?Update=20fj=C3=A4r=C3=A5skupan=20to?= =?UTF-8?q?=202.3.1=20(#133493)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- homeassistant/components/fjaraskupan/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/fjaraskupan/manifest.json b/homeassistant/components/fjaraskupan/manifest.json index 91c74b68e01..cc368b3e92f 100644 --- a/homeassistant/components/fjaraskupan/manifest.json +++ b/homeassistant/components/fjaraskupan/manifest.json @@ -14,5 +14,5 @@ "documentation": 
"https://www.home-assistant.io/integrations/fjaraskupan", "iot_class": "local_polling", "loggers": ["bleak", "fjaraskupan"], - "requirements": ["fjaraskupan==2.3.0"] + "requirements": ["fjaraskupan==2.3.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 6336205eed3..47929f65916 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -915,7 +915,7 @@ fivem-api==0.1.2 fixerio==1.0.0a0 # homeassistant.components.fjaraskupan -fjaraskupan==2.3.0 +fjaraskupan==2.3.1 # homeassistant.components.flexit_bacnet flexit_bacnet==2.2.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index e8e131a5bd5..3b55231f898 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -774,7 +774,7 @@ fitbit==0.3.1 fivem-api==0.1.2 # homeassistant.components.fjaraskupan -fjaraskupan==2.3.0 +fjaraskupan==2.3.1 # homeassistant.components.flexit_bacnet flexit_bacnet==2.2.1 From 5516f3609d2f282a96a487fd9fee45e7d0329624 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 18 Dec 2024 17:35:11 +0100 Subject: [PATCH 0832/1198] Rename strategy backup to automatic backup (#133489) * Rename strategy backup to automatic backup * Update homeassistant/components/backup/config.py Co-authored-by: Martin Hjelmare --------- Co-authored-by: Martin Hjelmare --- homeassistant/components/backup/config.py | 38 +-- homeassistant/components/backup/manager.py | 52 ++-- homeassistant/components/backup/websocket.py | 12 +- .../backup/snapshots/test_backup.ambr | 22 +- .../backup/snapshots/test_websocket.ambr | 294 +++++++++--------- tests/components/backup/test_manager.py | 16 +- tests/components/backup/test_websocket.py | 210 ++++++------- tests/components/cloud/test_backup.py | 8 +- tests/components/hassio/test_backup.py | 4 +- tests/components/kitchen_sink/test_backup.py | 4 +- 10 files changed, 330 insertions(+), 330 deletions(-) diff --git a/homeassistant/components/backup/config.py b/homeassistant/components/backup/config.py index ef21dc81ee5..e8d740d2e13 100644 --- a/homeassistant/components/backup/config.py +++ b/homeassistant/components/backup/config.py @@ -33,8 +33,8 @@ class StoredBackupConfig(TypedDict): """Represent the stored backup config.""" create_backup: StoredCreateBackupConfig - last_attempted_strategy_backup: str | None - last_completed_strategy_backup: str | None + last_attempted_automatic_backup: str | None + last_completed_automatic_backup: str | None retention: StoredRetentionConfig schedule: StoredBackupSchedule @@ -44,8 +44,8 @@ class BackupConfigData: """Represent loaded backup config data.""" create_backup: CreateBackupConfig - last_attempted_strategy_backup: datetime | None = None - last_completed_strategy_backup: datetime | None = None + last_attempted_automatic_backup: datetime | None = None + last_completed_automatic_backup: datetime | None = None retention: RetentionConfig schedule: BackupSchedule @@ -59,12 +59,12 @@ class BackupConfigData: include_folders = None retention = data["retention"] - if last_attempted_str := data["last_attempted_strategy_backup"]: + if last_attempted_str := data["last_attempted_automatic_backup"]: last_attempted = dt_util.parse_datetime(last_attempted_str) else: last_attempted = None - if last_attempted_str := data["last_completed_strategy_backup"]: + if last_attempted_str := data["last_completed_automatic_backup"]: last_completed = dt_util.parse_datetime(last_attempted_str) else: last_completed = None @@ -79,8 +79,8 @@ class BackupConfigData: name=data["create_backup"]["name"], 
password=data["create_backup"]["password"], ), - last_attempted_strategy_backup=last_attempted, - last_completed_strategy_backup=last_completed, + last_attempted_automatic_backup=last_attempted, + last_completed_automatic_backup=last_completed, retention=RetentionConfig( copies=retention["copies"], days=retention["days"], @@ -90,20 +90,20 @@ class BackupConfigData: def to_dict(self) -> StoredBackupConfig: """Convert backup config data to a dict.""" - if self.last_attempted_strategy_backup: - last_attempted = self.last_attempted_strategy_backup.isoformat() + if self.last_attempted_automatic_backup: + last_attempted = self.last_attempted_automatic_backup.isoformat() else: last_attempted = None - if self.last_completed_strategy_backup: - last_completed = self.last_completed_strategy_backup.isoformat() + if self.last_completed_automatic_backup: + last_completed = self.last_completed_automatic_backup.isoformat() else: last_completed = None return StoredBackupConfig( create_backup=self.create_backup.to_dict(), - last_attempted_strategy_backup=last_attempted, - last_completed_strategy_backup=last_completed, + last_attempted_automatic_backup=last_attempted, + last_completed_automatic_backup=last_completed, retention=self.retention.to_dict(), schedule=self.schedule.to_dict(), ) @@ -286,7 +286,7 @@ class BackupSchedule: self._unschedule_next(manager) now = dt_util.now() if (cron_event := self.cron_event) is None: - seed_time = manager.config.data.last_completed_strategy_backup or now + seed_time = manager.config.data.last_completed_automatic_backup or now cron_event = self.cron_event = CronSim(cron_pattern, seed_time) next_time = next(cron_event) @@ -316,7 +316,7 @@ class BackupSchedule: include_homeassistant=True, # always include HA name=config_data.create_backup.name, password=config_data.create_backup.password, - with_strategy_settings=True, + with_automatic_settings=True, ) except Exception: # noqa: BLE001 # another more specific exception will be added @@ -404,14 +404,14 @@ async def _delete_filtered_backups( get_agent_errors, ) - # only delete backups that are created by the backup strategy + # only delete backups that are created with the saved automatic settings backups = { backup_id: backup for backup_id, backup in backups.items() - if backup.with_strategy_settings + if backup.with_automatic_settings } - LOGGER.debug("Total strategy backups: %s", backups) + LOGGER.debug("Total automatic backups: %s", backups) filtered_backups = backup_filter(backups) diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index a9bce8cb03d..e2c4f91730f 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -60,7 +60,7 @@ class ManagerBackup(AgentBackup): agent_ids: list[str] failed_agent_ids: list[str] - with_strategy_settings: bool | None + with_automatic_settings: bool | None @dataclass(frozen=True, kw_only=True, slots=True) @@ -445,10 +445,10 @@ class BackupManager: if (backup_id := agent_backup.backup_id) not in backups: if known_backup := self.known_backups.get(backup_id): failed_agent_ids = known_backup.failed_agent_ids - with_strategy_settings = known_backup.with_strategy_settings + with_automatic_settings = known_backup.with_automatic_settings else: failed_agent_ids = [] - with_strategy_settings = None + with_automatic_settings = None backups[backup_id] = ManagerBackup( agent_ids=[], addons=agent_backup.addons, @@ -462,7 +462,7 @@ class BackupManager: name=agent_backup.name, 
protected=agent_backup.protected, size=agent_backup.size, - with_strategy_settings=with_strategy_settings, + with_automatic_settings=with_automatic_settings, ) backups[backup_id].agent_ids.append(agent_ids[idx]) @@ -494,10 +494,10 @@ class BackupManager: if backup is None: if known_backup := self.known_backups.get(backup_id): failed_agent_ids = known_backup.failed_agent_ids - with_strategy_settings = known_backup.with_strategy_settings + with_automatic_settings = known_backup.with_automatic_settings else: failed_agent_ids = [] - with_strategy_settings = None + with_automatic_settings = None backup = ManagerBackup( agent_ids=[], addons=result.addons, @@ -511,7 +511,7 @@ class BackupManager: name=result.name, protected=result.protected, size=result.size, - with_strategy_settings=with_strategy_settings, + with_automatic_settings=with_automatic_settings, ) backup.agent_ids.append(agent_ids[idx]) @@ -611,7 +611,7 @@ class BackupManager: include_homeassistant: bool, name: str | None, password: str | None, - with_strategy_settings: bool = False, + with_automatic_settings: bool = False, ) -> NewBackup: """Create a backup.""" new_backup = await self.async_initiate_backup( @@ -623,7 +623,7 @@ class BackupManager: include_homeassistant=include_homeassistant, name=name, password=password, - with_strategy_settings=with_strategy_settings, + with_automatic_settings=with_automatic_settings, ) assert self._backup_finish_task await self._backup_finish_task @@ -640,14 +640,14 @@ class BackupManager: include_homeassistant: bool, name: str | None, password: str | None, - with_strategy_settings: bool = False, + with_automatic_settings: bool = False, ) -> NewBackup: """Initiate generating a backup.""" if self.state is not BackupManagerState.IDLE: raise HomeAssistantError(f"Backup manager busy: {self.state}") - if with_strategy_settings: - self.config.data.last_attempted_strategy_backup = dt_util.now() + if with_automatic_settings: + self.config.data.last_attempted_automatic_backup = dt_util.now() self.store.save() self.async_on_backup_event( @@ -663,7 +663,7 @@ class BackupManager: include_homeassistant=include_homeassistant, name=name, password=password, - with_strategy_settings=with_strategy_settings, + with_automatic_settings=with_automatic_settings, ) except Exception: self.async_on_backup_event( @@ -683,7 +683,7 @@ class BackupManager: include_homeassistant: bool, name: str | None, password: str | None, - with_strategy_settings: bool, + with_automatic_settings: bool, ) -> NewBackup: """Initiate generating a backup.""" if not agent_ids: @@ -708,13 +708,13 @@ class BackupManager: password=password, ) self._backup_finish_task = self.hass.async_create_task( - self._async_finish_backup(agent_ids, with_strategy_settings), + self._async_finish_backup(agent_ids, with_automatic_settings), name="backup_manager_finish_backup", ) return new_backup async def _async_finish_backup( - self, agent_ids: list[str], with_strategy_settings: bool + self, agent_ids: list[str], with_automatic_settings: bool ) -> None: if TYPE_CHECKING: assert self._backup_task is not None @@ -743,12 +743,12 @@ class BackupManager: open_stream=written_backup.open_stream, ) await written_backup.release_stream() - if with_strategy_settings: - # create backup was successful, update last_completed_strategy_backup - self.config.data.last_completed_strategy_backup = dt_util.now() + if with_automatic_settings: + # create backup was successful, update last_completed_automatic_backup + self.config.data.last_completed_automatic_backup = dt_util.now() 
self.store.save() self.known_backups.add( - written_backup.backup, agent_errors, with_strategy_settings + written_backup.backup, agent_errors, with_automatic_settings ) # delete old backups more numerous than copies @@ -870,7 +870,7 @@ class KnownBackups: backup["backup_id"]: KnownBackup( backup_id=backup["backup_id"], failed_agent_ids=backup["failed_agent_ids"], - with_strategy_settings=backup["with_strategy_settings"], + with_automatic_settings=backup["with_automatic_settings"], ) for backup in stored_backups } @@ -883,13 +883,13 @@ class KnownBackups: self, backup: AgentBackup, agent_errors: dict[str, Exception], - with_strategy_settings: bool, + with_automatic_settings: bool, ) -> None: """Add a backup.""" self._backups[backup.backup_id] = KnownBackup( backup_id=backup.backup_id, failed_agent_ids=list(agent_errors), - with_strategy_settings=with_strategy_settings, + with_automatic_settings=with_automatic_settings, ) self._manager.store.save() @@ -911,14 +911,14 @@ class KnownBackup: backup_id: str failed_agent_ids: list[str] - with_strategy_settings: bool + with_automatic_settings: bool def to_dict(self) -> StoredKnownBackup: """Convert known backup to a dict.""" return { "backup_id": self.backup_id, "failed_agent_ids": self.failed_agent_ids, - "with_strategy_settings": self.with_strategy_settings, + "with_automatic_settings": self.with_automatic_settings, } @@ -927,7 +927,7 @@ class StoredKnownBackup(TypedDict): backup_id: str failed_agent_ids: list[str] - with_strategy_settings: bool + with_automatic_settings: bool class CoreBackupReaderWriter(BackupReaderWriter): diff --git a/homeassistant/components/backup/websocket.py b/homeassistant/components/backup/websocket.py index 7dacc39f9ba..abe3d372be5 100644 --- a/homeassistant/components/backup/websocket.py +++ b/homeassistant/components/backup/websocket.py @@ -25,7 +25,7 @@ def async_register_websocket_handlers(hass: HomeAssistant, with_hassio: bool) -> websocket_api.async_register_command(hass, handle_details) websocket_api.async_register_command(hass, handle_info) websocket_api.async_register_command(hass, handle_create) - websocket_api.async_register_command(hass, handle_create_with_strategy_settings) + websocket_api.async_register_command(hass, handle_create_with_automatic_settings) websocket_api.async_register_command(hass, handle_delete) websocket_api.async_register_command(hass, handle_restore) websocket_api.async_register_command(hass, handle_subscribe_events) @@ -52,8 +52,8 @@ async def handle_info( agent_id: str(err) for agent_id, err in agent_errors.items() }, "backups": list(backups.values()), - "last_attempted_strategy_backup": manager.config.data.last_attempted_strategy_backup, - "last_completed_strategy_backup": manager.config.data.last_completed_strategy_backup, + "last_attempted_automatic_backup": manager.config.data.last_attempted_automatic_backup, + "last_completed_automatic_backup": manager.config.data.last_completed_automatic_backup, }, ) @@ -181,11 +181,11 @@ async def handle_create( @websocket_api.require_admin @websocket_api.websocket_command( { - vol.Required("type"): "backup/generate_with_strategy_settings", + vol.Required("type"): "backup/generate_with_automatic_settings", } ) @websocket_api.async_response -async def handle_create_with_strategy_settings( +async def handle_create_with_automatic_settings( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any], @@ -202,7 +202,7 @@ async def handle_create_with_strategy_settings( include_homeassistant=True, # always include HA 
name=config_data.create_backup.name, password=config_data.create_backup.password, - with_strategy_settings=True, + with_automatic_settings=True, ) connection.send_result(msg["id"], backup) diff --git a/tests/components/backup/snapshots/test_backup.ambr b/tests/components/backup/snapshots/test_backup.ambr index 9ef865955fe..8cbf34895f9 100644 --- a/tests/components/backup/snapshots/test_backup.ambr +++ b/tests/components/backup/snapshots/test_backup.ambr @@ -78,11 +78,11 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -110,8 +110,8 @@ }), 'backups': list([ ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -139,8 +139,8 @@ }), 'backups': list([ ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -168,8 +168,8 @@ }), 'backups': list([ ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -197,8 +197,8 @@ }), 'backups': list([ ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', diff --git a/tests/components/backup/snapshots/test_websocket.ambr b/tests/components/backup/snapshots/test_websocket.ambr index f43a7ed7a2c..58a5162b1bf 100644 --- a/tests/components/backup/snapshots/test_websocket.ambr +++ b/tests/components/backup/snapshots/test_websocket.ambr @@ -190,8 +190,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -225,8 +225,8 @@ 'name': 'test-name', 'password': 'test-password', }), - 'last_attempted_strategy_backup': '2024-10-26T04:45:00+01:00', - 'last_completed_strategy_backup': '2024-10-26T04:45:00+01:00', + 'last_attempted_automatic_backup': '2024-10-26T04:45:00+01:00', + 'last_completed_automatic_backup': '2024-10-26T04:45:00+01:00', 'retention': dict({ 'copies': 3, 'days': 7, @@ -256,8 +256,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': 3, 'days': None, @@ -287,8 +287,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': '2024-10-27T04:45:00+01:00', - 'last_completed_strategy_backup': '2024-10-26T04:45:00+01:00', + 'last_attempted_automatic_backup': '2024-10-27T04:45:00+01:00', + 'last_completed_automatic_backup': '2024-10-26T04:45:00+01:00', 'retention': dict({ 'copies': None, 'days': 7, @@ -318,8 +318,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 
'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -349,8 +349,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -379,8 +379,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -410,8 +410,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': 7, @@ -442,8 +442,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': 7, @@ -473,8 +473,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -504,8 +504,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': 7, @@ -536,8 +536,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': 7, @@ -567,8 +567,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -598,8 +598,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -630,8 +630,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -661,8 +661,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -692,8 +692,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -724,8 +724,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 
'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -755,8 +755,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -786,8 +786,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -818,8 +818,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -849,8 +849,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -884,8 +884,8 @@ 'name': 'test-name', 'password': 'test-password', }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -920,8 +920,8 @@ 'name': 'test-name', 'password': 'test-password', }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -951,8 +951,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -982,8 +982,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': 3, 'days': 7, @@ -1014,8 +1014,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': 3, 'days': 7, @@ -1045,8 +1045,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -1076,8 +1076,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -1108,8 +1108,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -1139,8 +1139,8 @@ 'name': 
None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -1170,8 +1170,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': 3, 'days': None, @@ -1202,8 +1202,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': 3, 'days': None, @@ -1233,8 +1233,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -1264,8 +1264,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': 7, @@ -1296,8 +1296,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': 7, @@ -1327,8 +1327,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -1358,8 +1358,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': 3, 'days': None, @@ -1390,8 +1390,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': 3, 'days': None, @@ -1421,8 +1421,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -1451,8 +1451,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -1474,8 +1474,8 @@ }), 'backups': list([ ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -1500,8 +1500,8 @@ }), 'backups': list([ ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -1539,11 +1539,11 @@ 
'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -1568,8 +1568,8 @@ }), 'backups': list([ ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -1607,11 +1607,11 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -1660,11 +1660,11 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -1697,11 +1697,11 @@ 'name': 'Test 2', 'protected': False, 'size': 1, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -1745,11 +1745,11 @@ 'name': 'Test 2', 'protected': False, 'size': 1, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -1788,11 +1788,11 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -1841,11 +1841,11 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -1895,11 +1895,11 @@ 'name': 'Test', 'protected': False, 'size': 13, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -1950,11 +1950,11 @@ 'name': 'Test', 'protected': False, 'size': 13, - 'with_strategy_settings': False, + 'with_automatic_settings': False, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -2003,11 +2003,11 @@ 'name': 'Test', 'protected': False, 'size': 13, - 'with_strategy_settings': 
None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -2056,11 +2056,11 @@ 'name': 'Test', 'protected': False, 'size': 13, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -2109,11 +2109,11 @@ 'name': 'Test', 'protected': False, 'size': 13, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -2163,11 +2163,11 @@ 'name': 'Test', 'protected': False, 'size': 13, - 'with_strategy_settings': False, + 'with_automatic_settings': False, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -2216,7 +2216,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), }), 'success': True, @@ -2254,7 +2254,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), }), 'success': True, @@ -2305,7 +2305,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), }), 'success': True, @@ -2344,7 +2344,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), }), 'success': True, @@ -2607,11 +2607,11 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -2649,11 +2649,11 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -2692,11 +2692,11 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -2729,7 +2729,7 @@ 'name': 'Test 2', 'protected': False, 'size': 1, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), dict({ 'addons': list([ @@ -2756,11 +2756,11 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 
'success': True, 'type': 'result', @@ -2799,11 +2799,11 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index 302f4e07011..a9b4674ad96 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -260,8 +260,8 @@ async def test_async_initiate_backup( assert result["result"] == { "backups": [], "agent_errors": {}, - "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": None, + "last_attempted_automatic_backup": None, + "last_completed_automatic_backup": None, } await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) @@ -424,8 +424,8 @@ async def test_async_initiate_backup_with_agent_error( assert result["result"] == { "backups": [], "agent_errors": {}, - "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": None, + "last_attempted_automatic_backup": None, + "last_completed_automatic_backup": None, } await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) @@ -496,7 +496,7 @@ async def test_async_initiate_backup_with_agent_error( "name": "Core 2025.1.0", "protected": False, "size": 123, - "with_strategy_settings": False, + "with_automatic_settings": False, } await ws_client.send_json_auto_id( @@ -513,8 +513,8 @@ async def test_async_initiate_backup_with_agent_error( assert result["result"] == { "agent_errors": {}, "backups": [expected_backup_data], - "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": None, + "last_attempted_automatic_backup": None, + "last_completed_automatic_backup": None, } await hass.async_block_till_done() @@ -522,7 +522,7 @@ async def test_async_initiate_backup_with_agent_error( { "backup_id": "abc123", "failed_agent_ids": ["test.remote"], - "with_strategy_settings": False, + "with_automatic_settings": False, } ] diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index 665512eca97..1a0e2cc1a81 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -55,8 +55,8 @@ DEFAULT_STORAGE_DATA: dict[str, Any] = { "name": None, "password": None, }, - "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": None, + "last_attempted_automatic_backup": None, + "last_completed_automatic_backup": None, "retention": { "copies": None, "days": None, @@ -276,7 +276,7 @@ async def test_delete( { "backup_id": "abc123", "failed_agent_ids": ["test.remote"], - "with_strategy_settings": False, + "with_automatic_settings": False, } ] }, @@ -487,7 +487,7 @@ async def test_generate_calls_create( "include_homeassistant": True, "name": None, "password": None, - "with_strategy_settings": True, + "with_automatic_settings": True, }, ), ( @@ -509,7 +509,7 @@ async def test_generate_calls_create( "include_homeassistant": True, "name": "test-name", "password": "test-password", - "with_strategy_settings": True, + "with_automatic_settings": True, }, ), ], @@ -522,7 +522,7 @@ async def test_generate_with_default_settings_calls_create( create_backup_settings: dict[str, Any], expected_call_params: dict[str, Any], ) -> None: - """Test backup/generate_with_strategy_settings calls 
async_initiate_backup.""" + """Test backup/generate_with_automatic_settings calls async_initiate_backup.""" await setup_backup_integration(hass, with_hassio=False) client = await hass_ws_client(hass) @@ -540,7 +540,7 @@ async def test_generate_with_default_settings_calls_create( return_value=NewBackup(backup_job_id="abc123"), ) as generate_backup: await client.send_json_auto_id( - {"type": "backup/generate_with_strategy_settings"} + {"type": "backup/generate_with_automatic_settings"} ) result = await client.receive_json() assert result["success"] @@ -780,8 +780,8 @@ async def test_agents_info( "password": "test-password", }, "retention": {"copies": 3, "days": 7}, - "last_attempted_strategy_backup": "2024-10-26T04:45:00+01:00", - "last_completed_strategy_backup": "2024-10-26T04:45:00+01:00", + "last_attempted_automatic_backup": "2024-10-26T04:45:00+01:00", + "last_completed_automatic_backup": "2024-10-26T04:45:00+01:00", "schedule": {"state": "daily"}, }, }, @@ -798,8 +798,8 @@ async def test_agents_info( "password": None, }, "retention": {"copies": 3, "days": None}, - "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": None, + "last_attempted_automatic_backup": None, + "last_completed_automatic_backup": None, "schedule": {"state": "never"}, }, }, @@ -816,8 +816,8 @@ async def test_agents_info( "password": None, }, "retention": {"copies": None, "days": 7}, - "last_attempted_strategy_backup": "2024-10-27T04:45:00+01:00", - "last_completed_strategy_backup": "2024-10-26T04:45:00+01:00", + "last_attempted_automatic_backup": "2024-10-27T04:45:00+01:00", + "last_completed_automatic_backup": "2024-10-26T04:45:00+01:00", "schedule": {"state": "never"}, }, }, @@ -834,8 +834,8 @@ async def test_agents_info( "password": None, }, "retention": {"copies": None, "days": None}, - "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": None, + "last_attempted_automatic_backup": None, + "last_completed_automatic_backup": None, "schedule": {"state": "mon"}, }, }, @@ -852,8 +852,8 @@ async def test_agents_info( "password": None, }, "retention": {"copies": None, "days": None}, - "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": None, + "last_attempted_automatic_backup": None, + "last_completed_automatic_backup": None, "schedule": {"state": "sat"}, }, }, @@ -1022,7 +1022,7 @@ async def test_config_update_errors( @pytest.mark.parametrize( ( "command", - "last_completed_strategy_backup", + "last_completed_automatic_backup", "time_1", "time_2", "attempted_backup_time", @@ -1154,7 +1154,7 @@ async def test_config_schedule_logic( hass_storage: dict[str, Any], create_backup: AsyncMock, command: dict[str, Any], - last_completed_strategy_backup: str, + last_completed_automatic_backup: str, time_1: str, time_2: str, attempted_backup_time: str, @@ -1179,8 +1179,8 @@ async def test_config_schedule_logic( "password": "test-password", }, "retention": {"copies": None, "days": None}, - "last_attempted_strategy_backup": last_completed_strategy_backup, - "last_completed_strategy_backup": last_completed_strategy_backup, + "last_attempted_automatic_backup": last_completed_automatic_backup, + "last_completed_automatic_backup": last_completed_automatic_backup, "schedule": {"state": "daily"}, }, } @@ -1210,11 +1210,11 @@ async def test_config_schedule_logic( async_fire_time_changed(hass, fire_all=True) # flush out storage save await hass.async_block_till_done() assert ( - hass_storage[DOMAIN]["data"]["config"]["last_attempted_strategy_backup"] + 
hass_storage[DOMAIN]["data"]["config"]["last_attempted_automatic_backup"] == attempted_backup_time ) assert ( - hass_storage[DOMAIN]["data"]["config"]["last_completed_strategy_backup"] + hass_storage[DOMAIN]["data"]["config"]["last_completed_automatic_backup"] == completed_backup_time ) @@ -1251,22 +1251,22 @@ async def test_config_schedule_logic( { "backup-1": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1290,22 +1290,22 @@ async def test_config_schedule_logic( { "backup-1": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1329,27 +1329,27 @@ async def test_config_schedule_logic( { "backup-1": MagicMock( date="2024-11-09T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-5": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1373,27 +1373,27 @@ async def test_config_schedule_logic( { "backup-1": MagicMock( date="2024-11-09T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-5": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1417,22 +1417,22 @@ async def test_config_schedule_logic( { "backup-1": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-12T04:45:00+01:00", - 
with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1456,22 +1456,22 @@ async def test_config_schedule_logic( { "backup-1": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1495,27 +1495,27 @@ async def test_config_schedule_logic( { "backup-1": MagicMock( date="2024-11-09T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-5": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1539,12 +1539,12 @@ async def test_config_schedule_logic( { "backup-1": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1595,8 +1595,8 @@ async def test_config_retention_copies_logic( "password": "test-password", }, "retention": {"copies": None, "days": None}, - "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": last_backup_time, + "last_attempted_automatic_backup": None, + "last_completed_automatic_backup": last_backup_time, "schedule": {"state": "daily"}, }, } @@ -1628,11 +1628,11 @@ async def test_config_retention_copies_logic( async_fire_time_changed(hass, fire_all=True) # flush out storage save await hass.async_block_till_done() assert ( - hass_storage[DOMAIN]["data"]["config"]["last_attempted_strategy_backup"] + hass_storage[DOMAIN]["data"]["config"]["last_attempted_automatic_backup"] == backup_time ) assert ( - hass_storage[DOMAIN]["data"]["config"]["last_completed_strategy_backup"] + hass_storage[DOMAIN]["data"]["config"]["last_completed_automatic_backup"] == backup_time ) @@ -1641,7 +1641,7 @@ async def test_config_retention_copies_logic( ("backup_command", "backup_time"), [ ( - {"type": "backup/generate_with_strategy_settings"}, + {"type": "backup/generate_with_automatic_settings"}, "2024-11-11T12:00:00+01:00", ), ( @@ -1672,22 +1672,22 @@ async def test_config_retention_copies_logic( { "backup-1": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( 
date="2024-11-12T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1708,22 +1708,22 @@ async def test_config_retention_copies_logic( { "backup-1": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1744,27 +1744,27 @@ async def test_config_retention_copies_logic( { "backup-1": MagicMock( date="2024-11-09T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-5": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1785,27 +1785,27 @@ async def test_config_retention_copies_logic( { "backup-1": MagicMock( date="2024-11-09T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-5": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1852,8 +1852,8 @@ async def test_config_retention_copies_logic_manual_backup( "password": "test-password", }, "retention": {"copies": None, "days": None}, - "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": None, + "last_attempted_automatic_backup": None, + "last_completed_automatic_backup": None, "schedule": {"state": "daily"}, }, } @@ -1889,11 +1889,11 @@ async def test_config_retention_copies_logic_manual_backup( async_fire_time_changed(hass, fire_all=True) # flush out storage save await hass.async_block_till_done() assert ( - hass_storage[DOMAIN]["data"]["config"]["last_attempted_strategy_backup"] + hass_storage[DOMAIN]["data"]["config"]["last_attempted_automatic_backup"] == backup_time ) assert ( - hass_storage[DOMAIN]["data"]["config"]["last_completed_strategy_backup"] + hass_storage[DOMAIN]["data"]["config"]["last_completed_automatic_backup"] == backup_time ) @@ -1922,17 +1922,17 @@ async def test_config_retention_copies_logic_manual_backup( { "backup-1": MagicMock( date="2024-11-10T04:45:00+01:00", - 
with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1955,17 +1955,17 @@ async def test_config_retention_copies_logic_manual_backup( { "backup-1": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1988,22 +1988,22 @@ async def test_config_retention_copies_logic_manual_backup( { "backup-1": MagicMock( date="2024-11-09T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -2026,17 +2026,17 @@ async def test_config_retention_copies_logic_manual_backup( { "backup-1": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -2059,17 +2059,17 @@ async def test_config_retention_copies_logic_manual_backup( { "backup-1": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -2092,22 +2092,22 @@ async def test_config_retention_copies_logic_manual_backup( { "backup-1": MagicMock( date="2024-11-09T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -2155,8 +2155,8 @@ async def test_config_retention_days_logic( "password": "test-password", }, "retention": {"copies": None, "days": None}, - "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": last_backup_time, + "last_attempted_automatic_backup": None, + 
"last_completed_automatic_backup": last_backup_time, "schedule": {"state": "never"}, }, } diff --git a/tests/components/cloud/test_backup.py b/tests/components/cloud/test_backup.py index bd8e80e0666..57c801e0d68 100644 --- a/tests/components/cloud/test_backup.py +++ b/tests/components/cloud/test_backup.py @@ -171,7 +171,7 @@ async def test_agents_list_backups( "size": 34519040, "agent_ids": ["cloud.cloud"], "failed_agent_ids": [], - "with_strategy_settings": None, + "with_automatic_settings": None, } ] @@ -195,8 +195,8 @@ async def test_agents_list_backups_fail_cloud( assert response["result"] == { "agent_errors": {"cloud.cloud": "Failed to list backups"}, "backups": [], - "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": None, + "last_attempted_automatic_backup": None, + "last_completed_automatic_backup": None, } @@ -218,7 +218,7 @@ async def test_agents_list_backups_fail_cloud( "size": 34519040, "agent_ids": ["cloud.cloud"], "failed_agent_ids": [], - "with_strategy_settings": None, + "with_automatic_settings": None, }, ), ( diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py index 45aa28c19d6..c342c006732 100644 --- a/tests/components/hassio/test_backup.py +++ b/tests/components/hassio/test_backup.py @@ -341,7 +341,7 @@ async def test_agent_info( "name": "Test", "protected": False, "size": 1048576, - "with_strategy_settings": None, + "with_automatic_settings": None, }, ), ( @@ -362,7 +362,7 @@ async def test_agent_info( "name": "Test", "protected": False, "size": 1048576, - "with_strategy_settings": None, + "with_automatic_settings": None, }, ), ], diff --git a/tests/components/kitchen_sink/test_backup.py b/tests/components/kitchen_sink/test_backup.py index 81876b5c3d1..25ae2e3a2f6 100644 --- a/tests/components/kitchen_sink/test_backup.py +++ b/tests/components/kitchen_sink/test_backup.py @@ -104,7 +104,7 @@ async def test_agents_list_backups( "name": "Kitchen sink syncer", "protected": False, "size": 1234, - "with_strategy_settings": None, + "with_automatic_settings": None, } ] @@ -183,7 +183,7 @@ async def test_agents_upload( "name": "Test", "protected": False, "size": 0.0, - "with_strategy_settings": False, + "with_automatic_settings": False, } From a6089b497a908ce6d0e18a92de5ef0cc5807457d Mon Sep 17 00:00:00 2001 From: Joakim Plate Date: Wed, 18 Dec 2024 18:03:27 +0100 Subject: [PATCH 0833/1198] =?UTF-8?q?Update=20fj=C3=A4r=C3=A5skupan=20to?= =?UTF-8?q?=202.3.2=20(#133499)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- homeassistant/components/fjaraskupan/light.py | 3 --- homeassistant/components/fjaraskupan/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 3 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/fjaraskupan/light.py b/homeassistant/components/fjaraskupan/light.py index b33904c805d..f0083591d4d 100644 --- a/homeassistant/components/fjaraskupan/light.py +++ b/homeassistant/components/fjaraskupan/light.py @@ -4,8 +4,6 @@ from __future__ import annotations from typing import Any -from fjaraskupan import COMMAND_LIGHT_ON_OFF - from homeassistant.components.light import ATTR_BRIGHTNESS, ColorMode, LightEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -62,7 +60,6 @@ class Light(CoordinatorEntity[FjaraskupanCoordinator], LightEntity): if self.is_on: async with self.coordinator.async_connect_and_update() as device: await device.send_dim(0) - 
await device.send_command(COMMAND_LIGHT_ON_OFF) @property def is_on(self) -> bool: diff --git a/homeassistant/components/fjaraskupan/manifest.json b/homeassistant/components/fjaraskupan/manifest.json index cc368b3e92f..2fd49aac5ee 100644 --- a/homeassistant/components/fjaraskupan/manifest.json +++ b/homeassistant/components/fjaraskupan/manifest.json @@ -14,5 +14,5 @@ "documentation": "https://www.home-assistant.io/integrations/fjaraskupan", "iot_class": "local_polling", "loggers": ["bleak", "fjaraskupan"], - "requirements": ["fjaraskupan==2.3.1"] + "requirements": ["fjaraskupan==2.3.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 47929f65916..af2457b8d88 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -915,7 +915,7 @@ fivem-api==0.1.2 fixerio==1.0.0a0 # homeassistant.components.fjaraskupan -fjaraskupan==2.3.1 +fjaraskupan==2.3.2 # homeassistant.components.flexit_bacnet flexit_bacnet==2.2.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3b55231f898..f7f79ed6200 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -774,7 +774,7 @@ fitbit==0.3.1 fivem-api==0.1.2 # homeassistant.components.fjaraskupan -fjaraskupan==2.3.1 +fjaraskupan==2.3.2 # homeassistant.components.flexit_bacnet flexit_bacnet==2.2.1 From 920de9060398829a1d3d4f13cd817ea7392a6976 Mon Sep 17 00:00:00 2001 From: peteS-UK <64092177+peteS-UK@users.noreply.github.com> Date: Wed, 18 Dec 2024 17:22:22 +0000 Subject: [PATCH 0834/1198] Increase Squeezebox config_flow test coverage to 100% (#133484) Co-authored-by: Joost Lekkerkerker --- .../components/squeezebox/test_config_flow.py | 61 +++++++++++++++++++ 1 file changed, 61 insertions(+) diff --git a/tests/components/squeezebox/test_config_flow.py b/tests/components/squeezebox/test_config_flow.py index 0a03bcc291c..f2c9636c470 100644 --- a/tests/components/squeezebox/test_config_flow.py +++ b/tests/components/squeezebox/test_config_flow.py @@ -166,6 +166,67 @@ async def test_form_invalid_auth(hass: HomeAssistant) -> None: assert result["errors"] == {"base": "invalid_auth"} +async def test_form_validate_exception(hass: HomeAssistant) -> None: + """Test we handle exception.""" + + with ( + patch( + "pysqueezebox.Server.async_query", + return_value={"uuid": UUID}, + ), + patch( + "homeassistant.components.squeezebox.async_setup_entry", + return_value=True, + ), + patch( + "homeassistant.components.squeezebox.config_flow.async_discover", + mock_discover, + ), + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "edit" + + with patch( + "homeassistant.components.squeezebox.config_flow.Server.async_query", + side_effect=Exception, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PORT: PORT, + CONF_USERNAME: "", + CONF_PASSWORD: "", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "unknown"} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PORT: PORT, + CONF_USERNAME: "", + CONF_PASSWORD: "", + CONF_HTTPS: False, + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == HOST + assert result["data"] == { + CONF_HOST: HOST, + CONF_PORT: PORT, + CONF_USERNAME: "", + CONF_PASSWORD: "", + CONF_HTTPS: False, + } + + async def test_form_cannot_connect(hass: 
HomeAssistant) -> None: """Test we handle cannot connect error.""" result = await hass.config_entries.flow.async_init( From fc622e398f288cadecd8eccdfb6e57001ef1c8f5 Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Wed, 18 Dec 2024 18:24:12 +0100 Subject: [PATCH 0835/1198] add exception translation to enphase_envoy (#132483) --- .../components/enphase_envoy/__init__.py | 9 +++++++-- .../components/enphase_envoy/coordinator.py | 20 ++++++++++++++++--- .../enphase_envoy/quality_scale.yaml | 4 +--- .../components/enphase_envoy/strings.json | 11 ++++++++++ 4 files changed, 36 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/enphase_envoy/__init__.py b/homeassistant/components/enphase_envoy/__init__.py index db36cab1288..f4fe4aff2cb 100644 --- a/homeassistant/components/enphase_envoy/__init__.py +++ b/homeassistant/components/enphase_envoy/__init__.py @@ -51,8 +51,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: EnphaseConfigEntry) -> b # wait for the next discovery to find the device at its new address # and update the config entry so we do not mix up devices. raise ConfigEntryNotReady( - f"Unexpected device found at {host}; expected {entry.unique_id}, " - f"found {envoy.serial_number}" + translation_domain=DOMAIN, + translation_key="unexpected_device", + translation_placeholders={ + "host": host, + "expected_serial": str(entry.unique_id), + "actual_serial": str(envoy.serial_number), + }, ) entry.runtime_data = coordinator diff --git a/homeassistant/components/enphase_envoy/coordinator.py b/homeassistant/components/enphase_envoy/coordinator.py index 00bc7666f78..386661402de 100644 --- a/homeassistant/components/enphase_envoy/coordinator.py +++ b/homeassistant/components/enphase_envoy/coordinator.py @@ -18,7 +18,7 @@ from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed import homeassistant.util.dt as dt_util -from .const import INVALID_AUTH_ERRORS +from .const import DOMAIN, INVALID_AUTH_ERRORS SCAN_INTERVAL = timedelta(seconds=60) @@ -158,9 +158,23 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): # token likely expired or firmware changed, try to re-authenticate self._setup_complete = False continue - raise ConfigEntryAuthFailed from err + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="authentication_error", + translation_placeholders={ + "host": envoy.host, + "args": err.args[0], + }, + ) from err except EnvoyError as err: - raise UpdateFailed(f"Error communicating with API: {err}") from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="envoy_error", + translation_placeholders={ + "host": envoy.host, + "args": err.args[0], + }, + ) from err # if we have a firmware version from previous setup, compare to current one # when envoy gets new firmware there will be an authentication failure diff --git a/homeassistant/components/enphase_envoy/quality_scale.yaml b/homeassistant/components/enphase_envoy/quality_scale.yaml index c4077b8df67..4b83c2886f7 100644 --- a/homeassistant/components/enphase_envoy/quality_scale.yaml +++ b/homeassistant/components/enphase_envoy/quality_scale.yaml @@ -108,9 +108,7 @@ rules: entity-device-class: done entity-disabled-by-default: done entity-translations: done - exception-translations: - status: todo - comment: pending https://github.com/home-assistant/core/pull/132483 + exception-translations: done 
icon-translations: todo reconfiguration-flow: done repair-issues: diff --git a/homeassistant/components/enphase_envoy/strings.json b/homeassistant/components/enphase_envoy/strings.json index a338deb9638..a78d0bc032a 100644 --- a/homeassistant/components/enphase_envoy/strings.json +++ b/homeassistant/components/enphase_envoy/strings.json @@ -371,5 +371,16 @@ "name": "Grid enabled" } } + }, + "exceptions": { + "unexpected_device": { + "message": "Unexpected Envoy serial-number found at {host}; expected {expected_serial}, found {actual_serial}" + }, + "authentication_error": { + "message": "Envoy authentication failure on {host}: {args}" + }, + "envoy_error": { + "message": "Error communicating with Envoy API on {host}: {args}" + } } } From 51d63ba50872331a8a8aff2f86695ba37e897aca Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 18 Dec 2024 18:30:46 +0100 Subject: [PATCH 0836/1198] Store automatic backup flag in backup metadata (#133500) --- homeassistant/components/backup/manager.py | 52 +++++++++++++------ homeassistant/components/backup/models.py | 8 +++ homeassistant/components/backup/util.py | 1 + homeassistant/components/backup/websocket.py | 4 +- homeassistant/components/hassio/backup.py | 3 ++ .../components/kitchen_sink/backup.py | 1 + tests/components/backup/common.py | 5 +- .../backup/snapshots/test_backup.ambr | 2 +- .../backup/snapshots/test_websocket.ambr | 32 ++++++------ tests/components/backup/test_manager.py | 51 ++++++++++-------- tests/components/backup/test_websocket.py | 2 +- tests/components/cloud/test_backup.py | 5 ++ tests/components/hassio/test_backup.py | 8 ++- tests/components/kitchen_sink/test_backup.py | 5 ++ 14 files changed, 120 insertions(+), 59 deletions(-) diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index e2c4f91730f..99373b1942a 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -23,7 +23,7 @@ from homeassistant.backup_restore import RESTORE_BACKUP_FILE, password_to_key from homeassistant.const import __version__ as HAVERSION from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import integration_platform +from homeassistant.helpers import instance_id, integration_platform from homeassistant.helpers.json import json_bytes from homeassistant.util import dt as dt_util @@ -200,6 +200,7 @@ class BackupReaderWriter(abc.ABC): *, agent_ids: list[str], backup_name: str, + extra_metadata: dict[str, bool | str], include_addons: list[str] | None, include_all_addons: bool, include_database: bool, @@ -445,16 +446,18 @@ class BackupManager: if (backup_id := agent_backup.backup_id) not in backups: if known_backup := self.known_backups.get(backup_id): failed_agent_ids = known_backup.failed_agent_ids - with_automatic_settings = known_backup.with_automatic_settings else: failed_agent_ids = [] - with_automatic_settings = None + with_automatic_settings = self.is_our_automatic_backup( + agent_backup, await instance_id.async_get(self.hass) + ) backups[backup_id] = ManagerBackup( agent_ids=[], addons=agent_backup.addons, backup_id=backup_id, date=agent_backup.date, database_included=agent_backup.database_included, + extra_metadata=agent_backup.extra_metadata, failed_agent_ids=failed_agent_ids, folders=agent_backup.folders, homeassistant_included=agent_backup.homeassistant_included, @@ -494,16 +497,18 @@ class BackupManager: if backup is None: if known_backup := 
self.known_backups.get(backup_id): failed_agent_ids = known_backup.failed_agent_ids - with_automatic_settings = known_backup.with_automatic_settings else: failed_agent_ids = [] - with_automatic_settings = None + with_automatic_settings = self.is_our_automatic_backup( + result, await instance_id.async_get(self.hass) + ) backup = ManagerBackup( agent_ids=[], addons=result.addons, backup_id=result.backup_id, date=result.date, database_included=result.database_included, + extra_metadata=result.extra_metadata, failed_agent_ids=failed_agent_ids, folders=result.folders, homeassistant_included=result.homeassistant_included, @@ -517,6 +522,22 @@ class BackupManager: return (backup, agent_errors) + @staticmethod + def is_our_automatic_backup( + backup: AgentBackup, our_instance_id: str + ) -> bool | None: + """Check if a backup was created by us and return automatic_settings flag. + + Returns `None` if the backup was not created by us, or if the + automatic_settings flag is not a boolean. + """ + if backup.extra_metadata.get("instance_id") != our_instance_id: + return None + with_automatic_settings = backup.extra_metadata.get("with_automatic_settings") + if not isinstance(with_automatic_settings, bool): + return None + return with_automatic_settings + async def async_delete_backup(self, backup_id: str) -> dict[str, Exception]: """Delete a backup.""" agent_errors: dict[str, Exception] = {} @@ -598,7 +619,7 @@ class BackupManager: open_stream=written_backup.open_stream, ) await written_backup.release_stream() - self.known_backups.add(written_backup.backup, agent_errors, False) + self.known_backups.add(written_backup.backup, agent_errors) async def async_create_backup( self, @@ -699,6 +720,10 @@ class BackupManager: new_backup, self._backup_task = await self._reader_writer.async_create_backup( agent_ids=agent_ids, backup_name=backup_name, + extra_metadata={ + "instance_id": await instance_id.async_get(self.hass), + "with_automatic_settings": with_automatic_settings, + }, include_addons=include_addons, include_all_addons=include_all_addons, include_database=include_database, @@ -747,9 +772,7 @@ class BackupManager: # create backup was successful, update last_completed_automatic_backup self.config.data.last_completed_automatic_backup = dt_util.now() self.store.save() - self.known_backups.add( - written_backup.backup, agent_errors, with_automatic_settings - ) + self.known_backups.add(written_backup.backup, agent_errors) # delete old backups more numerous than copies await delete_backups_exceeding_configured_count(self) @@ -870,7 +893,6 @@ class KnownBackups: backup["backup_id"]: KnownBackup( backup_id=backup["backup_id"], failed_agent_ids=backup["failed_agent_ids"], - with_automatic_settings=backup["with_automatic_settings"], ) for backup in stored_backups } @@ -883,13 +905,11 @@ class KnownBackups: self, backup: AgentBackup, agent_errors: dict[str, Exception], - with_automatic_settings: bool, ) -> None: """Add a backup.""" self._backups[backup.backup_id] = KnownBackup( backup_id=backup.backup_id, failed_agent_ids=list(agent_errors), - with_automatic_settings=with_automatic_settings, ) self._manager.store.save() @@ -911,14 +931,12 @@ class KnownBackup: backup_id: str failed_agent_ids: list[str] - with_automatic_settings: bool def to_dict(self) -> StoredKnownBackup: """Convert known backup to a dict.""" return { "backup_id": self.backup_id, "failed_agent_ids": self.failed_agent_ids, - "with_automatic_settings": self.with_automatic_settings, } @@ -927,7 +945,6 @@ class StoredKnownBackup(TypedDict): 
backup_id: str failed_agent_ids: list[str] - with_automatic_settings: bool class CoreBackupReaderWriter(BackupReaderWriter): @@ -945,6 +962,7 @@ class CoreBackupReaderWriter(BackupReaderWriter): *, agent_ids: list[str], backup_name: str, + extra_metadata: dict[str, bool | str], include_addons: list[str] | None, include_all_addons: bool, include_database: bool, @@ -969,6 +987,7 @@ class CoreBackupReaderWriter(BackupReaderWriter): agent_ids=agent_ids, backup_id=backup_id, backup_name=backup_name, + extra_metadata=extra_metadata, include_database=include_database, date_str=date_str, on_progress=on_progress, @@ -987,6 +1006,7 @@ class CoreBackupReaderWriter(BackupReaderWriter): backup_id: str, backup_name: str, date_str: str, + extra_metadata: dict[str, bool | str], include_database: bool, on_progress: Callable[[ManagerStateEvent], None], password: str | None, @@ -1012,6 +1032,7 @@ class CoreBackupReaderWriter(BackupReaderWriter): backup_data = { "compressed": True, "date": date_str, + "extra": extra_metadata, "homeassistant": { "exclude_database": not include_database, "version": HAVERSION, @@ -1035,6 +1056,7 @@ class CoreBackupReaderWriter(BackupReaderWriter): backup_id=backup_id, database_included=include_database, date=date_str, + extra_metadata=extra_metadata, folders=[], homeassistant_included=True, homeassistant_version=HAVERSION, diff --git a/homeassistant/components/backup/models.py b/homeassistant/components/backup/models.py index 6306d9f1fec..a937933f04c 100644 --- a/homeassistant/components/backup/models.py +++ b/homeassistant/components/backup/models.py @@ -33,6 +33,7 @@ class AgentBackup: backup_id: str date: str database_included: bool + extra_metadata: dict[str, bool | str] folders: list[Folder] homeassistant_included: bool homeassistant_version: str | None # None if homeassistant_included is False @@ -44,6 +45,12 @@ class AgentBackup: """Return a dict representation of this backup.""" return asdict(self) + def as_frontend_json(self) -> dict: + """Return a dict representation of this backup for sending to frontend.""" + return { + key: val for key, val in asdict(self).items() if key != "extra_metadata" + } + @classmethod def from_dict(cls, data: dict[str, Any]) -> Self: """Create an instance from a JSON serialization.""" @@ -52,6 +59,7 @@ class AgentBackup: backup_id=data["backup_id"], date=data["date"], database_included=data["database_included"], + extra_metadata=data["extra_metadata"], folders=[Folder(folder) for folder in data["folders"]], homeassistant_included=data["homeassistant_included"], homeassistant_version=data["homeassistant_version"], diff --git a/homeassistant/components/backup/util.py b/homeassistant/components/backup/util.py index 1d8252cc30b..bb01a9a4e3f 100644 --- a/homeassistant/components/backup/util.py +++ b/homeassistant/components/backup/util.py @@ -60,6 +60,7 @@ def read_backup(backup_path: Path) -> AgentBackup: backup_id=cast(str, data["slug"]), database_included=database_included, date=cast(str, data["date"]), + extra_metadata=cast(dict[str, bool | str], data.get("metadata", {})), folders=folders, homeassistant_included=homeassistant_included, homeassistant_version=homeassistant_version, diff --git a/homeassistant/components/backup/websocket.py b/homeassistant/components/backup/websocket.py index abe3d372be5..2fee84e39bb 100644 --- a/homeassistant/components/backup/websocket.py +++ b/homeassistant/components/backup/websocket.py @@ -51,7 +51,7 @@ async def handle_info( "agent_errors": { agent_id: str(err) for agent_id, err in agent_errors.items() }, - 
"backups": list(backups.values()), + "backups": [backup.as_frontend_json() for backup in backups.values()], "last_attempted_automatic_backup": manager.config.data.last_attempted_automatic_backup, "last_completed_automatic_backup": manager.config.data.last_completed_automatic_backup, }, @@ -81,7 +81,7 @@ async def handle_details( "agent_errors": { agent_id: str(err) for agent_id, err in agent_errors.items() }, - "backup": backup, + "backup": backup.as_frontend_json() if backup else None, }, ) diff --git a/homeassistant/components/hassio/backup.py b/homeassistant/components/hassio/backup.py index 4bc6dff44d2..1b7cf930588 100644 --- a/homeassistant/components/hassio/backup.py +++ b/homeassistant/components/hassio/backup.py @@ -104,6 +104,7 @@ def _backup_details_to_agent_backup( backup_id=details.slug, database_included=database_included, date=details.date.isoformat(), + extra_metadata=details.extra or {}, folders=[Folder(folder) for folder in details.folders], homeassistant_included=homeassistant_included, homeassistant_version=details.homeassistant, @@ -202,6 +203,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): *, agent_ids: list[str], backup_name: str, + extra_metadata: dict[str, bool | str], include_addons: list[str] | None, include_all_addons: bool, include_database: bool, @@ -242,6 +244,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): location=locations or LOCATION_CLOUD_BACKUP, homeassistant_exclude_database=not include_database, background=True, + extra=extra_metadata, ) ) backup_task = self._hass.async_create_task( diff --git a/homeassistant/components/kitchen_sink/backup.py b/homeassistant/components/kitchen_sink/backup.py index 615364f55ee..c4a045aeefc 100644 --- a/homeassistant/components/kitchen_sink/backup.py +++ b/homeassistant/components/kitchen_sink/backup.py @@ -58,6 +58,7 @@ class KitchenSinkBackupAgent(BackupAgent): backup_id="abc123", database_included=False, date="1970-01-01T00:00:00Z", + extra_metadata={}, folders=[Folder.MEDIA, Folder.SHARE], homeassistant_included=True, homeassistant_version="2024.12.0", diff --git a/tests/components/backup/common.py b/tests/components/backup/common.py index b06b8a5ef5d..ffecd1c4186 100644 --- a/tests/components/backup/common.py +++ b/tests/components/backup/common.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import AsyncIterator, Callable, Coroutine from pathlib import Path from typing import Any -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import ANY, AsyncMock, Mock, patch from homeassistant.components.backup import ( DOMAIN, @@ -29,6 +29,7 @@ TEST_BACKUP_ABC123 = AgentBackup( backup_id="abc123", database_included=True, date="1970-01-01T00:00:00.000Z", + extra_metadata={"instance_id": ANY, "with_automatic_settings": True}, folders=[Folder.MEDIA, Folder.SHARE], homeassistant_included=True, homeassistant_version="2024.12.0", @@ -43,6 +44,7 @@ TEST_BACKUP_DEF456 = AgentBackup( backup_id="def456", database_included=False, date="1980-01-01T00:00:00.000Z", + extra_metadata={"instance_id": "unknown_uuid", "with_automatic_settings": True}, folders=[Folder.MEDIA, Folder.SHARE], homeassistant_included=True, homeassistant_version="2024.12.0", @@ -69,6 +71,7 @@ class BackupAgentTest(BackupAgent): backup_id="abc123", database_included=True, date="1970-01-01T00:00:00Z", + extra_metadata={}, folders=[Folder.MEDIA, Folder.SHARE], homeassistant_included=True, homeassistant_version="2024.12.0", diff --git a/tests/components/backup/snapshots/test_backup.ambr 
b/tests/components/backup/snapshots/test_backup.ambr index 8cbf34895f9..f21de9d9fad 100644 --- a/tests/components/backup/snapshots/test_backup.ambr +++ b/tests/components/backup/snapshots/test_backup.ambr @@ -78,7 +78,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), ]), 'last_attempted_automatic_backup': None, diff --git a/tests/components/backup/snapshots/test_websocket.ambr b/tests/components/backup/snapshots/test_websocket.ambr index 58a5162b1bf..1607e2e15d9 100644 --- a/tests/components/backup/snapshots/test_websocket.ambr +++ b/tests/components/backup/snapshots/test_websocket.ambr @@ -1539,7 +1539,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), ]), 'last_attempted_automatic_backup': None, @@ -1607,7 +1607,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), ]), 'last_attempted_automatic_backup': None, @@ -1660,7 +1660,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), ]), 'last_attempted_automatic_backup': None, @@ -1788,7 +1788,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), ]), 'last_attempted_automatic_backup': None, @@ -1841,7 +1841,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), ]), 'last_attempted_automatic_backup': None, @@ -1950,7 +1950,7 @@ 'name': 'Test', 'protected': False, 'size': 13, - 'with_automatic_settings': False, + 'with_automatic_settings': None, }), ]), 'last_attempted_automatic_backup': None, @@ -2163,7 +2163,7 @@ 'name': 'Test', 'protected': False, 'size': 13, - 'with_automatic_settings': False, + 'with_automatic_settings': None, }), ]), 'last_attempted_automatic_backup': None, @@ -2216,7 +2216,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), }), 'success': True, @@ -2254,7 +2254,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), }), 'success': True, @@ -2305,7 +2305,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), }), 'success': True, @@ -2344,7 +2344,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), }), 'success': True, @@ -2607,7 +2607,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), ]), 'last_attempted_automatic_backup': None, @@ -2649,7 +2649,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), ]), 'last_attempted_automatic_backup': None, @@ -2692,7 +2692,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), ]), 'last_attempted_automatic_backup': None, @@ -2756,7 +2756,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), ]), 'last_attempted_automatic_backup': None, @@ -2799,7 +2799,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), ]), 
'last_attempted_automatic_backup': None, diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index a9b4674ad96..5795309501d 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -121,6 +121,10 @@ async def test_async_create_backup( assert create_backup.call_args == call( agent_ids=["backup.local"], backup_name="Core 2025.1.0", + extra_metadata={ + "instance_id": hass.data["core.uuid"], + "with_automatic_settings": False, + }, include_addons=None, include_all_addons=False, include_database=True, @@ -325,6 +329,10 @@ async def test_async_initiate_backup( assert backup_json_dict == { "compressed": True, "date": ANY, + "extra": { + "instance_id": hass.data["core.uuid"], + "with_automatic_settings": False, + }, "homeassistant": { "exclude_database": not include_database, "version": "2025.1.0", @@ -345,30 +353,30 @@ async def test_async_initiate_backup( backup_agent_ids = backup_data.pop("agent_ids") assert backup_agent_ids == agent_ids + assert backup_data == { + "addons": [], + "backup_id": ANY, + "database_included": include_database, + "date": ANY, + "failed_agent_ids": [], + "folders": [], + "homeassistant_included": True, + "homeassistant_version": "2025.1.0", + "name": name, + "protected": bool(password), + "size": ANY, + "with_automatic_settings": False, + } - backup = AgentBackup.from_dict(backup_data) - - assert backup == AgentBackup( - addons=[], - backup_id=ANY, - database_included=include_database, - date=ANY, - folders=[], - homeassistant_included=True, - homeassistant_version="2025.1.0", - name=name, - protected=bool(password), - size=ANY, - ) for agent_id in agent_ids: agent = agents[agent_id] assert len(agent._backups) == 1 - agent_backup = agent._backups[backup.backup_id] - assert agent_backup.backup_id == backup.backup_id - assert agent_backup.date == backup.date - assert agent_backup.name == backup.name - assert agent_backup.protected == backup.protected - assert agent_backup.size == backup.size + agent_backup = agent._backups[backup_data["backup_id"]] + assert agent_backup.backup_id == backup_data["backup_id"] + assert agent_backup.date == backup_data["date"] + assert agent_backup.name == backup_data["name"] + assert agent_backup.protected == backup_data["protected"] + assert agent_backup.size == backup_data["size"] outer_tar = mocked_tarfile.return_value core_tar = outer_tar.create_inner_tar.return_value.__enter__.return_value @@ -380,7 +388,7 @@ async def test_async_initiate_backup( tar_file_path = str(mocked_tarfile.call_args_list[0][0][0]) backup_directory = hass.config.path(backup_directory) - assert tar_file_path == f"{backup_directory}/{backup.backup_id}.tar" + assert tar_file_path == f"{backup_directory}/{backup_data["backup_id"]}.tar" @pytest.mark.usefixtures("mock_backup_generation") @@ -522,7 +530,6 @@ async def test_async_initiate_backup_with_agent_error( { "backup_id": "abc123", "failed_agent_ids": ["test.remote"], - "with_automatic_settings": False, } ] diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index 1a0e2cc1a81..a0860f49149 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -34,6 +34,7 @@ from tests.typing import WebSocketGenerator BACKUP_CALL = call( agent_ids=["test.test-agent"], backup_name="test-name", + extra_metadata={"instance_id": ANY, "with_automatic_settings": True}, include_addons=["test-addon"], include_all_addons=False, include_database=True, @@ 
-276,7 +277,6 @@ async def test_delete( { "backup_id": "abc123", "failed_agent_ids": ["test.remote"], - "with_automatic_settings": False, } ] }, diff --git a/tests/components/cloud/test_backup.py b/tests/components/cloud/test_backup.py index 57c801e0d68..93747ca25f7 100644 --- a/tests/components/cloud/test_backup.py +++ b/tests/components/cloud/test_backup.py @@ -108,6 +108,7 @@ def mock_list_files() -> Generator[MagicMock]: "backup_id": "23e64aec", "date": "2024-11-22T11:48:48.727189+01:00", "database_included": True, + "extra_metadata": {}, "folders": [], "homeassistant_included": True, "homeassistant_version": "2024.12.0.dev0", @@ -335,6 +336,7 @@ async def test_agents_upload( backup_id=backup_id, database_included=True, date="1970-01-01T00:00:00.000Z", + extra_metadata={}, folders=[Folder.MEDIA, Folder.SHARE], homeassistant_included=True, homeassistant_version="2024.12.0", @@ -390,6 +392,7 @@ async def test_agents_upload_fail_put( backup_id=backup_id, database_included=True, date="1970-01-01T00:00:00.000Z", + extra_metadata={}, folders=[Folder.MEDIA, Folder.SHARE], homeassistant_included=True, homeassistant_version="2024.12.0", @@ -438,6 +441,7 @@ async def test_agents_upload_fail_cloud( backup_id=backup_id, database_included=True, date="1970-01-01T00:00:00.000Z", + extra_metadata={}, folders=[Folder.MEDIA, Folder.SHARE], homeassistant_included=True, homeassistant_version="2024.12.0", @@ -479,6 +483,7 @@ async def test_agents_upload_not_protected( backup_id=backup_id, database_included=True, date="1970-01-01T00:00:00.000Z", + extra_metadata={}, folders=[Folder.MEDIA, Folder.SHARE], homeassistant_included=True, homeassistant_version="2024.12.0", diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py index c342c006732..9338313c87d 100644 --- a/tests/components/hassio/test_backup.py +++ b/tests/components/hassio/test_backup.py @@ -12,7 +12,7 @@ from datetime import datetime from io import StringIO import os from typing import Any -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import ANY, AsyncMock, Mock, patch from aiohasupervisor.exceptions import ( SupervisorBadRequestError, @@ -445,6 +445,7 @@ async def test_agent_upload( backup_id=backup_id, database_included=True, date="1970-01-01T00:00:00.000Z", + extra_metadata={}, folders=[Folder.MEDIA, Folder.SHARE], homeassistant_included=True, homeassistant_version="2024.12.0", @@ -622,6 +623,10 @@ DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions( addons=None, background=True, compressed=True, + extra={ + "instance_id": ANY, + "with_automatic_settings": False, + }, folders=None, homeassistant_exclude_database=False, homeassistant=True, @@ -876,6 +881,7 @@ async def test_agent_receive_remote_backup( backup_id=backup_id, database_included=True, date="1970-01-01T00:00:00.000Z", + extra_metadata={}, folders=[Folder.MEDIA, Folder.SHARE], homeassistant_included=True, homeassistant_version="2024.12.0", diff --git a/tests/components/kitchen_sink/test_backup.py b/tests/components/kitchen_sink/test_backup.py index 25ae2e3a2f6..9e46845e1cb 100644 --- a/tests/components/kitchen_sink/test_backup.py +++ b/tests/components/kitchen_sink/test_backup.py @@ -14,6 +14,7 @@ from homeassistant.components.backup import ( ) from homeassistant.components.kitchen_sink import DOMAIN from homeassistant.core import HomeAssistant +from homeassistant.helpers import instance_id from homeassistant.setup import async_setup_component from tests.typing import ClientSessionGenerator, WebSocketGenerator @@ 
-137,6 +138,10 @@ async def test_agents_upload( backup_id=backup_id, database_included=True, date="1970-01-01T00:00:00.000Z", + extra_metadata={ + "instance_id": await instance_id.async_get(hass), + "with_automatic_settings": False, + }, folders=[Folder.MEDIA, Folder.SHARE], homeassistant_included=True, homeassistant_version="2024.12.0", From bb2d027532a0b481abf4f3b0536bb8c0d199cafe Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Wed, 18 Dec 2024 19:11:13 +0100 Subject: [PATCH 0837/1198] Add Peblar Rocksolid EV Chargers integration (#133501) * Add Peblar Rocksolid EV Chargers integration * Process review comments --- .strict-typing | 1 + CODEOWNERS | 2 + homeassistant/components/peblar/__init__.py | 54 ++++++++ .../components/peblar/config_flow.py | 71 +++++++++++ homeassistant/components/peblar/const.py | 10 ++ .../components/peblar/coordinator.py | 37 ++++++ homeassistant/components/peblar/entity.py | 26 ++++ homeassistant/components/peblar/manifest.json | 11 ++ .../components/peblar/quality_scale.yaml | 79 ++++++++++++ homeassistant/components/peblar/sensor.py | 73 +++++++++++ homeassistant/components/peblar/strings.json | 25 ++++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 6 + mypy.ini | 10 ++ requirements_all.txt | 3 + requirements_test_all.txt | 3 + tests/components/peblar/__init__.py | 1 + tests/components/peblar/conftest.py | 48 ++++++++ .../peblar/fixtures/system_information.json | 57 +++++++++ tests/components/peblar/test_config_flow.py | 115 ++++++++++++++++++ 20 files changed, 633 insertions(+) create mode 100644 homeassistant/components/peblar/__init__.py create mode 100644 homeassistant/components/peblar/config_flow.py create mode 100644 homeassistant/components/peblar/const.py create mode 100644 homeassistant/components/peblar/coordinator.py create mode 100644 homeassistant/components/peblar/entity.py create mode 100644 homeassistant/components/peblar/manifest.json create mode 100644 homeassistant/components/peblar/quality_scale.yaml create mode 100644 homeassistant/components/peblar/sensor.py create mode 100644 homeassistant/components/peblar/strings.json create mode 100644 tests/components/peblar/__init__.py create mode 100644 tests/components/peblar/conftest.py create mode 100644 tests/components/peblar/fixtures/system_information.json create mode 100644 tests/components/peblar/test_config_flow.py diff --git a/.strict-typing b/.strict-typing index 899b22af35f..a96597da4c6 100644 --- a/.strict-typing +++ b/.strict-typing @@ -363,6 +363,7 @@ homeassistant.components.otbr.* homeassistant.components.overkiz.* homeassistant.components.p1_monitor.* homeassistant.components.panel_custom.* +homeassistant.components.peblar.* homeassistant.components.peco.* homeassistant.components.persistent_notification.* homeassistant.components.pi_hole.* diff --git a/CODEOWNERS b/CODEOWNERS index 8effcc49336..382fbffecaa 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1113,6 +1113,8 @@ build.json @home-assistant/supervisor /tests/components/palazzetti/ @dotvav /homeassistant/components/panel_custom/ @home-assistant/frontend /tests/components/panel_custom/ @home-assistant/frontend +/homeassistant/components/peblar/ @frenck +/tests/components/peblar/ @frenck /homeassistant/components/peco/ @IceBotYT /tests/components/peco/ @IceBotYT /homeassistant/components/pegel_online/ @mib1185 diff --git a/homeassistant/components/peblar/__init__.py b/homeassistant/components/peblar/__init__.py new file mode 100644 index 00000000000..559b124c772 --- /dev/null +++ 
b/homeassistant/components/peblar/__init__.py @@ -0,0 +1,54 @@ +"""Integration for Peblar EV chargers.""" + +from __future__ import annotations + +from aiohttp import CookieJar +from peblar import ( + AccessMode, + Peblar, + PeblarAuthenticationError, + PeblarConnectionError, + PeblarError, +) + +from homeassistant.const import CONF_HOST, CONF_PASSWORD, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady +from homeassistant.helpers.aiohttp_client import async_create_clientsession + +from .coordinator import PeblarConfigEntry, PeblarMeterDataUpdateCoordinator + +PLATFORMS = [Platform.SENSOR] + + +async def async_setup_entry(hass: HomeAssistant, entry: PeblarConfigEntry) -> bool: + """Set up Peblar from a config entry.""" + + peblar = Peblar( + host=entry.data[CONF_HOST], + session=async_create_clientsession(hass, cookie_jar=CookieJar(unsafe=True)), + ) + try: + await peblar.login(password=entry.data[CONF_PASSWORD]) + api = await peblar.rest_api(enable=True, access_mode=AccessMode.READ_WRITE) + except PeblarConnectionError as err: + raise ConfigEntryNotReady("Could not connect to Peblar charger") from err + except PeblarAuthenticationError as err: + raise ConfigEntryError("Could not login to Peblar charger") from err + except PeblarError as err: + raise ConfigEntryNotReady( + "Unknown error occurred while connecting to Peblar charger" + ) from err + + coordinator = PeblarMeterDataUpdateCoordinator(hass, entry, api) + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinator + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: PeblarConfigEntry) -> bool: + """Unload Peblar config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/peblar/config_flow.py b/homeassistant/components/peblar/config_flow.py new file mode 100644 index 00000000000..056d4a68be6 --- /dev/null +++ b/homeassistant/components/peblar/config_flow.py @@ -0,0 +1,71 @@ +"""Config flow to configure the Peblar integration.""" + +from __future__ import annotations + +from typing import Any + +from aiohttp import CookieJar +from peblar import Peblar, PeblarAuthenticationError, PeblarConnectionError +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_HOST, CONF_PASSWORD +from homeassistant.helpers.aiohttp_client import async_create_clientsession +from homeassistant.helpers.selector import ( + TextSelector, + TextSelectorConfig, + TextSelectorType, +) + +from .const import DOMAIN, LOGGER + + +class PeblarFlowHandler(ConfigFlow, domain=DOMAIN): + """Handle a Peblar config flow.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initiated by the user.""" + errors = {} + + if user_input is not None: + peblar = Peblar( + host=user_input[CONF_HOST], + session=async_create_clientsession( + self.hass, cookie_jar=CookieJar(unsafe=True) + ), + ) + try: + await peblar.login(password=user_input[CONF_PASSWORD]) + info = await peblar.system_information() + except PeblarAuthenticationError: + errors[CONF_PASSWORD] = "invalid_auth" + except PeblarConnectionError: + errors[CONF_HOST] = "cannot_connect" + except Exception: # noqa: BLE001 + LOGGER.exception("Unexpected exception") + errors["base"] = 
"unknown" + else: + await self.async_set_unique_id(info.product_serial_number) + self._abort_if_unique_id_configured() + return self.async_create_entry(title="Peblar", data=user_input) + else: + user_input = {} + + return self.async_show_form( + step_id="user", + data_schema=vol.Schema( + { + vol.Required( + CONF_HOST, default=user_input.get(CONF_HOST) + ): TextSelector(TextSelectorConfig(autocomplete="off")), + vol.Required(CONF_PASSWORD): TextSelector( + TextSelectorConfig(type=TextSelectorType.PASSWORD) + ), + } + ), + errors=errors, + ) diff --git a/homeassistant/components/peblar/const.py b/homeassistant/components/peblar/const.py new file mode 100644 index 00000000000..b986c866d16 --- /dev/null +++ b/homeassistant/components/peblar/const.py @@ -0,0 +1,10 @@ +"""Constants for the Peblar integration.""" + +from __future__ import annotations + +import logging +from typing import Final + +DOMAIN: Final = "peblar" + +LOGGER = logging.getLogger(__package__) diff --git a/homeassistant/components/peblar/coordinator.py b/homeassistant/components/peblar/coordinator.py new file mode 100644 index 00000000000..8270905648f --- /dev/null +++ b/homeassistant/components/peblar/coordinator.py @@ -0,0 +1,37 @@ +"""Data update coordinator for Peblar EV chargers.""" + +from datetime import timedelta + +from peblar import PeblarApi, PeblarError, PeblarMeter + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import LOGGER + +type PeblarConfigEntry = ConfigEntry[PeblarMeterDataUpdateCoordinator] + + +class PeblarMeterDataUpdateCoordinator(DataUpdateCoordinator[PeblarMeter]): + """Class to manage fetching Peblar meter data.""" + + def __init__( + self, hass: HomeAssistant, entry: PeblarConfigEntry, api: PeblarApi + ) -> None: + """Initialize the coordinator.""" + self.api = api + super().__init__( + hass, + LOGGER, + config_entry=entry, + name=f"Peblar {entry.title} meter", + update_interval=timedelta(seconds=10), + ) + + async def _async_update_data(self) -> PeblarMeter: + """Fetch data from the Peblar device.""" + try: + return await self.api.meter() + except PeblarError as err: + raise UpdateFailed(err) from err diff --git a/homeassistant/components/peblar/entity.py b/homeassistant/components/peblar/entity.py new file mode 100644 index 00000000000..6951cf6c21f --- /dev/null +++ b/homeassistant/components/peblar/entity.py @@ -0,0 +1,26 @@ +"""Base entity for the Peblar integration.""" + +from __future__ import annotations + +from homeassistant.const import CONF_HOST +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import PeblarConfigEntry, PeblarMeterDataUpdateCoordinator + + +class PeblarEntity(CoordinatorEntity[PeblarMeterDataUpdateCoordinator]): + """Defines a Peblar entity.""" + + _attr_has_entity_name = True + + def __init__(self, entry: PeblarConfigEntry) -> None: + """Initialize the Peblar entity.""" + super().__init__(coordinator=entry.runtime_data) + self._attr_device_info = DeviceInfo( + configuration_url=f"http://{entry.data[CONF_HOST]}", + identifiers={(DOMAIN, str(entry.unique_id))}, + manufacturer="Peblar", + name="Peblar EV charger", + ) diff --git a/homeassistant/components/peblar/manifest.json b/homeassistant/components/peblar/manifest.json new file mode 100644 index 00000000000..6de605c95dc --- /dev/null 
+++ b/homeassistant/components/peblar/manifest.json @@ -0,0 +1,11 @@ +{ + "domain": "peblar", + "name": "Peblar", + "codeowners": ["@frenck"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/peblar", + "integration_type": "device", + "iot_class": "local_polling", + "quality_scale": "bronze", + "requirements": ["peblar==0.2.1"] +} diff --git a/homeassistant/components/peblar/quality_scale.yaml b/homeassistant/components/peblar/quality_scale.yaml new file mode 100644 index 00000000000..51bd60cc4b4 --- /dev/null +++ b/homeassistant/components/peblar/quality_scale.yaml @@ -0,0 +1,79 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: Integration does not register custom actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not have any custom actions. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + Entities of this integration does not explicitly subscribe to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: todo + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: todo + reauthentication-flow: todo + test-coverage: todo + # Gold + devices: todo + diagnostics: todo + discovery-update-info: todo + discovery: todo + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + This integration connects to a single device. + entity-category: todo + entity-device-class: todo + entity-disabled-by-default: todo + entity-translations: todo + exception-translations: + status: exempt + comment: | + The coordinator needs translation when the update failed. + icon-translations: todo + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration does not raise any repairable issues. + stale-devices: + status: exempt + comment: | + This integration connects to a single device. 
+ + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/peblar/sensor.py b/homeassistant/components/peblar/sensor.py new file mode 100644 index 00000000000..eafca23e125 --- /dev/null +++ b/homeassistant/components/peblar/sensor.py @@ -0,0 +1,73 @@ +"""Support for Peblar sensors.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass + +from peblar import PeblarMeter + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import UnitOfEnergy +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .coordinator import PeblarConfigEntry +from .entity import PeblarEntity + + +@dataclass(frozen=True, kw_only=True) +class PeblarSensorDescription(SensorEntityDescription): + """Describe an Peblar sensor.""" + + value_fn: Callable[[PeblarMeter], int | None] + + +SENSORS: tuple[PeblarSensorDescription, ...] = ( + PeblarSensorDescription( + key="energy_total", + device_class=SensorDeviceClass.ENERGY, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + state_class=SensorStateClass.TOTAL_INCREASING, + suggested_display_precision=2, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value_fn=lambda x: x.energy_total, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: PeblarConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Peblar sensors based on a config entry.""" + async_add_entities( + PeblarSensorEntity(entry, description) for description in SENSORS + ) + + +class PeblarSensorEntity(PeblarEntity, SensorEntity): + """Defines a Peblar sensor.""" + + entity_description: PeblarSensorDescription + + def __init__( + self, + entry: PeblarConfigEntry, + description: PeblarSensorDescription, + ) -> None: + """Initialize the Peblar entity.""" + super().__init__(entry) + self.entity_description = description + self._attr_unique_id = f"{entry.unique_id}_{description.key}" + + @property + def native_value(self) -> int | None: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json new file mode 100644 index 00000000000..9bf4803b592 --- /dev/null +++ b/homeassistant/components/peblar/strings.json @@ -0,0 +1,25 @@ +{ + "config": { + "step": { + "user": { + "description": "Set up your Peblar EV charger to integrate with Home Assistant.\n\nTo do so, you will need to get the IP address of your Peblar charger and the password you use to log into the Peblar device' web interface.\n\nHome Assistant will automatically configure your Peblar charger for use with Home Assistant.", + "data": { + "host": "[%key:common::config_flow::data::host%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "host": "The hostname or IP address of your Peblar charger on your home network.", + "password": "The same password as you use to log in to the Peblar device' local web interface." 
+ } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + } +} diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 8e88e8a2ae8..599cc43c08b 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -452,6 +452,7 @@ FLOWS = { "p1_monitor", "palazzetti", "panasonic_viera", + "peblar", "peco", "pegel_online", "permobil", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index bd3c9eb04f9..48fedd9c127 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -4618,6 +4618,12 @@ "integration_type": "virtual", "supported_by": "upb" }, + "peblar": { + "name": "Peblar", + "integration_type": "device", + "config_flow": true, + "iot_class": "local_polling" + }, "peco": { "name": "PECO Outage Counter", "integration_type": "hub", diff --git a/mypy.ini b/mypy.ini index 15b96e0a802..ca7195ef92f 100644 --- a/mypy.ini +++ b/mypy.ini @@ -3386,6 +3386,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.peblar.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.peco.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/requirements_all.txt b/requirements_all.txt index af2457b8d88..1b1938b2e4f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1599,6 +1599,9 @@ panasonic-viera==0.4.2 # homeassistant.components.dunehd pdunehd==1.3.2 +# homeassistant.components.peblar +peblar==0.2.1 + # homeassistant.components.peco peco==0.0.30 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f7f79ed6200..93a7979600d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1326,6 +1326,9 @@ panasonic-viera==0.4.2 # homeassistant.components.dunehd pdunehd==1.3.2 +# homeassistant.components.peblar +peblar==0.2.1 + # homeassistant.components.peco peco==0.0.30 diff --git a/tests/components/peblar/__init__.py b/tests/components/peblar/__init__.py new file mode 100644 index 00000000000..9180d51e98b --- /dev/null +++ b/tests/components/peblar/__init__.py @@ -0,0 +1 @@ +"""Integration tests for the Peblar integration.""" diff --git a/tests/components/peblar/conftest.py b/tests/components/peblar/conftest.py new file mode 100644 index 00000000000..dfe6aabc6bc --- /dev/null +++ b/tests/components/peblar/conftest.py @@ -0,0 +1,48 @@ +"""Fixtures for the Peblar integration tests.""" + +from __future__ import annotations + +from collections.abc import Generator +from unittest.mock import MagicMock, patch + +from peblar.models import PeblarSystemInformation +import pytest + +from homeassistant.components.peblar.const import DOMAIN +from homeassistant.const import CONF_HOST, CONF_PASSWORD + +from tests.common import MockConfigEntry, load_fixture + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + title="Peblar", + domain=DOMAIN, + 
data={ + CONF_HOST: "127.0.0.127", + CONF_PASSWORD: "OMGSPIDERS", + }, + unique_id="23-45-A4O-MOF", + ) + + +@pytest.fixture +def mock_setup_entry() -> Generator[None]: + """Mock setting up a config entry.""" + with patch("homeassistant.components.peblar.async_setup_entry", return_value=True): + yield + + +@pytest.fixture +def mock_peblar() -> Generator[MagicMock]: + """Return a mocked Peblar client.""" + with patch( + "homeassistant.components.peblar.config_flow.Peblar", autospec=True + ) as peblar_mock: + peblar = peblar_mock.return_value + peblar.system_information.return_value = PeblarSystemInformation.from_json( + load_fixture("system_information.json", DOMAIN) + ) + yield peblar diff --git a/tests/components/peblar/fixtures/system_information.json b/tests/components/peblar/fixtures/system_information.json new file mode 100644 index 00000000000..dcec52a37fe --- /dev/null +++ b/tests/components/peblar/fixtures/system_information.json @@ -0,0 +1,57 @@ +{ + "BopCalIGainA": 264625, + "BopCalIGainB": 267139, + "BopCalIGainC": 239155, + "CanChangeChargingPhases": false, + "CanChargeSinglePhase": true, + "CanChargeThreePhases": false, + "CustomerId": "PBLR-0000645", + "CustomerUpdatePackagePubKey": "-----BEGIN PUBLIC KEY-----\nlorem ipsum\n-----END PUBLIC KEY-----\n", + "EthMacAddr": "00:0F:11:58:86:97", + "FwIdent": "1.6.1+1+WL-1", + "Hostname": "PBLR-0000645", + "HwFixedCableRating": 20, + "HwFwCompat": "wlac-2", + "HwHas4pRelay": false, + "HwHasBop": true, + "HwHasBuzzer": true, + "HwHasDualSocket": false, + "HwHasEichrechtLaserMarking": false, + "HwHasEthernet": true, + "HwHasLed": true, + "HwHasLte": false, + "HwHasMeter": true, + "HwHasMeterDisplay": true, + "HwHasPlc": false, + "HwHasRfid": true, + "HwHasRs485": true, + "HwHasShutter": false, + "HwHasSocket": false, + "HwHasTpm": false, + "HwHasWlan": true, + "HwMaxCurrent": 16, + "HwOneOrThreePhase": 3, + "HwUKCompliant": false, + "MainboardPn": "6004-2300-7600", + "MainboardSn": "23-38-A4E-2MC", + "MeterCalIGainA": 267369, + "MeterCalIGainB": 228286, + "MeterCalIGainC": 246455, + "MeterCalIRmsOffsetA": 15573, + "MeterCalIRmsOffsetB": 268422963, + "MeterCalIRmsOffsetC": 9082, + "MeterCalPhaseA": 250, + "MeterCalPhaseB": 271, + "MeterCalPhaseC": 271, + "MeterCalVGainA": 250551, + "MeterCalVGainB": 246074, + "MeterCalVGainC": 230191, + "MeterFwIdent": "b9cbcd", + "NorFlash": true, + "ProductModelName": "WLAC1-H11R0WE0ICR00", + "ProductPn": "6004-2300-8002", + "ProductSn": "23-45-A4O-MOF", + "ProductVendorName": "Peblar", + "WlanApMacAddr": "00:0F:11:58:86:98", + "WlanStaMacAddr": "00:0F:11:58:86:99" +} diff --git a/tests/components/peblar/test_config_flow.py b/tests/components/peblar/test_config_flow.py new file mode 100644 index 00000000000..0b2fa89e068 --- /dev/null +++ b/tests/components/peblar/test_config_flow.py @@ -0,0 +1,115 @@ +"""Configuration flow tests for the Peblar integration.""" + +from unittest.mock import MagicMock + +from peblar import PeblarAuthenticationError, PeblarConnectionError +import pytest + +from homeassistant.components.peblar.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_HOST, CONF_PASSWORD +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + +pytestmark = pytest.mark.usefixtures("mock_setup_entry") + + +@pytest.mark.usefixtures("mock_peblar") +async def test_user_flow(hass: HomeAssistant) -> None: + """Test the full happy path user flow from 
start to finish.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "127.0.0.1", + CONF_PASSWORD: "OMGPUPPIES", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "23-45-A4O-MOF" + assert config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_PASSWORD: "OMGPUPPIES", + } + assert not config_entry.options + + +@pytest.mark.parametrize( + ("side_effect", "expected_error"), + [ + (PeblarConnectionError, {CONF_HOST: "cannot_connect"}), + (PeblarAuthenticationError, {CONF_PASSWORD: "invalid_auth"}), + (Exception, {"base": "unknown"}), + ], +) +async def test_user_flow_errors( + hass: HomeAssistant, + mock_peblar: MagicMock, + side_effect: Exception, + expected_error: dict[str, str], +) -> None: + """Test we show user form on a connection error.""" + mock_peblar.login.side_effect = side_effect + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_HOST: "127.0.0.1", + CONF_PASSWORD: "OMGCATS!", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == expected_error + + mock_peblar.login.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "127.0.0.2", + CONF_PASSWORD: "OMGPUPPIES!", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "23-45-A4O-MOF" + assert config_entry.data == { + CONF_HOST: "127.0.0.2", + CONF_PASSWORD: "OMGPUPPIES!", + } + assert not config_entry.options + + +@pytest.mark.usefixtures("mock_peblar") +async def test_user_flow_already_configured( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test configuration flow aborts when the device is already configured.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_HOST: "127.0.0.1", + CONF_PASSWORD: "OMGSPIDERS", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" From 53ef96c63ea03b16aacad6dd1b178e854c4ba868 Mon Sep 17 00:00:00 2001 From: TJ Horner Date: Wed, 18 Dec 2024 10:21:03 -0800 Subject: [PATCH 0838/1198] weatherkit: use stale data for up to an hour if updates fail (#130398) --- .../components/weatherkit/coordinator.py | 19 ++++- tests/components/weatherkit/__init__.py | 36 +++++---- .../components/weatherkit/test_coordinator.py | 81 ++++++++++++++++--- tests/components/weatherkit/test_sensor.py | 5 +- tests/components/weatherkit/test_weather.py | 20 +++-- 5 files changed, 126 insertions(+), 35 deletions(-) diff --git a/homeassistant/components/weatherkit/coordinator.py b/homeassistant/components/weatherkit/coordinator.py index ddabba2fc1f..6438d7503db 100644 --- a/homeassistant/components/weatherkit/coordinator.py +++ b/homeassistant/components/weatherkit/coordinator.py @@ -2,7 +2,7 @@ from __future__ import annotations -from datetime import timedelta +from datetime import datetime, timedelta from apple_weatherkit import DataSetType from apple_weatherkit.client import WeatherKitApiClient, WeatherKitApiClientError @@ -20,12 
+20,15 @@ REQUESTED_DATA_SETS = [ DataSetType.HOURLY_FORECAST, ] +STALE_DATA_THRESHOLD = timedelta(hours=1) + class WeatherKitDataUpdateCoordinator(DataUpdateCoordinator): """Class to manage fetching data from the API.""" config_entry: ConfigEntry supported_data_sets: list[DataSetType] | None = None + last_updated_at: datetime | None = None def __init__( self, @@ -62,10 +65,20 @@ class WeatherKitDataUpdateCoordinator(DataUpdateCoordinator): if not self.supported_data_sets: await self.update_supported_data_sets() - return await self.client.get_weather_data( + updated_data = await self.client.get_weather_data( self.config_entry.data[CONF_LATITUDE], self.config_entry.data[CONF_LONGITUDE], self.supported_data_sets, ) except WeatherKitApiClientError as exception: - raise UpdateFailed(exception) from exception + if self.data is None or ( + self.last_updated_at is not None + and datetime.now() - self.last_updated_at > STALE_DATA_THRESHOLD + ): + raise UpdateFailed(exception) from exception + + LOGGER.debug("Using stale data because update failed: %s", exception) + return self.data + else: + self.last_updated_at = datetime.now() + return updated_data diff --git a/tests/components/weatherkit/__init__.py b/tests/components/weatherkit/__init__.py index 99c856a7e37..bc7c31fe8d8 100644 --- a/tests/components/weatherkit/__init__.py +++ b/tests/components/weatherkit/__init__.py @@ -1,5 +1,6 @@ """Tests for the Apple WeatherKit integration.""" +from contextlib import contextmanager from unittest.mock import patch from apple_weatherkit import DataSetType @@ -26,20 +27,13 @@ EXAMPLE_CONFIG_DATA = { } -async def init_integration( - hass: HomeAssistant, +@contextmanager +def mock_weather_response( is_night_time: bool = False, has_hourly_forecast: bool = True, has_daily_forecast: bool = True, -) -> MockConfigEntry: - """Set up the WeatherKit integration in Home Assistant.""" - entry = MockConfigEntry( - domain=DOMAIN, - title="Home", - unique_id="0123456", - data=EXAMPLE_CONFIG_DATA, - ) - +): + """Mock a successful WeatherKit API response.""" weather_response = load_json_object_fixture("weatherkit/weather_response.json") available_data_sets = [DataSetType.CURRENT_WEATHER] @@ -68,8 +62,22 @@ async def init_integration( return_value=available_data_sets, ), ): - entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() + yield + + +async def init_integration( + hass: HomeAssistant, +) -> MockConfigEntry: + """Set up the WeatherKit integration in Home Assistant.""" + entry = MockConfigEntry( + domain=DOMAIN, + title="Home", + unique_id="0123456", + data=EXAMPLE_CONFIG_DATA, + ) + + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() return entry diff --git a/tests/components/weatherkit/test_coordinator.py b/tests/components/weatherkit/test_coordinator.py index eff142f3d94..7cc78179f44 100644 --- a/tests/components/weatherkit/test_coordinator.py +++ b/tests/components/weatherkit/test_coordinator.py @@ -4,30 +4,93 @@ from datetime import timedelta from unittest.mock import patch from apple_weatherkit.client import WeatherKitApiClientError +from freezegun.api import FrozenDateTimeFactory from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant -from homeassistant.util.dt import utcnow -from . import init_integration +from . 
import init_integration, mock_weather_response from tests.common import async_fire_time_changed -async def test_failed_updates(hass: HomeAssistant) -> None: - """Test that we properly handle failed updates.""" - await init_integration(hass) +async def test_update_uses_stale_data_before_threshold( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, +) -> None: + """Test that stale data from the last successful update is used if an update failure occurs before the threshold.""" + with mock_weather_response(): + await init_integration(hass) + + state = hass.states.get("weather.home") + assert state + assert state.state != STATE_UNAVAILABLE + + initial_state = state.state + + # Expect stale data to be used before one hour with patch( "homeassistant.components.weatherkit.WeatherKitApiClient.get_weather_data", side_effect=WeatherKitApiClientError, ): - async_fire_time_changed( - hass, - utcnow() + timedelta(minutes=5), - ) + freezer.tick(timedelta(minutes=59)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("weather.home") + assert state + assert state.state == initial_state + + +async def test_update_becomes_unavailable_after_threshold( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, +) -> None: + """Test that the entity becomes unavailable if an update failure occurs after the threshold.""" + with mock_weather_response(): + await init_integration(hass) + + # Expect state to be unavailable after one hour + + with patch( + "homeassistant.components.weatherkit.WeatherKitApiClient.get_weather_data", + side_effect=WeatherKitApiClientError, + ): + freezer.tick(timedelta(hours=1, minutes=5)) + async_fire_time_changed(hass) await hass.async_block_till_done() state = hass.states.get("weather.home") assert state assert state.state == STATE_UNAVAILABLE + + +async def test_update_recovers_after_failure( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, +) -> None: + """Test that a successful update after repeated failures recovers the entity's state.""" + with mock_weather_response(): + await init_integration(hass) + + # Trigger a failure after threshold + + with patch( + "homeassistant.components.weatherkit.WeatherKitApiClient.get_weather_data", + side_effect=WeatherKitApiClientError, + ): + freezer.tick(timedelta(hours=1, minutes=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Expect that a successful update recovers the entity + + with mock_weather_response(): + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("weather.home") + assert state + assert state.state != STATE_UNAVAILABLE diff --git a/tests/components/weatherkit/test_sensor.py b/tests/components/weatherkit/test_sensor.py index 6c6999c6bfd..6ded9a779d5 100644 --- a/tests/components/weatherkit/test_sensor.py +++ b/tests/components/weatherkit/test_sensor.py @@ -6,7 +6,7 @@ import pytest from homeassistant.core import HomeAssistant -from . import init_integration +from . 
import init_integration, mock_weather_response @pytest.mark.parametrize( @@ -20,7 +20,8 @@ async def test_sensor_values( hass: HomeAssistant, entity_name: str, expected_value: Any ) -> None: """Test that various sensor values match what we expect.""" - await init_integration(hass) + with mock_weather_response(): + await init_integration(hass) state = hass.states.get(entity_name) assert state diff --git a/tests/components/weatherkit/test_weather.py b/tests/components/weatherkit/test_weather.py index ba20276c22e..ec4ce2ba3b3 100644 --- a/tests/components/weatherkit/test_weather.py +++ b/tests/components/weatherkit/test_weather.py @@ -23,12 +23,13 @@ from homeassistant.components.weatherkit.const import ATTRIBUTION from homeassistant.const import ATTR_ATTRIBUTION, ATTR_SUPPORTED_FEATURES from homeassistant.core import HomeAssistant -from . import init_integration +from . import init_integration, mock_weather_response async def test_current_weather(hass: HomeAssistant) -> None: """Test states of the current weather.""" - await init_integration(hass) + with mock_weather_response(): + await init_integration(hass) state = hass.states.get("weather.home") assert state @@ -49,7 +50,8 @@ async def test_current_weather(hass: HomeAssistant) -> None: async def test_current_weather_nighttime(hass: HomeAssistant) -> None: """Test that the condition is clear-night when it's sunny and night time.""" - await init_integration(hass, is_night_time=True) + with mock_weather_response(is_night_time=True): + await init_integration(hass) state = hass.states.get("weather.home") assert state @@ -58,7 +60,8 @@ async def test_current_weather_nighttime(hass: HomeAssistant) -> None: async def test_daily_forecast_missing(hass: HomeAssistant) -> None: """Test that daily forecast is not supported when WeatherKit doesn't support it.""" - await init_integration(hass, has_daily_forecast=False) + with mock_weather_response(has_daily_forecast=False): + await init_integration(hass) state = hass.states.get("weather.home") assert state @@ -69,7 +72,8 @@ async def test_daily_forecast_missing(hass: HomeAssistant) -> None: async def test_hourly_forecast_missing(hass: HomeAssistant) -> None: """Test that hourly forecast is not supported when WeatherKit doesn't support it.""" - await init_integration(hass, has_hourly_forecast=False) + with mock_weather_response(has_hourly_forecast=False): + await init_integration(hass) state = hass.states.get("weather.home") assert state @@ -86,7 +90,8 @@ async def test_hourly_forecast( hass: HomeAssistant, snapshot: SnapshotAssertion, service: str ) -> None: """Test states of the hourly forecast.""" - await init_integration(hass) + with mock_weather_response(): + await init_integration(hass) response = await hass.services.async_call( WEATHER_DOMAIN, @@ -109,7 +114,8 @@ async def test_daily_forecast( hass: HomeAssistant, snapshot: SnapshotAssertion, service: str ) -> None: """Test states of the daily forecast.""" - await init_integration(hass) + with mock_weather_response(): + await init_integration(hass) response = await hass.services.async_call( WEATHER_DOMAIN, From 70ad4ee454f04da2cd75778f8c83f202da910469 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Wed, 18 Dec 2024 19:32:51 +0100 Subject: [PATCH 0839/1198] Add select platform to IronOS (#132218) --- homeassistant/components/iron_os/__init__.py | 1 + homeassistant/components/iron_os/icons.json | 32 +- homeassistant/components/iron_os/select.py | 208 ++++++++ homeassistant/components/iron_os/strings.json | 76 +++ 
tests/components/iron_os/conftest.py | 17 +- .../iron_os/snapshots/test_select.ambr | 469 ++++++++++++++++++ tests/components/iron_os/test_select.py | 164 ++++++ 7 files changed, 963 insertions(+), 4 deletions(-) create mode 100644 homeassistant/components/iron_os/select.py create mode 100644 tests/components/iron_os/snapshots/test_select.ambr create mode 100644 tests/components/iron_os/test_select.py diff --git a/homeassistant/components/iron_os/__init__.py b/homeassistant/components/iron_os/__init__.py index 0fe5acc2db6..9655f7bfcdd 100644 --- a/homeassistant/components/iron_os/__init__.py +++ b/homeassistant/components/iron_os/__init__.py @@ -28,6 +28,7 @@ from .coordinator import ( PLATFORMS: list[Platform] = [ Platform.BINARY_SENSOR, Platform.NUMBER, + Platform.SELECT, Platform.SENSOR, Platform.UPDATE, ] diff --git a/homeassistant/components/iron_os/icons.json b/homeassistant/components/iron_os/icons.json index eadcc17bb37..0d26b027c3f 100644 --- a/homeassistant/components/iron_os/icons.json +++ b/homeassistant/components/iron_os/icons.json @@ -63,13 +63,39 @@ "min_voltage_per_cell": { "default": "mdi:fuel-cell" }, - "min_dc_voltage_cells": { - "default": "mdi:battery-arrow-down" - }, "power_limit": { "default": "mdi:flash-alert" } }, + "select": { + "locking_mode": { + "default": "mdi:download-lock" + }, + "orientation_mode": { + "default": "mdi:screen-rotation" + }, + "autostart_mode": { + "default": "mdi:power-standby" + }, + "animation_speed": { + "default": "mdi:image-refresh" + }, + "min_dc_voltage_cells": { + "default": "mdi:fuel-cell" + }, + "temp_unit": { + "default": "mdi:temperature-celsius", + "state": { + "fahrenheit": "mdi:temperature-fahrenheit" + } + }, + "desc_scroll_speed": { + "default": "mdi:message-text-fast" + }, + "logo_duration": { + "default": "mdi:clock-digital" + } + }, "sensor": { "live_temperature": { "default": "mdi:soldering-iron" diff --git a/homeassistant/components/iron_os/select.py b/homeassistant/components/iron_os/select.py new file mode 100644 index 00000000000..c863e076f0b --- /dev/null +++ b/homeassistant/components/iron_os/select.py @@ -0,0 +1,208 @@ +"""Select platform for IronOS integration.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from enum import Enum, StrEnum +from typing import Any + +from pynecil import ( + AnimationSpeed, + AutostartMode, + BatteryType, + CharSetting, + CommunicationError, + LockingMode, + LogoDuration, + ScreenOrientationMode, + ScrollSpeed, + SettingsDataResponse, + TempUnit, +) + +from homeassistant.components.select import SelectEntity, SelectEntityDescription +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import IronOSConfigEntry +from .const import DOMAIN +from .coordinator import IronOSCoordinators +from .entity import IronOSBaseEntity + +PARALLEL_UPDATES = 0 + + +@dataclass(frozen=True, kw_only=True) +class IronOSSelectEntityDescription(SelectEntityDescription): + """Describes IronOS select entity.""" + + value_fn: Callable[[SettingsDataResponse], str | None] + characteristic: CharSetting + raw_value_fn: Callable[[str], Any] | None = None + + +class PinecilSelect(StrEnum): + """Select controls for Pinecil device.""" + + MIN_DC_VOLTAGE_CELLS = "min_dc_voltage_cells" + ORIENTATION_MODE = "orientation_mode" + ANIMATION_SPEED = "animation_speed" + AUTOSTART_MODE = "autostart_mode" + TEMP_UNIT = "temp_unit" + DESC_SCROLL_SPEED = "desc_scroll_speed" + LOCKING_MODE = "locking_mode" + LOGO_DURATION = "logo_duration" + + +def enum_to_str(enum: Enum | None) -> str | None: + """Convert enum name to lower-case string.""" + return enum.name.lower() if isinstance(enum, Enum) else None + + +PINECIL_SELECT_DESCRIPTIONS: tuple[IronOSSelectEntityDescription, ...] = ( + IronOSSelectEntityDescription( + key=PinecilSelect.MIN_DC_VOLTAGE_CELLS, + translation_key=PinecilSelect.MIN_DC_VOLTAGE_CELLS, + characteristic=CharSetting.MIN_DC_VOLTAGE_CELLS, + value_fn=lambda x: enum_to_str(x.get("min_dc_voltage_cells")), + raw_value_fn=lambda value: BatteryType[value.upper()], + options=[x.name.lower() for x in BatteryType], + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + ), + IronOSSelectEntityDescription( + key=PinecilSelect.ORIENTATION_MODE, + translation_key=PinecilSelect.ORIENTATION_MODE, + characteristic=CharSetting.ORIENTATION_MODE, + value_fn=lambda x: enum_to_str(x.get("orientation_mode")), + raw_value_fn=lambda value: ScreenOrientationMode[value.upper()], + options=[x.name.lower() for x in ScreenOrientationMode], + entity_category=EntityCategory.CONFIG, + ), + IronOSSelectEntityDescription( + key=PinecilSelect.ANIMATION_SPEED, + translation_key=PinecilSelect.ANIMATION_SPEED, + characteristic=CharSetting.ANIMATION_SPEED, + value_fn=lambda x: enum_to_str(x.get("animation_speed")), + raw_value_fn=lambda value: AnimationSpeed[value.upper()], + options=[x.name.lower() for x in AnimationSpeed], + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + ), + IronOSSelectEntityDescription( + key=PinecilSelect.AUTOSTART_MODE, + translation_key=PinecilSelect.AUTOSTART_MODE, + characteristic=CharSetting.AUTOSTART_MODE, + value_fn=lambda x: enum_to_str(x.get("autostart_mode")), + raw_value_fn=lambda value: AutostartMode[value.upper()], + options=[x.name.lower() for x in AutostartMode], + entity_category=EntityCategory.CONFIG, + ), + IronOSSelectEntityDescription( + key=PinecilSelect.TEMP_UNIT, + translation_key=PinecilSelect.TEMP_UNIT, + characteristic=CharSetting.TEMP_UNIT, + value_fn=lambda x: enum_to_str(x.get("temp_unit")), + raw_value_fn=lambda value: TempUnit[value.upper()], + options=[x.name.lower() for x in TempUnit], + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + ), + IronOSSelectEntityDescription( + key=PinecilSelect.DESC_SCROLL_SPEED, + translation_key=PinecilSelect.DESC_SCROLL_SPEED, + characteristic=CharSetting.DESC_SCROLL_SPEED, + value_fn=lambda x: enum_to_str(x.get("desc_scroll_speed")), + raw_value_fn=lambda value: ScrollSpeed[value.upper()], + options=[x.name.lower() for x in ScrollSpeed], + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + ), + IronOSSelectEntityDescription( 
+ key=PinecilSelect.LOCKING_MODE, + translation_key=PinecilSelect.LOCKING_MODE, + characteristic=CharSetting.LOCKING_MODE, + value_fn=lambda x: enum_to_str(x.get("locking_mode")), + raw_value_fn=lambda value: LockingMode[value.upper()], + options=[x.name.lower() for x in LockingMode], + entity_category=EntityCategory.CONFIG, + ), + IronOSSelectEntityDescription( + key=PinecilSelect.LOGO_DURATION, + translation_key=PinecilSelect.LOGO_DURATION, + characteristic=CharSetting.LOGO_DURATION, + value_fn=lambda x: enum_to_str(x.get("logo_duration")), + raw_value_fn=lambda value: LogoDuration[value.upper()], + options=[x.name.lower() for x in LogoDuration], + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: IronOSConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up select entities from a config entry.""" + coordinator = entry.runtime_data + + async_add_entities( + IronOSSelectEntity(coordinator, description) + for description in PINECIL_SELECT_DESCRIPTIONS + ) + + +class IronOSSelectEntity(IronOSBaseEntity, SelectEntity): + """Implementation of a IronOS select entity.""" + + entity_description: IronOSSelectEntityDescription + + def __init__( + self, + coordinator: IronOSCoordinators, + entity_description: IronOSSelectEntityDescription, + ) -> None: + """Initialize the select entity.""" + super().__init__( + coordinator.live_data, entity_description, entity_description.characteristic + ) + + self.settings = coordinator.settings + + @property + def current_option(self) -> str | None: + """Return the selected entity option to represent the entity state.""" + + return self.entity_description.value_fn(self.settings.data) + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + + if raw_value_fn := self.entity_description.raw_value_fn: + value = raw_value_fn(option) + try: + await self.coordinator.device.write( + self.entity_description.characteristic, value + ) + except CommunicationError as e: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="submit_setting_failed", + ) from e + await self.settings.async_request_refresh() + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + + await super().async_added_to_hass() + self.async_on_remove( + self.settings.async_add_listener( + self._handle_coordinator_update, self.entity_description.characteristic + ) + ) + await self.settings.async_request_refresh() diff --git a/homeassistant/components/iron_os/strings.json b/homeassistant/components/iron_os/strings.json index 13528104f8c..04c55280550 100644 --- a/homeassistant/components/iron_os/strings.json +++ b/homeassistant/components/iron_os/strings.json @@ -1,4 +1,8 @@ { + "common": { + "slow": "Slow", + "fast": "Fast" + }, "config": { "step": { "user": { @@ -84,6 +88,78 @@ "name": "Long-press temperature step" } }, + "select": { + "min_dc_voltage_cells": { + "name": "Power source", + "state": { + "no_battery": "External power supply (DC)", + "battery_3s": "3S (3 cells)", + "battery_4s": "4S (4 cells)", + "battery_5s": "5S (5 cells)", + "battery_6s": "6S (6 cells)" + } + }, + "orientation_mode": { + "name": "Display orientation mode", + "state": { + "right_handed": "Right-handed", + "left_handed": "Left-handed", + "auto": "Auto" + } + }, + "animation_speed": { + "name": "Animation speed", + "state": { + "off": "[%key:common::state::off%]", + "slow": 
"[%key:component::iron_os::common::slow%]", + "medium": "Medium", + "fast": "[%key:component::iron_os::common::fast%]" + } + }, + "autostart_mode": { + "name": "Start-up behavior", + "state": { + "disabled": "[%key:common::state::disabled%]", + "soldering": "Soldering mode", + "sleeping": "Sleeping mode", + "idle": "Idle mode" + } + }, + "temp_unit": { + "name": "Temperature display unit", + "state": { + "celsius": "Celsius (C°)", + "fahrenheit": "Fahrenheit (F°)" + } + }, + "desc_scroll_speed": { + "name": "Scrolling speed", + "state": { + "slow": "[%key:component::iron_os::common::slow%]", + "fast": "[%key:component::iron_os::common::fast%]" + } + }, + "locking_mode": { + "name": "Button locking mode", + "state": { + "off": "[%key:common::state::off%]", + "boost_only": "Boost only", + "full_locking": "Full locking" + } + }, + "logo_duration": { + "name": "Boot logo duration", + "state": { + "off": "[%key:common::state::off%]", + "seconds_1": "1 second", + "seconds_2": "2 second", + "seconds_3": "3 second", + "seconds_4": "4 second", + "seconds_5": "5 second", + "loop": "Loop" + } + } + }, "sensor": { "live_temperature": { "name": "Tip temperature" diff --git a/tests/components/iron_os/conftest.py b/tests/components/iron_os/conftest.py index 9091694e6a5..356c7358c55 100644 --- a/tests/components/iron_os/conftest.py +++ b/tests/components/iron_os/conftest.py @@ -6,12 +6,20 @@ from unittest.mock import AsyncMock, MagicMock, patch from bleak.backends.device import BLEDevice from habluetooth import BluetoothServiceInfoBleak from pynecil import ( + AnimationSpeed, + AutostartMode, + BatteryType, DeviceInfoResponse, LatestRelease, LiveDataResponse, + LockingMode, + LogoDuration, OperatingMode, PowerSource, + ScreenOrientationMode, + ScrollSpeed, SettingsDataResponse, + TempUnit, ) import pytest @@ -151,7 +159,7 @@ def mock_pynecil() -> Generator[AsyncMock]: client.get_settings.return_value = SettingsDataResponse( sleep_temp=150, sleep_timeout=5, - min_dc_voltage_cells=0, + min_dc_voltage_cells=BatteryType.BATTERY_3S, min_volltage_per_cell=3.3, qc_ideal_voltage=9.0, accel_sensitivity=7, @@ -168,6 +176,13 @@ def mock_pynecil() -> Generator[AsyncMock]: hall_sensitivity=7, pd_negotiation_timeout=2.0, display_brightness=3, + orientation_mode=ScreenOrientationMode.RIGHT_HANDED, + animation_speed=AnimationSpeed.MEDIUM, + autostart_mode=AutostartMode.IDLE, + temp_unit=TempUnit.CELSIUS, + desc_scroll_speed=ScrollSpeed.FAST, + logo_duration=LogoDuration.LOOP, + locking_mode=LockingMode.FULL_LOCKING, ) client.get_live_data.return_value = LiveDataResponse( live_temp=298, diff --git a/tests/components/iron_os/snapshots/test_select.ambr b/tests/components/iron_os/snapshots/test_select.ambr new file mode 100644 index 00000000000..ce6045c1243 --- /dev/null +++ b/tests/components/iron_os/snapshots/test_select.ambr @@ -0,0 +1,469 @@ +# serializer version: 1 +# name: test_state[select.pinecil_animation_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'off', + 'slow', + 'medium', + 'fast', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.pinecil_animation_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Animation speed', + 'platform': 
'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_animation_speed', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[select.pinecil_animation_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Animation speed', + 'options': list([ + 'off', + 'slow', + 'medium', + 'fast', + ]), + }), + 'context': , + 'entity_id': 'select.pinecil_animation_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'medium', + }) +# --- +# name: test_state[select.pinecil_boot_logo_duration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'off', + 'seconds_1', + 'seconds_2', + 'seconds_3', + 'seconds_4', + 'seconds_5', + 'loop', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.pinecil_boot_logo_duration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Boot logo duration', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_logo_duration', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[select.pinecil_boot_logo_duration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Boot logo duration', + 'options': list([ + 'off', + 'seconds_1', + 'seconds_2', + 'seconds_3', + 'seconds_4', + 'seconds_5', + 'loop', + ]), + }), + 'context': , + 'entity_id': 'select.pinecil_boot_logo_duration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'loop', + }) +# --- +# name: test_state[select.pinecil_button_locking_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'off', + 'boost_only', + 'full_locking', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.pinecil_button_locking_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Button locking mode', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_locking_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[select.pinecil_button_locking_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Button locking mode', + 'options': list([ + 'off', + 'boost_only', + 'full_locking', + ]), + }), + 'context': , + 'entity_id': 'select.pinecil_button_locking_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'full_locking', + }) +# --- +# name: test_state[select.pinecil_display_orientation_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'right_handed', + 'left_handed', + 'auto', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 
'entity_category': , + 'entity_id': 'select.pinecil_display_orientation_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Display orientation mode', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_orientation_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[select.pinecil_display_orientation_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Display orientation mode', + 'options': list([ + 'right_handed', + 'left_handed', + 'auto', + ]), + }), + 'context': , + 'entity_id': 'select.pinecil_display_orientation_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'right_handed', + }) +# --- +# name: test_state[select.pinecil_power_source-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_battery', + 'battery_3s', + 'battery_4s', + 'battery_5s', + 'battery_6s', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.pinecil_power_source', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power source', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_min_dc_voltage_cells', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[select.pinecil_power_source-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Power source', + 'options': list([ + 'no_battery', + 'battery_3s', + 'battery_4s', + 'battery_5s', + 'battery_6s', + ]), + }), + 'context': , + 'entity_id': 'select.pinecil_power_source', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'battery_3s', + }) +# --- +# name: test_state[select.pinecil_scrolling_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'slow', + 'fast', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.pinecil_scrolling_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Scrolling speed', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_desc_scroll_speed', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[select.pinecil_scrolling_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Scrolling speed', + 'options': list([ + 'slow', + 'fast', + ]), + }), + 'context': , + 'entity_id': 'select.pinecil_scrolling_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'fast', + }) +# --- +# name: test_state[select.pinecil_start_up_behavior-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + 
}), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'disabled', + 'soldering', + 'sleeping', + 'idle', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.pinecil_start_up_behavior', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Start-up behavior', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_autostart_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[select.pinecil_start_up_behavior-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Start-up behavior', + 'options': list([ + 'disabled', + 'soldering', + 'sleeping', + 'idle', + ]), + }), + 'context': , + 'entity_id': 'select.pinecil_start_up_behavior', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_state[select.pinecil_temperature_display_unit-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'celsius', + 'fahrenheit', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.pinecil_temperature_display_unit', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Temperature display unit', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_temp_unit', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[select.pinecil_temperature_display_unit-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Temperature display unit', + 'options': list([ + 'celsius', + 'fahrenheit', + ]), + }), + 'context': , + 'entity_id': 'select.pinecil_temperature_display_unit', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'celsius', + }) +# --- diff --git a/tests/components/iron_os/test_select.py b/tests/components/iron_os/test_select.py new file mode 100644 index 00000000000..5e981e1618e --- /dev/null +++ b/tests/components/iron_os/test_select.py @@ -0,0 +1,164 @@ +"""Tests for the IronOS select platform.""" + +from collections.abc import AsyncGenerator +from datetime import timedelta +from enum import Enum +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +from pynecil import ( + AnimationSpeed, + BatteryType, + CharSetting, + CommunicationError, + LockingMode, + LogoDuration, + ScreenOrientationMode, + ScrollSpeed, + TempUnit, +) +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.select import ( + ATTR_OPTION, + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from tests.common import 
MockConfigEntry, async_fire_time_changed, snapshot_platform + + +@pytest.fixture(autouse=True) +async def select_only() -> AsyncGenerator[None]: + """Enable only the select platform.""" + with patch( + "homeassistant.components.iron_os.PLATFORMS", + [Platform.SELECT], + ): + yield + + +@pytest.mark.usefixtures( + "entity_registry_enabled_by_default", "mock_pynecil", "ble_device" +) +async def test_state( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the IronOS select platform states.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("entity_id", "option", "call_params"), + [ + ( + "select.pinecil_power_source", + "battery_3s", + (CharSetting.MIN_DC_VOLTAGE_CELLS, BatteryType.BATTERY_3S), + ), + ( + "select.pinecil_display_orientation_mode", + "right_handed", + (CharSetting.ORIENTATION_MODE, ScreenOrientationMode.RIGHT_HANDED), + ), + ( + "select.pinecil_animation_speed", + "medium", + (CharSetting.ANIMATION_SPEED, AnimationSpeed.MEDIUM), + ), + ( + "select.pinecil_temperature_display_unit", + "fahrenheit", + (CharSetting.TEMP_UNIT, TempUnit.FAHRENHEIT), + ), + ( + "select.pinecil_scrolling_speed", + "fast", + (CharSetting.DESC_SCROLL_SPEED, ScrollSpeed.FAST), + ), + ( + "select.pinecil_button_locking_mode", + "full_locking", + (CharSetting.LOCKING_MODE, LockingMode.FULL_LOCKING), + ), + ( + "select.pinecil_boot_logo_duration", + "loop", + (CharSetting.LOGO_DURATION, LogoDuration.LOOP), + ), + ], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default", "ble_device") +async def test_select_option( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pynecil: AsyncMock, + entity_id: str, + option: str, + call_params: tuple[Enum, ...], +) -> None: + """Test the IronOS select option service.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + service_data={ATTR_OPTION: option}, + target={ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + assert len(mock_pynecil.write.mock_calls) == 1 + mock_pynecil.write.assert_called_once_with(*call_params) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default", "ble_device") +async def test_select_option_exception( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pynecil: AsyncMock, +) -> None: + """Test the IronOS select option service exception.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_pynecil.write.side_effect = CommunicationError + + with pytest.raises( + ServiceValidationError, + match="Failed to submit setting to device, try again later", + ): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + service_data={ATTR_OPTION: "battery_3s"}, + target={ATTR_ENTITY_ID: "select.pinecil_power_source"}, + blocking=True, + 
) From 352e948d56b2ba048d536c63d542e9c35646b068 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Wed, 18 Dec 2024 19:33:33 +0100 Subject: [PATCH 0840/1198] Add tests for already_configured erros in IronOS integration (#132265) --- .../components/iron_os/quality_scale.yaml | 2 +- tests/components/iron_os/test_config_flow.py | 54 ++++++++++++++++--- 2 files changed, 49 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/iron_os/quality_scale.yaml b/homeassistant/components/iron_os/quality_scale.yaml index 5ede3d6971d..922702b8260 100644 --- a/homeassistant/components/iron_os/quality_scale.yaml +++ b/homeassistant/components/iron_os/quality_scale.yaml @@ -6,7 +6,7 @@ rules: appropriate-polling: done brands: done common-modules: done - config-flow-test-coverage: todo + config-flow-test-coverage: done config-flow: done dependency-transparency: done docs-actions: diff --git a/tests/components/iron_os/test_config_flow.py b/tests/components/iron_os/test_config_flow.py index 231ec6cc3d6..e1ac8fb9f00 100644 --- a/tests/components/iron_os/test_config_flow.py +++ b/tests/components/iron_os/test_config_flow.py @@ -4,6 +4,8 @@ from __future__ import annotations from unittest.mock import AsyncMock, MagicMock +import pytest + from homeassistant.components.iron_os import DOMAIN from homeassistant.config_entries import SOURCE_BLUETOOTH, SOURCE_USER from homeassistant.core import HomeAssistant @@ -11,9 +13,12 @@ from homeassistant.data_entry_flow import FlowResultType from .conftest import DEFAULT_NAME, PINECIL_SERVICE_INFO, USER_INPUT +from tests.common import MockConfigEntry -async def test_form( - hass: HomeAssistant, mock_setup_entry: AsyncMock, discovery: MagicMock + +@pytest.mark.usefixtures("discovery") +async def test_async_step_user( + hass: HomeAssistant, mock_setup_entry: AsyncMock ) -> None: """Test the user config flow.""" result = await hass.config_entries.flow.async_init( @@ -32,10 +37,31 @@ async def test_form( assert len(mock_setup_entry.mock_calls) == 1 +@pytest.mark.usefixtures("discovery") +async def test_async_step_user_device_added_between_steps( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: + """Test the device gets added via another flow between steps.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + + config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.usefixtures("mock_setup_entry") async def test_form_no_device_discovered( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - discovery: MagicMock, + hass: HomeAssistant, discovery: MagicMock ) -> None: """Test setup with no device discoveries.""" discovery.return_value = [] @@ -48,7 +74,7 @@ async def test_form_no_device_discovered( async def test_async_step_bluetooth(hass: HomeAssistant) -> None: - """Test discovery via bluetooth..""" + """Test discovery via bluetooth.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_BLUETOOTH}, @@ -64,3 +90,19 @@ async def test_async_step_bluetooth(hass: HomeAssistant) -> None: assert result["title"] == DEFAULT_NAME assert result["data"] == {} assert result["result"].unique_id == "c0:ff:ee:c0:ff:ee" + + +async def test_async_step_bluetooth_devices_already_setup( + hass: HomeAssistant, 
config_entry: AsyncMock +) -> None: + """Test we can't start a flow if there is already a config entry.""" + + config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_BLUETOOTH}, + data=PINECIL_SERVICE_INFO, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" From 51bead32293308882d066b64c41bdf7ae22f7846 Mon Sep 17 00:00:00 2001 From: Thomas55555 <59625598+Thomas55555@users.noreply.github.com> Date: Wed, 18 Dec 2024 19:34:49 +0100 Subject: [PATCH 0841/1198] Update number platform values before add in APSystems and add tests (#131938) Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- homeassistant/components/apsystems/number.py | 2 +- tests/components/apsystems/conftest.py | 1 + .../apsystems/snapshots/test_number.ambr | 58 ++++++++++++++++++ tests/components/apsystems/test_number.py | 61 +++++++++++++++++++ 4 files changed, 121 insertions(+), 1 deletion(-) create mode 100644 tests/components/apsystems/snapshots/test_number.ambr create mode 100644 tests/components/apsystems/test_number.py diff --git a/homeassistant/components/apsystems/number.py b/homeassistant/components/apsystems/number.py index 01e991f5188..6463d10f3e8 100644 --- a/homeassistant/components/apsystems/number.py +++ b/homeassistant/components/apsystems/number.py @@ -20,7 +20,7 @@ async def async_setup_entry( ) -> None: """Set up the sensor platform.""" - add_entities([ApSystemsMaxOutputNumber(config_entry.runtime_data)]) + add_entities([ApSystemsMaxOutputNumber(config_entry.runtime_data)], True) class ApSystemsMaxOutputNumber(ApSystemsEntity, NumberEntity): diff --git a/tests/components/apsystems/conftest.py b/tests/components/apsystems/conftest.py index 0feccf21578..7a48ff7db3f 100644 --- a/tests/components/apsystems/conftest.py +++ b/tests/components/apsystems/conftest.py @@ -59,6 +59,7 @@ def mock_apsystems() -> Generator[MagicMock]: operating=False, ) mock_api.get_device_power_status.return_value = True + mock_api.get_max_power.return_value = 666 yield mock_api diff --git a/tests/components/apsystems/snapshots/test_number.ambr b/tests/components/apsystems/snapshots/test_number.ambr new file mode 100644 index 00000000000..a2b82e23596 --- /dev/null +++ b/tests/components/apsystems/snapshots/test_number.ambr @@ -0,0 +1,58 @@ +# serializer version: 1 +# name: test_all_entities[number.mock_title_max_output-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 1000, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.mock_title_max_output', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Max output', + 'platform': 'apsystems', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'max_output', + 'unique_id': 'MY_SERIAL_NUMBER_output_limit', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[number.mock_title_max_output-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Mock Title Max output', + 'max': 1000, + 'min': 0, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'number.mock_title_max_output', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '666', + }) +# --- diff --git a/tests/components/apsystems/test_number.py b/tests/components/apsystems/test_number.py new file mode 100644 index 00000000000..5868bd3da34 --- /dev/null +++ b/tests/components/apsystems/test_number.py @@ -0,0 +1,61 @@ +"""Test the APSystem number module.""" + +import datetime +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + +SCAN_INTERVAL = datetime.timedelta(seconds=30) + + +async def test_number( + hass: HomeAssistant, + mock_apsystems: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test number command.""" + await setup_integration(hass, mock_config_entry) + entity_id = "number.mock_title_max_output" + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + service_data={ATTR_VALUE: 50.1}, + target={ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mock_apsystems.set_max_power.assert_called_once_with(50) + mock_apsystems.get_max_power.return_value = 50 + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.state == "50" + + +@pytest.mark.usefixtures("mock_apsystems") +@patch("homeassistant.components.apsystems.PLATFORMS", [Platform.NUMBER]) +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_integration(hass, mock_config_entry) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) From 4daf6dd41d11443f3c0f36b18401fffc1f979768 Mon Sep 17 00:00:00 2001 From: Joakim Plate Date: Wed, 18 Dec 2024 19:39:35 +0100 Subject: [PATCH 0842/1198] Bump gardena_bluetooth to 1.5.0 (#133502) --- homeassistant/components/gardena_bluetooth/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/gardena_bluetooth/manifest.json b/homeassistant/components/gardena_bluetooth/manifest.json index da5c08c38c5..28bba1015f5 100644 --- a/homeassistant/components/gardena_bluetooth/manifest.json +++ b/homeassistant/components/gardena_bluetooth/manifest.json @@ -14,5 +14,5 @@ "documentation": "https://www.home-assistant.io/integrations/gardena_bluetooth", "iot_class": "local_polling", "loggers": ["bleak", "bleak_esphome", "gardena_bluetooth"], - "requirements": ["gardena-bluetooth==1.4.4"] + "requirements": ["gardena-bluetooth==1.5.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 1b1938b2e4f..79f1411ea42 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -956,7 +956,7 @@ fyta_cli==0.7.0 gTTS==2.2.4 # homeassistant.components.gardena_bluetooth -gardena-bluetooth==1.4.4 +gardena-bluetooth==1.5.0 # homeassistant.components.google_assistant_sdk gassist-text==0.0.11 diff --git a/requirements_test_all.txt 
b/requirements_test_all.txt index 93a7979600d..32e815babdd 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -809,7 +809,7 @@ fyta_cli==0.7.0 gTTS==2.2.4 # homeassistant.components.gardena_bluetooth -gardena-bluetooth==1.4.4 +gardena-bluetooth==1.5.0 # homeassistant.components.google_assistant_sdk gassist-text==0.0.11 From 0ff2a0d66db24887a6d5213a5268b36443322547 Mon Sep 17 00:00:00 2001 From: mvn23 Date: Wed, 18 Dec 2024 19:46:30 +0100 Subject: [PATCH 0843/1198] Add "cancel room setpoint override" button to opentherm_gw (#132162) --- .../components/opentherm_gw/button.py | 13 ++++++- .../components/opentherm_gw/strings.json | 5 +++ tests/components/opentherm_gw/test_button.py | 34 +++++++++++++++++++ 3 files changed, 51 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/opentherm_gw/button.py b/homeassistant/components/opentherm_gw/button.py index bac50295199..00b91ad33e0 100644 --- a/homeassistant/components/opentherm_gw/button.py +++ b/homeassistant/components/opentherm_gw/button.py @@ -16,7 +16,12 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import OpenThermGatewayHub -from .const import DATA_GATEWAYS, DATA_OPENTHERM_GW, GATEWAY_DEVICE_DESCRIPTION +from .const import ( + DATA_GATEWAYS, + DATA_OPENTHERM_GW, + GATEWAY_DEVICE_DESCRIPTION, + THERMOSTAT_DEVICE_DESCRIPTION, +) from .entity import OpenThermEntity, OpenThermEntityDescription @@ -30,6 +35,12 @@ class OpenThermButtonEntityDescription( BUTTON_DESCRIPTIONS: tuple[OpenThermButtonEntityDescription, ...] = ( + OpenThermButtonEntityDescription( + key="cancel_room_setpoint_override", + translation_key="cancel_room_setpoint_override", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + action=lambda hub: hub.set_room_setpoint(0), + ), OpenThermButtonEntityDescription( key="restart_button", device_class=ButtonDeviceClass.RESTART, diff --git a/homeassistant/components/opentherm_gw/strings.json b/homeassistant/components/opentherm_gw/strings.json index 834168eb113..4c452da41ae 100644 --- a/homeassistant/components/opentherm_gw/strings.json +++ b/homeassistant/components/opentherm_gw/strings.json @@ -158,6 +158,11 @@ "name": "Programmed change has priority over override" } }, + "button": { + "cancel_room_setpoint_override": { + "name": "Cancel room setpoint override" + } + }, "select": { "gpio_mode_n": { "name": "GPIO {gpio_id} mode", diff --git a/tests/components/opentherm_gw/test_button.py b/tests/components/opentherm_gw/test_button.py index b02a9d9fef0..d8de52559e7 100644 --- a/tests/components/opentherm_gw/test_button.py +++ b/tests/components/opentherm_gw/test_button.py @@ -16,6 +16,40 @@ from .conftest import MINIMAL_STATUS from tests.common import MockConfigEntry +async def test_cancel_room_setpoint_override_button( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, + mock_pyotgw: MagicMock, +) -> None: + """Test cancel room setpoint override button.""" + + mock_pyotgw.return_value.set_target_temp = AsyncMock(return_value=0) + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert ( + button_entity_id := entity_registry.async_get_entity_id( + BUTTON_DOMAIN, + OPENTHERM_DOMAIN, + f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.THERMOSTAT}-cancel_room_setpoint_override", + ) + ) is not None + + await hass.services.async_call( + BUTTON_DOMAIN, + 
SERVICE_PRESS, + { + ATTR_ENTITY_ID: button_entity_id, + }, + blocking=True, + ) + + mock_pyotgw.return_value.set_target_temp.assert_awaited_once_with(0, True) + + async def test_restart_button( hass: HomeAssistant, entity_registry: er.EntityRegistry, From 3a8b0b3ea6c3111b605c35c0dec1a6b8833df0af Mon Sep 17 00:00:00 2001 From: Shay Levy Date: Wed, 18 Dec 2024 20:46:52 +0200 Subject: [PATCH 0844/1198] Use Switcher _async_call_api in climate (#133230) --- .../components/switcher_kis/climate.py | 25 +++---------------- .../components/switcher_kis/entity.py | 7 +++--- 2 files changed, 7 insertions(+), 25 deletions(-) diff --git a/homeassistant/components/switcher_kis/climate.py b/homeassistant/components/switcher_kis/climate.py index 5285e7549ef..2fc4a331676 100644 --- a/homeassistant/components/switcher_kis/climate.py +++ b/homeassistant/components/switcher_kis/climate.py @@ -4,7 +4,6 @@ from __future__ import annotations from typing import Any, cast -from aioswitcher.api import SwitcherApi, SwitcherBaseResponse from aioswitcher.api.remotes import SwitcherBreezeRemote from aioswitcher.device import ( DeviceCategory, @@ -38,6 +37,8 @@ from .coordinator import SwitcherDataUpdateCoordinator from .entity import SwitcherEntity from .utils import get_breeze_remote_manager +API_CONTROL_BREEZE_DEVICE = "control_breeze_device" + DEVICE_MODE_TO_HA = { ThermostatMode.COOL: HVACMode.COOL, ThermostatMode.HEAT: HVACMode.HEAT, @@ -155,27 +156,7 @@ class SwitcherClimateEntity(SwitcherEntity, ClimateEntity): async def _async_control_breeze_device(self, **kwargs: Any) -> None: """Call Switcher Control Breeze API.""" - response: SwitcherBaseResponse | None = None - error = None - - try: - async with SwitcherApi( - self.coordinator.data.device_type, - self.coordinator.data.ip_address, - self.coordinator.data.device_id, - self.coordinator.data.device_key, - ) as swapi: - response = await swapi.control_breeze_device(self._remote, **kwargs) - except (TimeoutError, OSError, RuntimeError) as err: - error = repr(err) - - if error or not response or not response.successful: - self.coordinator.last_update_success = False - self.async_write_ha_state() - raise HomeAssistantError( - f"Call Breeze control for {self.name} failed, " - f"response/error: {response or error}" - ) + await self._async_call_api(API_CONTROL_BREEZE_DEVICE, self._remote, **kwargs) async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" diff --git a/homeassistant/components/switcher_kis/entity.py b/homeassistant/components/switcher_kis/entity.py index e24f59a4a1c..82b892d548d 100644 --- a/homeassistant/components/switcher_kis/entity.py +++ b/homeassistant/components/switcher_kis/entity.py @@ -3,7 +3,8 @@ import logging from typing import Any -from aioswitcher.api import SwitcherApi, SwitcherBaseResponse +from aioswitcher.api import SwitcherApi +from aioswitcher.api.messages import SwitcherBaseResponse from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr @@ -27,7 +28,7 @@ class SwitcherEntity(CoordinatorEntity[SwitcherDataUpdateCoordinator]): connections={(dr.CONNECTION_NETWORK_MAC, coordinator.mac_address)} ) - async def _async_call_api(self, api: str, *args: Any) -> None: + async def _async_call_api(self, api: str, *args: Any, **kwargs: Any) -> None: """Call Switcher API.""" _LOGGER.debug("Calling api for %s, api: '%s', args: %s", self.name, api, args) response: SwitcherBaseResponse | None = None @@ -41,7 +42,7 @@ class 
SwitcherEntity(CoordinatorEntity[SwitcherDataUpdateCoordinator]): self.coordinator.data.device_key, self.coordinator.token, ) as swapi: - response = await getattr(swapi, api)(*args) + response = await getattr(swapi, api)(*args, **kwargs) except (TimeoutError, OSError, RuntimeError) as err: error = repr(err) From b7ff27122ad711316551a8d7ac07e62c41e0d7eb Mon Sep 17 00:00:00 2001 From: IceBotYT <34712694+IceBotYT@users.noreply.github.com> Date: Wed, 18 Dec 2024 13:47:41 -0500 Subject: [PATCH 0845/1198] Add support for Nice G.O. HAE00080 wall station (#133186) --- homeassistant/components/nice_go/const.py | 4 ++-- homeassistant/components/nice_go/cover.py | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/nice_go/const.py b/homeassistant/components/nice_go/const.py index a6635368f7b..c02bcb3c234 100644 --- a/homeassistant/components/nice_go/const.py +++ b/homeassistant/components/nice_go/const.py @@ -15,8 +15,8 @@ CONF_REFRESH_TOKEN_CREATION_TIME = "refresh_token_creation_time" REFRESH_TOKEN_EXPIRY_TIME = timedelta(days=30) SUPPORTED_DEVICE_TYPES = { - Platform.LIGHT: ["WallStation"], - Platform.SWITCH: ["WallStation"], + Platform.LIGHT: ["WallStation", "WallStation_ESP32"], + Platform.SWITCH: ["WallStation", "WallStation_ESP32"], } KNOWN_UNSUPPORTED_DEVICE_TYPES = { Platform.LIGHT: ["Mms100"], diff --git a/homeassistant/components/nice_go/cover.py b/homeassistant/components/nice_go/cover.py index a823e931804..6360e398b96 100644 --- a/homeassistant/components/nice_go/cover.py +++ b/homeassistant/components/nice_go/cover.py @@ -21,6 +21,7 @@ from .entity import NiceGOEntity DEVICE_CLASSES = { "WallStation": CoverDeviceClass.GARAGE, "Mms100": CoverDeviceClass.GATE, + "WallStation_ESP32": CoverDeviceClass.GARAGE, } PARALLEL_UPDATES = 1 From c8f050ecbcd60712da41b07ebae3ff8af43b4b1f Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Wed, 18 Dec 2024 20:08:57 +0100 Subject: [PATCH 0846/1198] Fix the local_file.update_file_path action's name and description (#133509) --- homeassistant/components/local_file/strings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/local_file/strings.json b/homeassistant/components/local_file/strings.json index abf31a6f94e..393cc5f2e46 100644 --- a/homeassistant/components/local_file/strings.json +++ b/homeassistant/components/local_file/strings.json @@ -39,8 +39,8 @@ }, "services": { "update_file_path": { - "name": "Updates file path", - "description": "Use this action to change the file displayed by the camera.", + "name": "Update file path", + "description": "Changes the file displayed by the camera.", "fields": { "file_path": { "name": "File path", From 19e6867f1ae078d84ff3e1973d703c4b86504b89 Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Wed, 18 Dec 2024 20:22:33 +0100 Subject: [PATCH 0847/1198] Reolink translate errors (#132301) --- homeassistant/components/reolink/__init__.py | 4 +- homeassistant/components/reolink/button.py | 26 ++-- homeassistant/components/reolink/camera.py | 14 +-- homeassistant/components/reolink/light.py | 47 +++---- homeassistant/components/reolink/number.py | 28 ++--- .../components/reolink/quality_scale.yaml | 2 +- homeassistant/components/reolink/select.py | 20 +-- homeassistant/components/reolink/services.py | 13 +- homeassistant/components/reolink/siren.py | 26 +--- homeassistant/components/reolink/strings.json | 39 ++++++ homeassistant/components/reolink/switch.py | 40 ++---- homeassistant/components/reolink/update.py | 5 +- 
homeassistant/components/reolink/util.py | 104 ++++++++++++++++ tests/components/reolink/test_util.py | 115 ++++++++++++++++++ 14 files changed, 331 insertions(+), 152 deletions(-) create mode 100644 tests/components/reolink/test_util.py diff --git a/homeassistant/components/reolink/__init__.py b/homeassistant/components/reolink/__init__.py index ae0badb3d84..29dfb4ee57b 100644 --- a/homeassistant/components/reolink/__init__.py +++ b/homeassistant/components/reolink/__init__.py @@ -73,7 +73,9 @@ async def async_setup_entry( ) as err: await host.stop() raise ConfigEntryNotReady( - f"Error while trying to setup {host.api.host}:{host.api.port}: {err!s}" + translation_domain=DOMAIN, + translation_key="config_entry_not_ready", + translation_placeholders={"host": host.api.host, "err": str(err)}, ) from err except BaseException: await host.stop() diff --git a/homeassistant/components/reolink/button.py b/homeassistant/components/reolink/button.py index cd1e1b05fae..6b1fcc65a2f 100644 --- a/homeassistant/components/reolink/button.py +++ b/homeassistant/components/reolink/button.py @@ -7,7 +7,6 @@ from dataclasses import dataclass from typing import Any from reolink_aio.api import GuardEnum, Host, PtzEnum -from reolink_aio.exceptions import ReolinkError import voluptuous as vol from homeassistant.components.button import ( @@ -18,7 +17,6 @@ from homeassistant.components.button import ( from homeassistant.components.camera import CameraEntityFeature from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import ( AddEntitiesCallback, @@ -31,7 +29,7 @@ from .entity import ( ReolinkHostCoordinatorEntity, ReolinkHostEntityDescription, ) -from .util import ReolinkConfigEntry, ReolinkData +from .util import ReolinkConfigEntry, ReolinkData, raise_translated_error PARALLEL_UPDATES = 0 ATTR_SPEED = "speed" @@ -205,22 +203,18 @@ class ReolinkButtonEntity(ReolinkChannelCoordinatorEntity, ButtonEntity): ): self._attr_supported_features = SUPPORT_PTZ_SPEED + @raise_translated_error async def async_press(self) -> None: """Execute the button action.""" - try: - await self.entity_description.method(self._host.api, self._channel) - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.method(self._host.api, self._channel) + @raise_translated_error async def async_ptz_move(self, **kwargs: Any) -> None: """PTZ move with speed.""" speed = kwargs[ATTR_SPEED] - try: - await self._host.api.set_ptz_command( - self._channel, command=self.entity_description.ptz_cmd, speed=speed - ) - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self._host.api.set_ptz_command( + self._channel, command=self.entity_description.ptz_cmd, speed=speed + ) class ReolinkHostButtonEntity(ReolinkHostCoordinatorEntity, ButtonEntity): @@ -237,9 +231,7 @@ class ReolinkHostButtonEntity(ReolinkHostCoordinatorEntity, ButtonEntity): self.entity_description = entity_description super().__init__(reolink_data) + @raise_translated_error async def async_press(self) -> None: """Execute the button action.""" - try: - await self.entity_description.method(self._host.api) - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.method(self._host.api) diff --git a/homeassistant/components/reolink/camera.py b/homeassistant/components/reolink/camera.py index 
26ef0b0f4fc..d9b3cb67f70 100644 --- a/homeassistant/components/reolink/camera.py +++ b/homeassistant/components/reolink/camera.py @@ -6,7 +6,6 @@ from dataclasses import dataclass import logging from reolink_aio.api import DUAL_LENS_MODELS -from reolink_aio.exceptions import ReolinkError from homeassistant.components.camera import ( Camera, @@ -14,11 +13,10 @@ from homeassistant.components.camera import ( CameraEntityFeature, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .entity import ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription -from .util import ReolinkConfigEntry, ReolinkData +from .util import ReolinkConfigEntry, ReolinkData, raise_translated_error _LOGGER = logging.getLogger(__name__) PARALLEL_UPDATES = 0 @@ -142,13 +140,11 @@ class ReolinkCamera(ReolinkChannelCoordinatorEntity, Camera): self._channel, self.entity_description.stream ) + @raise_translated_error async def async_camera_image( self, width: int | None = None, height: int | None = None ) -> bytes | None: """Return a still image response from the camera.""" - try: - return await self._host.api.get_snapshot( - self._channel, self.entity_description.stream - ) - except ReolinkError as err: - raise HomeAssistantError(err) from err + return await self._host.api.get_snapshot( + self._channel, self.entity_description.stream + ) diff --git a/homeassistant/components/reolink/light.py b/homeassistant/components/reolink/light.py index 3bd9a120798..bbb9592dd76 100644 --- a/homeassistant/components/reolink/light.py +++ b/homeassistant/components/reolink/light.py @@ -7,7 +7,6 @@ from dataclasses import dataclass from typing import Any from reolink_aio.api import Host -from reolink_aio.exceptions import InvalidParameterError, ReolinkError from homeassistant.components.light import ( ATTR_BRIGHTNESS, @@ -17,7 +16,6 @@ from homeassistant.components.light import ( ) from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .entity import ( @@ -26,7 +24,7 @@ from .entity import ( ReolinkHostCoordinatorEntity, ReolinkHostEntityDescription, ) -from .util import ReolinkConfigEntry, ReolinkData +from .util import ReolinkConfigEntry, ReolinkData, raise_translated_error PARALLEL_UPDATES = 0 @@ -154,37 +152,28 @@ class ReolinkLightEntity(ReolinkChannelCoordinatorEntity, LightEntity): return round(255 * bright_pct / 100.0) + @raise_translated_error async def async_turn_off(self, **kwargs: Any) -> None: """Turn light off.""" - try: - await self.entity_description.turn_on_off_fn( - self._host.api, self._channel, False - ) - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.turn_on_off_fn( + self._host.api, self._channel, False + ) self.async_write_ha_state() + @raise_translated_error async def async_turn_on(self, **kwargs: Any) -> None: """Turn light on.""" if ( brightness := kwargs.get(ATTR_BRIGHTNESS) ) is not None and self.entity_description.set_brightness_fn is not None: brightness_pct = int(brightness / 255.0 * 100) - try: - await self.entity_description.set_brightness_fn( - self._host.api, self._channel, brightness_pct - ) - except InvalidParameterError as err: - raise ServiceValidationError(err) from err - except ReolinkError as err: - raise 
HomeAssistantError(err) from err - - try: - await self.entity_description.turn_on_off_fn( - self._host.api, self._channel, True + await self.entity_description.set_brightness_fn( + self._host.api, self._channel, brightness_pct ) - except ReolinkError as err: - raise HomeAssistantError(err) from err + + await self.entity_description.turn_on_off_fn( + self._host.api, self._channel, True + ) self.async_write_ha_state() @@ -209,18 +198,14 @@ class ReolinkHostLightEntity(ReolinkHostCoordinatorEntity, LightEntity): """Return true if light is on.""" return self.entity_description.is_on_fn(self._host.api) + @raise_translated_error async def async_turn_off(self, **kwargs: Any) -> None: """Turn light off.""" - try: - await self.entity_description.turn_on_off_fn(self._host.api, False) - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.turn_on_off_fn(self._host.api, False) self.async_write_ha_state() + @raise_translated_error async def async_turn_on(self, **kwargs: Any) -> None: """Turn light on.""" - try: - await self.entity_description.turn_on_off_fn(self._host.api, True) - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.turn_on_off_fn(self._host.api, True) self.async_write_ha_state() diff --git a/homeassistant/components/reolink/number.py b/homeassistant/components/reolink/number.py index 692b43bca9e..e4b52c85d45 100644 --- a/homeassistant/components/reolink/number.py +++ b/homeassistant/components/reolink/number.py @@ -7,7 +7,6 @@ from dataclasses import dataclass from typing import Any from reolink_aio.api import Chime, Host -from reolink_aio.exceptions import InvalidParameterError, ReolinkError from homeassistant.components.number import ( NumberEntity, @@ -16,7 +15,6 @@ from homeassistant.components.number import ( ) from homeassistant.const import EntityCategory, UnitOfTime from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .entity import ( @@ -27,7 +25,7 @@ from .entity import ( ReolinkHostCoordinatorEntity, ReolinkHostEntityDescription, ) -from .util import ReolinkConfigEntry, ReolinkData +from .util import ReolinkConfigEntry, ReolinkData, raise_translated_error PARALLEL_UPDATES = 0 @@ -589,14 +587,10 @@ class ReolinkNumberEntity(ReolinkChannelCoordinatorEntity, NumberEntity): """State of the number entity.""" return self.entity_description.value(self._host.api, self._channel) + @raise_translated_error async def async_set_native_value(self, value: float) -> None: """Update the current value.""" - try: - await self.entity_description.method(self._host.api, self._channel, value) - except InvalidParameterError as err: - raise ServiceValidationError(err) from err - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.method(self._host.api, self._channel, value) self.async_write_ha_state() @@ -621,14 +615,10 @@ class ReolinkHostNumberEntity(ReolinkHostCoordinatorEntity, NumberEntity): """State of the number entity.""" return self.entity_description.value(self._host.api) + @raise_translated_error async def async_set_native_value(self, value: float) -> None: """Update the current value.""" - try: - await self.entity_description.method(self._host.api, value) - except InvalidParameterError as err: - raise ServiceValidationError(err) from err - except ReolinkError as err: - raise HomeAssistantError(err) 
from err + await self.entity_description.method(self._host.api, value) self.async_write_ha_state() @@ -654,12 +644,8 @@ class ReolinkChimeNumberEntity(ReolinkChimeCoordinatorEntity, NumberEntity): """State of the number entity.""" return self.entity_description.value(self._chime) + @raise_translated_error async def async_set_native_value(self, value: float) -> None: """Update the current value.""" - try: - await self.entity_description.method(self._chime, value) - except InvalidParameterError as err: - raise ServiceValidationError(err) from err - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.method(self._chime, value) self.async_write_ha_state() diff --git a/homeassistant/components/reolink/quality_scale.yaml b/homeassistant/components/reolink/quality_scale.yaml index 540cf19e22a..5cc054b7a4c 100644 --- a/homeassistant/components/reolink/quality_scale.yaml +++ b/homeassistant/components/reolink/quality_scale.yaml @@ -54,7 +54,7 @@ rules: entity-device-class: done entity-disabled-by-default: done entity-translations: done - exception-translations: todo + exception-translations: done icon-translations: done reconfiguration-flow: done repair-issues: done diff --git a/homeassistant/components/reolink/select.py b/homeassistant/components/reolink/select.py index 8625f7fb600..7a74be2e28c 100644 --- a/homeassistant/components/reolink/select.py +++ b/homeassistant/components/reolink/select.py @@ -19,12 +19,10 @@ from reolink_aio.api import ( StatusLedEnum, TrackMethodEnum, ) -from reolink_aio.exceptions import InvalidParameterError, ReolinkError from homeassistant.components.select import SelectEntity, SelectEntityDescription from homeassistant.const import EntityCategory, UnitOfDataRate, UnitOfFrequency from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .entity import ( @@ -33,7 +31,7 @@ from .entity import ( ReolinkChimeCoordinatorEntity, ReolinkChimeEntityDescription, ) -from .util import ReolinkConfigEntry, ReolinkData +from .util import ReolinkConfigEntry, ReolinkData, raise_translated_error _LOGGER = logging.getLogger(__name__) PARALLEL_UPDATES = 0 @@ -354,14 +352,10 @@ class ReolinkSelectEntity(ReolinkChannelCoordinatorEntity, SelectEntity): self._log_error = True return option + @raise_translated_error async def async_select_option(self, option: str) -> None: """Change the selected option.""" - try: - await self.entity_description.method(self._host.api, self._channel, option) - except InvalidParameterError as err: - raise ServiceValidationError(err) from err - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.method(self._host.api, self._channel, option) self.async_write_ha_state() @@ -396,12 +390,8 @@ class ReolinkChimeSelectEntity(ReolinkChimeCoordinatorEntity, SelectEntity): self._log_error = True return option + @raise_translated_error async def async_select_option(self, option: str) -> None: """Change the selected option.""" - try: - await self.entity_description.method(self._chime, option) - except InvalidParameterError as err: - raise ServiceValidationError(err) from err - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.method(self._chime, option) self.async_write_ha_state() diff --git a/homeassistant/components/reolink/services.py b/homeassistant/components/reolink/services.py 
index 326093e7a93..acd31fe0d7d 100644 --- a/homeassistant/components/reolink/services.py +++ b/homeassistant/components/reolink/services.py @@ -4,18 +4,17 @@ from __future__ import annotations from reolink_aio.api import Chime from reolink_aio.enums import ChimeToneEnum -from reolink_aio.exceptions import InvalidParameterError, ReolinkError import voluptuous as vol from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_DEVICE_ID from homeassistant.core import HomeAssistant, ServiceCall, callback -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import device_registry as dr from .const import DOMAIN from .host import ReolinkHost -from .util import get_device_uid_and_ch +from .util import get_device_uid_and_ch, raise_translated_error ATTR_RINGTONE = "ringtone" @@ -24,6 +23,7 @@ ATTR_RINGTONE = "ringtone" def async_setup_services(hass: HomeAssistant) -> None: """Set up Reolink services.""" + @raise_translated_error async def async_play_chime(service_call: ServiceCall) -> None: """Play a ringtone.""" service_data = service_call.data @@ -58,12 +58,7 @@ def async_setup_services(hass: HomeAssistant) -> None: ) ringtone = service_data[ATTR_RINGTONE] - try: - await chime.play(ChimeToneEnum[ringtone].value) - except InvalidParameterError as err: - raise ServiceValidationError(err) from err - except ReolinkError as err: - raise HomeAssistantError(err) from err + await chime.play(ChimeToneEnum[ringtone].value) hass.services.async_register( DOMAIN, diff --git a/homeassistant/components/reolink/siren.py b/homeassistant/components/reolink/siren.py index cb12eb5d38c..74bb227d078 100644 --- a/homeassistant/components/reolink/siren.py +++ b/homeassistant/components/reolink/siren.py @@ -5,8 +5,6 @@ from __future__ import annotations from dataclasses import dataclass from typing import Any -from reolink_aio.exceptions import InvalidParameterError, ReolinkError - from homeassistant.components.siren import ( ATTR_DURATION, ATTR_VOLUME_LEVEL, @@ -15,11 +13,10 @@ from homeassistant.components.siren import ( SirenEntityFeature, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .entity import ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription -from .util import ReolinkConfigEntry, ReolinkData +from .util import ReolinkConfigEntry, ReolinkData, raise_translated_error PARALLEL_UPDATES = 0 @@ -77,26 +74,15 @@ class ReolinkSirenEntity(ReolinkChannelCoordinatorEntity, SirenEntity): self.entity_description = entity_description super().__init__(reolink_data, channel) + @raise_translated_error async def async_turn_on(self, **kwargs: Any) -> None: """Turn on the siren.""" if (volume := kwargs.get(ATTR_VOLUME_LEVEL)) is not None: - try: - await self._host.api.set_volume(self._channel, int(volume * 100)) - except InvalidParameterError as err: - raise ServiceValidationError(err) from err - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self._host.api.set_volume(self._channel, int(volume * 100)) duration = kwargs.get(ATTR_DURATION) - try: - await self._host.api.set_siren(self._channel, True, duration) - except InvalidParameterError as err: - raise ServiceValidationError(err) from err - except ReolinkError as err: - raise HomeAssistantError(err) from err + await 
self._host.api.set_siren(self._channel, True, duration) + @raise_translated_error async def async_turn_off(self, **kwargs: Any) -> None: """Turn off the siren.""" - try: - await self._host.api.set_siren(self._channel, False, None) - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self._host.api.set_siren(self._channel, False, None) diff --git a/homeassistant/components/reolink/strings.json b/homeassistant/components/reolink/strings.json index ac73581ce22..53152131bdb 100644 --- a/homeassistant/components/reolink/strings.json +++ b/homeassistant/components/reolink/strings.json @@ -55,6 +55,45 @@ }, "service_not_chime": { "message": "Reolink play_chime error: {device_name} is not a chime" + }, + "invalid_parameter": { + "message": "Invalid input parameter: {err}" + }, + "api_error": { + "message": "The device responded with a error: {err}" + }, + "invalid_content_type": { + "message": "Received a different content type than expected: {err}" + }, + "invalid_credentials": { + "message": "Invalid credentials: {err}" + }, + "login_error": { + "message": "Error during login attempt: {err}" + }, + "no_data": { + "message": "Device returned no data: {err}" + }, + "unexpected_data": { + "message": "Device returned unexpected data: {err}" + }, + "not_supported": { + "message": "Function not supported by this device: {err}" + }, + "subscription_error": { + "message": "Error during ONVIF subscription: {err}" + }, + "connection_error": { + "message": "Could not connect to the device: {err}" + }, + "timeout": { + "message": "Timeout waiting on a response: {err}" + }, + "firmware_install_error": { + "message": "Error trying to update Reolink firmware: {err}" + }, + "config_entry_not_ready": { + "message": "Error while trying to setup {host}: {err}" } }, "issues": { diff --git a/homeassistant/components/reolink/switch.py b/homeassistant/components/reolink/switch.py index c274609599d..b970d04c257 100644 --- a/homeassistant/components/reolink/switch.py +++ b/homeassistant/components/reolink/switch.py @@ -7,12 +7,10 @@ from dataclasses import dataclass from typing import Any from reolink_aio.api import Chime, Host -from reolink_aio.exceptions import ReolinkError from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er, issue_registry as ir from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -25,7 +23,7 @@ from .entity import ( ReolinkHostCoordinatorEntity, ReolinkHostEntityDescription, ) -from .util import ReolinkConfigEntry, ReolinkData +from .util import ReolinkConfigEntry, ReolinkData, raise_translated_error PARALLEL_UPDATES = 0 @@ -430,20 +428,16 @@ class ReolinkSwitchEntity(ReolinkChannelCoordinatorEntity, SwitchEntity): """Return true if switch is on.""" return self.entity_description.value(self._host.api, self._channel) + @raise_translated_error async def async_turn_on(self, **kwargs: Any) -> None: """Turn the entity on.""" - try: - await self.entity_description.method(self._host.api, self._channel, True) - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.method(self._host.api, self._channel, True) self.async_write_ha_state() + @raise_translated_error async def async_turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" - try: - await 
self.entity_description.method(self._host.api, self._channel, False) - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.method(self._host.api, self._channel, False) self.async_write_ha_state() @@ -466,20 +460,16 @@ class ReolinkNVRSwitchEntity(ReolinkHostCoordinatorEntity, SwitchEntity): """Return true if switch is on.""" return self.entity_description.value(self._host.api) + @raise_translated_error async def async_turn_on(self, **kwargs: Any) -> None: """Turn the entity on.""" - try: - await self.entity_description.method(self._host.api, True) - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.method(self._host.api, True) self.async_write_ha_state() + @raise_translated_error async def async_turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" - try: - await self.entity_description.method(self._host.api, False) - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.method(self._host.api, False) self.async_write_ha_state() @@ -503,18 +493,14 @@ class ReolinkChimeSwitchEntity(ReolinkChimeCoordinatorEntity, SwitchEntity): """Return true if switch is on.""" return self.entity_description.value(self._chime) + @raise_translated_error async def async_turn_on(self, **kwargs: Any) -> None: """Turn the entity on.""" - try: - await self.entity_description.method(self._chime, True) - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.method(self._chime, True) self.async_write_ha_state() + @raise_translated_error async def async_turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" - try: - await self.entity_description.method(self._chime, False) - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.method(self._chime, False) self.async_write_ha_state() diff --git a/homeassistant/components/reolink/update.py b/homeassistant/components/reolink/update.py index aa607e2b29e..5a8c7d7dc08 100644 --- a/homeassistant/components/reolink/update.py +++ b/homeassistant/components/reolink/update.py @@ -24,6 +24,7 @@ from homeassistant.helpers.update_coordinator import ( ) from . 
import DEVICE_UPDATE_INTERVAL +from .const import DOMAIN from .entity import ( ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription, @@ -196,7 +197,9 @@ class ReolinkUpdateBaseEntity( await self._host.api.update_firmware(self._channel) except ReolinkError as err: raise HomeAssistantError( - f"Error trying to update Reolink firmware: {err}" + translation_domain=DOMAIN, + translation_key="firmware_install_error", + translation_placeholders={"err": str(err)}, ) from err finally: self.async_write_ha_state() diff --git a/homeassistant/components/reolink/util.py b/homeassistant/components/reolink/util.py index 98c0e7b925b..1a6eab3f61d 100644 --- a/homeassistant/components/reolink/util.py +++ b/homeassistant/components/reolink/util.py @@ -2,10 +2,28 @@ from __future__ import annotations +from collections.abc import Awaitable, Callable, Coroutine from dataclasses import dataclass +from typing import Any, ParamSpec, TypeVar + +from reolink_aio.exceptions import ( + ApiError, + CredentialsInvalidError, + InvalidContentTypeError, + InvalidParameterError, + LoginError, + NoDataError, + NotSupportedError, + ReolinkConnectionError, + ReolinkError, + ReolinkTimeoutError, + SubscriptionError, + UnexpectedDataError, +) from homeassistant import config_entries from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import device_registry as dr from homeassistant.helpers.update_coordinator import DataUpdateCoordinator @@ -53,3 +71,89 @@ def get_device_uid_and_ch( else: ch = host.api.channel_for_uid(device_uid[1]) return (device_uid, ch, is_chime) + + +T = TypeVar("T") +P = ParamSpec("P") + + +# Decorators +def raise_translated_error( + func: Callable[P, Awaitable[T]], +) -> Callable[P, Coroutine[Any, Any, T]]: + """Wrap a reolink-aio function to translate any potential errors.""" + + async def decorator_raise_translated_error(*args: P.args, **kwargs: P.kwargs) -> T: + """Try a reolink-aio function and translate any potential errors.""" + try: + return await func(*args, **kwargs) + except InvalidParameterError as err: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_parameter", + translation_placeholders={"err": str(err)}, + ) from err + except ApiError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="api_error", + translation_placeholders={"err": str(err)}, + ) from err + except InvalidContentTypeError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="invalid_content_type", + translation_placeholders={"err": str(err)}, + ) from err + except CredentialsInvalidError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="invalid_credentials", + translation_placeholders={"err": str(err)}, + ) from err + except LoginError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="login_error", + translation_placeholders={"err": str(err)}, + ) from err + except NoDataError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="no_data", + translation_placeholders={"err": str(err)}, + ) from err + except UnexpectedDataError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="unexpected_data", + translation_placeholders={"err": str(err)}, + ) from err + except NotSupportedError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="not_supported", + 
translation_placeholders={"err": str(err)}, + ) from err + except SubscriptionError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="subscription_error", + translation_placeholders={"err": str(err)}, + ) from err + except ReolinkConnectionError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="connection_error", + translation_placeholders={"err": str(err)}, + ) from err + except ReolinkTimeoutError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="timeout", + translation_placeholders={"err": str(err)}, + ) from err + except ReolinkError as err: + raise HomeAssistantError(err) from err + + return decorator_raise_translated_error diff --git a/tests/components/reolink/test_util.py b/tests/components/reolink/test_util.py new file mode 100644 index 00000000000..f66f4682b98 --- /dev/null +++ b/tests/components/reolink/test_util.py @@ -0,0 +1,115 @@ +"""Test the Reolink util functions.""" + +from unittest.mock import MagicMock, patch + +import pytest +from reolink_aio.exceptions import ( + ApiError, + CredentialsInvalidError, + InvalidContentTypeError, + InvalidParameterError, + LoginError, + NoDataError, + NotSupportedError, + ReolinkConnectionError, + ReolinkError, + ReolinkTimeoutError, + SubscriptionError, + UnexpectedDataError, +) + +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError + +from .conftest import TEST_NVR_NAME + +from tests.common import MockConfigEntry + + +@pytest.mark.parametrize( + ("side_effect", "expected"), + [ + ( + ApiError("Test error"), + HomeAssistantError, + ), + ( + CredentialsInvalidError("Test error"), + HomeAssistantError, + ), + ( + InvalidContentTypeError("Test error"), + HomeAssistantError, + ), + ( + InvalidParameterError("Test error"), + ServiceValidationError, + ), + ( + LoginError("Test error"), + HomeAssistantError, + ), + ( + NoDataError("Test error"), + HomeAssistantError, + ), + ( + NotSupportedError("Test error"), + HomeAssistantError, + ), + ( + ReolinkConnectionError("Test error"), + HomeAssistantError, + ), + ( + ReolinkError("Test error"), + HomeAssistantError, + ), + ( + ReolinkTimeoutError("Test error"), + HomeAssistantError, + ), + ( + SubscriptionError("Test error"), + HomeAssistantError, + ), + ( + UnexpectedDataError("Test error"), + HomeAssistantError, + ), + ], +) +async def test_try_function( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + side_effect: ReolinkError, + expected: Exception, +) -> None: + """Test try_function error translations using number entity.""" + reolink_connect.volume.return_value = 80 + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.NUMBER]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.NUMBER}.{TEST_NVR_NAME}_volume" + + reolink_connect.set_volume.side_effect = side_effect + with pytest.raises(expected): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 50}, + blocking=True, + ) + + reolink_connect.set_volume.reset_mock(side_effect=True) From 
8a8be71f96f57cc67f43d9335d6e33af5bc9c96a Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Wed, 18 Dec 2024 20:53:05 +0100 Subject: [PATCH 0848/1198] Add tests for cover and increase test coverage for slide_local (#133515) --- .../components/slide_local/quality_scale.yaml | 2 +- .../slide_local/snapshots/test_cover.ambr | 51 +++++ tests/components/slide_local/test_cover.py | 215 ++++++++++++++++++ tests/components/slide_local/test_init.py | 36 +++ 4 files changed, 303 insertions(+), 1 deletion(-) create mode 100644 tests/components/slide_local/snapshots/test_cover.ambr create mode 100644 tests/components/slide_local/test_cover.py diff --git a/homeassistant/components/slide_local/quality_scale.yaml b/homeassistant/components/slide_local/quality_scale.yaml index 4833f19e2b2..c3ce12efd80 100644 --- a/homeassistant/components/slide_local/quality_scale.yaml +++ b/homeassistant/components/slide_local/quality_scale.yaml @@ -28,7 +28,7 @@ rules: action-exceptions: done reauthentication-flow: todo parallel-updates: done - test-coverage: todo + test-coverage: done integration-owner: done docs-installation-parameters: done docs-configuration-parameters: done diff --git a/tests/components/slide_local/snapshots/test_cover.ambr b/tests/components/slide_local/snapshots/test_cover.ambr new file mode 100644 index 00000000000..d9283618a47 --- /dev/null +++ b/tests/components/slide_local/snapshots/test_cover.ambr @@ -0,0 +1,51 @@ +# serializer version: 1 +# name: test_all_entities[cover.slide_bedroom-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.slide_bedroom', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'slide_local', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '1234567890ab', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[cover.slide_bedroom-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assumed_state': True, + 'current_position': 100, + 'device_class': 'curtain', + 'friendly_name': 'slide bedroom', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.slide_bedroom', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- diff --git a/tests/components/slide_local/test_cover.py b/tests/components/slide_local/test_cover.py new file mode 100644 index 00000000000..e0e4a0741d8 --- /dev/null +++ b/tests/components/slide_local/test_cover.py @@ -0,0 +1,215 @@ +"""Tests for the Slide Local cover platform.""" + +from datetime import timedelta +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +from goslideapi.goslideapi import ClientConnectionError +from syrupy import SnapshotAssertion + +from homeassistant.components.cover import ( + ATTR_POSITION, + DOMAIN as COVER_DOMAIN, + SERVICE_CLOSE_COVER, + SERVICE_OPEN_COVER, + SERVICE_SET_COVER_POSITION, + SERVICE_STOP_COVER, + CoverState, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_platform +from .const import SLIDE_INFO_DATA + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_platform(hass, mock_config_entry, [Platform.COVER]) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_connection_error( + hass: HomeAssistant, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test connection error.""" + await setup_platform(hass, mock_config_entry, [Platform.COVER]) + + mock_slide_api.slide_info.side_effect = [ClientConnectionError, SLIDE_INFO_DATA] + + freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("cover.slide_bedroom").state == STATE_UNAVAILABLE + + freezer.tick(delta=timedelta(minutes=2)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("cover.slide_bedroom").state == CoverState.OPEN + + +async def test_state_change( + hass: HomeAssistant, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test connection error.""" + await setup_platform(hass, mock_config_entry, [Platform.COVER]) + + mock_slide_api.slide_info.side_effect = [ + dict(SLIDE_INFO_DATA, pos=0.0), + dict(SLIDE_INFO_DATA, pos=0.4), + dict(SLIDE_INFO_DATA, pos=1.0), + dict(SLIDE_INFO_DATA, pos=0.8), + ] + + freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("cover.slide_bedroom").state == CoverState.OPEN + + freezer.tick(delta=timedelta(seconds=15)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("cover.slide_bedroom").state == CoverState.CLOSING + + freezer.tick(delta=timedelta(seconds=15)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("cover.slide_bedroom").state == CoverState.CLOSED + + freezer.tick(delta=timedelta(seconds=15)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("cover.slide_bedroom").state == CoverState.OPENING + + +async def test_open_cover( + hass: HomeAssistant, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test open cover.""" + await setup_platform(hass, mock_config_entry, [Platform.COVER]) + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + { + ATTR_ENTITY_ID: "cover.slide_bedroom", + }, + blocking=True, + ) + mock_slide_api.slide_open.assert_called_once() + + +async def test_close_cover( + hass: HomeAssistant, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test close cover.""" + await setup_platform(hass, mock_config_entry, [Platform.COVER]) + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + { + ATTR_ENTITY_ID: "cover.slide_bedroom", + }, + blocking=True, + ) + mock_slide_api.slide_close.assert_called_once() + + +async def test_stop_cover( + hass: HomeAssistant, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test stop cover.""" + await setup_platform(hass, mock_config_entry, 
[Platform.COVER]) + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_STOP_COVER, + { + ATTR_ENTITY_ID: "cover.slide_bedroom", + }, + blocking=True, + ) + mock_slide_api.slide_stop.assert_called_once() + + +async def test_set_position( + hass: HomeAssistant, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test set cover position.""" + + await setup_platform(hass, mock_config_entry, [Platform.COVER]) + + mock_slide_api.slide_info.side_effect = [ + dict(SLIDE_INFO_DATA, pos=0.0), + dict(SLIDE_INFO_DATA, pos=1.0), + dict(SLIDE_INFO_DATA, pos=1.0), + dict(SLIDE_INFO_DATA, pos=0.0), + ] + + freezer.tick(delta=timedelta(seconds=15)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_SET_COVER_POSITION, + {ATTR_ENTITY_ID: "cover.slide_bedroom", ATTR_POSITION: 1.0}, + blocking=True, + ) + + freezer.tick(delta=timedelta(seconds=15)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("cover.slide_bedroom").state == CoverState.CLOSED + + freezer.tick(delta=timedelta(seconds=15)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_SET_COVER_POSITION, + {ATTR_ENTITY_ID: "cover.slide_bedroom", ATTR_POSITION: 0.0}, + blocking=True, + ) + + freezer.tick(delta=timedelta(seconds=15)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("cover.slide_bedroom").state == CoverState.OPEN + + assert len(mock_slide_api.slide_set_position.mock_calls) == 2 diff --git a/tests/components/slide_local/test_init.py b/tests/components/slide_local/test_init.py index 7b0a2d83164..ec9a12f9eeb 100644 --- a/tests/components/slide_local/test_init.py +++ b/tests/components/slide_local/test_init.py @@ -2,8 +2,10 @@ from unittest.mock import AsyncMock +from goslideapi.goslideapi import ClientConnectionError from syrupy import SnapshotAssertion +from homeassistant.config_entries import ConfigEntryState from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr @@ -27,3 +29,37 @@ async def test_device_info( ) assert device_entry is not None assert device_entry == snapshot + + +async def test_raise_config_entry_not_ready_when_offline( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_slide_api: AsyncMock, +) -> None: + """Config entry state is SETUP_RETRY when slide is offline.""" + + mock_slide_api.slide_info.side_effect = [ClientConnectionError, None] + + await setup_platform(hass, mock_config_entry, [Platform.COVER]) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + assert len(hass.config_entries.flow.async_progress()) == 0 + + +async def test_raise_config_entry_not_ready_when_empty_data( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_slide_api: AsyncMock, +) -> None: + """Config entry state is SETUP_RETRY when slide is offline.""" + + mock_slide_api.slide_info.return_value = None + + await setup_platform(hass, mock_config_entry, [Platform.COVER]) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + assert len(hass.config_entries.flow.async_progress()) == 0 From ff8bc763c3f667039b3034d972c871e7439a6833 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 18 Dec 2024 21:29:52 
+0100 Subject: [PATCH 0849/1198] Ensure indices needed by data migrators exist (#133367) * Ensure indices needed by data migrators exist * Update test * Improve test * Ignore index error on char(0) columns * Adjust tests * Address review comments * Add comment motivating magic number --- .../components/recorder/db_schema.py | 38 ++- .../components/recorder/migration.py | 223 ++++++++++++++---- tests/components/recorder/test_migrate.py | 8 +- ..._migration_run_time_migrations_remember.py | 203 +++++++++------- .../recorder/test_purge_v32_schema.py | 4 +- 5 files changed, 340 insertions(+), 136 deletions(-) diff --git a/homeassistant/components/recorder/db_schema.py b/homeassistant/components/recorder/db_schema.py index fb57a1c73e2..fa4162f4183 100644 --- a/homeassistant/components/recorder/db_schema.py +++ b/homeassistant/components/recorder/db_schema.py @@ -6,7 +6,7 @@ from collections.abc import Callable from datetime import datetime, timedelta import logging import time -from typing import Any, Self, cast +from typing import Any, Final, Self, cast import ciso8601 from fnv_hash_fast import fnv1a_32 @@ -130,7 +130,8 @@ METADATA_ID_LAST_UPDATED_INDEX_TS = "ix_states_metadata_id_last_updated_ts" EVENTS_CONTEXT_ID_BIN_INDEX = "ix_events_context_id_bin" STATES_CONTEXT_ID_BIN_INDEX = "ix_states_context_id_bin" LEGACY_STATES_EVENT_ID_INDEX = "ix_states_event_id" -LEGACY_STATES_ENTITY_ID_LAST_UPDATED_INDEX = "ix_states_entity_id_last_updated_ts" +LEGACY_STATES_ENTITY_ID_LAST_UPDATED_TS_INDEX = "ix_states_entity_id_last_updated_ts" +LEGACY_MAX_LENGTH_EVENT_CONTEXT_ID: Final = 36 CONTEXT_ID_BIN_MAX_LENGTH = 16 MYSQL_COLLATE = "utf8mb4_unicode_ci" @@ -350,6 +351,17 @@ class Events(Base): return None +class LegacyEvents(LegacyBase): + """Event history data with event_id, used for schema migration.""" + + __table_args__ = (_DEFAULT_TABLE_ARGS,) + __tablename__ = TABLE_EVENTS + event_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True) + context_id: Mapped[str | None] = mapped_column( + String(LEGACY_MAX_LENGTH_EVENT_CONTEXT_ID), index=True + ) + + class EventData(Base): """Event data history.""" @@ -575,6 +587,28 @@ class States(Base): ) +class LegacyStates(LegacyBase): + """State change history with entity_id, used for schema migration.""" + + __table_args__ = ( + Index( + LEGACY_STATES_ENTITY_ID_LAST_UPDATED_TS_INDEX, + "entity_id", + "last_updated_ts", + ), + _DEFAULT_TABLE_ARGS, + ) + __tablename__ = TABLE_STATES + state_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True) + entity_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + last_updated_ts: Mapped[float | None] = mapped_column( + TIMESTAMP_TYPE, default=time.time, index=True + ) + context_id: Mapped[str | None] = mapped_column( + String(LEGACY_MAX_LENGTH_EVENT_CONTEXT_ID), index=True + ) + + class StateAttributes(Base): """State attribute change history.""" diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 74e3b08f51c..33790ec65b2 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -23,6 +23,7 @@ from sqlalchemy.exc import ( ProgrammingError, SQLAlchemyError, ) +from sqlalchemy.orm import DeclarativeBase from sqlalchemy.orm.session import Session from sqlalchemy.schema import AddConstraint, CreateTable, DropConstraint from sqlalchemy.sql.expression import true @@ -59,7 +60,7 @@ from .db_schema import ( BIG_INTEGER_SQL, CONTEXT_ID_BIN_MAX_LENGTH, 
DOUBLE_PRECISION_TYPE_SQL, - LEGACY_STATES_ENTITY_ID_LAST_UPDATED_INDEX, + LEGACY_STATES_ENTITY_ID_LAST_UPDATED_TS_INDEX, LEGACY_STATES_EVENT_ID_INDEX, MYSQL_COLLATE, MYSQL_DEFAULT_CHARSET, @@ -169,6 +170,24 @@ _COLUMN_TYPES_FOR_DIALECT: dict[SupportedDialect | None, _ColumnTypesForDialect] } +def _unindexable_legacy_column( + instance: Recorder, base: type[DeclarativeBase], err: Exception +) -> bool: + """Ignore index errors on char(0) columns.""" + # The error code is hard coded because the PyMySQL library may not be + # installed when using other database engines than MySQL or MariaDB. + # 1167: The used storage engine can't index column '%s' + return bool( + base == LegacyBase + and isinstance(err, OperationalError) + and instance.engine + and instance.engine.dialect.name == SupportedDialect.MYSQL + and isinstance(err.orig, BaseException) + and err.orig.args + and err.orig.args[0] == 1167 + ) + + def raise_if_exception_missing_str(ex: Exception, match_substrs: Iterable[str]) -> None: """Raise if the exception and cause do not contain the match substrs.""" lower_ex_strs = [str(ex).lower(), str(ex.__cause__).lower()] @@ -471,14 +490,19 @@ def migrate_data_live( def _create_index( - session_maker: Callable[[], Session], table_name: str, index_name: str + instance: Recorder, + session_maker: Callable[[], Session], + table_name: str, + index_name: str, + *, + base: type[DeclarativeBase] = Base, ) -> None: """Create an index for the specified table. The index name should match the name given for the index within the table definition described in the models """ - table = Table(table_name, Base.metadata) + table = Table(table_name, base.metadata) _LOGGER.debug("Looking up index %s for table %s", index_name, table_name) # Look up the index object by name from the table is the models index_list = [idx for idx in table.indexes if idx.name == index_name] @@ -498,10 +522,18 @@ def _create_index( connection = session.connection() index.create(connection) except (InternalError, OperationalError, ProgrammingError) as err: + if _unindexable_legacy_column(instance, base, err): + _LOGGER.debug( + "Can't add legacy index %s to column %s, continuing", + index_name, + table_name, + ) + return raise_if_exception_missing_str(err, ["already exists", "duplicate"]) _LOGGER.warning( "Index %s already exists on %s, continuing", index_name, table_name ) + return _LOGGER.warning("Finished adding index `%s` to table `%s`", index_name, table_name) @@ -1040,7 +1072,12 @@ class _SchemaVersion2Migrator(_SchemaVersionMigrator, target_version=2): def _apply_update(self) -> None: """Version specific update method.""" # Create compound start/end index for recorder_runs - _create_index(self.session_maker, "recorder_runs", "ix_recorder_runs_start_end") + _create_index( + self.instance, + self.session_maker, + "recorder_runs", + "ix_recorder_runs_start_end", + ) # This used to create ix_states_last_updated bit it was removed in version 32 @@ -1075,7 +1112,9 @@ class _SchemaVersion5Migrator(_SchemaVersionMigrator, target_version=5): def _apply_update(self) -> None: """Version specific update method.""" # Create supporting index for States.event_id foreign key - _create_index(self.session_maker, "states", LEGACY_STATES_EVENT_ID_INDEX) + _create_index( + self.instance, self.session_maker, "states", LEGACY_STATES_EVENT_ID_INDEX + ) class _SchemaVersion6Migrator(_SchemaVersionMigrator, target_version=6): @@ -1086,7 +1125,9 @@ class _SchemaVersion6Migrator(_SchemaVersionMigrator, target_version=6): "events", ["context_id 
CHARACTER(36)", "context_user_id CHARACTER(36)"], ) - _create_index(self.session_maker, "events", "ix_events_context_id") + _create_index( + self.instance, self.session_maker, "events", "ix_events_context_id" + ) # This used to create ix_events_context_user_id, # but it was removed in version 28 _add_columns( @@ -1094,7 +1135,9 @@ class _SchemaVersion6Migrator(_SchemaVersionMigrator, target_version=6): "states", ["context_id CHARACTER(36)", "context_user_id CHARACTER(36)"], ) - _create_index(self.session_maker, "states", "ix_states_context_id") + _create_index( + self.instance, self.session_maker, "states", "ix_states_context_id" + ) # This used to create ix_states_context_user_id, # but it was removed in version 28 @@ -1148,7 +1191,9 @@ class _SchemaVersion10Migrator(_SchemaVersionMigrator, target_version=10): class _SchemaVersion11Migrator(_SchemaVersionMigrator, target_version=11): def _apply_update(self) -> None: """Version specific update method.""" - _create_index(self.session_maker, "states", "ix_states_old_state_id") + _create_index( + self.instance, self.session_maker, "states", "ix_states_old_state_id" + ) # _update_states_table_with_foreign_key_options first drops foreign # key constraints, and then re-adds them with the correct settings. @@ -1390,13 +1435,20 @@ class _SchemaVersion25Migrator(_SchemaVersionMigrator, target_version=25): "states", [f"attributes_id {self.column_types.big_int_type}"], ) - _create_index(self.session_maker, "states", "ix_states_attributes_id") + _create_index( + self.instance, self.session_maker, "states", "ix_states_attributes_id" + ) class _SchemaVersion26Migrator(_SchemaVersionMigrator, target_version=26): def _apply_update(self) -> None: """Version specific update method.""" - _create_index(self.session_maker, "statistics_runs", "ix_statistics_runs_start") + _create_index( + self.instance, + self.session_maker, + "statistics_runs", + "ix_statistics_runs_start", + ) class _SchemaVersion27Migrator(_SchemaVersionMigrator, target_version=27): @@ -1405,7 +1457,7 @@ class _SchemaVersion27Migrator(_SchemaVersionMigrator, target_version=27): _add_columns( self.session_maker, "events", [f"data_id {self.column_types.big_int_type}"] ) - _create_index(self.session_maker, "events", "ix_events_data_id") + _create_index(self.instance, self.session_maker, "events", "ix_events_data_id") class _SchemaVersion28Migrator(_SchemaVersionMigrator, target_version=28): @@ -1425,7 +1477,9 @@ class _SchemaVersion28Migrator(_SchemaVersionMigrator, target_version=28): "context_parent_id VARCHAR(36)", ], ) - _create_index(self.session_maker, "states", "ix_states_context_id") + _create_index( + self.instance, self.session_maker, "states", "ix_states_context_id" + ) # Once there are no longer any state_changed events # in the events table we can drop the index on states.event_id @@ -1452,7 +1506,10 @@ class _SchemaVersion29Migrator(_SchemaVersionMigrator, target_version=29): ) try: _create_index( - self.session_maker, "statistics_meta", "ix_statistics_meta_statistic_id" + self.instance, + self.session_maker, + "statistics_meta", + "ix_statistics_meta_statistic_id", ) except DatabaseError: # There may be duplicated statistics_meta entries, delete duplicates @@ -1460,7 +1517,10 @@ class _SchemaVersion29Migrator(_SchemaVersionMigrator, target_version=29): with session_scope(session=self.session_maker()) as session: delete_statistics_meta_duplicates(self.instance, session) _create_index( - self.session_maker, "statistics_meta", "ix_statistics_meta_statistic_id" + self.instance, + 
self.session_maker, + "statistics_meta", + "ix_statistics_meta_statistic_id", ) @@ -1494,14 +1554,24 @@ class _SchemaVersion31Migrator(_SchemaVersionMigrator, target_version=31): f"last_changed_ts {self.column_types.timestamp_type}", ], ) - _create_index(self.session_maker, "events", "ix_events_time_fired_ts") _create_index( - self.session_maker, "events", "ix_events_event_type_time_fired_ts" + self.instance, self.session_maker, "events", "ix_events_time_fired_ts" ) _create_index( - self.session_maker, "states", "ix_states_entity_id_last_updated_ts" + self.instance, + self.session_maker, + "events", + "ix_events_event_type_time_fired_ts", + ) + _create_index( + self.instance, + self.session_maker, + "states", + "ix_states_entity_id_last_updated_ts", + ) + _create_index( + self.instance, self.session_maker, "states", "ix_states_last_updated_ts" ) - _create_index(self.session_maker, "states", "ix_states_last_updated_ts") _migrate_columns_to_timestamp(self.instance, self.session_maker, self.engine) @@ -1559,16 +1629,23 @@ class _SchemaVersion34Migrator(_SchemaVersionMigrator, target_version=34): f"last_reset_ts {self.column_types.timestamp_type}", ], ) - _create_index(self.session_maker, "statistics", "ix_statistics_start_ts") _create_index( - self.session_maker, "statistics", "ix_statistics_statistic_id_start_ts" + self.instance, self.session_maker, "statistics", "ix_statistics_start_ts" ) _create_index( + self.instance, + self.session_maker, + "statistics", + "ix_statistics_statistic_id_start_ts", + ) + _create_index( + self.instance, self.session_maker, "statistics_short_term", "ix_statistics_short_term_start_ts", ) _create_index( + self.instance, self.session_maker, "statistics_short_term", "ix_statistics_short_term_statistic_id_start_ts", @@ -1618,8 +1695,12 @@ class _SchemaVersion36Migrator(_SchemaVersionMigrator, target_version=36): f"context_parent_id_bin {self.column_types.context_bin_type}", ], ) - _create_index(self.session_maker, "events", "ix_events_context_id_bin") - _create_index(self.session_maker, "states", "ix_states_context_id_bin") + _create_index( + self.instance, self.session_maker, "events", "ix_events_context_id_bin" + ) + _create_index( + self.instance, self.session_maker, "states", "ix_states_context_id_bin" + ) class _SchemaVersion37Migrator(_SchemaVersionMigrator, target_version=37): @@ -1630,10 +1711,15 @@ class _SchemaVersion37Migrator(_SchemaVersionMigrator, target_version=37): "events", [f"event_type_id {self.column_types.big_int_type}"], ) - _create_index(self.session_maker, "events", "ix_events_event_type_id") + _create_index( + self.instance, self.session_maker, "events", "ix_events_event_type_id" + ) _drop_index(self.session_maker, "events", "ix_events_event_type_time_fired_ts") _create_index( - self.session_maker, "events", "ix_events_event_type_id_time_fired_ts" + self.instance, + self.session_maker, + "events", + "ix_events_event_type_id_time_fired_ts", ) @@ -1645,9 +1731,14 @@ class _SchemaVersion38Migrator(_SchemaVersionMigrator, target_version=38): "states", [f"metadata_id {self.column_types.big_int_type}"], ) - _create_index(self.session_maker, "states", "ix_states_metadata_id") _create_index( - self.session_maker, "states", "ix_states_metadata_id_last_updated_ts" + self.instance, self.session_maker, "states", "ix_states_metadata_id" + ) + _create_index( + self.instance, + self.session_maker, + "states", + "ix_states_metadata_id_last_updated_ts", ) @@ -1731,8 +1822,15 @@ class _SchemaVersion40Migrator(_SchemaVersionMigrator, target_version=40): class 
_SchemaVersion41Migrator(_SchemaVersionMigrator, target_version=41): def _apply_update(self) -> None: """Version specific update method.""" - _create_index(self.session_maker, "event_types", "ix_event_types_event_type") - _create_index(self.session_maker, "states_meta", "ix_states_meta_entity_id") + _create_index( + self.instance, + self.session_maker, + "event_types", + "ix_event_types_event_type", + ) + _create_index( + self.instance, self.session_maker, "states_meta", "ix_states_meta_entity_id" + ) class _SchemaVersion42Migrator(_SchemaVersionMigrator, target_version=42): @@ -2319,7 +2417,7 @@ class DataMigrationStatus: class BaseMigration(ABC): """Base class for migrations.""" - index_to_drop: tuple[str, str] | None = None + index_to_drop: tuple[str, str, type[DeclarativeBase]] | None = None required_schema_version = 0 # Schema version required to run migration queries max_initial_schema_version: int # Skip migration if db created after this version migration_version = 1 @@ -2349,12 +2447,12 @@ class BaseMigration(ABC): """Migrate some data, returns True if migration is completed.""" status = self.migrate_data_impl(instance) if status.migration_done: - if self.index_to_drop is not None: - table, index = self.index_to_drop - _drop_index(instance.get_session, table, index) with session_scope(session=instance.get_session()) as session: self.migration_done(instance, session) _mark_migration_done(session, self.__class__) + if self.index_to_drop is not None: + table, index, _ = self.index_to_drop + _drop_index(instance.get_session, table, index) return not status.needs_migrate @abstractmethod @@ -2393,25 +2491,31 @@ class BaseMigration(ABC): "Data migration '%s' needed, schema too old", self.migration_id ) return True + has_needed_index = self._has_needed_index(session) + if has_needed_index is True: + # The index to be removed by the migration still exists + _LOGGER.info( + "Data migration '%s' needed, index to drop still exists", + self.migration_id, + ) + return True if self.migration_changes.get(self.migration_id, -1) >= self.migration_version: # The migration changes table indicates that the migration has been done _LOGGER.debug( "Data migration '%s' not needed, already completed", self.migration_id ) return False - # We do not know if the migration is done from the - # migration changes table so we must check the index and data - # This is the slow path - if ( - self.index_to_drop is not None - and get_index_by_name(session, self.index_to_drop[0], self.index_to_drop[1]) - is not None - ): + if has_needed_index is False: + # The index to be removed by the migration does not exist, but the migration + # changes table indicates that the migration has not been done _LOGGER.info( - "Data migration '%s' needed, index to drop still exists", + "Data migration '%s' needed, index to drop does not exist", self.migration_id, ) return True + # We do not know if the migration is done from the + # migration changes table or the index so we must check the data + # This is the slow path needs_migrate = self.needs_migrate_impl(instance, session) if needs_migrate.migration_done: _mark_migration_done(session, self.__class__) @@ -2422,6 +2526,13 @@ class BaseMigration(ABC): ) return needs_migrate.needs_migrate + def _has_needed_index(self, session: Session) -> bool | None: + """Check if the index needed by the migration exists.""" + if self.index_to_drop is None: + return None + table_name, index_name, _ = self.index_to_drop + return get_index_by_name(session, table_name, index_name) is not None + class 
BaseOffLineMigration(BaseMigration): """Base class for off line migrations.""" @@ -2435,6 +2546,7 @@ class BaseOffLineMigration(BaseMigration): _LOGGER.debug("Migration not needed for '%s'", self.migration_id) self.migration_done(instance, session) return + self._ensure_index_exists(instance) _LOGGER.warning( "The database is about to do data migration step '%s', %s", self.migration_id, @@ -2449,6 +2561,25 @@ class BaseOffLineMigration(BaseMigration): """Migrate some data, returns True if migration is completed.""" return self._migrate_data(instance) + def _ensure_index_exists(self, instance: Recorder) -> None: + """Ensure the index needed by the migration exists.""" + if not self.index_to_drop: + return + table_name, index_name, base = self.index_to_drop + with session_scope(session=instance.get_session()) as session: + if get_index_by_name(session, table_name, index_name) is not None: + return + _LOGGER.warning( + ( + "Data migration step '%s' needs index `%s` on table `%s`, but " + "it does not exist and will be added now" + ), + self.migration_id, + index_name, + table_name, + ) + _create_index(instance, instance.get_session, table_name, index_name, base=base) + class BaseRunTimeMigration(BaseMigration): """Base class for run time migrations.""" @@ -2492,7 +2623,7 @@ class StatesContextIDMigration(BaseMigrationWithQuery, BaseOffLineMigration): max_initial_schema_version = CONTEXT_ID_AS_BINARY_SCHEMA_VERSION - 1 migration_id = "state_context_id_as_binary" migration_version = 2 - index_to_drop = ("states", "ix_states_context_id") + index_to_drop = ("states", "ix_states_context_id", LegacyBase) def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: """Migrate states context_ids to use binary format, return True if completed.""" @@ -2536,7 +2667,7 @@ class EventsContextIDMigration(BaseMigrationWithQuery, BaseOffLineMigration): max_initial_schema_version = CONTEXT_ID_AS_BINARY_SCHEMA_VERSION - 1 migration_id = "event_context_id_as_binary" migration_version = 2 - index_to_drop = ("events", "ix_events_context_id") + index_to_drop = ("events", "ix_events_context_id", LegacyBase) def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: """Migrate events context_ids to use binary format, return True if completed.""" @@ -2814,7 +2945,11 @@ class EntityIDPostMigration(BaseMigrationWithQuery, BaseOffLineMigration): migration_id = "entity_id_post_migration" max_initial_schema_version = STATES_META_SCHEMA_VERSION - 1 - index_to_drop = (TABLE_STATES, LEGACY_STATES_ENTITY_ID_LAST_UPDATED_INDEX) + index_to_drop = ( + TABLE_STATES, + LEGACY_STATES_ENTITY_ID_LAST_UPDATED_TS_INDEX, + LegacyBase, + ) def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: """Migrate some data, returns True if migration is completed.""" diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index 462db70496a..052e9202715 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -600,7 +600,7 @@ async def test_schema_migrate( start=self.recorder_runs_manager.recording_start, created=dt_util.utcnow() ) - def _sometimes_failing_create_index(*args): + def _sometimes_failing_create_index(*args, **kwargs): """Make the first index create raise a retryable error to ensure we retry.""" if recorder_db_url.startswith("mysql://"): nonlocal create_calls @@ -609,7 +609,7 @@ async def test_schema_migrate( mysql_exception = OperationalError("statement", {}, []) mysql_exception.orig = Exception(1205, 
"retryable") raise mysql_exception - real_create_index(*args) + real_create_index(*args, **kwargs) with ( patch( @@ -712,7 +712,7 @@ def test_forgiving_add_index(recorder_db_url: str) -> None: instance = Mock() instance.get_session = Mock(return_value=session) migration._create_index( - instance.get_session, "states", "ix_states_context_id_bin" + instance, instance.get_session, "states", "ix_states_context_id_bin" ) engine.dispose() @@ -788,7 +788,7 @@ def test_forgiving_add_index_with_other_db_types( with patch( "homeassistant.components.recorder.migration.Table", return_value=mocked_table ): - migration._create_index(Mock(), "states", "ix_states_context_id") + migration._create_index(Mock(), Mock(), "states", "ix_states_context_id") assert "already exists on states" in caplog.text assert "continuing" in caplog.text diff --git a/tests/components/recorder/test_migration_run_time_migrations_remember.py b/tests/components/recorder/test_migration_run_time_migrations_remember.py index fa14570bc6b..677abd6083c 100644 --- a/tests/components/recorder/test_migration_run_time_migrations_remember.py +++ b/tests/components/recorder/test_migration_run_time_migrations_remember.py @@ -1,6 +1,6 @@ """Test run time migrations are remembered in the migration_changes table.""" -from collections.abc import Callable +from collections.abc import Callable, Generator import importlib import sys from unittest.mock import Mock, patch @@ -8,6 +8,7 @@ from unittest.mock import Mock, patch import pytest from sqlalchemy import create_engine from sqlalchemy.orm import Session +from sqlalchemy.schema import Index from homeassistant.components import recorder from homeassistant.components.recorder import core, migration, statistics @@ -87,138 +88,165 @@ def _create_engine_test( @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage @pytest.mark.parametrize( - ("initial_version", "expected_migrator_calls"), + ("initial_version", "expected_migrator_calls", "expected_created_indices"), + # expected_migrator_calls is a dict of + # migrator_id: (needs_migrate_calls, migrate_data_calls) [ ( 27, { - "state_context_id_as_binary": 1, - "event_context_id_as_binary": 1, - "event_type_id_migration": 1, - "entity_id_migration": 1, - "event_id_post_migration": 1, - "entity_id_post_migration": 1, + "state_context_id_as_binary": (0, 1), + "event_context_id_as_binary": (0, 1), + "event_type_id_migration": (2, 1), + "entity_id_migration": (2, 1), + "event_id_post_migration": (1, 1), + "entity_id_post_migration": (0, 1), }, + [ + "ix_states_context_id", + "ix_events_context_id", + "ix_states_entity_id_last_updated_ts", + ], ), ( 28, { - "state_context_id_as_binary": 1, - "event_context_id_as_binary": 1, - "event_type_id_migration": 1, - "entity_id_migration": 1, - "event_id_post_migration": 0, - "entity_id_post_migration": 1, + "state_context_id_as_binary": (0, 1), + "event_context_id_as_binary": (0, 1), + "event_type_id_migration": (2, 1), + "entity_id_migration": (2, 1), + "event_id_post_migration": (0, 0), + "entity_id_post_migration": (0, 1), }, + [ + "ix_states_context_id", + "ix_events_context_id", + "ix_states_entity_id_last_updated_ts", + ], ), ( 36, { - "state_context_id_as_binary": 0, - "event_context_id_as_binary": 0, - "event_type_id_migration": 1, - "entity_id_migration": 1, - "event_id_post_migration": 0, - "entity_id_post_migration": 1, + "state_context_id_as_binary": (0, 0), + "event_context_id_as_binary": (0, 0), + "event_type_id_migration": (2, 1), + "entity_id_migration": (2, 1), + 
"event_id_post_migration": (0, 0), + "entity_id_post_migration": (0, 1), }, + ["ix_states_entity_id_last_updated_ts"], ), ( 37, { - "state_context_id_as_binary": 0, - "event_context_id_as_binary": 0, - "event_type_id_migration": 0, - "entity_id_migration": 1, - "event_id_post_migration": 0, - "entity_id_post_migration": 1, + "state_context_id_as_binary": (0, 0), + "event_context_id_as_binary": (0, 0), + "event_type_id_migration": (0, 0), + "entity_id_migration": (2, 1), + "event_id_post_migration": (0, 0), + "entity_id_post_migration": (0, 1), }, + ["ix_states_entity_id_last_updated_ts"], ), ( 38, { - "state_context_id_as_binary": 0, - "event_context_id_as_binary": 0, - "event_type_id_migration": 0, - "entity_id_migration": 0, - "event_id_post_migration": 0, - "entity_id_post_migration": 0, + "state_context_id_as_binary": (0, 0), + "event_context_id_as_binary": (0, 0), + "event_type_id_migration": (0, 0), + "entity_id_migration": (0, 0), + "event_id_post_migration": (0, 0), + "entity_id_post_migration": (0, 0), }, + [], ), ( SCHEMA_VERSION, { - "state_context_id_as_binary": 0, - "event_context_id_as_binary": 0, - "event_type_id_migration": 0, - "entity_id_migration": 0, - "event_id_post_migration": 0, - "entity_id_post_migration": 0, + "state_context_id_as_binary": (0, 0), + "event_context_id_as_binary": (0, 0), + "event_type_id_migration": (0, 0), + "entity_id_migration": (0, 0), + "event_id_post_migration": (0, 0), + "entity_id_post_migration": (0, 0), }, + [], ), ], ) -async def test_data_migrator_new_database( +async def test_data_migrator_logic( async_test_recorder: RecorderInstanceGenerator, initial_version: int, - expected_migrator_calls: dict[str, int], + expected_migrator_calls: dict[str, tuple[int, int]], + expected_created_indices: list[str], ) -> None: - """Test that the data migrators are not executed on a new database.""" + """Test the data migrator logic. + + - The data migrators should not be executed on a new database. + - Indices needed by the migrators should be created if missing. 
+ """ config = {recorder.CONF_COMMIT_INTERVAL: 1} - def needs_migrate_mock() -> Mock: - return Mock( - spec_set=[], - return_value=migration.DataMigrationStatus( - needs_migrate=False, migration_done=True + def migrator_mock() -> dict[str, Mock]: + return { + "needs_migrate": Mock( + spec_set=[], + return_value=migration.DataMigrationStatus( + needs_migrate=True, migration_done=False + ), ), - ) + "migrate_data": Mock(spec_set=[], return_value=True), + } migrator_mocks = { - "state_context_id_as_binary": needs_migrate_mock(), - "event_context_id_as_binary": needs_migrate_mock(), - "event_type_id_migration": needs_migrate_mock(), - "entity_id_migration": needs_migrate_mock(), - "event_id_post_migration": needs_migrate_mock(), - "entity_id_post_migration": needs_migrate_mock(), + "state_context_id_as_binary": migrator_mock(), + "event_context_id_as_binary": migrator_mock(), + "event_type_id_migration": migrator_mock(), + "entity_id_migration": migrator_mock(), + "event_id_post_migration": migrator_mock(), + "entity_id_post_migration": migrator_mock(), } + def patch_check( + migrator_id: str, migrator_class: type[migration.BaseMigration] + ) -> Generator[None]: + return patch.object( + migrator_class, + "needs_migrate_impl", + side_effect=migrator_mocks[migrator_id]["needs_migrate"], + ) + + def patch_migrate( + migrator_id: str, migrator_class: type[migration.BaseMigration] + ) -> Generator[None]: + return patch.object( + migrator_class, + "migrate_data", + side_effect=migrator_mocks[migrator_id]["migrate_data"], + ) + with ( - patch.object( - migration.StatesContextIDMigration, - "needs_migrate_impl", - side_effect=migrator_mocks["state_context_id_as_binary"], - ), - patch.object( - migration.EventsContextIDMigration, - "needs_migrate_impl", - side_effect=migrator_mocks["event_context_id_as_binary"], - ), - patch.object( - migration.EventTypeIDMigration, - "needs_migrate_impl", - side_effect=migrator_mocks["event_type_id_migration"], - ), - patch.object( - migration.EntityIDMigration, - "needs_migrate_impl", - side_effect=migrator_mocks["entity_id_migration"], - ), - patch.object( - migration.EventIDPostMigration, - "needs_migrate_impl", - side_effect=migrator_mocks["event_id_post_migration"], - ), - patch.object( - migration.EntityIDPostMigration, - "needs_migrate_impl", - side_effect=migrator_mocks["entity_id_post_migration"], - ), + patch_check("state_context_id_as_binary", migration.StatesContextIDMigration), + patch_check("event_context_id_as_binary", migration.EventsContextIDMigration), + patch_check("event_type_id_migration", migration.EventTypeIDMigration), + patch_check("entity_id_migration", migration.EntityIDMigration), + patch_check("event_id_post_migration", migration.EventIDPostMigration), + patch_check("entity_id_post_migration", migration.EntityIDPostMigration), + patch_migrate("state_context_id_as_binary", migration.StatesContextIDMigration), + patch_migrate("event_context_id_as_binary", migration.EventsContextIDMigration), + patch_migrate("event_type_id_migration", migration.EventTypeIDMigration), + patch_migrate("entity_id_migration", migration.EntityIDMigration), + patch_migrate("event_id_post_migration", migration.EventIDPostMigration), + patch_migrate("entity_id_post_migration", migration.EntityIDPostMigration), patch( CREATE_ENGINE_TARGET, new=_create_engine_test( SCHEMA_MODULE_CURRENT, initial_version=initial_version ), ), + patch( + "sqlalchemy.schema.Index.create", autospec=True, wraps=Index.create + ) as wrapped_idx_create, ): async with ( 
async_test_home_assistant() as hass, @@ -231,8 +259,15 @@ async def test_data_migrator_new_database( await hass.async_block_till_done() await hass.async_stop() + index_names = [call[1][0].name for call in wrapped_idx_create.mock_calls] + assert index_names == expected_created_indices + + # Check each data migrator's needs_migrate_impl and migrate_data methods were called + # the expected number of times. for migrator, mock in migrator_mocks.items(): - assert len(mock.mock_calls) == expected_migrator_calls[migrator] + needs_migrate_calls, migrate_data_calls = expected_migrator_calls[migrator] + assert len(mock["needs_migrate"].mock_calls) == needs_migrate_calls + assert len(mock["migrate_data"].mock_calls) == migrate_data_calls @pytest.mark.parametrize("enable_migrate_state_context_ids", [True]) diff --git a/tests/components/recorder/test_purge_v32_schema.py b/tests/components/recorder/test_purge_v32_schema.py index 2bd1e7fd7f7..d68d1550268 100644 --- a/tests/components/recorder/test_purge_v32_schema.py +++ b/tests/components/recorder/test_purge_v32_schema.py @@ -1027,7 +1027,7 @@ async def test_purge_can_mix_legacy_and_new_format( def _recreate_legacy_events_index(): """Recreate the legacy events index since its no longer created on new instances.""" migration._create_index( - recorder_mock.get_session, "states", "ix_states_event_id" + recorder_mock, recorder_mock.get_session, "states", "ix_states_event_id" ) recorder_mock.use_legacy_events_index = True @@ -1178,7 +1178,7 @@ async def test_purge_can_mix_legacy_and_new_format_with_detached_state( def _recreate_legacy_events_index(): """Recreate the legacy events index since its no longer created on new instances.""" migration._create_index( - recorder_mock.get_session, "states", "ix_states_event_id" + recorder_mock, recorder_mock.get_session, "states", "ix_states_event_id" ) recorder_mock.use_legacy_events_index = True From 1bdda0249e775cf24611a2d45e2f161c2b7a372e Mon Sep 17 00:00:00 2001 From: Christopher Fenner <9592452+CFenner@users.noreply.github.com> Date: Wed, 18 Dec 2024 21:38:52 +0100 Subject: [PATCH 0850/1198] Bump PyViCare to 2.39.0 (#133519) --- homeassistant/components/vicare/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/vicare/manifest.json b/homeassistant/components/vicare/manifest.json index 0bb5594e829..72bc3de53d8 100644 --- a/homeassistant/components/vicare/manifest.json +++ b/homeassistant/components/vicare/manifest.json @@ -11,5 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/vicare", "iot_class": "cloud_polling", "loggers": ["PyViCare"], - "requirements": ["PyViCare==2.38.0"] + "requirements": ["PyViCare==2.39.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 79f1411ea42..75d0a88b009 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -100,7 +100,7 @@ PyTransportNSW==0.1.1 PyTurboJPEG==1.7.5 # homeassistant.components.vicare -PyViCare==2.38.0 +PyViCare==2.39.0 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 32e815babdd..2052aa1d560 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -94,7 +94,7 @@ PyTransportNSW==0.1.1 PyTurboJPEG==1.7.5 # homeassistant.components.vicare -PyViCare==2.38.0 +PyViCare==2.39.0 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 From e4bb351d2d11d0b8cad56debd48cc3126919c4e4 Mon Sep 17 00:00:00 2001 
From: Raphael Hehl <7577984+RaHehl@users.noreply.github.com> Date: Wed, 18 Dec 2024 21:41:22 +0100 Subject: [PATCH 0851/1198] Bump uiprotect to 7.1.0 (#133520) * Bump uiprotect to version 7.1.0 * Add aiports to bootstrap fixture in unifiprotect tests --- homeassistant/components/unifiprotect/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/unifiprotect/conftest.py | 1 + 4 files changed, 4 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/unifiprotect/manifest.json b/homeassistant/components/unifiprotect/manifest.json index 81ef72ec50d..1226f96c253 100644 --- a/homeassistant/components/unifiprotect/manifest.json +++ b/homeassistant/components/unifiprotect/manifest.json @@ -40,7 +40,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["uiprotect", "unifi_discovery"], - "requirements": ["uiprotect==7.0.2", "unifi-discovery==1.2.0"], + "requirements": ["uiprotect==7.1.0", "unifi-discovery==1.2.0"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/requirements_all.txt b/requirements_all.txt index 75d0a88b009..49c8bc8534f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2910,7 +2910,7 @@ typedmonarchmoney==0.3.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==7.0.2 +uiprotect==7.1.0 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 2052aa1d560..1b1226cd6d6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2329,7 +2329,7 @@ typedmonarchmoney==0.3.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==7.0.2 +uiprotect==7.1.0 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 diff --git a/tests/components/unifiprotect/conftest.py b/tests/components/unifiprotect/conftest.py index 3ed559b71ec..352c33297ba 100644 --- a/tests/components/unifiprotect/conftest.py +++ b/tests/components/unifiprotect/conftest.py @@ -98,6 +98,7 @@ def bootstrap_fixture(nvr: NVR): data["events"] = [] data["doorlocks"] = [] data["chimes"] = [] + data["aiports"] = [] return Bootstrap.from_unifi_dict(**data) From ba3fca53b0ed81e0aa41fb734034df69a7af305e Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Wed, 18 Dec 2024 21:49:32 +0100 Subject: [PATCH 0852/1198] Reolink platinum quality scale (#133514) --- homeassistant/components/reolink/manifest.json | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/reolink/manifest.json b/homeassistant/components/reolink/manifest.json index 7aced174e30..e5e8afc1d63 100644 --- a/homeassistant/components/reolink/manifest.json +++ b/homeassistant/components/reolink/manifest.json @@ -18,5 +18,6 @@ "documentation": "https://www.home-assistant.io/integrations/reolink", "iot_class": "local_push", "loggers": ["reolink_aio"], + "quality_scale": "platinum", "requirements": ["reolink-aio==0.11.5"] } From 2a9082559a143431faa641c24bd27c73c4019adb Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Wed, 18 Dec 2024 22:35:58 +0100 Subject: [PATCH 0853/1198] Fix names and description of two actions (#133528) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The two actions enable_motion_recording and disable_motion_recording use "Enables" and "Disables" in their names. This is inconsistent with the name of the actions, all other actions of this component, and the standard way of naming them, too. 
In addition the description of the latter misses the "s" which causes additional inconsistency – especially in translations. --- homeassistant/components/amcrest/strings.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/amcrest/strings.json b/homeassistant/components/amcrest/strings.json index 816511bf05e..807c75e1ac8 100644 --- a/homeassistant/components/amcrest/strings.json +++ b/homeassistant/components/amcrest/strings.json @@ -41,7 +41,7 @@ } }, "enable_motion_recording": { - "name": "Enables motion recording", + "name": "Enable motion recording", "description": "Enables recording a clip to camera storage when motion is detected.", "fields": { "entity_id": { @@ -51,8 +51,8 @@ } }, "disable_motion_recording": { - "name": "Disables motion recording", - "description": "Disable recording a clip to camera storage when motion is detected.", + "name": "Disable motion recording", + "description": "Disables recording a clip to camera storage when motion is detected.", "fields": { "entity_id": { "name": "[%key:component::amcrest::services::enable_recording::fields::entity_id::name%]", From 9e6a8638ddf514da45c52c934e2f8320242abf17 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ab=C3=ADlio=20Costa?= Date: Wed, 18 Dec 2024 21:38:57 +0000 Subject: [PATCH 0854/1198] Bump idasen-ha to 2.6.3 (#133508) This is a minor bump that adds py.typed --- homeassistant/components/idasen_desk/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/idasen_desk/manifest.json b/homeassistant/components/idasen_desk/manifest.json index 2f53ec20e11..7f44f8bbf44 100644 --- a/homeassistant/components/idasen_desk/manifest.json +++ b/homeassistant/components/idasen_desk/manifest.json @@ -12,5 +12,5 @@ "documentation": "https://www.home-assistant.io/integrations/idasen_desk", "integration_type": "device", "iot_class": "local_push", - "requirements": ["idasen-ha==2.6.2"] + "requirements": ["idasen-ha==2.6.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index 49c8bc8534f..d8dc08ca301 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1181,7 +1181,7 @@ ical==8.2.0 icmplib==3.0 # homeassistant.components.idasen_desk -idasen-ha==2.6.2 +idasen-ha==2.6.3 # homeassistant.components.network ifaddr==0.2.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 1b1226cd6d6..a988c0836b8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1001,7 +1001,7 @@ ical==8.2.0 icmplib==3.0 # homeassistant.components.idasen_desk -idasen-ha==2.6.2 +idasen-ha==2.6.3 # homeassistant.components.network ifaddr==0.2.0 From 03707e6308628b6010045282d5e8b522bdd932dc Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Wed, 18 Dec 2024 22:40:30 +0100 Subject: [PATCH 0855/1198] Improve field descriptions for Download file action (#133413) * Improve field descriptions for Download file action Currently two of the field descriptions for the Download file action don't explain exactly what should be entered but rather explain these like additional actions. The third, the Overwrite file option is misleading as it does not refer to an existing file. This commit fixes both issues by explaining the purpose of all three fields in a slightly more detailed fashion. 
* Update homeassistant/components/downloader/strings.json Co-authored-by: Josef Zweck * Update homeassistant/components/downloader/strings.json Co-authored-by: Josef Zweck --------- Co-authored-by: Josef Zweck --- homeassistant/components/downloader/strings.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/downloader/strings.json b/homeassistant/components/downloader/strings.json index 11a2bda8fce..7db7ea459d7 100644 --- a/homeassistant/components/downloader/strings.json +++ b/homeassistant/components/downloader/strings.json @@ -23,15 +23,15 @@ }, "subdir": { "name": "Subdirectory", - "description": "Download into subdirectory." + "description": "Relative download path." }, "filename": { "name": "Filename", - "description": "Determine the filename." + "description": "Custom name for the downloaded file." }, "overwrite": { "name": "Overwrite", - "description": "Whether to overwrite the file or not." + "description": "Overwrite file if it exists." } } } From 9f3c549f8d58eef6740442ed941a9f588b9db0b2 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Wed, 18 Dec 2024 23:46:18 +0100 Subject: [PATCH 0856/1198] Add integration setup tests to Peblar Rocksolid EV Chargers (#133532) --- tests/components/peblar/conftest.py | 7 +-- tests/components/peblar/test_init.py | 69 ++++++++++++++++++++++++++++ 2 files changed, 73 insertions(+), 3 deletions(-) create mode 100644 tests/components/peblar/test_init.py diff --git a/tests/components/peblar/conftest.py b/tests/components/peblar/conftest.py index dfe6aabc6bc..583b2cbe7a5 100644 --- a/tests/components/peblar/conftest.py +++ b/tests/components/peblar/conftest.py @@ -38,9 +38,10 @@ def mock_setup_entry() -> Generator[None]: @pytest.fixture def mock_peblar() -> Generator[MagicMock]: """Return a mocked Peblar client.""" - with patch( - "homeassistant.components.peblar.config_flow.Peblar", autospec=True - ) as peblar_mock: + with ( + patch("homeassistant.components.peblar.Peblar", autospec=True) as peblar_mock, + patch("homeassistant.components.peblar.config_flow.Peblar", new=peblar_mock), + ): peblar = peblar_mock.return_value peblar.system_information.return_value = PeblarSystemInformation.from_json( load_fixture("system_information.json", DOMAIN) diff --git a/tests/components/peblar/test_init.py b/tests/components/peblar/test_init.py new file mode 100644 index 00000000000..78508501ba8 --- /dev/null +++ b/tests/components/peblar/test_init.py @@ -0,0 +1,69 @@ +"""Integration tests for the Peblar integration.""" + +from unittest.mock import MagicMock + +from peblar import PeblarAuthenticationError, PeblarConnectionError, PeblarError +import pytest + +from homeassistant.components.peblar.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_load_unload_config_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_peblar: MagicMock, +) -> None: + """Test the Peblar configuration entry loading/unloading.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.LOADED + assert len(mock_peblar.login.mock_calls) == 1 + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert not hass.data.get(DOMAIN) + assert mock_config_entry.state is 
ConfigEntryState.NOT_LOADED + + +@pytest.mark.parametrize( + "exception", + [PeblarConnectionError, PeblarError], +) +async def test_config_entry_not_ready( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_peblar: MagicMock, + exception: Exception, +) -> None: + """Test the Peblar configuration entry not ready.""" + mock_peblar.login.side_effect = exception + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert len(mock_peblar.login.mock_calls) == 1 + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_config_entry_authentication_failed( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_peblar: MagicMock, +) -> None: + """Test authentication error, aborts setup.""" + mock_config_entry.add_to_hass(hass) + + mock_peblar.login.side_effect = PeblarAuthenticationError + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR From 0076bd838942e908c2ad922d0b5964882bb20daf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ab=C3=ADlio=20Costa?= Date: Wed, 18 Dec 2024 22:47:24 +0000 Subject: [PATCH 0857/1198] Simplify Idasen Desk entity properties (#133536) --- homeassistant/components/idasen_desk/cover.py | 12 +++++------ .../components/idasen_desk/sensor.py | 21 +++++-------------- 2 files changed, 10 insertions(+), 23 deletions(-) diff --git a/homeassistant/components/idasen_desk/cover.py b/homeassistant/components/idasen_desk/cover.py index 95474ea8750..a8ba0983e99 100644 --- a/homeassistant/components/idasen_desk/cover.py +++ b/homeassistant/components/idasen_desk/cover.py @@ -12,7 +12,7 @@ from homeassistant.components.cover import ( CoverEntity, CoverEntityFeature, ) -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -46,7 +46,6 @@ class IdasenDeskCover(IdasenDeskEntity, CoverEntity): def __init__(self, coordinator: IdasenDeskCoordinator) -> None: """Initialize an Idasen Desk cover.""" super().__init__(coordinator.address, coordinator) - self._attr_current_cover_position = self._desk.height_percent @property def is_closed(self) -> bool: @@ -83,8 +82,7 @@ class IdasenDeskCover(IdasenDeskEntity, CoverEntity): "Failed to move to specified position: Bluetooth error" ) from err - @callback - def _handle_coordinator_update(self, *args: Any) -> None: - """Handle data update.""" - self._attr_current_cover_position = self._desk.height_percent - self.async_write_ha_state() + @property + def current_cover_position(self) -> int | None: + """Return the current cover position.""" + return self._desk.height_percent diff --git a/homeassistant/components/idasen_desk/sensor.py b/homeassistant/components/idasen_desk/sensor.py index d4f629b85a8..4613d316a52 100644 --- a/homeassistant/components/idasen_desk/sensor.py +++ b/homeassistant/components/idasen_desk/sensor.py @@ -4,7 +4,6 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from typing import Any from homeassistant.components.sensor import ( SensorDeviceClass, @@ -13,7 +12,7 @@ from homeassistant.components.sensor import ( SensorStateClass, ) from homeassistant.const import UnitOfLength -from homeassistant.core import HomeAssistant, callback +from 
homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import IdasenDeskConfigEntry, IdasenDeskCoordinator @@ -68,17 +67,7 @@ class IdasenDeskSensor(IdasenDeskEntity, SensorEntity): super().__init__(f"{description.key}-{coordinator.address}", coordinator) self.entity_description = description - async def async_added_to_hass(self) -> None: - """When entity is added to hass.""" - await super().async_added_to_hass() - self._update_native_value() - - @callback - def _handle_coordinator_update(self, *args: Any) -> None: - """Handle data update.""" - self._update_native_value() - super()._handle_coordinator_update() - - def _update_native_value(self) -> None: - """Update the native value attribute.""" - self._attr_native_value = self.entity_description.value_fn(self.coordinator) + @property + def native_value(self) -> float | None: + """Return the value reported by the sensor.""" + return self.entity_description.value_fn(self.coordinator) From 35601480d2a8aeed9f6993ef1d5f523ac3b3823b Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 18 Dec 2024 12:48:39 -1000 Subject: [PATCH 0858/1198] Bump aiohttp to 3.11.11 (#133530) --- homeassistant/components/image/__init__.py | 2 +- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/image/__init__.py b/homeassistant/components/image/__init__.py index ea235127894..dbb5962eabf 100644 --- a/homeassistant/components/image/__init__.py +++ b/homeassistant/components/image/__init__.py @@ -348,7 +348,7 @@ async def async_get_still_stream( # While this results in additional bandwidth usage, # given the low frequency of image updates, it is acceptable. frame.extend(frame) - await response.write(frame) # type: ignore[arg-type] + await response.write(frame) return True event = asyncio.Event() diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index add20ef0870..49a6841d3a1 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -5,7 +5,7 @@ aiodiscover==2.1.0 aiodns==3.2.0 aiohasupervisor==0.2.2b2 aiohttp-fast-zlib==0.2.0 -aiohttp==3.11.10 +aiohttp==3.11.11 aiohttp_cors==0.7.0 aiozoneinfo==0.2.1 astral==2.2 diff --git a/pyproject.toml b/pyproject.toml index 91acea30b52..af79a173bab 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,7 +29,7 @@ dependencies = [ # change behavior based on presence of supervisor. 
Deprecated with #127228 # Lib can be removed with 2025.11 "aiohasupervisor==0.2.2b2", - "aiohttp==3.11.10", + "aiohttp==3.11.11", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.2.0", "aiozoneinfo==0.2.1", diff --git a/requirements.txt b/requirements.txt index e4346c3e517..a6fda6760d4 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,7 +5,7 @@ # Home Assistant Core aiodns==3.2.0 aiohasupervisor==0.2.2b2 -aiohttp==3.11.10 +aiohttp==3.11.11 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.2.0 aiozoneinfo==0.2.1 From 3fe08a722330e63a5e45951586aa409f70b696ff Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 19 Dec 2024 00:39:14 +0100 Subject: [PATCH 0859/1198] Add zeroconf discovery to Peblar Rocksolid EV chargers (#133529) --- .../components/peblar/config_flow.py | 62 +++++- homeassistant/components/peblar/manifest.json | 3 +- homeassistant/components/peblar/strings.json | 12 +- homeassistant/generated/zeroconf.py | 4 + tests/components/peblar/test_config_flow.py | 208 +++++++++++++++++- 5 files changed, 285 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/peblar/config_flow.py b/homeassistant/components/peblar/config_flow.py index 056d4a68be6..a9cfb7d89b9 100644 --- a/homeassistant/components/peblar/config_flow.py +++ b/homeassistant/components/peblar/config_flow.py @@ -8,6 +8,7 @@ from aiohttp import CookieJar from peblar import Peblar, PeblarAuthenticationError, PeblarConnectionError import voluptuous as vol +from homeassistant.components import zeroconf from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PASSWORD from homeassistant.helpers.aiohttp_client import async_create_clientsession @@ -25,6 +26,8 @@ class PeblarFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 + _host: str + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -49,7 +52,9 @@ class PeblarFlowHandler(ConfigFlow, domain=DOMAIN): LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - await self.async_set_unique_id(info.product_serial_number) + await self.async_set_unique_id( + info.product_serial_number, raise_on_progress=False + ) self._abort_if_unique_id_configured() return self.async_create_entry(title="Peblar", data=user_input) else: @@ -69,3 +74,58 @@ class PeblarFlowHandler(ConfigFlow, domain=DOMAIN): ), errors=errors, ) + + async def async_step_zeroconf( + self, discovery_info: zeroconf.ZeroconfServiceInfo + ) -> ConfigFlowResult: + """Handle zeroconf discovery of a Peblar device.""" + if not (sn := discovery_info.properties.get("sn")): + return self.async_abort(reason="no_serial_number") + + await self.async_set_unique_id(sn) + self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.host}) + + self._host = discovery_info.host + self.context.update({"configuration_url": f"http://{discovery_info.host}"}) + return await self.async_step_zeroconf_confirm() + + async def async_step_zeroconf_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initiated by zeroconf.""" + errors = {} + + if user_input is not None: + peblar = Peblar( + host=self._host, + session=async_create_clientsession( + self.hass, cookie_jar=CookieJar(unsafe=True) + ), + ) + try: + await peblar.login(password=user_input[CONF_PASSWORD]) + except PeblarAuthenticationError: + errors[CONF_PASSWORD] = "invalid_auth" + except Exception: # noqa: BLE001 + LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + 
return self.async_create_entry( + title="Peblar", + data={ + CONF_HOST: self._host, + CONF_PASSWORD: user_input[CONF_PASSWORD], + }, + ) + + return self.async_show_form( + step_id="zeroconf_confirm", + data_schema=vol.Schema( + { + vol.Required(CONF_PASSWORD): TextSelector( + TextSelectorConfig(type=TextSelectorType.PASSWORD) + ), + } + ), + errors=errors, + ) diff --git a/homeassistant/components/peblar/manifest.json b/homeassistant/components/peblar/manifest.json index 6de605c95dc..1ae2a491ba9 100644 --- a/homeassistant/components/peblar/manifest.json +++ b/homeassistant/components/peblar/manifest.json @@ -7,5 +7,6 @@ "integration_type": "device", "iot_class": "local_polling", "quality_scale": "bronze", - "requirements": ["peblar==0.2.1"] + "requirements": ["peblar==0.2.1"], + "zeroconf": [{ "type": "_http._tcp.local.", "name": "pblr-*" }] } diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index 9bf4803b592..e5fa1e85a6a 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -11,6 +11,15 @@ "host": "The hostname or IP address of your Peblar charger on your home network.", "password": "The same password as you use to log in to the Peblar device' local web interface." } + }, + "zeroconf_confirm": { + "description": "Set up your Peblar EV charger to integrate with Home Assistant.\n\nTo do so, you will need the password you use to log into the Peblar device' web interface.\n\nHome Assistant will automatically configure your Peblar charger for use with Home Assistant.", + "data": { + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "[%key:component::peblar::config::step::user::data_description::password%]" + } } }, "error": { @@ -19,7 +28,8 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "no_serial_number": "The discovered Peblar device did not provide a serial number." 
} } } diff --git a/homeassistant/generated/zeroconf.py b/homeassistant/generated/zeroconf.py index 2c914c2d240..66c576d8840 100644 --- a/homeassistant/generated/zeroconf.py +++ b/homeassistant/generated/zeroconf.py @@ -558,6 +558,10 @@ ZEROCONF = { "manufacturer": "nettigo", }, }, + { + "domain": "peblar", + "name": "pblr-*", + }, { "domain": "powerfox", "name": "powerfox*", diff --git a/tests/components/peblar/test_config_flow.py b/tests/components/peblar/test_config_flow.py index 0b2fa89e068..4e3ab008047 100644 --- a/tests/components/peblar/test_config_flow.py +++ b/tests/components/peblar/test_config_flow.py @@ -1,12 +1,14 @@ """Configuration flow tests for the Peblar integration.""" +from ipaddress import ip_address from unittest.mock import MagicMock from peblar import PeblarAuthenticationError, PeblarConnectionError import pytest +from homeassistant.components import zeroconf from homeassistant.components.peblar.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF from homeassistant.const import CONF_HOST, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -113,3 +115,207 @@ async def test_user_flow_already_configured( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +@pytest.mark.usefixtures("mock_peblar") +async def test_zeroconf_flow(hass: HomeAssistant) -> None: + """Test the zeroconf happy flow from start to finish.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("127.0.0.1"), + ip_addresses=[ip_address("127.0.0.1")], + port=80, + hostname="pblr-0000645.local.", + name="mock_name", + properties={ + "sn": "23-45-A4O-MOF", + "version": "1.6.1+1+WL-1", + }, + type="mock_type", + ), + ) + + assert result["step_id"] == "zeroconf_confirm" + assert result["type"] is FlowResultType.FORM + + progress = hass.config_entries.flow.async_progress() + assert len(progress) == 1 + assert progress[0].get("flow_id") == result["flow_id"] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_PASSWORD: "OMGPINEAPPLES"} + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "23-45-A4O-MOF" + assert config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_PASSWORD: "OMGPINEAPPLES", + } + assert not config_entry.options + + +async def test_zeroconf_flow_abort_no_serial(hass: HomeAssistant) -> None: + """Test the zeroconf aborts when it advertises incompatible data.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("127.0.0.1"), + ip_addresses=[ip_address("127.0.0.1")], + port=80, + hostname="pblr-0000645.local.", + name="mock_name", + properties={}, + type="mock_type", + ), + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_serial_number" + + +@pytest.mark.parametrize( + ("side_effect", "expected_error"), + [ + (PeblarConnectionError, {"base": "unknown"}), + (PeblarAuthenticationError, {CONF_PASSWORD: "invalid_auth"}), + (Exception, {"base": "unknown"}), + ], +) +async def test_zeroconf_flow_errors( + hass: HomeAssistant, + mock_peblar: MagicMock, + side_effect: Exception, + expected_error: dict[str, str], +) -> 
None: + """Test we show form on a error.""" + mock_peblar.login.side_effect = side_effect + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("127.0.0.1"), + ip_addresses=[ip_address("127.0.0.1")], + port=80, + hostname="pblr-0000645.local.", + name="mock_name", + properties={ + "sn": "23-45-A4O-MOF", + "version": "1.6.1+1+WL-1", + }, + type="mock_type", + ), + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PASSWORD: "OMGPUPPIES", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "zeroconf_confirm" + assert result["errors"] == expected_error + + mock_peblar.login.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PASSWORD: "OMGPUPPIES", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "23-45-A4O-MOF" + assert config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_PASSWORD: "OMGPUPPIES", + } + assert not config_entry.options + + +@pytest.mark.usefixtures("mock_peblar") +async def test_zeroconf_flow_not_discovered_again( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the zeroconf doesn't re-discover an existing device.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("127.0.0.1"), + ip_addresses=[ip_address("127.0.0.1")], + port=80, + hostname="pblr-0000645.local.", + name="mock_name", + properties={ + "sn": "23-45-A4O-MOF", + "version": "1.6.1+1+WL-1", + }, + type="mock_type", + ), + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.usefixtures("mock_peblar") +async def test_user_flow_with_zeroconf_in_progress(hass: HomeAssistant) -> None: + """Test the full happy path user flow from start to finish. + + While zeroconf discovery is already in progress. + """ + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("127.0.0.1"), + ip_addresses=[ip_address("127.0.0.1")], + port=80, + hostname="pblr-0000645.local.", + name="mock_name", + properties={ + "sn": "23-45-A4O-MOF", + "version": "1.6.1+1+WL-1", + }, + type="mock_type", + ), + ) + + progress = hass.config_entries.flow.async_progress() + assert len(progress) == 1 + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + progress = hass.config_entries.flow.async_progress() + assert len(progress) == 2 + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "127.0.0.1", + CONF_PASSWORD: "OMGPUPPIES", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + assert not hass.config_entries.flow.async_progress() From 99698ef95d31be56bbc1c80b77babab194aab5a1 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 18 Dec 2024 19:41:53 -1000 Subject: [PATCH 0860/1198] Optimize start time state queries for PostgreSQL (#133228) --- .../components/recorder/history/modern.py | 76 ++++++-- .../components/recorder/statistics.py | 61 ++++-- tests/components/recorder/test_history.py | 124 +++++++++++++ tests/components/recorder/test_statistics.py | 173 +++++++++++++++++- 4 files changed, 400 insertions(+), 34 deletions(-) diff --git a/homeassistant/components/recorder/history/modern.py b/homeassistant/components/recorder/history/modern.py index 9159bbc6181..279ca9c9eea 100644 --- a/homeassistant/components/recorder/history/modern.py +++ b/homeassistant/components/recorder/history/modern.py @@ -27,8 +27,13 @@ from homeassistant.core import HomeAssistant, State, split_entity_id from homeassistant.helpers.recorder import get_instance import homeassistant.util.dt as dt_util -from ..const import LAST_REPORTED_SCHEMA_VERSION -from ..db_schema import SHARED_ATTR_OR_LEGACY_ATTRIBUTES, StateAttributes, States +from ..const import LAST_REPORTED_SCHEMA_VERSION, SupportedDialect +from ..db_schema import ( + SHARED_ATTR_OR_LEGACY_ATTRIBUTES, + StateAttributes, + States, + StatesMeta, +) from ..filters import Filters from ..models import ( LazyState, @@ -145,6 +150,7 @@ def _significant_states_stmt( no_attributes: bool, include_start_time_state: bool, run_start_ts: float | None, + lateral_join_for_start_time: bool, ) -> Select | CompoundSelect: """Query the database for significant state changes.""" include_last_changed = not significant_changes_only @@ -184,6 +190,7 @@ def _significant_states_stmt( metadata_ids, no_attributes, include_last_changed, + lateral_join_for_start_time, ).subquery(), no_attributes, include_last_changed, @@ -254,6 +261,7 @@ def get_significant_states_with_session( start_time_ts = start_time.timestamp() end_time_ts = datetime_to_timestamp_or_none(end_time) single_metadata_id = metadata_ids[0] if len(metadata_ids) == 1 else None + lateral_join_for_start_time = instance.dialect_name == SupportedDialect.POSTGRESQL stmt = lambda_stmt( lambda: _significant_states_stmt( start_time_ts, @@ -265,6 +273,7 @@ def get_significant_states_with_session( no_attributes, include_start_time_state, run_start_ts, + lateral_join_for_start_time, ), track_on=[ bool(single_metadata_id), @@ -556,30 +565,61 @@ def _get_start_time_state_for_entities_stmt( metadata_ids: list[int], no_attributes: bool, include_last_changed: bool, + lateral_join_for_start_time: bool, ) -> Select: """Baked query to get states for specific entities.""" # We got an include-list of entities, accelerate the query by filtering already # in the inner and the outer query. + if lateral_join_for_start_time: + # PostgreSQL does not support index skip scan/loose index scan + # https://wiki.postgresql.org/wiki/Loose_indexscan + # so we need to do a lateral join to get the max last_updated_ts + # for each metadata_id as a group-by is too slow. 
+ # https://github.com/home-assistant/core/issues/132865 + max_metadata_id = StatesMeta.metadata_id.label("max_metadata_id") + max_last_updated = ( + select(func.max(States.last_updated_ts)) + .where( + (States.metadata_id == max_metadata_id) + & (States.last_updated_ts >= run_start_ts) + & (States.last_updated_ts < epoch_time) + ) + .subquery() + .lateral() + ) + most_recent_states_for_entities_by_date = ( + select(max_metadata_id, max_last_updated.c[0].label("max_last_updated")) + .select_from(StatesMeta) + .join( + max_last_updated, + StatesMeta.metadata_id == max_metadata_id, + ) + .where(StatesMeta.metadata_id.in_(metadata_ids)) + ).subquery() + else: + # Simple group-by for MySQL and SQLite, must use less + # than 1000 metadata_ids in the IN clause for MySQL + # or it will optimize poorly. + most_recent_states_for_entities_by_date = ( + select( + States.metadata_id.label("max_metadata_id"), + func.max(States.last_updated_ts).label("max_last_updated"), + ) + .filter( + (States.last_updated_ts >= run_start_ts) + & (States.last_updated_ts < epoch_time) + & States.metadata_id.in_(metadata_ids) + ) + .group_by(States.metadata_id) + .subquery() + ) + stmt = ( _stmt_and_join_attributes_for_start_state( no_attributes, include_last_changed, False ) .join( - ( - most_recent_states_for_entities_by_date := ( - select( - States.metadata_id.label("max_metadata_id"), - func.max(States.last_updated_ts).label("max_last_updated"), - ) - .filter( - (States.last_updated_ts >= run_start_ts) - & (States.last_updated_ts < epoch_time) - & States.metadata_id.in_(metadata_ids) - ) - .group_by(States.metadata_id) - .subquery() - ) - ), + most_recent_states_for_entities_by_date, and_( States.metadata_id == most_recent_states_for_entities_by_date.c.max_metadata_id, @@ -621,6 +661,7 @@ def _get_start_time_state_stmt( metadata_ids: list[int], no_attributes: bool, include_last_changed: bool, + lateral_join_for_start_time: bool, ) -> Select: """Return the states at a specific point in time.""" if single_metadata_id: @@ -641,6 +682,7 @@ def _get_start_time_state_stmt( metadata_ids, no_attributes, include_last_changed, + lateral_join_for_start_time, ) diff --git a/homeassistant/components/recorder/statistics.py b/homeassistant/components/recorder/statistics.py index 3f1d5b981e3..9e47ca43c5b 100644 --- a/homeassistant/components/recorder/statistics.py +++ b/homeassistant/components/recorder/statistics.py @@ -63,6 +63,7 @@ from .db_schema import ( STATISTICS_TABLES, Statistics, StatisticsBase, + StatisticsMeta, StatisticsRuns, StatisticsShortTerm, ) @@ -1669,6 +1670,7 @@ def _augment_result_with_change( drop_sum = "sum" not in _types prev_sums = {} if tmp := _statistics_at_time( + hass, session, {metadata[statistic_id][0] for statistic_id in result}, table, @@ -2032,22 +2034,50 @@ def _generate_statistics_at_time_stmt( metadata_ids: set[int], start_time_ts: float, types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]], + lateral_join_for_start_time: bool, ) -> StatementLambdaElement: """Create the statement for finding the statistics for a given time.""" stmt = _generate_select_columns_for_types_stmt(table, types) - stmt += lambda q: q.join( - ( - most_recent_statistic_ids := ( - select( - func.max(table.start_ts).label("max_start_ts"), - table.metadata_id.label("max_metadata_id"), - ) - .filter(table.start_ts < start_time_ts) - .filter(table.metadata_id.in_(metadata_ids)) - .group_by(table.metadata_id) - .subquery() + if lateral_join_for_start_time: + # PostgreSQL does not support index skip scan/loose 
index scan + # https://wiki.postgresql.org/wiki/Loose_indexscan + # so we need to do a lateral join to get the max max_start_ts + # for each metadata_id as a group-by is too slow. + # https://github.com/home-assistant/core/issues/132865 + max_metadata_id = StatisticsMeta.id.label("max_metadata_id") + max_start = ( + select(func.max(table.start_ts)) + .filter(table.metadata_id == max_metadata_id) + .filter(table.start_ts < start_time_ts) + .filter(table.metadata_id.in_(metadata_ids)) + .subquery() + .lateral() + ) + most_recent_statistic_ids = ( + select(max_metadata_id, max_start.c[0].label("max_start_ts")) + .select_from(StatisticsMeta) + .join( + max_start, + StatisticsMeta.id == max_metadata_id, ) - ), + .where(StatisticsMeta.id.in_(metadata_ids)) + ).subquery() + else: + # Simple group-by for MySQL and SQLite, must use less + # than 1000 metadata_ids in the IN clause for MySQL + # or it will optimize poorly. + most_recent_statistic_ids = ( + select( + func.max(table.start_ts).label("max_start_ts"), + table.metadata_id.label("max_metadata_id"), + ) + .filter(table.start_ts < start_time_ts) + .filter(table.metadata_id.in_(metadata_ids)) + .group_by(table.metadata_id) + .subquery() + ) + stmt += lambda q: q.join( + most_recent_statistic_ids, and_( table.start_ts == most_recent_statistic_ids.c.max_start_ts, table.metadata_id == most_recent_statistic_ids.c.max_metadata_id, @@ -2057,6 +2087,7 @@ def _generate_statistics_at_time_stmt( def _statistics_at_time( + hass: HomeAssistant, session: Session, metadata_ids: set[int], table: type[StatisticsBase], @@ -2065,7 +2096,11 @@ def _statistics_at_time( ) -> Sequence[Row] | None: """Return last known statistics, earlier than start_time, for the metadata_ids.""" start_time_ts = start_time.timestamp() - stmt = _generate_statistics_at_time_stmt(table, metadata_ids, start_time_ts, types) + dialect_name = get_instance(hass).dialect_name + lateral_join_for_start_time = dialect_name == SupportedDialect.POSTGRESQL + stmt = _generate_statistics_at_time_stmt( + table, metadata_ids, start_time_ts, types, lateral_join_for_start_time + ) return cast(Sequence[Row], execute_stmt_lambda_element(session, stmt)) diff --git a/tests/components/recorder/test_history.py b/tests/components/recorder/test_history.py index 28b8275247c..eea4605039b 100644 --- a/tests/components/recorder/test_history.py +++ b/tests/components/recorder/test_history.py @@ -1014,3 +1014,127 @@ async def test_get_last_state_changes_with_non_existent_entity_ids_returns_empty ) -> None: """Test get_last_state_changes returns an empty dict when entities not in the db.""" assert history.get_last_state_changes(hass, 1, "nonexistent.entity") == {} + + +@pytest.mark.skip_on_db_engine(["sqlite", "mysql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.usefixtures("recorder_db_url") +async def test_get_significant_states_with_session_uses_lateral_with_postgresql( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test get_significant_states_with_session uses the lateral path with PostgreSQL.""" + entity_id = "media_player.test" + hass.states.async_set("any.other", "on") + await async_wait_recording_done(hass) + hass.states.async_set(entity_id, "off") + + def set_state(state): + """Set the state.""" + hass.states.async_set(entity_id, state, {"any": 1}) + return hass.states.get(entity_id) + + start = dt_util.utcnow().replace(microsecond=0) + point = start + timedelta(seconds=1) + point2 = start + timedelta(seconds=1, microseconds=100) + point3 = start + 
timedelta(seconds=1, microseconds=200) + end = point + timedelta(seconds=1, microseconds=400) + + with freeze_time(start) as freezer: + set_state("idle") + set_state("YouTube") + + freezer.move_to(point) + states = [set_state("idle")] + + freezer.move_to(point2) + states.append(set_state("Netflix")) + + freezer.move_to(point3) + states.append(set_state("Plex")) + + freezer.move_to(end) + set_state("Netflix") + set_state("Plex") + await async_wait_recording_done(hass) + + start_time = point2 + timedelta(microseconds=10) + hist = history.get_significant_states( + hass=hass, + start_time=start_time, # Pick a point where we will generate a start time state + end_time=end, + entity_ids=[entity_id, "any.other"], + include_start_time_state=True, + ) + assert len(hist[entity_id]) == 2 + + sqlalchemy_logs = "".join( + [ + record.getMessage() + for record in caplog.records + if record.name.startswith("sqlalchemy.engine") + ] + ) + # We can't patch inside the lambda so we have to check the logs + assert "JOIN LATERAL" in sqlalchemy_logs + + +@pytest.mark.skip_on_db_engine(["postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.usefixtures("recorder_db_url") +async def test_get_significant_states_with_session_uses_non_lateral_without_postgresql( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test get_significant_states_with_session does not use a the lateral path without PostgreSQL.""" + entity_id = "media_player.test" + hass.states.async_set("any.other", "on") + await async_wait_recording_done(hass) + hass.states.async_set(entity_id, "off") + + def set_state(state): + """Set the state.""" + hass.states.async_set(entity_id, state, {"any": 1}) + return hass.states.get(entity_id) + + start = dt_util.utcnow().replace(microsecond=0) + point = start + timedelta(seconds=1) + point2 = start + timedelta(seconds=1, microseconds=100) + point3 = start + timedelta(seconds=1, microseconds=200) + end = point + timedelta(seconds=1, microseconds=400) + + with freeze_time(start) as freezer: + set_state("idle") + set_state("YouTube") + + freezer.move_to(point) + states = [set_state("idle")] + + freezer.move_to(point2) + states.append(set_state("Netflix")) + + freezer.move_to(point3) + states.append(set_state("Plex")) + + freezer.move_to(end) + set_state("Netflix") + set_state("Plex") + await async_wait_recording_done(hass) + + start_time = point2 + timedelta(microseconds=10) + hist = history.get_significant_states( + hass=hass, + start_time=start_time, # Pick a point where we will generate a start time state + end_time=end, + entity_ids=[entity_id, "any.other"], + include_start_time_state=True, + ) + assert len(hist[entity_id]) == 2 + + sqlalchemy_logs = "".join( + [ + record.getMessage() + for record in caplog.records + if record.name.startswith("sqlalchemy.engine") + ] + ) + # We can't patch inside the lambda so we have to check the logs + assert "JOIN LATERAL" not in sqlalchemy_logs diff --git a/tests/components/recorder/test_statistics.py b/tests/components/recorder/test_statistics.py index 6b1e1a655db..55029c3eacf 100644 --- a/tests/components/recorder/test_statistics.py +++ b/tests/components/recorder/test_statistics.py @@ -1914,20 +1914,185 @@ def test_cache_key_for_generate_max_mean_min_statistic_in_sub_period_stmt() -> N assert cache_key_1 != cache_key_3 -def test_cache_key_for_generate_statistics_at_time_stmt() -> None: +@pytest.mark.parametrize("lateral_join_for_start_time", [True, False]) +def test_cache_key_for_generate_statistics_at_time_stmt( + 
lateral_join_for_start_time: bool, +) -> None: """Test cache key for _generate_statistics_at_time_stmt.""" - stmt = _generate_statistics_at_time_stmt(StatisticsShortTerm, {0}, 0.0, set()) + stmt = _generate_statistics_at_time_stmt( + StatisticsShortTerm, {0}, 0.0, set(), lateral_join_for_start_time + ) cache_key_1 = stmt._generate_cache_key() - stmt2 = _generate_statistics_at_time_stmt(StatisticsShortTerm, {0}, 0.0, set()) + stmt2 = _generate_statistics_at_time_stmt( + StatisticsShortTerm, {0}, 0.0, set(), lateral_join_for_start_time + ) cache_key_2 = stmt2._generate_cache_key() assert cache_key_1 == cache_key_2 stmt3 = _generate_statistics_at_time_stmt( - StatisticsShortTerm, {0}, 0.0, {"sum", "mean"} + StatisticsShortTerm, + {0}, + 0.0, + {"sum", "mean"}, + lateral_join_for_start_time, ) cache_key_3 = stmt3._generate_cache_key() assert cache_key_1 != cache_key_3 +@pytest.mark.skip_on_db_engine(["sqlite", "mysql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.usefixtures("recorder_db_url") +@pytest.mark.freeze_time("2022-10-01 00:00:00+00:00") +async def test_statistics_at_time_uses_lateral_query_with_postgresql( + hass: HomeAssistant, + setup_recorder: None, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test statistics_at_time uses a lateral query with PostgreSQL.""" + await async_wait_recording_done(hass) + assert "Compiling statistics for" not in caplog.text + assert "Statistics already compiled" not in caplog.text + + zero = dt_util.utcnow() + period1 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 00:00:00")) + period2 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 01:00:00")) + period3 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 02:00:00")) + period4 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 03:00:00")) + + external_statistics = ( + { + "start": period1, + "last_reset": None, + "state": 0, + "sum": 2, + }, + { + "start": period2, + "last_reset": None, + "state": 1, + "sum": 3, + }, + { + "start": period3, + "last_reset": None, + "state": 2, + "sum": 5, + }, + { + "start": period4, + "last_reset": None, + "state": 3, + "sum": 8, + }, + ) + external_metadata = { + "has_mean": False, + "has_sum": True, + "name": "Total imported energy", + "source": "recorder", + "statistic_id": "sensor.total_energy_import", + "unit_of_measurement": "kWh", + } + + async_import_statistics(hass, external_metadata, external_statistics) + await async_wait_recording_done(hass) + # Get change from far in the past + stats = statistics_during_period( + hass, + zero, + period="hour", + statistic_ids={"sensor.total_energy_import"}, + types={"change", "sum"}, + ) + assert stats + sqlalchemy_logs = "".join( + [ + record.getMessage() + for record in caplog.records + if record.name.startswith("sqlalchemy.engine") + ] + ) + # We can't patch inside the lambda so we have to check the logs + assert "JOIN LATERAL" in sqlalchemy_logs + + +@pytest.mark.skip_on_db_engine(["postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.usefixtures("recorder_db_url") +@pytest.mark.freeze_time("2022-10-01 00:00:00+00:00") +async def test_statistics_at_time_uses_non_lateral_query_without_postgresql( + hass: HomeAssistant, + setup_recorder: None, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test statistics_at_time does not use a lateral query without PostgreSQL.""" + await async_wait_recording_done(hass) + assert "Compiling statistics for" not in caplog.text + assert "Statistics already compiled" not in caplog.text + + zero = dt_util.utcnow() + period1 = 
dt_util.as_utc(dt_util.parse_datetime("2023-05-08 00:00:00")) + period2 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 01:00:00")) + period3 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 02:00:00")) + period4 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 03:00:00")) + + external_statistics = ( + { + "start": period1, + "last_reset": None, + "state": 0, + "sum": 2, + }, + { + "start": period2, + "last_reset": None, + "state": 1, + "sum": 3, + }, + { + "start": period3, + "last_reset": None, + "state": 2, + "sum": 5, + }, + { + "start": period4, + "last_reset": None, + "state": 3, + "sum": 8, + }, + ) + external_metadata = { + "has_mean": False, + "has_sum": True, + "name": "Total imported energy", + "source": "recorder", + "statistic_id": "sensor.total_energy_import", + "unit_of_measurement": "kWh", + } + + async_import_statistics(hass, external_metadata, external_statistics) + await async_wait_recording_done(hass) + # Get change from far in the past + stats = statistics_during_period( + hass, + zero, + period="hour", + statistic_ids={"sensor.total_energy_import"}, + types={"change", "sum"}, + ) + assert stats + sqlalchemy_logs = "".join( + [ + record.getMessage() + for record in caplog.records + if record.name.startswith("sqlalchemy.engine") + ] + ) + # We can't patch inside the lambda so we have to check the logs + assert "JOIN LATERAL" not in sqlalchemy_logs + + @pytest.mark.parametrize("timezone", ["America/Regina", "Europe/Vienna", "UTC"]) @pytest.mark.freeze_time("2022-10-01 00:00:00+00:00") async def test_change( From 681863f80ede507acec069e92c5f6a4dd9c91d44 Mon Sep 17 00:00:00 2001 From: Stefan Agner Date: Thu, 19 Dec 2024 08:32:46 +0100 Subject: [PATCH 0861/1198] Use mV and mA as units for electrical power measurement in Matter (#133505) --- homeassistant/components/matter/sensor.py | 12 ++++---- .../matter/snapshots/test_sensor.ambr | 28 +++++++++++++++++-- 2 files changed, 32 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/matter/sensor.py b/homeassistant/components/matter/sensor.py index d71cd52a0c6..de4fdfe2685 100644 --- a/homeassistant/components/matter/sensor.py +++ b/homeassistant/components/matter/sensor.py @@ -222,10 +222,10 @@ DISCOVERY_SCHEMAS = [ platform=Platform.SENSOR, entity_description=MatterSensorEntityDescription( key="PowerSourceBatVoltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, + native_unit_of_measurement=UnitOfElectricPotential.MILLIVOLT, + suggested_unit_of_measurement=UnitOfElectricPotential.VOLT, device_class=SensorDeviceClass.VOLTAGE, entity_category=EntityCategory.DIAGNOSTIC, - measurement_to_ha=lambda x: x / 1000, state_class=SensorStateClass.MEASUREMENT, ), entity_class=MatterSensor, @@ -596,10 +596,10 @@ DISCOVERY_SCHEMAS = [ key="ElectricalPowerMeasurementVoltage", device_class=SensorDeviceClass.VOLTAGE, entity_category=EntityCategory.DIAGNOSTIC, - native_unit_of_measurement=UnitOfElectricPotential.VOLT, + native_unit_of_measurement=UnitOfElectricPotential.MILLIVOLT, + suggested_unit_of_measurement=UnitOfElectricPotential.VOLT, suggested_display_precision=0, state_class=SensorStateClass.MEASUREMENT, - measurement_to_ha=lambda x: x / 1000, ), entity_class=MatterSensor, required_attributes=(clusters.ElectricalPowerMeasurement.Attributes.Voltage,), @@ -610,10 +610,10 @@ DISCOVERY_SCHEMAS = [ key="ElectricalPowerMeasurementActiveCurrent", device_class=SensorDeviceClass.CURRENT, entity_category=EntityCategory.DIAGNOSTIC, - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + 
native_unit_of_measurement=UnitOfElectricCurrent.MILLIAMPERE, + suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, suggested_display_precision=2, state_class=SensorStateClass.MEASUREMENT, - measurement_to_ha=lambda x: x / 1000, ), entity_class=MatterSensor, required_attributes=( diff --git a/tests/components/matter/snapshots/test_sensor.ambr b/tests/components/matter/snapshots/test_sensor.ambr index 60a3d33a130..e452ce45f1d 100644 --- a/tests/components/matter/snapshots/test_sensor.ambr +++ b/tests/components/matter/snapshots/test_sensor.ambr @@ -1357,6 +1357,9 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -1627,6 +1630,9 @@ 'sensor': dict({ 'suggested_display_precision': 2, }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -1792,6 +1798,9 @@ 'sensor': dict({ 'suggested_display_precision': 0, }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -1987,6 +1996,9 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -2012,7 +2024,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '3.05', + 'state': '3.050', }) # --- # name: test_sensors[eve_weather_sensor][sensor.eve_weather_battery-entry] @@ -2291,6 +2303,9 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -2659,6 +2674,9 @@ 'sensor': dict({ 'suggested_display_precision': 2, }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -2886,6 +2904,9 @@ 'sensor': dict({ 'suggested_display_precision': 0, }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -3034,6 +3055,9 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -3059,7 +3083,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '0.0', + 'state': '0.000', }) # --- # name: test_sensors[temperature_sensor][sensor.mock_temperature_sensor_temperature-entry] From ddd2ba6c4af0b6a8529330fc28ae29213eef316b Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 19 Dec 2024 08:36:29 +0100 Subject: [PATCH 0862/1198] Set default min/max color temperature in hue lights (#133548) --- homeassistant/components/hue/v1/light.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/hue/v1/light.py b/homeassistant/components/hue/v1/light.py index 78a06784b8d..e9669d226f0 100644 --- a/homeassistant/components/hue/v1/light.py +++ b/homeassistant/components/hue/v1/light.py @@ -17,6 +17,8 @@ from homeassistant.components.light import ( ATTR_FLASH, ATTR_HS_COLOR, ATTR_TRANSITION, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, EFFECT_COLORLOOP, EFFECT_RANDOM, FLASH_LONG, @@ -447,13 +449,13 @@ class HueLight(CoordinatorEntity, LightEntity): def max_color_temp_kelvin(self) -> int: """Return the coldest color_temp_kelvin that this light supports.""" if self.is_group: - return super().max_color_temp_kelvin + return 
DEFAULT_MAX_KELVIN min_mireds = self.light.controlcapabilities.get("ct", {}).get("min") # We filter out '0' too, which can be incorrectly reported by 3rd party buls if not min_mireds: - return super().max_color_temp_kelvin + return DEFAULT_MAX_KELVIN return color_util.color_temperature_mired_to_kelvin(min_mireds) @@ -461,14 +463,14 @@ class HueLight(CoordinatorEntity, LightEntity): def min_color_temp_kelvin(self) -> int: """Return the warmest color_temp_kelvin that this light supports.""" if self.is_group: - return super().min_color_temp_kelvin + return DEFAULT_MIN_KELVIN if self.is_livarno: - return 500 + return 2000 # 500 mireds max_mireds = self.light.controlcapabilities.get("ct", {}).get("max") if not max_mireds: - return super().min_color_temp_kelvin + return DEFAULT_MIN_KELVIN return color_util.color_temperature_mired_to_kelvin(max_mireds) From 893f605d61751f9b9bb1b0c478d9b13abb3dc8dc Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 19 Dec 2024 09:42:22 +0100 Subject: [PATCH 0863/1198] Revert "Update docker base image to 2024.12.1" (#133552) Revert "Update docker base image to 2024.12.1 (#133323)" This reverts commit 66dcd38701283e9e04d7eaa8257ad1d94448f6a6. --- build.yaml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/build.yaml b/build.yaml index fafdd876f75..a8755bbbf5c 100644 --- a/build.yaml +++ b/build.yaml @@ -1,10 +1,10 @@ image: ghcr.io/home-assistant/{arch}-homeassistant build_from: - aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.12.1 - armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.12.1 - armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.12.1 - amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.12.1 - i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.12.1 + aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.11.0 + armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.11.0 + armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.11.0 + amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.11.0 + i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.11.0 codenotary: signer: notary@home-assistant.io base_image: notary@home-assistant.io From c8480627ca40d0d52d1f4f590fb7b99668d35eb5 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 19 Dec 2024 09:56:32 +0100 Subject: [PATCH 0864/1198] Add comment motivating magic number for MySQL error codes (#133516) * Add comment motivating magic number for MySQL error codes * Pick nits --- homeassistant/components/recorder/migration.py | 2 +- homeassistant/components/recorder/util.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 33790ec65b2..d57db03f90e 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -175,7 +175,7 @@ def _unindexable_legacy_column( ) -> bool: """Ignore index errors on char(0) columns.""" # The error code is hard coded because the PyMySQL library may not be - # installed when using other database engines than MySQL or MariaDB. + # installed when using database engines other than MySQL or MariaDB. 
# 1167: The used storage engine can't index column '%s' return bool( base == LegacyBase diff --git a/homeassistant/components/recorder/util.py b/homeassistant/components/recorder/util.py index 2e7ac0c092d..ba4c5194689 100644 --- a/homeassistant/components/recorder/util.py +++ b/homeassistant/components/recorder/util.py @@ -107,6 +107,8 @@ MAX_RESTART_TIME = timedelta(minutes=10) # Retry when one of the following MySQL errors occurred: RETRYABLE_MYSQL_ERRORS = (1205, 1206, 1213) +# The error codes are hard coded because the PyMySQL library may not be +# installed when using database engines other than MySQL or MariaDB. # 1205: Lock wait timeout exceeded; try restarting transaction # 1206: The total number of locks exceeds the lock table size # 1213: Deadlock found when trying to get lock; try restarting transaction From a3fb6e8f927ea788932a66141983dd3a357d0617 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 18 Dec 2024 23:01:40 -1000 Subject: [PATCH 0865/1198] Bump pydantic to 2.10.4 (#133539) changelog: https://github.com/pydantic/pydantic/compare/v2.10.3...v2.10.4 --- homeassistant/package_constraints.txt | 2 +- requirements_test.txt | 2 +- script/gen_requirements_all.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 49a6841d3a1..62de8720278 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -126,7 +126,7 @@ multidict>=6.0.2 backoff>=2.0 # ensure pydantic version does not float since it might have breaking changes -pydantic==2.10.3 +pydantic==2.10.4 # Required for Python 3.12.4 compatibility (#119223). mashumaro>=3.13.1 diff --git a/requirements_test.txt b/requirements_test.txt index 98a948cd56e..e8561eba0a5 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -14,7 +14,7 @@ license-expression==30.4.0 mock-open==1.4.0 mypy-dev==1.14.0a7 pre-commit==4.0.0 -pydantic==2.10.3 +pydantic==2.10.4 pylint==3.3.2 pylint-per-file-ignores==1.3.2 pipdeptree==2.23.4 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index 5cc609eec2a..71229d0b57d 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -159,7 +159,7 @@ multidict>=6.0.2 backoff>=2.0 # ensure pydantic version does not float since it might have breaking changes -pydantic==2.10.3 +pydantic==2.10.4 # Required for Python 3.12.4 compatibility (#119223). mashumaro>=3.13.1 From 69a8d3f3c1d502177ffff7ac496046c2e14b64a6 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 18 Dec 2024 23:01:58 -1000 Subject: [PATCH 0866/1198] Revert "Optimize start time state queries for PostgreSQL" (#133555) --- .../components/recorder/history/modern.py | 76 ++------ .../components/recorder/statistics.py | 61 ++---- tests/components/recorder/test_history.py | 124 ------------- tests/components/recorder/test_statistics.py | 173 +----------------- 4 files changed, 34 insertions(+), 400 deletions(-) diff --git a/homeassistant/components/recorder/history/modern.py b/homeassistant/components/recorder/history/modern.py index 279ca9c9eea..9159bbc6181 100644 --- a/homeassistant/components/recorder/history/modern.py +++ b/homeassistant/components/recorder/history/modern.py @@ -27,13 +27,8 @@ from homeassistant.core import HomeAssistant, State, split_entity_id from homeassistant.helpers.recorder import get_instance import homeassistant.util.dt as dt_util -from ..const import LAST_REPORTED_SCHEMA_VERSION, SupportedDialect -from ..db_schema import ( - SHARED_ATTR_OR_LEGACY_ATTRIBUTES, - StateAttributes, - States, - StatesMeta, -) +from ..const import LAST_REPORTED_SCHEMA_VERSION +from ..db_schema import SHARED_ATTR_OR_LEGACY_ATTRIBUTES, StateAttributes, States from ..filters import Filters from ..models import ( LazyState, @@ -150,7 +145,6 @@ def _significant_states_stmt( no_attributes: bool, include_start_time_state: bool, run_start_ts: float | None, - lateral_join_for_start_time: bool, ) -> Select | CompoundSelect: """Query the database for significant state changes.""" include_last_changed = not significant_changes_only @@ -190,7 +184,6 @@ def _significant_states_stmt( metadata_ids, no_attributes, include_last_changed, - lateral_join_for_start_time, ).subquery(), no_attributes, include_last_changed, @@ -261,7 +254,6 @@ def get_significant_states_with_session( start_time_ts = start_time.timestamp() end_time_ts = datetime_to_timestamp_or_none(end_time) single_metadata_id = metadata_ids[0] if len(metadata_ids) == 1 else None - lateral_join_for_start_time = instance.dialect_name == SupportedDialect.POSTGRESQL stmt = lambda_stmt( lambda: _significant_states_stmt( start_time_ts, @@ -273,7 +265,6 @@ def get_significant_states_with_session( no_attributes, include_start_time_state, run_start_ts, - lateral_join_for_start_time, ), track_on=[ bool(single_metadata_id), @@ -565,61 +556,30 @@ def _get_start_time_state_for_entities_stmt( metadata_ids: list[int], no_attributes: bool, include_last_changed: bool, - lateral_join_for_start_time: bool, ) -> Select: """Baked query to get states for specific entities.""" # We got an include-list of entities, accelerate the query by filtering already # in the inner and the outer query. - if lateral_join_for_start_time: - # PostgreSQL does not support index skip scan/loose index scan - # https://wiki.postgresql.org/wiki/Loose_indexscan - # so we need to do a lateral join to get the max last_updated_ts - # for each metadata_id as a group-by is too slow. 
- # https://github.com/home-assistant/core/issues/132865 - max_metadata_id = StatesMeta.metadata_id.label("max_metadata_id") - max_last_updated = ( - select(func.max(States.last_updated_ts)) - .where( - (States.metadata_id == max_metadata_id) - & (States.last_updated_ts >= run_start_ts) - & (States.last_updated_ts < epoch_time) - ) - .subquery() - .lateral() - ) - most_recent_states_for_entities_by_date = ( - select(max_metadata_id, max_last_updated.c[0].label("max_last_updated")) - .select_from(StatesMeta) - .join( - max_last_updated, - StatesMeta.metadata_id == max_metadata_id, - ) - .where(StatesMeta.metadata_id.in_(metadata_ids)) - ).subquery() - else: - # Simple group-by for MySQL and SQLite, must use less - # than 1000 metadata_ids in the IN clause for MySQL - # or it will optimize poorly. - most_recent_states_for_entities_by_date = ( - select( - States.metadata_id.label("max_metadata_id"), - func.max(States.last_updated_ts).label("max_last_updated"), - ) - .filter( - (States.last_updated_ts >= run_start_ts) - & (States.last_updated_ts < epoch_time) - & States.metadata_id.in_(metadata_ids) - ) - .group_by(States.metadata_id) - .subquery() - ) - stmt = ( _stmt_and_join_attributes_for_start_state( no_attributes, include_last_changed, False ) .join( - most_recent_states_for_entities_by_date, + ( + most_recent_states_for_entities_by_date := ( + select( + States.metadata_id.label("max_metadata_id"), + func.max(States.last_updated_ts).label("max_last_updated"), + ) + .filter( + (States.last_updated_ts >= run_start_ts) + & (States.last_updated_ts < epoch_time) + & States.metadata_id.in_(metadata_ids) + ) + .group_by(States.metadata_id) + .subquery() + ) + ), and_( States.metadata_id == most_recent_states_for_entities_by_date.c.max_metadata_id, @@ -661,7 +621,6 @@ def _get_start_time_state_stmt( metadata_ids: list[int], no_attributes: bool, include_last_changed: bool, - lateral_join_for_start_time: bool, ) -> Select: """Return the states at a specific point in time.""" if single_metadata_id: @@ -682,7 +641,6 @@ def _get_start_time_state_stmt( metadata_ids, no_attributes, include_last_changed, - lateral_join_for_start_time, ) diff --git a/homeassistant/components/recorder/statistics.py b/homeassistant/components/recorder/statistics.py index 9e47ca43c5b..3f1d5b981e3 100644 --- a/homeassistant/components/recorder/statistics.py +++ b/homeassistant/components/recorder/statistics.py @@ -63,7 +63,6 @@ from .db_schema import ( STATISTICS_TABLES, Statistics, StatisticsBase, - StatisticsMeta, StatisticsRuns, StatisticsShortTerm, ) @@ -1670,7 +1669,6 @@ def _augment_result_with_change( drop_sum = "sum" not in _types prev_sums = {} if tmp := _statistics_at_time( - hass, session, {metadata[statistic_id][0] for statistic_id in result}, table, @@ -2034,50 +2032,22 @@ def _generate_statistics_at_time_stmt( metadata_ids: set[int], start_time_ts: float, types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]], - lateral_join_for_start_time: bool, ) -> StatementLambdaElement: """Create the statement for finding the statistics for a given time.""" stmt = _generate_select_columns_for_types_stmt(table, types) - if lateral_join_for_start_time: - # PostgreSQL does not support index skip scan/loose index scan - # https://wiki.postgresql.org/wiki/Loose_indexscan - # so we need to do a lateral join to get the max max_start_ts - # for each metadata_id as a group-by is too slow. 
- # https://github.com/home-assistant/core/issues/132865 - max_metadata_id = StatisticsMeta.id.label("max_metadata_id") - max_start = ( - select(func.max(table.start_ts)) - .filter(table.metadata_id == max_metadata_id) - .filter(table.start_ts < start_time_ts) - .filter(table.metadata_id.in_(metadata_ids)) - .subquery() - .lateral() - ) - most_recent_statistic_ids = ( - select(max_metadata_id, max_start.c[0].label("max_start_ts")) - .select_from(StatisticsMeta) - .join( - max_start, - StatisticsMeta.id == max_metadata_id, - ) - .where(StatisticsMeta.id.in_(metadata_ids)) - ).subquery() - else: - # Simple group-by for MySQL and SQLite, must use less - # than 1000 metadata_ids in the IN clause for MySQL - # or it will optimize poorly. - most_recent_statistic_ids = ( - select( - func.max(table.start_ts).label("max_start_ts"), - table.metadata_id.label("max_metadata_id"), - ) - .filter(table.start_ts < start_time_ts) - .filter(table.metadata_id.in_(metadata_ids)) - .group_by(table.metadata_id) - .subquery() - ) stmt += lambda q: q.join( - most_recent_statistic_ids, + ( + most_recent_statistic_ids := ( + select( + func.max(table.start_ts).label("max_start_ts"), + table.metadata_id.label("max_metadata_id"), + ) + .filter(table.start_ts < start_time_ts) + .filter(table.metadata_id.in_(metadata_ids)) + .group_by(table.metadata_id) + .subquery() + ) + ), and_( table.start_ts == most_recent_statistic_ids.c.max_start_ts, table.metadata_id == most_recent_statistic_ids.c.max_metadata_id, @@ -2087,7 +2057,6 @@ def _generate_statistics_at_time_stmt( def _statistics_at_time( - hass: HomeAssistant, session: Session, metadata_ids: set[int], table: type[StatisticsBase], @@ -2096,11 +2065,7 @@ def _statistics_at_time( ) -> Sequence[Row] | None: """Return last known statistics, earlier than start_time, for the metadata_ids.""" start_time_ts = start_time.timestamp() - dialect_name = get_instance(hass).dialect_name - lateral_join_for_start_time = dialect_name == SupportedDialect.POSTGRESQL - stmt = _generate_statistics_at_time_stmt( - table, metadata_ids, start_time_ts, types, lateral_join_for_start_time - ) + stmt = _generate_statistics_at_time_stmt(table, metadata_ids, start_time_ts, types) return cast(Sequence[Row], execute_stmt_lambda_element(session, stmt)) diff --git a/tests/components/recorder/test_history.py b/tests/components/recorder/test_history.py index eea4605039b..28b8275247c 100644 --- a/tests/components/recorder/test_history.py +++ b/tests/components/recorder/test_history.py @@ -1014,127 +1014,3 @@ async def test_get_last_state_changes_with_non_existent_entity_ids_returns_empty ) -> None: """Test get_last_state_changes returns an empty dict when entities not in the db.""" assert history.get_last_state_changes(hass, 1, "nonexistent.entity") == {} - - -@pytest.mark.skip_on_db_engine(["sqlite", "mysql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -@pytest.mark.usefixtures("recorder_db_url") -async def test_get_significant_states_with_session_uses_lateral_with_postgresql( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test get_significant_states_with_session uses the lateral path with PostgreSQL.""" - entity_id = "media_player.test" - hass.states.async_set("any.other", "on") - await async_wait_recording_done(hass) - hass.states.async_set(entity_id, "off") - - def set_state(state): - """Set the state.""" - hass.states.async_set(entity_id, state, {"any": 1}) - return hass.states.get(entity_id) - - start = dt_util.utcnow().replace(microsecond=0) - point = start + 
timedelta(seconds=1) - point2 = start + timedelta(seconds=1, microseconds=100) - point3 = start + timedelta(seconds=1, microseconds=200) - end = point + timedelta(seconds=1, microseconds=400) - - with freeze_time(start) as freezer: - set_state("idle") - set_state("YouTube") - - freezer.move_to(point) - states = [set_state("idle")] - - freezer.move_to(point2) - states.append(set_state("Netflix")) - - freezer.move_to(point3) - states.append(set_state("Plex")) - - freezer.move_to(end) - set_state("Netflix") - set_state("Plex") - await async_wait_recording_done(hass) - - start_time = point2 + timedelta(microseconds=10) - hist = history.get_significant_states( - hass=hass, - start_time=start_time, # Pick a point where we will generate a start time state - end_time=end, - entity_ids=[entity_id, "any.other"], - include_start_time_state=True, - ) - assert len(hist[entity_id]) == 2 - - sqlalchemy_logs = "".join( - [ - record.getMessage() - for record in caplog.records - if record.name.startswith("sqlalchemy.engine") - ] - ) - # We can't patch inside the lambda so we have to check the logs - assert "JOIN LATERAL" in sqlalchemy_logs - - -@pytest.mark.skip_on_db_engine(["postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -@pytest.mark.usefixtures("recorder_db_url") -async def test_get_significant_states_with_session_uses_non_lateral_without_postgresql( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test get_significant_states_with_session does not use a the lateral path without PostgreSQL.""" - entity_id = "media_player.test" - hass.states.async_set("any.other", "on") - await async_wait_recording_done(hass) - hass.states.async_set(entity_id, "off") - - def set_state(state): - """Set the state.""" - hass.states.async_set(entity_id, state, {"any": 1}) - return hass.states.get(entity_id) - - start = dt_util.utcnow().replace(microsecond=0) - point = start + timedelta(seconds=1) - point2 = start + timedelta(seconds=1, microseconds=100) - point3 = start + timedelta(seconds=1, microseconds=200) - end = point + timedelta(seconds=1, microseconds=400) - - with freeze_time(start) as freezer: - set_state("idle") - set_state("YouTube") - - freezer.move_to(point) - states = [set_state("idle")] - - freezer.move_to(point2) - states.append(set_state("Netflix")) - - freezer.move_to(point3) - states.append(set_state("Plex")) - - freezer.move_to(end) - set_state("Netflix") - set_state("Plex") - await async_wait_recording_done(hass) - - start_time = point2 + timedelta(microseconds=10) - hist = history.get_significant_states( - hass=hass, - start_time=start_time, # Pick a point where we will generate a start time state - end_time=end, - entity_ids=[entity_id, "any.other"], - include_start_time_state=True, - ) - assert len(hist[entity_id]) == 2 - - sqlalchemy_logs = "".join( - [ - record.getMessage() - for record in caplog.records - if record.name.startswith("sqlalchemy.engine") - ] - ) - # We can't patch inside the lambda so we have to check the logs - assert "JOIN LATERAL" not in sqlalchemy_logs diff --git a/tests/components/recorder/test_statistics.py b/tests/components/recorder/test_statistics.py index 55029c3eacf..6b1e1a655db 100644 --- a/tests/components/recorder/test_statistics.py +++ b/tests/components/recorder/test_statistics.py @@ -1914,185 +1914,20 @@ def test_cache_key_for_generate_max_mean_min_statistic_in_sub_period_stmt() -> N assert cache_key_1 != cache_key_3 -@pytest.mark.parametrize("lateral_join_for_start_time", [True, False]) -def 
test_cache_key_for_generate_statistics_at_time_stmt( - lateral_join_for_start_time: bool, -) -> None: +def test_cache_key_for_generate_statistics_at_time_stmt() -> None: """Test cache key for _generate_statistics_at_time_stmt.""" - stmt = _generate_statistics_at_time_stmt( - StatisticsShortTerm, {0}, 0.0, set(), lateral_join_for_start_time - ) + stmt = _generate_statistics_at_time_stmt(StatisticsShortTerm, {0}, 0.0, set()) cache_key_1 = stmt._generate_cache_key() - stmt2 = _generate_statistics_at_time_stmt( - StatisticsShortTerm, {0}, 0.0, set(), lateral_join_for_start_time - ) + stmt2 = _generate_statistics_at_time_stmt(StatisticsShortTerm, {0}, 0.0, set()) cache_key_2 = stmt2._generate_cache_key() assert cache_key_1 == cache_key_2 stmt3 = _generate_statistics_at_time_stmt( - StatisticsShortTerm, - {0}, - 0.0, - {"sum", "mean"}, - lateral_join_for_start_time, + StatisticsShortTerm, {0}, 0.0, {"sum", "mean"} ) cache_key_3 = stmt3._generate_cache_key() assert cache_key_1 != cache_key_3 -@pytest.mark.skip_on_db_engine(["sqlite", "mysql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -@pytest.mark.usefixtures("recorder_db_url") -@pytest.mark.freeze_time("2022-10-01 00:00:00+00:00") -async def test_statistics_at_time_uses_lateral_query_with_postgresql( - hass: HomeAssistant, - setup_recorder: None, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test statistics_at_time uses a lateral query with PostgreSQL.""" - await async_wait_recording_done(hass) - assert "Compiling statistics for" not in caplog.text - assert "Statistics already compiled" not in caplog.text - - zero = dt_util.utcnow() - period1 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 00:00:00")) - period2 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 01:00:00")) - period3 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 02:00:00")) - period4 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 03:00:00")) - - external_statistics = ( - { - "start": period1, - "last_reset": None, - "state": 0, - "sum": 2, - }, - { - "start": period2, - "last_reset": None, - "state": 1, - "sum": 3, - }, - { - "start": period3, - "last_reset": None, - "state": 2, - "sum": 5, - }, - { - "start": period4, - "last_reset": None, - "state": 3, - "sum": 8, - }, - ) - external_metadata = { - "has_mean": False, - "has_sum": True, - "name": "Total imported energy", - "source": "recorder", - "statistic_id": "sensor.total_energy_import", - "unit_of_measurement": "kWh", - } - - async_import_statistics(hass, external_metadata, external_statistics) - await async_wait_recording_done(hass) - # Get change from far in the past - stats = statistics_during_period( - hass, - zero, - period="hour", - statistic_ids={"sensor.total_energy_import"}, - types={"change", "sum"}, - ) - assert stats - sqlalchemy_logs = "".join( - [ - record.getMessage() - for record in caplog.records - if record.name.startswith("sqlalchemy.engine") - ] - ) - # We can't patch inside the lambda so we have to check the logs - assert "JOIN LATERAL" in sqlalchemy_logs - - -@pytest.mark.skip_on_db_engine(["postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -@pytest.mark.usefixtures("recorder_db_url") -@pytest.mark.freeze_time("2022-10-01 00:00:00+00:00") -async def test_statistics_at_time_uses_non_lateral_query_without_postgresql( - hass: HomeAssistant, - setup_recorder: None, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test statistics_at_time does not use a lateral query without PostgreSQL.""" - await async_wait_recording_done(hass) - assert "Compiling statistics for" 
not in caplog.text - assert "Statistics already compiled" not in caplog.text - - zero = dt_util.utcnow() - period1 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 00:00:00")) - period2 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 01:00:00")) - period3 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 02:00:00")) - period4 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 03:00:00")) - - external_statistics = ( - { - "start": period1, - "last_reset": None, - "state": 0, - "sum": 2, - }, - { - "start": period2, - "last_reset": None, - "state": 1, - "sum": 3, - }, - { - "start": period3, - "last_reset": None, - "state": 2, - "sum": 5, - }, - { - "start": period4, - "last_reset": None, - "state": 3, - "sum": 8, - }, - ) - external_metadata = { - "has_mean": False, - "has_sum": True, - "name": "Total imported energy", - "source": "recorder", - "statistic_id": "sensor.total_energy_import", - "unit_of_measurement": "kWh", - } - - async_import_statistics(hass, external_metadata, external_statistics) - await async_wait_recording_done(hass) - # Get change from far in the past - stats = statistics_during_period( - hass, - zero, - period="hour", - statistic_ids={"sensor.total_energy_import"}, - types={"change", "sum"}, - ) - assert stats - sqlalchemy_logs = "".join( - [ - record.getMessage() - for record in caplog.records - if record.name.startswith("sqlalchemy.engine") - ] - ) - # We can't patch inside the lambda so we have to check the logs - assert "JOIN LATERAL" not in sqlalchemy_logs - - @pytest.mark.parametrize("timezone", ["America/Regina", "Europe/Vienna", "UTC"]) @pytest.mark.freeze_time("2022-10-01 00:00:00+00:00") async def test_change( From cd384cadbef19cc23987f5994c30a8ee69d52a15 Mon Sep 17 00:00:00 2001 From: Christopher Fenner <9592452+CFenner@users.noreply.github.com> Date: Thu, 19 Dec 2024 10:04:26 +0100 Subject: [PATCH 0867/1198] Fulfill IQS rule config-flow in ViCare integration (#133524) * add data_description * Apply suggestions from code review Co-authored-by: Josef Zweck --------- Co-authored-by: Josef Zweck --- homeassistant/components/vicare/quality_scale.yaml | 4 +--- homeassistant/components/vicare/strings.json | 10 ++++++++++ 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/vicare/quality_scale.yaml b/homeassistant/components/vicare/quality_scale.yaml index 436e046204f..959e2e90583 100644 --- a/homeassistant/components/vicare/quality_scale.yaml +++ b/homeassistant/components/vicare/quality_scale.yaml @@ -1,8 +1,6 @@ rules: # Bronze - config-flow: - status: todo - comment: data_description is missing. + config-flow: done test-before-configure: done unique-config-entry: status: todo diff --git a/homeassistant/components/vicare/strings.json b/homeassistant/components/vicare/strings.json index 77e570da779..4934507e41c 100644 --- a/homeassistant/components/vicare/strings.json +++ b/homeassistant/components/vicare/strings.json @@ -9,6 +9,12 @@ "password": "[%key:common::config_flow::data::password%]", "client_id": "Client ID", "heating_type": "Heating type" + }, + "data_description": { + "username": "The email address to login to your ViCare account.", + "password": "The password to login to your ViCare account.", + "client_id": "The ID of the API client created in the Viessmann developer portal.", + "heating_type": "Allows to overrule the device auto detection." 
} }, "reauth_confirm": { @@ -16,6 +22,10 @@ "data": { "password": "[%key:common::config_flow::data::password%]", "client_id": "[%key:component::vicare::config::step::user::data::client_id%]" + }, + "data_description": { + "password": "[%key:component::vicare::config::step::user::data_description::password%]", + "client_id": "[%key:component::vicare::config::step::user::data_description::client_id%]" } } }, From a76f82080bd7ebabb8f502f2c71e6141efa1ac17 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 19 Dec 2024 10:40:07 +0100 Subject: [PATCH 0868/1198] Create repair issues when automatic backup fails (#133513) * Create repair issues when automatic backup fails * Improve test coverage * Adjust issues --- homeassistant/components/backup/manager.py | 43 +++- homeassistant/components/backup/strings.json | 10 + tests/components/backup/test_manager.py | 209 +++++++++++++++++++ 3 files changed, 261 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index 99373b1942a..4a0b8553f1c 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -23,7 +23,11 @@ from homeassistant.backup_restore import RESTORE_BACKUP_FILE, password_to_key from homeassistant.const import __version__ as HAVERSION from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import instance_id, integration_platform +from homeassistant.helpers import ( + instance_id, + integration_platform, + issue_registry as ir, +) from homeassistant.helpers.json import json_bytes from homeassistant.util import dt as dt_util @@ -691,6 +695,8 @@ class BackupManager: CreateBackupEvent(stage=None, state=CreateBackupState.FAILED) ) self.async_on_backup_event(IdleEvent()) + if with_automatic_settings: + self._update_issue_backup_failed() raise async def _async_create_backup( @@ -750,6 +756,8 @@ class BackupManager: self.async_on_backup_event( CreateBackupEvent(stage=None, state=CreateBackupState.FAILED) ) + if with_automatic_settings: + self._update_issue_backup_failed() else: LOGGER.debug( "Generated new backup with backup_id %s, uploading to agents %s", @@ -772,6 +780,7 @@ class BackupManager: # create backup was successful, update last_completed_automatic_backup self.config.data.last_completed_automatic_backup = dt_util.now() self.store.save() + self._update_issue_after_agent_upload(agent_errors) self.known_backups.add(written_backup.backup, agent_errors) # delete old backups more numerous than copies @@ -878,6 +887,38 @@ class BackupManager: self._backup_event_subscriptions.append(on_event) return remove_subscription + def _update_issue_backup_failed(self) -> None: + """Update issue registry when a backup fails.""" + ir.async_create_issue( + self.hass, + DOMAIN, + "automatic_backup_failed", + is_fixable=False, + is_persistent=True, + learn_more_url="homeassistant://config/backup", + severity=ir.IssueSeverity.WARNING, + translation_key="automatic_backup_failed_create", + ) + + def _update_issue_after_agent_upload( + self, agent_errors: dict[str, Exception] + ) -> None: + """Update issue registry after a backup is uploaded to agents.""" + if not agent_errors: + ir.async_delete_issue(self.hass, DOMAIN, "automatic_backup_failed") + return + ir.async_create_issue( + self.hass, + DOMAIN, + "automatic_backup_failed", + is_fixable=False, + is_persistent=True, + learn_more_url="homeassistant://config/backup", + 
severity=ir.IssueSeverity.WARNING, + translation_key="automatic_backup_failed_upload_agents", + translation_placeholders={"failed_agents": ", ".join(agent_errors)}, + ) + class KnownBackups: """Track known backups.""" diff --git a/homeassistant/components/backup/strings.json b/homeassistant/components/backup/strings.json index 6ad3416b1b9..d9de2bff861 100644 --- a/homeassistant/components/backup/strings.json +++ b/homeassistant/components/backup/strings.json @@ -1,4 +1,14 @@ { + "issues": { + "automatic_backup_failed_create": { + "title": "Automatic backup could not be created", + "description": "The automatic backup could not be created. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured." + }, + "automatic_backup_failed_upload_agents": { + "title": "Automatic backup could not be uploaded to agents", + "description": "The automatic backup could not be uploaded to agents {failed_agents}. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured." + } + }, "services": { "create": { "name": "Create backup", diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index 5795309501d..e976ad0c099 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -34,6 +34,7 @@ from homeassistant.components.backup.manager import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component from .common import ( @@ -534,6 +535,214 @@ async def test_async_initiate_backup_with_agent_error( ] +@pytest.mark.usefixtures("mock_backup_generation") +@pytest.mark.parametrize( + ("create_backup_command", "issues_after_create_backup"), + [ + ( + {"type": "backup/generate", "agent_ids": [LOCAL_AGENT_ID]}, + {(DOMAIN, "automatic_backup_failed")}, + ), + ( + {"type": "backup/generate_with_automatic_settings"}, + set(), + ), + ], +) +async def test_create_backup_success_clears_issue( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + create_backup_command: dict[str, Any], + issues_after_create_backup: set[tuple[str, str]], +) -> None: + """Test backup issue is cleared after backup is created.""" + await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + + # Create a backup issue + ir.async_create_issue( + hass, + DOMAIN, + "automatic_backup_failed", + is_fixable=False, + is_persistent=True, + severity=ir.IssueSeverity.WARNING, + translation_key="automatic_backup_failed_create", + ) + + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": [LOCAL_AGENT_ID]}, + } + ) + result = await ws_client.receive_json() + assert result["success"] is True + + await ws_client.send_json_auto_id(create_backup_command) + result = await ws_client.receive_json() + assert result["success"] is True + + await hass.async_block_till_done() + + issue_registry = ir.async_get(hass) + assert set(issue_registry.issues) == issues_after_create_backup + + +async def delayed_boom(*args, **kwargs) -> None: + """Raise an exception after a delay.""" + + async def delayed_boom() -> None: + await asyncio.sleep(0) + raise Exception("Boom!") # noqa: TRY002 + + return (NewBackup(backup_job_id="abc123"), delayed_boom()) + + 
+@pytest.mark.parametrize( + ( + "create_backup_command", + "create_backup_side_effect", + "agent_upload_side_effect", + "create_backup_result", + "issues_after_create_backup", + ), + [ + # No error + ( + {"type": "backup/generate", "agent_ids": ["test.remote"]}, + None, + None, + True, + {}, + ), + ( + {"type": "backup/generate_with_automatic_settings"}, + None, + None, + True, + {}, + ), + # Error raised in async_initiate_backup + ( + {"type": "backup/generate", "agent_ids": ["test.remote"]}, + Exception("Boom!"), + None, + False, + {}, + ), + ( + {"type": "backup/generate_with_automatic_settings"}, + Exception("Boom!"), + None, + False, + { + (DOMAIN, "automatic_backup_failed"): { + "translation_key": "automatic_backup_failed_create", + "translation_placeholders": None, + } + }, + ), + # Error raised when awaiting the backup task + ( + {"type": "backup/generate", "agent_ids": ["test.remote"]}, + delayed_boom, + None, + True, + {}, + ), + ( + {"type": "backup/generate_with_automatic_settings"}, + delayed_boom, + None, + True, + { + (DOMAIN, "automatic_backup_failed"): { + "translation_key": "automatic_backup_failed_create", + "translation_placeholders": None, + } + }, + ), + # Error raised in async_upload_backup + ( + {"type": "backup/generate", "agent_ids": ["test.remote"]}, + None, + Exception("Boom!"), + True, + {}, + ), + ( + {"type": "backup/generate_with_automatic_settings"}, + None, + Exception("Boom!"), + True, + { + (DOMAIN, "automatic_backup_failed"): { + "translation_key": "automatic_backup_failed_upload_agents", + "translation_placeholders": {"failed_agents": "test.remote"}, + } + }, + ), + ], +) +async def test_create_backup_failure_raises_issue( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + create_backup: AsyncMock, + create_backup_command: dict[str, Any], + create_backup_side_effect: Exception | None, + agent_upload_side_effect: Exception | None, + create_backup_result: bool, + issues_after_create_backup: dict[tuple[str, str], dict[str, Any]], +) -> None: + """Test backup issue is cleared after backup is created.""" + remote_agent = BackupAgentTest("remote", backups=[]) + + await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) + + await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + + ws_client = await hass_ws_client(hass) + + create_backup.side_effect = create_backup_side_effect + + await ws_client.send_json_auto_id( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.remote"]}, + } + ) + result = await ws_client.receive_json() + assert result["success"] is True + + with patch.object( + remote_agent, "async_upload_backup", side_effect=agent_upload_side_effect + ): + await ws_client.send_json_auto_id(create_backup_command) + result = await ws_client.receive_json() + assert result["success"] == create_backup_result + await hass.async_block_till_done() + + issue_registry = ir.async_get(hass) + assert set(issue_registry.issues) == set(issues_after_create_backup) + for issue_id, issue_data in issues_after_create_backup.items(): + issue = issue_registry.issues[issue_id] + assert issue.translation_key == issue_data["translation_key"] + assert issue.translation_placeholders == issue_data["translation_placeholders"] + + async def test_loading_platforms( hass: HomeAssistant, caplog: 
pytest.LogCaptureFixture, From 3568bdca655caa831abf5dfc39a2d3742eb26530 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 19 Dec 2024 10:48:43 +0100 Subject: [PATCH 0869/1198] Update Home Assistant base image to 2024.12.0 (#133558) --- build.yaml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/build.yaml b/build.yaml index a8755bbbf5c..e6e149cf700 100644 --- a/build.yaml +++ b/build.yaml @@ -1,10 +1,10 @@ image: ghcr.io/home-assistant/{arch}-homeassistant build_from: - aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.11.0 - armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.11.0 - armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.11.0 - amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.11.0 - i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.11.0 + aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.12.0 + armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.12.0 + armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.12.0 + amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.12.0 + i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.12.0 codenotary: signer: notary@home-assistant.io base_image: notary@home-assistant.io From 79484ea7f5564928ab498f8b895465751ef82efe Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Thu, 19 Dec 2024 10:50:12 +0100 Subject: [PATCH 0870/1198] Grammar fixes for action names and descriptions (#133559) Several KNX actions contain a wrong "s" at the end of their verbs while those are missing in several of the descriptions. This commit changes all those to make them consistent with the remaining actions in KNX and the standard terminology in Home Assistant. --- homeassistant/components/knx/strings.json | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/knx/strings.json b/homeassistant/components/knx/strings.json index 08b921f316b..d697fa79e78 100644 --- a/homeassistant/components/knx/strings.json +++ b/homeassistant/components/knx/strings.json @@ -360,8 +360,8 @@ } }, "read": { - "name": "Reads from KNX bus", - "description": "Send GroupValueRead requests to the KNX bus. Response can be used from `knx_event` and will be processed in KNX entities.", + "name": "Read from KNX bus", + "description": "Sends GroupValueRead requests to the KNX bus. Response can be used from `knx_event` and will be processed in KNX entities.", "fields": { "address": { "name": "[%key:component::knx::services::send::fields::address::name%]", @@ -370,8 +370,8 @@ } }, "event_register": { - "name": "Registers knx_event", - "description": "Add or remove group addresses to knx_event filter for triggering `knx_event`s. Only addresses added with this service can be removed.", + "name": "Register knx_event", + "description": "Adds or removes group addresses to knx_event filter for triggering `knx_event`s. Only addresses added with this service can be removed.", "fields": { "address": { "name": "[%key:component::knx::services::send::fields::address::name%]", @@ -389,7 +389,7 @@ }, "exposure_register": { "name": "Expose to KNX bus", - "description": "Adds or remove exposures to KNX bus. Only exposures added with this service can be removed.", + "description": "Adds or removes exposures to KNX bus. 
Only exposures added with this service can be removed.", "fields": { "address": { "name": "[%key:component::knx::services::send::fields::address::name%]", From 9a6c749714fdfff24af830da0cbea25634d39efc Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Thu, 19 Dec 2024 10:51:30 +0100 Subject: [PATCH 0871/1198] Change 'GSuite' to 'Workspace', fix 'Start' field label (#133554) * Change 'GSuite' to 'Workspace', fix 'Start' field label Several years ago Google renamed "G Suite" to "Google Workspace", this commit applies the same change to one of the field descriptions of the set_vacation action. In addition the "Start" field of the action currently uses the common action (!) for Start which is wrong in this context, it stands for the beginning here. This commit changes this back to a local definition of this label just like "End". In German for example "Start" needs to be "Beginn" in this context while the common action is translated as "Starten". * Use "Google Workspace" for more clarity Co-authored-by: Joost Lekkerkerker --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/google_mail/strings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/google_mail/strings.json b/homeassistant/components/google_mail/strings.json index 2c6e24109c3..f93a8581e1c 100644 --- a/homeassistant/components/google_mail/strings.json +++ b/homeassistant/components/google_mail/strings.json @@ -68,10 +68,10 @@ }, "restrict_domain": { "name": "Restrict to domain", - "description": "Restrict automatic reply to domain. This only affects GSuite accounts." + "description": "Restrict automatic reply to domain. This only affects Google Workspace accounts." }, "start": { - "name": "[%key:common::action::start%]", + "name": "Start", "description": "First day of the vacation." }, "end": { From 1c119518db79ab73a4338708769920793a9d7265 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 19 Dec 2024 10:52:10 +0100 Subject: [PATCH 0872/1198] Bump codecov/codecov-action from 5.1.1 to 5.1.2 (#133547) Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 5.1.1 to 5.1.2. - [Release notes](https://github.com/codecov/codecov-action/releases) - [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/codecov/codecov-action/compare/v5.1.1...v5.1.2) --- updated-dependencies: - dependency-name: codecov/codecov-action dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 71924afecc8..98f4fb04e34 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1273,7 +1273,7 @@ jobs: pattern: coverage-* - name: Upload coverage to Codecov if: needs.info.outputs.test_full_suite == 'true' - uses: codecov/codecov-action@v5.1.1 + uses: codecov/codecov-action@v5.1.2 with: fail_ci_if_error: true flags: full-suite @@ -1411,7 +1411,7 @@ jobs: pattern: coverage-* - name: Upload coverage to Codecov if: needs.info.outputs.test_full_suite == 'false' - uses: codecov/codecov-action@v5.1.1 + uses: codecov/codecov-action@v5.1.2 with: fail_ci_if_error: true token: ${{ secrets.CODECOV_TOKEN }} From d35b34f14226975cbf042cb0f8ed602d28b00b74 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 19 Dec 2024 00:14:32 -1000 Subject: [PATCH 0873/1198] Replace start time state query with single correlated scalar subquery (#133553) --- .../components/recorder/history/modern.py | 55 ++++++++++--------- .../components/recorder/statistics.py | 44 +++++++++------ 2 files changed, 57 insertions(+), 42 deletions(-) diff --git a/homeassistant/components/recorder/history/modern.py b/homeassistant/components/recorder/history/modern.py index 9159bbc6181..e9af4a673c3 100644 --- a/homeassistant/components/recorder/history/modern.py +++ b/homeassistant/components/recorder/history/modern.py @@ -28,7 +28,12 @@ from homeassistant.helpers.recorder import get_instance import homeassistant.util.dt as dt_util from ..const import LAST_REPORTED_SCHEMA_VERSION -from ..db_schema import SHARED_ATTR_OR_LEGACY_ATTRIBUTES, StateAttributes, States +from ..db_schema import ( + SHARED_ATTR_OR_LEGACY_ATTRIBUTES, + StateAttributes, + States, + StatesMeta, +) from ..filters import Filters from ..models import ( LazyState, @@ -558,40 +563,38 @@ def _get_start_time_state_for_entities_stmt( include_last_changed: bool, ) -> Select: """Baked query to get states for specific entities.""" - # We got an include-list of entities, accelerate the query by filtering already - # in the inner and the outer query. + # This query is the result of significant research in + # https://github.com/home-assistant/core/issues/132865 + # A reverse index scan with a limit 1 is the fastest way to get the + # last state change before a specific point in time for all supported + # databases. Since all databases support this query as a join + # condition we can use it as a subquery to get the last state change + # before a specific point in time for all entities. 
stmt = ( _stmt_and_join_attributes_for_start_state( no_attributes, include_last_changed, False ) + .select_from(StatesMeta) .join( - ( - most_recent_states_for_entities_by_date := ( - select( - States.metadata_id.label("max_metadata_id"), - func.max(States.last_updated_ts).label("max_last_updated"), - ) - .filter( - (States.last_updated_ts >= run_start_ts) - & (States.last_updated_ts < epoch_time) - & States.metadata_id.in_(metadata_ids) - ) - .group_by(States.metadata_id) - .subquery() - ) - ), + States, and_( - States.metadata_id - == most_recent_states_for_entities_by_date.c.max_metadata_id, States.last_updated_ts - == most_recent_states_for_entities_by_date.c.max_last_updated, + == ( + select(States.last_updated_ts) + .where( + (StatesMeta.metadata_id == States.metadata_id) + & (States.last_updated_ts < epoch_time) + & (States.last_updated_ts >= run_start_ts) + ) + .order_by(States.last_updated_ts.desc()) + .limit(1) + ) + .scalar_subquery() + .correlate(StatesMeta), + States.metadata_id == StatesMeta.metadata_id, ), ) - .filter( - (States.last_updated_ts >= run_start_ts) - & (States.last_updated_ts < epoch_time) - & States.metadata_id.in_(metadata_ids) - ) + .where(StatesMeta.metadata_id.in_(metadata_ids)) ) if no_attributes: return stmt diff --git a/homeassistant/components/recorder/statistics.py b/homeassistant/components/recorder/statistics.py index 3f1d5b981e3..c6783a5cbc2 100644 --- a/homeassistant/components/recorder/statistics.py +++ b/homeassistant/components/recorder/statistics.py @@ -63,6 +63,7 @@ from .db_schema import ( STATISTICS_TABLES, Statistics, StatisticsBase, + StatisticsMeta, StatisticsRuns, StatisticsShortTerm, ) @@ -2034,24 +2035,35 @@ def _generate_statistics_at_time_stmt( types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]], ) -> StatementLambdaElement: """Create the statement for finding the statistics for a given time.""" + # This query is the result of significant research in + # https://github.com/home-assistant/core/issues/132865 + # A reverse index scan with a limit 1 is the fastest way to get the + # last start_time_ts before a specific point in time for all supported + # databases. Since all databases support this query as a join + # condition we can use it as a subquery to get the last start_time_ts + # before a specific point in time for all entities. 
stmt = _generate_select_columns_for_types_stmt(table, types) - stmt += lambda q: q.join( - ( - most_recent_statistic_ids := ( - select( - func.max(table.start_ts).label("max_start_ts"), - table.metadata_id.label("max_metadata_id"), + stmt += ( + lambda q: q.select_from(StatisticsMeta) + .join( + table, + and_( + table.start_ts + == ( + select(table.start_ts) + .where( + (StatisticsMeta.id == table.metadata_id) + & (table.start_ts < start_time_ts) + ) + .order_by(table.start_ts.desc()) + .limit(1) ) - .filter(table.start_ts < start_time_ts) - .filter(table.metadata_id.in_(metadata_ids)) - .group_by(table.metadata_id) - .subquery() - ) - ), - and_( - table.start_ts == most_recent_statistic_ids.c.max_start_ts, - table.metadata_id == most_recent_statistic_ids.c.max_metadata_id, - ), + .scalar_subquery() + .correlate(StatisticsMeta), + table.metadata_id == StatisticsMeta.id, + ), + ) + .where(table.metadata_id.in_(metadata_ids)) ) return stmt From bb7abd037c5e7e6c077170e3fa881959b78957e0 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 19 Dec 2024 11:50:12 +0100 Subject: [PATCH 0874/1198] Revert "Revert "Improve recorder history queries (#131702)"" (#133561) Revert "Revert "Improve recorder history queries (#131702)" (#133203)" This reverts commit 74e4654c26177909e653921f27f838fd1366adc0. --- homeassistant/components/history/__init__.py | 7 ++-- homeassistant/components/history/helpers.py | 13 ++++---- .../components/history/websocket_api.py | 7 ++-- homeassistant/components/recorder/core.py | 1 + .../components/recorder/history/legacy.py | 18 +++++------ .../components/recorder/history/modern.py | 31 +++++++++--------- homeassistant/components/recorder/purge.py | 3 ++ homeassistant/components/recorder/queries.py | 9 ++++++ .../recorder/table_managers/states.py | 32 +++++++++++++++++++ homeassistant/components/recorder/tasks.py | 2 -- tests/components/recorder/test_purge.py | 17 ++++++++++ 11 files changed, 102 insertions(+), 38 deletions(-) diff --git a/homeassistant/components/history/__init__.py b/homeassistant/components/history/__init__.py index 365be06fd2d..7241e1fac9a 100644 --- a/homeassistant/components/history/__init__.py +++ b/homeassistant/components/history/__init__.py @@ -22,7 +22,7 @@ import homeassistant.util.dt as dt_util from . import websocket_api from .const import DOMAIN -from .helpers import entities_may_have_state_changes_after, has_recorder_run_after +from .helpers import entities_may_have_state_changes_after, has_states_before CONF_ORDER = "use_include_order" @@ -107,7 +107,10 @@ class HistoryPeriodView(HomeAssistantView): no_attributes = "no_attributes" in request.query if ( - (end_time and not has_recorder_run_after(hass, end_time)) + # has_states_before will return True if there are states older than + # end_time. If it's false, we know there are no states in the + # database up until end_time. 
+ (end_time and not has_states_before(hass, end_time)) or not include_start_time_state and entity_ids and not entities_may_have_state_changes_after( diff --git a/homeassistant/components/history/helpers.py b/homeassistant/components/history/helpers.py index bd477e7e4ed..2010b7373ff 100644 --- a/homeassistant/components/history/helpers.py +++ b/homeassistant/components/history/helpers.py @@ -6,7 +6,6 @@ from collections.abc import Iterable from datetime import datetime as dt from homeassistant.components.recorder import get_instance -from homeassistant.components.recorder.models import process_timestamp from homeassistant.core import HomeAssistant @@ -26,8 +25,10 @@ def entities_may_have_state_changes_after( return False -def has_recorder_run_after(hass: HomeAssistant, run_time: dt) -> bool: - """Check if the recorder has any runs after a specific time.""" - return run_time >= process_timestamp( - get_instance(hass).recorder_runs_manager.first.start - ) +def has_states_before(hass: HomeAssistant, run_time: dt) -> bool: + """Check if the recorder has states as old or older than run_time. + + Returns True if there may be such states. + """ + oldest_ts = get_instance(hass).states_manager.oldest_ts + return oldest_ts is not None and run_time.timestamp() >= oldest_ts diff --git a/homeassistant/components/history/websocket_api.py b/homeassistant/components/history/websocket_api.py index c85d975c3c9..35f8ed5f1ac 100644 --- a/homeassistant/components/history/websocket_api.py +++ b/homeassistant/components/history/websocket_api.py @@ -39,7 +39,7 @@ from homeassistant.util.async_ import create_eager_task import homeassistant.util.dt as dt_util from .const import EVENT_COALESCE_TIME, MAX_PENDING_HISTORY_STATES -from .helpers import entities_may_have_state_changes_after, has_recorder_run_after +from .helpers import entities_may_have_state_changes_after, has_states_before _LOGGER = logging.getLogger(__name__) @@ -142,7 +142,10 @@ async def ws_get_history_during_period( no_attributes = msg["no_attributes"] if ( - (end_time and not has_recorder_run_after(hass, end_time)) + # has_states_before will return True if there are states older than + # end_time. If it's false, we know there are no states in the + # database up until end_time. 
+ (end_time and not has_states_before(hass, end_time)) or not include_start_time_state and entity_ids and not entities_may_have_state_changes_after( diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py index 9d9b70586a6..61c64be105c 100644 --- a/homeassistant/components/recorder/core.py +++ b/homeassistant/components/recorder/core.py @@ -1431,6 +1431,7 @@ class Recorder(threading.Thread): with session_scope(session=self.get_session()) as session: end_incomplete_runs(session, self.recorder_runs_manager.recording_start) self.recorder_runs_manager.start(session) + self.states_manager.load_from_db(session) self._open_event_session() diff --git a/homeassistant/components/recorder/history/legacy.py b/homeassistant/components/recorder/history/legacy.py index da90b296fe3..dc49ebb9768 100644 --- a/homeassistant/components/recorder/history/legacy.py +++ b/homeassistant/components/recorder/history/legacy.py @@ -22,9 +22,9 @@ from homeassistant.core import HomeAssistant, State, split_entity_id from homeassistant.helpers.recorder import get_instance import homeassistant.util.dt as dt_util -from ..db_schema import RecorderRuns, StateAttributes, States +from ..db_schema import StateAttributes, States from ..filters import Filters -from ..models import process_timestamp, process_timestamp_to_utc_isoformat +from ..models import process_timestamp_to_utc_isoformat from ..models.legacy import LegacyLazyState, legacy_row_to_compressed_state from ..util import execute_stmt_lambda_element, session_scope from .const import ( @@ -436,7 +436,7 @@ def get_last_state_changes( def _get_states_for_entities_stmt( - run_start: datetime, + run_start_ts: float, utc_point_in_time: datetime, entity_ids: list[str], no_attributes: bool, @@ -447,7 +447,6 @@ def _get_states_for_entities_stmt( ) # We got an include-list of entities, accelerate the query by filtering already # in the inner query. - run_start_ts = process_timestamp(run_start).timestamp() utc_point_in_time_ts = utc_point_in_time.timestamp() stmt += lambda q: q.join( ( @@ -483,7 +482,7 @@ def _get_rows_with_session( session: Session, utc_point_in_time: datetime, entity_ids: list[str], - run: RecorderRuns | None = None, + *, no_attributes: bool = False, ) -> Iterable[Row]: """Return the states at a specific point in time.""" @@ -495,17 +494,16 @@ def _get_rows_with_session( ), ) - if run is None: - run = get_instance(hass).recorder_runs_manager.get(utc_point_in_time) + oldest_ts = get_instance(hass).states_manager.oldest_ts - if run is None or process_timestamp(run.start) > utc_point_in_time: - # History did not run before utc_point_in_time + if oldest_ts is None or oldest_ts > utc_point_in_time.timestamp(): + # We don't have any states for the requested time return [] # We have more than one entity to look at so we need to do a query on states # since the last recorder run started. 
stmt = _get_states_for_entities_stmt( - run.start, utc_point_in_time, entity_ids, no_attributes + oldest_ts, utc_point_in_time, entity_ids, no_attributes ) return execute_stmt_lambda_element(session, stmt) diff --git a/homeassistant/components/recorder/history/modern.py b/homeassistant/components/recorder/history/modern.py index e9af4a673c3..a8902e184ec 100644 --- a/homeassistant/components/recorder/history/modern.py +++ b/homeassistant/components/recorder/history/modern.py @@ -39,7 +39,6 @@ from ..models import ( LazyState, datetime_to_timestamp_or_none, extract_metadata_ids, - process_timestamp, row_to_compressed_state, ) from ..util import execute_stmt_lambda_element, session_scope @@ -251,9 +250,9 @@ def get_significant_states_with_session( if metadata_id is not None and split_entity_id(entity_id)[0] in SIGNIFICANT_DOMAINS ] - run_start_ts: float | None = None + oldest_ts: float | None = None if include_start_time_state and not ( - run_start_ts := _get_run_start_ts_for_utc_point_in_time(hass, start_time) + oldest_ts := _get_oldest_possible_ts(hass, start_time) ): include_start_time_state = False start_time_ts = start_time.timestamp() @@ -269,7 +268,7 @@ def get_significant_states_with_session( significant_changes_only, no_attributes, include_start_time_state, - run_start_ts, + oldest_ts, ), track_on=[ bool(single_metadata_id), @@ -416,9 +415,9 @@ def state_changes_during_period( entity_id_to_metadata_id: dict[str, int | None] = { entity_id: single_metadata_id } - run_start_ts: float | None = None + oldest_ts: float | None = None if include_start_time_state and not ( - run_start_ts := _get_run_start_ts_for_utc_point_in_time(hass, start_time) + oldest_ts := _get_oldest_possible_ts(hass, start_time) ): include_start_time_state = False start_time_ts = start_time.timestamp() @@ -431,7 +430,7 @@ def state_changes_during_period( no_attributes, limit, include_start_time_state, - run_start_ts, + oldest_ts, has_last_reported, ), track_on=[ @@ -603,17 +602,17 @@ def _get_start_time_state_for_entities_stmt( ) -def _get_run_start_ts_for_utc_point_in_time( +def _get_oldest_possible_ts( hass: HomeAssistant, utc_point_in_time: datetime ) -> float | None: - """Return the start time of a run.""" - run = get_instance(hass).recorder_runs_manager.get(utc_point_in_time) - if ( - run is not None - and (run_start := process_timestamp(run.start)) < utc_point_in_time - ): - return run_start.timestamp() - # History did not run before utc_point_in_time but we still + """Return the oldest possible timestamp. + + Returns None if there are no states as old as utc_point_in_time. 
+ """ + + oldest_ts = get_instance(hass).states_manager.oldest_ts + if oldest_ts is not None and oldest_ts < utc_point_in_time.timestamp(): + return oldest_ts return None diff --git a/homeassistant/components/recorder/purge.py b/homeassistant/components/recorder/purge.py index eb67300e8d4..11f5accc978 100644 --- a/homeassistant/components/recorder/purge.py +++ b/homeassistant/components/recorder/purge.py @@ -122,6 +122,9 @@ def purge_old_data( _purge_old_entity_ids(instance, session) _purge_old_recorder_runs(instance, session, purge_before) + with session_scope(session=instance.get_session(), read_only=True) as session: + instance.recorder_runs_manager.load_from_db(session) + instance.states_manager.load_from_db(session) if repack: repack_database(instance) return True diff --git a/homeassistant/components/recorder/queries.py b/homeassistant/components/recorder/queries.py index 2e4b588a0b0..8ca7bef2691 100644 --- a/homeassistant/components/recorder/queries.py +++ b/homeassistant/components/recorder/queries.py @@ -637,6 +637,15 @@ def find_states_to_purge( ) +def find_oldest_state() -> StatementLambdaElement: + """Find the last_updated_ts of the oldest state.""" + return lambda_stmt( + lambda: select(States.last_updated_ts).where( + States.state_id.in_(select(func.min(States.state_id))) + ) + ) + + def find_short_term_statistics_to_purge( purge_before: datetime, max_bind_vars: int ) -> StatementLambdaElement: diff --git a/homeassistant/components/recorder/table_managers/states.py b/homeassistant/components/recorder/table_managers/states.py index d5cef759c54..fafcfa0ea61 100644 --- a/homeassistant/components/recorder/table_managers/states.py +++ b/homeassistant/components/recorder/table_managers/states.py @@ -2,7 +2,15 @@ from __future__ import annotations +from collections.abc import Sequence +from typing import Any, cast + +from sqlalchemy.engine.row import Row +from sqlalchemy.orm.session import Session + from ..db_schema import States +from ..queries import find_oldest_state +from ..util import execute_stmt_lambda_element class StatesManager: @@ -13,6 +21,12 @@ class StatesManager: self._pending: dict[str, States] = {} self._last_committed_id: dict[str, int] = {} self._last_reported: dict[int, float] = {} + self._oldest_ts: float | None = None + + @property + def oldest_ts(self) -> float | None: + """Return the oldest timestamp.""" + return self._oldest_ts def pop_pending(self, entity_id: str) -> States | None: """Pop a pending state. @@ -44,6 +58,8 @@ class StatesManager: recorder thread. """ self._pending[entity_id] = state + if self._oldest_ts is None: + self._oldest_ts = state.last_updated_ts def update_pending_last_reported( self, state_id: int, last_reported_timestamp: float @@ -74,6 +90,22 @@ class StatesManager: """ self._last_committed_id.clear() self._pending.clear() + self._oldest_ts = None + + def load_from_db(self, session: Session) -> None: + """Update the cache. + + Must run in the recorder thread. + """ + result = cast( + Sequence[Row[Any]], + execute_stmt_lambda_element(session, find_oldest_state()), + ) + if not result: + ts = None + else: + ts = result[0].last_updated_ts + self._oldest_ts = ts def evict_purged_state_ids(self, purged_state_ids: set[int]) -> None: """Evict purged states from the committed states. 
diff --git a/homeassistant/components/recorder/tasks.py b/homeassistant/components/recorder/tasks.py index 783f0a80b8e..fa10c12aa68 100644 --- a/homeassistant/components/recorder/tasks.py +++ b/homeassistant/components/recorder/tasks.py @@ -120,8 +120,6 @@ class PurgeTask(RecorderTask): if purge.purge_old_data( instance, self.purge_before, self.repack, self.apply_filter ): - with instance.get_session() as session: - instance.recorder_runs_manager.load_from_db(session) # We always need to do the db cleanups after a purge # is finished to ensure the WAL checkpoint and other # tasks happen after a vacuum. diff --git a/tests/components/recorder/test_purge.py b/tests/components/recorder/test_purge.py index ea764b14401..c3ff5027b70 100644 --- a/tests/components/recorder/test_purge.py +++ b/tests/components/recorder/test_purge.py @@ -112,6 +112,9 @@ async def test_purge_big_database(hass: HomeAssistant, recorder_mock: Recorder) async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test deleting old states.""" + assert recorder_mock.states_manager.oldest_ts is None + oldest_ts = recorder_mock.states_manager.oldest_ts + await _add_test_states(hass) # make sure we start with 6 states @@ -127,6 +130,10 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> events = session.query(Events).filter(Events.event_type == "state_changed") assert events.count() == 0 + assert recorder_mock.states_manager.oldest_ts != oldest_ts + assert recorder_mock.states_manager.oldest_ts == states[0].last_updated_ts + oldest_ts = recorder_mock.states_manager.oldest_ts + assert "test.recorder2" in recorder_mock.states_manager._last_committed_id purge_before = dt_util.utcnow() - timedelta(days=4) @@ -140,6 +147,8 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> repack=False, ) assert not finished + # states_manager.oldest_ts is not updated until after the purge is complete + assert recorder_mock.states_manager.oldest_ts == oldest_ts with session_scope(hass=hass) as session: states = session.query(States) @@ -162,6 +171,8 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> finished = purge_old_data(recorder_mock, purge_before, repack=False) assert finished + # states_manager.oldest_ts should now be updated + assert recorder_mock.states_manager.oldest_ts != oldest_ts with session_scope(hass=hass) as session: states = session.query(States) @@ -169,6 +180,10 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> assert states.count() == 2 assert state_attributes.count() == 1 + assert recorder_mock.states_manager.oldest_ts != oldest_ts + assert recorder_mock.states_manager.oldest_ts == states[0].last_updated_ts + oldest_ts = recorder_mock.states_manager.oldest_ts + assert "test.recorder2" in recorder_mock.states_manager._last_committed_id # run purge_old_data again @@ -181,6 +196,8 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> repack=False, ) assert not finished + # states_manager.oldest_ts is not updated until after the purge is complete + assert recorder_mock.states_manager.oldest_ts == oldest_ts with session_scope(hass=hass) as session: assert states.count() == 0 From dd215b3d5d165c4ad76ef31947998001b4a54b65 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 19 Dec 2024 12:32:15 +0100 Subject: [PATCH 0875/1198] Revert "Revert "Simplify recorder RecorderRunsManager (#131785)"" (#133564) Revert "Revert "Simplify recorder 
RecorderRunsManager" (#133201)" This reverts commit 980b8a91e62c449fab558318573fa756818875a6. --- .../recorder/table_managers/recorder_runs.py | 73 +++---------------- .../table_managers/test_recorder_runs.py | 32 ++------ 2 files changed, 15 insertions(+), 90 deletions(-) diff --git a/homeassistant/components/recorder/table_managers/recorder_runs.py b/homeassistant/components/recorder/table_managers/recorder_runs.py index b0b9818118b..4ca0aa18b88 100644 --- a/homeassistant/components/recorder/table_managers/recorder_runs.py +++ b/homeassistant/components/recorder/table_managers/recorder_runs.py @@ -2,8 +2,6 @@ from __future__ import annotations -import bisect -from dataclasses import dataclass from datetime import datetime from sqlalchemy.orm.session import Session @@ -11,34 +9,6 @@ from sqlalchemy.orm.session import Session import homeassistant.util.dt as dt_util from ..db_schema import RecorderRuns -from ..models import process_timestamp - - -def _find_recorder_run_for_start_time( - run_history: _RecorderRunsHistory, start: datetime -) -> RecorderRuns | None: - """Find the recorder run for a start time in _RecorderRunsHistory.""" - run_timestamps = run_history.run_timestamps - runs_by_timestamp = run_history.runs_by_timestamp - - # bisect_left tells us were we would insert - # a value in the list of runs after the start timestamp. - # - # The run before that (idx-1) is when the run started - # - # If idx is 0, history never ran before the start timestamp - # - if idx := bisect.bisect_left(run_timestamps, start.timestamp()): - return runs_by_timestamp[run_timestamps[idx - 1]] - return None - - -@dataclass(frozen=True) -class _RecorderRunsHistory: - """Bisectable history of RecorderRuns.""" - - run_timestamps: list[int] - runs_by_timestamp: dict[int, RecorderRuns] class RecorderRunsManager: @@ -48,7 +18,7 @@ class RecorderRunsManager: """Track recorder run history.""" self._recording_start = dt_util.utcnow() self._current_run_info: RecorderRuns | None = None - self._run_history = _RecorderRunsHistory([], {}) + self._first_run: RecorderRuns | None = None @property def recording_start(self) -> datetime: @@ -58,9 +28,7 @@ class RecorderRunsManager: @property def first(self) -> RecorderRuns: """Get the first run.""" - if runs_by_timestamp := self._run_history.runs_by_timestamp: - return next(iter(runs_by_timestamp.values())) - return self.current + return self._first_run or self.current @property def current(self) -> RecorderRuns: @@ -78,15 +46,6 @@ class RecorderRunsManager: """Return if a run is active.""" return self._current_run_info is not None - def get(self, start: datetime) -> RecorderRuns | None: - """Return the recorder run that started before or at start. - - If the first run started after the start, return None - """ - if start >= self.recording_start: - return self.current - return _find_recorder_run_for_start_time(self._run_history, start) - def start(self, session: Session) -> None: """Start a new run. @@ -122,31 +81,17 @@ class RecorderRunsManager: Must run in the recorder thread. 
""" - run_timestamps: list[int] = [] - runs_by_timestamp: dict[int, RecorderRuns] = {} - - for run in session.query(RecorderRuns).order_by(RecorderRuns.start.asc()).all(): + if ( + run := session.query(RecorderRuns) + .order_by(RecorderRuns.start.asc()) + .first() + ): session.expunge(run) - if run_dt := process_timestamp(run.start): - # Not sure if this is correct or runs_by_timestamp annotation should be changed - timestamp = int(run_dt.timestamp()) - run_timestamps.append(timestamp) - runs_by_timestamp[timestamp] = run - - # - # self._run_history is accessed in get() - # which is allowed to be called from any thread - # - # We use a dataclass to ensure that when we update - # run_timestamps and runs_by_timestamp - # are never out of sync with each other. - # - self._run_history = _RecorderRunsHistory(run_timestamps, runs_by_timestamp) + self._first_run = run def clear(self) -> None: """Clear the current run after ending it. Must run in the recorder thread. """ - if self._current_run_info: - self._current_run_info = None + self._current_run_info = None diff --git a/tests/components/recorder/table_managers/test_recorder_runs.py b/tests/components/recorder/table_managers/test_recorder_runs.py index 41f3a8fef4d..e79def01bad 100644 --- a/tests/components/recorder/table_managers/test_recorder_runs.py +++ b/tests/components/recorder/table_managers/test_recorder_runs.py @@ -21,6 +21,11 @@ async def test_run_history(recorder_mock: Recorder, hass: HomeAssistant) -> None two_days_ago = now - timedelta(days=2) one_day_ago = now - timedelta(days=1) + # Test that the first run falls back to the current run + assert process_timestamp( + instance.recorder_runs_manager.first.start + ) == process_timestamp(instance.recorder_runs_manager.current.start) + with instance.get_session() as session: session.add(RecorderRuns(start=three_days_ago, created=three_days_ago)) session.add(RecorderRuns(start=two_days_ago, created=two_days_ago)) @@ -29,32 +34,7 @@ async def test_run_history(recorder_mock: Recorder, hass: HomeAssistant) -> None instance.recorder_runs_manager.load_from_db(session) assert ( - process_timestamp( - instance.recorder_runs_manager.get( - three_days_ago + timedelta(microseconds=1) - ).start - ) - == three_days_ago - ) - assert ( - process_timestamp( - instance.recorder_runs_manager.get( - two_days_ago + timedelta(microseconds=1) - ).start - ) - == two_days_ago - ) - assert ( - process_timestamp( - instance.recorder_runs_manager.get( - one_day_ago + timedelta(microseconds=1) - ).start - ) - == one_day_ago - ) - assert ( - process_timestamp(instance.recorder_runs_manager.get(now).start) - == instance.recorder_runs_manager.recording_start + process_timestamp(instance.recorder_runs_manager.first.start) == three_days_ago ) From 962f1bad32ea47ba9454aebd37eb7c4e4f307900 Mon Sep 17 00:00:00 2001 From: Stefan Agner Date: Thu, 19 Dec 2024 12:40:05 +0100 Subject: [PATCH 0876/1198] Add mW as unit of measurement for Matter electrical power sensors (#133504) --- homeassistant/components/matter/sensor.py | 4 ++-- homeassistant/components/number/const.py | 2 +- homeassistant/components/sensor/const.py | 2 +- homeassistant/const.py | 1 + homeassistant/util/unit_conversion.py | 2 ++ .../matter/snapshots/test_sensor.ambr | 6 ++++++ tests/components/sensor/test_recorder.py | 20 +++++++++++++++---- tests/util/test_unit_conversion.py | 1 + 8 files changed, 30 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/matter/sensor.py b/homeassistant/components/matter/sensor.py index de4fdfe2685..847c9439b81 
100644 --- a/homeassistant/components/matter/sensor.py +++ b/homeassistant/components/matter/sensor.py @@ -580,10 +580,10 @@ DISCOVERY_SCHEMAS = [ key="ElectricalPowerMeasurementWatt", device_class=SensorDeviceClass.POWER, entity_category=EntityCategory.DIAGNOSTIC, - native_unit_of_measurement=UnitOfPower.WATT, + native_unit_of_measurement=UnitOfPower.MILLIWATT, + suggested_unit_of_measurement=UnitOfPower.WATT, suggested_display_precision=2, state_class=SensorStateClass.MEASUREMENT, - measurement_to_ha=lambda x: x / 1000, ), entity_class=MatterSensor, required_attributes=( diff --git a/homeassistant/components/number/const.py b/homeassistant/components/number/const.py index 56466934e5f..91a9d6adfe4 100644 --- a/homeassistant/components/number/const.py +++ b/homeassistant/components/number/const.py @@ -280,7 +280,7 @@ class NumberDeviceClass(StrEnum): POWER = "power" """Power. - Unit of measurement: `W`, `kW`, `MW`, `GW`, `TW` + Unit of measurement: `mW`, `W`, `kW`, `MW`, `GW`, `TW`, `BTU/h` """ PRECIPITATION = "precipitation" diff --git a/homeassistant/components/sensor/const.py b/homeassistant/components/sensor/const.py index 2fb563051a9..8c3c3925513 100644 --- a/homeassistant/components/sensor/const.py +++ b/homeassistant/components/sensor/const.py @@ -308,7 +308,7 @@ class SensorDeviceClass(StrEnum): POWER = "power" """Power. - Unit of measurement: `W`, `kW`, `MW`, `GW`, `TW` + Unit of measurement: `mW`, `W`, `kW`, `MW`, `GW`, `TW`, `BTU/h` """ PRECIPITATION = "precipitation" diff --git a/homeassistant/const.py b/homeassistant/const.py index c026a8e5427..eed8d73a4ee 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -589,6 +589,7 @@ class UnitOfApparentPower(StrEnum): class UnitOfPower(StrEnum): """Power units.""" + MILLIWATT = "mW" WATT = "W" KILO_WATT = "kW" MEGA_WATT = "MW" diff --git a/homeassistant/util/unit_conversion.py b/homeassistant/util/unit_conversion.py index 8bf6d4b9fc9..8ea290f01d1 100644 --- a/homeassistant/util/unit_conversion.py +++ b/homeassistant/util/unit_conversion.py @@ -340,6 +340,7 @@ class PowerConverter(BaseUnitConverter): UNIT_CLASS = "power" _UNIT_CONVERSION: dict[str | None, float] = { + UnitOfPower.MILLIWATT: 1 * 1000, UnitOfPower.WATT: 1, UnitOfPower.KILO_WATT: 1 / 1000, UnitOfPower.MEGA_WATT: 1 / 1e6, @@ -347,6 +348,7 @@ class PowerConverter(BaseUnitConverter): UnitOfPower.TERA_WATT: 1 / 1e12, } VALID_UNITS = { + UnitOfPower.MILLIWATT, UnitOfPower.WATT, UnitOfPower.KILO_WATT, UnitOfPower.MEGA_WATT, diff --git a/tests/components/matter/snapshots/test_sensor.ambr b/tests/components/matter/snapshots/test_sensor.ambr index e452ce45f1d..f88604e7d46 100644 --- a/tests/components/matter/snapshots/test_sensor.ambr +++ b/tests/components/matter/snapshots/test_sensor.ambr @@ -1744,6 +1744,9 @@ 'sensor': dict({ 'suggested_display_precision': 2, }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -2850,6 +2853,9 @@ 'sensor': dict({ 'suggested_display_precision': 2, }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, diff --git a/tests/components/sensor/test_recorder.py b/tests/components/sensor/test_recorder.py index 44eaa9fde0d..636fb9871c9 100644 --- a/tests/components/sensor/test_recorder.py +++ b/tests/components/sensor/test_recorder.py @@ -4247,8 +4247,14 @@ async def async_record_states( @pytest.mark.parametrize( ("units", "attributes", "unit", "unit2", "supported_unit"), [ - 
(US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), - (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), + ( + US_CUSTOMARY_SYSTEM, + POWER_SENSOR_ATTRIBUTES, + "W", + "kW", + "GW, MW, TW, W, kW, mW", + ), + (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW, mW"), ( US_CUSTOMARY_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, @@ -4459,8 +4465,14 @@ async def test_validate_statistics_unit_ignore_device_class( @pytest.mark.parametrize( ("units", "attributes", "unit", "unit2", "supported_unit"), [ - (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), - (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), + ( + US_CUSTOMARY_SYSTEM, + POWER_SENSOR_ATTRIBUTES, + "W", + "kW", + "GW, MW, TW, W, kW, mW", + ), + (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW, mW"), ( US_CUSTOMARY_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, diff --git a/tests/util/test_unit_conversion.py b/tests/util/test_unit_conversion.py index 4be32b2851e..9c123d93f62 100644 --- a/tests/util/test_unit_conversion.py +++ b/tests/util/test_unit_conversion.py @@ -537,6 +537,7 @@ _CONVERTED_VALUE: dict[ (10, UnitOfPower.GIGA_WATT, 10e9, UnitOfPower.WATT), (10, UnitOfPower.TERA_WATT, 10e12, UnitOfPower.WATT), (10, UnitOfPower.WATT, 0.01, UnitOfPower.KILO_WATT), + (10, UnitOfPower.MILLIWATT, 0.01, UnitOfPower.WATT), ], PressureConverter: [ (1000, UnitOfPressure.HPA, 14.5037743897, UnitOfPressure.PSI), From eb8ee1339cad568253ba408b3b0e3d4c6167b4da Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Thu, 19 Dec 2024 06:40:23 -0500 Subject: [PATCH 0877/1198] Set Russound RIO quality scale to silver (#133494) --- homeassistant/components/russound_rio/manifest.json | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/russound_rio/manifest.json b/homeassistant/components/russound_rio/manifest.json index 2cd153c232c..ab77ca3ab6a 100644 --- a/homeassistant/components/russound_rio/manifest.json +++ b/homeassistant/components/russound_rio/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/russound_rio", "iot_class": "local_push", "loggers": ["aiorussound"], + "quality_scale": "silver", "requirements": ["aiorussound==4.1.0"] } From 94c7d1834620ea48a99fa04920429db51eeca13a Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Thu, 19 Dec 2024 13:36:32 +0100 Subject: [PATCH 0878/1198] Bump pylamarzocco to 1.4.1 (#133557) --- homeassistant/components/lamarzocco/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lamarzocco/manifest.json b/homeassistant/components/lamarzocco/manifest.json index 7505843850c..b34df6d6917 100644 --- a/homeassistant/components/lamarzocco/manifest.json +++ b/homeassistant/components/lamarzocco/manifest.json @@ -37,5 +37,5 @@ "iot_class": "cloud_polling", "loggers": ["pylamarzocco"], "quality_scale": "platinum", - "requirements": ["pylamarzocco==1.4.0"] + "requirements": ["pylamarzocco==1.4.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index d8dc08ca301..90bb9e9b2ab 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2043,7 +2043,7 @@ pykwb==0.0.8 pylacrosse==0.4 # homeassistant.components.lamarzocco -pylamarzocco==1.4.0 +pylamarzocco==1.4.1 # homeassistant.components.lastfm pylast==5.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt 
index a988c0836b8..7bdedce08c9 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1654,7 +1654,7 @@ pykrakenapi==0.1.8 pykulersky==0.5.2 # homeassistant.components.lamarzocco -pylamarzocco==1.4.0 +pylamarzocco==1.4.1 # homeassistant.components.lastfm pylast==5.1.0 From 255f85eb2ff25ce1e1dd168b8963817b4fd6b6f1 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 19 Dec 2024 16:04:59 +0100 Subject: [PATCH 0879/1198] Fix boot loop after restoring backup (#133581) --- homeassistant/backup_restore.py | 3 +++ tests/test_backup_restore.py | 9 ++++++--- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/homeassistant/backup_restore.py b/homeassistant/backup_restore.py index f9250e3129e..57e1c734dfc 100644 --- a/homeassistant/backup_restore.py +++ b/homeassistant/backup_restore.py @@ -64,6 +64,9 @@ def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent | ) except (FileNotFoundError, KeyError, json.JSONDecodeError): return None + finally: + # Always remove the backup instruction file to prevent a boot loop + instruction_path.unlink(missing_ok=True) def _clear_configuration_directory(config_dir: Path, keep: Iterable[str]) -> None: diff --git a/tests/test_backup_restore.py b/tests/test_backup_restore.py index bce5eca4292..10ea64a6a61 100644 --- a/tests/test_backup_restore.py +++ b/tests/test_backup_restore.py @@ -57,11 +57,14 @@ def test_reading_the_instruction_contents( return_value=content, side_effect=side_effect, ), + mock.patch("pathlib.Path.unlink", autospec=True) as unlink_mock, ): - read_content = backup_restore.restore_backup_file_content( - Path(get_test_config_dir()) - ) + config_path = Path(get_test_config_dir()) + read_content = backup_restore.restore_backup_file_content(config_path) assert read_content == expected + unlink_mock.assert_called_once_with( + config_path / ".HA_RESTORE", missing_ok=True + ) def test_restoring_backup_that_does_not_exist() -> None: From a3ef3cce3e5cff6330705d0d6ba5fe6d7004aa3b Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Thu, 19 Dec 2024 07:41:47 -0800 Subject: [PATCH 0880/1198] Improve Google Tasks coordinator updates behavior (#133316) --- .../components/google_tasks/__init__.py | 23 +++++++- .../components/google_tasks/coordinator.py | 11 ++-- homeassistant/components/google_tasks/todo.py | 14 ++--- .../components/google_tasks/types.py | 16 +----- tests/components/google_tasks/conftest.py | 14 ++++- tests/components/google_tasks/test_init.py | 35 +++++++++---- tests/components/google_tasks/test_todo.py | 52 ++++++++++++------- 7 files changed, 107 insertions(+), 58 deletions(-) diff --git a/homeassistant/components/google_tasks/__init__.py b/homeassistant/components/google_tasks/__init__.py index 2ff22068ca9..45ad1777aa0 100644 --- a/homeassistant/components/google_tasks/__init__.py +++ b/homeassistant/components/google_tasks/__init__.py @@ -2,6 +2,8 @@ from __future__ import annotations +import asyncio + from aiohttp import ClientError, ClientResponseError from homeassistant.const import Platform @@ -11,8 +13,9 @@ from homeassistant.helpers import config_entry_oauth2_flow from . 
import api from .const import DOMAIN +from .coordinator import TaskUpdateCoordinator from .exceptions import GoogleTasksApiError -from .types import GoogleTasksConfigEntry, GoogleTasksData +from .types import GoogleTasksConfigEntry __all__ = [ "DOMAIN", @@ -46,7 +49,23 @@ async def async_setup_entry(hass: HomeAssistant, entry: GoogleTasksConfigEntry) except GoogleTasksApiError as err: raise ConfigEntryNotReady from err - entry.runtime_data = GoogleTasksData(auth, task_lists) + coordinators = [ + TaskUpdateCoordinator( + hass, + auth, + task_list["id"], + task_list["title"], + ) + for task_list in task_lists + ] + # Refresh all coordinators in parallel + await asyncio.gather( + *( + coordinator.async_config_entry_first_refresh() + for coordinator in coordinators + ) + ) + entry.runtime_data = coordinators await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/google_tasks/coordinator.py b/homeassistant/components/google_tasks/coordinator.py index 5377e2be567..a06faf00a91 100644 --- a/homeassistant/components/google_tasks/coordinator.py +++ b/homeassistant/components/google_tasks/coordinator.py @@ -20,7 +20,11 @@ class TaskUpdateCoordinator(DataUpdateCoordinator[list[dict[str, Any]]]): """Coordinator for fetching Google Tasks for a Task List form the API.""" def __init__( - self, hass: HomeAssistant, api: AsyncConfigEntryAuth, task_list_id: str + self, + hass: HomeAssistant, + api: AsyncConfigEntryAuth, + task_list_id: str, + task_list_title: str, ) -> None: """Initialize TaskUpdateCoordinator.""" super().__init__( @@ -30,9 +34,10 @@ class TaskUpdateCoordinator(DataUpdateCoordinator[list[dict[str, Any]]]): update_interval=UPDATE_INTERVAL, ) self.api = api - self._task_list_id = task_list_id + self.task_list_id = task_list_id + self.task_list_title = task_list_title async def _async_update_data(self) -> list[dict[str, Any]]: """Fetch tasks from API endpoint.""" async with asyncio.timeout(TIMEOUT): - return await self.api.list_tasks(self._task_list_id) + return await self.api.list_tasks(self.task_list_id) diff --git a/homeassistant/components/google_tasks/todo.py b/homeassistant/components/google_tasks/todo.py index 9a44b91b529..1df5e5fc2e9 100644 --- a/homeassistant/components/google_tasks/todo.py +++ b/homeassistant/components/google_tasks/todo.py @@ -2,7 +2,7 @@ from __future__ import annotations -from datetime import UTC, date, datetime, timedelta +from datetime import UTC, date, datetime from typing import Any, cast from homeassistant.components.todo import ( @@ -20,7 +20,6 @@ from .coordinator import TaskUpdateCoordinator from .types import GoogleTasksConfigEntry PARALLEL_UPDATES = 0 -SCAN_INTERVAL = timedelta(minutes=15) TODO_STATUS_MAP = { "needsAction": TodoItemStatus.NEEDS_ACTION, @@ -76,14 +75,13 @@ async def async_setup_entry( async_add_entities( ( GoogleTaskTodoListEntity( - TaskUpdateCoordinator(hass, entry.runtime_data.api, task_list["id"]), - task_list["title"], + coordinator, + coordinator.task_list_title, entry.entry_id, - task_list["id"], + coordinator.task_list_id, ) - for task_list in entry.runtime_data.task_lists + for coordinator in entry.runtime_data ), - True, ) @@ -118,8 +116,6 @@ class GoogleTaskTodoListEntity( @property def todo_items(self) -> list[TodoItem] | None: """Get the current set of To-do items.""" - if self.coordinator.data is None: - return None return [_convert_api_item(item) for item in _order_tasks(self.coordinator.data)] async def async_create_todo_item(self, item: TodoItem) -> None: diff --git 
a/homeassistant/components/google_tasks/types.py b/homeassistant/components/google_tasks/types.py index eaaec23ddf5..21500d11eb8 100644 --- a/homeassistant/components/google_tasks/types.py +++ b/homeassistant/components/google_tasks/types.py @@ -1,19 +1,7 @@ """Types for the Google Tasks integration.""" -from dataclasses import dataclass -from typing import Any - from homeassistant.config_entries import ConfigEntry -from .api import AsyncConfigEntryAuth +from .coordinator import TaskUpdateCoordinator - -@dataclass -class GoogleTasksData: - """Class to hold Google Tasks data.""" - - api: AsyncConfigEntryAuth - task_lists: list[dict[str, Any]] - - -type GoogleTasksConfigEntry = ConfigEntry[GoogleTasksData] +type GoogleTasksConfigEntry = ConfigEntry[list[TaskUpdateCoordinator]] diff --git a/tests/components/google_tasks/conftest.py b/tests/components/google_tasks/conftest.py index e519cac9bdc..8f966800147 100644 --- a/tests/components/google_tasks/conftest.py +++ b/tests/components/google_tasks/conftest.py @@ -34,6 +34,18 @@ LIST_TASK_LIST_RESPONSE = { "items": [TASK_LIST], } +LIST_TASKS_RESPONSE_WATER = { + "items": [ + { + "id": "some-task-id", + "title": "Water", + "status": "needsAction", + "description": "Any size is ok", + "position": "00000000000000000001", + }, + ], +} + @pytest.fixture def platforms() -> list[Platform]: @@ -44,7 +56,7 @@ def platforms() -> list[Platform]: @pytest.fixture(name="expires_at") def mock_expires_at() -> int: """Fixture to set the oauth token expiration time.""" - return time.time() + 3600 + return time.time() + 86400 @pytest.fixture(name="token_entry") diff --git a/tests/components/google_tasks/test_init.py b/tests/components/google_tasks/test_init.py index 9ad8c887a66..e93e0d9c643 100644 --- a/tests/components/google_tasks/test_init.py +++ b/tests/components/google_tasks/test_init.py @@ -3,6 +3,7 @@ from collections.abc import Awaitable, Callable import http from http import HTTPStatus +import json import time from unittest.mock import Mock @@ -15,13 +16,15 @@ from homeassistant.components.google_tasks.const import OAUTH2_TOKEN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant -from .conftest import LIST_TASK_LIST_RESPONSE +from .conftest import LIST_TASK_LIST_RESPONSE, LIST_TASKS_RESPONSE_WATER from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker -@pytest.mark.parametrize("api_responses", [[LIST_TASK_LIST_RESPONSE]]) +@pytest.mark.parametrize( + "api_responses", [[LIST_TASK_LIST_RESPONSE, LIST_TASKS_RESPONSE_WATER]] +) async def test_setup( hass: HomeAssistant, integration_setup: Callable[[], Awaitable[bool]], @@ -42,8 +45,10 @@ async def test_setup( assert not hass.services.async_services().get(DOMAIN) -@pytest.mark.parametrize("expires_at", [time.time() - 3600], ids=["expired"]) -@pytest.mark.parametrize("api_responses", [[LIST_TASK_LIST_RESPONSE]]) +@pytest.mark.parametrize("expires_at", [time.time() - 86400], ids=["expired"]) +@pytest.mark.parametrize( + "api_responses", [[LIST_TASK_LIST_RESPONSE, LIST_TASKS_RESPONSE_WATER]] +) async def test_expired_token_refresh_success( hass: HomeAssistant, integration_setup: Callable[[], Awaitable[bool]], @@ -60,8 +65,8 @@ async def test_expired_token_refresh_success( json={ "access_token": "updated-access-token", "refresh_token": "updated-refresh-token", - "expires_at": time.time() + 3600, - "expires_in": 3600, + "expires_at": time.time() + 86400, + "expires_in": 86400, }, ) @@ -69,26 +74,26 @@ async def 
test_expired_token_refresh_success( assert config_entry.state is ConfigEntryState.LOADED assert config_entry.data["token"]["access_token"] == "updated-access-token" - assert config_entry.data["token"]["expires_in"] == 3600 + assert config_entry.data["token"]["expires_in"] == 86400 @pytest.mark.parametrize( ("expires_at", "status", "exc", "expected_state"), [ ( - time.time() - 3600, + time.time() - 86400, http.HTTPStatus.UNAUTHORIZED, None, ConfigEntryState.SETUP_ERROR, ), ( - time.time() - 3600, + time.time() - 86400, http.HTTPStatus.INTERNAL_SERVER_ERROR, None, ConfigEntryState.SETUP_RETRY, ), ( - time.time() - 3600, + time.time() - 86400, None, ClientError("error"), ConfigEntryState.SETUP_RETRY, @@ -124,6 +129,16 @@ async def test_expired_token_refresh_failure( "response_handler", [ ([(Response({"status": HTTPStatus.INTERNAL_SERVER_ERROR}), b"")]), + # First request succeeds, second request fails + ( + [ + ( + Response({"status": HTTPStatus.OK}), + json.dumps(LIST_TASK_LIST_RESPONSE), + ), + (Response({"status": HTTPStatus.INTERNAL_SERVER_ERROR}), b""), + ] + ), ], ) async def test_setup_error( diff --git a/tests/components/google_tasks/test_todo.py b/tests/components/google_tasks/test_todo.py index c713b9fd44f..f28f1bb917e 100644 --- a/tests/components/google_tasks/test_todo.py +++ b/tests/components/google_tasks/test_todo.py @@ -6,10 +6,12 @@ import json from typing import Any from unittest.mock import Mock +from freezegun.api import FrozenDateTimeFactory from httplib2 import Response import pytest from syrupy.assertion import SnapshotAssertion +from homeassistant.components.google_tasks.coordinator import UPDATE_INTERVAL from homeassistant.components.todo import ( ATTR_DESCRIPTION, ATTR_DUE_DATE, @@ -19,12 +21,17 @@ from homeassistant.components.todo import ( DOMAIN as TODO_DOMAIN, TodoServices, ) -from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from .conftest import LIST_TASK_LIST_RESPONSE, create_response_object +from .conftest import ( + LIST_TASK_LIST_RESPONSE, + LIST_TASKS_RESPONSE_WATER, + create_response_object, +) +from tests.common import async_fire_time_changed from tests.typing import WebSocketGenerator ENTITY_ID = "todo.my_tasks" @@ -44,17 +51,6 @@ ERROR_RESPONSE = { CONTENT_ID = "Content-ID" BOUNDARY = "batch_00972cc8-75bd-11ee-9692-0242ac110002" # Arbitrary uuid -LIST_TASKS_RESPONSE_WATER = { - "items": [ - { - "id": "some-task-id", - "title": "Water", - "status": "needsAction", - "description": "Any size is ok", - "position": "00000000000000000001", - }, - ], -} LIST_TASKS_RESPONSE_MULTIPLE = { "items": [ { @@ -311,7 +307,9 @@ async def test_empty_todo_list( [ [ LIST_TASK_LIST_RESPONSE, - ERROR_RESPONSE, + LIST_TASKS_RESPONSE_WATER, + ERROR_RESPONSE, # Fail after one update interval + LIST_TASKS_RESPONSE_WATER, ] ], ) @@ -319,18 +317,34 @@ async def test_task_items_error_response( hass: HomeAssistant, setup_credentials: None, integration_setup: Callable[[], Awaitable[bool]], - hass_ws_client: WebSocketGenerator, - ws_get_items: Callable[[], Awaitable[dict[str, str]]], + freezer: FrozenDateTimeFactory, ) -> None: - """Test an error while getting todo list items.""" + """Test an error while the entity updates getting a new list of todo list items.""" assert await integration_setup() - await hass_ws_client(hass) + # Test successful setup and first data fetch + state = 
hass.states.get("todo.my_tasks") + assert state + assert state.state == "1" + + # Next update fails + freezer.tick(UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("todo.my_tasks") assert state - assert state.state == "unavailable" + assert state.state == STATE_UNAVAILABLE + + # Next update succeeds + freezer.tick(UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("todo.my_tasks") + assert state + assert state.state == "1" @pytest.mark.parametrize( From 95b3d27b6073e1cac9015185da873fbf9c28e471 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Thu, 19 Dec 2024 18:23:40 +0100 Subject: [PATCH 0881/1198] Update Airgradient quality scale (#133569) --- .../components/airgradient/quality_scale.yaml | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/airgradient/quality_scale.yaml b/homeassistant/components/airgradient/quality_scale.yaml index 8d62e8515fc..71132fdb47a 100644 --- a/homeassistant/components/airgradient/quality_scale.yaml +++ b/homeassistant/components/airgradient/quality_scale.yaml @@ -7,7 +7,9 @@ rules: appropriate-polling: done brands: done common-modules: done - config-flow-test-coverage: done + config-flow-test-coverage: + status: todo + comment: Missing zeroconf duplicate entry test. config-flow: done dependency-transparency: done docs-actions: @@ -31,7 +33,9 @@ rules: # Silver action-exceptions: todo config-entry-unloading: done - docs-configuration-parameters: todo + docs-configuration-parameters: + status: exempt + comment: No options to configure docs-installation-parameters: todo entity-unavailable: done integration-owner: done @@ -41,12 +45,16 @@ rules: status: exempt comment: | This integration does not require authentication. - test-coverage: done + test-coverage: todo # Gold devices: done diagnostics: done - discovery-update-info: done - discovery: done + discovery-update-info: + status: todo + comment: DHCP is still possible + discovery: + status: todo + comment: DHCP is still possible docs-data-update: todo docs-examples: todo docs-known-limitations: todo From 1a068d99d62a5da299aeb73cc027cbd446872359 Mon Sep 17 00:00:00 2001 From: Andrew Jackson Date: Thu, 19 Dec 2024 17:28:50 +0000 Subject: [PATCH 0882/1198] Add data descriptions to Mealie integration (#133590) --- homeassistant/components/mealie/strings.json | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/mealie/strings.json b/homeassistant/components/mealie/strings.json index 830d43d8f93..de91c507950 100644 --- a/homeassistant/components/mealie/strings.json +++ b/homeassistant/components/mealie/strings.json @@ -1,4 +1,9 @@ { + "common": { + "data_description_host": "The URL of your Mealie instance, for example, http://192.168.1.123:1234", + "data_description_api_token": "The API token of your Mealie instance from your user profile within Mealie.", + "data_description_verify_ssl": "Should SSL certificates be verified? This should be off for self-signed certificates." 
+ }, "config": { "step": { "user": { @@ -8,13 +13,18 @@ "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" }, "data_description": { - "host": "The URL of your Mealie instance, for example, http://192.168.1.123:1234" + "host": "[%key:component::mealie::common::data_description_host%]", + "api_token": "[%key:component::mealie::common::data_description_api_token%]", + "verify_ssl": "[%key:component::mealie::common::data_description_verify_ssl%]" } }, "reauth_confirm": { "description": "Please reauthenticate with Mealie.", "data": { "api_token": "[%key:common::config_flow::data::api_token%]" + }, + "data_description": { + "api_token": "[%key:component::mealie::common::data_description_api_token%]" } }, "reconfigure": { @@ -23,6 +33,11 @@ "host": "[%key:common::config_flow::data::url%]", "api_token": "[%key:common::config_flow::data::api_token%]", "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" + }, + "data_description": { + "host": "[%key:component::mealie::common::data_description_host%]", + "api_token": "[%key:component::mealie::common::data_description_api_token%]", + "verify_ssl": "[%key:component::mealie::common::data_description_verify_ssl%]" } } }, From e357e0a406c648f957f845642e39a37ebcc68135 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 19 Dec 2024 18:40:04 +0100 Subject: [PATCH 0883/1198] Set default min/max color temperature in template lights (#133549) --- homeassistant/components/template/light.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/template/light.py b/homeassistant/components/template/light.py index 0654a42406a..9391e368e2b 100644 --- a/homeassistant/components/template/light.py +++ b/homeassistant/components/template/light.py @@ -16,6 +16,8 @@ from homeassistant.components.light import ( ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, ATTR_TRANSITION, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, ENTITY_ID_FORMAT, PLATFORM_SCHEMA as LIGHT_PLATFORM_SCHEMA, ColorMode, @@ -278,7 +280,7 @@ class LightTemplate(TemplateEntity, LightEntity): if self._max_mireds is not None: return color_util.color_temperature_mired_to_kelvin(self._max_mireds) - return super().min_color_temp_kelvin + return DEFAULT_MIN_KELVIN @property def max_color_temp_kelvin(self) -> int: @@ -286,7 +288,7 @@ class LightTemplate(TemplateEntity, LightEntity): if self._min_mireds is not None: return color_util.color_temperature_mired_to_kelvin(self._min_mireds) - return super().max_color_temp_kelvin + return DEFAULT_MAX_KELVIN @property def hs_color(self) -> tuple[float, float] | None: From a97434976e44b952f50b38d937936bb9d13f97cb Mon Sep 17 00:00:00 2001 From: Marcel van der Veldt Date: Thu, 19 Dec 2024 19:00:18 +0100 Subject: [PATCH 0884/1198] Handle null value for elapsed time in Music Assistant (#133597) --- homeassistant/components/music_assistant/media_player.py | 8 ++------ tests/components/music_assistant/fixtures/players.json | 2 +- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/music_assistant/media_player.py b/homeassistant/components/music_assistant/media_player.py index 7d09bd5b888..7004f09aad5 100644 --- a/homeassistant/components/music_assistant/media_player.py +++ b/homeassistant/components/music_assistant/media_player.py @@ -565,17 +565,13 @@ class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity): # shuffle and repeat are not (yet) supported for external sources self._attr_shuffle = None self._attr_repeat = None - if TYPE_CHECKING: - 
assert player.elapsed_time is not None - self._attr_media_position = int(player.elapsed_time) + self._attr_media_position = int(player.elapsed_time or 0) self._attr_media_position_updated_at = ( utc_from_timestamp(player.elapsed_time_last_updated) if player.elapsed_time_last_updated else None ) - if TYPE_CHECKING: - assert player.elapsed_time is not None - self._prev_time = player.elapsed_time + self._prev_time = player.elapsed_time or 0 return if queue is None: diff --git a/tests/components/music_assistant/fixtures/players.json b/tests/components/music_assistant/fixtures/players.json index 2d8b88d0e8e..8a08a55dc45 100644 --- a/tests/components/music_assistant/fixtures/players.json +++ b/tests/components/music_assistant/fixtures/players.json @@ -20,7 +20,7 @@ "power", "enqueue" ], - "elapsed_time": 0, + "elapsed_time": null, "elapsed_time_last_updated": 0, "state": "idle", "volume_level": 20, From 2f77cda822d99cfdf261d46d0cfc5ed0a1c543cf Mon Sep 17 00:00:00 2001 From: Raphael Hehl <7577984+RaHehl@users.noreply.github.com> Date: Thu, 19 Dec 2024 19:18:21 +0100 Subject: [PATCH 0885/1198] Add basic UniFi Protect AiPort support (#133523) * UnifiProtect add basic support for AiPort devices * Sort ignore-words --------- Co-authored-by: J. Nick Koston --- .pre-commit-config.yaml | 2 +- homeassistant/components/unifiprotect/const.py | 1 + homeassistant/components/unifiprotect/entity.py | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6ecae762dcd..a4568552780 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -12,7 +12,7 @@ repos: hooks: - id: codespell args: - - --ignore-words-list=astroid,checkin,currenty,hass,iif,incomfort,lookin,nam,NotIn + - --ignore-words-list=aiport,astroid,checkin,currenty,hass,iif,incomfort,lookin,nam,NotIn - --skip="./.*,*.csv,*.json,*.ambr" - --quiet-level=2 exclude_types: [csv, json, html] diff --git a/homeassistant/components/unifiprotect/const.py b/homeassistant/components/unifiprotect/const.py index 7d1e5b55d3f..d607f87b76a 100644 --- a/homeassistant/components/unifiprotect/const.py +++ b/homeassistant/components/unifiprotect/const.py @@ -41,6 +41,7 @@ DEFAULT_VERIFY_SSL = False DEFAULT_MAX_MEDIA = 1000 DEVICES_THAT_ADOPT = { + ModelType.AIPORT, ModelType.CAMERA, ModelType.LIGHT, ModelType.VIEWPORT, diff --git a/homeassistant/components/unifiprotect/entity.py b/homeassistant/components/unifiprotect/entity.py index 1d68b18f1de..335bc1e933d 100644 --- a/homeassistant/components/unifiprotect/entity.py +++ b/homeassistant/components/unifiprotect/entity.py @@ -119,6 +119,7 @@ def _async_device_entities( _ALL_MODEL_TYPES = ( + ModelType.AIPORT, ModelType.CAMERA, ModelType.LIGHT, ModelType.SENSOR, From 52683c5f75af9eab7eb5a7b35af08c6c5d0fa7e2 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Thu, 19 Dec 2024 19:58:33 +0100 Subject: [PATCH 0886/1198] Improve Airgradient config flow tests (#133594) --- .../components/airgradient/quality_scale.yaml | 4 +--- tests/components/airgradient/test_config_flow.py | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/airgradient/quality_scale.yaml b/homeassistant/components/airgradient/quality_scale.yaml index 71132fdb47a..43816401cdb 100644 --- a/homeassistant/components/airgradient/quality_scale.yaml +++ b/homeassistant/components/airgradient/quality_scale.yaml @@ -7,9 +7,7 @@ rules: appropriate-polling: done brands: done common-modules: done - config-flow-test-coverage: - status: 
todo - comment: Missing zeroconf duplicate entry test. + config-flow-test-coverage: done config-flow: done dependency-transparency: done docs-actions: diff --git a/tests/components/airgradient/test_config_flow.py b/tests/components/airgradient/test_config_flow.py index 73dbd17a213..8927947c40e 100644 --- a/tests/components/airgradient/test_config_flow.py +++ b/tests/components/airgradient/test_config_flow.py @@ -255,6 +255,20 @@ async def test_zeroconf_flow_abort_old_firmware(hass: HomeAssistant) -> None: assert result["reason"] == "invalid_version" +async def test_zeroconf_flow_abort_duplicate( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test zeroconf flow aborts with duplicate.""" + mock_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DISCOVERY, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + async def test_user_flow_works_discovery( hass: HomeAssistant, mock_new_airgradient_client: AsyncMock, From 04bcc8d3d3af8679410b4c7b9f69edac825a5d11 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 19 Dec 2024 09:13:51 -1000 Subject: [PATCH 0887/1198] Bump yalexs-ble to 2.5.6 (#133593) --- homeassistant/components/august/manifest.json | 2 +- homeassistant/components/yale/manifest.json | 2 +- homeassistant/components/yalexs_ble/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index d0b41411c96..652f1a7b966 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -28,5 +28,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.5"] + "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.6"] } diff --git a/homeassistant/components/yale/manifest.json b/homeassistant/components/yale/manifest.json index 7b7edfac77b..f1cde31d066 100644 --- a/homeassistant/components/yale/manifest.json +++ b/homeassistant/components/yale/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/yale", "iot_class": "cloud_push", "loggers": ["socketio", "engineio", "yalexs"], - "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.5"] + "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.6"] } diff --git a/homeassistant/components/yalexs_ble/manifest.json b/homeassistant/components/yalexs_ble/manifest.json index b2c331397b3..15b11719fdb 100644 --- a/homeassistant/components/yalexs_ble/manifest.json +++ b/homeassistant/components/yalexs_ble/manifest.json @@ -12,5 +12,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/yalexs_ble", "iot_class": "local_push", - "requirements": ["yalexs-ble==2.5.5"] + "requirements": ["yalexs-ble==2.5.6"] } diff --git a/requirements_all.txt b/requirements_all.txt index 90bb9e9b2ab..1f40c8d1612 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3060,7 +3060,7 @@ yalesmartalarmclient==0.4.3 # homeassistant.components.august # homeassistant.components.yale # homeassistant.components.yalexs_ble -yalexs-ble==2.5.5 +yalexs-ble==2.5.6 # homeassistant.components.august # homeassistant.components.yale diff --git a/requirements_test_all.txt 
b/requirements_test_all.txt index 7bdedce08c9..c82b937f1b0 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2455,7 +2455,7 @@ yalesmartalarmclient==0.4.3 # homeassistant.components.august # homeassistant.components.yale # homeassistant.components.yalexs_ble -yalexs-ble==2.5.5 +yalexs-ble==2.5.6 # homeassistant.components.august # homeassistant.components.yale From e6ef3fe5070816664969257233d178b4ad1b457e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ab=C3=ADlio=20Costa?= Date: Thu, 19 Dec 2024 19:24:10 +0000 Subject: [PATCH 0888/1198] Update Idasen Desk user flow step strings (#133605) --- homeassistant/components/idasen_desk/quality_scale.yaml | 5 +---- homeassistant/components/idasen_desk/strings.json | 5 ++++- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/idasen_desk/quality_scale.yaml b/homeassistant/components/idasen_desk/quality_scale.yaml index 1908178ec15..4af2f489bd3 100644 --- a/homeassistant/components/idasen_desk/quality_scale.yaml +++ b/homeassistant/components/idasen_desk/quality_scale.yaml @@ -15,10 +15,7 @@ rules: comment: | - use mock_desk_api - merge test_user_step_auth_failed, test_user_step_cannot_connect and test_user_step_unknown_exception. - config-flow: - status: todo - comment: | - Missing data description for user step. + config-flow: done dependency-transparency: done docs-actions: status: exempt diff --git a/homeassistant/components/idasen_desk/strings.json b/homeassistant/components/idasen_desk/strings.json index 70e08976925..7486973638b 100644 --- a/homeassistant/components/idasen_desk/strings.json +++ b/homeassistant/components/idasen_desk/strings.json @@ -4,7 +4,10 @@ "step": { "user": { "data": { - "address": "Bluetooth address" + "address": "Device" + }, + "data_description": { + "address": "The bluetooth device for the desk." 
} } }, From 2413fc4c0d39b59d47ae6d593d8e928d1e23abb1 Mon Sep 17 00:00:00 2001 From: adam-the-hero <132444842+adam-the-hero@users.noreply.github.com> Date: Thu, 19 Dec 2024 20:25:24 +0100 Subject: [PATCH 0889/1198] Fix Watergate Water meter volume sensor (#133606) --- homeassistant/components/watergate/sensor.py | 2 +- tests/components/watergate/snapshots/test_sensor.ambr | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/watergate/sensor.py b/homeassistant/components/watergate/sensor.py index 82ac7cfea92..638bf297415 100644 --- a/homeassistant/components/watergate/sensor.py +++ b/homeassistant/components/watergate/sensor.py @@ -56,7 +56,7 @@ class WatergateSensorEntityDescription(SensorEntityDescription): DESCRIPTIONS: list[WatergateSensorEntityDescription] = [ WatergateSensorEntityDescription( value_fn=lambda data: ( - data.state.water_meter.duration + data.state.water_meter.volume if data.state and data.state.water_meter else None ), diff --git a/tests/components/watergate/snapshots/test_sensor.ambr b/tests/components/watergate/snapshots/test_sensor.ambr index a8969798105..479a879a583 100644 --- a/tests/components/watergate/snapshots/test_sensor.ambr +++ b/tests/components/watergate/snapshots/test_sensor.ambr @@ -352,7 +352,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '100', + 'state': '1.2', }) # --- # name: test_sensor[sensor.sonic_water_pressure-entry] From 61e5f10d12d184fd350ab99c4d6698654faa0069 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Thu, 19 Dec 2024 20:27:08 +0100 Subject: [PATCH 0890/1198] Fix Twinkly raise on progress (#133601) --- .../components/twinkly/config_flow.py | 4 +- tests/components/twinkly/test_config_flow.py | 37 +++++++++++++++++++ 2 files changed, 40 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/twinkly/config_flow.py b/homeassistant/components/twinkly/config_flow.py index 68c455dc619..837bd9ccb6a 100644 --- a/homeassistant/components/twinkly/config_flow.py +++ b/homeassistant/components/twinkly/config_flow.py @@ -45,7 +45,9 @@ class TwinklyConfigFlow(ConfigFlow, domain=DOMAIN): except (TimeoutError, ClientError): errors[CONF_HOST] = "cannot_connect" else: - await self.async_set_unique_id(device_info[DEV_ID]) + await self.async_set_unique_id( + device_info[DEV_ID], raise_on_progress=False + ) self._abort_if_unique_id_configured() return self._create_entry_from_device(device_info, host) diff --git a/tests/components/twinkly/test_config_flow.py b/tests/components/twinkly/test_config_flow.py index 9b9aeafd082..8d8e955291e 100644 --- a/tests/components/twinkly/test_config_flow.py +++ b/tests/components/twinkly/test_config_flow.py @@ -5,6 +5,7 @@ from unittest.mock import patch from homeassistant import config_entries from homeassistant.components import dhcp from homeassistant.components.twinkly.const import DOMAIN as TWINKLY_DOMAIN +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_ID, CONF_MODEL, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -157,3 +158,39 @@ async def test_dhcp_already_exists(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +async def test_user_flow_works_discovery(hass: HomeAssistant) -> None: + """Test user flow can continue after discovery happened.""" + client = ClientMock() + with ( + patch( + 
"homeassistant.components.twinkly.config_flow.Twinkly", return_value=client + ), + patch("homeassistant.components.twinkly.async_setup_entry", return_value=True), + ): + await hass.config_entries.flow.async_init( + TWINKLY_DOMAIN, + context={"source": config_entries.SOURCE_DHCP}, + data=dhcp.DhcpServiceInfo( + hostname="Twinkly_XYZ", + ip="1.2.3.4", + macaddress="aabbccddeeff", + ), + ) + result = await hass.config_entries.flow.async_init( + TWINKLY_DOMAIN, + context={"source": SOURCE_USER}, + ) + assert len(hass.config_entries.flow.async_progress(TWINKLY_DOMAIN)) == 2 + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "10.0.0.131"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + # Verify the discovery flow was aborted + assert not hass.config_entries.flow.async_progress(TWINKLY_DOMAIN) From b261c7f18ab7fad9ab7deb49e33440f2906305c5 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Benecke Date: Thu, 19 Dec 2024 20:29:12 +0100 Subject: [PATCH 0891/1198] Mark `docs-installation-parameters` for SABnzbd as done (#133609) --- homeassistant/components/sabnzbd/quality_scale.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/sabnzbd/quality_scale.yaml b/homeassistant/components/sabnzbd/quality_scale.yaml index ef4e72b4936..a1d6fc076b2 100644 --- a/homeassistant/components/sabnzbd/quality_scale.yaml +++ b/homeassistant/components/sabnzbd/quality_scale.yaml @@ -35,7 +35,7 @@ rules: status: exempt comment: | The integration does not provide any additional options. - docs-installation-parameters: todo + docs-installation-parameters: done entity-unavailable: done integration-owner: done log-when-unavailable: done From 551a584ca69771804b6f094eceb67dcb25a2f627 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 19 Dec 2024 10:39:39 -1000 Subject: [PATCH 0892/1198] Handle mqtt.WebsocketConnectionError when connecting to the MQTT broker (#133610) fixes #132985 --- homeassistant/components/mqtt/client.py | 2 +- tests/components/mqtt/test_client.py | 11 +++++++++-- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/mqtt/client.py b/homeassistant/components/mqtt/client.py index 0091d2370a4..73c6b80cb14 100644 --- a/homeassistant/components/mqtt/client.py +++ b/homeassistant/components/mqtt/client.py @@ -661,7 +661,7 @@ class MQTT: self.conf.get(CONF_PORT, DEFAULT_PORT), self.conf.get(CONF_KEEPALIVE, DEFAULT_KEEPALIVE), ) - except OSError as err: + except (OSError, mqtt.WebsocketConnectionError) as err: _LOGGER.error("Failed to connect to MQTT server due to exception: %s", err) self._async_connection_result(False) finally: diff --git a/tests/components/mqtt/test_client.py b/tests/components/mqtt/test_client.py index 4bfcde752ae..1878045a9b9 100644 --- a/tests/components/mqtt/test_client.py +++ b/tests/components/mqtt/test_client.py @@ -1403,8 +1403,15 @@ async def test_handle_mqtt_timeout_on_callback( assert not mock_debouncer.is_set() +@pytest.mark.parametrize( + "exception", + [ + OSError("Connection error"), + paho_mqtt.WebsocketConnectionError("Connection error"), + ], +) async def test_setup_raises_config_entry_not_ready_if_no_connect_broker( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, exception: Exception ) -> None: """Test for setup failure if connection to broker is missing.""" entry = MockConfigEntry(domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker"}) @@ -1413,7 +1420,7 @@ async def test_setup_raises_config_entry_not_ready_if_no_connect_broker( with patch( "homeassistant.components.mqtt.async_client.AsyncMQTTClient" ) as mock_client: - mock_client().connect = MagicMock(side_effect=OSError("Connection error")) + mock_client().connect = MagicMock(side_effect=exception) assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() assert "Failed to connect to MQTT server due to exception:" in caplog.text From 64aba0c1a372a2c13f68f2edd3170fec93a1cf5d Mon Sep 17 00:00:00 2001 From: Quentame Date: Fri, 20 Dec 2024 00:48:03 +0100 Subject: [PATCH 0893/1198] Bump Freebox to 1.2.1 (#133455) --- homeassistant/components/freebox/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/freebox/manifest.json b/homeassistant/components/freebox/manifest.json index ad7da1703b8..46422cee105 100644 --- a/homeassistant/components/freebox/manifest.json +++ b/homeassistant/components/freebox/manifest.json @@ -7,6 +7,6 @@ "documentation": "https://www.home-assistant.io/integrations/freebox", "iot_class": "local_polling", "loggers": ["freebox_api"], - "requirements": ["freebox-api==1.1.0"], + "requirements": ["freebox-api==1.2.1"], "zeroconf": ["_fbx-api._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index 1f40c8d1612..79cf3658b9f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -940,7 +940,7 @@ forecast-solar==4.0.0 fortiosapi==1.0.5 # homeassistant.components.freebox -freebox-api==1.1.0 +freebox-api==1.2.1 # homeassistant.components.free_mobile freesms==0.2.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c82b937f1b0..589ed932ebd 100644 --- a/requirements_test_all.txt 
+++ b/requirements_test_all.txt @@ -94,7 +94,7 @@ PyTransportNSW==0.1.1 PyTurboJPEG==1.7.5 # homeassistant.components.vicare -PyViCare==2.39.0 +PyViCare==2.39.1 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 From 3d20c5c5d613bf27e1898ce0d9a6a450ebb54199 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 19 Dec 2024 21:24:47 -1000 Subject: [PATCH 0895/1198] Remove lower bound for history start time state query (#133607) Remove lower bound for start time state query With the new query in #133553 we do not need a lower bound on the search since it will always use an index now and we always want the newest value in the index before the provided timestamp. The lower bound is redundant at this point as it will always be older than the oldest time point for the state. It only made sense when the query would have had to examine a time window of states instead of doing an index-only search.
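To illustrate the reasoning, a rough sketch of the lookup shape (simplified,
hypothetical table and column names; not the exact statement built in
history/modern.py): the per-entity start-time state is just the newest row
strictly before the timestamp, which an index over
(metadata_id, last_updated_ts) can answer without any lower bound.

    from sqlalchemy import Column, Float, Integer, MetaData, Table, select

    metadata_obj = MetaData()
    states = Table(
        "states",
        metadata_obj,
        Column("state_id", Integer, primary_key=True),
        Column("metadata_id", Integer),
        Column("last_updated_ts", Float),
    )

    def start_time_state_stmt(metadata_id: int, start_time_ts: float):
        # Newest state before start_time_ts: ORDER BY last_updated_ts DESC
        # LIMIT 1 walks the index from the newest entry, so a lower bound
        # on last_updated_ts adds nothing to the search.
        return (
            select(states.c.state_id)
            .where(
                states.c.metadata_id == metadata_id,
                states.c.last_updated_ts < start_time_ts,
            )
            .order_by(states.c.last_updated_ts.desc())
            .limit(1)
        )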
--- .../components/recorder/history/modern.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/recorder/history/modern.py b/homeassistant/components/recorder/history/modern.py index a8902e184ec..2d8f4da5f38 100644 --- a/homeassistant/components/recorder/history/modern.py +++ b/homeassistant/components/recorder/history/modern.py @@ -182,7 +182,6 @@ def _significant_states_stmt( unioned_subquery = union_all( _select_from_subquery( _get_start_time_state_stmt( - run_start_ts, start_time_ts, single_metadata_id, metadata_ids, @@ -352,11 +351,12 @@ def _state_changed_during_period_stmt( ) if limit: stmt = stmt.limit(limit) - stmt = stmt.order_by( - States.metadata_id, - States.last_updated_ts, - ) + stmt = stmt.order_by(States.metadata_id, States.last_updated_ts) if not include_start_time_state or not run_start_ts: + # If we do not need the start time state or the + # oldest possible timestamp is newer than the start time + # we can return the statement as is as there will + # never be a start time state. return stmt return _select_from_subquery( union_all( @@ -555,7 +555,6 @@ def get_last_state_changes( def _get_start_time_state_for_entities_stmt( - run_start_ts: float, epoch_time: float, metadata_ids: list[int], no_attributes: bool, @@ -583,7 +582,6 @@ def _get_start_time_state_for_entities_stmt( .where( (StatesMeta.metadata_id == States.metadata_id) & (States.last_updated_ts < epoch_time) - & (States.last_updated_ts >= run_start_ts) ) .order_by(States.last_updated_ts.desc()) .limit(1) @@ -617,7 +615,6 @@ def _get_oldest_possible_ts( def _get_start_time_state_stmt( - run_start_ts: float, epoch_time: float, single_metadata_id: int | None, metadata_ids: list[int], @@ -638,7 +635,6 @@ def _get_start_time_state_stmt( # We have more than one entity to look at so we need to do a query on states # since the last recorder run started. 
return _get_start_time_state_for_entities_stmt( - run_start_ts, epoch_time, metadata_ids, no_attributes, From 26212798a334e208a35a0c6dfc0dc495d149fa40 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Fri, 20 Dec 2024 08:25:08 +0100 Subject: [PATCH 0896/1198] Fixes and code cleanup for IronOS integration (#133579) * Fix typing and cleanup in IronOS integration * fix test not using freezer * changes * fix timedelta --- homeassistant/components/iron_os/entity.py | 14 +++++++------- homeassistant/components/iron_os/number.py | 12 +++++------- homeassistant/components/iron_os/select.py | 8 +++----- tests/components/iron_os/test_init.py | 6 ++++-- 4 files changed, 19 insertions(+), 21 deletions(-) diff --git a/homeassistant/components/iron_os/entity.py b/homeassistant/components/iron_os/entity.py index 684957a2197..190a9f33639 100644 --- a/homeassistant/components/iron_os/entity.py +++ b/homeassistant/components/iron_os/entity.py @@ -2,29 +2,28 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH, DeviceInfo from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import MANUFACTURER, MODEL -from .coordinator import IronOSBaseCoordinator +from .coordinator import IronOSLiveDataCoordinator -class IronOSBaseEntity(CoordinatorEntity[IronOSBaseCoordinator]): +class IronOSBaseEntity(CoordinatorEntity[IronOSLiveDataCoordinator]): """Base IronOS entity.""" _attr_has_entity_name = True def __init__( self, - coordinator: IronOSBaseCoordinator, + coordinator: IronOSLiveDataCoordinator, entity_description: EntityDescription, - context: Any | None = None, ) -> None: """Initialize the sensor.""" - super().__init__(coordinator, context=context) + super().__init__(coordinator) self.entity_description = entity_description self._attr_unique_id = ( @@ -32,7 +31,8 @@ class IronOSBaseEntity(CoordinatorEntity[IronOSBaseCoordinator]): ) if TYPE_CHECKING: assert coordinator.config_entry.unique_id - self.device_info = DeviceInfo( + + self._attr_device_info = DeviceInfo( connections={(CONNECTION_BLUETOOTH, coordinator.config_entry.unique_id)}, manufacturer=MANUFACTURER, model=MODEL, diff --git a/homeassistant/components/iron_os/number.py b/homeassistant/components/iron_os/number.py index a288a61b021..583844223dd 100644 --- a/homeassistant/components/iron_os/number.py +++ b/homeassistant/components/iron_os/number.py @@ -336,10 +336,10 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up number entities from a config entry.""" - coordinator = entry.runtime_data + coordinators = entry.runtime_data async_add_entities( - IronOSNumberEntity(coordinator, description) + IronOSNumberEntity(coordinators, description) for description in PINECIL_NUMBER_DESCRIPTIONS ) @@ -351,15 +351,13 @@ class IronOSNumberEntity(IronOSBaseEntity, NumberEntity): def __init__( self, - coordinator: IronOSCoordinators, + coordinators: IronOSCoordinators, entity_description: IronOSNumberEntityDescription, ) -> None: """Initialize the number entity.""" - super().__init__( - coordinator.live_data, entity_description, entity_description.characteristic - ) + super().__init__(coordinators.live_data, entity_description) - self.settings = coordinator.settings + self.settings = coordinators.settings async def async_set_native_value(self, value: float) -> None: """Update the 
current value.""" diff --git a/homeassistant/components/iron_os/select.py b/homeassistant/components/iron_os/select.py index c863e076f0b..10d8a6fcef5 100644 --- a/homeassistant/components/iron_os/select.py +++ b/homeassistant/components/iron_os/select.py @@ -164,15 +164,13 @@ class IronOSSelectEntity(IronOSBaseEntity, SelectEntity): def __init__( self, - coordinator: IronOSCoordinators, + coordinators: IronOSCoordinators, entity_description: IronOSSelectEntityDescription, ) -> None: """Initialize the select entity.""" - super().__init__( - coordinator.live_data, entity_description, entity_description.characteristic - ) + super().__init__(coordinators.live_data, entity_description) - self.settings = coordinator.settings + self.settings = coordinators.settings @property def current_option(self) -> str | None: diff --git a/tests/components/iron_os/test_init.py b/tests/components/iron_os/test_init.py index 21194a55eea..15327c55121 100644 --- a/tests/components/iron_os/test_init.py +++ b/tests/components/iron_os/test_init.py @@ -1,6 +1,6 @@ """Test init of IronOS integration.""" -from datetime import datetime, timedelta +from datetime import timedelta from unittest.mock import AsyncMock from freezegun.api import FrozenDateTimeFactory @@ -73,6 +73,7 @@ async def test_settings_exception( hass: HomeAssistant, config_entry: MockConfigEntry, mock_pynecil: AsyncMock, + freezer: FrozenDateTimeFactory, ) -> None: """Test skipping of settings on exception.""" mock_pynecil.get_settings.side_effect = CommunicationError @@ -80,7 +81,8 @@ async def test_settings_exception( config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - async_fire_time_changed(hass, datetime.now() + timedelta(seconds=60)) + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.LOADED From ad34bc89101f16a3a8b5ebf55ad45fa133548456 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Fri, 20 Dec 2024 08:26:36 +0100 Subject: [PATCH 0897/1198] Add min/max price sensor to Nord Pool (#133534) * Add min/max price sensor to Nord Pool * Last fixes * Make link in strings * Replace func --- homeassistant/components/nordpool/sensor.py | 75 ++++++- .../components/nordpool/strings.json | 22 ++ .../nordpool/snapshots/test_sensor.ambr | 208 ++++++++++++++++++ 3 files changed, 298 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/nordpool/sensor.py b/homeassistant/components/nordpool/sensor.py index 47617cc8e42..fe966e99168 100644 --- a/homeassistant/components/nordpool/sensor.py +++ b/homeassistant/components/nordpool/sensor.py @@ -27,6 +27,20 @@ from .entity import NordpoolBaseEntity PARALLEL_UPDATES = 0 +def validate_prices( + func: Callable[ + [DeliveryPeriodData], dict[str, tuple[float | None, float, float | None]] + ], + data: DeliveryPeriodData, + area: str, + index: int, +) -> float | None: + """Validate and return.""" + if result := func(data)[area][index]: + return result / 1000 + return None + + def get_prices( data: DeliveryPeriodData, ) -> dict[str, tuple[float | None, float, float | None]]: @@ -67,6 +81,26 @@ def get_prices( return result +def get_min_max_price( + data: DeliveryPeriodData, + area: str, + func: Callable[[float, float], float], +) -> tuple[float, datetime, datetime]: + """Get the lowest price from the data.""" + price_data = data.entries + price: float = price_data[0].entry[area] + start: datetime = price_data[0].start + end: datetime = 
price_data[0].end + for entry in price_data: + for _area, _price in entry.entry.items(): + if _area == area and _price == func(price, _price): + price = _price + start = entry.start + end = entry.end + + return (price, start, end) + + def get_blockprices( data: DeliveryPeriodData, ) -> dict[str, dict[str, tuple[datetime, datetime, float, float, float]]]: @@ -103,7 +137,8 @@ class NordpoolDefaultSensorEntityDescription(SensorEntityDescription): class NordpoolPricesSensorEntityDescription(SensorEntityDescription): """Describes Nord Pool prices sensor entity.""" - value_fn: Callable[[tuple[float | None, float, float | None]], float | None] + value_fn: Callable[[DeliveryPeriodData, str], float | None] + extra_fn: Callable[[DeliveryPeriodData, str], dict[str, str] | None] @dataclass(frozen=True, kw_only=True) @@ -142,20 +177,43 @@ PRICES_SENSOR_TYPES: tuple[NordpoolPricesSensorEntityDescription, ...] = ( NordpoolPricesSensorEntityDescription( key="current_price", translation_key="current_price", - value_fn=lambda data: data[1] / 1000, + value_fn=lambda data, area: validate_prices(get_prices, data, area, 1), + extra_fn=lambda data, area: None, state_class=SensorStateClass.MEASUREMENT, suggested_display_precision=2, ), NordpoolPricesSensorEntityDescription( key="last_price", translation_key="last_price", - value_fn=lambda data: data[0] / 1000 if data[0] else None, + value_fn=lambda data, area: validate_prices(get_prices, data, area, 0), + extra_fn=lambda data, area: None, suggested_display_precision=2, ), NordpoolPricesSensorEntityDescription( key="next_price", translation_key="next_price", - value_fn=lambda data: data[2] / 1000 if data[2] else None, + value_fn=lambda data, area: validate_prices(get_prices, data, area, 2), + extra_fn=lambda data, area: None, + suggested_display_precision=2, + ), + NordpoolPricesSensorEntityDescription( + key="lowest_price", + translation_key="lowest_price", + value_fn=lambda data, area: get_min_max_price(data, area, min)[0] / 1000, + extra_fn=lambda data, area: { + "start": get_min_max_price(data, area, min)[1].isoformat(), + "end": get_min_max_price(data, area, min)[2].isoformat(), + }, + suggested_display_precision=2, + ), + NordpoolPricesSensorEntityDescription( + key="highest_price", + translation_key="highest_price", + value_fn=lambda data, area: get_min_max_price(data, area, max)[0] / 1000, + extra_fn=lambda data, area: { + "start": get_min_max_price(data, area, max)[1].isoformat(), + "end": get_min_max_price(data, area, max)[2].isoformat(), + }, suggested_display_precision=2, ), ) @@ -285,9 +343,12 @@ class NordpoolPriceSensor(NordpoolBaseEntity, SensorEntity): @property def native_value(self) -> float | None: """Return value of sensor.""" - return self.entity_description.value_fn( - get_prices(self.coordinator.data)[self.area] - ) + return self.entity_description.value_fn(self.coordinator.data, self.area) + + @property + def extra_state_attributes(self) -> dict[str, str] | None: + """Return the extra state attributes.""" + return self.entity_description.extra_fn(self.coordinator.data, self.area) class NordpoolBlockPriceSensor(NordpoolBaseEntity, SensorEntity): diff --git a/homeassistant/components/nordpool/strings.json b/homeassistant/components/nordpool/strings.json index d30898730b9..cc10a1a0640 100644 --- a/homeassistant/components/nordpool/strings.json +++ b/homeassistant/components/nordpool/strings.json @@ -50,6 +50,28 @@ "next_price": { "name": "Next price" }, + "lowest_price": { + "name": "Lowest price", + "state_attributes": { + "start": { + 
"name": "Start time" + }, + "end": { + "name": "End time" + } + } + }, + "highest_price": { + "name": "Highest price", + "state_attributes": { + "start": { + "name": "[%key:component::nordpool::entity::sensor::lowest_price::state_attributes::start::name%]" + }, + "end": { + "name": "[%key:component::nordpool::entity::sensor::lowest_price::state_attributes::end::name%]" + } + } + }, "block_average": { "name": "{block} average" }, diff --git a/tests/components/nordpool/snapshots/test_sensor.ambr b/tests/components/nordpool/snapshots/test_sensor.ambr index 01600352861..9b328c3a71d 100644 --- a/tests/components/nordpool/snapshots/test_sensor.ambr +++ b/tests/components/nordpool/snapshots/test_sensor.ambr @@ -200,6 +200,58 @@ 'state': '11.6402', }) # --- +# name: test_sensor[sensor.nord_pool_se3_highest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_highest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Highest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'highest_price', + 'unique_id': 'SE3-highest_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_highest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'end': '2024-11-05T17:00:00+00:00', + 'friendly_name': 'Nord Pool SE3 Highest price', + 'start': '2024-11-05T16:00:00+00:00', + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_highest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.51265', + }) +# --- # name: test_sensor[sensor.nord_pool_se3_last_updated-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -247,6 +299,58 @@ 'state': '2024-11-04T12:15:03+00:00', }) # --- +# name: test_sensor[sensor.nord_pool_se3_lowest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_lowest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Lowest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lowest_price', + 'unique_id': 'SE3-lowest_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_lowest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'end': '2024-11-05T03:00:00+00:00', + 'friendly_name': 'Nord Pool SE3 Lowest price', + 'start': '2024-11-05T02:00:00+00:00', + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_lowest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.06169', + }) +# --- # name: 
test_sensor[sensor.nord_pool_se3_next_price-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1307,6 +1411,58 @@ 'state': '11.6402', }) # --- +# name: test_sensor[sensor.nord_pool_se4_highest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_highest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Highest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'highest_price', + 'unique_id': 'SE4-highest_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_highest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'end': '2024-11-05T17:00:00+00:00', + 'friendly_name': 'Nord Pool SE4 Highest price', + 'start': '2024-11-05T16:00:00+00:00', + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_highest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.53303', + }) +# --- # name: test_sensor[sensor.nord_pool_se4_last_updated-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1354,6 +1510,58 @@ 'state': '2024-11-04T12:15:03+00:00', }) # --- +# name: test_sensor[sensor.nord_pool_se4_lowest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_lowest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Lowest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lowest_price', + 'unique_id': 'SE4-lowest_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_lowest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'end': '2024-11-05T03:00:00+00:00', + 'friendly_name': 'Nord Pool SE4 Lowest price', + 'start': '2024-11-05T02:00:00+00:00', + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_lowest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.06519', + }) +# --- # name: test_sensor[sensor.nord_pool_se4_next_price-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ From 10191e7a23acc4cc6c86aa86c72aa646ec711bbc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Joakim=20S=C3=B8rensen?= Date: Fri, 20 Dec 2024 08:55:00 +0100 Subject: [PATCH 0898/1198] Add async_register_backup_agents_listener to cloud/backup (#133584) * Add async_register_backup_agents_listener to cloud/backup * Coverage * more coverage --- homeassistant/components/cloud/backup.py | 30 ++++++++++++- homeassistant/components/cloud/const.py | 2 + homeassistant/components/cloud/http_api.py | 5 +++ tests/components/cloud/test_backup.py | 
49 ++++++++++++++++++++++ tests/components/cloud/test_http_api.py | 42 +++++++++++++++++++ 5 files changed, 126 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/cloud/backup.py b/homeassistant/components/cloud/backup.py index e826c229321..d21e28be50a 100644 --- a/homeassistant/components/cloud/backup.py +++ b/homeassistant/components/cloud/backup.py @@ -3,7 +3,7 @@ from __future__ import annotations import base64 -from collections.abc import AsyncIterator, Callable, Coroutine +from collections.abc import AsyncIterator, Callable, Coroutine, Mapping import hashlib from typing import Any, Self @@ -18,9 +18,10 @@ from hass_nabucasa.cloud_api import ( from homeassistant.components.backup import AgentBackup, BackupAgent, BackupAgentError from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.dispatcher import async_dispatcher_connect from .client import CloudClient -from .const import DATA_CLOUD, DOMAIN +from .const import DATA_CLOUD, DOMAIN, EVENT_CLOUD_EVENT _STORAGE_BACKUP = "backup" @@ -45,6 +46,31 @@ async def async_get_backup_agents( return [CloudBackupAgent(hass=hass, cloud=cloud)] +@callback +def async_register_backup_agents_listener( + hass: HomeAssistant, + *, + listener: Callable[[], None], + **kwargs: Any, +) -> Callable[[], None]: + """Register a listener to be called when agents are added or removed.""" + + @callback + def unsub() -> None: + """Unsubscribe from events.""" + unsub_signal() + + @callback + def handle_event(data: Mapping[str, Any]) -> None: + """Handle event.""" + if data["type"] not in ("login", "logout"): + return + listener() + + unsub_signal = async_dispatcher_connect(hass, EVENT_CLOUD_EVENT, handle_event) + return unsub + + class ChunkAsyncStreamIterator: """Async iterator for chunked streams. 
diff --git a/homeassistant/components/cloud/const.py b/homeassistant/components/cloud/const.py index 65d239f2b10..cff71bacebc 100644 --- a/homeassistant/components/cloud/const.py +++ b/homeassistant/components/cloud/const.py @@ -18,6 +18,8 @@ DATA_CLOUD: HassKey[Cloud[CloudClient]] = HassKey(DOMAIN) DATA_PLATFORMS_SETUP: HassKey[dict[str, asyncio.Event]] = HassKey( "cloud_platforms_setup" ) +EVENT_CLOUD_EVENT = "cloud_event" + REQUEST_TIMEOUT = 10 PREF_ENABLE_ALEXA = "alexa_enabled" diff --git a/homeassistant/components/cloud/http_api.py b/homeassistant/components/cloud/http_api.py index 2f49d261792..473f553593a 100644 --- a/homeassistant/components/cloud/http_api.py +++ b/homeassistant/components/cloud/http_api.py @@ -34,6 +34,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.util.location import async_detect_location_info from .alexa_config import entity_supported as entity_supported_by_alexa @@ -41,6 +42,7 @@ from .assist_pipeline import async_create_cloud_pipeline from .client import CloudClient from .const import ( DATA_CLOUD, + EVENT_CLOUD_EVENT, LOGIN_MFA_TIMEOUT, PREF_ALEXA_REPORT_STATE, PREF_DISABLE_2FA, @@ -278,6 +280,8 @@ class CloudLoginView(HomeAssistantView): new_cloud_pipeline_id = await async_create_cloud_pipeline(hass) else: new_cloud_pipeline_id = None + + async_dispatcher_send(hass, EVENT_CLOUD_EVENT, {"type": "login"}) return self.json({"success": True, "cloud_pipeline": new_cloud_pipeline_id}) @@ -297,6 +301,7 @@ class CloudLogoutView(HomeAssistantView): async with asyncio.timeout(REQUEST_TIMEOUT): await cloud.logout() + async_dispatcher_send(hass, EVENT_CLOUD_EVENT, {"type": "logout"}) return self.json_message("ok") diff --git a/tests/components/cloud/test_backup.py b/tests/components/cloud/test_backup.py index 93747ca25f7..86b25d61d88 100644 --- a/tests/components/cloud/test_backup.py +++ b/tests/components/cloud/test_backup.py @@ -17,7 +17,10 @@ from homeassistant.components.backup import ( Folder, ) from homeassistant.components.cloud import DOMAIN +from homeassistant.components.cloud.backup import async_register_backup_agents_listener +from homeassistant.components.cloud.const import EVENT_CLOUD_EVENT from homeassistant.core import HomeAssistant +from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.setup import async_setup_component from tests.test_util.aiohttp import AiohttpClientMocker @@ -576,3 +579,49 @@ async def test_agents_delete_not_found( assert response["success"] assert response["result"] == {"agent_errors": {}} + + +@pytest.mark.parametrize("event_type", ["login", "logout"]) +async def test_calling_listener_on_login_logout( + hass: HomeAssistant, + event_type: str, +) -> None: + """Test calling listener for login and logout events.""" + listener = MagicMock() + async_register_backup_agents_listener(hass, listener=listener) + + assert listener.call_count == 0 + async_dispatcher_send(hass, EVENT_CLOUD_EVENT, {"type": event_type}) + await hass.async_block_till_done() + + assert listener.call_count == 1 + + +async def test_not_calling_listener_after_unsub(hass: HomeAssistant) -> None: + """Test only calling listener until unsub.""" + listener = MagicMock() + unsub = async_register_backup_agents_listener(hass, listener=listener) + + assert 
listener.call_count == 0 + async_dispatcher_send(hass, EVENT_CLOUD_EVENT, {"type": "login"}) + await hass.async_block_till_done() + assert listener.call_count == 1 + + unsub() + + async_dispatcher_send(hass, EVENT_CLOUD_EVENT, {"type": "login"}) + await hass.async_block_till_done() + assert listener.call_count == 1 + + +async def test_not_calling_listener_with_unknown_event_type( + hass: HomeAssistant, +) -> None: + """Test not calling listener if we did not get the expected event type.""" + listener = MagicMock() + async_register_backup_agents_listener(hass, listener=listener) + + assert listener.call_count == 0 + async_dispatcher_send(hass, EVENT_CLOUD_EVENT, {"type": "unknown"}) + await hass.async_block_till_done() + assert listener.call_count == 0 diff --git a/tests/components/cloud/test_http_api.py b/tests/components/cloud/test_http_api.py index b35cc03ac73..d915f158af0 100644 --- a/tests/components/cloud/test_http_api.py +++ b/tests/components/cloud/test_http_api.py @@ -1819,3 +1819,45 @@ async def test_api_calls_require_admin( resp = await client.post(endpoint, json=data) assert resp.status == HTTPStatus.UNAUTHORIZED + + +async def test_login_view_dispatch_event( + hass: HomeAssistant, + cloud: MagicMock, + hass_client: ClientSessionGenerator, +) -> None: + """Test dispatching event while logging in.""" + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component(hass, DOMAIN, {"cloud": {}}) + await hass.async_block_till_done() + + cloud_client = await hass_client() + + with patch( + "homeassistant.components.cloud.http_api.async_dispatcher_send" + ) as async_dispatcher_send_mock: + await cloud_client.post( + "/api/cloud/login", json={"email": "my_username", "password": "my_password"} + ) + + assert async_dispatcher_send_mock.call_count == 1 + assert async_dispatcher_send_mock.mock_calls[0][1][1] == "cloud_event" + assert async_dispatcher_send_mock.mock_calls[0][1][2] == {"type": "login"} + + +async def test_logout_view_dispatch_event( + cloud: MagicMock, + setup_cloud: None, + hass_client: ClientSessionGenerator, +) -> None: + """Test dispatching event while logging out.""" + cloud_client = await hass_client() + + with patch( + "homeassistant.components.cloud.http_api.async_dispatcher_send" + ) as async_dispatcher_send_mock: + await cloud_client.post("/api/cloud/logout") + + assert async_dispatcher_send_mock.call_count == 1 + assert async_dispatcher_send_mock.mock_calls[0][1][1] == "cloud_event" + assert async_dispatcher_send_mock.mock_calls[0][1][2] == {"type": "logout"} From 7e6392f062a015c344a634a703c3e1224766d1dc Mon Sep 17 00:00:00 2001 From: Jonas Fors Lellky Date: Fri, 20 Dec 2024 10:11:50 +0100 Subject: [PATCH 0899/1198] Define setpoints as constants in flexit_bacnet (#133580) * Define setpoints as consts * Use a regular comment instead of docstring * Un-indent comment --- .../components/flexit_bacnet/number.py | 49 +++++++++++++------ 1 file changed, 35 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/flexit_bacnet/number.py b/homeassistant/components/flexit_bacnet/number.py index 029ce896445..6e405e8e8ac 100644 --- a/homeassistant/components/flexit_bacnet/number.py +++ b/homeassistant/components/flexit_bacnet/number.py @@ -23,6 +23,9 @@ from . 
import FlexitCoordinator from .const import DOMAIN from .entity import FlexitEntity +_MAX_FAN_SETPOINT = 100 +_MIN_FAN_SETPOINT = 30 + @dataclass(kw_only=True, frozen=True) class FlexitNumberEntityDescription(NumberEntityDescription): @@ -34,6 +37,24 @@ class FlexitNumberEntityDescription(NumberEntityDescription): set_native_value_fn: Callable[[FlexitBACnet], Callable[[int], Awaitable[None]]]
+# Setpoints for Away, Home and High are dependent on each other. Fireplace and Cooker Hood
+# have setpoints between 30 (_MIN_FAN_SETPOINT) and 100 (_MAX_FAN_SETPOINT).
+# See the table below for all the setpoints.
+#
+# | Mode        | Setpoint | Min                   | Max                   |
+# |:------------|----------|:----------------------|:----------------------|
+# | HOME        | Supply   | AWAY Supply setpoint  | 100                   |
+# | HOME        | Extract  | AWAY Extract setpoint | 100                   |
+# | AWAY        | Supply   | 30                    | HOME Supply setpoint  |
+# | AWAY        | Extract  | 30                    | HOME Extract setpoint |
+# | HIGH        | Supply   | HOME Supply setpoint  | 100                   |
+# | HIGH        | Extract  | HOME Extract setpoint | 100                   |
+# | COOKER_HOOD | Supply   | 30                    | 100                   |
+# | COOKER_HOOD | Extract  | 30                    | 100                   |
+# | FIREPLACE   | Supply   | 30                    | 100                   |
+# | FIREPLACE   | Extract  | 30                    | 100                   |
+
+
 NUMBERS: tuple[FlexitNumberEntityDescription, ...]
= ( native_value_fn=lambda device: device.fan_setpoint_extract_air_fire, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_fire, native_unit_of_measurement=PERCENTAGE, - native_max_value_fn=lambda _: 100, - native_min_value_fn=lambda _: 30, + native_max_value_fn=lambda _: _MAX_FAN_SETPOINT, + native_min_value_fn=lambda _: _MIN_FAN_SETPOINT, ), FlexitNumberEntityDescription( key="fireplace_supply_fan_setpoint", @@ -104,8 +125,8 @@ NUMBERS: tuple[FlexitNumberEntityDescription, ...] = ( native_value_fn=lambda device: device.fan_setpoint_supply_air_fire, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_fire, native_unit_of_measurement=PERCENTAGE, - native_max_value_fn=lambda _: 100, - native_min_value_fn=lambda _: 30, + native_max_value_fn=lambda _: _MAX_FAN_SETPOINT, + native_min_value_fn=lambda _: _MIN_FAN_SETPOINT, ), FlexitNumberEntityDescription( key="high_extract_fan_setpoint", @@ -116,7 +137,7 @@ NUMBERS: tuple[FlexitNumberEntityDescription, ...] = ( native_value_fn=lambda device: device.fan_setpoint_extract_air_high, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_high, native_unit_of_measurement=PERCENTAGE, - native_max_value_fn=lambda _: 100, + native_max_value_fn=lambda _: _MAX_FAN_SETPOINT, native_min_value_fn=lambda device: int(device.fan_setpoint_extract_air_home), ), FlexitNumberEntityDescription( @@ -128,7 +149,7 @@ NUMBERS: tuple[FlexitNumberEntityDescription, ...] = ( native_value_fn=lambda device: device.fan_setpoint_supply_air_high, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_high, native_unit_of_measurement=PERCENTAGE, - native_max_value_fn=lambda _: 100, + native_max_value_fn=lambda _: _MAX_FAN_SETPOINT, native_min_value_fn=lambda device: int(device.fan_setpoint_supply_air_home), ), FlexitNumberEntityDescription( @@ -140,7 +161,7 @@ NUMBERS: tuple[FlexitNumberEntityDescription, ...] = ( native_value_fn=lambda device: device.fan_setpoint_extract_air_home, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_home, native_unit_of_measurement=PERCENTAGE, - native_max_value_fn=lambda _: 100, + native_max_value_fn=lambda _: _MAX_FAN_SETPOINT, native_min_value_fn=lambda device: int(device.fan_setpoint_extract_air_away), ), FlexitNumberEntityDescription( @@ -152,7 +173,7 @@ NUMBERS: tuple[FlexitNumberEntityDescription, ...] 
= ( native_value_fn=lambda device: device.fan_setpoint_supply_air_home, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_home, native_unit_of_measurement=PERCENTAGE, - native_max_value_fn=lambda _: 100, + native_max_value_fn=lambda _: _MAX_FAN_SETPOINT, native_min_value_fn=lambda device: int(device.fan_setpoint_supply_air_away), ), ) From b391dfe6476386d2b713acc5c5f9ad5a6a25a17d Mon Sep 17 00:00:00 2001 From: Kenny Root Date: Fri, 20 Dec 2024 01:59:30 -0800 Subject: [PATCH 0900/1198] Switch to official Zabbix Python API (#131674) --- CODEOWNERS | 1 + homeassistant/components/zabbix/__init__.py | 26 +++++++++---------- homeassistant/components/zabbix/manifest.json | 6 ++--- homeassistant/components/zabbix/sensor.py | 2 +- requirements_all.txt | 6 ++--- 5 files changed, 21 insertions(+), 20 deletions(-) diff --git a/CODEOWNERS b/CODEOWNERS index 382fbffecaa..0e2934b1f49 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1742,6 +1742,7 @@ build.json @home-assistant/supervisor /tests/components/youless/ @gjong /homeassistant/components/youtube/ @joostlek /tests/components/youtube/ @joostlek +/homeassistant/components/zabbix/ @kruton /homeassistant/components/zamg/ @killer0071234 /tests/components/zamg/ @killer0071234 /homeassistant/components/zengge/ @emontnemery diff --git a/homeassistant/components/zabbix/__init__.py b/homeassistant/components/zabbix/__init__.py index d9bab3e6fe4..05881d649cf 100644 --- a/homeassistant/components/zabbix/__init__.py +++ b/homeassistant/components/zabbix/__init__.py @@ -11,8 +11,9 @@ import time from urllib.error import HTTPError from urllib.parse import urljoin -from pyzabbix import ZabbixAPI, ZabbixAPIException, ZabbixMetric, ZabbixSender import voluptuous as vol +from zabbix_utils import ItemValue, Sender, ZabbixAPI +from zabbix_utils.exceptions import APIRequestError from homeassistant.const import ( CONF_HOST, @@ -42,6 +43,7 @@ CONF_PUBLISH_STATES_HOST = "publish_states_host" DEFAULT_SSL = False DEFAULT_PATH = "zabbix" +DEFAULT_SENDER_PORT = 10051 TIMEOUT = 5 RETRY_DELAY = 20 @@ -86,7 +88,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: try: zapi = ZabbixAPI(url=url, user=username, password=password) _LOGGER.debug("Connected to Zabbix API Version %s", zapi.api_version()) - except ZabbixAPIException as login_exception: + except APIRequestError as login_exception: _LOGGER.error("Unable to login to the Zabbix API: %s", login_exception) return False except HTTPError as http_error: @@ -104,7 +106,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: def event_to_metrics( event: Event, float_keys: set[str], string_keys: set[str] - ) -> list[ZabbixMetric] | None: + ) -> list[ItemValue] | None: """Add an event to the outgoing Zabbix list.""" state = event.data.get("new_state") if state is None or state.state in (STATE_UNKNOWN, "", STATE_UNAVAILABLE): @@ -145,14 +147,14 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: float_keys.update(floats) if len(float_keys) != float_keys_count: floats_discovery = [{"{#KEY}": float_key} for float_key in float_keys] - metric = ZabbixMetric( + metric = ItemValue( publish_states_host, "homeassistant.floats_discovery", json.dumps(floats_discovery), ) metrics.append(metric) for key, value in floats.items(): - metric = ZabbixMetric( + metric = ItemValue( publish_states_host, f"homeassistant.float[{key}]", value ) metrics.append(metric) @@ -161,7 +163,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: return metrics if publish_states_host: - zabbix_sender = 
ZabbixSender(zabbix_server=conf[CONF_HOST]) + zabbix_sender = Sender(server=conf[CONF_HOST], port=DEFAULT_SENDER_PORT) instance = ZabbixThread(zabbix_sender, event_to_metrics) instance.setup(hass) @@ -175,10 +177,8 @@ class ZabbixThread(threading.Thread): def __init__( self, - zabbix_sender: ZabbixSender, - event_to_metrics: Callable[ - [Event, set[str], set[str]], list[ZabbixMetric] | None - ], + zabbix_sender: Sender, + event_to_metrics: Callable[[Event, set[str], set[str]], list[ItemValue] | None], ) -> None: """Initialize the listener.""" threading.Thread.__init__(self, name="Zabbix") @@ -208,12 +208,12 @@ class ZabbixThread(threading.Thread): item = (time.monotonic(), event) self.queue.put(item) - def get_metrics(self) -> tuple[int, list[ZabbixMetric]]: + def get_metrics(self) -> tuple[int, list[ItemValue]]: """Return a batch of events formatted for writing.""" queue_seconds = QUEUE_BACKLOG_SECONDS + self.MAX_TRIES * RETRY_DELAY count = 0 - metrics: list[ZabbixMetric] = [] + metrics: list[ItemValue] = [] dropped = 0 @@ -243,7 +243,7 @@ class ZabbixThread(threading.Thread): return count, metrics - def write_to_zabbix(self, metrics: list[ZabbixMetric]) -> None: + def write_to_zabbix(self, metrics: list[ItemValue]) -> None: """Write preprocessed events to zabbix, with retry.""" for retry in range(self.MAX_TRIES + 1): diff --git a/homeassistant/components/zabbix/manifest.json b/homeassistant/components/zabbix/manifest.json index 9c7171bea46..86389d2b839 100644 --- a/homeassistant/components/zabbix/manifest.json +++ b/homeassistant/components/zabbix/manifest.json @@ -1,10 +1,10 @@ { "domain": "zabbix", "name": "Zabbix", - "codeowners": [], + "codeowners": ["@kruton"], "documentation": "https://www.home-assistant.io/integrations/zabbix", "iot_class": "local_polling", - "loggers": ["pyzabbix"], + "loggers": ["zabbix_utils"], "quality_scale": "legacy", - "requirements": ["py-zabbix==1.1.7"] + "requirements": ["zabbix-utils==2.0.1"] } diff --git a/homeassistant/components/zabbix/sensor.py b/homeassistant/components/zabbix/sensor.py index f5d96f106cb..7728233ebc0 100644 --- a/homeassistant/components/zabbix/sensor.py +++ b/homeassistant/components/zabbix/sensor.py @@ -6,8 +6,8 @@ from collections.abc import Mapping import logging from typing import Any -from pyzabbix import ZabbixAPI import voluptuous as vol +from zabbix_utils import ZabbixAPI from homeassistant.components.sensor import ( PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, diff --git a/requirements_all.txt b/requirements_all.txt index a4f61fde797..dfeb83cc176 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1723,9 +1723,6 @@ py-sucks==0.9.10 # homeassistant.components.synology_dsm py-synologydsm-api==2.5.3 -# homeassistant.components.zabbix -py-zabbix==1.1.7 - # homeassistant.components.atome pyAtome==0.1.1 @@ -3084,6 +3081,9 @@ youtubeaio==1.1.5 # homeassistant.components.media_extractor yt-dlp[default]==2024.12.13 +# homeassistant.components.zabbix +zabbix-utils==2.0.1 + # homeassistant.components.zamg zamg==0.3.6 From 3df992790d8cca2c2e13e828bb41254fb8ee072e Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 20 Dec 2024 10:59:52 +0100 Subject: [PATCH 0901/1198] Bump aiohasupervisor to version 0.2.2b3 (#133631) --- homeassistant/components/hassio/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git 
a/homeassistant/components/hassio/manifest.json b/homeassistant/components/hassio/manifest.json index 70230701965..d2cf790219c 100644 --- a/homeassistant/components/hassio/manifest.json +++ b/homeassistant/components/hassio/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/hassio", "iot_class": "local_polling", "quality_scale": "internal", - "requirements": ["aiohasupervisor==0.2.2b2"], + "requirements": ["aiohasupervisor==0.2.2b3"], "single_config_entry": true } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 62de8720278..dae92035b11 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -3,7 +3,7 @@ aiodhcpwatcher==1.0.2 aiodiscover==2.1.0 aiodns==3.2.0 -aiohasupervisor==0.2.2b2 +aiohasupervisor==0.2.2b3 aiohttp-fast-zlib==0.2.0 aiohttp==3.11.11 aiohttp_cors==0.7.0 diff --git a/pyproject.toml b/pyproject.toml index af79a173bab..171ca69dac0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ dependencies = [ # Integrations may depend on hassio integration without listing it to # change behavior based on presence of supervisor. Deprecated with #127228 # Lib can be removed with 2025.11 - "aiohasupervisor==0.2.2b2", + "aiohasupervisor==0.2.2b3", "aiohttp==3.11.11", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.2.0", diff --git a/requirements.txt b/requirements.txt index a6fda6760d4..9f1615b37f2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ # Home Assistant Core aiodns==3.2.0 -aiohasupervisor==0.2.2b2 +aiohasupervisor==0.2.2b3 aiohttp==3.11.11 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index dfeb83cc176..e6a28cd6f41 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -261,7 +261,7 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.hassio -aiohasupervisor==0.2.2b2 +aiohasupervisor==0.2.2b3 # homeassistant.components.homekit_controller aiohomekit==3.2.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d374203a614..788f0faff5f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -246,7 +246,7 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.hassio -aiohasupervisor==0.2.2b2 +aiohasupervisor==0.2.2b3 # homeassistant.components.homekit_controller aiohomekit==3.2.7 From bddd8624bbf9c2fbc54335bf69f43513d768385b Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Fri, 20 Dec 2024 12:24:15 +0100 Subject: [PATCH 0902/1198] Add scale support to lamarzocco (#133335) --- .../components/lamarzocco/binary_sensor.py | 47 ++++++- .../components/lamarzocco/coordinator.py | 26 +++- homeassistant/components/lamarzocco/entity.py | 24 ++++ .../components/lamarzocco/icons.json | 10 ++ homeassistant/components/lamarzocco/number.py | 58 ++++++++- .../components/lamarzocco/quality_scale.yaml | 8 +- homeassistant/components/lamarzocco/select.py | 59 ++++++++- homeassistant/components/lamarzocco/sensor.py | 47 ++++++- .../components/lamarzocco/strings.json | 10 ++ tests/components/lamarzocco/conftest.py | 5 +- .../lamarzocco/fixtures/config_mini.json | 116 ++++++++++++++++++ .../snapshots/test_binary_sensor.ambr | 47 +++++++ .../lamarzocco/snapshots/test_init.ambr | 32 +++++ .../lamarzocco/snapshots/test_number.ambr | 116 +++++++++++++++++- .../lamarzocco/snapshots/test_select.ambr | 55 +++++++++ .../lamarzocco/snapshots/test_sensor.ambr | 51 ++++++++ .../lamarzocco/test_binary_sensor.py | 68 
++++++++++ tests/components/lamarzocco/test_init.py | 52 +++++++- tests/components/lamarzocco/test_number.py | 93 +++++++++++++- tests/components/lamarzocco/test_select.py | 97 ++++++++++++++- tests/components/lamarzocco/test_sensor.py | 69 ++++++++++- 21 files changed, 1059 insertions(+), 31 deletions(-) create mode 100644 tests/components/lamarzocco/fixtures/config_mini.json diff --git a/homeassistant/components/lamarzocco/binary_sensor.py b/homeassistant/components/lamarzocco/binary_sensor.py index 3d11992e7c1..e36b53bc993 100644 --- a/homeassistant/components/lamarzocco/binary_sensor.py +++ b/homeassistant/components/lamarzocco/binary_sensor.py @@ -3,6 +3,7 @@ from collections.abc import Callable from dataclasses import dataclass +from pylamarzocco.const import MachineModel from pylamarzocco.models import LaMarzoccoMachineConfig from homeassistant.components.binary_sensor import ( @@ -15,7 +16,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from .coordinator import LaMarzoccoConfigEntry -from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription +from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription, LaMarzoccScaleEntity # Coordinator is used to centralize the data updates PARALLEL_UPDATES = 0 @@ -28,7 +29,7 @@ class LaMarzoccoBinarySensorEntityDescription( ): """Description of a La Marzocco binary sensor.""" - is_on_fn: Callable[[LaMarzoccoMachineConfig], bool] + is_on_fn: Callable[[LaMarzoccoMachineConfig], bool | None] ENTITIES: tuple[LaMarzoccoBinarySensorEntityDescription, ...] = ( @@ -57,6 +58,15 @@ ENTITIES: tuple[LaMarzoccoBinarySensorEntityDescription, ...] = ( ), ) +SCALE_ENTITIES: tuple[LaMarzoccoBinarySensorEntityDescription, ...] = ( + LaMarzoccoBinarySensorEntityDescription( + key="connected", + device_class=BinarySensorDeviceClass.CONNECTIVITY, + is_on_fn=lambda config: config.scale.connected if config.scale else None, + entity_category=EntityCategory.DIAGNOSTIC, + ), +) + async def async_setup_entry( hass: HomeAssistant, @@ -66,11 +76,30 @@ async def async_setup_entry( """Set up binary sensor entities.""" coordinator = entry.runtime_data.config_coordinator - async_add_entities( + entities = [ LaMarzoccoBinarySensorEntity(coordinator, description) for description in ENTITIES if description.supported_fn(coordinator) - ) + ] + + if ( + coordinator.device.model == MachineModel.LINEA_MINI + and coordinator.device.config.scale + ): + entities.extend( + LaMarzoccoScaleBinarySensorEntity(coordinator, description) + for description in SCALE_ENTITIES + ) + + def _async_add_new_scale() -> None: + async_add_entities( + LaMarzoccoScaleBinarySensorEntity(coordinator, description) + for description in SCALE_ENTITIES + ) + + coordinator.new_device_callback.append(_async_add_new_scale) + + async_add_entities(entities) class LaMarzoccoBinarySensorEntity(LaMarzoccoEntity, BinarySensorEntity): @@ -79,6 +108,14 @@ class LaMarzoccoBinarySensorEntity(LaMarzoccoEntity, BinarySensorEntity): entity_description: LaMarzoccoBinarySensorEntityDescription @property - def is_on(self) -> bool: + def is_on(self) -> bool | None: """Return true if the binary sensor is on.""" return self.entity_description.is_on_fn(self.coordinator.device.config) + + +class LaMarzoccoScaleBinarySensorEntity( + LaMarzoccoBinarySensorEntity, LaMarzoccScaleEntity +): + """Binary sensor for La Marzocco scales.""" + + entity_description: LaMarzoccoBinarySensorEntityDescription diff --git 
a/homeassistant/components/lamarzocco/coordinator.py b/homeassistant/components/lamarzocco/coordinator.py index aca84fc4660..0b07409adb5 100644 --- a/homeassistant/components/lamarzocco/coordinator.py +++ b/homeassistant/components/lamarzocco/coordinator.py @@ -3,6 +3,7 @@ from __future__ import annotations from abc import abstractmethod +from collections.abc import Callable from dataclasses import dataclass from datetime import timedelta import logging @@ -14,8 +15,9 @@ from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful from homeassistant.config_entries import ConfigEntry from homeassistant.const import EVENT_HOMEASSISTANT_STOP -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed +import homeassistant.helpers.device_registry as dr from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN @@ -62,6 +64,7 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]): self.device = device self.local_connection_configured = local_client is not None self._local_client = local_client + self.new_device_callback: list[Callable] = [] async def _async_update_data(self) -> None: """Do the data update.""" @@ -86,6 +89,8 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]): class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator): """Class to handle fetching data from the La Marzocco API centrally.""" + _scale_address: str | None = None + async def _async_setup(self) -> None: """Set up the coordinator.""" if self._local_client is not None: @@ -118,6 +123,25 @@ class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator): """Fetch data from API endpoint.""" await self.device.get_config() _LOGGER.debug("Current status: %s", str(self.device.config)) + self._async_add_remove_scale() + + @callback + def _async_add_remove_scale(self) -> None: + """Add or remove a scale when added or removed.""" + if self.device.config.scale and not self._scale_address: + self._scale_address = self.device.config.scale.address + for scale_callback in self.new_device_callback: + scale_callback() + elif not self.device.config.scale and self._scale_address: + device_registry = dr.async_get(self.hass) + if device := device_registry.async_get_device( + identifiers={(DOMAIN, self._scale_address)} + ): + device_registry.async_update_device( + device_id=device.id, + remove_config_entry_id=self.config_entry.entry_id, + ) + self._scale_address = None class LaMarzoccoFirmwareUpdateCoordinator(LaMarzoccoUpdateCoordinator): diff --git a/homeassistant/components/lamarzocco/entity.py b/homeassistant/components/lamarzocco/entity.py index c3385eebd52..3e70ff1acdf 100644 --- a/homeassistant/components/lamarzocco/entity.py +++ b/homeassistant/components/lamarzocco/entity.py @@ -2,6 +2,7 @@ from collections.abc import Callable from dataclasses import dataclass +from typing import TYPE_CHECKING from pylamarzocco.const import FirmwareType from pylamarzocco.devices.machine import LaMarzoccoMachine @@ -85,3 +86,26 @@ class LaMarzoccoEntity(LaMarzoccoBaseEntity): """Initialize the entity.""" super().__init__(coordinator, entity_description.key) self.entity_description = entity_description + + +class LaMarzoccScaleEntity(LaMarzoccoEntity): + """Common class for scale.""" + + def __init__( + self, + coordinator: LaMarzoccoUpdateCoordinator, + entity_description: LaMarzoccoEntityDescription, + ) -> None: + """Initialize the entity.""" + 
super().__init__(coordinator, entity_description) + scale = coordinator.device.config.scale + if TYPE_CHECKING: + assert scale + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, scale.address)}, + name=scale.name, + manufacturer="Acaia", + model="Lunar", + model_id="Y.301", + via_device=(DOMAIN, coordinator.device.serial_number), + ) diff --git a/homeassistant/components/lamarzocco/icons.json b/homeassistant/components/lamarzocco/icons.json index 860da12ddd9..79267b4abd4 100644 --- a/homeassistant/components/lamarzocco/icons.json +++ b/homeassistant/components/lamarzocco/icons.json @@ -43,6 +43,9 @@ "preinfusion_off": { "default": "mdi:water" }, + "scale_target": { + "default": "mdi:scale-balance" + }, "smart_standby_time": { "default": "mdi:timer" }, @@ -54,6 +57,13 @@ } }, "select": { + "active_bbw": { + "default": "mdi:alpha-u", + "state": { + "a": "mdi:alpha-a", + "b": "mdi:alpha-b" + } + }, "smart_standby_mode": { "default": "mdi:power", "state": { diff --git a/homeassistant/components/lamarzocco/number.py b/homeassistant/components/lamarzocco/number.py index a1389769194..44b582fbf1a 100644 --- a/homeassistant/components/lamarzocco/number.py +++ b/homeassistant/components/lamarzocco/number.py @@ -33,7 +33,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN from .coordinator import LaMarzoccoConfigEntry, LaMarzoccoUpdateCoordinator -from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription +from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription, LaMarzoccScaleEntity PARALLEL_UPDATES = 1 @@ -56,7 +56,9 @@ class LaMarzoccoKeyNumberEntityDescription( ): """Description of an La Marzocco number entity with keys.""" - native_value_fn: Callable[[LaMarzoccoMachineConfig, PhysicalKey], float | int] + native_value_fn: Callable[ + [LaMarzoccoMachineConfig, PhysicalKey], float | int | None + ] set_value_fn: Callable[ [LaMarzoccoMachine, float | int, PhysicalKey], Coroutine[Any, Any, bool] ] @@ -203,6 +205,27 @@ KEY_ENTITIES: tuple[LaMarzoccoKeyNumberEntityDescription, ...] = ( ), ) +SCALE_KEY_ENTITIES: tuple[LaMarzoccoKeyNumberEntityDescription, ...] 
= ( + LaMarzoccoKeyNumberEntityDescription( + key="scale_target", + translation_key="scale_target", + native_step=PRECISION_WHOLE, + native_min_value=1, + native_max_value=100, + entity_category=EntityCategory.CONFIG, + set_value_fn=lambda machine, weight, key: machine.set_bbw_recipe_target( + key, int(weight) + ), + native_value_fn=lambda config, key: ( + config.bbw_settings.doses[key] if config.bbw_settings else None + ), + supported_fn=( + lambda coordinator: coordinator.device.model == MachineModel.LINEA_MINI + and coordinator.device.config.scale is not None + ), + ), +) + async def async_setup_entry( hass: HomeAssistant, @@ -224,6 +247,27 @@ async def async_setup_entry( LaMarzoccoKeyNumberEntity(coordinator, description, key) for key in range(min(num_keys, 1), num_keys + 1) ) + + for description in SCALE_KEY_ENTITIES: + if description.supported_fn(coordinator): + if bbw_settings := coordinator.device.config.bbw_settings: + entities.extend( + LaMarzoccoScaleTargetNumberEntity( + coordinator, description, int(key) + ) + for key in bbw_settings.doses + ) + + def _async_add_new_scale() -> None: + if bbw_settings := coordinator.device.config.bbw_settings: + async_add_entities( + LaMarzoccoScaleTargetNumberEntity(coordinator, description, int(key)) + for description in SCALE_KEY_ENTITIES + for key in bbw_settings.doses + ) + + coordinator.new_device_callback.append(_async_add_new_scale) + async_add_entities(entities) @@ -281,7 +325,7 @@ class LaMarzoccoKeyNumberEntity(LaMarzoccoEntity, NumberEntity): self.pyhsical_key = pyhsical_key @property - def native_value(self) -> float: + def native_value(self) -> float | None: """Return the current value.""" return self.entity_description.native_value_fn( self.coordinator.device.config, PhysicalKey(self.pyhsical_key) @@ -305,3 +349,11 @@ class LaMarzoccoKeyNumberEntity(LaMarzoccoEntity, NumberEntity): }, ) from exc self.async_write_ha_state() + + +class LaMarzoccoScaleTargetNumberEntity( + LaMarzoccoKeyNumberEntity, LaMarzoccScaleEntity +): + """Entity representing a key number on the scale.""" + + entity_description: LaMarzoccoKeyNumberEntityDescription diff --git a/homeassistant/components/lamarzocco/quality_scale.yaml b/homeassistant/components/lamarzocco/quality_scale.yaml index 3677bd8d6b8..b03f661c7b7 100644 --- a/homeassistant/components/lamarzocco/quality_scale.yaml +++ b/homeassistant/components/lamarzocco/quality_scale.yaml @@ -62,9 +62,9 @@ rules: docs-troubleshooting: done docs-use-cases: done dynamic-devices: - status: exempt + status: done comment: | - Device type integration. + Device type integration, only possible for addon scale entity-category: done entity-device-class: done entity-disabled-by-default: done @@ -74,9 +74,9 @@ rules: reconfiguration-flow: done repair-issues: done stale-devices: - status: exempt + status: done comment: | - Device type integration. 
+ Device type integration, only possible for addon scale # Platinum async-dependency: done diff --git a/homeassistant/components/lamarzocco/select.py b/homeassistant/components/lamarzocco/select.py index 595c157b823..7acb654f0d2 100644 --- a/homeassistant/components/lamarzocco/select.py +++ b/homeassistant/components/lamarzocco/select.py @@ -4,7 +4,13 @@ from collections.abc import Callable, Coroutine from dataclasses import dataclass from typing import Any -from pylamarzocco.const import MachineModel, PrebrewMode, SmartStandbyMode, SteamLevel +from pylamarzocco.const import ( + MachineModel, + PhysicalKey, + PrebrewMode, + SmartStandbyMode, + SteamLevel, +) from pylamarzocco.devices.machine import LaMarzoccoMachine from pylamarzocco.exceptions import RequestNotSuccessful from pylamarzocco.models import LaMarzoccoMachineConfig @@ -17,7 +23,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN from .coordinator import LaMarzoccoConfigEntry -from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription +from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription, LaMarzoccScaleEntity PARALLEL_UPDATES = 1 @@ -52,7 +58,7 @@ class LaMarzoccoSelectEntityDescription( ): """Description of a La Marzocco select entity.""" - current_option_fn: Callable[[LaMarzoccoMachineConfig], str] + current_option_fn: Callable[[LaMarzoccoMachineConfig], str | None] select_option_fn: Callable[[LaMarzoccoMachine, str], Coroutine[Any, Any, bool]] @@ -100,6 +106,22 @@ ENTITIES: tuple[LaMarzoccoSelectEntityDescription, ...] = ( ), ) +SCALE_ENTITIES: tuple[LaMarzoccoSelectEntityDescription, ...] = ( + LaMarzoccoSelectEntityDescription( + key="active_bbw", + translation_key="active_bbw", + options=["a", "b"], + select_option_fn=lambda machine, option: machine.set_active_bbw_recipe( + PhysicalKey[option.upper()] + ), + current_option_fn=lambda config: ( + config.bbw_settings.active_dose.name.lower() + if config.bbw_settings + else None + ), + ), +) + async def async_setup_entry( hass: HomeAssistant, @@ -109,11 +131,30 @@ async def async_setup_entry( """Set up select entities.""" coordinator = entry.runtime_data.config_coordinator - async_add_entities( + entities = [ LaMarzoccoSelectEntity(coordinator, description) for description in ENTITIES if description.supported_fn(coordinator) - ) + ] + + if ( + coordinator.device.model == MachineModel.LINEA_MINI + and coordinator.device.config.scale + ): + entities.extend( + LaMarzoccoScaleSelectEntity(coordinator, description) + for description in SCALE_ENTITIES + ) + + def _async_add_new_scale() -> None: + async_add_entities( + LaMarzoccoScaleSelectEntity(coordinator, description) + for description in SCALE_ENTITIES + ) + + coordinator.new_device_callback.append(_async_add_new_scale) + + async_add_entities(entities) class LaMarzoccoSelectEntity(LaMarzoccoEntity, SelectEntity): @@ -122,7 +163,7 @@ class LaMarzoccoSelectEntity(LaMarzoccoEntity, SelectEntity): entity_description: LaMarzoccoSelectEntityDescription @property - def current_option(self) -> str: + def current_option(self) -> str | None: """Return the current selected option.""" return str( self.entity_description.current_option_fn(self.coordinator.device.config) @@ -145,3 +186,9 @@ class LaMarzoccoSelectEntity(LaMarzoccoEntity, SelectEntity): }, ) from exc self.async_write_ha_state() + + +class LaMarzoccoScaleSelectEntity(LaMarzoccoSelectEntity, LaMarzoccScaleEntity): + """Select entity for La Marzocco scales.""" + + entity_description: 
LaMarzoccoSelectEntityDescription diff --git a/homeassistant/components/lamarzocco/sensor.py b/homeassistant/components/lamarzocco/sensor.py index 8d57c1b8403..2acca879d52 100644 --- a/homeassistant/components/lamarzocco/sensor.py +++ b/homeassistant/components/lamarzocco/sensor.py @@ -12,12 +12,17 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.const import EntityCategory, UnitOfTemperature, UnitOfTime +from homeassistant.const import ( + PERCENTAGE, + EntityCategory, + UnitOfTemperature, + UnitOfTime, +) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from .coordinator import LaMarzoccoConfigEntry -from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription +from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription, LaMarzoccScaleEntity # Coordinator is used to centralize the data updates PARALLEL_UPDATES = 0 @@ -91,6 +96,21 @@ STATISTIC_ENTITIES: tuple[LaMarzoccoSensorEntityDescription, ...] = ( ), ) +SCALE_ENTITIES: tuple[LaMarzoccoSensorEntityDescription, ...] = ( + LaMarzoccoSensorEntityDescription( + key="scale_battery", + native_unit_of_measurement=PERCENTAGE, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.BATTERY, + value_fn=lambda device: ( + device.config.scale.battery if device.config.scale else 0 + ), + supported_fn=( + lambda coordinator: coordinator.device.model == MachineModel.LINEA_MINI + ), + ), +) + async def async_setup_entry( hass: HomeAssistant, @@ -106,6 +126,15 @@ async def async_setup_entry( if description.supported_fn(config_coordinator) ] + if ( + config_coordinator.device.model == MachineModel.LINEA_MINI + and config_coordinator.device.config.scale + ): + entities.extend( + LaMarzoccoScaleSensorEntity(config_coordinator, description) + for description in SCALE_ENTITIES + ) + statistics_coordinator = entry.runtime_data.statistics_coordinator entities.extend( LaMarzoccoSensorEntity(statistics_coordinator, description) @@ -113,6 +142,14 @@ async def async_setup_entry( if description.supported_fn(statistics_coordinator) ) + def _async_add_new_scale() -> None: + async_add_entities( + LaMarzoccoScaleSensorEntity(config_coordinator, description) + for description in SCALE_ENTITIES + ) + + config_coordinator.new_device_callback.append(_async_add_new_scale) + async_add_entities(entities) @@ -125,3 +162,9 @@ class LaMarzoccoSensorEntity(LaMarzoccoEntity, SensorEntity): def native_value(self) -> int | float: """State of the sensor.""" return self.entity_description.value_fn(self.coordinator.device) + + +class LaMarzoccoScaleSensorEntity(LaMarzoccoSensorEntity, LaMarzoccScaleEntity): + """Sensor for a La Marzocco scale.""" + + entity_description: LaMarzoccoSensorEntityDescription diff --git a/homeassistant/components/lamarzocco/strings.json b/homeassistant/components/lamarzocco/strings.json index 666eb7f4a84..cc96e4615dc 100644 --- a/homeassistant/components/lamarzocco/strings.json +++ b/homeassistant/components/lamarzocco/strings.json @@ -122,6 +122,9 @@ "preinfusion_off_key": { "name": "Preinfusion time Key {key}" }, + "scale_target_key": { + "name": "Brew by weight target {key}" + }, "smart_standby_time": { "name": "Smart standby time" }, @@ -133,6 +136,13 @@ } }, "select": { + "active_bbw": { + "name": "Active brew by weight recipe", + "state": { + "a": "Recipe A", + "b": "Recipe B" + } + }, "prebrew_infusion_select": { "name": "Prebrew/-infusion mode", "state": { diff --git 
a/tests/components/lamarzocco/conftest.py b/tests/components/lamarzocco/conftest.py index 997fa73604c..658e0dd96bc 100644 --- a/tests/components/lamarzocco/conftest.py +++ b/tests/components/lamarzocco/conftest.py @@ -135,7 +135,10 @@ def mock_lamarzocco(device_fixture: MachineModel) -> Generator[MagicMock]: serial_number=serial_number, name=serial_number, ) - config = load_json_object_fixture("config.json", DOMAIN) + if device_fixture == MachineModel.LINEA_MINI: + config = load_json_object_fixture("config_mini.json", DOMAIN) + else: + config = load_json_object_fixture("config.json", DOMAIN) statistics = json.loads(load_fixture("statistics.json", DOMAIN)) dummy_machine.parse_config(config) diff --git a/tests/components/lamarzocco/fixtures/config_mini.json b/tests/components/lamarzocco/fixtures/config_mini.json new file mode 100644 index 00000000000..22533a94872 --- /dev/null +++ b/tests/components/lamarzocco/fixtures/config_mini.json @@ -0,0 +1,116 @@ +{ + "version": "v1", + "preinfusionModesAvailable": ["ByDoseType"], + "machineCapabilities": [ + { + "family": "LINEA", + "groupsNumber": 1, + "coffeeBoilersNumber": 1, + "hasCupWarmer": false, + "steamBoilersNumber": 1, + "teaDosesNumber": 1, + "machineModes": ["BrewingMode", "StandBy"], + "schedulingType": "smartWakeUpSleep" + } + ], + "machine_sn": "Sn01239157", + "machine_hw": "0", + "isPlumbedIn": false, + "isBackFlushEnabled": false, + "standByTime": 0, + "tankStatus": true, + "settings": [], + "recipes": [ + { + "id": "Recipe1", + "dose_mode": "Mass", + "recipe_doses": [ + { "id": "A", "target": 32 }, + { "id": "B", "target": 45 } + ] + } + ], + "recipeAssignment": [ + { + "dose_index": "DoseA", + "recipe_id": "Recipe1", + "recipe_dose": "A", + "group": "Group1" + } + ], + "groupCapabilities": [ + { + "capabilities": { + "groupType": "AV_Group", + "groupNumber": "Group1", + "boilerId": "CoffeeBoiler1", + "hasScale": false, + "hasFlowmeter": false, + "numberOfDoses": 1 + }, + "doses": [ + { + "groupNumber": "Group1", + "doseIndex": "DoseA", + "doseType": "MassType", + "stopTarget": 32 + } + ], + "doseMode": { "groupNumber": "Group1", "brewingType": "ManualType" } + } + ], + "machineMode": "StandBy", + "teaDoses": { "DoseA": { "doseIndex": "DoseA", "stopTarget": 0 } }, + "scale": { + "connected": true, + "address": "44:b7:d0:74:5f:90", + "name": "LMZ-123A45", + "battery": 64 + }, + "boilers": [ + { "id": "SteamBoiler", "isEnabled": false, "target": 0, "current": 0 }, + { "id": "CoffeeBoiler1", "isEnabled": true, "target": 89, "current": 42 } + ], + "boilerTargetTemperature": { "SteamBoiler": 0, "CoffeeBoiler1": 89 }, + "preinfusionMode": { + "Group1": { + "groupNumber": "Group1", + "preinfusionStyle": "PreinfusionByDoseType" + } + }, + "preinfusionSettings": { + "mode": "TypeB", + "Group1": [ + { + "groupNumber": "Group1", + "doseType": "DoseA", + "preWetTime": 2, + "preWetHoldTime": 3 + } + ] + }, + "wakeUpSleepEntries": [ + { + "id": "T6aLl42", + "days": [ + "monday", + "tuesday", + "wednesday", + "thursday", + "friday", + "saturday", + "sunday" + ], + "steam": false, + "enabled": false, + "timeOn": "24:0", + "timeOff": "24:0" + } + ], + "smartStandBy": { "mode": "LastBrewing", "minutes": 10, "enabled": true }, + "clock": "2024-08-31T14:47:45", + "firmwareVersions": [ + { "name": "machine_firmware", "fw_version": "2.12" }, + { "name": "gateway_firmware", "fw_version": "v3.6-rc4" } + ] +} diff --git a/tests/components/lamarzocco/snapshots/test_binary_sensor.ambr b/tests/components/lamarzocco/snapshots/test_binary_sensor.ambr index 
cda285a7106..5308ae22184 100644 --- a/tests/components/lamarzocco/snapshots/test_binary_sensor.ambr +++ b/tests/components/lamarzocco/snapshots/test_binary_sensor.ambr @@ -140,3 +140,50 @@ 'unit_of_measurement': None, }) # --- +# name: test_scale_connectivity[Linea Mini] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'LMZ-123A45 Connectivity', + }), + 'context': , + 'entity_id': 'binary_sensor.lmz_123a45_connectivity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_scale_connectivity[Linea Mini].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.lmz_123a45_connectivity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Connectivity', + 'platform': 'lamarzocco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'LM012345_connected', + 'unit_of_measurement': None, + }) +# --- diff --git a/tests/components/lamarzocco/snapshots/test_init.ambr b/tests/components/lamarzocco/snapshots/test_init.ambr index 519a9301bfd..67aa0b8bea8 100644 --- a/tests/components/lamarzocco/snapshots/test_init.ambr +++ b/tests/components/lamarzocco/snapshots/test_init.ambr @@ -39,3 +39,35 @@ 'via_device_id': None, }) # --- +# name: test_scale_device[Linea Mini] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'lamarzocco', + '44:b7:d0:74:5f:90', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Acaia', + 'model': 'Lunar', + 'model_id': 'Y.301', + 'name': 'LMZ-123A45', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': , + }) +# --- diff --git a/tests/components/lamarzocco/snapshots/test_number.ambr b/tests/components/lamarzocco/snapshots/test_number.ambr index b7e42bb425f..49e4713aab1 100644 --- a/tests/components/lamarzocco/snapshots/test_number.ambr +++ b/tests/components/lamarzocco/snapshots/test_number.ambr @@ -657,7 +657,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1', + 'state': '3', }) # --- # name: test_pre_brew_infusion_numbers[prebrew_off_time-set_prebrew_time-Enabled-6-kwargs0-Linea Mini].1 @@ -771,7 +771,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1', + 'state': '3', }) # --- # name: test_pre_brew_infusion_numbers[prebrew_on_time-set_prebrew_time-Enabled-6-kwargs1-Linea Mini].1 @@ -885,7 +885,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1', + 'state': '3', }) # --- # name: test_pre_brew_infusion_numbers[preinfusion_time-set_preinfusion_time-TypeB-7-kwargs2-Linea Mini].1 @@ -983,3 +983,113 @@ 'unit_of_measurement': , }) # --- +# name: test_set_target[Linea Mini-1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'LMZ-123A45 Brew by weight target 1', + 'max': 100, + 'min': 1, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 
'number.lmz_123a45_brew_by_weight_target_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '32', + }) +# --- +# name: test_set_target[Linea Mini-1].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100, + 'min': 1, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.lmz_123a45_brew_by_weight_target_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Brew by weight target 1', + 'platform': 'lamarzocco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'scale_target_key', + 'unique_id': 'LM012345_scale_target_key1', + 'unit_of_measurement': None, + }) +# --- +# name: test_set_target[Linea Mini-2] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'LMZ-123A45 Brew by weight target 2', + 'max': 100, + 'min': 1, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.lmz_123a45_brew_by_weight_target_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '45', + }) +# --- +# name: test_set_target[Linea Mini-2].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100, + 'min': 1, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.lmz_123a45_brew_by_weight_target_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Brew by weight target 2', + 'platform': 'lamarzocco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'scale_target_key', + 'unique_id': 'LM012345_scale_target_key2', + 'unit_of_measurement': None, + }) +# --- diff --git a/tests/components/lamarzocco/snapshots/test_select.ambr b/tests/components/lamarzocco/snapshots/test_select.ambr index 46fa55eff13..325409a0b7f 100644 --- a/tests/components/lamarzocco/snapshots/test_select.ambr +++ b/tests/components/lamarzocco/snapshots/test_select.ambr @@ -1,4 +1,59 @@ # serializer version: 1 +# name: test_active_bbw_recipe[Linea Mini] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'LMZ-123A45 Active brew by weight recipe', + 'options': list([ + 'a', + 'b', + ]), + }), + 'context': , + 'entity_id': 'select.lmz_123a45_active_brew_by_weight_recipe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'a', + }) +# --- +# name: test_active_bbw_recipe[Linea Mini].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'a', + 'b', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.lmz_123a45_active_brew_by_weight_recipe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Active brew by weight recipe', + 'platform': 
'lamarzocco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'active_bbw', + 'unique_id': 'LM012345_active_bbw', + 'unit_of_measurement': None, + }) +# --- # name: test_pre_brew_infusion_select[GS3 AV] StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/lamarzocco/snapshots/test_sensor.ambr b/tests/components/lamarzocco/snapshots/test_sensor.ambr index da1efbf1eaa..6afdffab821 100644 --- a/tests/components/lamarzocco/snapshots/test_sensor.ambr +++ b/tests/components/lamarzocco/snapshots/test_sensor.ambr @@ -1,4 +1,55 @@ # serializer version: 1 +# name: test_scale_battery[Linea Mini] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'LMZ-123A45 Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.lmz_123a45_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '64', + }) +# --- +# name: test_scale_battery[Linea Mini].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.lmz_123a45_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'lamarzocco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'LM012345_scale_battery', + 'unit_of_measurement': '%', + }) +# --- # name: test_sensors[GS012345_current_coffee_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/lamarzocco/test_binary_sensor.py b/tests/components/lamarzocco/test_binary_sensor.py index 956bfe90dd4..cba806d887c 100644 --- a/tests/components/lamarzocco/test_binary_sensor.py +++ b/tests/components/lamarzocco/test_binary_sensor.py @@ -4,7 +4,10 @@ from datetime import timedelta from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory +from pylamarzocco.const import MachineModel from pylamarzocco.exceptions import RequestNotSuccessful +from pylamarzocco.models import LaMarzoccoScale +import pytest from syrupy import SnapshotAssertion from homeassistant.const import STATE_UNAVAILABLE @@ -98,3 +101,68 @@ async def test_sensor_going_unavailable( state = hass.states.get(brewing_active_sensor) assert state assert state.state == STATE_UNAVAILABLE + + +@pytest.mark.parametrize("device_fixture", [MachineModel.LINEA_MINI]) +async def test_scale_connectivity( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the scale binary sensors.""" + await async_init_integration(hass, mock_config_entry) + + state = hass.states.get("binary_sensor.lmz_123a45_connectivity") + assert state + assert state == snapshot + + entry = entity_registry.async_get(state.entity_id) + assert entry + assert entry.device_id + assert entry == snapshot + + +@pytest.mark.parametrize( + "device_fixture", + [MachineModel.GS3_AV, MachineModel.GS3_MP, MachineModel.LINEA_MICRA], +) +async def test_other_models_no_scale_connectivity( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, + snapshot: 
SnapshotAssertion, +) -> None: + """Ensure the other models don't have a connectivity sensor.""" + await async_init_integration(hass, mock_config_entry) + + state = hass.states.get("binary_sensor.lmz_123a45_connectivity") + assert state is None + + +@pytest.mark.parametrize("device_fixture", [MachineModel.LINEA_MINI]) +async def test_connectivity_on_new_scale_added( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Ensure the connectivity binary sensor for a new scale is added automatically.""" + + mock_lamarzocco.config.scale = None + await async_init_integration(hass, mock_config_entry) + + state = hass.states.get("binary_sensor.scale_123a45_connectivity") + assert state is None + + mock_lamarzocco.config.scale = LaMarzoccoScale( + connected=True, name="Scale-123A45", address="aa:bb:cc:dd:ee:ff", battery=50 + ) + + freezer.tick(timedelta(minutes=10)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("binary_sensor.scale_123a45_connectivity") + assert state diff --git a/tests/components/lamarzocco/test_init.py b/tests/components/lamarzocco/test_init.py index 446c8780b62..7d90c049a3b 100644 --- a/tests/components/lamarzocco/test_init.py +++ b/tests/components/lamarzocco/test_init.py @@ -1,8 +1,10 @@ """Test initialization of lamarzocco.""" +from datetime import timedelta from unittest.mock import AsyncMock, MagicMock, patch -from pylamarzocco.const import FirmwareType +from freezegun.api import FrozenDateTimeFactory +from pylamarzocco.const import FirmwareType, MachineModel from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful import pytest from syrupy import SnapshotAssertion @@ -27,7 +29,7 @@ from homeassistant.helpers import ( from . 
import USER_INPUT, async_init_integration, get_bluetooth_service_info -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed async def test_load_unload_config_entry( @@ -251,3 +253,49 @@ async def test_device( device = device_registry.async_get(entry.device_id) assert device assert device == snapshot + + +@pytest.mark.parametrize("device_fixture", [MachineModel.LINEA_MINI]) +async def test_scale_device( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the device.""" + + await async_init_integration(hass, mock_config_entry) + + device = device_registry.async_get_device( + identifiers={(DOMAIN, mock_lamarzocco.config.scale.address)} + ) + assert device + assert device == snapshot + + +@pytest.mark.parametrize("device_fixture", [MachineModel.LINEA_MINI]) +async def test_remove_stale_scale( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Ensure stale scale is cleaned up.""" + + await async_init_integration(hass, mock_config_entry) + + scale_address = mock_lamarzocco.config.scale.address + + device = device_registry.async_get_device(identifiers={(DOMAIN, scale_address)}) + assert device + + mock_lamarzocco.config.scale = None + + freezer.tick(timedelta(minutes=10)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + device = device_registry.async_get_device(identifiers={(DOMAIN, scale_address)}) + assert device is None diff --git a/tests/components/lamarzocco/test_number.py b/tests/components/lamarzocco/test_number.py index 710a0220e06..65c5e264f22 100644 --- a/tests/components/lamarzocco/test_number.py +++ b/tests/components/lamarzocco/test_number.py @@ -1,8 +1,10 @@ """Tests for the La Marzocco number entities.""" +from datetime import timedelta from typing import Any from unittest.mock import MagicMock +from freezegun.api import FrozenDateTimeFactory from pylamarzocco.const import ( KEYS_PER_MODEL, BoilerType, @@ -11,6 +13,7 @@ from pylamarzocco.const import ( PrebrewMode, ) from pylamarzocco.exceptions import RequestNotSuccessful +from pylamarzocco.models import LaMarzoccoScale import pytest from syrupy import SnapshotAssertion @@ -26,7 +29,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from . 
import async_init_integration -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed @pytest.mark.parametrize( @@ -444,3 +447,91 @@ async def test_number_error( blocking=True, ) assert exc_info.value.translation_key == "number_exception_key" + + +@pytest.mark.parametrize("physical_key", [PhysicalKey.A, PhysicalKey.B]) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize("device_fixture", [MachineModel.LINEA_MINI]) +async def test_set_target( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + physical_key: PhysicalKey, +) -> None: + """Test the La Marzocco set target sensors.""" + + await async_init_integration(hass, mock_config_entry) + + entity_name = f"number.lmz_123a45_brew_by_weight_target_{int(physical_key)}" + + state = hass.states.get(entity_name) + + assert state + assert state == snapshot + + entry = entity_registry.async_get(state.entity_id) + assert entry + assert entry == snapshot + + # service call + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_name, + ATTR_VALUE: 42, + }, + blocking=True, + ) + + mock_lamarzocco.set_bbw_recipe_target.assert_called_once_with(physical_key, 42) + + +@pytest.mark.parametrize( + "device_fixture", + [MachineModel.GS3_AV, MachineModel.GS3_MP, MachineModel.LINEA_MICRA], +) +async def test_other_models_no_scale_set_target( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Ensure the other models don't have a set target numbers.""" + await async_init_integration(hass, mock_config_entry) + + for i in range(1, 3): + state = hass.states.get(f"number.lmz_123a45_brew_by_weight_target_{i}") + assert state is None + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize("device_fixture", [MachineModel.LINEA_MINI]) +async def test_set_target_on_new_scale_added( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Ensure the set target numbers for a new scale are added automatically.""" + + mock_lamarzocco.config.scale = None + await async_init_integration(hass, mock_config_entry) + + for i in range(1, 3): + state = hass.states.get(f"number.scale_123a45_brew_by_weight_target_{i}") + assert state is None + + mock_lamarzocco.config.scale = LaMarzoccoScale( + connected=True, name="Scale-123A45", address="aa:bb:cc:dd:ee:ff", battery=50 + ) + + freezer.tick(timedelta(minutes=10)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + for i in range(1, 3): + state = hass.states.get(f"number.scale_123a45_brew_by_weight_target_{i}") + assert state diff --git a/tests/components/lamarzocco/test_select.py b/tests/components/lamarzocco/test_select.py index 24b96f84f37..614bffac172 100644 --- a/tests/components/lamarzocco/test_select.py +++ b/tests/components/lamarzocco/test_select.py @@ -1,9 +1,18 @@ """Tests for the La Marzocco select entities.""" +from datetime import timedelta from unittest.mock import MagicMock -from pylamarzocco.const import MachineModel, PrebrewMode, SmartStandbyMode, SteamLevel +from freezegun.api import FrozenDateTimeFactory +from pylamarzocco.const import ( + MachineModel, + PhysicalKey, + PrebrewMode, + SmartStandbyMode, + SteamLevel, +) from pylamarzocco.exceptions 
import RequestNotSuccessful +from pylamarzocco.models import LaMarzoccoScale import pytest from syrupy import SnapshotAssertion @@ -17,9 +26,12 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -pytestmark = pytest.mark.usefixtures("init_integration") +from . import async_init_integration + +from tests.common import MockConfigEntry, async_fire_time_changed +@pytest.mark.usefixtures("init_integration") @pytest.mark.parametrize("device_fixture", [MachineModel.LINEA_MICRA]) async def test_steam_boiler_level( hass: HomeAssistant, @@ -54,6 +66,9 @@ async def test_steam_boiler_level( mock_lamarzocco.set_steam_level.assert_called_once_with(level=SteamLevel.LEVEL_2) +pytest.mark.usefixtures("init_integration") + + @pytest.mark.parametrize( "device_fixture", [MachineModel.GS3_AV, MachineModel.GS3_MP, MachineModel.LINEA_MINI], @@ -69,6 +84,7 @@ async def test_steam_boiler_level_none( assert state is None +@pytest.mark.usefixtures("init_integration") @pytest.mark.parametrize( "device_fixture", [MachineModel.LINEA_MICRA, MachineModel.GS3_AV, MachineModel.LINEA_MINI], @@ -106,6 +122,7 @@ async def test_pre_brew_infusion_select( mock_lamarzocco.set_prebrew_mode.assert_called_once_with(mode=PrebrewMode.PREBREW) +@pytest.mark.usefixtures("init_integration") @pytest.mark.parametrize( "device_fixture", [MachineModel.GS3_MP], @@ -121,6 +138,7 @@ async def test_pre_brew_infusion_select_none( assert state is None +@pytest.mark.usefixtures("init_integration") async def test_smart_standby_mode( hass: HomeAssistant, entity_registry: er.EntityRegistry, @@ -155,6 +173,7 @@ async def test_smart_standby_mode( ) +@pytest.mark.usefixtures("init_integration") async def test_select_errors( hass: HomeAssistant, mock_lamarzocco: MagicMock, @@ -179,3 +198,77 @@ async def test_select_errors( blocking=True, ) assert exc_info.value.translation_key == "select_option_error" + + +@pytest.mark.usefixtures("init_integration") +@pytest.mark.parametrize("device_fixture", [MachineModel.LINEA_MINI]) +async def test_active_bbw_recipe( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_lamarzocco: MagicMock, + snapshot: SnapshotAssertion, +) -> None: + """Test the La Marzocco active bbw recipe select.""" + + state = hass.states.get("select.lmz_123a45_active_brew_by_weight_recipe") + + assert state + assert state == snapshot + + entry = entity_registry.async_get(state.entity_id) + assert entry + assert entry == snapshot + + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: "select.lmz_123a45_active_brew_by_weight_recipe", + ATTR_OPTION: "b", + }, + blocking=True, + ) + + mock_lamarzocco.set_active_bbw_recipe.assert_called_once_with(PhysicalKey.B) + + +@pytest.mark.usefixtures("init_integration") +@pytest.mark.parametrize( + "device_fixture", + [MachineModel.GS3_AV, MachineModel.GS3_MP, MachineModel.LINEA_MICRA], +) +async def test_other_models_no_active_bbw_select( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, +) -> None: + """Ensure the other models don't have a battery sensor.""" + + state = hass.states.get("select.lmz_123a45_active_brew_by_weight_recipe") + assert state is None + + +@pytest.mark.parametrize("device_fixture", [MachineModel.LINEA_MINI]) +async def test_active_bbw_select_on_new_scale_added( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Ensure the 
active bbw select for a new scale is added automatically.""" + + mock_lamarzocco.config.scale = None + await async_init_integration(hass, mock_config_entry) + + state = hass.states.get("select.scale_123a45_active_brew_by_weight_recipe") + assert state is None + + mock_lamarzocco.config.scale = LaMarzoccoScale( + connected=True, name="Scale-123A45", address="aa:bb:cc:dd:ee:ff", battery=50 + ) + + freezer.tick(timedelta(minutes=10)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("select.scale_123a45_active_brew_by_weight_recipe") + assert state diff --git a/tests/components/lamarzocco/test_sensor.py b/tests/components/lamarzocco/test_sensor.py index 6f14d52d1fc..e0426e132c3 100644 --- a/tests/components/lamarzocco/test_sensor.py +++ b/tests/components/lamarzocco/test_sensor.py @@ -1,8 +1,11 @@ """Tests for La Marzocco sensors.""" +from datetime import timedelta from unittest.mock import MagicMock +from freezegun.api import FrozenDateTimeFactory from pylamarzocco.const import MachineModel +from pylamarzocco.models import LaMarzoccoScale import pytest from syrupy import SnapshotAssertion @@ -12,7 +15,7 @@ from homeassistant.helpers import entity_registry as er from . import async_init_integration -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed SENSORS = ( "total_coffees_made", @@ -85,3 +88,67 @@ async def test_no_steam_linea_mini( serial_number = mock_lamarzocco.serial_number state = hass.states.get(f"sensor.{serial_number}_current_temp_steam") assert state is None + + +@pytest.mark.parametrize("device_fixture", [MachineModel.LINEA_MINI]) +async def test_scale_battery( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the scale battery sensor.""" + await async_init_integration(hass, mock_config_entry) + + state = hass.states.get("sensor.lmz_123a45_battery") + assert state == snapshot + + entry = entity_registry.async_get(state.entity_id) + assert entry + assert entry.device_id + assert entry == snapshot + + +@pytest.mark.parametrize( + "device_fixture", + [MachineModel.GS3_AV, MachineModel.GS3_MP, MachineModel.LINEA_MICRA], +) +async def test_other_models_no_scale_battery( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Ensure the other models don't have a battery sensor.""" + await async_init_integration(hass, mock_config_entry) + + state = hass.states.get("sensor.lmz_123a45_battery") + assert state is None + + +@pytest.mark.parametrize("device_fixture", [MachineModel.LINEA_MINI]) +async def test_battery_on_new_scale_added( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Ensure the battery sensor for a new scale is added automatically.""" + + mock_lamarzocco.config.scale = None + await async_init_integration(hass, mock_config_entry) + + state = hass.states.get("sensor.lmz_123a45_battery") + assert state is None + + mock_lamarzocco.config.scale = LaMarzoccoScale( + connected=True, name="Scale-123A45", address="aa:bb:cc:dd:ee:ff", battery=50 + ) + + freezer.tick(timedelta(minutes=10)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("sensor.scale_123a45_battery") + assert state From b5c46083735aaa36681a0206d825d3b71e8b7608 Mon 
Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Fri, 20 Dec 2024 12:25:45 +0100 Subject: [PATCH 0903/1198] Upgrade QS from bronze to silver for slide_local (#133560) --- .../components/slide_local/manifest.json | 2 +- .../components/slide_local/quality_scale.yaml | 19 ++++++++++++++----- 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/slide_local/manifest.json b/homeassistant/components/slide_local/manifest.json index 42c74b2c308..69d5c93b0af 100644 --- a/homeassistant/components/slide_local/manifest.json +++ b/homeassistant/components/slide_local/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/slide_local", "integration_type": "device", "iot_class": "local_polling", - "quality_scale": "bronze", + "quality_scale": "silver", "requirements": ["goslide-api==0.7.0"], "zeroconf": [ { diff --git a/homeassistant/components/slide_local/quality_scale.yaml b/homeassistant/components/slide_local/quality_scale.yaml index c3ce12efd80..7a2be591927 100644 --- a/homeassistant/components/slide_local/quality_scale.yaml +++ b/homeassistant/components/slide_local/quality_scale.yaml @@ -26,7 +26,10 @@ rules: log-when-unavailable: done entity-unavailable: done action-exceptions: done - reauthentication-flow: todo + reauthentication-flow: + status: exempt + comment: | + The password used is the device code and can't change. No reauth required. parallel-updates: done test-coverage: done integration-owner: done @@ -34,18 +37,24 @@ rules: docs-configuration-parameters: done # Gold - entity-translations: todo + entity-translations: done entity-device-class: done devices: done entity-category: done entity-disabled-by-default: done discovery: done - stale-devices: todo + stale-devices: + status: done + comment: | + Slide_local represents a single physical device, no removal stale devices required (besides removal of instance itself). diagnostics: done exception-translations: done - icon-translations: todo + icon-translations: done reconfiguration-flow: todo - dynamic-devices: todo + dynamic-devices: + status: exempt + comment: | + Slide_local represents a single physical device, no dynamic changes of devices possible (besides removal of instance itself). discovery-update-info: todo repair-issues: todo docs-use-cases: done From 86e43b7196dceabe546fec7dbb928afe271242ec Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 20 Dec 2024 12:29:04 +0100 Subject: [PATCH 0904/1198] Record Knocki quality scale (#133582) * Record Knocki quality scale * Record Knocki quality scale * Fix --- .../components/knocki/quality_scale.yaml | 92 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 92 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/knocki/quality_scale.yaml diff --git a/homeassistant/components/knocki/quality_scale.yaml b/homeassistant/components/knocki/quality_scale.yaml new file mode 100644 index 00000000000..45b3764d786 --- /dev/null +++ b/homeassistant/components/knocki/quality_scale.yaml @@ -0,0 +1,92 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + appropriate-polling: + status: exempt + comment: | + This integration is push-based. 
+ brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: + status: todo + comment: data_descriptions are missing + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: done + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: | + This integration does not provide actions. + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + This integration does not have any configuration parameters. + docs-installation-parameters: todo + entity-unavailable: todo + integration-owner: done + log-when-unavailable: todo + parallel-updates: todo + reauthentication-flow: todo + test-coverage: done + # Gold + devices: done + diagnostics: todo + discovery-update-info: + status: exempt + comment: This is a cloud service and does not benefit from device updates. + discovery: todo + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: done + entity-category: + status: exempt + comment: | + The default ones are good. + entity-device-class: + status: exempt + comment: | + Knocki does not have a device class. + entity-disabled-by-default: + status: exempt + comment: | + This integration does not have any entities that are disabled by default. + entity-translations: + status: exempt + comment: | + This integration does not have any translatable entities. + exception-translations: todo + icon-translations: done + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. 
+ stale-devices: todo + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 4e5cee2d16d..6109924b4e9 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -563,7 +563,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "kitchen_sink", "kiwi", "kmtronic", - "knocki", "knx", "kodi", "konnected", From df383a3a31f415dacb3fdfa6e2296db9fc09e5f6 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 20 Dec 2024 12:29:54 +0100 Subject: [PATCH 0905/1198] Record Mealie quality scale (#133587) * Record Mealie quality scale * Record NYT Games quality scale * Fix * Fix * Fix --- .../components/mealie/quality_scale.yaml | 79 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 79 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/mealie/quality_scale.yaml diff --git a/homeassistant/components/mealie/quality_scale.yaml b/homeassistant/components/mealie/quality_scale.yaml new file mode 100644 index 00000000000..c3b748525ca --- /dev/null +++ b/homeassistant/components/mealie/quality_scale.yaml @@ -0,0 +1,79 @@ +rules: + # Bronze + action-setup: done + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + Entities of this integration does not explicitly subscribe to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + This integration does not have any configuration parameters. + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: todo + reauthentication-flow: done + test-coverage: + status: todo + comment: Platform missing tests + # Gold + devices: done + diagnostics: done + discovery-update-info: todo + discovery: todo + docs-data-update: done + docs-examples: done + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: done + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: done + comment: | + The integration adds new todo lists on runtime. + entity-category: done + entity-device-class: done + entity-disabled-by-default: + status: exempt + comment: | + This integration does not have any irrelevant entities. + entity-translations: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: done + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. + stale-devices: + status: done + comment: | + The integration removes removed todo lists on runtime. 
+ + # Platinum + async-dependency: done + inject-websession: done + strict-typing: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 6109924b4e9..ec8898c5df5 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -631,7 +631,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "matter", "maxcube", "mazda", - "mealie", "meater", "medcom_ble", "media_extractor", From 6a599dc27a7a2f90480506f6cbed8f19390ec874 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 20 Dec 2024 12:30:35 +0100 Subject: [PATCH 0906/1198] Record NYT Games quality scale (#133592) * Record NYT Games quality scale * Record NYT Games quality scale * Fix --- .../components/nyt_games/quality_scale.yaml | 92 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 92 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/nyt_games/quality_scale.yaml diff --git a/homeassistant/components/nyt_games/quality_scale.yaml b/homeassistant/components/nyt_games/quality_scale.yaml new file mode 100644 index 00000000000..9f455bd4e2c --- /dev/null +++ b/homeassistant/components/nyt_games/quality_scale.yaml @@ -0,0 +1,92 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. + docs-high-level-description: todo + docs-installation-instructions: todo + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: | + Entities of this integration does not explicitly subscribe to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: | + This integration does not provide additional actions. + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: + status: exempt + comment: | + This is handled by the coordinator. + integration-owner: done + log-when-unavailable: + status: done + comment: | + This is handled by the coordinator. + parallel-updates: todo + reauthentication-flow: todo + test-coverage: done + # Gold + devices: done + diagnostics: todo + discovery-update-info: + status: exempt + comment: | + This integration is a service and not discoverable. + discovery: + status: exempt + comment: | + This integration is a service and not discoverable. + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: todo + entity-category: + status: done + comment: | + The entities are categorized well by using default category. + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. + stale-devices: + status: exempt + comment: | + Games can't be "unplayed". 
+ + # Platinum + async-dependency: done + inject-websession: done + strict-typing: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index ec8898c5df5..8e3d0311719 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -733,7 +733,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "nut", "nws", "nx584", - "nyt_games", "nzbget", "oasa_telematics", "obihai", From e62a563ec143ff00df16f1b2c88a5f5e4c804e08 Mon Sep 17 00:00:00 2001 From: Cyrill Raccaud Date: Fri, 20 Dec 2024 12:30:55 +0100 Subject: [PATCH 0907/1198] Add Swiss Public Transport quality scale record (#131629) --- .../swiss_public_transport/quality_scale.yaml | 86 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 86 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/swiss_public_transport/quality_scale.yaml diff --git a/homeassistant/components/swiss_public_transport/quality_scale.yaml b/homeassistant/components/swiss_public_transport/quality_scale.yaml new file mode 100644 index 00000000000..0329f9c8fab --- /dev/null +++ b/homeassistant/components/swiss_public_transport/quality_scale.yaml @@ -0,0 +1,86 @@ +rules: + # Bronze + action-setup: done + appropriate-polling: + status: done + comment: > + Polling interval is set to support one connection. + There is a rate limit at 10000 calls per day. + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: todo + dependency-transparency: todo + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: No events implemented + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + config-entry-unloading: done + log-when-unavailable: + status: done + comment: Offloaded to coordinator + entity-unavailable: + status: done + comment: Offloaded to coordinator + action-exceptions: done + reauthentication-flow: + status: exempt + comment: No authentication needed + parallel-updates: todo + test-coverage: todo + integration-owner: done + docs-installation-parameters: todo + docs-configuration-parameters: + status: exempt + comment: no options flow + + # Gold + entity-translations: done + entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: + status: done + comment: No disabled entities implemented + discovery: + status: exempt + comment: Nothing to discover + stale-devices: + status: exempt + comment: Stale not possible + diagnostics: todo + exception-translations: done + icon-translations: done + reconfiguration-flow: todo + dynamic-devices: + status: exempt + comment: No dynamic devices + discovery-update-info: + status: exempt + comment: Nothing to discover + repair-issues: + status: exempt + comment: Nothing to repair + docs-use-cases: todo + docs-supported-devices: done + docs-supported-functions: done + docs-data-update: done + docs-known-limitations: todo + docs-troubleshooting: todo + docs-examples: todo + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 8e3d0311719..e2eedea9a2c 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -983,7 +983,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "supla", "surepetcare", 
"swiss_hydrological_data", - "swiss_public_transport", "swisscom", "switch_as_x", "switchbee", From 5834ecb13ee1c293b535dd21f76f68af0257a47c Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 20 Dec 2024 12:36:54 +0100 Subject: [PATCH 0908/1198] Fix homeassistant_included flag for local backups (#133640) --- homeassistant/components/backup/util.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/backup/util.py b/homeassistant/components/backup/util.py index bb01a9a4e3f..c4d69789aac 100644 --- a/homeassistant/components/backup/util.py +++ b/homeassistant/components/backup/util.py @@ -50,6 +50,7 @@ def read_backup(backup_path: Path) -> AgentBackup: if ( homeassistant := cast(JsonObjectType, data.get("homeassistant")) ) and "version" in homeassistant: + homeassistant_included = True homeassistant_version = cast(str, homeassistant["version"]) database_included = not cast( bool, homeassistant.get("exclude_database", False) From 4c96b832975dc03763a6cc52457fd748e4b57a39 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 20 Dec 2024 13:13:25 +0100 Subject: [PATCH 0909/1198] Fix reading extra metadata for local backups (#133643) --- homeassistant/components/backup/util.py | 2 +- tests/components/backup/test_util.py | 88 +++++++++++++++++++++++++ 2 files changed, 89 insertions(+), 1 deletion(-) create mode 100644 tests/components/backup/test_util.py diff --git a/homeassistant/components/backup/util.py b/homeassistant/components/backup/util.py index c4d69789aac..0cedc07443a 100644 --- a/homeassistant/components/backup/util.py +++ b/homeassistant/components/backup/util.py @@ -61,7 +61,7 @@ def read_backup(backup_path: Path) -> AgentBackup: backup_id=cast(str, data["slug"]), database_included=database_included, date=cast(str, data["date"]), - extra_metadata=cast(dict[str, bool | str], data.get("metadata", {})), + extra_metadata=cast(dict[str, bool | str], data.get("extra", {})), folders=folders, homeassistant_included=homeassistant_included, homeassistant_version=homeassistant_version, diff --git a/tests/components/backup/test_util.py b/tests/components/backup/test_util.py new file mode 100644 index 00000000000..888029f2e35 --- /dev/null +++ b/tests/components/backup/test_util.py @@ -0,0 +1,88 @@ +"""Tests for the Backup integration's utility functions.""" + +from __future__ import annotations + +from unittest.mock import Mock, patch + +import pytest + +from homeassistant.components.backup import AddonInfo, AgentBackup, Folder +from homeassistant.components.backup.util import read_backup + + +@pytest.mark.parametrize( + ("backup_json_content", "expected_backup"), + [ + ( + b'{"compressed":true,"date":"2024-12-02T07:23:58.261875-05:00","homeassistant":' + b'{"exclude_database":true,"version":"2024.12.0.dev0"},"name":"test",' + b'"protected":true,"slug":"455645fe","type":"partial","version":2}', + AgentBackup( + addons=[], + backup_id="455645fe", + date="2024-12-02T07:23:58.261875-05:00", + database_included=False, + extra_metadata={}, + folders=[], + homeassistant_included=True, + homeassistant_version="2024.12.0.dev0", + name="test", + protected=True, + size=1234, + ), + ), + ( + b'{"slug":"d4b8fdc6","version":2,"name":"Core 2025.1.0.dev0",' + b'"date":"2024-12-20T11:27:51.119062+00:00","type":"partial",' + b'"supervisor_version":"2024.12.1.dev1803",' + b'"extra":{"instance_id":"6b453733d2d74d2a9ae432ff2fbaaa64",' + b'"with_automatic_settings":false},"homeassistant":' + b'{"version":"2025.1.0.dev202412200230","exclude_database":false,"size":0.0},' + 
b'"compressed":true,"protected":true,"repositories":[' + b'"https://github.com/home-assistant/hassio-addons-development","local",' + b'"https://github.com/esphome/home-assistant-addon","core",' + b'"https://github.com/music-assistant/home-assistant-addon",' + b'"https://github.com/hassio-addons/repository"],"crypto":"aes128",' + b'"folders":["share","media"],"addons":[{"slug":"core_configurator",' + b'"name":"File editor","version":"5.5.0","size":0.0},' + b'{"slug":"ae6e943c_remote_api","name":"Remote API proxy",' + b'"version":"1.3.0","size":0.0}],"docker":{"registries":{}}}', + AgentBackup( + addons=[ + AddonInfo( + name="File editor", + slug="core_configurator", + version="5.5.0", + ), + AddonInfo( + name="Remote API proxy", + slug="ae6e943c_remote_api", + version="1.3.0", + ), + ], + backup_id="d4b8fdc6", + date="2024-12-20T11:27:51.119062+00:00", + database_included=True, + extra_metadata={ + "instance_id": "6b453733d2d74d2a9ae432ff2fbaaa64", + "with_automatic_settings": False, + }, + folders=[Folder.SHARE, Folder.MEDIA], + homeassistant_included=True, + homeassistant_version="2025.1.0.dev202412200230", + name="Core 2025.1.0.dev0", + protected=True, + size=1234, + ), + ), + ], +) +def test_read_backup(backup_json_content: bytes, expected_backup: AgentBackup) -> None: + """Test reading a backup.""" + mock_path = Mock() + mock_path.stat.return_value.st_size = 1234 + + with patch("homeassistant.components.backup.util.tarfile.open") as mock_open_tar: + mock_open_tar.return_value.__enter__.return_value.extractfile().read.return_value = backup_json_content + backup = read_backup(mock_path) + assert backup == expected_backup From 870dc4dbeabe55a86d8d73d8e9184f55cdf3bfac Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 20 Dec 2024 13:14:41 +0100 Subject: [PATCH 0910/1198] Record Analytics Insights quality scale (#133571) * Record Analytics Insights quality scale * Record Analytics Insights quality scale * Record Analytics Insights quality scale * Update homeassistant/components/analytics_insights/quality_scale.yaml Co-authored-by: Josef Zweck * Update homeassistant/components/analytics_insights/quality_scale.yaml Co-authored-by: Josef Zweck --------- Co-authored-by: Josef Zweck --- .../analytics_insights/config_flow.py | 10 +- .../analytics_insights/quality_scale.yaml | 100 ++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 3 files changed, 103 insertions(+), 8 deletions(-) create mode 100644 homeassistant/components/analytics_insights/quality_scale.yaml diff --git a/homeassistant/components/analytics_insights/config_flow.py b/homeassistant/components/analytics_insights/config_flow.py index c36755f5403..da77a35f789 100644 --- a/homeassistant/components/analytics_insights/config_flow.py +++ b/homeassistant/components/analytics_insights/config_flow.py @@ -11,12 +11,7 @@ from python_homeassistant_analytics import ( from python_homeassistant_analytics.models import IntegrationType import voluptuous as vol -from homeassistant.config_entries import ( - ConfigEntry, - ConfigFlow, - ConfigFlowResult, - OptionsFlow, -) +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow from homeassistant.core import callback from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.selector import ( @@ -25,6 +20,7 @@ from homeassistant.helpers.selector import ( SelectSelectorConfig, ) +from . 
import AnalyticsInsightsConfigEntry from .const import ( CONF_TRACKED_ADDONS, CONF_TRACKED_CUSTOM_INTEGRATIONS, @@ -46,7 +42,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: AnalyticsInsightsConfigEntry, ) -> HomeassistantAnalyticsOptionsFlowHandler: """Get the options flow for this handler.""" return HomeassistantAnalyticsOptionsFlowHandler() diff --git a/homeassistant/components/analytics_insights/quality_scale.yaml b/homeassistant/components/analytics_insights/quality_scale.yaml new file mode 100644 index 00000000000..ff999d97d03 --- /dev/null +++ b/homeassistant/components/analytics_insights/quality_scale.yaml @@ -0,0 +1,100 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. + docs-high-level-description: todo + docs-installation-instructions: todo + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: | + Entities of this integration does not explicitly subscribe to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: | + This integration does not provide actions. + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: + status: done + comment: | + The coordinator handles this. + integration-owner: done + log-when-unavailable: + status: done + comment: | + The coordinator handles this. + parallel-updates: todo + reauthentication-flow: + status: exempt + comment: | + This integration does not require authentication. + test-coverage: todo + # Gold + devices: done + diagnostics: todo + discovery-update-info: + status: exempt + comment: | + This integration is a cloud service and thus does not support discovery. + discovery: + status: exempt + comment: | + This integration is a cloud service and thus does not support discovery. + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + This integration has a fixed single service. + entity-category: done + entity-device-class: + status: exempt + comment: | + This integration does not have entities with device classes. + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: + status: exempt + comment: All the options of this integration are managed via the options flow + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. + stale-devices: + status: exempt + comment: | + This integration has a fixed single service. 
+ + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index e2eedea9a2c..3d880d7b536 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -151,7 +151,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "amcrest", "ampio", "analytics", - "analytics_insights", "android_ip_webcam", "androidtv", "androidtv_remote", From 7d04eef5c5de7c7867378f247005bb0d607cfe54 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 20 Dec 2024 15:06:55 +0100 Subject: [PATCH 0911/1198] Reject duplicates in WS command backup/config/update (#133650) * Reject duplicates in WS command backup/config/update * Add tests --- homeassistant/components/backup/websocket.py | 10 +- .../backup/snapshots/test_websocket.ambr | 180 ++++++++++++++++++ tests/components/backup/test_websocket.py | 12 ++ 3 files changed, 199 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/backup/websocket.py b/homeassistant/components/backup/websocket.py index 2fee84e39bb..718ffc3ae44 100644 --- a/homeassistant/components/backup/websocket.py +++ b/homeassistant/components/backup/websocket.py @@ -291,11 +291,15 @@ async def handle_config_info( vol.Required("type"): "backup/config/update", vol.Optional("create_backup"): vol.Schema( { - vol.Optional("agent_ids"): vol.All(list[str]), - vol.Optional("include_addons"): vol.Any(list[str], None), + vol.Optional("agent_ids"): vol.All([str], vol.Unique()), + vol.Optional("include_addons"): vol.Any( + vol.All([str], vol.Unique()), None + ), vol.Optional("include_all_addons"): bool, vol.Optional("include_database"): bool, - vol.Optional("include_folders"): vol.Any([vol.Coerce(Folder)], None), + vol.Optional("include_folders"): vol.Any( + vol.All([vol.Coerce(Folder)], vol.Unique()), None + ), vol.Optional("name"): vol.Any(str, None), vol.Optional("password"): vol.Any(str, None), }, diff --git a/tests/components/backup/snapshots/test_websocket.ambr b/tests/components/backup/snapshots/test_websocket.ambr index 1607e2e15d9..dbad733d83a 100644 --- a/tests/components/backup/snapshots/test_websocket.ambr +++ b/tests/components/backup/snapshots/test_websocket.ambr @@ -1466,6 +1466,186 @@ 'type': 'result', }) # --- +# name: test_config_update_errors[command1] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update_errors[command1].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update_errors[command2] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': 
dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update_errors[command2].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update_errors[command3] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update_errors[command3].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- # name: test_delete[remote_agents0-backups0] dict({ 'id': 1, diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index a0860f49149..dfc118577b5 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -992,6 +992,18 @@ async def test_config_update( "create_backup": {"agent_ids": ["test-agent"]}, "schedule": "someday", }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent", "test-agent"]}, + }, + { + "type": "backup/config/update", + "create_backup": {"include_addons": ["my-addon", "my-addon"]}, + }, + { + "type": "backup/config/update", + "create_backup": {"include_folders": ["media", "media"]}, + }, ], ) async def test_config_update_errors( From 0d309aa632e9e7a6de0db603fe8d7f2484e764c9 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Fri, 20 Dec 2024 15:14:04 +0100 Subject: [PATCH 0912/1198] Fix inconsistent spelling of "PIN" vs. "pin" (#133656) Part of the strings in the strings.json use an inconsistent spelling of "PIN" as "pin" This commit fixes this to ensure correct and consistent translations, too. 
--- homeassistant/components/elmax/strings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/elmax/strings.json b/homeassistant/components/elmax/strings.json index daa502a7dac..2ba74f5fc8f 100644 --- a/homeassistant/components/elmax/strings.json +++ b/homeassistant/components/elmax/strings.json @@ -50,7 +50,7 @@ "data": { "password": "[%key:common::config_flow::data::password%]", "username": "[%key:common::config_flow::data::username%]", - "panel_pin": "Panel Pin" + "panel_pin": "Panel PIN" } } }, @@ -58,7 +58,7 @@ "no_panel_online": "No online Elmax control panel was found.", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "network_error": "A network error occurred", - "invalid_pin": "The provided pin is invalid", + "invalid_pin": "The provided PIN is invalid", "invalid_mode": "Invalid or unsupported mode", "reauth_panel_disappeared": "The given panel is no longer associated to this user. Please log in using an account associated to this panel.", "unknown": "[%key:common::config_flow::error::unknown%]" From 83fdc07df0e2acc53ee8c363a15b6b6638e99616 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Fri, 20 Dec 2024 15:15:16 +0100 Subject: [PATCH 0913/1198] Fix inconsistent spelling of "PIN" vs. "pin" (#133655) As "PIN" is an abbreviation it should be all-caps throughout the UI. This commit fixes two inconsistent occurrences in the invoke_pin action. --- homeassistant/components/openhome/strings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/openhome/strings.json b/homeassistant/components/openhome/strings.json index b13fb997b7f..a757a2cb31c 100644 --- a/homeassistant/components/openhome/strings.json +++ b/homeassistant/components/openhome/strings.json @@ -2,11 +2,11 @@ "services": { "invoke_pin": { "name": "Invoke PIN", - "description": "Invokes a pin on the specified device.", + "description": "Invokes a PIN on the specified device.", "fields": { "pin": { "name": "PIN", - "description": "Which pin to invoke." + "description": "Which PIN to invoke." 
} } } From cbb4c06195e141af6d4d10d44e82d3c1cc0c702b Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 20 Dec 2024 15:17:08 +0100 Subject: [PATCH 0914/1198] Add Mealie to strict typing (#133644) --- .strict-typing | 1 + homeassistant/components/mealie/quality_scale.yaml | 2 +- mypy.ini | 10 ++++++++++ 3 files changed, 12 insertions(+), 1 deletion(-) diff --git a/.strict-typing b/.strict-typing index a96597da4c6..07a96a3d692 100644 --- a/.strict-typing +++ b/.strict-typing @@ -311,6 +311,7 @@ homeassistant.components.manual.* homeassistant.components.mastodon.* homeassistant.components.matrix.* homeassistant.components.matter.* +homeassistant.components.mealie.* homeassistant.components.media_extractor.* homeassistant.components.media_player.* homeassistant.components.media_source.* diff --git a/homeassistant/components/mealie/quality_scale.yaml b/homeassistant/components/mealie/quality_scale.yaml index c3b748525ca..9153bf7aadf 100644 --- a/homeassistant/components/mealie/quality_scale.yaml +++ b/homeassistant/components/mealie/quality_scale.yaml @@ -76,4 +76,4 @@ rules: # Platinum async-dependency: done inject-websession: done - strict-typing: todo + strict-typing: done diff --git a/mypy.ini b/mypy.ini index ca7195ef92f..f0d024b6b68 100644 --- a/mypy.ini +++ b/mypy.ini @@ -2866,6 +2866,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.mealie.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.media_extractor.*] check_untyped_defs = true disallow_incomplete_defs = true From 9f43a7a17b883603310a33e38b9002b66f18c029 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Fri, 20 Dec 2024 15:17:47 +0100 Subject: [PATCH 0915/1198] Fix inconsistent spelling of "PIN" and "ID" (#133653) * Fix inconsistent spelling of "PIN" and "ID" Several actions contain an inconsistent spelling of "PIN" and "ID" with lowercase characters. Especially to avoid (automated) mistranslations as (connection) pin etc. this needs to be corrected. * Fix lowercase "blink" as well Additional commit to fix the wrong spelling of "blink" along the way. --- homeassistant/components/blink/strings.json | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/blink/strings.json b/homeassistant/components/blink/strings.json index 6e2384e5d5b..74f8ae1cb28 100644 --- a/homeassistant/components/blink/strings.json +++ b/homeassistant/components/blink/strings.json @@ -84,16 +84,16 @@ } }, "send_pin": { - "name": "Send pin", - "description": "Sends a new PIN to blink for 2FA.", + "name": "Send PIN", + "description": "Sends a new PIN to Blink for 2FA.", "fields": { "pin": { - "name": "Pin", - "description": "PIN received from blink. Leave empty if you only received a verification email." + "name": "PIN", + "description": "PIN received from Blink. Leave empty if you only received a verification email." }, "config_entry_id": { "name": "Integration ID", - "description": "The Blink Integration id." + "description": "The Blink Integration ID." 
} } } From 10478f4ca58d57e2c6b3ad0ee1d157cf9f670dd2 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 20 Dec 2024 15:19:57 +0100 Subject: [PATCH 0916/1198] Fix logic in backup retention filter (#133654) --- homeassistant/components/backup/config.py | 2 +- tests/components/backup/test_websocket.py | 29 +++++++++++++++++++++++ 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/backup/config.py b/homeassistant/components/backup/config.py index e8d740d2e13..cdecf55848f 100644 --- a/homeassistant/components/backup/config.py +++ b/homeassistant/components/backup/config.py @@ -467,7 +467,7 @@ async def delete_backups_exceeding_configured_count(manager: BackupManager) -> N sorted( backups.items(), key=lambda backup_item: backup_item[1].date, - )[: len(backups) - manager.config.data.retention.copies] + )[: max(len(backups) - manager.config.data.retention.copies, 0)] ) await _delete_filtered_backups(manager, _backups_filter) diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index dfc118577b5..c75fb978082 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -1331,6 +1331,35 @@ async def test_config_schedule_logic( 0, [], ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 3, "days": None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_automatic_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_automatic_settings=True, + spec=ManagerBackup, + ), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, + 0, + [], + ), ( { "type": "backup/config/update", From f49111a4d92bd3c0ecf0b70a868af0254300744d Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 20 Dec 2024 15:23:21 +0100 Subject: [PATCH 0917/1198] Bump aiohasupervisor to version 0.2.2b4 (#133652) * Bump aiohasupervisor to version 0.2.2b4 * Update test --- homeassistant/components/hassio/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/hassio/test_backup.py | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/hassio/manifest.json b/homeassistant/components/hassio/manifest.json index d2cf790219c..7276b76afc0 100644 --- a/homeassistant/components/hassio/manifest.json +++ b/homeassistant/components/hassio/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/hassio", "iot_class": "local_polling", "quality_scale": "internal", - "requirements": ["aiohasupervisor==0.2.2b3"], + "requirements": ["aiohasupervisor==0.2.2b4"], "single_config_entry": true } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index dae92035b11..bd78ef8e3fb 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -3,7 +3,7 @@ aiodhcpwatcher==1.0.2 aiodiscover==2.1.0 aiodns==3.2.0 -aiohasupervisor==0.2.2b3 +aiohasupervisor==0.2.2b4 aiohttp-fast-zlib==0.2.0 aiohttp==3.11.11 aiohttp_cors==0.7.0 diff --git a/pyproject.toml b/pyproject.toml index 171ca69dac0..71eae73a859 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ dependencies = [ # Integrations may depend on hassio 
integration without listing it to # change behavior based on presence of supervisor. Deprecated with #127228 # Lib can be removed with 2025.11 - "aiohasupervisor==0.2.2b3", + "aiohasupervisor==0.2.2b4", "aiohttp==3.11.11", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.2.0", diff --git a/requirements.txt b/requirements.txt index 9f1615b37f2..78aa370c4ec 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ # Home Assistant Core aiodns==3.2.0 -aiohasupervisor==0.2.2b3 +aiohasupervisor==0.2.2b4 aiohttp==3.11.11 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index e6a28cd6f41..06cf4145b48 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -261,7 +261,7 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.hassio -aiohasupervisor==0.2.2b3 +aiohasupervisor==0.2.2b4 # homeassistant.components.homekit_controller aiohomekit==3.2.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 788f0faff5f..3b9edd9e7c1 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -246,7 +246,7 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.hassio -aiohasupervisor==0.2.2b3 +aiohasupervisor==0.2.2b4 # homeassistant.components.homekit_controller aiohomekit==3.2.7 diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py index 9338313c87d..c39574fd941 100644 --- a/tests/components/hassio/test_backup.py +++ b/tests/components/hassio/test_backup.py @@ -650,7 +650,7 @@ DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions( ), ( {"include_all_addons": True}, - replace(DEFAULT_BACKUP_OPTIONS, addons="all"), + replace(DEFAULT_BACKUP_OPTIONS, addons="ALL"), ), ( {"include_database": False}, From 1c0135880dbc1c38cfc1ed0b5a8e92e1f666d348 Mon Sep 17 00:00:00 2001 From: shapournemati-iotty <130070037+shapournemati-iotty@users.noreply.github.com> Date: Fri, 20 Dec 2024 15:33:05 +0100 Subject: [PATCH 0918/1198] Add outlet device class to iotty switch entity (#132912) * upgrade iottycloud lib to 0.3.0 * Add outlet * test outlet turn on and turn off * test add outlet * Refactor code to use only one SwitchEntity with an EntityDescription to distinguish Outlet and Lightswitch * Refactor switch entities to reduce duplicated code * Refactor tests to reduce duplicated code * Refactor code to improve abstraction layer using specific types instead of generics * Remove print and redundant field --- homeassistant/components/iotty/switch.py | 126 +++++++++---- tests/components/iotty/conftest.py | 28 +++ .../iotty/snapshots/test_switch.ambr | 13 ++ tests/components/iotty/test_switch.py | 178 ++++++++++++++---- 4 files changed, 268 insertions(+), 77 deletions(-) diff --git a/homeassistant/components/iotty/switch.py b/homeassistant/components/iotty/switch.py index 1e2bdffcf79..b06e3ea308d 100644 --- a/homeassistant/components/iotty/switch.py +++ b/homeassistant/components/iotty/switch.py @@ -3,13 +3,22 @@ from __future__ import annotations import logging -from typing import Any +from typing import TYPE_CHECKING, Any -from iottycloud.device import Device from iottycloud.lightswitch import LightSwitch -from iottycloud.verbs import LS_DEVICE_TYPE_UID +from iottycloud.outlet import Outlet +from iottycloud.verbs import ( + COMMAND_TURNOFF, + COMMAND_TURNON, + LS_DEVICE_TYPE_UID, + OU_DEVICE_TYPE_UID, +) -from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity +from homeassistant.components.switch import ( + SwitchDeviceClass, + 
SwitchEntity, + SwitchEntityDescription, +) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -20,31 +29,62 @@ from .entity import IottyEntity _LOGGER = logging.getLogger(__name__) +ENTITIES: dict[str, SwitchEntityDescription] = { + LS_DEVICE_TYPE_UID: SwitchEntityDescription( + key="light", + name=None, + device_class=SwitchDeviceClass.SWITCH, + ), + OU_DEVICE_TYPE_UID: SwitchEntityDescription( + key="outlet", + name=None, + device_class=SwitchDeviceClass.OUTLET, + ), +} + async def async_setup_entry( hass: HomeAssistant, config_entry: IottyConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: - """Activate the iotty LightSwitch component.""" + """Activate the iotty Switch component.""" _LOGGER.debug("Setup SWITCH entry id is %s", config_entry.entry_id) coordinator = config_entry.runtime_data.coordinator - entities = [ - IottyLightSwitch( - coordinator=coordinator, iotty_cloud=coordinator.iotty, iotty_device=d + lightswitch_entities = [ + IottySwitch( + coordinator=coordinator, + iotty_cloud=coordinator.iotty, + iotty_device=d, + entity_description=ENTITIES[LS_DEVICE_TYPE_UID], ) for d in coordinator.data.devices if d.device_type == LS_DEVICE_TYPE_UID if (isinstance(d, LightSwitch)) ] - _LOGGER.debug("Found %d LightSwitches", len(entities)) + _LOGGER.debug("Found %d LightSwitches", len(lightswitch_entities)) + + outlet_entities = [ + IottySwitch( + coordinator=coordinator, + iotty_cloud=coordinator.iotty, + iotty_device=d, + entity_description=ENTITIES[OU_DEVICE_TYPE_UID], + ) + for d in coordinator.data.devices + if d.device_type == OU_DEVICE_TYPE_UID + if (isinstance(d, Outlet)) + ] + _LOGGER.debug("Found %d Outlets", len(outlet_entities)) + + entities = lightswitch_entities + outlet_entities async_add_entities(entities) known_devices: set = config_entry.runtime_data.known_devices for known_device in coordinator.data.devices: - if known_device.device_type == LS_DEVICE_TYPE_UID: + if known_device.device_type in {LS_DEVICE_TYPE_UID, OU_DEVICE_TYPE_UID}: known_devices.add(known_device) @callback @@ -59,21 +99,37 @@ async def async_setup_entry( # Add entities for devices which we've not yet seen for device in devices: - if ( - any(d.device_id == device.device_id for d in known_devices) - or device.device_type != LS_DEVICE_TYPE_UID + if any(d.device_id == device.device_id for d in known_devices) or ( + device.device_type not in {LS_DEVICE_TYPE_UID, OU_DEVICE_TYPE_UID} ): continue - iotty_entity = IottyLightSwitch( - coordinator=coordinator, - iotty_cloud=coordinator.iotty, - iotty_device=LightSwitch( + iotty_entity: SwitchEntity + iotty_device: LightSwitch | Outlet + if device.device_type == LS_DEVICE_TYPE_UID: + if TYPE_CHECKING: + assert isinstance(device, LightSwitch) + iotty_device = LightSwitch( device.device_id, device.serial_number, device.device_type, device.device_name, - ), + ) + else: + if TYPE_CHECKING: + assert isinstance(device, Outlet) + iotty_device = Outlet( + device.device_id, + device.serial_number, + device.device_type, + device.device_name, + ) + + iotty_entity = IottySwitch( + coordinator=coordinator, + iotty_cloud=coordinator.iotty, + iotty_device=iotty_device, + entity_description=ENTITIES[device.device_type], ) entities.extend([iotty_entity]) @@ -85,24 +141,27 @@ async def async_setup_entry( coordinator.async_add_listener(async_update_data) -class IottyLightSwitch(IottyEntity, SwitchEntity): - """Haas entity class for iotty LightSwitch.""" +class IottySwitch(IottyEntity, 
SwitchEntity): + """Haas entity class for iotty switch.""" - _attr_device_class = SwitchDeviceClass.SWITCH - _iotty_device: LightSwitch + _attr_device_class: SwitchDeviceClass | None + _iotty_device: LightSwitch | Outlet def __init__( self, coordinator: IottyDataUpdateCoordinator, iotty_cloud: IottyProxy, - iotty_device: LightSwitch, + iotty_device: LightSwitch | Outlet, + entity_description: SwitchEntityDescription, ) -> None: - """Initialize the LightSwitch device.""" + """Initialize the Switch device.""" super().__init__(coordinator, iotty_cloud, iotty_device) + self.entity_description = entity_description + self._attr_device_class = entity_description.device_class @property def is_on(self) -> bool: - """Return true if the LightSwitch is on.""" + """Return true if the Switch is on.""" _LOGGER.debug( "Retrieve device status for %s ? %s", self._iotty_device.device_id, @@ -111,30 +170,25 @@ class IottyLightSwitch(IottyEntity, SwitchEntity): return self._iotty_device.is_on async def async_turn_on(self, **kwargs: Any) -> None: - """Turn the LightSwitch on.""" + """Turn the Switch on.""" _LOGGER.debug("[%s] Turning on", self._iotty_device.device_id) - await self._iotty_cloud.command( - self._iotty_device.device_id, self._iotty_device.cmd_turn_on() - ) + await self._iotty_cloud.command(self._iotty_device.device_id, COMMAND_TURNON) await self.coordinator.async_request_refresh() async def async_turn_off(self, **kwargs: Any) -> None: - """Turn the LightSwitch off.""" + """Turn the Switch off.""" _LOGGER.debug("[%s] Turning off", self._iotty_device.device_id) - await self._iotty_cloud.command( - self._iotty_device.device_id, self._iotty_device.cmd_turn_off() - ) + await self._iotty_cloud.command(self._iotty_device.device_id, COMMAND_TURNOFF) await self.coordinator.async_request_refresh() @callback def _handle_coordinator_update(self) -> None: """Handle updated data from the coordinator.""" - device: Device = next( + device: LightSwitch | Outlet = next( # type: ignore[assignment] device for device in self.coordinator.data.devices if device.device_id == self._iotty_device.device_id ) - if isinstance(device, LightSwitch): - self._iotty_device.is_on = device.is_on + self._iotty_device.is_on = device.is_on self.async_write_ha_state() diff --git a/tests/components/iotty/conftest.py b/tests/components/iotty/conftest.py index 1935a069cca..51a23bf18c7 100644 --- a/tests/components/iotty/conftest.py +++ b/tests/components/iotty/conftest.py @@ -6,10 +6,12 @@ from unittest.mock import AsyncMock, MagicMock, patch from aiohttp import ClientSession from iottycloud.device import Device from iottycloud.lightswitch import LightSwitch +from iottycloud.outlet import Outlet from iottycloud.shutter import Shutter from iottycloud.verbs import ( LS_DEVICE_TYPE_UID, OPEN_PERCENTAGE, + OU_DEVICE_TYPE_UID, RESULT, SH_DEVICE_TYPE_UID, STATUS, @@ -73,6 +75,22 @@ test_sh_one_added = [ sh_2, ] +ou_0 = Outlet("TestOU", "TEST_SERIAL_OU_0", OU_DEVICE_TYPE_UID, "[TEST] Outlet 0") + +ou_1 = Outlet("TestOU1", "TEST_SERIAL_OU_1", OU_DEVICE_TYPE_UID, "[TEST] Outlet 1") + +ou_2 = Outlet("TestOU2", "TEST_SERIAL_OU_2", OU_DEVICE_TYPE_UID, "[TEST] Outlet 2") + +test_ou = [ou_0, ou_1] + +test_ou_one_removed = [ou_0] + +test_ou_one_added = [ + ou_0, + ou_1, + ou_2, +] + @pytest.fixture async def local_oauth_impl(hass: HomeAssistant): @@ -175,6 +193,16 @@ def mock_get_devices_twolightswitches() -> Generator[AsyncMock]: yield mock_fn +@pytest.fixture +def mock_get_devices_two_outlets() -> Generator[AsyncMock]: + """Mock for get_devices, 
returning two outlets.""" + + with patch( + "iottycloud.cloudapi.CloudApi.get_devices", return_value=test_ou + ) as mock_fn: + yield mock_fn + + @pytest.fixture def mock_get_devices_twoshutters() -> Generator[AsyncMock]: """Mock for get_devices, returning two shutters.""" diff --git a/tests/components/iotty/snapshots/test_switch.ambr b/tests/components/iotty/snapshots/test_switch.ambr index 8ec22ed162a..c6e8764cf37 100644 --- a/tests/components/iotty/snapshots/test_switch.ambr +++ b/tests/components/iotty/snapshots/test_switch.ambr @@ -120,6 +120,19 @@ 'switch.test_light_switch_2_test_serial_2', ]) # --- +# name: test_outlet_insertion_ok + list([ + 'switch.test_outlet_0_test_serial_ou_0', + 'switch.test_outlet_1_test_serial_ou_1', + ]) +# --- +# name: test_outlet_insertion_ok.1 + list([ + 'switch.test_outlet_0_test_serial_ou_0', + 'switch.test_outlet_1_test_serial_ou_1', + 'switch.test_outlet_2_test_serial_ou_2', + ]) +# --- # name: test_setup_entry_ok_nodevices list([ ]) diff --git a/tests/components/iotty/test_switch.py b/tests/components/iotty/test_switch.py index 235a897c305..069fa665cac 100644 --- a/tests/components/iotty/test_switch.py +++ b/tests/components/iotty/test_switch.py @@ -20,12 +20,52 @@ from homeassistant.helpers import ( entity_registry as er, ) -from .conftest import test_ls_one_added, test_ls_one_removed +from .conftest import test_ls_one_added, test_ls_one_removed, test_ou_one_added from tests.common import MockConfigEntry, async_fire_time_changed -async def test_turn_on_ok( +async def check_command_ok( + entity_id: str, + initial_status: str, + final_status: str, + command: str, + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_status, + mock_command_fn, +) -> None: + """Issue a command.""" + + mock_config_entry.add_to_hass(hass) + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert (state := hass.states.get(entity_id)) + assert state.state == initial_status + + mock_get_status.return_value = {RESULT: {STATUS: final_status}} + + await hass.services.async_call( + SWITCH_DOMAIN, + command, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + await hass.async_block_till_done() + mock_command_fn.assert_called_once() + + assert (state := hass.states.get(entity_id)) + assert state.state == final_status + + +async def test_turn_on_light_ok( hass: HomeAssistant, mock_config_entry: MockConfigEntry, local_oauth_impl: ClientSession, @@ -37,34 +77,45 @@ async def test_turn_on_ok( entity_id = "switch.test_light_switch_0_test_serial_0" - mock_config_entry.add_to_hass(hass) - - config_entry_oauth2_flow.async_register_implementation( - hass, DOMAIN, local_oauth_impl + await check_command_ok( + entity_id=entity_id, + initial_status=STATUS_OFF, + final_status=STATUS_ON, + command=SERVICE_TURN_ON, + hass=hass, + mock_config_entry=mock_config_entry, + local_oauth_impl=local_oauth_impl, + mock_get_status=mock_get_status_filled_off, + mock_command_fn=mock_command_fn, ) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - assert (state := hass.states.get(entity_id)) - assert state.state == STATUS_OFF +async def test_turn_on_outlet_ok( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_devices_two_outlets, + mock_get_status_filled_off, + mock_command_fn, +) -> None: + """Issue a turnon command.""" - mock_get_status_filled_off.return_value 
= {RESULT: {STATUS: STATUS_ON}} + entity_id = "switch.test_outlet_0_test_serial_ou_0" - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, + await check_command_ok( + entity_id=entity_id, + initial_status=STATUS_OFF, + final_status=STATUS_ON, + command=SERVICE_TURN_ON, + hass=hass, + mock_config_entry=mock_config_entry, + local_oauth_impl=local_oauth_impl, + mock_get_status=mock_get_status_filled_off, + mock_command_fn=mock_command_fn, ) - await hass.async_block_till_done() - mock_command_fn.assert_called_once() - assert (state := hass.states.get(entity_id)) - assert state.state == STATUS_ON - - -async def test_turn_off_ok( +async def test_turn_off_light_ok( hass: HomeAssistant, mock_config_entry: MockConfigEntry, local_oauth_impl: ClientSession, @@ -76,32 +127,43 @@ async def test_turn_off_ok( entity_id = "switch.test_light_switch_0_test_serial_0" - mock_config_entry.add_to_hass(hass) - - config_entry_oauth2_flow.async_register_implementation( - hass, DOMAIN, local_oauth_impl + await check_command_ok( + entity_id=entity_id, + initial_status=STATUS_ON, + final_status=STATUS_OFF, + command=SERVICE_TURN_OFF, + hass=hass, + mock_config_entry=mock_config_entry, + local_oauth_impl=local_oauth_impl, + mock_get_status=mock_get_status_filled, + mock_command_fn=mock_command_fn, ) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - assert (state := hass.states.get(entity_id)) - assert state.state == STATUS_ON +async def test_turn_off_outlet_ok( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_devices_two_outlets, + mock_get_status_filled, + mock_command_fn, +) -> None: + """Issue a turnoff command.""" - mock_get_status_filled.return_value = {RESULT: {STATUS: STATUS_OFF}} + entity_id = "switch.test_outlet_0_test_serial_ou_0" - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, + await check_command_ok( + entity_id=entity_id, + initial_status=STATUS_ON, + final_status=STATUS_OFF, + command=SERVICE_TURN_OFF, + hass=hass, + mock_config_entry=mock_config_entry, + local_oauth_impl=local_oauth_impl, + mock_get_status=mock_get_status_filled, + mock_command_fn=mock_command_fn, ) - await hass.async_block_till_done() - mock_command_fn.assert_called_once() - - assert (state := hass.states.get(entity_id)) - assert state.state == STATUS_OFF - async def test_setup_entry_ok_nodevices( hass: HomeAssistant, @@ -229,6 +291,40 @@ async def test_devices_insertion_ok( assert hass.states.async_entity_ids() == snapshot +async def test_outlet_insertion_ok( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_devices_two_outlets, + mock_get_status_filled, + snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, +) -> None: + """Test iotty switch insertion.""" + + mock_config_entry.add_to_hass(hass) + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + + # Should have two devices + assert hass.states.async_entity_ids_count() == 2 + assert hass.states.async_entity_ids() == snapshot + + mock_get_devices_two_outlets.return_value = test_ou_one_added + + freezer.tick(UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Should have three devices + assert hass.states.async_entity_ids_count() == 3 + assert 
hass.states.async_entity_ids() == snapshot + + async def test_api_not_ok_entities_stay_the_same_as_before( hass: HomeAssistant, mock_config_entry: MockConfigEntry, From 5afb9a5053038cfc02832b55f6e3456e89591037 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 20 Dec 2024 15:43:46 +0100 Subject: [PATCH 0919/1198] Validate password before restoring backup (#133647) * Validate password before restoring backup * Raise specific error when password is incorrect --- homeassistant/components/backup/manager.py | 12 +++- homeassistant/components/backup/util.py | 37 ++++++++++- homeassistant/components/backup/websocket.py | 26 ++++---- .../backup/snapshots/test_websocket.ambr | 11 ++++ tests/components/backup/test_manager.py | 63 ++++++++++++++++++- tests/components/backup/test_util.py | 48 +++++++++++++- tests/components/backup/test_websocket.py | 40 +++++++++++- 7 files changed, 220 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index 4a0b8553f1c..a27c1cc7170 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -48,7 +48,11 @@ from .const import ( ) from .models import AgentBackup, Folder from .store import BackupStore -from .util import make_backup_dir, read_backup +from .util import make_backup_dir, read_backup, validate_password + + +class IncorrectPasswordError(HomeAssistantError): + """Raised when the password is incorrect.""" @dataclass(frozen=True, kw_only=True, slots=True) @@ -1269,6 +1273,12 @@ class CoreBackupReaderWriter(BackupReaderWriter): remove_after_restore = True + password_valid = await self._hass.async_add_executor_job( + validate_password, path, password + ) + if not password_valid: + raise IncorrectPasswordError("The password provided is incorrect.") + def _write_restore_file() -> None: """Write the restore file.""" Path(self._hass.config.path(RESTORE_BACKUP_FILE)).write_text( diff --git a/homeassistant/components/backup/util.py b/homeassistant/components/backup/util.py index 0cedc07443a..930625c52ca 100644 --- a/homeassistant/components/backup/util.py +++ b/homeassistant/components/backup/util.py @@ -9,11 +9,13 @@ import tarfile from typing import cast import aiohttp +from securetar import SecureTarFile +from homeassistant.backup_restore import password_to_key from homeassistant.core import HomeAssistant from homeassistant.util.json import JsonObjectType, json_loads_object -from .const import BUF_SIZE +from .const import BUF_SIZE, LOGGER from .models import AddonInfo, AgentBackup, Folder @@ -71,6 +73,39 @@ def read_backup(backup_path: Path) -> AgentBackup: ) +def validate_password(path: Path, password: str | None) -> bool: + """Validate the password.""" + with tarfile.open(path, "r:", bufsize=BUF_SIZE) as backup_file: + compressed = False + ha_tar_name = "homeassistant.tar" + try: + ha_tar = backup_file.extractfile(ha_tar_name) + except KeyError: + compressed = True + ha_tar_name = "homeassistant.tar.gz" + try: + ha_tar = backup_file.extractfile(ha_tar_name) + except KeyError: + LOGGER.error("No homeassistant.tar or homeassistant.tar.gz found") + return False + try: + with SecureTarFile( + path, # Not used + gzip=compressed, + key=password_to_key(password) if password is not None else None, + mode="r", + fileobj=ha_tar, + ): + # If we can read the tar file, the password is correct + return True + except tarfile.ReadError: + LOGGER.debug("Invalid password") + return False + except Exception: # noqa: BLE001 + 
LOGGER.exception("Unexpected error validating password") + return False + + async def receive_file( hass: HomeAssistant, contents: aiohttp.BodyPartReader, path: Path ) -> None: diff --git a/homeassistant/components/backup/websocket.py b/homeassistant/components/backup/websocket.py index 718ffc3ae44..0139b7fdb77 100644 --- a/homeassistant/components/backup/websocket.py +++ b/homeassistant/components/backup/websocket.py @@ -9,7 +9,7 @@ from homeassistant.core import HomeAssistant, callback from .config import ScheduleState from .const import DATA_MANAGER, LOGGER -from .manager import ManagerStateEvent +from .manager import IncorrectPasswordError, ManagerStateEvent from .models import Folder @@ -131,16 +131,20 @@ async def handle_restore( msg: dict[str, Any], ) -> None: """Restore a backup.""" - await hass.data[DATA_MANAGER].async_restore_backup( - msg["backup_id"], - agent_id=msg["agent_id"], - password=msg.get("password"), - restore_addons=msg.get("restore_addons"), - restore_database=msg["restore_database"], - restore_folders=msg.get("restore_folders"), - restore_homeassistant=msg["restore_homeassistant"], - ) - connection.send_result(msg["id"]) + try: + await hass.data[DATA_MANAGER].async_restore_backup( + msg["backup_id"], + agent_id=msg["agent_id"], + password=msg.get("password"), + restore_addons=msg.get("restore_addons"), + restore_database=msg["restore_database"], + restore_folders=msg.get("restore_folders"), + restore_homeassistant=msg["restore_homeassistant"], + ) + except IncorrectPasswordError: + connection.send_error(msg["id"], "password_incorrect", "Incorrect password") + else: + connection.send_result(msg["id"]) @websocket_api.require_admin diff --git a/tests/components/backup/snapshots/test_websocket.ambr b/tests/components/backup/snapshots/test_websocket.ambr index dbad733d83a..4de06861b67 100644 --- a/tests/components/backup/snapshots/test_websocket.ambr +++ b/tests/components/backup/snapshots/test_websocket.ambr @@ -3050,6 +3050,17 @@ # name: test_restore_remote_agent[remote_agents1-backups1].1 1 # --- +# name: test_restore_wrong_password + dict({ + 'error': dict({ + 'code': 'password_incorrect', + 'message': 'Incorrect password', + }), + 'id': 1, + 'success': False, + 'type': 'result', + }) +# --- # name: test_subscribe_event dict({ 'event': dict({ diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index e976ad0c099..1c45c86149b 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -1120,6 +1120,9 @@ async def test_async_trigger_restore( patch("pathlib.Path.open"), patch("pathlib.Path.write_text") as mocked_write_text, patch("homeassistant.core.ServiceRegistry.async_call") as mocked_service_call, + patch( + "homeassistant.components.backup.manager.validate_password" + ) as validate_password_mock, patch.object(BackupAgentTest, "async_download_backup") as download_mock, ): download_mock.return_value.__aiter__.return_value = iter((b"backup data",)) @@ -1132,19 +1135,72 @@ async def test_async_trigger_restore( restore_folders=None, restore_homeassistant=restore_homeassistant, ) + backup_path = f"{hass.config.path()}/{dir}/abc123.tar" expected_restore_file = json.dumps( { - "path": f"{hass.config.path()}/{dir}/abc123.tar", + "path": backup_path, "password": password, "remove_after_restore": agent_id != LOCAL_AGENT_ID, "restore_database": restore_database, "restore_homeassistant": restore_homeassistant, } ) + validate_password_mock.assert_called_once_with(Path(backup_path), password) 
assert mocked_write_text.call_args[0][0] == expected_restore_file assert mocked_service_call.called +async def test_async_trigger_restore_wrong_password(hass: HomeAssistant) -> None: + """Test trigger restore.""" + password = "hunter2" + manager = BackupManager(hass, CoreBackupReaderWriter(hass)) + hass.data[DATA_MANAGER] = manager + + await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform) + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock( + return_value=[BackupAgentTest("remote", backups=[TEST_BACKUP_ABC123])] + ), + spec_set=BackupAgentPlatformProtocol, + ), + ) + await manager.load_platforms() + + local_agent = manager.backup_agents[LOCAL_AGENT_ID] + local_agent._backups = {TEST_BACKUP_ABC123.backup_id: TEST_BACKUP_ABC123} + local_agent._loaded_backups = True + + with ( + patch("pathlib.Path.exists", return_value=True), + patch("pathlib.Path.write_text") as mocked_write_text, + patch("homeassistant.core.ServiceRegistry.async_call") as mocked_service_call, + patch( + "homeassistant.components.backup.manager.validate_password" + ) as validate_password_mock, + ): + validate_password_mock.return_value = False + with pytest.raises( + HomeAssistantError, match="The password provided is incorrect." + ): + await manager.async_restore_backup( + TEST_BACKUP_ABC123.backup_id, + agent_id=LOCAL_AGENT_ID, + password=password, + restore_addons=None, + restore_database=True, + restore_folders=None, + restore_homeassistant=True, + ) + + backup_path = f"{hass.config.path()}/backups/abc123.tar" + validate_password_mock.assert_called_once_with(Path(backup_path), password) + mocked_write_text.assert_not_called() + mocked_service_call.assert_not_called() + + @pytest.mark.parametrize( ("parameters", "expected_error"), [ @@ -1191,6 +1247,11 @@ async def test_async_trigger_restore_wrong_parameters( with ( patch("pathlib.Path.exists", return_value=True), + patch("pathlib.Path.write_text") as mocked_write_text, + patch("homeassistant.core.ServiceRegistry.async_call") as mocked_service_call, pytest.raises(HomeAssistantError, match=expected_error), ): await manager.async_restore_backup(**(default_parameters | parameters)) + + mocked_write_text.assert_not_called() + mocked_service_call.assert_not_called() diff --git a/tests/components/backup/test_util.py b/tests/components/backup/test_util.py index 888029f2e35..60cfc77b1aa 100644 --- a/tests/components/backup/test_util.py +++ b/tests/components/backup/test_util.py @@ -2,12 +2,13 @@ from __future__ import annotations +import tarfile from unittest.mock import Mock, patch import pytest from homeassistant.components.backup import AddonInfo, AgentBackup, Folder -from homeassistant.components.backup.util import read_backup +from homeassistant.components.backup.util import read_backup, validate_password @pytest.mark.parametrize( @@ -83,6 +84,49 @@ def test_read_backup(backup_json_content: bytes, expected_backup: AgentBackup) - mock_path.stat.return_value.st_size = 1234 with patch("homeassistant.components.backup.util.tarfile.open") as mock_open_tar: - mock_open_tar.return_value.__enter__.return_value.extractfile().read.return_value = backup_json_content + mock_open_tar.return_value.__enter__.return_value.extractfile.return_value.read.return_value = backup_json_content backup = read_backup(mock_path) assert backup == expected_backup + + +@pytest.mark.parametrize("password", [None, "hunter2"]) +def test_validate_password(password: str | None) -> None: + """Test validating a password.""" + 
mock_path = Mock() + + with ( + patch("homeassistant.components.backup.util.tarfile.open"), + patch("homeassistant.components.backup.util.SecureTarFile"), + ): + assert validate_password(mock_path, password) is True + + +@pytest.mark.parametrize("password", [None, "hunter2"]) +@pytest.mark.parametrize("secure_tar_side_effect", [tarfile.ReadError, Exception]) +def test_validate_password_wrong_password( + password: str | None, secure_tar_side_effect: Exception +) -> None: + """Test validating a password.""" + mock_path = Mock() + + with ( + patch("homeassistant.components.backup.util.tarfile.open"), + patch( + "homeassistant.components.backup.util.SecureTarFile", + ) as mock_secure_tar, + ): + mock_secure_tar.return_value.__enter__.side_effect = secure_tar_side_effect + assert validate_password(mock_path, password) is False + + +def test_validate_password_no_homeassistant() -> None: + """Test validating a password.""" + mock_path = Mock() + + with ( + patch("homeassistant.components.backup.util.tarfile.open") as mock_open_tar, + ): + mock_open_tar.return_value.__enter__.return_value.extractfile.side_effect = ( + KeyError + ) + assert validate_password(mock_path, "hunter2") is False diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index c75fb978082..b407241be54 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -571,6 +571,7 @@ async def test_restore_local_agent( with ( patch("pathlib.Path.exists", return_value=True), patch("pathlib.Path.write_text"), + patch("homeassistant.components.backup.manager.validate_password"), ): await client.send_json_auto_id( { @@ -606,7 +607,11 @@ async def test_restore_remote_agent( client = await hass_ws_client(hass) await hass.async_block_till_done() - with patch("pathlib.Path.write_text"), patch("pathlib.Path.open"): + with ( + patch("pathlib.Path.write_text"), + patch("pathlib.Path.open"), + patch("homeassistant.components.backup.manager.validate_password"), + ): await client.send_json_auto_id( { "type": "backup/restore", @@ -618,6 +623,39 @@ async def test_restore_remote_agent( assert len(restart_calls) == snapshot +async def test_restore_wrong_password( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test calling the restore command.""" + await setup_backup_integration( + hass, with_hassio=False, backups={LOCAL_AGENT_ID: [TEST_BACKUP_ABC123]} + ) + restart_calls = async_mock_service(hass, "homeassistant", "restart") + + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + with ( + patch("pathlib.Path.exists", return_value=True), + patch("pathlib.Path.write_text"), + patch( + "homeassistant.components.backup.manager.validate_password", + return_value=False, + ), + ): + await client.send_json_auto_id( + { + "type": "backup/restore", + "backup_id": "abc123", + "agent_id": "backup.local", + } + ) + assert await client.receive_json() == snapshot + assert len(restart_calls) == 0 + + @pytest.mark.parametrize( "access_token_fixture_name", ["hass_access_token", "hass_supervisor_access_token"], From 87f5a7057edb93098030e271b6097c20a775417b Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Fri, 20 Dec 2024 16:00:44 +0100 Subject: [PATCH 0920/1198] Fix target temperature for AtlanticElectricalTowelDryer in Overkiz (#133657) --- .../climate/atlantic_electrical_towel_dryer.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git 
a/homeassistant/components/overkiz/climate/atlantic_electrical_towel_dryer.py b/homeassistant/components/overkiz/climate/atlantic_electrical_towel_dryer.py index 92bd6ceae82..0b5ba3ffcc7 100644 --- a/homeassistant/components/overkiz/climate/atlantic_electrical_towel_dryer.py +++ b/homeassistant/components/overkiz/climate/atlantic_electrical_towel_dryer.py @@ -84,12 +84,15 @@ class AtlanticElectricalTowelDryer(OverkizEntity, ClimateEntity): ) @property - def target_temperature(self) -> None: - """Return the temperature.""" - if self.hvac_mode == HVACMode.AUTO: - self.executor.select_state(OverkizState.IO_EFFECTIVE_TEMPERATURE_SETPOINT) - else: - self.executor.select_state(OverkizState.CORE_TARGET_TEMPERATURE) + def target_temperature(self) -> float | None: + """Return the target temperature.""" + state = ( + OverkizState.IO_EFFECTIVE_TEMPERATURE_SETPOINT + if self.hvac_mode == HVACMode.AUTO + else OverkizState.CORE_TARGET_TEMPERATURE + ) + + return cast(float, self.executor.select_state(state)) @property def current_temperature(self) -> float | None: From ad7a334147213e01e45b5ea15804d75629e3f362 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 20 Dec 2024 16:08:31 +0100 Subject: [PATCH 0921/1198] Add translations to Mealie exceptions (#133648) --- homeassistant/components/mealie/__init__.py | 10 ++++++++-- .../components/mealie/coordinator.py | 20 ++++++++++++------- .../components/mealie/quality_scale.yaml | 2 +- homeassistant/components/mealie/strings.json | 15 ++++++++++++++ 4 files changed, 37 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/mealie/__init__.py b/homeassistant/components/mealie/__init__.py index 443c8fdd991..5e1523b939a 100644 --- a/homeassistant/components/mealie/__init__.py +++ b/homeassistant/components/mealie/__init__.py @@ -52,9 +52,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: MealieConfigEntry) -> bo about = await client.get_about() version = create_version(about.version) except MealieAuthenticationError as error: - raise ConfigEntryAuthFailed from error + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="auth_failed", + ) from error except MealieError as error: - raise ConfigEntryNotReady(error) from error + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="setup_failed", + ) from error if not version.valid: LOGGER.warning( diff --git a/homeassistant/components/mealie/coordinator.py b/homeassistant/components/mealie/coordinator.py index 051586e53c2..7d4f23d706e 100644 --- a/homeassistant/components/mealie/coordinator.py +++ b/homeassistant/components/mealie/coordinator.py @@ -23,7 +23,7 @@ from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed import homeassistant.util.dt as dt_util -from .const import LOGGER +from .const import DOMAIN, LOGGER WEEK = timedelta(days=7) @@ -53,7 +53,7 @@ class MealieDataUpdateCoordinator[_DataT](DataUpdateCoordinator[_DataT]): super().__init__( hass, LOGGER, - name=self._name, + name=f"Mealie {self._name}", update_interval=self._update_interval, ) self.client = client @@ -63,9 +63,15 @@ class MealieDataUpdateCoordinator[_DataT](DataUpdateCoordinator[_DataT]): try: return await self._async_update_internal() except MealieAuthenticationError as error: - raise ConfigEntryAuthFailed from error + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="auth_failed", + ) from error except MealieConnectionError as error: - raise 
UpdateFailed(error) from error + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key=f"update_failed_{self._name}", + ) from error @abstractmethod async def _async_update_internal(self) -> _DataT: @@ -77,7 +83,7 @@ class MealieMealplanCoordinator( ): """Class to manage fetching Mealie data.""" - _name = "MealieMealplan" + _name = "mealplan" _update_interval = timedelta(hours=1) async def _async_update_internal(self) -> dict[MealplanEntryType, list[Mealplan]]: @@ -106,7 +112,7 @@ class MealieShoppingListCoordinator( ): """Class to manage fetching Mealie Shopping list data.""" - _name = "MealieShoppingList" + _name = "shopping_list" _update_interval = timedelta(minutes=5) async def _async_update_internal( @@ -130,7 +136,7 @@ class MealieShoppingListCoordinator( class MealieStatisticsCoordinator(MealieDataUpdateCoordinator[Statistics]): """Class to manage fetching Mealie Statistics data.""" - _name = "MealieStatistics" + _name = "statistics" _update_interval = timedelta(minutes=15) async def _async_update_internal( diff --git a/homeassistant/components/mealie/quality_scale.yaml b/homeassistant/components/mealie/quality_scale.yaml index 9153bf7aadf..c72cde3672d 100644 --- a/homeassistant/components/mealie/quality_scale.yaml +++ b/homeassistant/components/mealie/quality_scale.yaml @@ -61,7 +61,7 @@ rules: comment: | This integration does not have any irrelevant entities. entity-translations: done - exception-translations: todo + exception-translations: done icon-translations: done reconfiguration-flow: done repair-issues: diff --git a/homeassistant/components/mealie/strings.json b/homeassistant/components/mealie/strings.json index de91c507950..e80db7ab3b0 100644 --- a/homeassistant/components/mealie/strings.json +++ b/homeassistant/components/mealie/strings.json @@ -126,6 +126,21 @@ }, "version_error": { "message": "You are running {mealie_version} of Mealie. Minimum required version is {min_version}. Please upgrade Mealie and then retry." + }, + "auth_failed": { + "message": "Authentication failed. Please reauthenticate." + }, + "update_failed_mealplan": { + "message": "Could not fetch mealplan data." + }, + "update_failed_shopping_list": { + "message": "Could not fetch shopping list data." + }, + "update_failed_statistics": { + "message": "Could not fetch statistics data." + }, + "setup_failed": { + "message": "Could not connect to the Mealie instance." 
} }, "services": { From 92195ff77dafdd4e73dffcf3a5e700e2f83feaf3 Mon Sep 17 00:00:00 2001 From: Andre Lengwenus Date: Fri, 20 Dec 2024 17:10:37 +0100 Subject: [PATCH 0922/1198] Bump pypck to 0.8.1 (#133646) Co-authored-by: Robert Resch --- homeassistant/components/lcn/__init__.py | 39 ++++++++++++--------- homeassistant/components/lcn/config_flow.py | 5 ++- homeassistant/components/lcn/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/lcn/test_config_flow.py | 13 +++++-- tests/components/lcn/test_init.py | 31 +++++++++++----- 7 files changed, 63 insertions(+), 31 deletions(-) diff --git a/homeassistant/components/lcn/__init__.py b/homeassistant/components/lcn/__init__.py index 6dc6fb1ecc4..a10d08ad073 100644 --- a/homeassistant/components/lcn/__init__.py +++ b/homeassistant/components/lcn/__init__.py @@ -6,7 +6,14 @@ from functools import partial import logging import pypck -from pypck.connection import PchkConnectionManager +from pypck.connection import ( + PchkAuthenticationError, + PchkConnectionFailedError, + PchkConnectionManager, + PchkConnectionRefusedError, + PchkLcnNotConnectedError, + PchkLicenseError, +) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -20,6 +27,7 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import config_validation as cv, device_registry as dr from homeassistant.helpers.typing import ConfigType @@ -81,24 +89,21 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b settings=settings, connection_id=config_entry.entry_id, ) + try: # establish connection to PCHK server await lcn_connection.async_connect(timeout=15) - except pypck.connection.PchkAuthenticationError: - _LOGGER.warning('Authentication on PCHK "%s" failed', config_entry.title) - return False - except pypck.connection.PchkLicenseError: - _LOGGER.warning( - ( - 'Maximum number of connections on PCHK "%s" was ' - "reached. 
An additional license key is required" - ), - config_entry.title, - ) - return False - except TimeoutError: - _LOGGER.warning('Connection to PCHK "%s" failed', config_entry.title) - return False + except ( + PchkAuthenticationError, + PchkLicenseError, + PchkConnectionRefusedError, + PchkConnectionFailedError, + PchkLcnNotConnectedError, + ) as ex: + await lcn_connection.async_close() + raise ConfigEntryNotReady( + f"Unable to connect to {config_entry.title}: {ex}" + ) from ex _LOGGER.debug('LCN connected to "%s"', config_entry.title) hass.data[DOMAIN][config_entry.entry_id] = { @@ -106,6 +111,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b DEVICE_CONNECTIONS: {}, ADD_ENTITIES_CALLBACKS: {}, } + # Update config_entry with LCN device serials await async_update_config_entry(hass, config_entry) @@ -121,6 +127,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b input_received = partial( async_host_input_received, hass, config_entry, device_registry ) + lcn_connection.register_for_inputs(input_received) return True diff --git a/homeassistant/components/lcn/config_flow.py b/homeassistant/components/lcn/config_flow.py index 008265e62ae..a1be32704f7 100644 --- a/homeassistant/components/lcn/config_flow.py +++ b/homeassistant/components/lcn/config_flow.py @@ -96,7 +96,10 @@ async def validate_connection(data: ConfigType) -> str | None: host_name, ) error = "license_error" - except (TimeoutError, ConnectionRefusedError): + except ( + pypck.connection.PchkConnectionFailedError, + pypck.connection.PchkConnectionRefusedError, + ): _LOGGER.warning('Connection to PCHK "%s" failed', host_name) error = "connection_refused" diff --git a/homeassistant/components/lcn/manifest.json b/homeassistant/components/lcn/manifest.json index 695a35df871..f5eb1654588 100644 --- a/homeassistant/components/lcn/manifest.json +++ b/homeassistant/components/lcn/manifest.json @@ -8,5 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/lcn", "iot_class": "local_push", "loggers": ["pypck"], - "requirements": ["pypck==0.7.24", "lcn-frontend==0.2.2"] + "requirements": ["pypck==0.8.1", "lcn-frontend==0.2.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 06cf4145b48..62bc0528605 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2174,7 +2174,7 @@ pypalazzetti==0.1.15 pypca==0.0.7 # homeassistant.components.lcn -pypck==0.7.24 +pypck==0.8.1 # homeassistant.components.pjlink pypjlink2==1.2.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3b9edd9e7c1..d4917df83b7 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1767,7 +1767,7 @@ pyownet==0.10.0.post1 pypalazzetti==0.1.15 # homeassistant.components.lcn -pypck==0.7.24 +pypck==0.8.1 # homeassistant.components.pjlink pypjlink2==1.2.1 diff --git a/tests/components/lcn/test_config_flow.py b/tests/components/lcn/test_config_flow.py index b7967c247ec..478f2c0949e 100644 --- a/tests/components/lcn/test_config_flow.py +++ b/tests/components/lcn/test_config_flow.py @@ -2,7 +2,12 @@ from unittest.mock import patch -from pypck.connection import PchkAuthenticationError, PchkLicenseError +from pypck.connection import ( + PchkAuthenticationError, + PchkConnectionFailedError, + PchkConnectionRefusedError, + PchkLicenseError, +) import pytest from homeassistant import config_entries, data_entry_flow @@ -98,7 +103,8 @@ async def test_step_user_existing_host( [ (PchkAuthenticationError, {CONF_BASE: "authentication_error"}), 
(PchkLicenseError, {CONF_BASE: "license_error"}), - (TimeoutError, {CONF_BASE: "connection_refused"}), + (PchkConnectionFailedError, {CONF_BASE: "connection_refused"}), + (PchkConnectionRefusedError, {CONF_BASE: "connection_refused"}), ], ) async def test_step_user_error( @@ -149,7 +155,8 @@ async def test_step_reconfigure(hass: HomeAssistant, entry: MockConfigEntry) -> [ (PchkAuthenticationError, {CONF_BASE: "authentication_error"}), (PchkLicenseError, {CONF_BASE: "license_error"}), - (TimeoutError, {CONF_BASE: "connection_refused"}), + (PchkConnectionFailedError, {CONF_BASE: "connection_refused"}), + (PchkConnectionRefusedError, {CONF_BASE: "connection_refused"}), ], ) async def test_step_reconfigure_error( diff --git a/tests/components/lcn/test_init.py b/tests/components/lcn/test_init.py index 2327635e356..bffa91d14ef 100644 --- a/tests/components/lcn/test_init.py +++ b/tests/components/lcn/test_init.py @@ -2,7 +2,13 @@ from unittest.mock import Mock, patch -from pypck.connection import PchkAuthenticationError, PchkLicenseError +from pypck.connection import ( + PchkAuthenticationError, + PchkConnectionFailedError, + PchkConnectionRefusedError, + PchkLcnNotConnectedError, + PchkLicenseError, +) import pytest from homeassistant import config_entries @@ -84,21 +90,30 @@ async def test_async_setup_entry_update( @pytest.mark.parametrize( - "exception", [PchkAuthenticationError, PchkLicenseError, TimeoutError] + "exception", + [ + PchkAuthenticationError, + PchkLicenseError, + PchkConnectionRefusedError, + PchkConnectionFailedError, + PchkLcnNotConnectedError, + ], ) -async def test_async_setup_entry_raises_authentication_error( +async def test_async_setup_entry_fails( hass: HomeAssistant, entry: MockConfigEntry, exception: Exception ) -> None: - """Test that an authentication error is handled properly.""" - with patch( - "homeassistant.components.lcn.PchkConnectionManager.async_connect", - side_effect=exception, + """Test that an error is handled properly.""" + with ( + patch( + "homeassistant.components.lcn.PchkConnectionManager.async_connect", + side_effect=exception, + ), ): entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert entry.state is ConfigEntryState.SETUP_ERROR + assert entry.state is ConfigEntryState.SETUP_RETRY @patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) From 233395c18113ad50084ef1ccc0c92ef414d02e29 Mon Sep 17 00:00:00 2001 From: elmurato <1382097+elmurato@users.noreply.github.com> Date: Fri, 20 Dec 2024 17:58:31 +0100 Subject: [PATCH 0923/1198] Add missing await in Minecraft Server (#133670) --- homeassistant/components/minecraft_server/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/minecraft_server/__init__.py b/homeassistant/components/minecraft_server/__init__.py index 8f016e2de00..f937c304471 100644 --- a/homeassistant/components/minecraft_server/__init__.py +++ b/homeassistant/components/minecraft_server/__init__.py @@ -43,7 +43,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Minecraft Server from a config entry.""" # Workaround to avoid blocking imports from dnspython (https://github.com/rthalley/dnspython/issues/1083) - hass.async_add_executor_job(load_dnspython_rdata_classes) + await hass.async_add_executor_job(load_dnspython_rdata_classes) # Create API instance. 
api = MinecraftServer( From 6ed345f7732889d5a5d4f5dde246c3069592374d Mon Sep 17 00:00:00 2001 From: "Steven B." <51370195+sdb9696@users.noreply.github.com> Date: Fri, 20 Dec 2024 17:20:24 +0000 Subject: [PATCH 0924/1198] Add check for client errors to stream component (#132866) --- homeassistant/components/stream/__init__.py | 111 ++++++++++++++++++++ tests/components/stream/test_init.py | 80 +++++++++++++- 2 files changed, 190 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/stream/__init__.py b/homeassistant/components/stream/__init__.py index 64c520150c2..1471db890d7 100644 --- a/homeassistant/components/stream/__init__.py +++ b/homeassistant/components/stream/__init__.py @@ -20,6 +20,7 @@ from __future__ import annotations import asyncio from collections.abc import Callable, Mapping import copy +from enum import IntEnum import logging import secrets import threading @@ -45,6 +46,7 @@ from .const import ( CONF_EXTRA_PART_WAIT_TIME, CONF_LL_HLS, CONF_PART_DURATION, + CONF_PREFER_TCP, CONF_RTSP_TRANSPORT, CONF_SEGMENT_DURATION, CONF_USE_WALLCLOCK_AS_TIMESTAMPS, @@ -74,6 +76,8 @@ from .diagnostics import Diagnostics from .hls import HlsStreamOutput, async_setup_hls if TYPE_CHECKING: + from av.container import InputContainer, OutputContainer + from homeassistant.components.camera import DynamicStreamSettings __all__ = [ @@ -95,6 +99,113 @@ __all__ = [ _LOGGER = logging.getLogger(__name__) +class StreamClientError(IntEnum): + """Enum for stream client errors.""" + + BadRequest = 400 + Unauthorized = 401 + Forbidden = 403 + NotFound = 404 + Other = 4 + + +class StreamOpenClientError(HomeAssistantError): + """Raised when client error received when trying to open a stream. + + :param stream_client_error: The type of client error + """ + + def __init__( + self, *args: Any, stream_client_error: StreamClientError, **kwargs: Any + ) -> None: + self.stream_client_error = stream_client_error + super().__init__(*args, **kwargs) + + +async def _async_try_open_stream( + hass: HomeAssistant, source: str, pyav_options: dict[str, str] | None = None +) -> InputContainer | OutputContainer: + """Try to open a stream. + + Will raise StreamOpenClientError if an http client error is encountered. + """ + return await hass.loop.run_in_executor(None, _try_open_stream, source, pyav_options) + + +def _try_open_stream( + source: str, pyav_options: dict[str, str] | None = None +) -> InputContainer | OutputContainer: + """Try to open a stream. + + Will raise StreamOpenClientError if an http client error is encountered. 
+ """ + import av # pylint: disable=import-outside-toplevel + + if pyav_options is None: + pyav_options = {} + + default_pyav_options = { + "rtsp_flags": CONF_PREFER_TCP, + "timeout": str(SOURCE_TIMEOUT), + } + + pyav_options = { + **default_pyav_options, + **pyav_options, + } + + try: + container = av.open(source, options=pyav_options, timeout=5) + + except av.HTTPBadRequestError as ex: + raise StreamOpenClientError( + stream_client_error=StreamClientError.BadRequest + ) from ex + + except av.HTTPUnauthorizedError as ex: + raise StreamOpenClientError( + stream_client_error=StreamClientError.Unauthorized + ) from ex + + except av.HTTPForbiddenError as ex: + raise StreamOpenClientError( + stream_client_error=StreamClientError.Forbidden + ) from ex + + except av.HTTPNotFoundError as ex: + raise StreamOpenClientError( + stream_client_error=StreamClientError.NotFound + ) from ex + + except av.HTTPOtherClientError as ex: + raise StreamOpenClientError(stream_client_error=StreamClientError.Other) from ex + + else: + return container + + +async def async_check_stream_client_error( + hass: HomeAssistant, source: str, pyav_options: dict[str, str] | None = None +) -> None: + """Check if a stream can be successfully opened. + + Raise StreamOpenClientError if an http client error is encountered. + """ + await hass.loop.run_in_executor( + None, _check_stream_client_error, source, pyav_options + ) + + +def _check_stream_client_error( + source: str, pyav_options: dict[str, str] | None = None +) -> None: + """Check if a stream can be successfully opened. + + Raise StreamOpenClientError if an http client error is encountered. + """ + _try_open_stream(source, pyav_options).close() + + def redact_credentials(url: str) -> str: """Redact credentials from string data.""" yurl = URL(url) diff --git a/tests/components/stream/test_init.py b/tests/components/stream/test_init.py index 1ae6f9e8931..5f9d305620d 100644 --- a/tests/components/stream/test_init.py +++ b/tests/components/stream/test_init.py @@ -1,11 +1,20 @@ """Test stream init.""" import logging +from unittest.mock import MagicMock, patch import av import pytest -from homeassistant.components.stream import __name__ as stream_name +from homeassistant.components.stream import ( + CONF_PREFER_TCP, + SOURCE_TIMEOUT, + StreamClientError, + StreamOpenClientError, + __name__ as stream_name, + _async_try_open_stream, + async_check_stream_client_error, +) from homeassistant.const import EVENT_LOGGING_CHANGED from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -53,3 +62,72 @@ async def test_log_levels( assert "SHOULD PASS" in caplog.text assert "SHOULD NOT PASS" not in caplog.text + + +async def test_check_open_stream_params(hass: HomeAssistant) -> None: + """Test check open stream params.""" + + container_mock = MagicMock() + source = "rtsp://foobar" + + with patch("av.open", return_value=container_mock) as open_mock: + await async_check_stream_client_error(hass, source) + + options = { + "rtsp_flags": CONF_PREFER_TCP, + "timeout": str(SOURCE_TIMEOUT), + } + open_mock.assert_called_once_with(source, options=options, timeout=5) + container_mock.close.assert_called_once() + + container_mock.reset_mock() + with patch("av.open", return_value=container_mock) as open_mock: + await async_check_stream_client_error(hass, source, {"foo": "bar"}) + + options = { + "rtsp_flags": CONF_PREFER_TCP, + "timeout": str(SOURCE_TIMEOUT), + "foo": "bar", + } + open_mock.assert_called_once_with(source, options=options, timeout=5) + 
container_mock.close.assert_called_once() + + +@pytest.mark.parametrize( + ("error", "enum_result"), + [ + pytest.param( + av.HTTPBadRequestError(400, ""), + StreamClientError.BadRequest, + id="BadRequest", + ), + pytest.param( + av.HTTPUnauthorizedError(401, ""), + StreamClientError.Unauthorized, + id="Unauthorized", + ), + pytest.param( + av.HTTPForbiddenError(403, ""), StreamClientError.Forbidden, id="Forbidden" + ), + pytest.param( + av.HTTPNotFoundError(404, ""), StreamClientError.NotFound, id="NotFound" + ), + pytest.param( + av.HTTPOtherClientError(408, ""), StreamClientError.Other, id="Other" + ), + ], +) +async def test_try_open_stream_error( + hass: HomeAssistant, error: av.HTTPClientError, enum_result: StreamClientError +) -> None: + """Test trying to open a stream.""" + oc_error: StreamOpenClientError | None = None + + with patch("av.open", side_effect=error): + try: + await _async_try_open_stream(hass, "rtsp://foobar") + except StreamOpenClientError as ex: + oc_error = ex + + assert oc_error + assert oc_error.stream_client_error is enum_result From 17f0c2489534ef9bc42513a1d90c921405740deb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ab=C3=ADlio=20Costa?= Date: Fri, 20 Dec 2024 17:24:57 +0000 Subject: [PATCH 0925/1198] Replace tests for Idasen Desk with parameterized test (#133672) --- .../components/idasen_desk/quality_scale.yaml | 1 - .../idasen_desk/test_config_flow.py | 134 ++---------------- 2 files changed, 11 insertions(+), 124 deletions(-) diff --git a/homeassistant/components/idasen_desk/quality_scale.yaml b/homeassistant/components/idasen_desk/quality_scale.yaml index 4af2f489bd3..f91fd16176d 100644 --- a/homeassistant/components/idasen_desk/quality_scale.yaml +++ b/homeassistant/components/idasen_desk/quality_scale.yaml @@ -14,7 +14,6 @@ rules: status: todo comment: | - use mock_desk_api - - merge test_user_step_auth_failed, test_user_step_cannot_connect and test_user_step_unknown_exception. 
config-flow: done dependency-transparency: done docs-actions: diff --git a/tests/components/idasen_desk/test_config_flow.py b/tests/components/idasen_desk/test_config_flow.py index c27cdea58aa..be729545b88 100644 --- a/tests/components/idasen_desk/test_config_flow.py +++ b/tests/components/idasen_desk/test_config_flow.py @@ -89,9 +89,17 @@ async def test_user_step_no_new_devices_found(hass: HomeAssistant) -> None: assert result["reason"] == "no_devices_found" -@pytest.mark.parametrize("exception", [TimeoutError(), BleakError()]) +@pytest.mark.parametrize( + ("exception", "expected_error"), + [ + (TimeoutError, "cannot_connect"), + (BleakError, "cannot_connect"), + (AuthFailedError, "auth_failed"), + (RuntimeError, "unknown"), + ], +) async def test_user_step_cannot_connect( - hass: HomeAssistant, exception: Exception + hass: HomeAssistant, exception: Exception, expected_error: str ) -> None: """Test user step with a cannot connect error.""" with patch( @@ -122,7 +130,7 @@ async def test_user_step_cannot_connect( assert result2["type"] is FlowResultType.FORM assert result2["step_id"] == "user" - assert result2["errors"] == {"base": "cannot_connect"} + assert result2["errors"] == {"base": expected_error} with ( patch("homeassistant.components.idasen_desk.config_flow.Desk.connect"), @@ -149,126 +157,6 @@ async def test_user_step_cannot_connect( assert len(mock_setup_entry.mock_calls) == 1 -async def test_user_step_auth_failed(hass: HomeAssistant) -> None: - """Test user step with an auth failed error.""" - with patch( - "homeassistant.components.idasen_desk.config_flow.async_discovered_service_info", - return_value=[IDASEN_DISCOVERY_INFO], - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - - with ( - patch( - "homeassistant.components.idasen_desk.config_flow.Desk.connect", - side_effect=AuthFailedError, - ), - patch("homeassistant.components.idasen_desk.config_flow.Desk.disconnect"), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_ADDRESS: IDASEN_DISCOVERY_INFO.address, - }, - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "user" - assert result2["errors"] == {"base": "auth_failed"} - - with ( - patch("homeassistant.components.idasen_desk.config_flow.Desk.connect"), - patch("homeassistant.components.idasen_desk.config_flow.Desk.disconnect"), - patch( - "homeassistant.components.idasen_desk.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - { - CONF_ADDRESS: IDASEN_DISCOVERY_INFO.address, - }, - ) - await hass.async_block_till_done() - - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["title"] == IDASEN_DISCOVERY_INFO.name - assert result3["data"] == { - CONF_ADDRESS: IDASEN_DISCOVERY_INFO.address, - } - assert result3["result"].unique_id == IDASEN_DISCOVERY_INFO.address - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_user_step_unknown_exception(hass: HomeAssistant) -> None: - """Test user step with an unknown exception.""" - with patch( - "homeassistant.components.idasen_desk.config_flow.async_discovered_service_info", - return_value=[NOT_IDASEN_DISCOVERY_INFO, IDASEN_DISCOVERY_INFO], - ): - result = await hass.config_entries.flow.async_init( - 
DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - - with ( - patch( - "homeassistant.components.idasen_desk.config_flow.Desk.connect", - side_effect=RuntimeError, - ), - patch( - "homeassistant.components.idasen_desk.config_flow.Desk.disconnect", - ), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_ADDRESS: IDASEN_DISCOVERY_INFO.address, - }, - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "user" - assert result2["errors"] == {"base": "unknown"} - - with ( - patch( - "homeassistant.components.idasen_desk.config_flow.Desk.connect", - ), - patch( - "homeassistant.components.idasen_desk.config_flow.Desk.disconnect", - ), - patch( - "homeassistant.components.idasen_desk.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - { - CONF_ADDRESS: IDASEN_DISCOVERY_INFO.address, - }, - ) - await hass.async_block_till_done() - - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["title"] == IDASEN_DISCOVERY_INFO.name - assert result3["data"] == { - CONF_ADDRESS: IDASEN_DISCOVERY_INFO.address, - } - assert result3["result"].unique_id == IDASEN_DISCOVERY_INFO.address - assert len(mock_setup_entry.mock_calls) == 1 - - async def test_bluetooth_step_success(hass: HomeAssistant) -> None: """Test bluetooth step success path.""" result = await hass.config_entries.flow.async_init( From a23b37114e25d87892f2ccd3b366701dacb548b1 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 20 Dec 2024 18:41:14 +0100 Subject: [PATCH 0926/1198] Improve recorder data migrator tests (#133628) --- .../recorder/test_migration_from_schema_32.py | 93 ++++++++++++++++--- 1 file changed, 80 insertions(+), 13 deletions(-) diff --git a/tests/components/recorder/test_migration_from_schema_32.py b/tests/components/recorder/test_migration_from_schema_32.py index e42cd22e952..3cc654c0fa1 100644 --- a/tests/components/recorder/test_migration_from_schema_32.py +++ b/tests/components/recorder/test_migration_from_schema_32.py @@ -13,6 +13,7 @@ import pytest from sqlalchemy import create_engine, inspect from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import Session +from sqlalchemy.schema import Index from homeassistant.components import recorder from homeassistant.components.recorder import ( @@ -120,9 +121,11 @@ def db_schema_32(): @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.parametrize("enable_migrate_event_context_ids", [True]) +@pytest.mark.parametrize("indices_to_drop", [[], [("events", "ix_events_context_id")]]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_events_context_ids( async_test_recorder: RecorderInstanceGenerator, + indices_to_drop: list[tuple[str, str]], ) -> None: """Test we can migrate old uuid context ids and ulid context ids to binary format.""" importlib.import_module(SCHEMA_MODULE_32) @@ -237,6 +240,13 @@ async def test_migrate_events_context_ids( ] await _async_wait_migration_done(hass) + # Remove index + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + for table, index in indices_to_drop: + with session_scope(hass=hass) as session: + assert get_index_by_name(session, table, index) is not None + 
migration._drop_index(instance.get_session, table, index) + await hass.async_stop() await hass.async_block_till_done() @@ -266,7 +276,13 @@ async def test_migrate_events_context_ids( # Run again with new schema, let migration run async with async_test_home_assistant() as hass: - with freeze_time(now), instrument_migration(hass) as instrumented_migration: + with ( + freeze_time(now), + instrument_migration(hass) as instrumented_migration, + patch( + "sqlalchemy.schema.Index.create", autospec=True, wraps=Index.create + ) as wrapped_idx_create, + ): async with async_test_recorder( hass, wait_recorder=False, wait_recorder_setup=False ) as instance: @@ -297,6 +313,10 @@ async def test_migrate_events_context_ids( await hass.async_stop() await hass.async_block_till_done() + # Check the index we removed was recreated + index_names = [call[1][0].name for call in wrapped_idx_create.mock_calls] + assert index_names == [index for _, index in indices_to_drop] + old_uuid_context_id_event = events_by_type["old_uuid_context_id_event"] assert old_uuid_context_id_event["context_id"] is None assert old_uuid_context_id_event["context_user_id"] is None @@ -482,9 +502,11 @@ async def test_finish_migrate_events_context_ids( @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.parametrize("enable_migrate_state_context_ids", [True]) +@pytest.mark.parametrize("indices_to_drop", [[], [("states", "ix_states_context_id")]]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_states_context_ids( async_test_recorder: RecorderInstanceGenerator, + indices_to_drop: list[tuple[str, str]], ) -> None: """Test we can migrate old uuid context ids and ulid context ids to binary format.""" importlib.import_module(SCHEMA_MODULE_32) @@ -577,6 +599,13 @@ async def test_migrate_states_context_ids( await async_wait_recording_done(hass) await _async_wait_migration_done(hass) + # Remove index + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + for table, index in indices_to_drop: + with session_scope(hass=hass) as session: + assert get_index_by_name(session, table, index) is not None + migration._drop_index(instance.get_session, table, index) + await hass.async_stop() await hass.async_block_till_done() @@ -606,7 +635,12 @@ async def test_migrate_states_context_ids( # Run again with new schema, let migration run async with async_test_home_assistant() as hass: - with instrument_migration(hass) as instrumented_migration: + with ( + instrument_migration(hass) as instrumented_migration, + patch( + "sqlalchemy.schema.Index.create", autospec=True, wraps=Index.create + ) as wrapped_idx_create, + ): async with async_test_recorder( hass, wait_recorder=False, wait_recorder_setup=False ) as instance: @@ -637,6 +671,10 @@ async def test_migrate_states_context_ids( await hass.async_stop() await hass.async_block_till_done() + # Check the index we removed was recreated + index_names = [call[1][0].name for call in wrapped_idx_create.mock_calls] + assert index_names == [index for _, index in indices_to_drop] + old_uuid_context_id = states_by_entity_id["state.old_uuid_context_id"] assert old_uuid_context_id["context_id"] is None assert old_uuid_context_id["context_user_id"] is None @@ -1049,9 +1087,13 @@ async def test_migrate_entity_ids( @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.parametrize("enable_migrate_entity_ids", [True]) +@pytest.mark.parametrize( + "indices_to_drop", [[], [("states", "ix_states_entity_id_last_updated_ts")]] +) 
 @pytest.mark.usefixtures("hass_storage")  # Prevent test hass from writing to storage
 async def test_post_migrate_entity_ids(
     async_test_recorder: RecorderInstanceGenerator,
+    indices_to_drop: list[tuple[str, str]],
 ) -> None:
     """Test we can migrate entity_ids to the StatesMeta table."""
     importlib.import_module(SCHEMA_MODULE_32)
@@ -1096,6 +1138,13 @@ async def test_post_migrate_entity_ids(
         await async_wait_recording_done(hass)
         await _async_wait_migration_done(hass)
 
+        # Remove index
+        instance.recorder_and_worker_thread_ids.add(threading.get_ident())
+        for table, index in indices_to_drop:
+            with session_scope(hass=hass) as session:
+                assert get_index_by_name(session, table, index) is not None
+            migration._drop_index(instance.get_session, table, index)
+
         await hass.async_stop()
         await hass.async_block_till_done()
 
@@ -1109,20 +1158,38 @@ async def test_post_migrate_entity_ids(
         return {state.state: state.entity_id for state in states}
 
     # Run again with new schema, let migration run
-    async with (
-        async_test_home_assistant() as hass,
-        async_test_recorder(hass) as instance,
-    ):
-        instance.recorder_and_worker_thread_ids.add(threading.get_ident())
+    with patch(
+        "sqlalchemy.schema.Index.create", autospec=True, wraps=Index.create
+    ) as wrapped_idx_create:
+        async with (
+            async_test_home_assistant() as hass,
+            async_test_recorder(hass) as instance,
+        ):
+            instance.recorder_and_worker_thread_ids.add(threading.get_ident())
 
-        await hass.async_block_till_done()
-        await async_wait_recording_done(hass)
-        await async_wait_recording_done(hass)
+            await hass.async_block_till_done()
+            await async_wait_recording_done(hass)
+            await async_wait_recording_done(hass)
 
-        states_by_state = await instance.async_add_executor_job(_fetch_migrated_states)
+            states_by_state = await instance.async_add_executor_job(
+                _fetch_migrated_states
+            )
 
-        await hass.async_stop()
-        await hass.async_block_till_done()
+            # Check the index which will be removed by the migrator no longer exists
+            with session_scope(hass=hass) as session:
+                assert (
+                    get_index_by_name(
+                        session, "states", "ix_states_entity_id_last_updated_ts"
+                    )
+                    is None
+                )
+
+            await hass.async_stop()
+            await hass.async_block_till_done()
+
+    # Check the index we removed was recreated
+    index_names = [call[1][0].name for call in wrapped_idx_create.mock_calls]
+    assert index_names == [index for _, index in indices_to_drop]
 
     assert states_by_state["one_1"] is None
     assert states_by_state["two_2"] is None
From c780933fa06db399c4b8ecfa22359cb44af9c2e9 Mon Sep 17 00:00:00 2001
From: Norbert Rittel
Date: Fri, 20 Dec 2024 19:12:48 +0100
Subject: [PATCH 0927/1198] Reword invoke_pin action to avoid misunderstanding with "PIN" (#133665)

* Reword invoke_pin action to avoid misunderstanding with "PIN"

The previous mismatch between "PIN" and "pin" in the invoke_pin caused
wrong translations as "PIN" was interpreted as the abbreviation for
"Personal Identification Number".
This commit fixes this by explaining "pin" as related to "pinning"
content on the device.
In addition the verb "invoke" is replaced by "play" which every user and
translator will understand immediately.
Along with those changes this commit reverts my previous change to "PIN"
in all strings that made things worse.

* Use "Pin ID" for the field variable --- homeassistant/components/openhome/strings.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/openhome/strings.json b/homeassistant/components/openhome/strings.json index a757a2cb31c..f4b15e52e7c 100644 --- a/homeassistant/components/openhome/strings.json +++ b/homeassistant/components/openhome/strings.json @@ -1,12 +1,12 @@ { "services": { "invoke_pin": { - "name": "Invoke PIN", - "description": "Invokes a PIN on the specified device.", + "name": "Play pin", + "description": "Starts playing content pinned on the specified device.", "fields": { "pin": { - "name": "PIN", - "description": "Which PIN to invoke." + "name": "Pin ID", + "description": "ID of the pinned content." } } } From 2639bdbefdbb264e36ddca09c4d4915587a529d2 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 20 Dec 2024 20:21:37 +0100 Subject: [PATCH 0928/1198] Add parallel updates to Mealie (#133660) --- homeassistant/components/mealie/calendar.py | 2 ++ homeassistant/components/mealie/quality_scale.yaml | 2 +- homeassistant/components/mealie/sensor.py | 2 ++ homeassistant/components/mealie/todo.py | 1 + 4 files changed, 6 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/mealie/calendar.py b/homeassistant/components/mealie/calendar.py index 4c11c639c79..729bc16c6fd 100644 --- a/homeassistant/components/mealie/calendar.py +++ b/homeassistant/components/mealie/calendar.py @@ -13,6 +13,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .coordinator import MealieConfigEntry, MealieMealplanCoordinator from .entity import MealieEntity +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/mealie/quality_scale.yaml b/homeassistant/components/mealie/quality_scale.yaml index c72cde3672d..6a77152f615 100644 --- a/homeassistant/components/mealie/quality_scale.yaml +++ b/homeassistant/components/mealie/quality_scale.yaml @@ -33,7 +33,7 @@ rules: entity-unavailable: done integration-owner: done log-when-unavailable: done - parallel-updates: todo + parallel-updates: done reauthentication-flow: done test-coverage: status: todo diff --git a/homeassistant/components/mealie/sensor.py b/homeassistant/components/mealie/sensor.py index 141a28ecdab..e4b1655a9d1 100644 --- a/homeassistant/components/mealie/sensor.py +++ b/homeassistant/components/mealie/sensor.py @@ -17,6 +17,8 @@ from homeassistant.helpers.typing import StateType from .coordinator import MealieConfigEntry, MealieStatisticsCoordinator from .entity import MealieEntity +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class MealieStatisticsSensorEntityDescription(SensorEntityDescription): diff --git a/homeassistant/components/mealie/todo.py b/homeassistant/components/mealie/todo.py index 508b6aeb5e2..121e0bcbf10 100644 --- a/homeassistant/components/mealie/todo.py +++ b/homeassistant/components/mealie/todo.py @@ -20,6 +20,7 @@ from .const import DOMAIN from .coordinator import MealieConfigEntry, MealieShoppingListCoordinator from .entity import MealieEntity +PARALLEL_UPDATES = 0 TODO_STATUS_MAP = { False: TodoItemStatus.NEEDS_ACTION, True: TodoItemStatus.COMPLETED, From 8607ba884cda54c3426b1caa5051287e53de7a0e Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Fri, 20 Dec 2024 13:23:12 -0600 Subject: [PATCH 0929/1198] Bump intents to 2024.12.20 (#133676) --- homeassistant/components/conversation/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- 
requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- script/hassfest/docker/Dockerfile | 2 +- tests/components/conversation/snapshots/test_http.ambr | 3 +++ tests/components/conversation/test_default_agent.py | 8 ++++++++ 7 files changed, 16 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index 41c9a2d2691..a2ddd5f734c 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", "quality_scale": "internal", - "requirements": ["hassil==2.0.5", "home-assistant-intents==2024.12.9"] + "requirements": ["hassil==2.0.5", "home-assistant-intents==2024.12.20"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index bd78ef8e3fb..9473871efdd 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -35,7 +35,7 @@ hass-nabucasa==0.87.0 hassil==2.0.5 home-assistant-bluetooth==1.13.0 home-assistant-frontend==20241127.8 -home-assistant-intents==2024.12.9 +home-assistant-intents==2024.12.20 httpx==0.27.2 ifaddr==0.2.0 Jinja2==3.1.4 diff --git a/requirements_all.txt b/requirements_all.txt index 62bc0528605..d644c4388c4 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1137,7 +1137,7 @@ holidays==0.63 home-assistant-frontend==20241127.8 # homeassistant.components.conversation -home-assistant-intents==2024.12.9 +home-assistant-intents==2024.12.20 # homeassistant.components.home_connect homeconnect==0.8.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d4917df83b7..a4a6f6d16c9 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -966,7 +966,7 @@ holidays==0.63 home-assistant-frontend==20241127.8 # homeassistant.components.conversation -home-assistant-intents==2024.12.9 +home-assistant-intents==2024.12.20 # homeassistant.components.home_connect homeconnect==0.8.0 diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index 369beb538ed..bd2c9d328ac 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -23,7 +23,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.5.8,source=/uv,target=/bin/uv \ -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ -r /usr/src/homeassistant/requirements.txt \ stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.3 \ - PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.0.5 home-assistant-intents==2024.12.9 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 + PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.0.5 home-assistant-intents==2024.12.20 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 LABEL "name"="hassfest" LABEL "maintainer"="Home Assistant " diff --git a/tests/components/conversation/snapshots/test_http.ambr b/tests/components/conversation/snapshots/test_http.ambr index 9cebfd9abd1..ce3247fbbad 100644 --- a/tests/components/conversation/snapshots/test_http.ambr +++ b/tests/components/conversation/snapshots/test_http.ambr @@ -24,6 +24,7 @@ 'fr', 'gl', 'he', + 'hi', 'hr', 'hu', 'id', @@ -35,6 +36,7 @@ 'lt', 'lv', 'ml', + 'mn', 'ms', 'nb', 'nl', @@ -47,6 +49,7 @@ 'sl', 'sr', 'sv', + 'sw', 'te', 'th', 'tr', diff --git a/tests/components/conversation/test_default_agent.py b/tests/components/conversation/test_default_agent.py 
index 8df1647d18c..7e05476a349 100644 --- a/tests/components/conversation/test_default_agent.py +++ b/tests/components/conversation/test_default_agent.py @@ -3056,6 +3056,14 @@ async def test_entities_names_are_not_templates(hass: HomeAssistant) -> None: ("language", "light_name", "on_sentence", "off_sentence"), [ ("en", "test light", "turn on test light", "turn off test light"), + ("de", "Testlicht", "Schalte Testlicht ein", "Schalte Testlicht aus"), + ( + "fr", + "lumière de test", + "Allumer la lumière de test", + "Éteindre la lumière de test", + ), + ("nl", "testlicht", "Zet testlicht aan", "Zet testlicht uit"), ("zh-cn", "卧室灯", "打开卧室灯", "关闭卧室灯"), ("zh-hk", "睡房燈", "打開睡房燈", "關閉睡房燈"), ("zh-tw", "臥室檯燈", "打開臥室檯燈", "關臥室檯燈"), From b29be34f55145fcfd900ff5c1b5915f361b6a92a Mon Sep 17 00:00:00 2001 From: G Johansson Date: Fri, 20 Dec 2024 21:21:41 +0100 Subject: [PATCH 0930/1198] Allow Filter title to be translated (#128929) --- homeassistant/components/filter/strings.json | 1 + homeassistant/generated/integrations.json | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/filter/strings.json b/homeassistant/components/filter/strings.json index 461eed9aefa..2a83a05bb96 100644 --- a/homeassistant/components/filter/strings.json +++ b/homeassistant/components/filter/strings.json @@ -1,4 +1,5 @@ { + "title": "Filter", "services": { "reload": { "name": "[%key:common::action::reload%]", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 48fedd9c127..f037b8d7ce6 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -7368,7 +7368,6 @@ "iot_class": "calculated" }, "filter": { - "name": "Filter", "integration_type": "helper", "config_flow": false, "iot_class": "local_push" @@ -7499,6 +7498,7 @@ "emulated_roku", "energenie_power_sockets", "filesize", + "filter", "garages_amsterdam", "generic", "generic_hygrostat", From 1e420f16f7ea141487b190f588aba5fdc0e20d2f Mon Sep 17 00:00:00 2001 From: Luke Lashley Date: Fri, 20 Dec 2024 16:01:56 -0500 Subject: [PATCH 0931/1198] Update Roborock to 2.8.4 (#133680) --- homeassistant/components/roborock/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/roborock/manifest.json b/homeassistant/components/roborock/manifest.json index 69d867aa164..bb89ecedbe3 100644 --- a/homeassistant/components/roborock/manifest.json +++ b/homeassistant/components/roborock/manifest.json @@ -7,7 +7,7 @@ "iot_class": "local_polling", "loggers": ["roborock"], "requirements": [ - "python-roborock==2.8.1", + "python-roborock==2.8.4", "vacuum-map-parser-roborock==0.1.2" ] } diff --git a/requirements_all.txt b/requirements_all.txt index d644c4388c4..75357a7d22e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2415,7 +2415,7 @@ python-rabbitair==0.0.8 python-ripple-api==0.0.3 # homeassistant.components.roborock -python-roborock==2.8.1 +python-roborock==2.8.4 # homeassistant.components.smarttub python-smarttub==0.0.38 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a4a6f6d16c9..88b30377514 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1945,7 +1945,7 @@ python-picnic-api==1.1.0 python-rabbitair==0.0.8 # homeassistant.components.roborock -python-roborock==2.8.1 +python-roborock==2.8.4 # homeassistant.components.smarttub python-smarttub==0.0.38 From 
9a0035e09012c5944f2eb72fcd3c24edd104a800 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 20 Dec 2024 23:45:54 +0100 Subject: [PATCH 0932/1198] Fix Mealie test coverage (#133659) --- .../components/mealie/quality_scale.yaml | 4 +- homeassistant/components/mealie/todo.py | 32 +-- tests/components/mealie/test_calendar.py | 20 +- tests/components/mealie/test_todo.py | 252 +++++++++++++++--- 4 files changed, 241 insertions(+), 67 deletions(-) diff --git a/homeassistant/components/mealie/quality_scale.yaml b/homeassistant/components/mealie/quality_scale.yaml index 6a77152f615..738c5b99d91 100644 --- a/homeassistant/components/mealie/quality_scale.yaml +++ b/homeassistant/components/mealie/quality_scale.yaml @@ -35,9 +35,7 @@ rules: log-when-unavailable: done parallel-updates: done reauthentication-flow: done - test-coverage: - status: todo - comment: Platform missing tests + test-coverage: done # Gold devices: done diagnostics: done diff --git a/homeassistant/components/mealie/todo.py b/homeassistant/components/mealie/todo.py index 121e0bcbf10..be04b00113e 100644 --- a/homeassistant/components/mealie/todo.py +++ b/homeassistant/components/mealie/todo.py @@ -148,29 +148,19 @@ class MealieShoppingListTodoListEntity(MealieEntity, TodoListEntity): """Update an item on the list.""" list_items = self.shopping_items - for items in list_items: - if items.item_id == item.uid: - position = items.position - break - list_item: ShoppingItem | None = next( (x for x in list_items if x.item_id == item.uid), None ) + assert list_item is not None + position = list_item.position - if not list_item: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="item_not_found_error", - translation_placeholders={"shopping_list_item": item.uid or ""}, - ) - - udpdate_shopping_item = MutateShoppingItem( + update_shopping_item = MutateShoppingItem( item_id=list_item.item_id, list_id=list_item.list_id, note=list_item.note, display=list_item.display, checked=item.status == TodoItemStatus.COMPLETED, - position=list_item.position, + position=position, is_food=list_item.is_food, disable_amount=list_item.disable_amount, quantity=list_item.quantity, @@ -182,16 +172,16 @@ class MealieShoppingListTodoListEntity(MealieEntity, TodoListEntity): stripped_item_summary = item.summary.strip() if item.summary else item.summary if list_item.display.strip() != stripped_item_summary: - udpdate_shopping_item.note = stripped_item_summary - udpdate_shopping_item.position = position - udpdate_shopping_item.is_food = False - udpdate_shopping_item.food_id = None - udpdate_shopping_item.quantity = 0.0 - udpdate_shopping_item.checked = item.status == TodoItemStatus.COMPLETED + update_shopping_item.note = stripped_item_summary + update_shopping_item.position = position + update_shopping_item.is_food = False + update_shopping_item.food_id = None + update_shopping_item.quantity = 0.0 + update_shopping_item.checked = item.status == TodoItemStatus.COMPLETED try: await self.coordinator.client.update_shopping_item( - list_item.item_id, udpdate_shopping_item + list_item.item_id, update_shopping_item ) except MealieError as exception: raise HomeAssistantError( diff --git a/tests/components/mealie/test_calendar.py b/tests/components/mealie/test_calendar.py index d11fe5d2354..cca4fcca673 100644 --- a/tests/components/mealie/test_calendar.py +++ b/tests/components/mealie/test_calendar.py @@ -4,9 +4,10 @@ from datetime import date from http import HTTPStatus from unittest.mock import AsyncMock, patch +from aiomealie import 
MealplanResponse from syrupy.assertion import SnapshotAssertion -from homeassistant.const import Platform +from homeassistant.const import STATE_OFF, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -40,13 +41,28 @@ async def test_entities( mock_mealie_client: AsyncMock, mock_config_entry: MockConfigEntry, ) -> None: - """Test the API returns the calendar.""" + """Test the calendar entities.""" with patch("homeassistant.components.mealie.PLATFORMS", [Platform.CALENDAR]): await setup_integration(hass, mock_config_entry) await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) +async def test_no_meal_planned( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the calendar handles no meal planned.""" + mock_mealie_client.get_mealplans.return_value = MealplanResponse([]) + + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("calendar.mealie_dinner").state == STATE_OFF + + async def test_api_events( hass: HomeAssistant, snapshot: SnapshotAssertion, diff --git a/tests/components/mealie/test_todo.py b/tests/components/mealie/test_todo.py index 920cfc47397..e7942887099 100644 --- a/tests/components/mealie/test_todo.py +++ b/tests/components/mealie/test_todo.py @@ -1,9 +1,9 @@ """Tests for the Mealie todo.""" from datetime import timedelta -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, call, patch -from aiomealie import MealieError, ShoppingListsResponse +from aiomealie import MealieError, MutateShoppingItem, ShoppingListsResponse from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion @@ -18,7 +18,7 @@ from homeassistant.components.todo import ( ) from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import entity_registry as er from . 
import setup_integration @@ -29,6 +29,7 @@ from tests.common import ( load_fixture, snapshot_platform, ) +from tests.typing import WebSocketGenerator async def test_entities( @@ -45,23 +46,38 @@ async def test_entities( await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) -async def test_add_todo_list_item( +@pytest.mark.parametrize( + ("service", "data", "method"), + [ + (TodoServices.ADD_ITEM, {ATTR_ITEM: "Soda"}, "add_shopping_item"), + ( + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "aubergine", ATTR_RENAME: "Eggplant", ATTR_STATUS: "completed"}, + "update_shopping_item", + ), + (TodoServices.REMOVE_ITEM, {ATTR_ITEM: "aubergine"}, "delete_shopping_item"), + ], +) +async def test_todo_actions( hass: HomeAssistant, mock_mealie_client: AsyncMock, mock_config_entry: MockConfigEntry, + service: str, + data: dict[str, str], + method: str, ) -> None: - """Test for adding a To-do Item.""" + """Test todo actions.""" await setup_integration(hass, mock_config_entry) await hass.services.async_call( TODO_DOMAIN, - TodoServices.ADD_ITEM, - {ATTR_ITEM: "Soda"}, + service, + data, target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, blocking=True, ) - mock_mealie_client.add_shopping_item.assert_called_once() + getattr(mock_mealie_client, method).assert_called_once() async def test_add_todo_list_item_error( @@ -74,7 +90,9 @@ async def test_add_todo_list_item_error( mock_mealie_client.add_shopping_item.side_effect = MealieError - with pytest.raises(HomeAssistantError): + with pytest.raises( + HomeAssistantError, match="An error occurred adding an item to Supermarket" + ): await hass.services.async_call( TODO_DOMAIN, TodoServices.ADD_ITEM, @@ -84,25 +102,6 @@ async def test_add_todo_list_item_error( ) -async def test_update_todo_list_item( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test for updating a To-do Item.""" - await setup_integration(hass, mock_config_entry) - - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "aubergine", ATTR_RENAME: "Eggplant", ATTR_STATUS: "completed"}, - target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, - blocking=True, - ) - - mock_mealie_client.update_shopping_item.assert_called_once() - - async def test_update_todo_list_item_error( hass: HomeAssistant, mock_mealie_client: AsyncMock, @@ -113,7 +112,9 @@ async def test_update_todo_list_item_error( mock_mealie_client.update_shopping_item.side_effect = MealieError - with pytest.raises(HomeAssistantError): + with pytest.raises( + HomeAssistantError, match="An error occurred updating an item in Supermarket" + ): await hass.services.async_call( TODO_DOMAIN, TodoServices.UPDATE_ITEM, @@ -123,23 +124,24 @@ async def test_update_todo_list_item_error( ) -async def test_delete_todo_list_item( +async def test_update_non_existent_item( hass: HomeAssistant, mock_mealie_client: AsyncMock, mock_config_entry: MockConfigEntry, ) -> None: - """Test for deleting a To-do Item.""" + """Test for updating a non-existent To-do Item.""" await setup_integration(hass, mock_config_entry) - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.REMOVE_ITEM, - {ATTR_ITEM: "aubergine"}, - target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, - blocking=True, - ) - - mock_mealie_client.delete_shopping_item.assert_called_once() + with pytest.raises( + ServiceValidationError, match="Unable to find to-do list item: eggplant" + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: 
"eggplant", ATTR_RENAME: "Aubergine", ATTR_STATUS: "completed"}, + target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, + blocking=True, + ) async def test_delete_todo_list_item_error( @@ -153,7 +155,9 @@ async def test_delete_todo_list_item_error( mock_mealie_client.delete_shopping_item = AsyncMock() mock_mealie_client.delete_shopping_item.side_effect = MealieError - with pytest.raises(HomeAssistantError): + with pytest.raises( + HomeAssistantError, match="An error occurred deleting an item in Supermarket" + ): await hass.services.async_call( TODO_DOMAIN, TodoServices.REMOVE_ITEM, @@ -163,6 +167,172 @@ async def test_delete_todo_list_item_error( ) +async def test_moving_todo_item( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test for moving a To-do Item to place.""" + await setup_integration(hass, mock_config_entry) + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "todo/item/move", + "entity_id": "todo.mealie_supermarket", + "uid": "f45430f7-3edf-45a9-a50f-73bb375090be", + "previous_uid": "84d8fd74-8eb0-402e-84b6-71f251bfb7cc", + } + ) + resp = await client.receive_json() + assert resp.get("id") == 1 + assert resp.get("success") + assert resp.get("result") is None + + assert mock_mealie_client.update_shopping_item.call_count == 3 + calls = mock_mealie_client.update_shopping_item.mock_calls + + assert calls[0] == call( + "84d8fd74-8eb0-402e-84b6-71f251bfb7cc", + MutateShoppingItem( + item_id="84d8fd74-8eb0-402e-84b6-71f251bfb7cc", + list_id="9ce096fe-ded2-4077-877d-78ba450ab13e", + note="", + display=None, + checked=False, + position=0, + is_food=True, + disable_amount=None, + quantity=1.0, + label_id=None, + food_id="09322430-d24c-4b1a-abb6-22b6ed3a88f5", + unit_id="7bf539d4-fc78-48bc-b48e-c35ccccec34a", + ), + ) + + assert calls[1] == call( + "f45430f7-3edf-45a9-a50f-73bb375090be", + MutateShoppingItem( + item_id="f45430f7-3edf-45a9-a50f-73bb375090be", + list_id="9ce096fe-ded2-4077-877d-78ba450ab13e", + note="Apples", + display=None, + checked=False, + position=1, + is_food=False, + disable_amount=None, + quantity=2.0, + label_id=None, + food_id=None, + unit_id=None, + ), + ) + + assert calls[2] == call( + "69913b9a-7c75-4935-abec-297cf7483f88", + MutateShoppingItem( + item_id="69913b9a-7c75-4935-abec-297cf7483f88", + list_id="9ce096fe-ded2-4077-877d-78ba450ab13e", + note="", + display=None, + checked=False, + position=2, + is_food=True, + disable_amount=None, + quantity=0.0, + label_id=None, + food_id="96801494-4e26-4148-849a-8155deb76327", + unit_id=None, + ), + ) + + +async def test_not_moving_todo_item( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test for moving a To-do Item to the same place.""" + await setup_integration(hass, mock_config_entry) + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "todo/item/move", + "entity_id": "todo.mealie_supermarket", + "uid": "f45430f7-3edf-45a9-a50f-73bb375090be", + "previous_uid": "f45430f7-3edf-45a9-a50f-73bb375090be", + } + ) + resp = await client.receive_json() + assert resp.get("id") == 1 + assert resp.get("success") + assert resp.get("result") is None + + assert mock_mealie_client.update_shopping_item.call_count == 0 + + +async def test_moving_todo_item_invalid_uid( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + 
hass_ws_client: WebSocketGenerator, +) -> None: + """Test for moving a To-do Item to place with invalid UID.""" + await setup_integration(hass, mock_config_entry) + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "todo/item/move", + "entity_id": "todo.mealie_supermarket", + "uid": "cheese", + } + ) + resp = await client.receive_json() + assert resp.get("id") == 1 + assert resp.get("success") is False + assert resp.get("result") is None + assert resp["error"]["code"] == "failed" + assert resp["error"]["message"] == "Item cheese not found" + + assert mock_mealie_client.update_shopping_item.call_count == 0 + + +async def test_moving_todo_item_invalid_previous_uid( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test for moving a To-do Item to place with invalid previous UID.""" + await setup_integration(hass, mock_config_entry) + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "todo/item/move", + "entity_id": "todo.mealie_supermarket", + "uid": "f45430f7-3edf-45a9-a50f-73bb375090be", + "previous_uid": "cheese", + } + ) + resp = await client.receive_json() + assert resp.get("id") == 1 + assert resp.get("success") is False + assert resp.get("result") is None + assert resp["error"]["code"] == "failed" + assert resp["error"]["message"] == "Item cheese not found" + + assert mock_mealie_client.update_shopping_item.call_count == 0 + + async def test_runtime_management( hass: HomeAssistant, mock_mealie_client: AsyncMock, From b6819cbff32033ce87d9560526c4dfb511370f9d Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 20 Dec 2024 13:13:21 -1000 Subject: [PATCH 0933/1198] Bump PySwitchbot to 0.55.2 (#133690) changelog: https://github.com/sblibs/pySwitchbot/compare/0.54.0...0.55.2 --- homeassistant/components/switchbot/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/switchbot/manifest.json b/homeassistant/components/switchbot/manifest.json index 5a328650aca..5c91a6e20a5 100644 --- a/homeassistant/components/switchbot/manifest.json +++ b/homeassistant/components/switchbot/manifest.json @@ -39,5 +39,5 @@ "documentation": "https://www.home-assistant.io/integrations/switchbot", "iot_class": "local_push", "loggers": ["switchbot"], - "requirements": ["PySwitchbot==0.54.0"] + "requirements": ["PySwitchbot==0.55.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 75357a7d22e..2cae5d93bf8 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -84,7 +84,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.54.0 +PySwitchbot==0.55.2 # homeassistant.components.switchmate PySwitchmate==0.5.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 88b30377514..a286cc9a0da 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -81,7 +81,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.54.0 +PySwitchbot==0.55.2 # homeassistant.components.syncthru PySyncThru==0.7.10 From 861d9b334160c865255c43d77787ccfaac43f5cd Mon Sep 17 00:00:00 2001 From: greyeee <62752780+greyeee@users.noreply.github.com> Date: Sat, 21 Dec 2024 07:49:30 +0800 Subject: [PATCH 0934/1198] Add initial support for SwitchBot relay switch (#130863) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit * Support relay switch * Update the version * add test case * change to async_abort * Upgrade PySwitchbot to 0.53.2 * change unit to volt * upgrade pySwitchbot dependency * bump lib, will be split into a separate PR after testing is finished * dry * dry * dry * dry * dry * dry * dry * update tests * fixes * fixes * cleanups * fixes * fixes * fixes * bump again --------- Co-authored-by: J. Nick Koston Co-authored-by: Joost Lekkerkerker --- .../components/switchbot/__init__.py | 9 +- .../components/switchbot/config_flow.py | 43 +-- homeassistant/components/switchbot/const.py | 21 +- homeassistant/components/switchbot/sensor.py | 14 + .../components/switchbot/strings.json | 14 +- tests/components/switchbot/__init__.py | 20 ++ .../components/switchbot/test_config_flow.py | 270 ++++++++++++++++-- tests/components/switchbot/test_sensor.py | 50 +++- 8 files changed, 379 insertions(+), 62 deletions(-) diff --git a/homeassistant/components/switchbot/__init__.py b/homeassistant/components/switchbot/__init__.py index c2b4b2ad736..522258c2a55 100644 --- a/homeassistant/components/switchbot/__init__.py +++ b/homeassistant/components/switchbot/__init__.py @@ -24,6 +24,7 @@ from .const import ( CONF_RETRY_COUNT, CONNECTABLE_SUPPORTED_MODEL_TYPES, DEFAULT_RETRY_COUNT, + ENCRYPTED_MODELS, HASS_SENSOR_TYPE_TO_SWITCHBOT_MODEL, SupportedModels, ) @@ -61,6 +62,8 @@ PLATFORMS_BY_TYPE = { Platform.SENSOR, ], SupportedModels.HUB2.value: [Platform.SENSOR], + SupportedModels.RELAY_SWITCH_1PM.value: [Platform.SWITCH, Platform.SENSOR], + SupportedModels.RELAY_SWITCH_1.value: [Platform.SWITCH], } CLASS_BY_DEVICE = { SupportedModels.CEILING_LIGHT.value: switchbot.SwitchbotCeilingLight, @@ -73,6 +76,8 @@ CLASS_BY_DEVICE = { SupportedModels.LOCK.value: switchbot.SwitchbotLock, SupportedModels.LOCK_PRO.value: switchbot.SwitchbotLock, SupportedModels.BLIND_TILT.value: switchbot.SwitchbotBlindTilt, + SupportedModels.RELAY_SWITCH_1PM.value: switchbot.SwitchbotRelaySwitch, + SupportedModels.RELAY_SWITCH_1.value: switchbot.SwitchbotRelaySwitch, } @@ -116,9 +121,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: SwitchbotConfigEntry) -> ) cls = CLASS_BY_DEVICE.get(sensor_type, switchbot.SwitchbotDevice) - if cls is switchbot.SwitchbotLock: + if switchbot_model in ENCRYPTED_MODELS: try: - device = switchbot.SwitchbotLock( + device = cls( device=ble_device, key_id=entry.data.get(CONF_KEY_ID), encryption_key=entry.data.get(CONF_ENCRYPTION_KEY), diff --git a/homeassistant/components/switchbot/config_flow.py b/homeassistant/components/switchbot/config_flow.py index a0e45169770..fc2d9f491ac 100644 --- a/homeassistant/components/switchbot/config_flow.py +++ b/homeassistant/components/switchbot/config_flow.py @@ -10,7 +10,7 @@ from switchbot import ( SwitchBotAdvertisement, SwitchbotApiError, SwitchbotAuthenticationError, - SwitchbotLock, + SwitchbotModel, parse_advertisement_data, ) import voluptuous as vol @@ -44,8 +44,9 @@ from .const import ( DEFAULT_LOCK_NIGHTLATCH, DEFAULT_RETRY_COUNT, DOMAIN, + ENCRYPTED_MODELS, + ENCRYPTED_SWITCHBOT_MODEL_TO_CLASS, NON_CONNECTABLE_SUPPORTED_MODEL_TYPES, - SUPPORTED_LOCK_MODELS, SUPPORTED_MODEL_TYPES, SupportedModels, ) @@ -112,8 +113,8 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN): "name": data["modelFriendlyName"], "address": short_address(discovery_info.address), } - if model_name in SUPPORTED_LOCK_MODELS: - return await self.async_step_lock_choose_method() + if model_name in ENCRYPTED_MODELS: + return await self.async_step_encrypted_choose_method() if 
self._discovered_adv.data["isEncrypted"]: return await self.async_step_password() return await self.async_step_confirm() @@ -171,7 +172,7 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN): }, ) - async def async_step_lock_auth( + async def async_step_encrypted_auth( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the SwitchBot API auth step.""" @@ -179,8 +180,10 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN): assert self._discovered_adv is not None description_placeholders = {} if user_input is not None: + model: SwitchbotModel = self._discovered_adv.data["modelName"] + cls = ENCRYPTED_SWITCHBOT_MODEL_TO_CLASS[model] try: - key_details = await SwitchbotLock.async_retrieve_encryption_key( + key_details = await cls.async_retrieve_encryption_key( async_get_clientsession(self.hass), self._discovered_adv.address, user_input[CONF_USERNAME], @@ -198,11 +201,11 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN): errors = {"base": "auth_failed"} description_placeholders = {"error_detail": str(ex)} else: - return await self.async_step_lock_key(key_details) + return await self.async_step_encrypted_key(key_details) user_input = user_input or {} return self.async_show_form( - step_id="lock_auth", + step_id="encrypted_auth", errors=errors, data_schema=vol.Schema( { @@ -218,32 +221,34 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN): }, ) - async def async_step_lock_choose_method( + async def async_step_encrypted_choose_method( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the SwitchBot API chose method step.""" assert self._discovered_adv is not None return self.async_show_menu( - step_id="lock_choose_method", - menu_options=["lock_auth", "lock_key"], + step_id="encrypted_choose_method", + menu_options=["encrypted_auth", "encrypted_key"], description_placeholders={ "name": name_from_discovery(self._discovered_adv), }, ) - async def async_step_lock_key( + async def async_step_encrypted_key( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the encryption key step.""" errors = {} assert self._discovered_adv is not None if user_input is not None: - if not await SwitchbotLock.verify_encryption_key( + model: SwitchbotModel = self._discovered_adv.data["modelName"] + cls = ENCRYPTED_SWITCHBOT_MODEL_TO_CLASS[model] + if not await cls.verify_encryption_key( self._discovered_adv.device, user_input[CONF_KEY_ID], user_input[CONF_ENCRYPTION_KEY], - model=self._discovered_adv.data["modelName"], + model=model, ): errors = { "base": "encryption_key_invalid", @@ -252,7 +257,7 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN): return await self._async_create_entry_from_discovery(user_input) return self.async_show_form( - step_id="lock_key", + step_id="encrypted_key", errors=errors, data_schema=vol.Schema( { @@ -309,8 +314,8 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: device_adv = self._discovered_advs[user_input[CONF_ADDRESS]] await self._async_set_device(device_adv) - if device_adv.data.get("modelName") in SUPPORTED_LOCK_MODELS: - return await self.async_step_lock_choose_method() + if device_adv.data.get("modelName") in ENCRYPTED_MODELS: + return await self.async_step_encrypted_choose_method() if device_adv.data["isEncrypted"]: return await self.async_step_password() return await self._async_create_entry_from_discovery(user_input) @@ -321,8 +326,8 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN): # or simply confirm it device_adv = 
list(self._discovered_advs.values())[0] await self._async_set_device(device_adv) - if device_adv.data.get("modelName") in SUPPORTED_LOCK_MODELS: - return await self.async_step_lock_choose_method() + if device_adv.data.get("modelName") in ENCRYPTED_MODELS: + return await self.async_step_encrypted_choose_method() if device_adv.data["isEncrypted"]: return await self.async_step_password() return await self.async_step_confirm() diff --git a/homeassistant/components/switchbot/const.py b/homeassistant/components/switchbot/const.py index b8cf4e8e1ab..383fd6b03b6 100644 --- a/homeassistant/components/switchbot/const.py +++ b/homeassistant/components/switchbot/const.py @@ -2,6 +2,7 @@ from enum import StrEnum +import switchbot from switchbot import SwitchbotModel DOMAIN = "switchbot" @@ -30,6 +31,8 @@ class SupportedModels(StrEnum): LOCK_PRO = "lock_pro" BLIND_TILT = "blind_tilt" HUB2 = "hub2" + RELAY_SWITCH_1PM = "relay_switch_1pm" + RELAY_SWITCH_1 = "relay_switch_1" CONNECTABLE_SUPPORTED_MODEL_TYPES = { @@ -44,6 +47,8 @@ CONNECTABLE_SUPPORTED_MODEL_TYPES = { SwitchbotModel.LOCK_PRO: SupportedModels.LOCK_PRO, SwitchbotModel.BLIND_TILT: SupportedModels.BLIND_TILT, SwitchbotModel.HUB2: SupportedModels.HUB2, + SwitchbotModel.RELAY_SWITCH_1PM: SupportedModels.RELAY_SWITCH_1PM, + SwitchbotModel.RELAY_SWITCH_1: SupportedModels.RELAY_SWITCH_1, } NON_CONNECTABLE_SUPPORTED_MODEL_TYPES = { @@ -59,7 +64,21 @@ SUPPORTED_MODEL_TYPES = ( CONNECTABLE_SUPPORTED_MODEL_TYPES | NON_CONNECTABLE_SUPPORTED_MODEL_TYPES ) -SUPPORTED_LOCK_MODELS = {SwitchbotModel.LOCK, SwitchbotModel.LOCK_PRO} +ENCRYPTED_MODELS = { + SwitchbotModel.RELAY_SWITCH_1, + SwitchbotModel.RELAY_SWITCH_1PM, + SwitchbotModel.LOCK, + SwitchbotModel.LOCK_PRO, +} + +ENCRYPTED_SWITCHBOT_MODEL_TO_CLASS: dict[ + SwitchbotModel, switchbot.SwitchbotEncryptedDevice +] = { + SwitchbotModel.LOCK: switchbot.SwitchbotLock, + SwitchbotModel.LOCK_PRO: switchbot.SwitchbotLock, + SwitchbotModel.RELAY_SWITCH_1PM: switchbot.SwitchbotRelaySwitch, + SwitchbotModel.RELAY_SWITCH_1: switchbot.SwitchbotRelaySwitch, +} HASS_SENSOR_TYPE_TO_SWITCHBOT_MODEL = { str(v): k for k, v in SUPPORTED_MODEL_TYPES.items() diff --git a/homeassistant/components/switchbot/sensor.py b/homeassistant/components/switchbot/sensor.py index fd3de3e31e9..9787521a5e9 100644 --- a/homeassistant/components/switchbot/sensor.py +++ b/homeassistant/components/switchbot/sensor.py @@ -14,6 +14,8 @@ from homeassistant.const import ( PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, + UnitOfElectricCurrent, + UnitOfElectricPotential, UnitOfPower, UnitOfTemperature, ) @@ -82,6 +84,18 @@ SENSOR_TYPES: dict[str, SensorEntityDescription] = { state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.POWER, ), + "current": SensorEntityDescription( + key="current", + native_unit_of_measurement=UnitOfElectricCurrent.MILLIAMPERE, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.CURRENT, + ), + "voltage": SensorEntityDescription( + key="voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.VOLTAGE, + ), } diff --git a/homeassistant/components/switchbot/strings.json b/homeassistant/components/switchbot/strings.json index 80ca32d4826..2a5ddaa0cba 100644 --- a/homeassistant/components/switchbot/strings.json +++ b/homeassistant/components/switchbot/strings.json @@ -16,25 +16,25 @@ "password": "[%key:common::config_flow::data::password%]" } }, - "lock_key": { + 
"encrypted_key": { "description": "The {name} device requires encryption key, details on how to obtain it can be found in the documentation.", "data": { "key_id": "Key ID", "encryption_key": "Encryption key" } }, - "lock_auth": { - "description": "Please provide your SwitchBot app username and password. This data won't be saved and only used to retrieve your locks encryption key. Usernames and passwords are case sensitive.", + "encrypted_auth": { + "description": "Please provide your SwitchBot app username and password. This data won't be saved and only used to retrieve your device's encryption key. Usernames and passwords are case sensitive.", "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" } }, - "lock_choose_method": { - "description": "A SwitchBot lock can be set up in Home Assistant in two different ways.\n\nYou can enter the key id and encryption key yourself, or Home Assistant can import them from your SwitchBot account.", + "encrypted_choose_method": { + "description": "An encrypted SwitchBot device can be set up in Home Assistant in two different ways.\n\nYou can enter the key id and encryption key yourself, or Home Assistant can import them from your SwitchBot account.", "menu_options": { - "lock_auth": "SwitchBot account (recommended)", - "lock_key": "Enter lock encryption key manually" + "encrypted_auth": "SwitchBot account (recommended)", + "encrypted_key": "Enter encryption key manually" } } }, diff --git a/tests/components/switchbot/__init__.py b/tests/components/switchbot/__init__.py index bd3985ff062..c5ecebf21b3 100644 --- a/tests/components/switchbot/__init__.py +++ b/tests/components/switchbot/__init__.py @@ -230,3 +230,23 @@ WOMETERTHPC_SERVICE_INFO = BluetoothServiceInfoBleak( connectable=True, tx_power=-127, ) + +WORELAY_SWITCH_1PM_SERVICE_INFO = BluetoothServiceInfoBleak( + name="W1080000", + manufacturer_data={2409: b"$X|\x0866G\x81\x00\x00\x001\x00\x00\x00\x00"}, + service_data={"0000fd3d-0000-1000-8000-00805f9b34fb": b"<\x00\x00\x00"}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + address="AA:BB:CC:DD:EE:FF", + rssi=-60, + source="local", + advertisement=generate_advertisement_data( + local_name="W1080000", + manufacturer_data={2409: b"$X|\x0866G\x81\x00\x00\x001\x00\x00\x00\x00"}, + service_data={"0000fd3d-0000-1000-8000-00805f9b34fb": b"<\x00\x00\x00"}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + ), + device=generate_ble_device("AA:BB:CC:DD:EE:FF", "W1080000"), + time=0, + connectable=True, + tx_power=-127, +) diff --git a/tests/components/switchbot/test_config_flow.py b/tests/components/switchbot/test_config_flow.py index b0fba2a5f18..3caa2a1f0df 100644 --- a/tests/components/switchbot/test_config_flow.py +++ b/tests/components/switchbot/test_config_flow.py @@ -30,6 +30,7 @@ from . 
import ( WOHAND_SERVICE_INFO, WOHAND_SERVICE_INFO_NOT_CONNECTABLE, WOLOCK_SERVICE_INFO, + WORELAY_SWITCH_1PM_SERVICE_INFO, WOSENSORTH_SERVICE_INFO, init_integration, patch_async_setup_entry, @@ -95,7 +96,7 @@ async def test_bluetooth_discovery_requires_password(hass: HomeAssistant) -> Non assert len(mock_setup_entry.mock_calls) == 1 -async def test_bluetooth_discovery_lock_key(hass: HomeAssistant) -> None: +async def test_bluetooth_discovery_encrypted_key(hass: HomeAssistant) -> None: """Test discovery via bluetooth with a lock.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -103,18 +104,18 @@ async def test_bluetooth_discovery_lock_key(hass: HomeAssistant) -> None: data=WOLOCK_SERVICE_INFO, ) assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "lock_choose_method" + assert result["step_id"] == "encrypted_choose_method" result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={"next_step_id": "lock_key"} + result["flow_id"], user_input={"next_step_id": "encrypted_key"} ) await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "lock_key" + assert result["step_id"] == "encrypted_key" assert result["errors"] == {} with patch( - "homeassistant.components.switchbot.config_flow.SwitchbotLock.verify_encryption_key", + "switchbot.SwitchbotLock.verify_encryption_key", return_value=False, ): result = await hass.config_entries.flow.async_configure( @@ -127,13 +128,13 @@ async def test_bluetooth_discovery_lock_key(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "lock_key" + assert result["step_id"] == "encrypted_key" assert result["errors"] == {"base": "encryption_key_invalid"} with ( patch_async_setup_entry() as mock_setup_entry, patch( - "homeassistant.components.switchbot.config_flow.SwitchbotLock.verify_encryption_key", + "switchbot.SwitchbotLock.verify_encryption_key", return_value=True, ), ): @@ -158,6 +159,51 @@ async def test_bluetooth_discovery_lock_key(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 +async def test_bluetooth_discovery_key(hass: HomeAssistant) -> None: + """Test discovery via bluetooth with a encrypted device.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_BLUETOOTH}, + data=WORELAY_SWITCH_1PM_SERVICE_INFO, + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "encrypted_choose_method" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"next_step_id": "encrypted_key"} + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "encrypted_key" + assert result["errors"] == {} + + with ( + patch_async_setup_entry() as mock_setup_entry, + patch( + "switchbot.SwitchbotRelaySwitch.verify_encryption_key", return_value=True + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_KEY_ID: "ff", + CONF_ENCRYPTION_KEY: "ffffffffffffffffffffffffffffffff", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Relay Switch 1PM EEFF" + assert result["data"] == { + CONF_ADDRESS: "AA:BB:CC:DD:EE:FF", + CONF_KEY_ID: "ff", + CONF_ENCRYPTION_KEY: "ffffffffffffffffffffffffffffffff", + CONF_SENSOR_TYPE: "relay_switch_1pm", + } + + assert 
len(mock_setup_entry.mock_calls) == 1 + + async def test_bluetooth_discovery_already_setup(hass: HomeAssistant) -> None: """Test discovery via bluetooth with a valid device when already setup.""" entry = MockConfigEntry( @@ -400,7 +446,7 @@ async def test_user_setup_single_bot_with_password(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_user_setup_wolock_key(hass: HomeAssistant) -> None: +async def test_user_setup_woencrypted_key(hass: HomeAssistant) -> None: """Test the user initiated form for a lock.""" with patch( @@ -411,18 +457,18 @@ async def test_user_setup_wolock_key(hass: HomeAssistant) -> None: DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "lock_choose_method" + assert result["step_id"] == "encrypted_choose_method" result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={"next_step_id": "lock_key"} + result["flow_id"], user_input={"next_step_id": "encrypted_key"} ) await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "lock_key" + assert result["step_id"] == "encrypted_key" assert result["errors"] == {} with patch( - "homeassistant.components.switchbot.config_flow.SwitchbotLock.verify_encryption_key", + "switchbot.SwitchbotLock.verify_encryption_key", return_value=False, ): result = await hass.config_entries.flow.async_configure( @@ -435,13 +481,13 @@ async def test_user_setup_wolock_key(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "lock_key" + assert result["step_id"] == "encrypted_key" assert result["errors"] == {"base": "encryption_key_invalid"} with ( patch_async_setup_entry() as mock_setup_entry, patch( - "homeassistant.components.switchbot.config_flow.SwitchbotLock.verify_encryption_key", + "switchbot.SwitchbotLock.verify_encryption_key", return_value=True, ), ): @@ -466,7 +512,7 @@ async def test_user_setup_wolock_key(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_user_setup_wolock_auth(hass: HomeAssistant) -> None: +async def test_user_setup_woencrypted_auth(hass: HomeAssistant) -> None: """Test the user initiated form for a lock.""" with patch( @@ -477,18 +523,18 @@ async def test_user_setup_wolock_auth(hass: HomeAssistant) -> None: DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "lock_choose_method" + assert result["step_id"] == "encrypted_choose_method" result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={"next_step_id": "lock_auth"} + result["flow_id"], user_input={"next_step_id": "encrypted_auth"} ) await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "lock_auth" + assert result["step_id"] == "encrypted_auth" assert result["errors"] == {} with patch( - "homeassistant.components.switchbot.config_flow.SwitchbotLock.async_retrieve_encryption_key", + "switchbot.SwitchbotLock.async_retrieve_encryption_key", side_effect=SwitchbotAuthenticationError("error from api"), ): result = await hass.config_entries.flow.async_configure( @@ -500,18 +546,18 @@ async def test_user_setup_wolock_auth(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "lock_auth" + assert result["step_id"] == 
"encrypted_auth" assert result["errors"] == {"base": "auth_failed"} assert "error from api" in result["description_placeholders"]["error_detail"] with ( patch_async_setup_entry() as mock_setup_entry, patch( - "homeassistant.components.switchbot.config_flow.SwitchbotLock.verify_encryption_key", + "switchbot.SwitchbotLock.verify_encryption_key", return_value=True, ), patch( - "homeassistant.components.switchbot.config_flow.SwitchbotLock.async_retrieve_encryption_key", + "switchbot.SwitchbotLock.async_retrieve_encryption_key", return_value={ CONF_KEY_ID: "ff", CONF_ENCRYPTION_KEY: "ffffffffffffffffffffffffffffffff", @@ -539,7 +585,9 @@ async def test_user_setup_wolock_auth(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_user_setup_wolock_auth_switchbot_api_down(hass: HomeAssistant) -> None: +async def test_user_setup_woencrypted_auth_switchbot_api_down( + hass: HomeAssistant, +) -> None: """Test the user initiated form for a lock when the switchbot api is down.""" with patch( @@ -550,18 +598,18 @@ async def test_user_setup_wolock_auth_switchbot_api_down(hass: HomeAssistant) -> DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "lock_choose_method" + assert result["step_id"] == "encrypted_choose_method" result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={"next_step_id": "lock_auth"} + result["flow_id"], user_input={"next_step_id": "encrypted_auth"} ) await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "lock_auth" + assert result["step_id"] == "encrypted_auth" assert result["errors"] == {} with patch( - "homeassistant.components.switchbot.config_flow.SwitchbotLock.async_retrieve_encryption_key", + "switchbot.SwitchbotLock.async_retrieve_encryption_key", side_effect=SwitchbotAccountConnectionError("Switchbot API down"), ): result = await hass.config_entries.flow.async_configure( @@ -600,20 +648,20 @@ async def test_user_setup_wolock_or_bot(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "lock_choose_method" + assert result["step_id"] == "encrypted_choose_method" result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={"next_step_id": "lock_key"} + result["flow_id"], user_input={"next_step_id": "encrypted_key"} ) await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "lock_key" + assert result["step_id"] == "encrypted_key" assert result["errors"] == {} with ( patch_async_setup_entry() as mock_setup_entry, patch( - "homeassistant.components.switchbot.config_flow.SwitchbotLock.verify_encryption_key", + "switchbot.SwitchbotLock.verify_encryption_key", return_value=True, ), ): @@ -845,3 +893,163 @@ async def test_options_flow_lock_pro(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 0 assert entry.options[CONF_LOCK_NIGHTLATCH] is True + + +async def test_user_setup_worelay_switch_1pm_key(hass: HomeAssistant) -> None: + """Test the user initiated form for a relay switch 1pm.""" + + with patch( + "homeassistant.components.switchbot.config_flow.async_discovered_service_info", + return_value=[WORELAY_SWITCH_1PM_SERVICE_INFO], + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] 
== "encrypted_choose_method" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"next_step_id": "encrypted_key"} + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "encrypted_key" + assert result["errors"] == {} + + with ( + patch_async_setup_entry() as mock_setup_entry, + patch( + "switchbot.SwitchbotRelaySwitch.verify_encryption_key", return_value=True + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_KEY_ID: "ff", + CONF_ENCRYPTION_KEY: "ffffffffffffffffffffffffffffffff", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Relay Switch 1PM EEFF" + assert result["data"] == { + CONF_ADDRESS: "AA:BB:CC:DD:EE:FF", + CONF_KEY_ID: "ff", + CONF_ENCRYPTION_KEY: "ffffffffffffffffffffffffffffffff", + CONF_SENSOR_TYPE: "relay_switch_1pm", + } + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_user_setup_worelay_switch_1pm_auth(hass: HomeAssistant) -> None: + """Test the user initiated form for a relay switch 1pm.""" + + with patch( + "homeassistant.components.switchbot.config_flow.async_discovered_service_info", + return_value=[WORELAY_SWITCH_1PM_SERVICE_INFO], + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "encrypted_choose_method" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"next_step_id": "encrypted_auth"} + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "encrypted_auth" + assert result["errors"] == {} + + with patch( + "switchbot.SwitchbotRelaySwitch.async_retrieve_encryption_key", + side_effect=SwitchbotAuthenticationError("error from api"), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "", + CONF_PASSWORD: "", + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "encrypted_auth" + assert result["errors"] == {"base": "auth_failed"} + assert "error from api" in result["description_placeholders"]["error_detail"] + + with ( + patch_async_setup_entry() as mock_setup_entry, + patch( + "switchbot.SwitchbotRelaySwitch.async_retrieve_encryption_key", + return_value={ + CONF_KEY_ID: "ff", + CONF_ENCRYPTION_KEY: "ffffffffffffffffffffffffffffffff", + }, + ), + patch( + "switchbot.SwitchbotRelaySwitch.verify_encryption_key", return_value=True + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "username", + CONF_PASSWORD: "password", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Relay Switch 1PM EEFF" + assert result["data"] == { + CONF_ADDRESS: "AA:BB:CC:DD:EE:FF", + CONF_KEY_ID: "ff", + CONF_ENCRYPTION_KEY: "ffffffffffffffffffffffffffffffff", + CONF_SENSOR_TYPE: "relay_switch_1pm", + } + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_user_setup_worelay_switch_1pm_auth_switchbot_api_down( + hass: HomeAssistant, +) -> None: + """Test the user initiated form for a relay switch 1pm when the switchbot api is down.""" + + with patch( + "homeassistant.components.switchbot.config_flow.async_discovered_service_info", + 
return_value=[WORELAY_SWITCH_1PM_SERVICE_INFO], + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "encrypted_choose_method" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"next_step_id": "encrypted_auth"} + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "encrypted_auth" + assert result["errors"] == {} + + with patch( + "switchbot.SwitchbotRelaySwitch.async_retrieve_encryption_key", + side_effect=SwitchbotAccountConnectionError("Switchbot API down"), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "", + CONF_PASSWORD: "", + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "api_error" + assert result["description_placeholders"] == {"error_detail": "Switchbot API down"} diff --git a/tests/components/switchbot/test_sensor.py b/tests/components/switchbot/test_sensor.py index 3adeaef936c..205bb739508 100644 --- a/tests/components/switchbot/test_sensor.py +++ b/tests/components/switchbot/test_sensor.py @@ -1,9 +1,15 @@ """Test the switchbot sensors.""" +from unittest.mock import patch + import pytest from homeassistant.components.sensor import ATTR_STATE_CLASS -from homeassistant.components.switchbot.const import DOMAIN +from homeassistant.components.switchbot.const import ( + CONF_ENCRYPTION_KEY, + CONF_KEY_ID, + DOMAIN, +) from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_UNIT_OF_MEASUREMENT, @@ -15,7 +21,11 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from . import WOHAND_SERVICE_INFO, WOMETERTHPC_SERVICE_INFO +from . 
import ( + WOHAND_SERVICE_INFO, + WOMETERTHPC_SERVICE_INFO, + WORELAY_SWITCH_1PM_SERVICE_INFO, +) from tests.common import MockConfigEntry from tests.components.bluetooth import inject_bluetooth_service_info @@ -105,3 +115,39 @@ async def test_co2_sensor(hass: HomeAssistant) -> None: assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_relay_switch_1pm_power_sensor(hass: HomeAssistant) -> None: + """Test setting up creates the power sensor.""" + await async_setup_component(hass, DOMAIN, {}) + inject_bluetooth_service_info(hass, WORELAY_SWITCH_1PM_SERVICE_INFO) + + with patch( + "switchbot.SwitchbotRelaySwitch.update", + return_value=None, + ): + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", + CONF_NAME: "test-name", + CONF_SENSOR_TYPE: "relay_switch_1pm", + CONF_KEY_ID: "ff", + CONF_ENCRYPTION_KEY: "ffffffffffffffffffffffffffffffff", + }, + unique_id="aabbccddeeaa", + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + power_sensor = hass.states.get("sensor.test_name_power") + power_sensor_attrs = power_sensor.attributes + assert power_sensor.state == "4.9" + assert power_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Power" + assert power_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "W" + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() From e38a85da6451203ba678e31ece6f305755257366 Mon Sep 17 00:00:00 2001 From: Kevin Worrel <37058192+dieselrabbit@users.noreply.github.com> Date: Sat, 21 Dec 2024 00:25:21 -0800 Subject: [PATCH 0935/1198] Add entity translation strings for ScreenLogic (#130708) * Add translation strings for entities * Translation key updates * Match original name * Remove state translations * Sentence case entity names * Fix tests * Add missing translation_key for Air temperature * Revert inadvertent entity_id change on last_dose_time sensors * Update homeassistant/components/screenlogic/strings.json Lowercase 'entry' Co-authored-by: Joost Lekkerkerker * Define translations for each circuit delay sensor --------- Co-authored-by: Joost Lekkerkerker --- .../components/screenlogic/binary_sensor.py | 27 ++++ .../components/screenlogic/climate.py | 2 +- .../components/screenlogic/entity.py | 3 +- .../components/screenlogic/number.py | 6 + .../components/screenlogic/sensor.py | 33 ++++ .../components/screenlogic/strings.json | 143 +++++++++++++++++- tests/components/screenlogic/test_init.py | 2 +- 7 files changed, 207 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/screenlogic/binary_sensor.py b/homeassistant/components/screenlogic/binary_sensor.py index fda1c348edf..4a178c60d81 100644 --- a/homeassistant/components/screenlogic/binary_sensor.py +++ b/homeassistant/components/screenlogic/binary_sensor.py @@ -49,26 +49,31 @@ SUPPORTED_CORE_SENSORS = [ data_root=(DEVICE.CONTROLLER, GROUP.SENSOR), key=VALUE.ACTIVE_ALERT, device_class=BinarySensorDeviceClass.PROBLEM, + translation_key="active_alert", ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.STATUS_CHANGED, data_root=(DEVICE.CONTROLLER, GROUP.SENSOR), key=VALUE.CLEANER_DELAY, + translation_key="cleaner_delay", ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.STATUS_CHANGED, data_root=(DEVICE.CONTROLLER, GROUP.SENSOR), key=VALUE.FREEZE_MODE, + translation_key="freeze_mode", ), 
ScreenLogicPushBinarySensorDescription( subscription_code=CODE.STATUS_CHANGED, data_root=(DEVICE.CONTROLLER, GROUP.SENSOR), key=VALUE.POOL_DELAY, + translation_key="pool_delay", ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.STATUS_CHANGED, data_root=(DEVICE.CONTROLLER, GROUP.SENSOR), key=VALUE.SPA_DELAY, + translation_key="spa_delay", ), ] @@ -85,75 +90,96 @@ SUPPORTED_INTELLICHEM_SENSORS = [ data_root=(DEVICE.INTELLICHEM, GROUP.ALARM), key=VALUE.FLOW_ALARM, device_class=BinarySensorDeviceClass.PROBLEM, + translation_key="flow_alarm", ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.ALARM), key=VALUE.ORP_HIGH_ALARM, device_class=BinarySensorDeviceClass.PROBLEM, + translation_key="chem_high_alarm", + translation_placeholders={"chem": "ORP"}, ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.ALARM), key=VALUE.ORP_LOW_ALARM, device_class=BinarySensorDeviceClass.PROBLEM, + translation_key="chem_low_alarm", + translation_placeholders={"chem": "ORP"}, ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.ALARM), key=VALUE.ORP_SUPPLY_ALARM, device_class=BinarySensorDeviceClass.PROBLEM, + translation_key="chem_supply_alarm", + translation_placeholders={"chem": "ORP"}, ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.ALARM), key=VALUE.PH_HIGH_ALARM, device_class=BinarySensorDeviceClass.PROBLEM, + translation_key="chem_high_alarm", + translation_placeholders={"chem": "pH"}, ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.ALARM), key=VALUE.PH_LOW_ALARM, device_class=BinarySensorDeviceClass.PROBLEM, + translation_key="chem_low_alarm", + translation_placeholders={"chem": "pH"}, ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.ALARM), key=VALUE.PH_SUPPLY_ALARM, device_class=BinarySensorDeviceClass.PROBLEM, + translation_key="chem_supply_alarm", + translation_placeholders={"chem": "pH"}, ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.ALARM), key=VALUE.PROBE_FAULT_ALARM, device_class=BinarySensorDeviceClass.PROBLEM, + translation_key="probe_fault_alarm", ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.ALERT), key=VALUE.ORP_LIMIT, + translation_key="chem_limit", + translation_placeholders={"chem": "ORP"}, ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.ALERT), key=VALUE.PH_LIMIT, + translation_key="chem_limit", + translation_placeholders={"chem": "pH"}, ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.ALERT), key=VALUE.PH_LOCKOUT, + translation_key="ph_lockout", ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.WATER_BALANCE), key=VALUE.CORROSIVE, device_class=BinarySensorDeviceClass.PROBLEM, + translation_key="corosive", ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.WATER_BALANCE), key=VALUE.SCALING, 
device_class=BinarySensorDeviceClass.PROBLEM, + translation_key="scaling", ), ] @@ -161,6 +187,7 @@ SUPPORTED_SCG_SENSORS = [ ScreenLogicBinarySensorDescription( data_root=(DEVICE.SCG, GROUP.SENSOR), key=VALUE.STATE, + translation_key="scg_state", ) ] diff --git a/homeassistant/components/screenlogic/climate.py b/homeassistant/components/screenlogic/climate.py index 08300900f5d..c0cff8d511b 100644 --- a/homeassistant/components/screenlogic/climate.py +++ b/homeassistant/components/screenlogic/climate.py @@ -56,6 +56,7 @@ async def async_setup_entry( subscription_code=CODE.STATUS_CHANGED, data_root=(DEVICE.BODY,), key=body_index, + translation_key=f"body_{body_index}", ), ) for body_index in gateway.get_data(DEVICE.BODY) @@ -97,7 +98,6 @@ class ScreenLogicClimate(ScreenLogicPushEntity, ClimateEntity, RestoreEntity): self._attr_min_temp = self.entity_data[ATTR.MIN_SETPOINT] self._attr_max_temp = self.entity_data[ATTR.MAX_SETPOINT] - self._attr_name = self.entity_data[VALUE.HEAT_STATE][ATTR.NAME] self._last_preset = None @property diff --git a/homeassistant/components/screenlogic/entity.py b/homeassistant/components/screenlogic/entity.py index 0f7530b7289..746abc2fde6 100644 --- a/homeassistant/components/screenlogic/entity.py +++ b/homeassistant/components/screenlogic/entity.py @@ -55,7 +55,8 @@ class ScreenLogicEntity(CoordinatorEntity[ScreenlogicDataUpdateCoordinator]): self._data_path = (*self.entity_description.data_root, self._data_key) mac = self.mac self._attr_unique_id = f"{mac}_{generate_unique_id(*self._data_path)}" - self._attr_name = self.entity_data[ATTR.NAME] + if not entity_description.translation_key: + self._attr_name = self.entity_data[ATTR.NAME] assert mac is not None self._attr_device_info = DeviceInfo( connections={(dr.CONNECTION_NETWORK_MAC, mac)}, diff --git a/homeassistant/components/screenlogic/number.py b/homeassistant/components/screenlogic/number.py index d0eb6a71ec8..3634147e509 100644 --- a/homeassistant/components/screenlogic/number.py +++ b/homeassistant/components/screenlogic/number.py @@ -57,6 +57,7 @@ SUPPORTED_INTELLICHEM_NUMBERS = [ key=VALUE.CALCIUM_HARDNESS, entity_category=EntityCategory.CONFIG, mode=NumberMode.BOX, + translation_key="calcium_hardness", ), ScreenLogicPushNumberDescription( subscription_code=CODE.CHEMISTRY_CHANGED, @@ -64,6 +65,7 @@ SUPPORTED_INTELLICHEM_NUMBERS = [ key=VALUE.CYA, entity_category=EntityCategory.CONFIG, mode=NumberMode.BOX, + translation_key="cya", ), ScreenLogicPushNumberDescription( subscription_code=CODE.CHEMISTRY_CHANGED, @@ -71,6 +73,7 @@ SUPPORTED_INTELLICHEM_NUMBERS = [ key=VALUE.TOTAL_ALKALINITY, entity_category=EntityCategory.CONFIG, mode=NumberMode.BOX, + translation_key="total_alkalinity", ), ScreenLogicPushNumberDescription( subscription_code=CODE.CHEMISTRY_CHANGED, @@ -78,6 +81,7 @@ SUPPORTED_INTELLICHEM_NUMBERS = [ key=VALUE.SALT_TDS_PPM, entity_category=EntityCategory.CONFIG, mode=NumberMode.BOX, + translation_key="salt_tds_ppm", ), ] @@ -86,11 +90,13 @@ SUPPORTED_SCG_NUMBERS = [ data_root=(DEVICE.SCG, GROUP.CONFIGURATION), key=VALUE.POOL_SETPOINT, entity_category=EntityCategory.CONFIG, + translation_key="pool_setpoint", ), ScreenLogicNumberDescription( data_root=(DEVICE.SCG, GROUP.CONFIGURATION), key=VALUE.SPA_SETPOINT, entity_category=EntityCategory.CONFIG, + translation_key="spa_setpoint", ), ] diff --git a/homeassistant/components/screenlogic/sensor.py b/homeassistant/components/screenlogic/sensor.py index c580204221f..6ae6e802859 100644 --- a/homeassistant/components/screenlogic/sensor.py +++ 
b/homeassistant/components/screenlogic/sensor.py @@ -58,6 +58,7 @@ SUPPORTED_CORE_SENSORS = [ key=VALUE.AIR_TEMPERATURE, device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, + translation_key="air_temperature", ), ] @@ -97,12 +98,16 @@ SUPPORTED_INTELLICHEM_SENSORS = [ data_root=(DEVICE.INTELLICHEM, GROUP.SENSOR), key=VALUE.ORP_NOW, state_class=SensorStateClass.MEASUREMENT, + translation_key="chem_now", + translation_placeholders={"chem": "ORP"}, ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.SENSOR), key=VALUE.PH_NOW, state_class=SensorStateClass.MEASUREMENT, + translation_key="chem_now", + translation_placeholders={"chem": "pH"}, ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, @@ -110,6 +115,8 @@ SUPPORTED_INTELLICHEM_SENSORS = [ key=VALUE.ORP_SUPPLY_LEVEL, state_class=SensorStateClass.MEASUREMENT, value_mod=lambda val: int(val) - 1, + translation_key="chem_supply_level", + translation_placeholders={"chem": "ORP"}, ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, @@ -117,6 +124,8 @@ SUPPORTED_INTELLICHEM_SENSORS = [ key=VALUE.PH_SUPPLY_LEVEL, state_class=SensorStateClass.MEASUREMENT, value_mod=lambda val: int(val) - 1, + translation_key="chem_supply_level", + translation_placeholders={"chem": "pH"}, ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, @@ -124,46 +133,56 @@ SUPPORTED_INTELLICHEM_SENSORS = [ key=VALUE.PH_PROBE_WATER_TEMP, device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, + translation_key="ph_probe_water_temp", ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.SENSOR), key=VALUE.SATURATION, state_class=SensorStateClass.MEASUREMENT, + translation_key="saturation", ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.CONFIGURATION), key=VALUE.CALCIUM_HARDNESS, entity_registry_enabled_default=False, # Superseded by number entity + translation_key="calcium_hardness", ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.CONFIGURATION), key=VALUE.CYA, entity_registry_enabled_default=False, # Superseded by number entity + translation_key="cya", ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.CONFIGURATION), key=VALUE.ORP_SETPOINT, + translation_key="chem_setpoint", + translation_placeholders={"chem": "ORP"}, ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.CONFIGURATION), key=VALUE.PH_SETPOINT, + translation_key="chem_setpoint", + translation_placeholders={"chem": "pH"}, ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.CONFIGURATION), key=VALUE.TOTAL_ALKALINITY, entity_registry_enabled_default=False, # Superseded by number entity + translation_key="total_alkalinity", ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.CONFIGURATION), key=VALUE.SALT_TDS_PPM, entity_registry_enabled_default=False, # Superseded by number entity + translation_key="salt_tds_ppm", ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, @@ -172,6 +191,8 @@ SUPPORTED_INTELLICHEM_SENSORS = [ 
device_class=SensorDeviceClass.ENUM, options=["Dosing", "Mixing", "Monitoring"], value_mod=lambda val: DOSE_STATE(val).title, + translation_key="chem_dose_state", + translation_placeholders={"chem": "ORP"}, ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, @@ -179,6 +200,8 @@ SUPPORTED_INTELLICHEM_SENSORS = [ key=VALUE.ORP_LAST_DOSE_TIME, device_class=SensorDeviceClass.DURATION, state_class=SensorStateClass.TOTAL_INCREASING, + translation_key="chem_last_dose_time", + translation_placeholders={"chem": "ORP"}, ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, @@ -186,6 +209,8 @@ SUPPORTED_INTELLICHEM_SENSORS = [ key=VALUE.ORP_LAST_DOSE_VOLUME, device_class=SensorDeviceClass.VOLUME, state_class=SensorStateClass.TOTAL_INCREASING, + translation_key="chem_last_dose_volume", + translation_placeholders={"chem": "ORP"}, ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, @@ -194,6 +219,8 @@ SUPPORTED_INTELLICHEM_SENSORS = [ device_class=SensorDeviceClass.ENUM, options=["Dosing", "Mixing", "Monitoring"], value_mod=lambda val: DOSE_STATE(val).title, + translation_key="chem_dose_state", + translation_placeholders={"chem": "pH"}, ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, @@ -201,6 +228,8 @@ SUPPORTED_INTELLICHEM_SENSORS = [ key=VALUE.PH_LAST_DOSE_TIME, device_class=SensorDeviceClass.DURATION, state_class=SensorStateClass.TOTAL_INCREASING, + translation_key="chem_last_dose_time", + translation_placeholders={"chem": "pH"}, ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, @@ -208,6 +237,8 @@ SUPPORTED_INTELLICHEM_SENSORS = [ key=VALUE.PH_LAST_DOSE_VOLUME, device_class=SensorDeviceClass.VOLUME, state_class=SensorStateClass.TOTAL_INCREASING, + translation_key="chem_last_dose_volume", + translation_placeholders={"chem": "pH"}, ), ] @@ -216,10 +247,12 @@ SUPPORTED_SCG_SENSORS = [ data_root=(DEVICE.SCG, GROUP.SENSOR), key=VALUE.SALT_PPM, state_class=SensorStateClass.MEASUREMENT, + translation_key="salt_ppm", ), ScreenLogicSensorDescription( data_root=(DEVICE.SCG, GROUP.CONFIGURATION), key=VALUE.SUPER_CHLOR_TIMER, + translation_key="super_chlor_timer", ), ] diff --git a/homeassistant/components/screenlogic/strings.json b/homeassistant/components/screenlogic/strings.json index 91395a0e86d..da5e3156592 100644 --- a/homeassistant/components/screenlogic/strings.json +++ b/homeassistant/components/screenlogic/strings.json @@ -1,4 +1,11 @@ { + "common": { + "service_config_entry_name": "Config entry", + "service_config_entry_description": "The config entry to use for this action.", + "climate_preset_solar": "Solar", + "climate_preset_solar_prefered": "Solar Prefered", + "climate_preset_heater": "Heater" + }, "config": { "flow_title": "{name}", "error": { @@ -42,8 +49,8 @@ "description": "Sets the color mode for all color-capable lights attached to this ScreenLogic gateway.", "fields": { "config_entry": { - "name": "Config Entry", - "description": "The config entry to use for this action." + "name": "[%key:component::screenlogic::common::service_config_entry_name%]", + "description": "[%key:component::screenlogic::common::service_config_entry_description%]" }, "color_mode": { "name": "Color Mode", @@ -56,8 +63,8 @@ "description": "Begins super chlorination, running for the specified period or 24 hours if none is specified.", "fields": { "config_entry": { - "name": "Config Entry", - "description": "The config entry to use for this action." 
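An aside on the translation mechanics used throughout this patch (not part of the diff itself): a translation_key on an entity description points at an entry under entity.<platform>.<key> in strings.json, translation_placeholders are substituted into that entry's name, and the [%key:component::...] values seen above are build-time references that reuse an existing translation string instead of duplicating it. A rough sketch of the lookup, assuming the chem_supply_level string added later in this patch; the helper below is illustrative, not Home Assistant's actual implementation:

    # Illustrative only: how one translated string yields distinct ORP/pH entity names.
    strings = {"chem_supply_level": {"name": "{chem} supply level"}}

    def resolve_name(translation_key: str, placeholders: dict[str, str]) -> str:
        return strings[translation_key]["name"].format(**placeholders)

    print(resolve_name("chem_supply_level", {"chem": "ORP"}))  # ORP supply level
    print(resolve_name("chem_supply_level", {"chem": "pH"}))   # pH supply level
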
+ "name": "[%key:component::screenlogic::common::service_config_entry_name%]", + "description": "[%key:component::screenlogic::common::service_config_entry_description%]" }, "runtime": { "name": "Run Time", @@ -70,10 +77,134 @@ "description": "Stops super chlorination.", "fields": { "config_entry": { - "name": "Config Entry", - "description": "The config entry to use for this action." + "name": "[%key:component::screenlogic::common::service_config_entry_name%]", + "description": "[%key:component::screenlogic::common::service_config_entry_description%]" } } } + }, + "entity": { + "binary_sensor": { + "active_alert": { + "name": "Active alert" + }, + "pool_delay": { + "name": "Pool delay" + }, + "spa_delay": { + "name": "Spa delay" + }, + "cleaner_delay": { + "name": "Cleaner delay" + }, + "freeze_mode": { + "name": "Freeze mode" + }, + "flow_alarm": { + "name": "Flow alarm" + }, + "chem_high_alarm": { + "name": "{chem} high alarm" + }, + "chem_low_alarm": { + "name": "{chem} low alarm" + }, + "chem_supply_alarm": { + "name": "{chem} supply alarm" + }, + "probe_fault_alarm": { + "name": "Probe fault" + }, + "chem_limit": { + "name": "{chem} dose limit reached" + }, + "ph_lockout": { + "name": "pH lockout" + }, + "corosive": { + "name": "SI corrosive" + }, + "scaling": { + "name": "SI scaling" + }, + "scg_state": { + "name": "Chlorinator" + } + }, + "climate": { + "body_0": { + "name": "Pool heat" + }, + "body_1": { + "name": "Spa heat" + } + }, + "number": { + "calcium_hardness": { + "name": "Calcium hardness" + }, + "cya": { + "name": "Cyanuric acid" + }, + "total_alkalinity": { + "name": "Total alkalinity" + }, + "salt_tds_ppm": { + "name": "Salt/TDS" + }, + "pool_setpoint": { + "name": "Pool chlorinator setpoint" + }, + "spa_setpoint": { + "name": "Spa chlorinator setpoint" + } + }, + "sensor": { + "air_temperature": { + "name": "Air temperature" + }, + "chem_now": { + "name": "{chem} now" + }, + "chem_supply_level": { + "name": "{chem} supply level" + }, + "ph_probe_water_temp": { + "name": "pH probe water temperature" + }, + "saturation": { + "name": "Saturation index" + }, + "chem_setpoint": { + "name": "{chem} setpoint" + }, + "calcium_hardness": { + "name": "[%key:component::screenlogic::entity::number::calcium_hardness::name%]" + }, + "cya": { + "name": "[%key:component::screenlogic::entity::number::cya::name%]" + }, + "total_alkalinity": { + "name": "[%key:component::screenlogic::entity::number::total_alkalinity::name%]" + }, + "salt_tds_ppm": { + "name": "[%key:component::screenlogic::entity::number::salt_tds_ppm::name%]" + }, + "chem_dose_state": { + "name": "{chem} dosing state" + }, + "chem_last_dose_time": { + "name": "{chem} last dose time" + }, + "chem_last_dose_volume": { + "name": "{chem} last dose volume" + }, + "salt_ppm": { + "name": "Chlorinator salt" + }, + "super_chlor_timer": { + "name": "Super chlorination timer" + } + } } } diff --git a/tests/components/screenlogic/test_init.py b/tests/components/screenlogic/test_init.py index f21a1118b4f..b177f860606 100644 --- a/tests/components/screenlogic/test_init.py +++ b/tests/components/screenlogic/test_init.py @@ -43,7 +43,7 @@ TEST_MIGRATING_ENTITIES = [ EntityMigrationData( "Chemistry Alarm", "chem_alarm", - "Active Alert", + "Active alert", "active_alert", BINARY_SENSOR_DOMAIN, ), From 3788e942a7cedca27d4a67830c8aa0b9cc48e043 Mon Sep 17 00:00:00 2001 From: Dan Raper Date: Sat, 21 Dec 2024 08:25:34 +0000 Subject: [PATCH 0936/1198] Bump Ohme library version to 1.2.0 (#133666) Bump library version --- 
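A brief note on the dependency bump that follows (workflow context, an assumption about the usual core-repository tooling rather than something this patch runs): the pinned version lives in the integration's manifest.json, while requirements_all.txt and requirements_test_all.txt are regenerated from the manifests with "python -m script.gen_requirements_all" instead of being edited by hand, which is why all three files change together.
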
homeassistant/components/ohme/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/ohme/manifest.json b/homeassistant/components/ohme/manifest.json index c9e1ccf9ac2..4ab0697bbb7 100644 --- a/homeassistant/components/ohme/manifest.json +++ b/homeassistant/components/ohme/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "cloud_polling", "quality_scale": "silver", - "requirements": ["ohme==1.1.1"] + "requirements": ["ohme==1.2.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 2cae5d93bf8..4a05da9d61a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1522,7 +1522,7 @@ odp-amsterdam==6.0.2 oemthermostat==1.1.1 # homeassistant.components.ohme -ohme==1.1.1 +ohme==1.2.0 # homeassistant.components.ollama ollama==0.3.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a286cc9a0da..032165b6182 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1270,7 +1270,7 @@ objgraph==3.5.0 odp-amsterdam==6.0.2 # homeassistant.components.ohme -ohme==1.1.1 +ohme==1.2.0 # homeassistant.components.ollama ollama==0.3.3 From 9c70ec4150a882f6e23ad53ba8e0664dc48d2f44 Mon Sep 17 00:00:00 2001 From: Florent Thoumie Date: Sat, 21 Dec 2024 00:26:38 -0800 Subject: [PATCH 0937/1198] iaqualink: fix load_verify_locations() blocking call (#133459) * Try to fix blocking call * Fix lint --- homeassistant/components/iaqualink/config_flow.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/iaqualink/config_flow.py b/homeassistant/components/iaqualink/config_flow.py index 2cb1ba4b5d7..a307c1af98d 100644 --- a/homeassistant/components/iaqualink/config_flow.py +++ b/homeassistant/components/iaqualink/config_flow.py @@ -14,6 +14,7 @@ import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.helpers.httpx_client import get_async_client from .const import DOMAIN @@ -34,7 +35,9 @@ class AqualinkFlowHandler(ConfigFlow, domain=DOMAIN): password = user_input[CONF_PASSWORD] try: - async with AqualinkClient(username, password): + async with AqualinkClient( + username, password, httpx_client=get_async_client(self.hass) + ): pass except AqualinkServiceUnauthorizedException: errors["base"] = "invalid_auth" From 954b6133cbe9fdd618c903cc11b95bbd283446f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ab=C3=ADlio=20Costa?= Date: Sat, 21 Dec 2024 08:35:47 +0000 Subject: [PATCH 0938/1198] Use common mock fixture in Idasen Desk config flow tests (#133679) --- .../components/idasen_desk/quality_scale.yaml | 5 +- tests/components/idasen_desk/conftest.py | 22 ++++-- .../idasen_desk/test_config_flow.py | 77 ++++++++----------- 3 files changed, 48 insertions(+), 56 deletions(-) diff --git a/homeassistant/components/idasen_desk/quality_scale.yaml b/homeassistant/components/idasen_desk/quality_scale.yaml index f91fd16176d..9aca846e32c 100644 --- a/homeassistant/components/idasen_desk/quality_scale.yaml +++ b/homeassistant/components/idasen_desk/quality_scale.yaml @@ -10,10 +10,7 @@ rules: This integration does not use polling. 
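Background on the iaqualink change above (an explanatory aside, not part of the patch): building a TLS-verifying HTTP client inside the event loop ends up calling ssl.SSLContext.load_verify_locations(), which reads CA certificates from disk and blocks the loop. Home Assistant's httpx helper hands out a shared client whose TLS context is prepared ahead of time, so the config flow now passes that client into AqualinkClient instead of letting the library build its own. A rough sketch of the two patterns, with hass assumed to be the running HomeAssistant instance:

    import httpx

    from homeassistant.helpers.httpx_client import get_async_client

    # Blocking pattern: constructing the client here loads CA certificates synchronously.
    client = httpx.AsyncClient(verify=True)

    # Pattern used by the fix: reuse the shared client managed by Home Assistant.
    client = get_async_client(hass)
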
brands: done common-modules: done - config-flow-test-coverage: - status: todo - comment: | - - use mock_desk_api + config-flow-test-coverage: done config-flow: done dependency-transparency: done docs-actions: diff --git a/tests/components/idasen_desk/conftest.py b/tests/components/idasen_desk/conftest.py index 24ef8311445..20cc95e8ae4 100644 --- a/tests/components/idasen_desk/conftest.py +++ b/tests/components/idasen_desk/conftest.py @@ -19,9 +19,14 @@ def mock_bluetooth(enable_bluetooth: None) -> Generator[None]: @pytest.fixture(autouse=False) def mock_desk_api(): """Set up idasen desk API fixture.""" - with mock.patch( - "homeassistant.components.idasen_desk.coordinator.Desk" - ) as desk_patched: + with ( + mock.patch( + "homeassistant.components.idasen_desk.coordinator.Desk" + ) as desk_patched, + mock.patch( + "homeassistant.components.idasen_desk.config_flow.Desk", new=desk_patched + ), + ): mock_desk = MagicMock() def mock_init( @@ -33,17 +38,20 @@ def mock_desk_api(): desk_patched.side_effect = mock_init - async def mock_connect(ble_device): + async def mock_connect(ble_device, retry: bool = True): mock_desk.is_connected = True - mock_desk.trigger_update_callback(None) + if mock_desk.trigger_update_callback: + mock_desk.trigger_update_callback(None) async def mock_disconnect(): mock_desk.is_connected = False - mock_desk.trigger_update_callback(None) + if mock_desk.trigger_update_callback: + mock_desk.trigger_update_callback(None) async def mock_move_to(height: float): mock_desk.height_percent = height - mock_desk.trigger_update_callback(height) + if mock_desk.trigger_update_callback: + mock_desk.trigger_update_callback(height) async def mock_move_up(): await mock_move_to(100) diff --git a/tests/components/idasen_desk/test_config_flow.py b/tests/components/idasen_desk/test_config_flow.py index be729545b88..baeed6be1ab 100644 --- a/tests/components/idasen_desk/test_config_flow.py +++ b/tests/components/idasen_desk/test_config_flow.py @@ -1,6 +1,6 @@ """Test the IKEA Idasen Desk config flow.""" -from unittest.mock import ANY, patch +from unittest.mock import ANY, MagicMock, patch from bleak.exc import BleakError from idasen_ha.errors import AuthFailedError @@ -17,7 +17,7 @@ from . 
import IDASEN_DISCOVERY_INFO, NOT_IDASEN_DISCOVERY_INFO from tests.common import MockConfigEntry -async def test_user_step_success(hass: HomeAssistant) -> None: +async def test_user_step_success(hass: HomeAssistant, mock_desk_api: MagicMock) -> None: """Test user step success path.""" with patch( "homeassistant.components.idasen_desk.config_flow.async_discovered_service_info", @@ -30,14 +30,9 @@ async def test_user_step_success(hass: HomeAssistant) -> None: assert result["step_id"] == "user" assert result["errors"] == {} - with ( - patch("homeassistant.components.idasen_desk.config_flow.Desk.connect"), - patch("homeassistant.components.idasen_desk.config_flow.Desk.disconnect"), - patch( - "homeassistant.components.idasen_desk.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): + with patch( + "homeassistant.components.idasen_desk.async_setup_entry", return_value=True + ) as mock_setup_entry: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -99,7 +94,10 @@ async def test_user_step_no_new_devices_found(hass: HomeAssistant) -> None: ], ) async def test_user_step_cannot_connect( - hass: HomeAssistant, exception: Exception, expected_error: str + hass: HomeAssistant, + mock_desk_api: MagicMock, + exception: Exception, + expected_error: str, ) -> None: """Test user step with a cannot connect error.""" with patch( @@ -113,33 +111,26 @@ async def test_user_step_cannot_connect( assert result["step_id"] == "user" assert result["errors"] == {} - with ( - patch( - "homeassistant.components.idasen_desk.config_flow.Desk.connect", - side_effect=exception, - ), - patch("homeassistant.components.idasen_desk.config_flow.Desk.disconnect"), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_ADDRESS: IDASEN_DISCOVERY_INFO.address, - }, - ) - await hass.async_block_till_done() + default_connect_side_effect = mock_desk_api.connect.side_effect + mock_desk_api.connect.side_effect = exception + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_ADDRESS: IDASEN_DISCOVERY_INFO.address, + }, + ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM assert result2["step_id"] == "user" assert result2["errors"] == {"base": expected_error} - with ( - patch("homeassistant.components.idasen_desk.config_flow.Desk.connect"), - patch("homeassistant.components.idasen_desk.config_flow.Desk.disconnect"), - patch( - "homeassistant.components.idasen_desk.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): + mock_desk_api.connect.side_effect = default_connect_side_effect + with patch( + "homeassistant.components.idasen_desk.async_setup_entry", + return_value=True, + ) as mock_setup_entry: result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], { @@ -157,7 +148,9 @@ async def test_user_step_cannot_connect( assert len(mock_setup_entry.mock_calls) == 1 -async def test_bluetooth_step_success(hass: HomeAssistant) -> None: +async def test_bluetooth_step_success( + hass: HomeAssistant, mock_desk_api: MagicMock +) -> None: """Test bluetooth step success path.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -168,16 +161,10 @@ async def test_bluetooth_step_success(hass: HomeAssistant) -> None: assert result["step_id"] == "user" assert result["errors"] == {} - with ( - patch( - "homeassistant.components.idasen_desk.config_flow.Desk.connect" - ) as desk_connect, - 
patch("homeassistant.components.idasen_desk.config_flow.Desk.disconnect"), - patch( - "homeassistant.components.idasen_desk.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): + with patch( + "homeassistant.components.idasen_desk.async_setup_entry", + return_value=True, + ) as mock_setup_entry: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -193,4 +180,4 @@ async def test_bluetooth_step_success(hass: HomeAssistant) -> None: } assert result2["result"].unique_id == IDASEN_DISCOVERY_INFO.address assert len(mock_setup_entry.mock_calls) == 1 - desk_connect.assert_called_with(ANY, retry=False) + mock_desk_api.connect.assert_called_with(ANY, retry=False) From 82f54eb9d221ddc46a731b912e6203ca8f6cd173 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Sat, 21 Dec 2024 09:38:59 +0100 Subject: [PATCH 0939/1198] Adjust the default backup name (#133668) --- homeassistant/components/backup/manager.py | 5 ++++- tests/components/backup/snapshots/test_websocket.ambr | 6 +++--- tests/components/backup/test_manager.py | 6 +++--- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index a27c1cc7170..9b20c82d709 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -726,7 +726,10 @@ class BackupManager: "Cannot include all addons and specify specific addons" ) - backup_name = name or f"Core {HAVERSION}" + backup_name = ( + name + or f"{"Automatic" if with_automatic_settings else "Custom"} {HAVERSION}" + ) new_backup, self._backup_task = await self._reader_writer.async_create_backup( agent_ids=agent_ids, backup_name=backup_name, diff --git a/tests/components/backup/snapshots/test_websocket.ambr b/tests/components/backup/snapshots/test_websocket.ambr index 4de06861b67..16640a95ddb 100644 --- a/tests/components/backup/snapshots/test_websocket.ambr +++ b/tests/components/backup/snapshots/test_websocket.ambr @@ -2574,7 +2574,7 @@ dict({ 'id': 2, 'result': dict({ - 'backup_job_id': '27f5c632', + 'backup_job_id': 'fceef4e6', }), 'success': True, 'type': 'result', @@ -2645,7 +2645,7 @@ dict({ 'id': 2, 'result': dict({ - 'backup_job_id': '27f5c632', + 'backup_job_id': 'fceef4e6', }), 'success': True, 'type': 'result', @@ -2716,7 +2716,7 @@ dict({ 'id': 2, 'result': dict({ - 'backup_job_id': '27f5c632', + 'backup_job_id': 'fceef4e6', }), 'success': True, 'type': 'result', diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index 1c45c86149b..9b652edb087 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -121,7 +121,7 @@ async def test_async_create_backup( assert create_backup.called assert create_backup.call_args == call( agent_ids=["backup.local"], - backup_name="Core 2025.1.0", + backup_name="Custom 2025.1.0", extra_metadata={ "instance_id": hass.data["core.uuid"], "with_automatic_settings": False, @@ -254,7 +254,7 @@ async def test_async_initiate_backup( ws_client = await hass_ws_client(hass) include_database = params.get("include_database", True) - name = params.get("name", "Core 2025.1.0") + name = params.get("name", "Custom 2025.1.0") password = params.get("password") path_glob.return_value = [] @@ -502,7 +502,7 @@ async def test_async_initiate_backup_with_agent_error( "folders": [], "homeassistant_included": True, "homeassistant_version": "2025.1.0", - "name": "Core 2025.1.0", + "name": "Custom 2025.1.0", 
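One small observation about the f-string in the backup-name change above (an aside, nothing in the patch depends on it): reusing double quotes inside a double-quoted f-string is only legal on Python 3.12 and newer (PEP 701), which current Home Assistant core already requires, so the expression is valid. An equivalent spelling that avoids the nested quotes would be:

    # Equivalent to the expression in manager.py, without quote reuse inside the f-string.
    flavor = "Automatic" if with_automatic_settings else "Custom"
    backup_name = name or f"{flavor} {HAVERSION}"
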
"protected": False, "size": 123, "with_automatic_settings": False, From e43f4466e0f7e04018e5a68aa0f23667a7d76f0d Mon Sep 17 00:00:00 2001 From: Andrew Sayre <6730289+andrewsayre@users.noreply.github.com> Date: Sat, 21 Dec 2024 02:40:33 -0600 Subject: [PATCH 0940/1198] Improve HEOS group handling (#132213) * Move register method to GroupManager * Remove GroupManager mapping when entity removed * Add test for when unloaded * Error when group member not found * Use entity registery to remove entity * Update tests per feedback --- homeassistant/components/heos/__init__.py | 43 ++++++++++--------- homeassistant/components/heos/media_player.py | 14 ++++-- tests/components/heos/test_media_player.py | 32 ++++++++++++++ 3 files changed, 66 insertions(+), 23 deletions(-) diff --git a/homeassistant/components/heos/__init__.py b/homeassistant/components/heos/__init__.py index e6a46f5a4ca..b9b9b30a280 100644 --- a/homeassistant/components/heos/__init__.py +++ b/homeassistant/components/heos/__init__.py @@ -11,7 +11,7 @@ from pyheos import Heos, HeosError, HeosPlayer, const as heos_const from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_STOP, Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.dispatcher import ( @@ -259,21 +259,19 @@ class GroupManager: return group_info_by_entity_id async def async_join_players( - self, leader_entity_id: str, member_entity_ids: list[str] + self, leader_id: int, leader_entity_id: str, member_entity_ids: list[str] ) -> None: """Create a group a group leader and member players.""" + # Resolve HEOS player_id for each member entity_id entity_id_to_player_id_map = self._get_entity_id_to_player_id_map() - leader_id = entity_id_to_player_id_map.get(leader_entity_id) - if not leader_id: - raise HomeAssistantError( - f"The group leader {leader_entity_id} could not be resolved to a HEOS" - " player." - ) - member_ids = [ - entity_id_to_player_id_map[member] - for member in member_entity_ids - if member in entity_id_to_player_id_map - ] + member_ids: list[int] = [] + for member in member_entity_ids: + member_id = entity_id_to_player_id_map.get(member) + if not member_id: + raise HomeAssistantError( + f"The group member {member} could not be resolved to a HEOS player." + ) + member_ids.append(member_id) try: await self.controller.create_group(leader_id, member_ids) @@ -285,14 +283,8 @@ class GroupManager: err, ) - async def async_unjoin_player(self, player_entity_id: str): + async def async_unjoin_player(self, player_id: int, player_entity_id: str): """Remove `player_entity_id` from any group.""" - player_id = self._get_entity_id_to_player_id_map().get(player_entity_id) - if not player_id: - raise HomeAssistantError( - f"The player {player_entity_id} could not be resolved to a HEOS player." 
- ) - try: await self.controller.create_group(player_id, []) except HeosError as err: @@ -345,6 +337,17 @@ class GroupManager: self._disconnect_player_added() self._disconnect_player_added = None + @callback + def register_media_player(self, player_id: int, entity_id: str) -> CALLBACK_TYPE: + """Register a media player player_id with it's entity_id so it can be resolved later.""" + self.entity_id_map[player_id] = entity_id + return lambda: self.unregister_media_player(player_id) + + @callback + def unregister_media_player(self, player_id) -> None: + """Remove a media player player_id from the entity_id map.""" + self.entity_id_map.pop(player_id, None) + @property def group_membership(self): """Provide access to group members for player entities.""" diff --git a/homeassistant/components/heos/media_player.py b/homeassistant/components/heos/media_player.py index 5255d369c2f..be816849e32 100644 --- a/homeassistant/components/heos/media_player.py +++ b/homeassistant/components/heos/media_player.py @@ -160,7 +160,11 @@ class HeosMediaPlayer(MediaPlayerEntity): async_dispatcher_connect(self.hass, SIGNAL_HEOS_UPDATED, self._heos_updated) ) # Register this player's entity_id so it can be resolved by the group manager - self._group_manager.entity_id_map[self._player.player_id] = self.entity_id + self.async_on_remove( + self._group_manager.register_media_player( + self._player.player_id, self.entity_id + ) + ) async_dispatcher_send(self.hass, SIGNAL_HEOS_PLAYER_ADDED) @log_command_error("clear playlist") @@ -171,7 +175,9 @@ class HeosMediaPlayer(MediaPlayerEntity): @log_command_error("join_players") async def async_join_players(self, group_members: list[str]) -> None: """Join `group_members` as a player group with the current player.""" - await self._group_manager.async_join_players(self.entity_id, group_members) + await self._group_manager.async_join_players( + self._player.player_id, self.entity_id, group_members + ) @log_command_error("pause") async def async_media_pause(self) -> None: @@ -294,7 +300,9 @@ class HeosMediaPlayer(MediaPlayerEntity): @log_command_error("unjoin_player") async def async_unjoin_player(self) -> None: """Remove this player from any group.""" - await self._group_manager.async_unjoin_player(self.entity_id) + await self._group_manager.async_unjoin_player( + self._player.player_id, self.entity_id + ) async def async_will_remove_from_hass(self) -> None: """Disconnect the device when removed.""" diff --git a/tests/components/heos/test_media_player.py b/tests/components/heos/test_media_player.py index fa3f01107c1..355cb47a0d9 100644 --- a/tests/components/heos/test_media_player.py +++ b/tests/components/heos/test_media_player.py @@ -51,6 +51,7 @@ from homeassistant.const import ( STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.setup import async_setup_component @@ -1051,3 +1052,34 @@ async def test_media_player_unjoin_group( blocking=True, ) assert "Failed to ungroup media_player.test_player" in caplog.text + + +async def test_media_player_group_fails_when_entity_removed( + hass: HomeAssistant, + config_entry, + config, + controller, + entity_registry: er.EntityRegistry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test grouping fails when entity removed.""" + await setup_platform(hass, config_entry, config) + + # Remove one of the 
players + entity_registry.async_remove("media_player.test_player_2") + + # Attempt to group + with pytest.raises( + HomeAssistantError, + match="The group member media_player.test_player_2 could not be resolved to a HEOS player.", + ): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_JOIN, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_GROUP_MEMBERS: ["media_player.test_player_2"], + }, + blocking=True, + ) + controller.create_group.assert_not_called() From 02785a4ded3961e9b9c6a4862f0b1ae0c757a6b8 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 20 Dec 2024 23:37:16 -1000 Subject: [PATCH 0941/1198] Simplify query to find oldest state (#133700) --- homeassistant/components/recorder/queries.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/recorder/queries.py b/homeassistant/components/recorder/queries.py index 8ca7bef2691..34e9ec32f99 100644 --- a/homeassistant/components/recorder/queries.py +++ b/homeassistant/components/recorder/queries.py @@ -640,9 +640,9 @@ def find_states_to_purge( def find_oldest_state() -> StatementLambdaElement: """Find the last_updated_ts of the oldest state.""" return lambda_stmt( - lambda: select(States.last_updated_ts).where( - States.state_id.in_(select(func.min(States.state_id))) - ) + lambda: select(States.last_updated_ts) + .order_by(States.last_updated_ts.asc()) + .limit(1) ) From 43fab48d4e92c613570ed56e149236c962c93f8c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 20 Dec 2024 23:53:15 -1000 Subject: [PATCH 0942/1198] Improve purge performance for PostgreSQL with large databases (#133699) --- homeassistant/components/recorder/models/database.py | 4 ++++ homeassistant/components/recorder/purge.py | 4 ++++ homeassistant/components/recorder/queries.py | 4 ++-- homeassistant/components/recorder/util.py | 6 ++++++ tests/components/recorder/test_util.py | 2 +- 5 files changed, 17 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/recorder/models/database.py b/homeassistant/components/recorder/models/database.py index 94c5a7cc027..b86fd299793 100644 --- a/homeassistant/components/recorder/models/database.py +++ b/homeassistant/components/recorder/models/database.py @@ -32,4 +32,8 @@ class DatabaseOptimizer: # # https://jira.mariadb.org/browse/MDEV-25020 # + # PostgreSQL does not support a skip/loose index scan so its + # also slow for large distinct queries: + # https://wiki.postgresql.org/wiki/Loose_indexscan + # https://github.com/home-assistant/core/issues/126084 slow_range_in_select: bool diff --git a/homeassistant/components/recorder/purge.py b/homeassistant/components/recorder/purge.py index 11f5accc978..881952c390d 100644 --- a/homeassistant/components/recorder/purge.py +++ b/homeassistant/components/recorder/purge.py @@ -346,6 +346,10 @@ def _select_unused_attributes_ids( # We now break the query into groups of 100 and use a lambda_stmt to ensure # that the query is only cached once. # + # PostgreSQL also suffers from the same issue as older MariaDB with the distinct query + # when the database gets large because it doesn't support skip/loose index scan. 
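To make the batching idiom around this comment easier to follow (an illustrative aside, not part of the patch): repeating one iterator in a list and zipping it against itself yields fixed-size chunks, which is how the attribute ids are split into groups of 100 for the per-chunk existence queries mentioned above and built just below.

    from itertools import zip_longest

    ids = list(range(7))
    groups = [iter(ids)] * 3  # three references to the same iterator
    print(list(zip_longest(*groups, fillvalue=None)))
    # [(0, 1, 2), (3, 4, 5), (6, None, None)]
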
+ # https://wiki.postgresql.org/wiki/Loose_indexscan + # https://github.com/home-assistant/core/issues/126084 groups = [iter(attributes_ids)] * 100 for attr_ids in zip_longest(*groups, fillvalue=None): seen_ids |= { diff --git a/homeassistant/components/recorder/queries.py b/homeassistant/components/recorder/queries.py index 34e9ec32f99..7ac4c19bc94 100644 --- a/homeassistant/components/recorder/queries.py +++ b/homeassistant/components/recorder/queries.py @@ -78,7 +78,7 @@ def find_states_metadata_ids(entity_ids: Iterable[str]) -> StatementLambdaElemen def _state_attrs_exist(attr: int | None) -> Select: """Check if a state attributes id exists in the states table.""" - return select(func.min(States.attributes_id)).where(States.attributes_id == attr) + return select(States.attributes_id).where(States.attributes_id == attr).limit(1) def attributes_ids_exist_in_states_with_fast_in_distinct( @@ -315,7 +315,7 @@ def data_ids_exist_in_events_with_fast_in_distinct( def _event_data_id_exist(data_id: int | None) -> Select: """Check if a event data id exists in the events table.""" - return select(func.min(Events.data_id)).where(Events.data_id == data_id) + return select(Events.data_id).where(Events.data_id == data_id).limit(1) def data_ids_exist_in_events( diff --git a/homeassistant/components/recorder/util.py b/homeassistant/components/recorder/util.py index ba4c5194689..4cf24eb79c5 100644 --- a/homeassistant/components/recorder/util.py +++ b/homeassistant/components/recorder/util.py @@ -600,6 +600,12 @@ def setup_connection_for_dialect( execute_on_connection(dbapi_connection, "SET time_zone = '+00:00'") elif dialect_name == SupportedDialect.POSTGRESQL: max_bind_vars = DEFAULT_MAX_BIND_VARS + # PostgreSQL does not support a skip/loose index scan so its + # also slow for large distinct queries: + # https://wiki.postgresql.org/wiki/Loose_indexscan + # https://github.com/home-assistant/core/issues/126084 + # so we set slow_range_in_select to True + slow_range_in_select = True if first_connection: # server_version_num was added in 2006 result = query_on_connection(dbapi_connection, "SHOW server_version") diff --git a/tests/components/recorder/test_util.py b/tests/components/recorder/test_util.py index 99bd5083489..aeeeba1865a 100644 --- a/tests/components/recorder/test_util.py +++ b/tests/components/recorder/test_util.py @@ -502,7 +502,7 @@ def test_supported_pgsql(caplog: pytest.LogCaptureFixture, pgsql_version) -> Non assert "minimum supported version" not in caplog.text assert database_engine is not None - assert database_engine.optimizer.slow_range_in_select is False + assert database_engine.optimizer.slow_range_in_select is True @pytest.mark.parametrize( From 4a063c3f9e8e5187245b1a5d38f9705e06496e21 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sat, 21 Dec 2024 01:54:13 -0800 Subject: [PATCH 0943/1198] Update the Google Tasks quality scale with documentation improvements (#133701) --- .../google_tasks/quality_scale.yaml | 22 ++++++++++--------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/google_tasks/quality_scale.yaml b/homeassistant/components/google_tasks/quality_scale.yaml index 79d216709e5..dd1cd67d8e2 100644 --- a/homeassistant/components/google_tasks/quality_scale.yaml +++ b/homeassistant/components/google_tasks/quality_scale.yaml @@ -19,7 +19,7 @@ rules: unique-config-entry: done entity-unique-id: done docs-installation-instructions: done - docs-removal-instructions: todo + docs-removal-instructions: done test-before-setup: done 
docs-high-level-description: done config-flow-test-coverage: done @@ -33,35 +33,37 @@ rules: config-entry-unloading: done reauthentication-flow: done action-exceptions: done - docs-installation-parameters: todo + docs-installation-parameters: done integration-owner: done parallel-updates: done test-coverage: done - docs-configuration-parameters: todo + docs-configuration-parameters: + status: exempt + comment: The integration does not have any configuration parameters. entity-unavailable: done # Gold - docs-examples: todo + docs-examples: done discovery-update-info: todo entity-device-class: todo entity-translations: todo - docs-data-update: todo + docs-data-update: done entity-disabled-by-default: todo discovery: todo exception-translations: todo devices: todo - docs-supported-devices: todo + docs-supported-devices: done icon-translations: todo - docs-known-limitations: todo + docs-known-limitations: done stale-devices: todo - docs-supported-functions: todo + docs-supported-functions: done repair-issues: todo reconfiguration-flow: todo entity-category: todo dynamic-devices: todo - docs-troubleshooting: todo + docs-troubleshooting: done diagnostics: todo - docs-use-cases: todo + docs-use-cases: done # Platinum async-dependency: todo From 859993e443f4956845dd03846f9e5673887724ee Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 10:55:00 +0100 Subject: [PATCH 0944/1198] Add update platform to Peblar Rocksolid EV Chargers integration (#133570) * Add update platform to Peblar Rocksolid EV Chargers integration * Use device class translations --- homeassistant/components/peblar/__init__.py | 55 ++++++++++- .../components/peblar/coordinator.py | 55 ++++++++++- homeassistant/components/peblar/entity.py | 26 ------ homeassistant/components/peblar/icons.json | 9 ++ homeassistant/components/peblar/sensor.py | 23 +++-- homeassistant/components/peblar/strings.json | 7 ++ homeassistant/components/peblar/update.py | 93 +++++++++++++++++++ 7 files changed, 229 insertions(+), 39 deletions(-) delete mode 100644 homeassistant/components/peblar/entity.py create mode 100644 homeassistant/components/peblar/icons.json create mode 100644 homeassistant/components/peblar/update.py diff --git a/homeassistant/components/peblar/__init__.py b/homeassistant/components/peblar/__init__.py index 559b124c772..d1da6ce83b7 100644 --- a/homeassistant/components/peblar/__init__.py +++ b/homeassistant/components/peblar/__init__.py @@ -2,6 +2,8 @@ from __future__ import annotations +import asyncio + from aiohttp import CookieJar from peblar import ( AccessMode, @@ -14,22 +16,34 @@ from peblar import ( from homeassistant.const import CONF_HOST, CONF_PASSWORD, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr from homeassistant.helpers.aiohttp_client import async_create_clientsession -from .coordinator import PeblarConfigEntry, PeblarMeterDataUpdateCoordinator +from .const import DOMAIN +from .coordinator import ( + PeblarConfigEntry, + PeblarMeterDataUpdateCoordinator, + PeblarRuntimeData, + PeblarVersionDataUpdateCoordinator, +) -PLATFORMS = [Platform.SENSOR] +PLATFORMS = [ + Platform.SENSOR, + Platform.UPDATE, +] async def async_setup_entry(hass: HomeAssistant, entry: PeblarConfigEntry) -> bool: """Set up Peblar from a config entry.""" + # Set up connection to the Peblar charger peblar = Peblar( host=entry.data[CONF_HOST], session=async_create_clientsession(hass, 
cookie_jar=CookieJar(unsafe=True)), ) try: await peblar.login(password=entry.data[CONF_PASSWORD]) + system_information = await peblar.system_information() api = await peblar.rest_api(enable=True, access_mode=AccessMode.READ_WRITE) except PeblarConnectionError as err: raise ConfigEntryNotReady("Could not connect to Peblar charger") from err @@ -40,10 +54,41 @@ async def async_setup_entry(hass: HomeAssistant, entry: PeblarConfigEntry) -> bo "Unknown error occurred while connecting to Peblar charger" ) from err - coordinator = PeblarMeterDataUpdateCoordinator(hass, entry, api) - await coordinator.async_config_entry_first_refresh() + # Setup the data coordinators + meter_coordinator = PeblarMeterDataUpdateCoordinator(hass, entry, api) + version_coordinator = PeblarVersionDataUpdateCoordinator(hass, entry, peblar) + await asyncio.gather( + meter_coordinator.async_config_entry_first_refresh(), + version_coordinator.async_config_entry_first_refresh(), + ) - entry.runtime_data = coordinator + # Store the runtime data + entry.runtime_data = PeblarRuntimeData( + system_information=system_information, + meter_coordinator=meter_coordinator, + version_coordinator=version_coordinator, + ) + + # Peblar is a single device integration. Setting up the device directly + # during setup. This way we only have to reference it in all entities. + device_registry = dr.async_get(hass) + device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + configuration_url=f"http://{entry.data[CONF_HOST]}", + connections={ + (dr.CONNECTION_NETWORK_MAC, system_information.ethernet_mac_address), + (dr.CONNECTION_NETWORK_MAC, system_information.wlan_mac_address), + }, + identifiers={(DOMAIN, system_information.product_serial_number)}, + manufacturer=system_information.product_vendor_name, + model_id=system_information.product_number, + model=system_information.product_model_name, + name="Peblar EV Charger", + serial_number=system_information.product_serial_number, + sw_version=version_coordinator.data.current.firmware, + ) + + # Forward the setup to the platforms await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True diff --git a/homeassistant/components/peblar/coordinator.py b/homeassistant/components/peblar/coordinator.py index 8270905648f..f83ed8f4dda 100644 --- a/homeassistant/components/peblar/coordinator.py +++ b/homeassistant/components/peblar/coordinator.py @@ -1,16 +1,67 @@ """Data update coordinator for Peblar EV chargers.""" +from __future__ import annotations + +from dataclasses import dataclass from datetime import timedelta -from peblar import PeblarApi, PeblarError, PeblarMeter +from peblar import Peblar, PeblarApi, PeblarError, PeblarMeter, PeblarVersions from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from tests.components.peblar.conftest import PeblarSystemInformation from .const import LOGGER -type PeblarConfigEntry = ConfigEntry[PeblarMeterDataUpdateCoordinator] + +@dataclass(kw_only=True) +class PeblarRuntimeData: + """Class to hold runtime data.""" + + system_information: PeblarSystemInformation + meter_coordinator: PeblarMeterDataUpdateCoordinator + version_coordinator: PeblarVersionDataUpdateCoordinator + + +type PeblarConfigEntry = ConfigEntry[PeblarRuntimeData] + + +@dataclass(kw_only=True, frozen=True) +class PeblarVersionInformation: + """Class to hold version information.""" + + current: PeblarVersions + available: 
PeblarVersions + + +class PeblarVersionDataUpdateCoordinator( + DataUpdateCoordinator[PeblarVersionInformation] +): + """Class to manage fetching Peblar version information.""" + + def __init__( + self, hass: HomeAssistant, entry: PeblarConfigEntry, peblar: Peblar + ) -> None: + """Initialize the coordinator.""" + self.peblar = peblar + super().__init__( + hass, + LOGGER, + config_entry=entry, + name=f"Peblar {entry.title} version", + update_interval=timedelta(hours=2), + ) + + async def _async_update_data(self) -> PeblarVersionInformation: + """Fetch data from the Peblar device.""" + try: + return PeblarVersionInformation( + current=await self.peblar.current_versions(), + available=await self.peblar.available_versions(), + ) + except PeblarError as err: + raise UpdateFailed(err) from err class PeblarMeterDataUpdateCoordinator(DataUpdateCoordinator[PeblarMeter]): diff --git a/homeassistant/components/peblar/entity.py b/homeassistant/components/peblar/entity.py deleted file mode 100644 index 6951cf6c21f..00000000000 --- a/homeassistant/components/peblar/entity.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Base entity for the Peblar integration.""" - -from __future__ import annotations - -from homeassistant.const import CONF_HOST -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.update_coordinator import CoordinatorEntity - -from .const import DOMAIN -from .coordinator import PeblarConfigEntry, PeblarMeterDataUpdateCoordinator - - -class PeblarEntity(CoordinatorEntity[PeblarMeterDataUpdateCoordinator]): - """Defines a Peblar entity.""" - - _attr_has_entity_name = True - - def __init__(self, entry: PeblarConfigEntry) -> None: - """Initialize the Peblar entity.""" - super().__init__(coordinator=entry.runtime_data) - self._attr_device_info = DeviceInfo( - configuration_url=f"http://{entry.data[CONF_HOST]}", - identifiers={(DOMAIN, str(entry.unique_id))}, - manufacturer="Peblar", - name="Peblar EV charger", - ) diff --git a/homeassistant/components/peblar/icons.json b/homeassistant/components/peblar/icons.json new file mode 100644 index 00000000000..073cd08a2c7 --- /dev/null +++ b/homeassistant/components/peblar/icons.json @@ -0,0 +1,9 @@ +{ + "entity": { + "update": { + "customization": { + "default": "mdi:palette" + } + } + } +} diff --git a/homeassistant/components/peblar/sensor.py b/homeassistant/components/peblar/sensor.py index eafca23e125..d31d929fcab 100644 --- a/homeassistant/components/peblar/sensor.py +++ b/homeassistant/components/peblar/sensor.py @@ -15,10 +15,12 @@ from homeassistant.components.sensor import ( ) from homeassistant.const import UnitOfEnergy from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .coordinator import PeblarConfigEntry -from .entity import PeblarEntity +from .const import DOMAIN +from .coordinator import PeblarConfigEntry, PeblarMeterDataUpdateCoordinator @dataclass(frozen=True, kw_only=True) @@ -28,7 +30,7 @@ class PeblarSensorDescription(SensorEntityDescription): value_fn: Callable[[PeblarMeter], int | None] -SENSORS: tuple[PeblarSensorDescription, ...] = ( +DESCRIPTIONS: tuple[PeblarSensorDescription, ...] 
= ( PeblarSensorDescription( key="energy_total", device_class=SensorDeviceClass.ENERGY, @@ -48,24 +50,33 @@ async def async_setup_entry( ) -> None: """Set up Peblar sensors based on a config entry.""" async_add_entities( - PeblarSensorEntity(entry, description) for description in SENSORS + PeblarSensorEntity(entry, description) for description in DESCRIPTIONS ) -class PeblarSensorEntity(PeblarEntity, SensorEntity): +class PeblarSensorEntity( + CoordinatorEntity[PeblarMeterDataUpdateCoordinator], SensorEntity +): """Defines a Peblar sensor.""" entity_description: PeblarSensorDescription + _attr_has_entity_name = True + def __init__( self, entry: PeblarConfigEntry, description: PeblarSensorDescription, ) -> None: """Initialize the Peblar entity.""" - super().__init__(entry) + super().__init__(entry.runtime_data.meter_coordinator) self.entity_description = description self._attr_unique_id = f"{entry.unique_id}_{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={ + (DOMAIN, entry.runtime_data.system_information.product_serial_number) + }, + ) @property def native_value(self) -> int | None: diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index e5fa1e85a6a..2e23fcfcdcd 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -31,5 +31,12 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "no_serial_number": "The discovered Peblar device did not provide a serial number." } + }, + "entity": { + "update": { + "customization": { + "name": "Customization" + } + } } } diff --git a/homeassistant/components/peblar/update.py b/homeassistant/components/peblar/update.py new file mode 100644 index 00000000000..cc0f1ee0c79 --- /dev/null +++ b/homeassistant/components/peblar/update.py @@ -0,0 +1,93 @@ +"""Support for Peblar updates.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass + +from homeassistant.components.update import ( + UpdateDeviceClass, + UpdateEntity, + UpdateEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import ( + PeblarConfigEntry, + PeblarVersionDataUpdateCoordinator, + PeblarVersionInformation, +) + + +@dataclass(frozen=True, kw_only=True) +class PeblarUpdateEntityDescription(UpdateEntityDescription): + """Describe an Peblar update entity.""" + + installed_fn: Callable[[PeblarVersionInformation], str | None] + available_fn: Callable[[PeblarVersionInformation], str | None] + + +DESCRIPTIONS: tuple[PeblarUpdateEntityDescription, ...] 
= ( + PeblarUpdateEntityDescription( + key="firmware", + device_class=UpdateDeviceClass.FIRMWARE, + installed_fn=lambda x: x.current.firmware, + available_fn=lambda x: x.available.firmware, + ), + PeblarUpdateEntityDescription( + key="customization", + translation_key="customization", + installed_fn=lambda x: x.current.customization, + available_fn=lambda x: x.available.customization, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: PeblarConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Peblar update based on a config entry.""" + async_add_entities( + PeblarUpdateEntity(entry, description) for description in DESCRIPTIONS + ) + + +class PeblarUpdateEntity( + CoordinatorEntity[PeblarVersionDataUpdateCoordinator], UpdateEntity +): + """Defines a Peblar update entity.""" + + entity_description: PeblarUpdateEntityDescription + + _attr_has_entity_name = True + + def __init__( + self, + entry: PeblarConfigEntry, + description: PeblarUpdateEntityDescription, + ) -> None: + """Initialize the update entity.""" + super().__init__(entry.runtime_data.version_coordinator) + self.entity_description = description + self._attr_unique_id = f"{entry.unique_id}_{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={ + (DOMAIN, entry.runtime_data.system_information.product_serial_number) + }, + ) + + @property + def installed_version(self) -> str | None: + """Version currently installed and in use.""" + return self.entity_description.installed_fn(self.coordinator.data) + + @property + def latest_version(self) -> str | None: + """Latest version available for install.""" + return self.entity_description.available_fn(self.coordinator.data) From 4ee9f813aac9915f7982132cd7a470c06735f7a2 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Sat, 21 Dec 2024 11:13:44 +0100 Subject: [PATCH 0945/1198] Fix inconsistent use of "pin" vs. "PIN" (#133685) --- homeassistant/components/frontier_silicon/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/frontier_silicon/strings.json b/homeassistant/components/frontier_silicon/strings.json index 03d9f28c016..d3e1cd84e4a 100644 --- a/homeassistant/components/frontier_silicon/strings.json +++ b/homeassistant/components/frontier_silicon/strings.json @@ -12,7 +12,7 @@ }, "device_config": { "title": "Device configuration", - "description": "The pin can be found via 'MENU button > Main Menu > System setting > Network > NetRemote PIN setup'", + "description": "The PIN can be found via 'MENU button > Main Menu > System setting > Network > NetRemote PIN setup'", "data": { "pin": "[%key:common::config_flow::data::pin%]" } From 7998a05742367e1186813d7402263209c2f9feb9 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Sat, 21 Dec 2024 11:14:14 +0100 Subject: [PATCH 0946/1198] Replace lowercase "pin" in error message with the correct "PIN" (#133684) --- homeassistant/components/tessie/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/tessie/strings.json b/homeassistant/components/tessie/strings.json index 5b677594b42..4ac645a0270 100644 --- a/homeassistant/components/tessie/strings.json +++ b/homeassistant/components/tessie/strings.json @@ -521,7 +521,7 @@ "message": "{name} is already inactive." }, "incorrect_pin": { - "message": "Incorrect pin for {name}." + "message": "Incorrect PIN for {name}." 
}, "no_cable": { "message": "Insert cable to lock" From 4efcf18c70f7cfb43a61c7bb0f0abb6e812886b8 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Sat, 21 Dec 2024 11:14:47 +0100 Subject: [PATCH 0947/1198] Change "pin" to "PIN" for consistency with common string (#133682) --- homeassistant/components/vulcan/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/vulcan/strings.json b/homeassistant/components/vulcan/strings.json index 814621b5403..61b5a954389 100644 --- a/homeassistant/components/vulcan/strings.json +++ b/homeassistant/components/vulcan/strings.json @@ -10,7 +10,7 @@ "unknown": "[%key:common::config_flow::error::unknown%]", "invalid_token": "[%key:common::config_flow::error::invalid_access_token%]", "expired_token": "Expired token - please generate a new token", - "invalid_pin": "Invalid pin", + "invalid_pin": "Invalid PIN", "invalid_symbol": "Invalid symbol", "expired_credentials": "Expired credentials - please create new on Vulcan mobile app registration page", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" From 989a3d1e24c54d0b173cb32a1dccefbecff30b5e Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Sat, 21 Dec 2024 11:15:11 +0100 Subject: [PATCH 0948/1198] Change "pin" to correct "PIN" for consistent translations (#133681) --- homeassistant/components/ps4/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/ps4/strings.json b/homeassistant/components/ps4/strings.json index 6b1d4cd690b..778fa0215fb 100644 --- a/homeassistant/components/ps4/strings.json +++ b/homeassistant/components/ps4/strings.json @@ -21,7 +21,7 @@ "ip_address": "[%key:common::config_flow::data::ip%]" }, "data_description": { - "code": "On your PlayStation 4 console, go to **Settings**. Then, go to **Mobile App Connection Settings** and select **Add Device** to get the pin." + "code": "On your PlayStation 4 console, go to **Settings**. Then, go to **Mobile App Connection Settings** and select **Add Device** to get the PIN." } } }, From 4e316429d31df34d7e1c7eb69a368a4522fff315 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 21 Dec 2024 00:18:47 -1000 Subject: [PATCH 0949/1198] Handle WebsocketConnectionError during mqtt auto reconnect (#133697) followup to #133610 to handle the exception in the auto reconnect path as well fixes #132985 --- homeassistant/components/mqtt/client.py | 5 ++++- tests/components/mqtt/test_client.py | 10 +++++++++- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/mqtt/client.py b/homeassistant/components/mqtt/client.py index 73c6b80cb14..6500c9f91c9 100644 --- a/homeassistant/components/mqtt/client.py +++ b/homeassistant/components/mqtt/client.py @@ -695,12 +695,15 @@ class MQTT: async def _reconnect_loop(self) -> None: """Reconnect to the MQTT server.""" + # pylint: disable-next=import-outside-toplevel + import paho.mqtt.client as mqtt + while True: if not self.connected: try: async with self._connection_lock, self._async_connect_in_executor(): await self.hass.async_add_executor_job(self._mqttc.reconnect) - except OSError as err: + except (OSError, mqtt.WebsocketConnectionError) as err: _LOGGER.debug( "Error re-connecting to MQTT server due to exception: %s", err ) diff --git a/tests/components/mqtt/test_client.py b/tests/components/mqtt/test_client.py index 1878045a9b9..1daad0e3914 100644 --- a/tests/components/mqtt/test_client.py +++ b/tests/components/mqtt/test_client.py @@ -1888,10 +1888,18 @@ async def test_mqtt_subscribes_and_unsubscribes_in_chunks( assert len(mqtt_client_mock.unsubscribe.mock_calls[1][1][0]) == 2 +@pytest.mark.parametrize( + "exception", + [ + OSError, + paho_mqtt.WebsocketConnectionError, + ], +) async def test_auto_reconnect( hass: HomeAssistant, setup_with_birth_msg_client_mock: MqttMockPahoClient, caplog: pytest.LogCaptureFixture, + exception: Exception, ) -> None: """Test reconnection is automatically done.""" mqtt_client_mock = setup_with_birth_msg_client_mock @@ -1902,7 +1910,7 @@ async def test_auto_reconnect( mqtt_client_mock.on_disconnect(None, None, 0) await hass.async_block_till_done() - mqtt_client_mock.reconnect.side_effect = OSError("foo") + mqtt_client_mock.reconnect.side_effect = exception("foo") async_fire_time_changed( hass, utcnow() + timedelta(seconds=RECONNECT_INTERVAL_SECONDS) ) From 78c9e4742846f7fe3be7d9b94465c8fb8006cbc6 Mon Sep 17 00:00:00 2001 From: Richard Kroegel <42204099+rikroe@users.noreply.github.com> Date: Sat, 21 Dec 2024 11:20:46 +0100 Subject: [PATCH 0950/1198] Improve BMW config flow (#133705) --- .../bmw_connected_drive/config_flow.py | 21 ++++++++----------- 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/bmw_connected_drive/config_flow.py b/homeassistant/components/bmw_connected_drive/config_flow.py index 04fb3842dfa..5a067d23474 100644 --- a/homeassistant/components/bmw_connected_drive/config_flow.py +++ b/homeassistant/components/bmw_connected_drive/config_flow.py @@ -103,9 +103,10 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - data: dict[str, Any] = {} - - _existing_entry_data: Mapping[str, Any] | None = None + def __init__(self) -> None: + """Initialize the config flow.""" + self.data: dict[str, Any] = {} + self._existing_entry_data: dict[str, Any] = {} async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -175,19 +176,15 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Show the change password step.""" - existing_data = ( - dict(self._existing_entry_data) if self._existing_entry_data else {} - ) - 
if user_input is not None: - return await self.async_step_user(existing_data | user_input) + return await self.async_step_user(self._existing_entry_data | user_input) return self.async_show_form( step_id="change_password", data_schema=RECONFIGURE_SCHEMA, description_placeholders={ - CONF_USERNAME: existing_data[CONF_USERNAME], - CONF_REGION: existing_data[CONF_REGION], + CONF_USERNAME: self._existing_entry_data[CONF_USERNAME], + CONF_REGION: self._existing_entry_data[CONF_REGION], }, ) @@ -195,14 +192,14 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self._existing_entry_data = entry_data + self._existing_entry_data = dict(entry_data) return await self.async_step_change_password() async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a reconfiguration flow initialized by the user.""" - self._existing_entry_data = self._get_reconfigure_entry().data + self._existing_entry_data = dict(self._get_reconfigure_entry().data) return await self.async_step_change_password() async def async_step_captcha( From 66e863a2e3866a37f5776b1810cc56611194e931 Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Sat, 21 Dec 2024 11:29:24 +0100 Subject: [PATCH 0951/1198] Allow lamarzocco to reconnect websocket (#133635) --- homeassistant/components/lamarzocco/coordinator.py | 7 +++++-- tests/components/lamarzocco/test_init.py | 5 ++++- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lamarzocco/coordinator.py b/homeassistant/components/lamarzocco/coordinator.py index 0b07409adb5..2385039f53d 100644 --- a/homeassistant/components/lamarzocco/coordinator.py +++ b/homeassistant/components/lamarzocco/coordinator.py @@ -91,9 +91,11 @@ class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator): _scale_address: str | None = None - async def _async_setup(self) -> None: + async def _async_connect_websocket(self) -> None: """Set up the coordinator.""" - if self._local_client is not None: + if self._local_client is not None and ( + self._local_client.websocket is None or self._local_client.websocket.closed + ): _LOGGER.debug("Init WebSocket in background task") self.config_entry.async_create_background_task( @@ -123,6 +125,7 @@ class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator): """Fetch data from API endpoint.""" await self.device.get_config() _LOGGER.debug("Current status: %s", str(self.device.config)) + await self._async_connect_websocket() self._async_add_remove_scale() @callback diff --git a/tests/components/lamarzocco/test_init.py b/tests/components/lamarzocco/test_init.py index 7d90c049a3b..fccfcbeef13 100644 --- a/tests/components/lamarzocco/test_init.py +++ b/tests/components/lamarzocco/test_init.py @@ -199,8 +199,11 @@ async def test_websocket_closed_on_unload( ) as local_client: client = local_client.return_value client.websocket = AsyncMock() - client.websocket.closed = False + await async_init_integration(hass, mock_config_entry) + mock_lamarzocco.websocket_connect.assert_called_once() + + client.websocket.closed = False hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) await hass.async_block_till_done() client.websocket.close.assert_called_once() From 5c2d769b547bba14177156b2baebba137e2908b2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Fern=C3=A1ndez=20Rojas?= Date: Sat, 21 Dec 2024 11:30:46 +0100 Subject: [PATCH 0952/1198] Enable AEMET data cache (#131226) --- 
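Context for the BMW config-flow refactor above (an illustrative aside, not taken from the patch): a mutable class attribute is shared by every instance, so concurrent flows could observe each other's data, while assigning fresh objects in __init__ gives each flow its own state. A minimal standalone demonstration of the pitfall:

    class Flow:
        data: dict = {}  # class attribute: a single dict shared by all instances

    a = Flow()
    b = Flow()
    a.data["username"] = "x"
    print(b.data)  # {'username': 'x'}  (state leaked between instances)
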
homeassistant/components/aemet/__init__.py | 14 +++++++++++++- tests/components/aemet/test_init.py | 8 ++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/aemet/__init__.py b/homeassistant/components/aemet/__init__.py index 9ec52faec00..79dc3cc55ce 100644 --- a/homeassistant/components/aemet/__init__.py +++ b/homeassistant/components/aemet/__init__.py @@ -1,6 +1,7 @@ """The AEMET OpenData component.""" import logging +import shutil from aemet_opendata.exceptions import AemetError, TownNotFound from aemet_opendata.interface import AEMET, ConnectionOptions, UpdateFeature @@ -10,8 +11,9 @@ from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CON from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import aiohttp_client +from homeassistant.helpers.storage import STORAGE_DIR -from .const import CONF_STATION_UPDATES, PLATFORMS +from .const import CONF_STATION_UPDATES, DOMAIN, PLATFORMS from .coordinator import AemetConfigEntry, AemetData, WeatherUpdateCoordinator _LOGGER = logging.getLogger(__name__) @@ -29,6 +31,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> boo options = ConnectionOptions(api_key, update_features) aemet = AEMET(aiohttp_client.async_get_clientsession(hass), options) + aemet.set_api_data_dir(hass.config.path(STORAGE_DIR, f"{DOMAIN}-{entry.unique_id}")) + try: await aemet.select_coordinates(latitude, longitude) except TownNotFound as err: @@ -57,3 +61,11 @@ async def async_update_options(hass: HomeAssistant, entry: ConfigEntry) -> None: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: + """Remove a config entry.""" + await hass.async_add_executor_job( + shutil.rmtree, + hass.config.path(STORAGE_DIR, f"{DOMAIN}-{entry.unique_id}"), + ) diff --git a/tests/components/aemet/test_init.py b/tests/components/aemet/test_init.py index cf3204782cd..d6229438582 100644 --- a/tests/components/aemet/test_init.py +++ b/tests/components/aemet/test_init.py @@ -9,6 +9,7 @@ from homeassistant.components.aemet.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from .util import mock_api_call @@ -24,6 +25,7 @@ CONFIG = { async def test_unload_entry( hass: HomeAssistant, + entity_registry: er.EntityRegistry, freezer: FrozenDateTimeFactory, ) -> None: """Test (un)loading the AEMET integration.""" @@ -47,6 +49,12 @@ async def test_unload_entry( await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.NOT_LOADED + assert await hass.config_entries.async_remove(config_entry.entry_id) + await hass.async_block_till_done() + + assert hass.states.get("weather.aemet") is None + assert entity_registry.async_get("weather.aemet") is None + async def test_init_town_not_found( hass: HomeAssistant, From 7326555f03fd4801536d381923d1681962f65218 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 11:38:33 +0100 Subject: [PATCH 0953/1198] Add diagnostic to Peblar Rocksolid EV Chargers integration (#133706) --- .../components/peblar/diagnostics.py | 23 +++++ 
.../components/peblar/quality_scale.yaml | 2 +- tests/components/peblar/conftest.py | 30 +++++- .../peblar/fixtures/available_versions.json | 4 + .../peblar/fixtures/current_versions.json | 4 + tests/components/peblar/fixtures/meter.json | 14 +++ .../peblar/snapshots/test_diagnostics.ambr | 93 +++++++++++++++++++ tests/components/peblar/test_diagnostics.py | 22 +++++ 8 files changed, 190 insertions(+), 2 deletions(-) create mode 100644 homeassistant/components/peblar/diagnostics.py create mode 100644 tests/components/peblar/fixtures/available_versions.json create mode 100644 tests/components/peblar/fixtures/current_versions.json create mode 100644 tests/components/peblar/fixtures/meter.json create mode 100644 tests/components/peblar/snapshots/test_diagnostics.ambr create mode 100644 tests/components/peblar/test_diagnostics.py diff --git a/homeassistant/components/peblar/diagnostics.py b/homeassistant/components/peblar/diagnostics.py new file mode 100644 index 00000000000..91cdb5dc811 --- /dev/null +++ b/homeassistant/components/peblar/diagnostics.py @@ -0,0 +1,23 @@ +"""Diagnostics support for Peblar.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.core import HomeAssistant + +from .coordinator import PeblarConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: PeblarConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + return { + "system_information": entry.runtime_data.system_information.to_dict(), + "meter": entry.runtime_data.meter_coordinator.data.to_dict(), + "versions": { + "available": entry.runtime_data.version_coordinator.data.available.to_dict(), + "current": entry.runtime_data.version_coordinator.data.current.to_dict(), + }, + } diff --git a/homeassistant/components/peblar/quality_scale.yaml b/homeassistant/components/peblar/quality_scale.yaml index 51bd60cc4b4..3dc470ce76b 100644 --- a/homeassistant/components/peblar/quality_scale.yaml +++ b/homeassistant/components/peblar/quality_scale.yaml @@ -40,7 +40,7 @@ rules: test-coverage: todo # Gold devices: todo - diagnostics: todo + diagnostics: done discovery-update-info: todo discovery: todo docs-data-update: todo diff --git a/tests/components/peblar/conftest.py b/tests/components/peblar/conftest.py index 583b2cbe7a5..ece9a8d9973 100644 --- a/tests/components/peblar/conftest.py +++ b/tests/components/peblar/conftest.py @@ -5,11 +5,12 @@ from __future__ import annotations from collections.abc import Generator from unittest.mock import MagicMock, patch -from peblar.models import PeblarSystemInformation +from peblar import PeblarMeter, PeblarSystemInformation, PeblarVersions import pytest from homeassistant.components.peblar.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_PASSWORD +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_fixture @@ -43,7 +44,34 @@ def mock_peblar() -> Generator[MagicMock]: patch("homeassistant.components.peblar.config_flow.Peblar", new=peblar_mock), ): peblar = peblar_mock.return_value + peblar.available_versions.return_value = PeblarVersions.from_json( + load_fixture("available_versions.json", DOMAIN) + ) + peblar.current_versions.return_value = PeblarVersions.from_json( + load_fixture("current_versions.json", DOMAIN) + ) peblar.system_information.return_value = PeblarSystemInformation.from_json( load_fixture("system_information.json", DOMAIN) ) + + api = peblar.rest_api.return_value + api.meter.return_value = PeblarMeter.from_json( + 
load_fixture("meter.json", DOMAIN) + ) + yield peblar + + +@pytest.fixture +async def init_integration( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_peblar: MagicMock, +) -> MockConfigEntry: + """Set up the Peblar integration for testing.""" + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + return mock_config_entry diff --git a/tests/components/peblar/fixtures/available_versions.json b/tests/components/peblar/fixtures/available_versions.json new file mode 100644 index 00000000000..45b3255167c --- /dev/null +++ b/tests/components/peblar/fixtures/available_versions.json @@ -0,0 +1,4 @@ +{ + "Customization": "Peblar-1.9", + "Firmware": "1.6.2+1+WL-1" +} diff --git a/tests/components/peblar/fixtures/current_versions.json b/tests/components/peblar/fixtures/current_versions.json new file mode 100644 index 00000000000..c54fb71c457 --- /dev/null +++ b/tests/components/peblar/fixtures/current_versions.json @@ -0,0 +1,4 @@ +{ + "Customization": "Peblar-1.9", + "Firmware": "1.6.1+1+WL-1" +} diff --git a/tests/components/peblar/fixtures/meter.json b/tests/components/peblar/fixtures/meter.json new file mode 100644 index 00000000000..1f32a3fbebc --- /dev/null +++ b/tests/components/peblar/fixtures/meter.json @@ -0,0 +1,14 @@ +{ + "CurrentPhase1": 0, + "CurrentPhase2": 0, + "CurrentPhase3": 0, + "EnergySession": 0, + "EnergyTotal": 880321, + "PowerPhase1": 0, + "PowerPhase2": 0, + "PowerPhase3": 0, + "PowerTotal": 0, + "VoltagePhase1": 230, + "VoltagePhase2": null, + "VoltagePhase3": null +} diff --git a/tests/components/peblar/snapshots/test_diagnostics.ambr b/tests/components/peblar/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..7701c1eb159 --- /dev/null +++ b/tests/components/peblar/snapshots/test_diagnostics.ambr @@ -0,0 +1,93 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'meter': dict({ + 'CurrentPhase1': 0, + 'CurrentPhase2': 0, + 'CurrentPhase3': 0, + 'EnergySession': 0, + 'EnergyTotal': 880321, + 'PowerPhase1': 0, + 'PowerPhase2': 0, + 'PowerPhase3': 0, + 'PowerTotal': 0, + 'VoltagePhase1': 230, + }), + 'system_information': dict({ + 'BopCalIGainA': 264625, + 'BopCalIGainB': 267139, + 'BopCalIGainC': 239155, + 'CanChangeChargingPhases': False, + 'CanChargeSinglePhase': True, + 'CanChargeThreePhases': False, + 'CustomerId': 'PBLR-0000645', + 'CustomerUpdatePackagePubKey': ''' + -----BEGIN PUBLIC KEY----- + lorem ipsum + -----END PUBLIC KEY----- + + ''', + 'EthMacAddr': '00:0F:11:58:86:97', + 'FwIdent': '1.6.1+1+WL-1', + 'Hostname': 'PBLR-0000645', + 'HwFixedCableRating': 20, + 'HwFwCompat': 'wlac-2', + 'HwHas4pRelay': False, + 'HwHasBop': True, + 'HwHasBuzzer': True, + 'HwHasDualSocket': False, + 'HwHasEichrechtLaserMarking': False, + 'HwHasEthernet': True, + 'HwHasLed': True, + 'HwHasLte': False, + 'HwHasMeter': True, + 'HwHasMeterDisplay': True, + 'HwHasPlc': False, + 'HwHasRfid': True, + 'HwHasRs485': True, + 'HwHasShutter': False, + 'HwHasSocket': False, + 'HwHasTpm': False, + 'HwHasWlan': True, + 'HwMaxCurrent': 16, + 'HwOneOrThreePhase': 3, + 'HwUKCompliant': False, + 'MainboardPn': '6004-2300-7600', + 'MainboardSn': '23-38-A4E-2MC', + 'MeterCalIGainA': 267369, + 'MeterCalIGainB': 228286, + 'MeterCalIGainC': 246455, + 'MeterCalIRmsOffsetA': 15573, + 'MeterCalIRmsOffsetB': 268422963, + 'MeterCalIRmsOffsetC': 9082, + 'MeterCalPhaseA': 250, + 'MeterCalPhaseB': 271, + 'MeterCalPhaseC': 271, + 'MeterCalVGainA': 250551, + 
'MeterCalVGainB': 246074, + 'MeterCalVGainC': 230191, + 'MeterFwIdent': 'b9cbcd', + 'NorFlash': 'True', + 'ProductModelName': 'WLAC1-H11R0WE0ICR00', + 'ProductPn': '6004-2300-8002', + 'ProductSn': '23-45-A4O-MOF', + 'ProductVendorName': 'Peblar', + 'WlanApMacAddr': '00:0F:11:58:86:98', + 'WlanStaMacAddr': '00:0F:11:58:86:99', + }), + 'versions': dict({ + 'available': dict({ + 'Customization': 'Peblar-1.9', + 'Firmware': '1.6.2+1+WL-1', + 'customization_version': '1.9', + 'firmware_version': '1.6.2', + }), + 'current': dict({ + 'Customization': 'Peblar-1.9', + 'Firmware': '1.6.1+1+WL-1', + 'customization_version': '1.9', + 'firmware_version': '1.6.1', + }), + }), + }) +# --- diff --git a/tests/components/peblar/test_diagnostics.py b/tests/components/peblar/test_diagnostics.py new file mode 100644 index 00000000000..11f9af28b2d --- /dev/null +++ b/tests/components/peblar/test_diagnostics.py @@ -0,0 +1,22 @@ +"""Tests for the diagnostics data provided by the Peblar integration.""" + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + init_integration: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, init_integration) + == snapshot + ) From aad1d6a25d0adb8d2170a225ac7683c8418fd0e9 Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Sat, 21 Dec 2024 11:19:11 +0000 Subject: [PATCH 0954/1198] Use MAC address in Twinkly `DeviceInfo.connections` (#133708) --- homeassistant/components/twinkly/light.py | 4 +++- tests/components/twinkly/__init__.py | 2 ++ tests/components/twinkly/snapshots/test_diagnostics.ambr | 1 + 3 files changed, 6 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/twinkly/light.py b/homeassistant/components/twinkly/light.py index 6f6dffe63d2..771af2282dc 100644 --- a/homeassistant/components/twinkly/light.py +++ b/homeassistant/components/twinkly/light.py @@ -28,7 +28,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import ( @@ -97,6 +97,7 @@ class TwinklyLight(LightEntity): # They are expected to be updated using the device_info. 
self._name = conf.data[CONF_NAME] or "Twinkly light" self._model = conf.data[CONF_MODEL] + self._mac = device_info["mac"] self._client = client @@ -114,6 +115,7 @@ class TwinklyLight(LightEntity): """Get device specific attributes.""" return DeviceInfo( identifiers={(DOMAIN, self._attr_unique_id)}, + connections={(CONNECTION_NETWORK_MAC, self._mac)}, manufacturer="LEDWORKS", model=self._model, name=self._name, diff --git a/tests/components/twinkly/__init__.py b/tests/components/twinkly/__init__.py index f322004962a..192a5c0e220 100644 --- a/tests/components/twinkly/__init__.py +++ b/tests/components/twinkly/__init__.py @@ -7,6 +7,7 @@ from homeassistant.components.twinkly.const import DEV_NAME TEST_HOST = "test.twinkly.com" TEST_ID = "twinkly_test_device_id" TEST_UID = "4c8fccf5-e08a-4173-92d5-49bf479252a2" +TEST_MAC = "aa:bb:cc:dd:ee:ff" TEST_NAME = "twinkly_test_device_name" TEST_NAME_ORIGINAL = "twinkly_test_original_device_name" # the original (deprecated) name stored in the conf TEST_MODEL = "twinkly_test_device_model" @@ -31,6 +32,7 @@ class ClientMock: self.device_info = { "uuid": self.id, "device_name": TEST_NAME, + "mac": TEST_MAC, "product_code": TEST_MODEL, } diff --git a/tests/components/twinkly/snapshots/test_diagnostics.ambr b/tests/components/twinkly/snapshots/test_diagnostics.ambr index 28ec98cf572..4d25e222501 100644 --- a/tests/components/twinkly/snapshots/test_diagnostics.ambr +++ b/tests/components/twinkly/snapshots/test_diagnostics.ambr @@ -15,6 +15,7 @@ }), 'device_info': dict({ 'device_name': 'twinkly_test_device_name', + 'mac': '**REDACTED**', 'product_code': 'twinkly_test_device_model', 'uuid': '4c8fccf5-e08a-4173-92d5-49bf479252a2', }), From dbe04f17ad4156162fc5d1ec9716804b220b6484 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 12:20:03 +0100 Subject: [PATCH 0955/1198] Add sensors tests for Peblar Rocksolid EV Chargers (#133710) --- tests/components/peblar/conftest.py | 11 +++- .../peblar/snapshots/test_sensor.ambr | 58 +++++++++++++++++++ tests/components/peblar/test_sensor.py | 35 +++++++++++ 3 files changed, 102 insertions(+), 2 deletions(-) create mode 100644 tests/components/peblar/snapshots/test_sensor.ambr create mode 100644 tests/components/peblar/test_sensor.py diff --git a/tests/components/peblar/conftest.py b/tests/components/peblar/conftest.py index ece9a8d9973..2db28d3a7e6 100644 --- a/tests/components/peblar/conftest.py +++ b/tests/components/peblar/conftest.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Generator +from contextlib import nullcontext from unittest.mock import MagicMock, patch from peblar import PeblarMeter, PeblarSystemInformation, PeblarVersions @@ -67,11 +68,17 @@ async def init_integration( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_peblar: MagicMock, + request: pytest.FixtureRequest, ) -> MockConfigEntry: """Set up the Peblar integration for testing.""" mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + context = nullcontext() + if platform := getattr(request, "param", None): + context = patch("homeassistant.components.peblar.PLATFORMS", [platform]) + + with context: + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() return mock_config_entry diff --git a/tests/components/peblar/snapshots/test_sensor.ambr b/tests/components/peblar/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..29a5d7f7dd1 --- 
/dev/null +++ b/tests/components/peblar/snapshots/test_sensor.ambr @@ -0,0 +1,58 @@ +# serializer version: 1 +# name: test_entities[sensor][sensor.peblar_ev_charger_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.peblar_ev_charger_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '23-45-A4O-MOF_energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Peblar EV Charger Energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '880.321', + }) +# --- diff --git a/tests/components/peblar/test_sensor.py b/tests/components/peblar/test_sensor.py new file mode 100644 index 00000000000..e2a49942cd5 --- /dev/null +++ b/tests/components/peblar/test_sensor.py @@ -0,0 +1,35 @@ +"""Tests for the Peblar sensor platform.""" + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.peblar.const import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.parametrize("init_integration", [Platform.SENSOR], indirect=True) +@pytest.mark.usefixtures("init_integration") +async def test_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the sensor entities.""" + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + # Ensure all entities are correctly assigned to the Peblar device + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, "23-45-A4O-MOF")} + ) + assert device_entry + entity_entries = er.async_entries_for_config_entry( + entity_registry, mock_config_entry.entry_id + ) + for entity_entry in entity_entries: + assert entity_entry.device_id == device_entry.id From b106b88f5c5aeb5444a5ea180e4786ae793bae22 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Sat, 21 Dec 2024 12:21:11 +0100 Subject: [PATCH 0956/1198] Adjust freezer tick in settings tests of IronOS integration (#133707) --- tests/components/iron_os/test_init.py | 4 ++-- tests/components/iron_os/test_number.py | 2 +- tests/components/iron_os/test_select.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/components/iron_os/test_init.py b/tests/components/iron_os/test_init.py index 15327c55121..4749e1b6199 100644 --- a/tests/components/iron_os/test_init.py +++ 
b/tests/components/iron_os/test_init.py @@ -61,7 +61,7 @@ async def test_setup_config_entry_not_ready( config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - freezer.tick(timedelta(seconds=60)) + freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() @@ -81,7 +81,7 @@ async def test_settings_exception( config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - freezer.tick(timedelta(seconds=60)) + freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() diff --git a/tests/components/iron_os/test_number.py b/tests/components/iron_os/test_number.py index e0617a5012f..088b66feb64 100644 --- a/tests/components/iron_os/test_number.py +++ b/tests/components/iron_os/test_number.py @@ -50,7 +50,7 @@ async def test_state( assert config_entry.state is ConfigEntryState.LOADED - freezer.tick(timedelta(seconds=60)) + freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) diff --git a/tests/components/iron_os/test_select.py b/tests/components/iron_os/test_select.py index 5e981e1618e..cfd4d8ecbb1 100644 --- a/tests/components/iron_os/test_select.py +++ b/tests/components/iron_os/test_select.py @@ -61,7 +61,7 @@ async def test_state( assert config_entry.state is ConfigEntryState.LOADED - freezer.tick(timedelta(seconds=60)) + freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) From 6314d7a44c3fa37106fbfbabd85b2e3580d6e840 Mon Sep 17 00:00:00 2001 From: Artur Pragacz <49985303+arturpragacz@users.noreply.github.com> Date: Sat, 21 Dec 2024 12:31:17 +0100 Subject: [PATCH 0957/1198] Fix section translations check (#133683) --- tests/components/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/components/conftest.py b/tests/components/conftest.py index e95147b8664..534c471bf83 100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -684,7 +684,7 @@ async def _check_step_or_section_translations( description_placeholders, data_value.schema, ) - return + continue iqs_config_flow = _get_integration_quality_scale_rule( integration, "config-flow" ) From 5665abf99168f2e4951d8093342b570cc2a4bc8b Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Sat, 21 Dec 2024 11:31:40 +0000 Subject: [PATCH 0958/1198] Store Twinkly runtime data in config entry (#133714) --- homeassistant/components/twinkly/__init__.py | 42 +++++++++++-------- homeassistant/components/twinkly/const.py | 3 -- .../components/twinkly/diagnostics.py | 10 ++--- homeassistant/components/twinkly/light.py | 40 ++++++------------ 4 files changed, 41 insertions(+), 54 deletions(-) diff --git a/homeassistant/components/twinkly/__init__.py b/homeassistant/components/twinkly/__init__.py index b09e58ff12f..00e40d604c0 100644 --- a/homeassistant/components/twinkly/__init__.py +++ b/homeassistant/components/twinkly/__init__.py @@ -1,29 +1,40 @@ """The twinkly component.""" +from dataclasses import dataclass +from typing import Any + from aiohttp import ClientError from ttls.client import Twinkly from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_SW_VERSION, CONF_HOST, Platform +from 
homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import ATTR_VERSION, DATA_CLIENT, DATA_DEVICE_INFO, DOMAIN +from .const import ATTR_VERSION PLATFORMS = [Platform.LIGHT] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Set up entries from config flow.""" - hass.data.setdefault(DOMAIN, {}) +@dataclass +class TwinklyData: + """Data for Twinkly integration.""" + client: Twinkly + device_info: dict[str, Any] + sw_version: str | None + + +type TwinklyConfigEntry = ConfigEntry[TwinklyData] + + +async def async_setup_entry(hass: HomeAssistant, entry: TwinklyConfigEntry) -> bool: + """Set up entries from config flow.""" # We setup the client here so if at some point we add any other entity for this device, # we will be able to properly share the connection. host = entry.data[CONF_HOST] - hass.data[DOMAIN].setdefault(entry.entry_id, {}) - client = Twinkly(host, async_get_clientsession(hass)) try: @@ -32,21 +43,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except (TimeoutError, ClientError) as exception: raise ConfigEntryNotReady from exception - hass.data[DOMAIN][entry.entry_id] = { - DATA_CLIENT: client, - DATA_DEVICE_INFO: device_info, - ATTR_SW_VERSION: software_version.get(ATTR_VERSION), - } + entry.runtime_data = TwinklyData( + client, device_info, software_version.get(ATTR_VERSION) + ) + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: TwinklyConfigEntry) -> bool: """Remove a twinkly entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/twinkly/const.py b/homeassistant/components/twinkly/const.py index f33024ed156..488b213b895 100644 --- a/homeassistant/components/twinkly/const.py +++ b/homeassistant/components/twinkly/const.py @@ -15,8 +15,5 @@ DEV_LED_PROFILE = "led_profile" DEV_PROFILE_RGB = "RGB" DEV_PROFILE_RGBW = "RGBW" -DATA_CLIENT = "client" -DATA_DEVICE_INFO = "device_info" - # Minimum version required to support effects MIN_EFFECT_VERSION = "2.7.1" diff --git a/homeassistant/components/twinkly/diagnostics.py b/homeassistant/components/twinkly/diagnostics.py index e188e92ecd5..9ddc65cf255 100644 --- a/homeassistant/components/twinkly/diagnostics.py +++ b/homeassistant/components/twinkly/diagnostics.py @@ -6,18 +6,18 @@ from typing import Any from homeassistant.components.diagnostics import async_redact_data from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_SW_VERSION, CONF_HOST, CONF_IP_ADDRESS, CONF_MAC from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .const import DATA_DEVICE_INFO, DOMAIN +from . 
import TwinklyConfigEntry +from .const import DOMAIN TO_REDACT = [CONF_HOST, CONF_IP_ADDRESS, CONF_MAC] async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: TwinklyConfigEntry ) -> dict[str, Any]: """Return diagnostics for a Twinkly config entry.""" attributes = None @@ -34,8 +34,8 @@ async def async_get_config_entry_diagnostics( return async_redact_data( { "entry": entry.as_dict(), - "device_info": hass.data[DOMAIN][entry.entry_id][DATA_DEVICE_INFO], - ATTR_SW_VERSION: hass.data[DOMAIN][entry.entry_id][ATTR_SW_VERSION], + "device_info": entry.runtime_data.device_info, + ATTR_SW_VERSION: entry.runtime_data.sw_version, "attributes": attributes, }, TO_REDACT, diff --git a/homeassistant/components/twinkly/light.py b/homeassistant/components/twinkly/light.py index 771af2282dc..d05da7bab15 100644 --- a/homeassistant/components/twinkly/light.py +++ b/homeassistant/components/twinkly/light.py @@ -7,7 +7,6 @@ from typing import Any from aiohttp import ClientError from awesomeversion import AwesomeVersion -from ttls.client import Twinkly from homeassistant.components.light import ( ATTR_BRIGHTNESS, @@ -18,22 +17,14 @@ from homeassistant.components.light import ( LightEntity, LightEntityFeature, ) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_SW_VERSION, - CONF_HOST, - CONF_ID, - CONF_MODEL, - CONF_NAME, -) +from homeassistant.const import CONF_HOST, CONF_ID, CONF_MODEL, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import TwinklyConfigEntry from .const import ( - DATA_CLIENT, - DATA_DEVICE_INFO, DEV_LED_PROFILE, DEV_MODEL, DEV_NAME, @@ -48,16 +39,11 @@ _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: TwinklyConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Setups an entity from a config entry (UI config flow).""" - - client = hass.data[DOMAIN][config_entry.entry_id][DATA_CLIENT] - device_info = hass.data[DOMAIN][config_entry.entry_id][DATA_DEVICE_INFO] - software_version = hass.data[DOMAIN][config_entry.entry_id][ATTR_SW_VERSION] - - entity = TwinklyLight(config_entry, client, device_info, software_version) + entity = TwinklyLight(config_entry) async_add_entities([entity], update_before_add=True) @@ -71,14 +57,12 @@ class TwinklyLight(LightEntity): def __init__( self, - conf: ConfigEntry, - client: Twinkly, - device_info, - software_version: str | None = None, + entry: TwinklyConfigEntry, ) -> None: """Initialize a TwinklyLight entity.""" - self._attr_unique_id: str = conf.data[CONF_ID] - self._conf = conf + self._attr_unique_id: str = entry.data[CONF_ID] + device_info = entry.runtime_data.device_info + self._conf = entry if device_info.get(DEV_LED_PROFILE) == DEV_PROFILE_RGBW: self._attr_supported_color_modes = {ColorMode.RGBW} @@ -95,18 +79,18 @@ class TwinklyLight(LightEntity): # Those are saved in the config entry in order to have meaningful values even # if the device is currently offline. # They are expected to be updated using the device_info. 
- self._name = conf.data[CONF_NAME] or "Twinkly light" - self._model = conf.data[CONF_MODEL] + self._name = entry.data[CONF_NAME] or "Twinkly light" + self._model = entry.data[CONF_MODEL] self._mac = device_info["mac"] - self._client = client + self._client = entry.runtime_data.client # Set default state before any update self._attr_is_on = False self._attr_available = False self._current_movie: dict[Any, Any] = {} self._movies: list[Any] = [] - self._software_version = software_version + self._software_version = entry.runtime_data.sw_version # We guess that most devices are "new" and support effects self._attr_supported_features = LightEntityFeature.EFFECT From 7e24b353ac925be1ff3fd05e4f46ae4f983964ab Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 12:31:58 +0100 Subject: [PATCH 0959/1198] Add updates tests for Peblar Rocksolid EV Chargers (#133712) --- .../peblar/snapshots/test_update.ambr | 118 ++++++++++++++++++ tests/components/peblar/test_update.py | 35 ++++++ 2 files changed, 153 insertions(+) create mode 100644 tests/components/peblar/snapshots/test_update.ambr create mode 100644 tests/components/peblar/test_update.py diff --git a/tests/components/peblar/snapshots/test_update.ambr b/tests/components/peblar/snapshots/test_update.ambr new file mode 100644 index 00000000000..de8bb63150d --- /dev/null +++ b/tests/components/peblar/snapshots/test_update.ambr @@ -0,0 +1,118 @@ +# serializer version: 1 +# name: test_entities[update][update.peblar_ev_charger_customization-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.peblar_ev_charger_customization', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Customization', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'customization', + 'unique_id': '23-45-A4O-MOF_customization', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[update][update.peblar_ev_charger_customization-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/peblar/icon.png', + 'friendly_name': 'Peblar EV Charger Customization', + 'in_progress': False, + 'installed_version': 'Peblar-1.9', + 'latest_version': 'Peblar-1.9', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.peblar_ev_charger_customization', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entities[update][update.peblar_ev_charger_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.peblar_ev_charger_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 
'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '23-45-A4O-MOF_firmware', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[update][update.peblar_ev_charger_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/peblar/icon.png', + 'friendly_name': 'Peblar EV Charger Firmware', + 'in_progress': False, + 'installed_version': '1.6.1+1+WL-1', + 'latest_version': '1.6.2+1+WL-1', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.peblar_ev_charger_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/peblar/test_update.py b/tests/components/peblar/test_update.py new file mode 100644 index 00000000000..7a772fbe96c --- /dev/null +++ b/tests/components/peblar/test_update.py @@ -0,0 +1,35 @@ +"""Tests for the Peblar update platform.""" + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.peblar.const import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.parametrize("init_integration", [Platform.UPDATE], indirect=True) +@pytest.mark.usefixtures("init_integration") +async def test_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the update entities.""" + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + # Ensure all entities are correctly assigned to the Peblar device + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, "23-45-A4O-MOF")} + ) + assert device_entry + entity_entries = er.async_entries_for_config_entry( + entity_registry, mock_config_entry.entry_id + ) + for entity_entry in entity_entries: + assert entity_entry.device_id == device_entry.id From 6b666b3a0f4bd3e2ac4365f021954ec9b551b956 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Sat, 21 Dec 2024 12:44:00 +0100 Subject: [PATCH 0960/1198] Test color_temp updates are processed when an mqtt json light is turned off (#133715) --- tests/components/mqtt/test_light_json.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/tests/components/mqtt/test_light_json.py b/tests/components/mqtt/test_light_json.py index 988cce85653..c127c86de39 100644 --- a/tests/components/mqtt/test_light_json.py +++ b/tests/components/mqtt/test_light_json.py @@ -727,12 +727,12 @@ async def test_controlling_state_via_topic( async_fire_mqtt_message(hass, "test_light_rgb", '{"state":"ON", "color_temp":155}') light_state = hass.states.get("light.test") - assert light_state.attributes.get("color_temp") == 155 + assert light_state.attributes.get("color_temp_kelvin") == 6451 # 155 mired async_fire_mqtt_message(hass, "test_light_rgb", '{"state":"ON", "color_temp":null}') light_state = hass.states.get("light.test") - assert light_state.attributes.get("color_temp") is None + assert light_state.attributes.get("color_temp_kelvin") is None 
async_fire_mqtt_message( hass, "test_light_rgb", '{"state":"ON", "effect":"colorloop"}' @@ -763,11 +763,26 @@ async def test_controlling_state_via_topic( assert light_state.state == STATE_OFF assert light_state.attributes.get("brightness") is None + # Simulate the lights color temp has been changed + # while it was switched off + async_fire_mqtt_message( + hass, + "test_light_rgb", + '{"state":"OFF","color_temp":201}', + ) + light_state = hass.states.get("light.test") + assert light_state.state == STATE_OFF + # Color temp attribute is not exposed while the lamp is off + assert light_state.attributes.get("color_temp_kelvin") is None + # test previous zero brightness received was ignored and brightness is restored + # see if the latest color_temp value received is restored async_fire_mqtt_message(hass, "test_light_rgb", '{"state":"ON"}') light_state = hass.states.get("light.test") assert light_state.attributes.get("brightness") == 128 + assert light_state.attributes.get("color_temp_kelvin") == 4975 # 201 mired + # A `0` brightness value is ignored when a light is turned on async_fire_mqtt_message(hass, "test_light_rgb", '{"state":"ON","brightness":0}') light_state = hass.states.get("light.test") assert light_state.attributes.get("brightness") == 128 From 11efec49dbdcbd572002dfe9e2c5f37893fca0d4 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sat, 21 Dec 2024 12:45:00 +0100 Subject: [PATCH 0961/1198] Fix test coverage in workday (#133616) --- .../components/workday/binary_sensor.py | 6 ++- .../components/workday/config_flow.py | 8 ++-- tests/components/workday/test_config_flow.py | 45 +++++++++++++++++++ 3 files changed, 55 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/workday/binary_sensor.py b/homeassistant/components/workday/binary_sensor.py index f4a2541a1d7..3684208f102 100644 --- a/homeassistant/components/workday/binary_sensor.py +++ b/homeassistant/components/workday/binary_sensor.py @@ -94,7 +94,11 @@ def _get_obj_holidays( language=language, categories=set_categories, ) - if (supported_languages := obj_holidays.supported_languages) and language == "en": + if ( + (supported_languages := obj_holidays.supported_languages) + and language + and language.startswith("en") + ): for lang in supported_languages: if lang.startswith("en"): obj_holidays = country_holidays( diff --git a/homeassistant/components/workday/config_flow.py b/homeassistant/components/workday/config_flow.py index 2036d685d31..895c7cd50e2 100644 --- a/homeassistant/components/workday/config_flow.py +++ b/homeassistant/components/workday/config_flow.py @@ -136,7 +136,7 @@ def validate_custom_dates(user_input: dict[str, Any]) -> None: year: int = dt_util.now().year if country := user_input.get(CONF_COUNTRY): - language = user_input.get(CONF_LANGUAGE) + language: str | None = user_input.get(CONF_LANGUAGE) province = user_input.get(CONF_PROVINCE) obj_holidays = country_holidays( country=country, @@ -145,8 +145,10 @@ def validate_custom_dates(user_input: dict[str, Any]) -> None: language=language, ) if ( - supported_languages := obj_holidays.supported_languages - ) and language == "en": + (supported_languages := obj_holidays.supported_languages) + and language + and language.startswith("en") + ): for lang in supported_languages: if lang.startswith("en"): obj_holidays = country_holidays( diff --git a/tests/components/workday/test_config_flow.py b/tests/components/workday/test_config_flow.py index 1bf0f176fe9..51d4b899d25 100644 --- a/tests/components/workday/test_config_flow.py +++ 
b/tests/components/workday/test_config_flow.py @@ -653,3 +653,48 @@ async def test_form_with_categories(hass: HomeAssistant) -> None: "language": "de", "category": ["half_day"], } + + +async def test_options_form_removes_subdiv(hass: HomeAssistant) -> None: + """Test we get the form in options when removing a configured subdivision.""" + + entry = await init_integration( + hass, + { + "name": "Workday Sensor", + "country": "DE", + "excludes": ["sat", "sun", "holiday"], + "days_offset": 0, + "workdays": ["mon", "tue", "wed", "thu", "fri"], + "add_holidays": [], + "remove_holidays": [], + "language": "de", + "province": "BW", + }, + ) + + result = await hass.config_entries.options.async_init(entry.entry_id) + + result2 = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + "excludes": ["sat", "sun", "holiday"], + "days_offset": 0, + "workdays": ["mon", "tue", "wed", "thu", "fri"], + "add_holidays": [], + "remove_holidays": [], + "language": "de", + }, + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["data"] == { + "name": "Workday Sensor", + "country": "DE", + "excludes": ["sat", "sun", "holiday"], + "days_offset": 0, + "workdays": ["mon", "tue", "wed", "thu", "fri"], + "add_holidays": [], + "remove_holidays": [], + "language": "de", + } From a3fab094c3448030514ab3c9a0783ec13b38eede Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 13:03:44 +0100 Subject: [PATCH 0962/1198] Add device test for Peblar Rocksolid EV Chargers (#133713) --- .../peblar/snapshots/test_init.ambr | 41 +++++++++++++++++++ tests/components/peblar/test_init.py | 16 ++++++++ 2 files changed, 57 insertions(+) create mode 100644 tests/components/peblar/snapshots/test_init.ambr diff --git a/tests/components/peblar/snapshots/test_init.ambr b/tests/components/peblar/snapshots/test_init.ambr new file mode 100644 index 00000000000..ba79093b3ec --- /dev/null +++ b/tests/components/peblar/snapshots/test_init.ambr @@ -0,0 +1,41 @@ +# serializer version: 1 +# name: test_peblar_device_entry + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://127.0.0.127', + 'connections': set({ + tuple( + 'mac', + '00:0f:11:58:86:97', + ), + tuple( + 'mac', + '00:0f:11:58:86:99', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'peblar', + '23-45-A4O-MOF', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Peblar', + 'model': 'WLAC1-H11R0WE0ICR00', + 'model_id': '6004-2300-8002', + 'name': 'Peblar EV Charger', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '23-45-A4O-MOF', + 'suggested_area': None, + 'sw_version': '1.6.1+1+WL-1', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/peblar/test_init.py b/tests/components/peblar/test_init.py index 78508501ba8..ca7b0d88c24 100644 --- a/tests/components/peblar/test_init.py +++ b/tests/components/peblar/test_init.py @@ -4,10 +4,12 @@ from unittest.mock import MagicMock from peblar import PeblarAuthenticationError, PeblarConnectionError, PeblarError import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant.components.peblar.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr from tests.common import MockConfigEntry @@ -67,3 +69,17 @@ async def test_config_entry_authentication_failed( await 
hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + +@pytest.mark.usefixtures("init_integration") +async def test_peblar_device_entry( + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test authentication error, aborts setup.""" + assert ( + device_entry := device_registry.async_get_device( + identifiers={(DOMAIN, "23-45-A4O-MOF")} + ) + ) + assert device_entry == snapshot From a3fad89d0dbb87deef66924c3d4f50775e6e8e36 Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Sat, 21 Dec 2024 13:19:04 +0100 Subject: [PATCH 0963/1198] Use super constructor self.config_entry in enphase_envoy coordinator (#133718) --- homeassistant/components/enphase_envoy/coordinator.py | 10 +++++----- .../components/enphase_envoy/quality_scale.yaml | 7 +------ 2 files changed, 6 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/enphase_envoy/coordinator.py b/homeassistant/components/enphase_envoy/coordinator.py index 386661402de..67f43ca64a8 100644 --- a/homeassistant/components/enphase_envoy/coordinator.py +++ b/homeassistant/components/enphase_envoy/coordinator.py @@ -37,6 +37,7 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): envoy_serial_number: str envoy_firmware: str + config_entry: EnphaseConfigEntry def __init__( self, hass: HomeAssistant, envoy: Envoy, entry: EnphaseConfigEntry @@ -44,7 +45,6 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): """Initialize DataUpdateCoordinator for the envoy.""" self.envoy = envoy entry_data = entry.data - self.entry = entry self.username = entry_data[CONF_USERNAME] self.password = entry_data[CONF_PASSWORD] self._setup_complete = False @@ -107,7 +107,7 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): await envoy.setup() assert envoy.serial_number is not None self.envoy_serial_number = envoy.serial_number - if token := self.entry.data.get(CONF_TOKEN): + if token := self.config_entry.data.get(CONF_TOKEN): with contextlib.suppress(*INVALID_AUTH_ERRORS): # Always set the username and password # so we can refresh the token if needed @@ -136,9 +136,9 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): # as long as the token is valid _LOGGER.debug("%s: Updating token in config entry from auth", self.name) self.hass.config_entries.async_update_entry( - self.entry, + self.config_entry, data={ - **self.entry.data, + **self.config_entry.data, CONF_TOKEN: envoy.auth.token, }, ) @@ -189,7 +189,7 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): ) # reload the integration to get all established again self.hass.async_create_task( - self.hass.config_entries.async_reload(self.entry.entry_id) + self.hass.config_entries.async_reload(self.config_entry.entry_id) ) # remember firmware version for next time self.envoy_firmware = envoy.firmware diff --git a/homeassistant/components/enphase_envoy/quality_scale.yaml b/homeassistant/components/enphase_envoy/quality_scale.yaml index 4b83c2886f7..8e096538f01 100644 --- a/homeassistant/components/enphase_envoy/quality_scale.yaml +++ b/homeassistant/components/enphase_envoy/quality_scale.yaml @@ -7,12 +7,7 @@ rules: status: done comment: fixed 1 minute cycle based on Enphase Envoy device characteristics brands: done - common-modules: - status: done - comment: | - In coordinator.py, you set self.entry = entry, while after the super constructor, - you can access the entry via self.config_entry 
(you would have to overwrite the - type to make sure you don't have to assert not None every time)done + common-modules: done config-flow-test-coverage: status: todo comment: | From dc9133f919dfbe76fe874625cc18dd2078d373b3 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sat, 21 Dec 2024 13:26:09 +0100 Subject: [PATCH 0964/1198] Use mac address in Twinkly for unique id (#133717) --- homeassistant/components/twinkly/__init__.py | 42 +++++++++++++- .../components/twinkly/config_flow.py | 5 +- homeassistant/components/twinkly/light.py | 4 +- .../twinkly/snapshots/test_diagnostics.ambr | 4 +- tests/components/twinkly/test_init.py | 58 +++++++++++++++++-- tests/components/twinkly/test_light.py | 18 +++--- 6 files changed, 110 insertions(+), 21 deletions(-) diff --git a/homeassistant/components/twinkly/__init__.py b/homeassistant/components/twinkly/__init__.py index 00e40d604c0..cd76a79e1d7 100644 --- a/homeassistant/components/twinkly/__init__.py +++ b/homeassistant/components/twinkly/__init__.py @@ -1,6 +1,7 @@ """The twinkly component.""" from dataclasses import dataclass +import logging from typing import Any from aiohttp import ClientError @@ -10,12 +11,15 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import ATTR_VERSION +from .const import ATTR_VERSION, DOMAIN PLATFORMS = [Platform.LIGHT] +_LOGGER = logging.getLogger(__name__) + @dataclass class TwinklyData: @@ -56,3 +60,39 @@ async def async_unload_entry(hass: HomeAssistant, entry: TwinklyConfigEntry) -> """Remove a twinkly entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def async_migrate_entry(hass: HomeAssistant, entry: TwinklyConfigEntry) -> bool: + """Migrate old entry.""" + if entry.minor_version == 1: + client = Twinkly(entry.data[CONF_HOST], async_get_clientsession(hass)) + try: + device_info = await client.get_details() + except (TimeoutError, ClientError) as exception: + _LOGGER.error("Error while migrating: %s", exception) + return False + identifier = entry.unique_id + assert identifier is not None + entity_registry = er.async_get(hass) + entity_id = entity_registry.async_get_entity_id("light", DOMAIN, identifier) + if entity_id: + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry is not None + entity_registry.async_update_entity( + entity_entry.entity_id, new_unique_id=device_info["mac"] + ) + device_registry = dr.async_get(hass) + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, identifier)} + ) + if device_entry: + device_registry.async_update_device( + device_entry.id, new_identifiers={(DOMAIN, device_info["mac"])} + ) + hass.config_entries.async_update_entry( + entry, + unique_id=device_info["mac"], + minor_version=2, + ) + + return True diff --git a/homeassistant/components/twinkly/config_flow.py b/homeassistant/components/twinkly/config_flow.py index 837bd9ccb6a..4dec8809f07 100644 --- a/homeassistant/components/twinkly/config_flow.py +++ b/homeassistant/components/twinkly/config_flow.py @@ -23,6 +23,7 @@ class TwinklyConfigFlow(ConfigFlow, domain=DOMAIN): """Handle twinkly config flow.""" VERSION = 1 + MINOR_VERSION = 2 def __init__(self) -> None: """Initialize the config flow.""" @@ -46,7 +47,7 
@@ class TwinklyConfigFlow(ConfigFlow, domain=DOMAIN): errors[CONF_HOST] = "cannot_connect" else: await self.async_set_unique_id( - device_info[DEV_ID], raise_on_progress=False + device_info["mac"], raise_on_progress=False ) self._abort_if_unique_id_configured() @@ -64,7 +65,7 @@ class TwinklyConfigFlow(ConfigFlow, domain=DOMAIN): device_info = await Twinkly( discovery_info.ip, async_get_clientsession(self.hass) ).get_details() - await self.async_set_unique_id(device_info[DEV_ID]) + await self.async_set_unique_id(device_info["mac"]) self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.ip}) self._discovered_device = (device_info, discovery_info.ip) diff --git a/homeassistant/components/twinkly/light.py b/homeassistant/components/twinkly/light.py index d05da7bab15..7de07db3b30 100644 --- a/homeassistant/components/twinkly/light.py +++ b/homeassistant/components/twinkly/light.py @@ -60,8 +60,8 @@ class TwinklyLight(LightEntity): entry: TwinklyConfigEntry, ) -> None: """Initialize a TwinklyLight entity.""" - self._attr_unique_id: str = entry.data[CONF_ID] device_info = entry.runtime_data.device_info + self._attr_unique_id: str = device_info["mac"] self._conf = entry if device_info.get(DEV_LED_PROFILE) == DEV_PROFILE_RGBW: @@ -98,7 +98,7 @@ class TwinklyLight(LightEntity): def device_info(self) -> DeviceInfo | None: """Get device specific attributes.""" return DeviceInfo( - identifiers={(DOMAIN, self._attr_unique_id)}, + identifiers={(DOMAIN, self._mac)}, connections={(CONNECTION_NETWORK_MAC, self._mac)}, manufacturer="LEDWORKS", model=self._model, diff --git a/tests/components/twinkly/snapshots/test_diagnostics.ambr b/tests/components/twinkly/snapshots/test_diagnostics.ambr index 4d25e222501..abd923dcb83 100644 --- a/tests/components/twinkly/snapshots/test_diagnostics.ambr +++ b/tests/components/twinkly/snapshots/test_diagnostics.ambr @@ -32,14 +32,14 @@ }), 'domain': 'twinkly', 'entry_id': '4c8fccf5-e08a-4173-92d5-49bf479252a2', - 'minor_version': 1, + 'minor_version': 2, 'options': dict({ }), 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', 'title': 'Twinkly', - 'unique_id': '4c8fccf5-e08a-4173-92d5-49bf479252a2', + 'unique_id': 'aa:bb:cc:dd:ee:ff', 'version': 1, }), 'sw_version': '2.8.10', diff --git a/tests/components/twinkly/test_init.py b/tests/components/twinkly/test_init.py index 6642807ac3f..60ebe65b445 100644 --- a/tests/components/twinkly/test_init.py +++ b/tests/components/twinkly/test_init.py @@ -1,14 +1,16 @@ -"""Tests of the initialization of the twinly integration.""" +"""Tests of the initialization of the twinkly integration.""" from unittest.mock import patch from uuid import uuid4 -from homeassistant.components.twinkly.const import DOMAIN as TWINKLY_DOMAIN +from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN +from homeassistant.components.twinkly.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_HOST, CONF_ID, CONF_MODEL, CONF_NAME from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er -from . import TEST_HOST, TEST_MODEL, TEST_NAME_ORIGINAL, ClientMock +from . 
import TEST_HOST, TEST_MAC, TEST_MODEL, TEST_NAME_ORIGINAL, ClientMock from tests.common import MockConfigEntry @@ -19,7 +21,7 @@ async def test_load_unload_entry(hass: HomeAssistant) -> None: device_id = str(uuid4()) config_entry = MockConfigEntry( - domain=TWINKLY_DOMAIN, + domain=DOMAIN, data={ CONF_HOST: TEST_HOST, CONF_ID: device_id, @@ -27,6 +29,8 @@ async def test_load_unload_entry(hass: HomeAssistant) -> None: CONF_MODEL: TEST_MODEL, }, entry_id=device_id, + unique_id=TEST_MAC, + minor_version=2, ) config_entry.add_to_hass(hass) @@ -47,13 +51,15 @@ async def test_config_entry_not_ready(hass: HomeAssistant) -> None: client.is_offline = True config_entry = MockConfigEntry( - domain=TWINKLY_DOMAIN, + domain=DOMAIN, data={ CONF_HOST: TEST_HOST, CONF_ID: id, CONF_NAME: TEST_NAME_ORIGINAL, CONF_MODEL: TEST_MODEL, }, + minor_version=2, + unique_id=TEST_MAC, ) config_entry.add_to_hass(hass) @@ -62,3 +68,45 @@ async def test_config_entry_not_ready(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(config_entry.entry_id) assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_mac_migration( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, +) -> None: + """Validate that the unique_id is migrated to the MAC address.""" + client = ClientMock() + + config_entry = MockConfigEntry( + domain=DOMAIN, + minor_version=1, + unique_id="unique_id", + data={ + CONF_HOST: TEST_HOST, + CONF_ID: id, + CONF_NAME: TEST_NAME_ORIGINAL, + CONF_MODEL: TEST_MODEL, + }, + ) + config_entry.add_to_hass(hass) + entity_entry = entity_registry.async_get_or_create( + LIGHT_DOMAIN, + DOMAIN, + config_entry.unique_id, + ) + device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, config_entry.unique_id)}, + ) + + with patch("homeassistant.components.twinkly.Twinkly", return_value=client): + await hass.config_entries.async_setup(config_entry.entry_id) + + assert config_entry.state is ConfigEntryState.LOADED + + assert entity_registry.async_get(entity_entry.entity_id).unique_id == TEST_MAC + assert device_registry.async_get_device( + identifiers={(DOMAIN, config_entry.unique_id)} + ).identifiers == {(DOMAIN, TEST_MAC)} + assert config_entry.unique_id == TEST_MAC diff --git a/tests/components/twinkly/test_light.py b/tests/components/twinkly/test_light.py index 7a55dbec14a..26df83aebe0 100644 --- a/tests/components/twinkly/test_light.py +++ b/tests/components/twinkly/test_light.py @@ -15,7 +15,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.device_registry import DeviceEntry from homeassistant.helpers.entity_registry import RegistryEntry -from . import TEST_MODEL, TEST_NAME, TEST_NAME_ORIGINAL, ClientMock +from . 
import TEST_MAC, TEST_MODEL, TEST_NAME, TEST_NAME_ORIGINAL, ClientMock from tests.common import MockConfigEntry, async_fire_time_changed @@ -301,7 +301,7 @@ async def test_update_name( async_fire_time_changed(hass) await hass.async_block_till_done() - dev_entry = device_registry.async_get_device({(TWINKLY_DOMAIN, client.id)}) + dev_entry = device_registry.async_get_device({(TWINKLY_DOMAIN, TEST_MAC)}) assert dev_entry.name == "new_device_name" assert config_entry.data[CONF_NAME] == "new_device_name" @@ -310,10 +310,9 @@ async def test_update_name( async def test_unload(hass: HomeAssistant) -> None: """Validate that entities can be unloaded from the UI.""" - _, _, client, _ = await _create_entries(hass) - entry_id = client.id + _, _, _, entry = await _create_entries(hass) - assert await hass.config_entries.async_unload(entry_id) + assert await hass.config_entries.async_unload(entry.entry_id) async def _create_entries( @@ -330,18 +329,19 @@ async def _create_entries( CONF_NAME: TEST_NAME_ORIGINAL, CONF_MODEL: TEST_MODEL, }, - entry_id=client.id, + unique_id=TEST_MAC, + minor_version=2, ) config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(client.id) + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() device_registry = dr.async_get(hass) entity_registry = er.async_get(hass) - entity_id = entity_registry.async_get_entity_id("light", TWINKLY_DOMAIN, client.id) + entity_id = entity_registry.async_get_entity_id("light", TWINKLY_DOMAIN, TEST_MAC) entity_entry = entity_registry.async_get(entity_id) - device = device_registry.async_get_device(identifiers={(TWINKLY_DOMAIN, client.id)}) + device = device_registry.async_get_device(identifiers={(TWINKLY_DOMAIN, TEST_MAC)}) assert entity_entry is not None assert device is not None From 5abc03c21ebdeccea1678cd4721ae01fa09e3673 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Sat, 21 Dec 2024 13:26:48 +0100 Subject: [PATCH 0965/1198] Fix spelling of "Gateway PIN" and remove two excessive spaces (#133716) --- homeassistant/components/overkiz/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/overkiz/strings.json b/homeassistant/components/overkiz/strings.json index a756df4d0d6..1595cd52aeb 100644 --- a/homeassistant/components/overkiz/strings.json +++ b/homeassistant/components/overkiz/strings.json @@ -22,7 +22,7 @@ } }, "local": { - "description": "By activating the [Developer Mode of your TaHoma box](https://github.com/Somfy-Developer/Somfy-TaHoma-Developer-Mode#getting-started), you can authorize third-party software (like Home Assistant) to connect to it via your local network. 
\n\n After activation, enter your application credentials and change the host to include your gateway-pin or enter the IP address of your gateway.", + "description": "By activating the [Developer Mode of your TaHoma box](https://github.com/Somfy-Developer/Somfy-TaHoma-Developer-Mode#getting-started), you can authorize third-party software (like Home Assistant) to connect to it via your local network.\n\nAfter activation, enter your application credentials and change the host to include your Gateway PIN or enter the IP address of your gateway.", "data": { "host": "[%key:common::config_flow::data::host%]", "username": "[%key:common::config_flow::data::username%]", From a3febc4449375868c8968350262b8dd84170e76b Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 14:23:16 +0100 Subject: [PATCH 0966/1198] Add select platform to Peblar Rocksolid EV Chargers integration (#133720) --- homeassistant/components/peblar/__init__.py | 9 +- .../components/peblar/coordinator.py | 38 +++++++- .../components/peblar/diagnostics.py | 1 + homeassistant/components/peblar/icons.json | 11 +++ homeassistant/components/peblar/select.py | 95 +++++++++++++++++++ homeassistant/components/peblar/strings.json | 12 +++ tests/components/peblar/conftest.py | 10 +- .../peblar/fixtures/user_configuration.json | 59 ++++++++++++ .../peblar/snapshots/test_diagnostics.ambr | 61 ++++++++++++ .../peblar/snapshots/test_select.ambr | 62 ++++++++++++ tests/components/peblar/test_select.py | 35 +++++++ 11 files changed, 389 insertions(+), 4 deletions(-) create mode 100644 homeassistant/components/peblar/select.py create mode 100644 tests/components/peblar/fixtures/user_configuration.json create mode 100644 tests/components/peblar/snapshots/test_select.ambr create mode 100644 tests/components/peblar/test_select.py diff --git a/homeassistant/components/peblar/__init__.py b/homeassistant/components/peblar/__init__.py index d1da6ce83b7..79ffd236f32 100644 --- a/homeassistant/components/peblar/__init__.py +++ b/homeassistant/components/peblar/__init__.py @@ -24,10 +24,12 @@ from .coordinator import ( PeblarConfigEntry, PeblarMeterDataUpdateCoordinator, PeblarRuntimeData, + PeblarUserConfigurationDataUpdateCoordinator, PeblarVersionDataUpdateCoordinator, ) PLATFORMS = [ + Platform.SELECT, Platform.SENSOR, Platform.UPDATE, ] @@ -56,16 +58,21 @@ async def async_setup_entry(hass: HomeAssistant, entry: PeblarConfigEntry) -> bo # Setup the data coordinators meter_coordinator = PeblarMeterDataUpdateCoordinator(hass, entry, api) + user_configuration_coordinator = PeblarUserConfigurationDataUpdateCoordinator( + hass, entry, peblar + ) version_coordinator = PeblarVersionDataUpdateCoordinator(hass, entry, peblar) await asyncio.gather( meter_coordinator.async_config_entry_first_refresh(), + user_configuration_coordinator.async_config_entry_first_refresh(), version_coordinator.async_config_entry_first_refresh(), ) # Store the runtime data entry.runtime_data = PeblarRuntimeData( - system_information=system_information, meter_coordinator=meter_coordinator, + system_information=system_information, + user_configuraton_coordinator=user_configuration_coordinator, version_coordinator=version_coordinator, ) diff --git a/homeassistant/components/peblar/coordinator.py b/homeassistant/components/peblar/coordinator.py index f83ed8f4dda..a01e3d6b41a 100644 --- a/homeassistant/components/peblar/coordinator.py +++ b/homeassistant/components/peblar/coordinator.py @@ -5,7 +5,14 @@ from __future__ import annotations from dataclasses import dataclass from 
datetime import timedelta -from peblar import Peblar, PeblarApi, PeblarError, PeblarMeter, PeblarVersions +from peblar import ( + Peblar, + PeblarApi, + PeblarError, + PeblarMeter, + PeblarUserConfiguration, + PeblarVersions, +) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -19,8 +26,9 @@ from .const import LOGGER class PeblarRuntimeData: """Class to hold runtime data.""" - system_information: PeblarSystemInformation meter_coordinator: PeblarMeterDataUpdateCoordinator + system_information: PeblarSystemInformation + user_configuraton_coordinator: PeblarUserConfigurationDataUpdateCoordinator version_coordinator: PeblarVersionDataUpdateCoordinator @@ -86,3 +94,29 @@ class PeblarMeterDataUpdateCoordinator(DataUpdateCoordinator[PeblarMeter]): return await self.api.meter() except PeblarError as err: raise UpdateFailed(err) from err + + +class PeblarUserConfigurationDataUpdateCoordinator( + DataUpdateCoordinator[PeblarUserConfiguration] +): + """Class to manage fetching Peblar user configuration data.""" + + def __init__( + self, hass: HomeAssistant, entry: PeblarConfigEntry, peblar: Peblar + ) -> None: + """Initialize the coordinator.""" + self.peblar = peblar + super().__init__( + hass, + LOGGER, + config_entry=entry, + name=f"Peblar {entry.title} user configuration", + update_interval=timedelta(minutes=5), + ) + + async def _async_update_data(self) -> PeblarUserConfiguration: + """Fetch data from the Peblar device.""" + try: + return await self.peblar.user_configuration() + except PeblarError as err: + raise UpdateFailed(err) from err diff --git a/homeassistant/components/peblar/diagnostics.py b/homeassistant/components/peblar/diagnostics.py index 91cdb5dc811..6c4531c0e09 100644 --- a/homeassistant/components/peblar/diagnostics.py +++ b/homeassistant/components/peblar/diagnostics.py @@ -15,6 +15,7 @@ async def async_get_config_entry_diagnostics( """Return diagnostics for a config entry.""" return { "system_information": entry.runtime_data.system_information.to_dict(), + "user_configuration": entry.runtime_data.user_configuraton_coordinator.data.to_dict(), "meter": entry.runtime_data.meter_coordinator.data.to_dict(), "versions": { "available": entry.runtime_data.version_coordinator.data.available.to_dict(), diff --git a/homeassistant/components/peblar/icons.json b/homeassistant/components/peblar/icons.json index 073cd08a2c7..b052eb6de4d 100644 --- a/homeassistant/components/peblar/icons.json +++ b/homeassistant/components/peblar/icons.json @@ -1,5 +1,16 @@ { "entity": { + "select": { + "smart_charging": { + "default": "mdi:lightning-bolt", + "state": { + "fast_solar": "mdi:solar-power", + "pure_solar": "mdi:solar-power-variant", + "scheduled": "mdi:calendar-clock", + "smart_solar": "mdi:solar-power" + } + } + }, "update": { "customization": { "default": "mdi:palette" diff --git a/homeassistant/components/peblar/select.py b/homeassistant/components/peblar/select.py new file mode 100644 index 00000000000..95a87248804 --- /dev/null +++ b/homeassistant/components/peblar/select.py @@ -0,0 +1,95 @@ +"""Support for Peblar selects.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from typing import Any + +from peblar import Peblar, PeblarUserConfiguration, SmartChargingMode + +from homeassistant.components.select import SelectEntity, SelectEntityDescription +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from 
homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import PeblarConfigEntry, PeblarUserConfigurationDataUpdateCoordinator + + +@dataclass(frozen=True, kw_only=True) +class PeblarSelectEntityDescription(SelectEntityDescription): + """Class describing Peblar select entities.""" + + current_fn: Callable[[PeblarUserConfiguration], str | None] + select_fn: Callable[[Peblar, str], Awaitable[Any]] + + +DESCRIPTIONS = [ + PeblarSelectEntityDescription( + key="smart_charging", + translation_key="smart_charging", + entity_category=EntityCategory.CONFIG, + options=[ + "default", + "fast_solar", + "pure_solar", + "scheduled", + "smart_solar", + ], + current_fn=lambda x: x.smart_charging.value if x.smart_charging else None, + select_fn=lambda x, mode: x.smart_charging(SmartChargingMode(mode)), + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: PeblarConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Peblar select based on a config entry.""" + async_add_entities( + PeblarSelectEntity( + entry=entry, + description=description, + ) + for description in DESCRIPTIONS + ) + + +class PeblarSelectEntity( + CoordinatorEntity[PeblarUserConfigurationDataUpdateCoordinator], SelectEntity +): + """Defines a peblar select entity.""" + + entity_description: PeblarSelectEntityDescription + + _attr_has_entity_name = True + + def __init__( + self, + entry: PeblarConfigEntry, + description: PeblarSelectEntityDescription, + ) -> None: + """Initialize the select entity.""" + super().__init__(entry.runtime_data.user_configuraton_coordinator) + self.entity_description = description + self._attr_unique_id = f"{entry.unique_id}-{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={ + (DOMAIN, entry.runtime_data.system_information.product_serial_number) + }, + ) + + @property + def current_option(self) -> str | None: + """Return the selected entity option to represent the entity state.""" + return self.entity_description.current_fn(self.coordinator.data) + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + await self.entity_description.select_fn(self.coordinator.peblar, option) + await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index 2e23fcfcdcd..a36cd14fe48 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -33,6 +33,18 @@ } }, "entity": { + "select": { + "smart_charging": { + "name": "Smart charging", + "state": { + "default": "Default", + "fast_solar": "Fast solar", + "pure_solar": "Pure solar", + "scheduled": "Scheduled", + "smart_solar": "Smart solar" + } + } + }, "update": { "customization": { "name": "Customization" diff --git a/tests/components/peblar/conftest.py b/tests/components/peblar/conftest.py index 2db28d3a7e6..8831697f74e 100644 --- a/tests/components/peblar/conftest.py +++ b/tests/components/peblar/conftest.py @@ -6,7 +6,12 @@ from collections.abc import Generator from contextlib import nullcontext from unittest.mock import MagicMock, patch -from peblar import PeblarMeter, PeblarSystemInformation, PeblarVersions +from peblar import ( + PeblarMeter, + PeblarSystemInformation, + PeblarUserConfiguration, + PeblarVersions, +) import pytest from 
homeassistant.components.peblar.const import DOMAIN @@ -51,6 +56,9 @@ def mock_peblar() -> Generator[MagicMock]: peblar.current_versions.return_value = PeblarVersions.from_json( load_fixture("current_versions.json", DOMAIN) ) + peblar.user_configuration.return_value = PeblarUserConfiguration.from_json( + load_fixture("user_configuration.json", DOMAIN) + ) peblar.system_information.return_value = PeblarSystemInformation.from_json( load_fixture("system_information.json", DOMAIN) ) diff --git a/tests/components/peblar/fixtures/user_configuration.json b/tests/components/peblar/fixtures/user_configuration.json new file mode 100644 index 00000000000..b778ad35f18 --- /dev/null +++ b/tests/components/peblar/fixtures/user_configuration.json @@ -0,0 +1,59 @@ +{ + "BopFallbackCurrent": 6000, + "BopHomeWizardAddress": "p1meter-093586", + "BopSource": "homewizard", + "BopSourceParameters": "{}", + "ConnectedPhases": 1, + "CurrentCtrlBopCtType": "CTK05-14", + "CurrentCtrlBopEnable": true, + "CurrentCtrlBopFuseRating": 35, + "CurrentCtrlFixedChargeCurrentLimit": 16, + "GroundMonitoring": true, + "GroupLoadBalancingEnable": false, + "GroupLoadBalancingFallbackCurrent": 6, + "GroupLoadBalancingGroupId": 1, + "GroupLoadBalancingInterface": "RS485", + "GroupLoadBalancingMaxCurrent": 0, + "GroupLoadBalancingRole": "", + "HmiBuzzerVolume": 1, + "HmiLedIntensityManual": 0, + "HmiLedIntensityMax": 100, + "HmiLedIntensityMin": 1, + "HmiLedIntensityMode": "Fixed", + "LocalRestApiAccessMode": "ReadWrite", + "LocalRestApiAllowed": true, + "LocalRestApiEnable": true, + "LocalSmartChargingAllowed": true, + "ModbusServerAccessMode": "ReadOnly", + "ModbusServerAllowed": true, + "ModbusServerEnable": true, + "PhaseRotation": "RST", + "PowerLimitInputDi1Inverse": false, + "PowerLimitInputDi1Limit": 6, + "PowerLimitInputDi2Inverse": false, + "PowerLimitInputDi2Limit": 0, + "PowerLimitInputEnable": false, + "PredefinedCpoName": "", + "ScheduledChargingAllowed": true, + "ScheduledChargingEnable": false, + "SeccOcppActive": false, + "SeccOcppUri": "", + "SessionManagerChargeWithoutAuth": false, + "SolarChargingAllowed": true, + "SolarChargingEnable": true, + "SolarChargingMode": "PureSolar", + "SolarChargingSource": "homewizard", + "SolarChargingSourceParameters": "{\"address\":\"p1meter-093586\"}", + "TimeZone": "Europe/Amsterdam", + "UserDefinedChargeLimitCurrent": 16, + "UserDefinedChargeLimitCurrentAllowed": true, + "UserDefinedHouseholdPowerLimit": 20000, + "UserDefinedHouseholdPowerLimitAllowed": true, + "UserDefinedHouseholdPowerLimitEnable": false, + "UserDefinedHouseholdPowerLimitSource": "homewizard", + "UserDefinedHouseholdPowerLimitSourceParameters": "{\"address\":\"p1meter-093586\"}", + "UserKeepSocketLocked": false, + "VDEPhaseImbalanceEnable": false, + "VDEPhaseImbalanceLimit": 20, + "WebIfUpdateHelper": true +} diff --git a/tests/components/peblar/snapshots/test_diagnostics.ambr b/tests/components/peblar/snapshots/test_diagnostics.ambr index 7701c1eb159..fa6eb857e09 100644 --- a/tests/components/peblar/snapshots/test_diagnostics.ambr +++ b/tests/components/peblar/snapshots/test_diagnostics.ambr @@ -75,6 +75,67 @@ 'WlanApMacAddr': '00:0F:11:58:86:98', 'WlanStaMacAddr': '00:0F:11:58:86:99', }), + 'user_configuration': dict({ + 'BopFallbackCurrent': 6000, + 'BopHomeWizardAddress': 'p1meter-093586', + 'BopSource': 'homewizard', + 'BopSourceParameters': '{}', + 'ConnectedPhases': 1, + 'CurrentCtrlBopCtType': 'CTK05-14', + 'CurrentCtrlBopEnable': True, + 'CurrentCtrlBopFuseRating': 35, + 
'CurrentCtrlFixedChargeCurrentLimit': 16, + 'GroundMonitoring': True, + 'GroupLoadBalancingEnable': False, + 'GroupLoadBalancingFallbackCurrent': 6, + 'GroupLoadBalancingGroupId': 1, + 'GroupLoadBalancingInterface': 'RS485', + 'GroupLoadBalancingMaxCurrent': 0, + 'GroupLoadBalancingRole': '', + 'HmiBuzzerVolume': 1, + 'HmiLedIntensityManual': 0, + 'HmiLedIntensityMax': 100, + 'HmiLedIntensityMin': 1, + 'HmiLedIntensityMode': 'Fixed', + 'LocalRestApiAccessMode': 'ReadWrite', + 'LocalRestApiAllowed': True, + 'LocalRestApiEnable': True, + 'LocalSmartChargingAllowed': True, + 'ModbusServerAccessMode': 'ReadOnly', + 'ModbusServerAllowed': True, + 'ModbusServerEnable': True, + 'PhaseRotation': 'RST', + 'PowerLimitInputDi1Inverse': False, + 'PowerLimitInputDi1Limit': 6, + 'PowerLimitInputDi2Inverse': False, + 'PowerLimitInputDi2Limit': 0, + 'PowerLimitInputEnable': False, + 'PredefinedCpoName': '', + 'ScheduledChargingAllowed': True, + 'ScheduledChargingEnable': False, + 'SeccOcppActive': False, + 'SeccOcppUri': '', + 'SessionManagerChargeWithoutAuth': False, + 'SolarChargingAllowed': True, + 'SolarChargingEnable': True, + 'SolarChargingMode': 'PureSolar', + 'SolarChargingSource': 'homewizard', + 'SolarChargingSourceParameters': dict({ + 'address': 'p1meter-093586', + }), + 'TimeZone': 'Europe/Amsterdam', + 'UserDefinedChargeLimitCurrent': 16, + 'UserDefinedChargeLimitCurrentAllowed': True, + 'UserDefinedHouseholdPowerLimit': 20000, + 'UserDefinedHouseholdPowerLimitAllowed': True, + 'UserDefinedHouseholdPowerLimitEnable': False, + 'UserDefinedHouseholdPowerLimitSource': 'homewizard', + 'UserKeepSocketLocked': False, + 'VDEPhaseImbalanceEnable': False, + 'VDEPhaseImbalanceLimit': 20, + 'WebIfUpdateHelper': True, + 'smart_charging': 'pure_solar', + }), 'versions': dict({ 'available': dict({ 'Customization': 'Peblar-1.9', diff --git a/tests/components/peblar/snapshots/test_select.ambr b/tests/components/peblar/snapshots/test_select.ambr new file mode 100644 index 00000000000..9f0852d7cf4 --- /dev/null +++ b/tests/components/peblar/snapshots/test_select.ambr @@ -0,0 +1,62 @@ +# serializer version: 1 +# name: test_entities[select][select.peblar_ev_charger_smart_charging-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'default', + 'fast_solar', + 'pure_solar', + 'scheduled', + 'smart_solar', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.peblar_ev_charger_smart_charging', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Smart charging', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'smart_charging', + 'unique_id': '23-45-A4O-MOF-smart_charging', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[select][select.peblar_ev_charger_smart_charging-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Peblar EV Charger Smart charging', + 'options': list([ + 'default', + 'fast_solar', + 'pure_solar', + 'scheduled', + 'smart_solar', + ]), + }), + 'context': , + 'entity_id': 'select.peblar_ev_charger_smart_charging', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'pure_solar', + }) +# --- diff --git 
a/tests/components/peblar/test_select.py b/tests/components/peblar/test_select.py new file mode 100644 index 00000000000..e20d84da755 --- /dev/null +++ b/tests/components/peblar/test_select.py @@ -0,0 +1,35 @@ +"""Tests for the Peblar select platform.""" + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.peblar.const import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.parametrize("init_integration", [Platform.SELECT], indirect=True) +@pytest.mark.usefixtures("init_integration") +async def test_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the select entities.""" + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + # Ensure all entities are correctly assigned to the Peblar device + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, "23-45-A4O-MOF")} + ) + assert device_entry + entity_entries = er.async_entries_for_config_entry( + entity_registry, mock_config_entry.entry_id + ) + for entity_entry in entity_entries: + assert entity_entry.device_id == device_entry.id From b5a7a41ebe4af6545aa34ac34b33a3026b727cdc Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Sat, 21 Dec 2024 15:10:14 +0100 Subject: [PATCH 0967/1198] KNX: Option to select specific tunnel endpoint on TCP connections (#131996) --- homeassistant/components/knx/__init__.py | 3 + homeassistant/components/knx/config_flow.py | 143 ++++++++++++++-- homeassistant/components/knx/strings.json | 18 +- tests/components/knx/test_config_flow.py | 180 +++++++++++++++++++- 4 files changed, 320 insertions(+), 24 deletions(-) diff --git a/homeassistant/components/knx/__init__.py b/homeassistant/components/knx/__init__.py index ea654c358e7..edb9cc62008 100644 --- a/homeassistant/components/knx/__init__.py +++ b/homeassistant/components/knx/__init__.py @@ -401,6 +401,9 @@ class KNXModule: ) return ConnectionConfig( auto_reconnect=True, + individual_address=self.entry.data.get( + CONF_KNX_TUNNEL_ENDPOINT_IA, # may be configured at knxkey upload + ), secure_config=SecureConfig( knxkeys_password=self.entry.data.get(CONF_KNX_KNXKEY_PASSWORD), knxkeys_file_path=_knxkeys_file, diff --git a/homeassistant/components/knx/config_flow.py b/homeassistant/components/knx/config_flow.py index feeb7626577..eda160cd1a6 100644 --- a/homeassistant/components/knx/config_flow.py +++ b/homeassistant/components/knx/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations from abc import ABC, abstractmethod from collections.abc import AsyncGenerator -from typing import Any, Final +from typing import Any, Final, Literal import voluptuous as vol from xknx import XKNX @@ -121,6 +121,15 @@ class KNXCommonFlow(ABC, ConfigEntryBaseFlow): self._gatewayscanner: GatewayScanner | None = None self._async_scan_gen: AsyncGenerator[GatewayDescriptor] | None = None + @property + def _xknx(self) -> XKNX: + """Return XKNX instance.""" + if isinstance(self, OptionsFlow) and ( + knx_module := self.hass.data.get(KNX_MODULE_KEY) + ): + return knx_module.xknx + return XKNX() + @abstractmethod def finish_flow(self) -> ConfigFlowResult: """Finish the flow.""" @@ -183,14 +192,8 @@ class KNXCommonFlow(ABC, 
ConfigEntryBaseFlow): CONF_KNX_ROUTING: CONF_KNX_ROUTING.capitalize(), } - if isinstance(self, OptionsFlow) and ( - knx_module := self.hass.data.get(KNX_MODULE_KEY) - ): - xknx = knx_module.xknx - else: - xknx = XKNX() self._gatewayscanner = GatewayScanner( - xknx, stop_on_found=0, timeout_in_seconds=2 + self._xknx, stop_on_found=0, timeout_in_seconds=2 ) # keep a reference to the generator to scan in background until user selects a connection type self._async_scan_gen = self._gatewayscanner.async_scan() @@ -204,8 +207,25 @@ class KNXCommonFlow(ABC, ConfigEntryBaseFlow): CONF_KNX_AUTOMATIC: CONF_KNX_AUTOMATIC.capitalize() } | supported_connection_types + default_connection_type: Literal["automatic", "tunneling", "routing"] + _current_conn = self.initial_data.get(CONF_KNX_CONNECTION_TYPE) + if _current_conn in ( + CONF_KNX_TUNNELING, + CONF_KNX_TUNNELING_TCP, + CONF_KNX_TUNNELING_TCP_SECURE, + ): + default_connection_type = CONF_KNX_TUNNELING + elif _current_conn in (CONF_KNX_ROUTING, CONF_KNX_ROUTING_SECURE): + default_connection_type = CONF_KNX_ROUTING + elif CONF_KNX_AUTOMATIC in supported_connection_types: + default_connection_type = CONF_KNX_AUTOMATIC + else: + default_connection_type = CONF_KNX_TUNNELING + fields = { - vol.Required(CONF_KNX_CONNECTION_TYPE): vol.In(supported_connection_types) + vol.Required( + CONF_KNX_CONNECTION_TYPE, default=default_connection_type + ): vol.In(supported_connection_types) } return self.async_show_form( step_id="connection_type", data_schema=vol.Schema(fields) @@ -216,8 +236,7 @@ class KNXCommonFlow(ABC, ConfigEntryBaseFlow): ) -> ConfigFlowResult: """Select a tunnel from a list. - Will be skipped if the gateway scan was unsuccessful - or if only one gateway was found. + Will be skipped if the gateway scan was unsuccessful. 
""" if user_input is not None: if user_input[CONF_KNX_GATEWAY] == OPTION_MANUAL_TUNNEL: @@ -247,6 +266,8 @@ class KNXCommonFlow(ABC, ConfigEntryBaseFlow): user_password=None, tunnel_endpoint_ia=None, ) + if connection_type == CONF_KNX_TUNNELING_TCP: + return await self.async_step_tcp_tunnel_endpoint() if connection_type == CONF_KNX_TUNNELING_TCP_SECURE: return await self.async_step_secure_key_source_menu_tunnel() self.new_title = f"Tunneling @ {self._selected_tunnel}" @@ -255,16 +276,99 @@ class KNXCommonFlow(ABC, ConfigEntryBaseFlow): if not self._found_tunnels: return await self.async_step_manual_tunnel() - errors: dict = {} - tunnel_options = { - str(tunnel): f"{tunnel}{' 🔐' if tunnel.tunnelling_requires_secure else ''}" + tunnel_options = [ + selector.SelectOptionDict( + value=str(tunnel), + label=( + f"{tunnel}" + f"{' TCP' if tunnel.supports_tunnelling_tcp else ' UDP'}" + f"{' 🔐 Secure tunneling' if tunnel.tunnelling_requires_secure else ''}" + ), + ) for tunnel in self._found_tunnels + ] + tunnel_options.append( + selector.SelectOptionDict( + value=OPTION_MANUAL_TUNNEL, label=OPTION_MANUAL_TUNNEL + ) + ) + default_tunnel = next( + ( + str(tunnel) + for tunnel in self._found_tunnels + if tunnel.ip_addr == self.initial_data.get(CONF_HOST) + ), + vol.UNDEFINED, + ) + fields = { + vol.Required( + CONF_KNX_GATEWAY, default=default_tunnel + ): selector.SelectSelector( + selector.SelectSelectorConfig( + options=tunnel_options, + mode=selector.SelectSelectorMode.LIST, + ) + ) } - tunnel_options |= {OPTION_MANUAL_TUNNEL: OPTION_MANUAL_TUNNEL} - fields = {vol.Required(CONF_KNX_GATEWAY): vol.In(tunnel_options)} + return self.async_show_form(step_id="tunnel", data_schema=vol.Schema(fields)) + + async def async_step_tcp_tunnel_endpoint( + self, user_input: dict | None = None + ) -> ConfigFlowResult: + """Select specific tunnel endpoint for plain TCP connection.""" + if user_input is not None: + selected_tunnel_ia: str | None = ( + None + if user_input[CONF_KNX_TUNNEL_ENDPOINT_IA] == CONF_KNX_AUTOMATIC + else user_input[CONF_KNX_TUNNEL_ENDPOINT_IA] + ) + self.new_entry_data |= KNXConfigEntryData( + tunnel_endpoint_ia=selected_tunnel_ia, + ) + self.new_title = ( + f"{selected_tunnel_ia or 'Tunneling'} @ {self._selected_tunnel}" + ) + return self.finish_flow() + + # this step is only called from async_step_tunnel so self._selected_tunnel is always set + assert self._selected_tunnel + # skip if only one tunnel endpoint or no tunnelling slot infos + if len(self._selected_tunnel.tunnelling_slots) <= 1: + return self.finish_flow() + + tunnel_endpoint_options = [ + selector.SelectOptionDict( + value=CONF_KNX_AUTOMATIC, label=CONF_KNX_AUTOMATIC.capitalize() + ) + ] + _current_ia = self._xknx.current_address + tunnel_endpoint_options.extend( + selector.SelectOptionDict( + value=str(slot), + label=( + f"{slot} - {'current connection' if slot == _current_ia else 'occupied' if not slot_status.free else 'free'}" + ), + ) + for slot, slot_status in self._selected_tunnel.tunnelling_slots.items() + ) + default_endpoint = ( + self.initial_data.get(CONF_KNX_TUNNEL_ENDPOINT_IA) or CONF_KNX_AUTOMATIC + ) return self.async_show_form( - step_id="tunnel", data_schema=vol.Schema(fields), errors=errors + step_id="tcp_tunnel_endpoint", + data_schema=vol.Schema( + { + vol.Required( + CONF_KNX_TUNNEL_ENDPOINT_IA, default=default_endpoint + ): selector.SelectSelector( + selector.SelectSelectorConfig( + options=tunnel_endpoint_options, + mode=selector.SelectSelectorMode.LIST, + ) + ), + } + ), ) async def 
async_step_manual_tunnel( @@ -612,12 +716,15 @@ class KNXCommonFlow(ABC, ConfigEntryBaseFlow): ) for endpoint in self._tunnel_endpoints ) + default_endpoint = ( + self.initial_data.get(CONF_KNX_TUNNEL_ENDPOINT_IA) or CONF_KNX_AUTOMATIC + ) return self.async_show_form( step_id="knxkeys_tunnel_select", data_schema=vol.Schema( { vol.Required( - CONF_KNX_TUNNEL_ENDPOINT_IA, default=CONF_KNX_AUTOMATIC + CONF_KNX_TUNNEL_ENDPOINT_IA, default=default_endpoint ): selector.SelectSelector( selector.SelectSelectorConfig( options=tunnel_endpoint_options, diff --git a/homeassistant/components/knx/strings.json b/homeassistant/components/knx/strings.json index d697fa79e78..cde697007aa 100644 --- a/homeassistant/components/knx/strings.json +++ b/homeassistant/components/knx/strings.json @@ -15,6 +15,13 @@ "gateway": "KNX Tunnel Connection" } }, + "tcp_tunnel_endpoint": { + "title": "[%key:component::knx::config::step::knxkeys_tunnel_select::title%]", + "description": "[%key:component::knx::config::step::knxkeys_tunnel_select::description%]", + "data": { + "tunnel_endpoint_ia": "[%key:component::knx::config::step::knxkeys_tunnel_select::data::user_id%]" + } + }, "manual_tunnel": { "title": "Tunnel settings", "description": "Please enter the connection information of your tunneling device.", @@ -61,9 +68,9 @@ }, "knxkeys_tunnel_select": { "title": "Tunnel endpoint", - "description": "Select the tunnel used for connection.", + "description": "Select the tunnel endpoint used for the connection.", "data": { - "user_id": "`Automatic` will use the first free tunnel endpoint." + "user_id": "'Automatic' selects a free tunnel endpoint for you when connecting. If you're unsure, this is the best option." } }, "secure_tunnel_manual": { @@ -159,6 +166,13 @@ "gateway": "[%key:component::knx::config::step::tunnel::data::gateway%]" } }, + "tcp_tunnel_endpoint": { + "title": "[%key:component::knx::config::step::knxkeys_tunnel_select::title%]", + "description": "[%key:component::knx::config::step::knxkeys_tunnel_select::description%]", + "data": { + "tunnel_endpoint_ia": "[%key:component::knx::config::step::knxkeys_tunnel_select::data::user_id%]" + } + }, "manual_tunnel": { "title": "[%key:component::knx::config::step::manual_tunnel::title%]", "description": "[%key:component::knx::config::step::manual_tunnel::description%]", diff --git a/tests/components/knx/test_config_flow.py b/tests/components/knx/test_config_flow.py index 2187721a518..8ed79f837bb 100644 --- a/tests/components/knx/test_config_flow.py +++ b/tests/components/knx/test_config_flow.py @@ -7,6 +7,7 @@ import pytest from xknx.exceptions.exception import CommunicationError, InvalidSecureConfiguration from xknx.io import DEFAULT_MCAST_GRP, DEFAULT_MCAST_PORT from xknx.io.gateway_scanner import GatewayDescriptor +from xknx.knxip.dib import TunnelingSlotStatus from xknx.secure.keyring import sync_load_keyring from xknx.telegram import IndividualAddress @@ -105,6 +106,7 @@ def _gateway_descriptor( port: int, supports_tunnelling_tcp: bool = False, requires_secure: bool = False, + slots: bool = True, ) -> GatewayDescriptor: """Get mock gw descriptor.""" descriptor = GatewayDescriptor( @@ -120,6 +122,12 @@ def _gateway_descriptor( ) descriptor.tunnelling_requires_secure = requires_secure descriptor.routing_requires_secure = requires_secure + if supports_tunnelling_tcp and slots: + descriptor.tunnelling_slots = { + IndividualAddress("1.0.240"): TunnelingSlotStatus(True, True, True), + IndividualAddress("1.0.241"): TunnelingSlotStatus(True, True, False), + 
IndividualAddress("1.0.242"): TunnelingSlotStatus(True, True, True), + } return descriptor @@ -791,12 +799,14 @@ async def test_tunneling_setup_for_multiple_found_gateways( hass: HomeAssistant, knx_setup ) -> None: """Test tunneling if multiple gateways are found.""" - gateway = _gateway_descriptor("192.168.0.1", 3675) - gateway2 = _gateway_descriptor("192.168.1.100", 3675) + gateway_udp = _gateway_descriptor("192.168.0.1", 3675) + gateway_tcp = _gateway_descriptor("192.168.1.100", 3675, True) with patch( "homeassistant.components.knx.config_flow.GatewayScanner" ) as gateway_scanner_mock: - gateway_scanner_mock.return_value = GatewayScannerMock([gateway, gateway2]) + gateway_scanner_mock.return_value = GatewayScannerMock( + [gateway_udp, gateway_tcp] + ) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -815,7 +825,7 @@ async def test_tunneling_setup_for_multiple_found_gateways( result = await hass.config_entries.flow.async_configure( tunnel_flow["flow_id"], - {CONF_KNX_GATEWAY: str(gateway)}, + {CONF_KNX_GATEWAY: str(gateway_udp)}, ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { @@ -833,6 +843,110 @@ async def test_tunneling_setup_for_multiple_found_gateways( knx_setup.assert_called_once() +async def test_tunneling_setup_tcp_endpoint_select_skip( + hass: HomeAssistant, knx_setup +) -> None: + """Test tunneling TCP endpoint selection skipped if no slot info found.""" + gateway_udp = _gateway_descriptor("192.168.0.1", 3675) + gateway_tcp_no_slots = _gateway_descriptor("192.168.1.100", 3675, True, slots=False) + with patch( + "homeassistant.components.knx.config_flow.GatewayScanner" + ) as gateway_scanner_mock: + gateway_scanner_mock.return_value = GatewayScannerMock( + [gateway_udp, gateway_tcp_no_slots] + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + tunnel_flow = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_KNX_CONNECTION_TYPE: CONF_KNX_TUNNELING, + }, + ) + assert tunnel_flow["type"] is FlowResultType.FORM + assert tunnel_flow["step_id"] == "tunnel" + assert not tunnel_flow["errors"] + + result = await hass.config_entries.flow.async_configure( + tunnel_flow["flow_id"], + {CONF_KNX_GATEWAY: str(gateway_tcp_no_slots)}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + **DEFAULT_ENTRY_DATA, + CONF_KNX_CONNECTION_TYPE: CONF_KNX_TUNNELING_TCP, + CONF_HOST: "192.168.1.100", + CONF_PORT: 3675, + CONF_KNX_INDIVIDUAL_ADDRESS: "0.0.240", + CONF_KNX_ROUTE_BACK: False, + CONF_KNX_TUNNEL_ENDPOINT_IA: None, + CONF_KNX_SECURE_DEVICE_AUTHENTICATION: None, + CONF_KNX_SECURE_USER_ID: None, + CONF_KNX_SECURE_USER_PASSWORD: None, + } + knx_setup.assert_called_once() + + +async def test_tunneling_setup_tcp_endpoint_select( + hass: HomeAssistant, knx_setup +) -> None: + """Test tunneling TCP endpoint selection.""" + gateway_tcp = _gateway_descriptor("192.168.1.100", 3675, True) + with patch( + "homeassistant.components.knx.config_flow.GatewayScanner" + ) as gateway_scanner_mock: + gateway_scanner_mock.return_value = GatewayScannerMock([gateway_tcp]) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + tunnel_flow = await 
hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_KNX_CONNECTION_TYPE: CONF_KNX_TUNNELING, + }, + ) + assert tunnel_flow["type"] is FlowResultType.FORM + assert tunnel_flow["step_id"] == "tunnel" + assert not tunnel_flow["errors"] + + endpoint_flow = await hass.config_entries.flow.async_configure( + tunnel_flow["flow_id"], + {CONF_KNX_GATEWAY: str(gateway_tcp)}, + ) + + assert endpoint_flow["type"] is FlowResultType.FORM + assert endpoint_flow["step_id"] == "tcp_tunnel_endpoint" + assert not endpoint_flow["errors"] + + result = await hass.config_entries.flow.async_configure( + endpoint_flow["flow_id"], + {CONF_KNX_TUNNEL_ENDPOINT_IA: "1.0.242"}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "1.0.242 @ 1.0.0 - Test @ 192.168.1.100:3675" + assert result["data"] == { + **DEFAULT_ENTRY_DATA, + CONF_KNX_CONNECTION_TYPE: CONF_KNX_TUNNELING_TCP, + CONF_HOST: "192.168.1.100", + CONF_PORT: 3675, + CONF_KNX_INDIVIDUAL_ADDRESS: "0.0.240", + CONF_KNX_ROUTE_BACK: False, + CONF_KNX_TUNNEL_ENDPOINT_IA: "1.0.242", + CONF_KNX_SECURE_DEVICE_AUTHENTICATION: None, + CONF_KNX_SECURE_USER_ID: None, + CONF_KNX_SECURE_USER_PASSWORD: None, + } + knx_setup.assert_called_once() + + @pytest.mark.parametrize( "gateway", [ @@ -1319,6 +1433,64 @@ async def test_options_flow_secure_manual_to_keyfile( knx_setup.assert_called_once() +async def test_options_flow_routing(hass: HomeAssistant, knx_setup) -> None: + """Test options flow changing routing settings.""" + mock_config_entry = MockConfigEntry( + title="KNX", + domain="knx", + data={ + **DEFAULT_ENTRY_DATA, + CONF_KNX_CONNECTION_TYPE: CONF_KNX_ROUTING, + }, + ) + gateway = _gateway_descriptor("192.168.0.1", 3676) + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + menu_step = await hass.config_entries.options.async_init(mock_config_entry.entry_id) + + with patch( + "homeassistant.components.knx.config_flow.GatewayScanner" + ) as gateway_scanner_mock: + gateway_scanner_mock.return_value = GatewayScannerMock([gateway]) + result = await hass.config_entries.options.async_configure( + menu_step["flow_id"], + {"next_step_id": "connection_type"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "connection_type" + + result2 = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_KNX_CONNECTION_TYPE: CONF_KNX_ROUTING, + }, + ) + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == "routing" + assert result2["errors"] == {} + + result3 = await hass.config_entries.options.async_configure( + result2["flow_id"], + { + CONF_KNX_INDIVIDUAL_ADDRESS: "2.0.4", + }, + ) + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert mock_config_entry.data == { + **DEFAULT_ENTRY_DATA, + CONF_KNX_CONNECTION_TYPE: CONF_KNX_ROUTING, + CONF_KNX_MCAST_GRP: DEFAULT_MCAST_GRP, + CONF_KNX_MCAST_PORT: DEFAULT_MCAST_PORT, + CONF_KNX_LOCAL_IP: None, + CONF_KNX_INDIVIDUAL_ADDRESS: "2.0.4", + CONF_KNX_SECURE_DEVICE_AUTHENTICATION: None, + CONF_KNX_SECURE_USER_ID: None, + CONF_KNX_SECURE_USER_PASSWORD: None, + CONF_KNX_TUNNEL_ENDPOINT_IA: None, + } + knx_setup.assert_called_once() + + async def test_options_communication_settings( hass: HomeAssistant, knx_setup, mock_config_entry: MockConfigEntry ) -> None: From 7e2d382ff4a1290b77b0b705e1ec764589bfd29e Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 15:10:35 +0100 Subject: [PATCH 0968/1198] Update aiohasupervisor 
to 0.2.2b5 (#133722) --- homeassistant/components/hassio/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/hassio/manifest.json b/homeassistant/components/hassio/manifest.json index 7276b76afc0..c9ecf6657e8 100644 --- a/homeassistant/components/hassio/manifest.json +++ b/homeassistant/components/hassio/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/hassio", "iot_class": "local_polling", "quality_scale": "internal", - "requirements": ["aiohasupervisor==0.2.2b4"], + "requirements": ["aiohasupervisor==0.2.2b5"], "single_config_entry": true } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 9473871efdd..bfa479b9c13 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -3,7 +3,7 @@ aiodhcpwatcher==1.0.2 aiodiscover==2.1.0 aiodns==3.2.0 -aiohasupervisor==0.2.2b4 +aiohasupervisor==0.2.2b5 aiohttp-fast-zlib==0.2.0 aiohttp==3.11.11 aiohttp_cors==0.7.0 diff --git a/pyproject.toml b/pyproject.toml index 71eae73a859..369f6f40921 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ dependencies = [ # Integrations may depend on hassio integration without listing it to # change behavior based on presence of supervisor. Deprecated with #127228 # Lib can be removed with 2025.11 - "aiohasupervisor==0.2.2b4", + "aiohasupervisor==0.2.2b5", "aiohttp==3.11.11", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.2.0", diff --git a/requirements.txt b/requirements.txt index 78aa370c4ec..82405dc44ef 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ # Home Assistant Core aiodns==3.2.0 -aiohasupervisor==0.2.2b4 +aiohasupervisor==0.2.2b5 aiohttp==3.11.11 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 4a05da9d61a..2a353d7f9c3 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -261,7 +261,7 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.hassio -aiohasupervisor==0.2.2b4 +aiohasupervisor==0.2.2b5 # homeassistant.components.homekit_controller aiohomekit==3.2.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 032165b6182..9503ac7d79a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -246,7 +246,7 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.hassio -aiohasupervisor==0.2.2b4 +aiohasupervisor==0.2.2b5 # homeassistant.components.homekit_controller aiohomekit==3.2.7 From 9e6c1d5b62929a2d502975d0ff2a1201a96d4a9d Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 15:18:08 +0100 Subject: [PATCH 0969/1198] Add power and energy related sensors to Peblar Rocksolid EV Chargers (#133729) --- homeassistant/components/peblar/sensor.py | 163 ++++- homeassistant/components/peblar/strings.json | 35 + tests/components/peblar/fixtures/meter.json | 12 +- .../peblar/fixtures/user_configuration.json | 2 +- .../peblar/snapshots/test_diagnostics.ambr | 14 +- .../peblar/snapshots/test_sensor.ambr | 603 +++++++++++++++++- tests/components/peblar/test_sensor.py | 2 +- 7 files changed, 803 insertions(+), 28 deletions(-) diff --git a/homeassistant/components/peblar/sensor.py b/homeassistant/components/peblar/sensor.py index d31d929fcab..bb9fe9d4937 100644 --- 
a/homeassistant/components/peblar/sensor.py +++ b/homeassistant/components/peblar/sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from peblar import PeblarMeter +from peblar import PeblarMeter, PeblarUserConfiguration from homeassistant.components.sensor import ( SensorDeviceClass, @@ -13,7 +13,13 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.const import UnitOfEnergy +from homeassistant.const import ( + EntityCategory, + UnitOfElectricCurrent, + UnitOfElectricPotential, + UnitOfEnergy, + UnitOfPower, +) from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -27,19 +33,166 @@ from .coordinator import PeblarConfigEntry, PeblarMeterDataUpdateCoordinator class PeblarSensorDescription(SensorEntityDescription): """Describe an Peblar sensor.""" + has_fn: Callable[[PeblarUserConfiguration], bool] = lambda _: True value_fn: Callable[[PeblarMeter], int | None] DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( PeblarSensorDescription( - key="energy_total", + key="current", + device_class=SensorDeviceClass.CURRENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + has_fn=lambda x: x.connected_phases == 1, + native_unit_of_measurement=UnitOfElectricCurrent.MILLIAMPERE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda x: x.current_phase_1, + ), + PeblarSensorDescription( + key="current_phase_1", + translation_key="current_phase_1", + device_class=SensorDeviceClass.CURRENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + has_fn=lambda x: x.connected_phases >= 2, + native_unit_of_measurement=UnitOfElectricCurrent.MILLIAMPERE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda x: x.current_phase_1, + ), + PeblarSensorDescription( + key="current_phase_2", + translation_key="current_phase_2", + device_class=SensorDeviceClass.CURRENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + has_fn=lambda x: x.connected_phases >= 2, + native_unit_of_measurement=UnitOfElectricCurrent.MILLIAMPERE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda x: x.current_phase_2, + ), + PeblarSensorDescription( + key="current_phase_3", + translation_key="current_phase_3", + device_class=SensorDeviceClass.CURRENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + has_fn=lambda x: x.connected_phases == 3, + native_unit_of_measurement=UnitOfElectricCurrent.MILLIAMPERE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda x: x.current_phase_3, + ), + PeblarSensorDescription( + key="energy_session", + translation_key="energy_session", device_class=SensorDeviceClass.ENERGY, native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, state_class=SensorStateClass.TOTAL_INCREASING, suggested_display_precision=2, suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value_fn=lambda x: 
x.energy_session, + ), + PeblarSensorDescription( + key="energy_total", + translation_key="energy_total", + device_class=SensorDeviceClass.ENERGY, + entity_category=EntityCategory.DIAGNOSTIC, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + state_class=SensorStateClass.TOTAL_INCREASING, + suggested_display_precision=2, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_fn=lambda x: x.energy_total, ), + PeblarSensorDescription( + key="power_total", + device_class=SensorDeviceClass.POWER, + native_unit_of_measurement=UnitOfPower.WATT, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda x: x.power_total, + ), + PeblarSensorDescription( + key="power_phase_1", + translation_key="power_phase_1", + device_class=SensorDeviceClass.POWER, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + has_fn=lambda x: x.connected_phases >= 2, + native_unit_of_measurement=UnitOfPower.WATT, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda x: x.power_phase_1, + ), + PeblarSensorDescription( + key="power_phase_2", + translation_key="power_phase_2", + device_class=SensorDeviceClass.POWER, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + has_fn=lambda x: x.connected_phases >= 2, + native_unit_of_measurement=UnitOfPower.WATT, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda x: x.power_phase_2, + ), + PeblarSensorDescription( + key="power_phase_3", + translation_key="power_phase_3", + device_class=SensorDeviceClass.POWER, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + has_fn=lambda x: x.connected_phases == 3, + native_unit_of_measurement=UnitOfPower.WATT, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda x: x.power_phase_3, + ), + PeblarSensorDescription( + key="voltage", + device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + has_fn=lambda x: x.connected_phases == 1, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda x: x.voltage_phase_1, + ), + PeblarSensorDescription( + key="voltage_phase_1", + translation_key="voltage_phase_1", + device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + has_fn=lambda x: x.connected_phases >= 2, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda x: x.voltage_phase_1, + ), + PeblarSensorDescription( + key="voltage_phase_2", + translation_key="voltage_phase_2", + device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + has_fn=lambda x: x.connected_phases >= 2, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda x: x.voltage_phase_2, + ), + PeblarSensorDescription( + key="voltage_phase_3", + translation_key="voltage_phase_3", + device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + has_fn=lambda x: x.connected_phases == 3, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda x: x.voltage_phase_3, + ), ) @@ -50,7 +203,9 @@ async def async_setup_entry( ) -> None: """Set up Peblar sensors based on a config entry.""" async_add_entities( - 
PeblarSensorEntity(entry, description) for description in DESCRIPTIONS + PeblarSensorEntity(entry, description) + for description in DESCRIPTIONS + if description.has_fn(entry.runtime_data.user_configuraton_coordinator.data) ) diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index a36cd14fe48..02aee0eacc9 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -45,6 +45,41 @@ } } }, + "sensor": { + "current_phase_1": { + "name": "Current phase 1" + }, + "current_phase_2": { + "name": "Current phase 2" + }, + "current_phase_3": { + "name": "Current phase 3" + }, + "energy_session": { + "name": "Session energy" + }, + "energy_total": { + "name": "Lifetime energy" + }, + "power_phase_1": { + "name": "Power phase 1" + }, + "power_phase_2": { + "name": "Power phase 2" + }, + "power_phase_3": { + "name": "Power phase 3" + }, + "voltage_phase_1": { + "name": "Voltage phase 1" + }, + "voltage_phase_2": { + "name": "Voltage phase 2" + }, + "voltage_phase_3": { + "name": "Voltage phase 3" + } + }, "update": { "customization": { "name": "Customization" diff --git a/tests/components/peblar/fixtures/meter.json b/tests/components/peblar/fixtures/meter.json index 1f32a3fbebc..f426adf9b8a 100644 --- a/tests/components/peblar/fixtures/meter.json +++ b/tests/components/peblar/fixtures/meter.json @@ -1,14 +1,14 @@ { - "CurrentPhase1": 0, + "CurrentPhase1": 14242, "CurrentPhase2": 0, "CurrentPhase3": 0, - "EnergySession": 0, - "EnergyTotal": 880321, - "PowerPhase1": 0, + "EnergySession": 381, + "EnergyTotal": 880703, + "PowerPhase1": 3185, "PowerPhase2": 0, "PowerPhase3": 0, - "PowerTotal": 0, - "VoltagePhase1": 230, + "PowerTotal": 3185, + "VoltagePhase1": 223, "VoltagePhase2": null, "VoltagePhase3": null } diff --git a/tests/components/peblar/fixtures/user_configuration.json b/tests/components/peblar/fixtures/user_configuration.json index b778ad35f18..b41aecd00ef 100644 --- a/tests/components/peblar/fixtures/user_configuration.json +++ b/tests/components/peblar/fixtures/user_configuration.json @@ -3,7 +3,7 @@ "BopHomeWizardAddress": "p1meter-093586", "BopSource": "homewizard", "BopSourceParameters": "{}", - "ConnectedPhases": 1, + "ConnectedPhases": 3, "CurrentCtrlBopCtType": "CTK05-14", "CurrentCtrlBopEnable": true, "CurrentCtrlBopFuseRating": 35, diff --git a/tests/components/peblar/snapshots/test_diagnostics.ambr b/tests/components/peblar/snapshots/test_diagnostics.ambr index fa6eb857e09..08d4d3ac6c6 100644 --- a/tests/components/peblar/snapshots/test_diagnostics.ambr +++ b/tests/components/peblar/snapshots/test_diagnostics.ambr @@ -2,16 +2,16 @@ # name: test_diagnostics dict({ 'meter': dict({ - 'CurrentPhase1': 0, + 'CurrentPhase1': 14242, 'CurrentPhase2': 0, 'CurrentPhase3': 0, - 'EnergySession': 0, - 'EnergyTotal': 880321, - 'PowerPhase1': 0, + 'EnergySession': 381, + 'EnergyTotal': 880703, + 'PowerPhase1': 3185, 'PowerPhase2': 0, 'PowerPhase3': 0, - 'PowerTotal': 0, - 'VoltagePhase1': 230, + 'PowerTotal': 3185, + 'VoltagePhase1': 223, }), 'system_information': dict({ 'BopCalIGainA': 264625, @@ -80,7 +80,7 @@ 'BopHomeWizardAddress': 'p1meter-093586', 'BopSource': 'homewizard', 'BopSourceParameters': '{}', - 'ConnectedPhases': 1, + 'ConnectedPhases': 3, 'CurrentCtrlBopCtType': 'CTK05-14', 'CurrentCtrlBopEnable': True, 'CurrentCtrlBopFuseRating': 35, diff --git a/tests/components/peblar/snapshots/test_sensor.ambr b/tests/components/peblar/snapshots/test_sensor.ambr index 
29a5d7f7dd1..c3020b60078 100644 --- a/tests/components/peblar/snapshots/test_sensor.ambr +++ b/tests/components/peblar/snapshots/test_sensor.ambr @@ -1,5 +1,176 @@ # serializer version: 1 -# name: test_entities[sensor][sensor.peblar_ev_charger_energy-entry] +# name: test_entities[sensor][sensor.peblar_ev_charger_current_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_current_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current phase 1', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_phase_1', + 'unique_id': '23-45-A4O-MOF_current_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_current_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Peblar EV Charger Current phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_current_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '14.242', + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_current_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_current_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current phase 2', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_phase_2', + 'unique_id': '23-45-A4O-MOF_current_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_current_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Peblar EV Charger Current phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_current_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_current_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_current_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 
'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current phase 3', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_phase_3', + 'unique_id': '23-45-A4O-MOF_current_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_current_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Peblar EV Charger Current phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_current_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_lifetime_energy-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -12,8 +183,8 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.peblar_ev_charger_energy', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_lifetime_energy', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -31,28 +202,442 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Energy', + 'original_name': 'Lifetime energy', 'platform': 'peblar', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'energy_total', 'unique_id': '23-45-A4O-MOF_energy_total', 'unit_of_measurement': , }) # --- -# name: test_entities[sensor][sensor.peblar_ev_charger_energy-state] +# name: test_entities[sensor][sensor.peblar_ev_charger_lifetime_energy-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'Peblar EV Charger Energy', + 'friendly_name': 'Peblar EV Charger Lifetime energy', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.peblar_ev_charger_energy', + 'entity_id': 'sensor.peblar_ev_charger_lifetime_energy', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '880.321', + 'state': '880.703', + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.peblar_ev_charger_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '23-45-A4O-MOF_power_total', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Peblar EV Charger Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3185', + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_power_phase_1-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_power_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power phase 1', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_phase_1', + 'unique_id': '23-45-A4O-MOF_power_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_power_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Peblar EV Charger Power phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_power_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3185', + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_power_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_power_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power phase 2', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_phase_2', + 'unique_id': '23-45-A4O-MOF_power_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_power_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Peblar EV Charger Power phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_power_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_power_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_power_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power phase 3', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_phase_3', + 'unique_id': '23-45-A4O-MOF_power_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_power_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Peblar EV Charger Power phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.peblar_ev_charger_power_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_session_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.peblar_ev_charger_session_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Session energy', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_session', + 'unique_id': '23-45-A4O-MOF_energy_session', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_session_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Peblar EV Charger Session energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_session_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.381', + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_voltage_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_voltage_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 1', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_phase_1', + 'unique_id': '23-45-A4O-MOF_voltage_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_voltage_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Peblar EV Charger Voltage phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_voltage_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '223', + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_voltage_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_voltage_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 2', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_phase_2', + 'unique_id': 
'23-45-A4O-MOF_voltage_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_voltage_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Peblar EV Charger Voltage phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_voltage_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_voltage_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_voltage_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 3', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_phase_3', + 'unique_id': '23-45-A4O-MOF_voltage_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_voltage_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Peblar EV Charger Voltage phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_voltage_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', }) # --- diff --git a/tests/components/peblar/test_sensor.py b/tests/components/peblar/test_sensor.py index e2a49942cd5..97402206d33 100644 --- a/tests/components/peblar/test_sensor.py +++ b/tests/components/peblar/test_sensor.py @@ -12,7 +12,7 @@ from tests.common import MockConfigEntry, snapshot_platform @pytest.mark.parametrize("init_integration", [Platform.SENSOR], indirect=True) -@pytest.mark.usefixtures("init_integration") +@pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration") async def test_entities( hass: HomeAssistant, snapshot: SnapshotAssertion, From 9292bfc6eda19ceff288d7381334d75a6f22d680 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Fern=C3=A1ndez=20Rojas?= Date: Sat, 21 Dec 2024 15:19:55 +0100 Subject: [PATCH 0970/1198] Update AEMET-OpenData to v0.6.4 (#133723) --- homeassistant/components/aemet/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/aemet/manifest.json b/homeassistant/components/aemet/manifest.json index 5c9d1ff7e5a..24ca0099091 100644 --- a/homeassistant/components/aemet/manifest.json +++ b/homeassistant/components/aemet/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/aemet", "iot_class": "cloud_polling", "loggers": ["aemet_opendata"], - "requirements": ["AEMET-OpenData==0.6.3"] + "requirements": ["AEMET-OpenData==0.6.4"] } diff --git a/requirements_all.txt b/requirements_all.txt index 2a353d7f9c3..e9376072875 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -4,7 +4,7 @@ -r requirements.txt # homeassistant.components.aemet -AEMET-OpenData==0.6.3 +AEMET-OpenData==0.6.4 # homeassistant.components.honeywell 
AIOSomecomfort==0.0.28 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 9503ac7d79a..c0761fafc8b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -4,7 +4,7 @@ -r requirements_test.txt # homeassistant.components.aemet -AEMET-OpenData==0.6.3 +AEMET-OpenData==0.6.4 # homeassistant.components.honeywell AIOSomecomfort==0.0.28 From ef31413a5986de598b1e29d73e157ea928d84e53 Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Sat, 21 Dec 2024 15:20:10 +0100 Subject: [PATCH 0971/1198] Add missing asserts to enphase_envoy config flow test (#133730) --- homeassistant/components/enphase_envoy/quality_scale.yaml | 3 --- tests/components/enphase_envoy/test_config_flow.py | 8 ++++++++ 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/enphase_envoy/quality_scale.yaml b/homeassistant/components/enphase_envoy/quality_scale.yaml index 8e096538f01..d64a62d4a48 100644 --- a/homeassistant/components/enphase_envoy/quality_scale.yaml +++ b/homeassistant/components/enphase_envoy/quality_scale.yaml @@ -11,13 +11,10 @@ rules: config-flow-test-coverage: status: todo comment: | - - test_form is missing an assertion for the unique id of the resulting entry - - Let's also have test_user_no_serial_number assert the unique_id (as in, it can't be set to the serial_number since we dont have one, so let's assert what it will result in) - Let's have every test result in either CREATE_ENTRY or ABORT (like test_form_invalid_auth or test_form_cannot_connect, they can be parametrized) - test_zeroconf_token_firmware and test_zeroconf_pre_token_firmware can also be parametrized I think - test_zero_conf_malformed_serial_property - with pytest.raises(KeyError) as ex:: I don't believe this should be able to raise a KeyError Shouldn't we abort the flow? 
- test_reauth -> Let's also assert result before we start with the async_configure part config-flow: status: todo comment: | diff --git a/tests/components/enphase_envoy/test_config_flow.py b/tests/components/enphase_envoy/test_config_flow.py index 44e2e680d5f..b0b139053ce 100644 --- a/tests/components/enphase_envoy/test_config_flow.py +++ b/tests/components/enphase_envoy/test_config_flow.py @@ -49,6 +49,7 @@ async def test_form( ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Envoy 1234" + assert result["result"].unique_id == "1234" assert result["data"] == { CONF_HOST: "1.1.1.1", CONF_NAME: "Envoy 1234", @@ -80,6 +81,7 @@ async def test_user_no_serial_number( ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Envoy" + assert result["result"].unique_id is None assert result["data"] == { CONF_HOST: "1.1.1.1", CONF_NAME: "Envoy", @@ -100,6 +102,8 @@ async def test_form_invalid_auth( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) + assert result["type"] is FlowResultType.FORM + result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -131,6 +135,8 @@ async def test_form_cannot_connect( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) + assert result["type"] is FlowResultType.FORM + result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -634,6 +640,8 @@ async def test_reauth( """Test we reauth auth.""" await setup_integration(hass, config_entry) result = await config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { From cc134c820bcfd28ba23fcb2a1bdf2bd5e5b4d50a Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Sat, 21 Dec 2024 15:49:24 +0100 Subject: [PATCH 0972/1198] Reuse title of deleted enphase_envoy config entry if present (#133611) --- .../components/enphase_envoy/config_flow.py | 8 +- .../enphase_envoy/quality_scale.yaml | 2 - .../enphase_envoy/test_config_flow.py | 82 +++++++++++++++++++ 3 files changed, 88 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/enphase_envoy/config_flow.py b/homeassistant/components/enphase_envoy/config_flow.py index 70ba3570e91..1a2186d305e 100644 --- a/homeassistant/components/enphase_envoy/config_flow.py +++ b/homeassistant/components/enphase_envoy/config_flow.py @@ -141,9 +141,13 @@ class EnphaseConfigFlow(ConfigFlow, domain=DOMAIN): and entry.data[CONF_HOST] == self.ip_address ): _LOGGER.debug( - "Zeroconf update envoy with this ip and blank serial in unique_id", + "Zeroconf update envoy with this ip and blank unique_id", ) - title = f"{ENVOY} {serial}" if entry.title == ENVOY else ENVOY + # Found an entry with blank unique_id (prior deleted) with same ip + # If the title is still default shorthand 'Envoy' then append serial + # to differentiate multiple Envoy. Don't change the title if any other + # title is still present in the old entry. 
+ title = f"{ENVOY} {serial}" if entry.title == ENVOY else entry.title return self.async_update_reload_and_abort( entry, title=title, unique_id=serial, reason="already_configured" ) diff --git a/homeassistant/components/enphase_envoy/quality_scale.yaml b/homeassistant/components/enphase_envoy/quality_scale.yaml index d64a62d4a48..2b9350ed944 100644 --- a/homeassistant/components/enphase_envoy/quality_scale.yaml +++ b/homeassistant/components/enphase_envoy/quality_scale.yaml @@ -18,8 +18,6 @@ rules: config-flow: status: todo comment: | - - async_step_zeroconf -> a config entry title is considered userland, - so if someone renamed their entry, it will be reverted back with the code at L146. - async_step_reaut L160: I believe that the unique is already set when starting a reauth flow - The config flow is missing data descriptions for the other fields dependency-transparency: done diff --git a/tests/components/enphase_envoy/test_config_flow.py b/tests/components/enphase_envoy/test_config_flow.py index b0b139053ce..c20e73d774b 100644 --- a/tests/components/enphase_envoy/test_config_flow.py +++ b/tests/components/enphase_envoy/test_config_flow.py @@ -631,6 +631,88 @@ async def test_zero_conf_old_blank_entry( assert entry.title == "Envoy 1234" +async def test_zero_conf_old_blank_entry_standard_title( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, +) -> None: + """Test re-using old blank entry was Envoy as title.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "", + CONF_PASSWORD: "", + CONF_NAME: "unknown", + }, + unique_id=None, + title="Envoy", + ) + entry.add_to_hass(hass) + # test if shorthand title Envoy gets serial appended + hass.config_entries.async_update_entry(entry, title="Envoy") + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("1.1.1.1"), + ip_addresses=[ip_address("1.1.1.1"), ip_address("1.1.1.2")], + hostname="mock_hostname", + name="mock_name", + port=None, + properties={"serialnum": "1234", "protovers": "7.1.2"}, + type="mock_type", + ), + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + assert entry.data[CONF_HOST] == "1.1.1.1" + assert entry.unique_id == "1234" + assert entry.title == "Envoy 1234" + + +async def test_zero_conf_old_blank_entry_user_title( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, +) -> None: + """Test re-using old blank entry with user title.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "", + CONF_PASSWORD: "", + CONF_NAME: "unknown", + }, + unique_id=None, + title="Envoy", + ) + entry.add_to_hass(hass) + # set user title on entry + hass.config_entries.async_update_entry(entry, title="Envoy Backyard") + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("1.1.1.1"), + ip_addresses=[ip_address("1.1.1.1"), ip_address("1.1.1.2")], + hostname="mock_hostname", + name="mock_name", + port=None, + properties={"serialnum": "1234", "protovers": "7.1.2"}, + type="mock_type", + ), + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + assert entry.data[CONF_HOST] == "1.1.1.1" + assert entry.unique_id == "1234" + assert entry.title == "Envoy Backyard" + + async def test_reauth( hass: 
HomeAssistant, config_entry: MockConfigEntry, From 3dad5f68961012ee8ee1bc69c2697e615d9e2a1b Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Sat, 21 Dec 2024 15:54:02 +0100 Subject: [PATCH 0973/1198] Replace two outdated occurrences of "service" with "action" (#133728) --- homeassistant/components/knx/strings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/knx/strings.json b/homeassistant/components/knx/strings.json index cde697007aa..6c717c932b8 100644 --- a/homeassistant/components/knx/strings.json +++ b/homeassistant/components/knx/strings.json @@ -385,7 +385,7 @@ }, "event_register": { "name": "Register knx_event", - "description": "Adds or removes group addresses to knx_event filter for triggering `knx_event`s. Only addresses added with this service can be removed.", + "description": "Adds or removes group addresses to knx_event filter for triggering `knx_event`s. Only addresses added with this action can be removed.", "fields": { "address": { "name": "[%key:component::knx::services::send::fields::address::name%]", @@ -403,7 +403,7 @@ }, "exposure_register": { "name": "Expose to KNX bus", - "description": "Adds or removes exposures to KNX bus. Only exposures added with this service can be removed.", + "description": "Adds or removes exposures to KNX bus. Only exposures added with this action can be removed.", "fields": { "address": { "name": "[%key:component::knx::services::send::fields::address::name%]", From ac2090d2f5e753bba5849c9037ab54e7442b9983 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Sat, 21 Dec 2024 16:16:12 +0100 Subject: [PATCH 0974/1198] Replace "service" with "action" in Z-Wave action descriptions (#133727) Replace all occurrence of "service" with "action" Clean up the remaining occurrences of "service" with "action" to be consistent with the new terminology in Home Assistant. --- homeassistant/components/zwave_js/strings.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/zwave_js/strings.json b/homeassistant/components/zwave_js/strings.json index 28789bbf9f4..0c3ca6313d4 100644 --- a/homeassistant/components/zwave_js/strings.json +++ b/homeassistant/components/zwave_js/strings.json @@ -306,7 +306,7 @@ "description": "Calls a Command Class API on a node. Some Command Classes can't be fully controlled via the `set_value` action and require direct calls to the Command Class API.", "fields": { "area_id": { - "description": "The area(s) to target for this service. If an area is specified, all zwave_js devices and entities in that area will be targeted for this service.", + "description": "The area(s) to target for this action. If an area is specified, all zwave_js devices and entities in that area will be targeted for this action.", "name": "Area ID(s)" }, "command_class": { @@ -314,7 +314,7 @@ "name": "[%key:component::zwave_js::services::set_value::fields::command_class::name%]" }, "device_id": { - "description": "The device(s) to target for this service.", + "description": "The device(s) to target for this action.", "name": "Device ID(s)" }, "endpoint": { @@ -322,7 +322,7 @@ "name": "[%key:component::zwave_js::services::set_config_parameter::fields::endpoint::name%]" }, "entity_id": { - "description": "The entity ID(s) to target for this service.", + "description": "The entity ID(s) to target for this action.", "name": "Entity ID(s)" }, "method_name": { @@ -556,7 +556,7 @@ "description": "Changes any value that Z-Wave JS recognizes on a Z-Wave device. 
This action has minimal validation so only use this action if you know what you are doing.", "fields": { "area_id": { - "description": "The area(s) to target for this service. If an area is specified, all zwave_js devices and entities in that area will be targeted for this service.", + "description": "The area(s) to target for this action. If an area is specified, all zwave_js devices and entities in that area will be targeted for this action.", "name": "Area ID(s)" }, "command_class": { @@ -564,7 +564,7 @@ "name": "Command class" }, "device_id": { - "description": "The device(s) to target for this service.", + "description": "The device(s) to target for this action.", "name": "Device ID(s)" }, "endpoint": { @@ -572,7 +572,7 @@ "name": "[%key:component::zwave_js::services::set_config_parameter::fields::endpoint::name%]" }, "entity_id": { - "description": "The entity ID(s) to target for this service.", + "description": "The entity ID(s) to target for this action.", "name": "Entity ID(s)" }, "options": { From 4b6febc7579e113f8c22dbf64ff09193fde8445e Mon Sep 17 00:00:00 2001 From: Tom Date: Sat, 21 Dec 2024 16:44:14 +0100 Subject: [PATCH 0975/1198] Add reconfiguration flow to Plugwise (#132878) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Abílio Costa Co-authored-by: Joost Lekkerkerker --- .../components/plugwise/config_flow.py | 102 ++++++++++++++---- .../components/plugwise/quality_scale.yaml | 4 +- .../components/plugwise/strings.json | 17 ++- tests/components/plugwise/conftest.py | 12 ++- tests/components/plugwise/test_config_flow.py | 101 +++++++++++++++-- 5 files changed, 202 insertions(+), 34 deletions(-) diff --git a/homeassistant/components/plugwise/config_flow.py b/homeassistant/components/plugwise/config_flow.py index 57abb1ccb86..6114dd39a6d 100644 --- a/homeassistant/components/plugwise/config_flow.py +++ b/homeassistant/components/plugwise/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +import logging from typing import Any, Self from plugwise import Smile @@ -41,8 +42,16 @@ from .const import ( ZEROCONF_MAP, ) +_LOGGER = logging.getLogger(__name__) -def base_schema(discovery_info: ZeroconfServiceInfo | None) -> vol.Schema: +SMILE_RECONF_SCHEMA = vol.Schema( + { + vol.Required(CONF_HOST): str, + } +) + + +def smile_user_schema(discovery_info: ZeroconfServiceInfo | None) -> vol.Schema: """Generate base schema for gateways.""" schema = vol.Schema({vol.Required(CONF_PASSWORD): str}) @@ -50,6 +59,7 @@ def base_schema(discovery_info: ZeroconfServiceInfo | None) -> vol.Schema: schema = schema.extend( { vol.Required(CONF_HOST): str, + # Port under investigation for removal (hence not added in #132878) vol.Optional(CONF_PORT, default=DEFAULT_PORT): int, vol.Required(CONF_USERNAME, default=SMILE): vol.In( {SMILE: FLOW_SMILE, STRETCH: FLOW_STRETCH} @@ -63,7 +73,7 @@ def base_schema(discovery_info: ZeroconfServiceInfo | None) -> vol.Schema: async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> Smile: """Validate whether the user input allows us to connect to the gateway. - Data has the keys from base_schema() with values provided by the user. + Data has the keys from the schema with values provided by the user. 
""" websession = async_get_clientsession(hass, verify_ssl=False) api = Smile( @@ -77,6 +87,32 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> Smile: return api +async def verify_connection( + hass: HomeAssistant, user_input: dict[str, Any] +) -> tuple[Smile | None, dict[str, str]]: + """Verify and return the gateway connection or an error.""" + errors: dict[str, str] = {} + + try: + return (await validate_input(hass, user_input), errors) + except ConnectionFailedError: + errors[CONF_BASE] = "cannot_connect" + except InvalidAuthentication: + errors[CONF_BASE] = "invalid_auth" + except InvalidSetupError: + errors[CONF_BASE] = "invalid_setup" + except (InvalidXMLError, ResponseError): + errors[CONF_BASE] = "response_error" + except UnsupportedDeviceError: + errors[CONF_BASE] = "unsupported" + except Exception: # noqa: BLE001 + _LOGGER.exception( + "Unknown exception while verifying connection with your Plugwise Smile" + ) + errors[CONF_BASE] = "unknown" + return (None, errors) + + class PlugwiseConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Plugwise Smile.""" @@ -166,30 +202,56 @@ class PlugwiseConfigFlow(ConfigFlow, domain=DOMAIN): user_input[CONF_PORT] = self.discovery_info.port user_input[CONF_USERNAME] = self._username - try: - api = await validate_input(self.hass, user_input) - except ConnectionFailedError: - errors[CONF_BASE] = "cannot_connect" - except InvalidAuthentication: - errors[CONF_BASE] = "invalid_auth" - except InvalidSetupError: - errors[CONF_BASE] = "invalid_setup" - except (InvalidXMLError, ResponseError): - errors[CONF_BASE] = "response_error" - except UnsupportedDeviceError: - errors[CONF_BASE] = "unsupported" - except Exception: # noqa: BLE001 - errors[CONF_BASE] = "unknown" - else: + api, errors = await verify_connection(self.hass, user_input) + if api: await self.async_set_unique_id( - api.smile_hostname or api.gateway_id, raise_on_progress=False + api.smile_hostname or api.gateway_id, + raise_on_progress=False, ) self._abort_if_unique_id_configured() - return self.async_create_entry(title=api.smile_name, data=user_input) return self.async_show_form( step_id=SOURCE_USER, - data_schema=base_schema(self.discovery_info), + data_schema=smile_user_schema(self.discovery_info), + errors=errors, + ) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of the integration.""" + errors: dict[str, str] = {} + + reconfigure_entry = self._get_reconfigure_entry() + + if user_input: + # Keep current username and password + full_input = { + CONF_HOST: user_input.get(CONF_HOST), + CONF_PORT: reconfigure_entry.data.get(CONF_PORT), + CONF_USERNAME: reconfigure_entry.data.get(CONF_USERNAME), + CONF_PASSWORD: reconfigure_entry.data.get(CONF_PASSWORD), + } + + api, errors = await verify_connection(self.hass, full_input) + if api: + await self.async_set_unique_id( + api.smile_hostname or api.gateway_id, + raise_on_progress=False, + ) + self._abort_if_unique_id_mismatch(reason="not_the_same_smile") + return self.async_update_reload_and_abort( + reconfigure_entry, + data_updates=full_input, + ) + + return self.async_show_form( + step_id="reconfigure", + data_schema=self.add_suggested_values_to_schema( + data_schema=SMILE_RECONF_SCHEMA, + suggested_values=reconfigure_entry.data, + ), + description_placeholders={"title": reconfigure_entry.title}, errors=errors, ) diff --git a/homeassistant/components/plugwise/quality_scale.yaml 
b/homeassistant/components/plugwise/quality_scale.yaml index ce0788c44f7..a7b955b4713 100644 --- a/homeassistant/components/plugwise/quality_scale.yaml +++ b/homeassistant/components/plugwise/quality_scale.yaml @@ -52,9 +52,7 @@ rules: diagnostics: done exception-translations: done icon-translations: done - reconfiguration-flow: - status: todo - comment: This integration does not have any reconfiguration steps (yet) investigate how/why + reconfiguration-flow: done dynamic-devices: done discovery-update-info: done repair-issues: diff --git a/homeassistant/components/plugwise/strings.json b/homeassistant/components/plugwise/strings.json index 87a8e120591..d16b38df992 100644 --- a/homeassistant/components/plugwise/strings.json +++ b/homeassistant/components/plugwise/strings.json @@ -1,12 +1,23 @@ { "config": { "step": { + "reconfigure": { + "description": "Update configuration for {title}.", + "data": { + "host": "[%key:common::config_flow::data::ip%]", + "port": "[%key:common::config_flow::data::port%]" + }, + "data_description": { + "host": "[%key:component::plugwise::config::step::user::data_description::host%]", + "port": "[%key:component::plugwise::config::step::user::data_description::port%]" + } + }, "user": { "title": "Connect to the Smile", "description": "Please enter", "data": { - "password": "Smile ID", "host": "[%key:common::config_flow::data::ip%]", + "password": "Smile ID", "port": "[%key:common::config_flow::data::port%]", "username": "Smile Username" }, @@ -28,7 +39,9 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", - "anna_with_adam": "Both Anna and Adam detected. Add your Adam instead of your Anna" + "anna_with_adam": "Both Anna and Adam detected. Add your Adam instead of your Anna", + "not_the_same_smile": "The configured Smile ID does not match the Smile ID on the requested IP address.", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" } }, "entity": { diff --git a/tests/components/plugwise/conftest.py b/tests/components/plugwise/conftest.py index dead58e0581..e0ada8ea849 100644 --- a/tests/components/plugwise/conftest.py +++ b/tests/components/plugwise/conftest.py @@ -77,9 +77,15 @@ def mock_smile_adam() -> Generator[MagicMock]: """Create a Mock Adam environment for testing exceptions.""" chosen_env = "m_adam_multiple_devices_per_zone" - with patch( - "homeassistant.components.plugwise.coordinator.Smile", autospec=True - ) as smile_mock: + with ( + patch( + "homeassistant.components.plugwise.coordinator.Smile", autospec=True + ) as smile_mock, + patch( + "homeassistant.components.plugwise.config_flow.Smile", + new=smile_mock, + ), + ): smile = smile_mock.return_value smile.gateway_id = "fe799307f1624099878210aa0b9f1475" diff --git a/tests/components/plugwise/test_config_flow.py b/tests/components/plugwise/test_config_flow.py index 9e1e29f4a48..1f30fc972bb 100644 --- a/tests/components/plugwise/test_config_flow.py +++ b/tests/components/plugwise/test_config_flow.py @@ -14,7 +14,7 @@ import pytest from homeassistant.components.plugwise.const import DEFAULT_PORT, DOMAIN from homeassistant.components.zeroconf import ZeroconfServiceInfo -from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF, ConfigFlowResult from homeassistant.const import ( CONF_HOST, CONF_NAME, @@ -35,7 +35,7 @@ TEST_PASSWORD = "test_password" TEST_PORT = 81 TEST_USERNAME = "smile" TEST_USERNAME2 = "stretch" -MOCK_SMILE_ID = 
"smile12345" +TEST_SMILE_HOST = "smile12345" TEST_DISCOVERY = ZeroconfServiceInfo( ip_address=ip_address(TEST_HOST), @@ -129,7 +129,7 @@ async def test_form( assert len(mock_setup_entry.mock_calls) == 1 assert len(mock_smile_config_flow.connect.mock_calls) == 1 - assert result2["result"].unique_id == MOCK_SMILE_ID + assert result2["result"].unique_id == TEST_SMILE_HOST @pytest.mark.parametrize( @@ -175,7 +175,7 @@ async def test_zeroconf_flow( assert len(mock_setup_entry.mock_calls) == 1 assert len(mock_smile_config_flow.connect.mock_calls) == 1 - assert result2["result"].unique_id == MOCK_SMILE_ID + assert result2["result"].unique_id == TEST_SMILE_HOST async def test_zeroconf_flow_stretch( @@ -274,7 +274,7 @@ async def test_flow_errors( side_effect: Exception, reason: str, ) -> None: - """Test we handle invalid auth.""" + """Test we handle each exception error.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={CONF_SOURCE: SOURCE_USER}, @@ -285,6 +285,7 @@ async def test_flow_errors( assert "flow_id" in result mock_smile_config_flow.connect.side_effect = side_effect + result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_HOST: TEST_HOST, CONF_PASSWORD: TEST_PASSWORD}, @@ -330,7 +331,7 @@ async def test_user_abort_existing_anna( CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, }, - unique_id=MOCK_SMILE_ID, + unique_id=TEST_SMILE_HOST, ) entry.add_to_hass(hass) @@ -435,3 +436,91 @@ async def test_zeroconf_abort_anna_with_adam(hass: HomeAssistant) -> None: flows_in_progress = hass.config_entries.flow._handler_progress_index[DOMAIN] assert len(flows_in_progress) == 1 assert list(flows_in_progress)[0].product == "smile_open_therm" + + +async def _start_reconfigure_flow( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + host_ip: str, +) -> ConfigFlowResult: + """Initialize a reconfigure flow.""" + mock_config_entry.add_to_hass(hass) + + reconfigure_result = await mock_config_entry.start_reconfigure_flow(hass) + + assert reconfigure_result["type"] is FlowResultType.FORM + assert reconfigure_result["step_id"] == "reconfigure" + + return await hass.config_entries.flow.async_configure( + reconfigure_result["flow_id"], {CONF_HOST: host_ip} + ) + + +async def test_reconfigure_flow( + hass: HomeAssistant, + mock_smile_adam: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfigure flow.""" + result = await _start_reconfigure_flow(hass, mock_config_entry, TEST_HOST) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + assert mock_config_entry.data.get(CONF_HOST) == TEST_HOST + + +async def test_reconfigure_flow_smile_mismatch( + hass: HomeAssistant, + mock_smile_adam: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfigure flow aborts on other Smile ID.""" + mock_smile_adam.smile_hostname = TEST_SMILE_HOST + + result = await _start_reconfigure_flow(hass, mock_config_entry, TEST_HOST) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "not_the_same_smile" + + +@pytest.mark.parametrize( + ("side_effect", "reason"), + [ + (ConnectionFailedError, "cannot_connect"), + (InvalidAuthentication, "invalid_auth"), + (InvalidSetupError, "invalid_setup"), + (InvalidXMLError, "response_error"), + (RuntimeError, "unknown"), + (UnsupportedDeviceError, "unsupported"), + ], +) +async def test_reconfigure_flow_connect_errors( + hass: 
HomeAssistant, + mock_smile_adam: AsyncMock, + mock_config_entry: MockConfigEntry, + side_effect: Exception, + reason: str, +) -> None: + """Test we handle each reconfigure exception error and recover.""" + + mock_smile_adam.connect.side_effect = side_effect + + result = await _start_reconfigure_flow(hass, mock_config_entry, TEST_HOST) + + assert result.get("type") is FlowResultType.FORM + assert result.get("errors") == {"base": reason} + assert result.get("step_id") == "reconfigure" + + mock_smile_adam.connect.side_effect = None + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: TEST_HOST} + ) + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reconfigure_successful" + + assert mock_config_entry.data.get(CONF_HOST) == TEST_HOST From 944ad9022d349f4e6a1909ad98299246f6fb8bb1 Mon Sep 17 00:00:00 2001 From: "Steven B." <51370195+sdb9696@users.noreply.github.com> Date: Sat, 21 Dec 2024 18:04:09 +0000 Subject: [PATCH 0976/1198] Bump tplink python-kasa dependency to 0.9.0 (#133735) Release notes: https://github.com/python-kasa/python-kasa/releases/tag/0.9.0 --- homeassistant/components/tplink/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/tplink/manifest.json b/homeassistant/components/tplink/manifest.json index 6ce46c0d488..65061882027 100644 --- a/homeassistant/components/tplink/manifest.json +++ b/homeassistant/components/tplink/manifest.json @@ -300,5 +300,5 @@ "documentation": "https://www.home-assistant.io/integrations/tplink", "iot_class": "local_polling", "loggers": ["kasa"], - "requirements": ["python-kasa[speedups]==0.8.1"] + "requirements": ["python-kasa[speedups]==0.9.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index e9376072875..b1aa085ee52 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2375,7 +2375,7 @@ python-join-api==0.0.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.8.1 +python-kasa[speedups]==0.9.0 # homeassistant.components.linkplay python-linkplay==0.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c0761fafc8b..3fdd84009fc 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1911,7 +1911,7 @@ python-izone==1.2.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.8.1 +python-kasa[speedups]==0.9.0 # homeassistant.components.linkplay python-linkplay==0.1.1 From 0037799bfe50e812cb141b4a21b0b8e308609941 Mon Sep 17 00:00:00 2001 From: "Glenn Vandeuren (aka Iondependent)" Date: Sat, 21 Dec 2024 19:28:11 +0100 Subject: [PATCH 0977/1198] Change niko_home_control library to nhc to get push updates (#132750) Co-authored-by: Joost Lekkerkerker Co-authored-by: VandeurenGlenn <8685280+VandeurenGlenn@users.noreply.github.com> Co-authored-by: Joostlek --- .../components/niko_home_control/__init__.py | 87 +++++++++---------- .../niko_home_control/config_flow.py | 14 +-- .../components/niko_home_control/const.py | 3 + .../components/niko_home_control/light.py | 57 ++++++------ .../niko_home_control/manifest.json | 4 +- homeassistant/generated/integrations.json | 2 +- requirements_all.txt | 6 +- requirements_test_all.txt | 6 +- .../components/niko_home_control/conftest.py | 2 +- .../niko_home_control/test_config_flow.py | 6 +- .../components/niko_home_control/test_init.py | 36 ++++++++ 11 files changed, 131 insertions(+), 92 deletions(-) create 
mode 100644 tests/components/niko_home_control/test_init.py diff --git a/homeassistant/components/niko_home_control/__init__.py b/homeassistant/components/niko_home_control/__init__.py index bdbb8d6b85f..0bc1b117a70 100644 --- a/homeassistant/components/niko_home_control/__init__.py +++ b/homeassistant/components/niko_home_control/__init__.py @@ -2,35 +2,29 @@ from __future__ import annotations -from datetime import timedelta -import logging - from nclib.errors import NetcatError -from nikohomecontrol import NikoHomeControl +from nhc.controller import NHCController from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.util import Throttle +from homeassistant.helpers import entity_registry as er + +from .const import _LOGGER PLATFORMS: list[Platform] = [Platform.LIGHT] -type NikoHomeControlConfigEntry = ConfigEntry[NikoHomeControlData] - - -_LOGGER = logging.getLogger(__name__) -MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=1) +type NikoHomeControlConfigEntry = ConfigEntry[NHCController] async def async_setup_entry( hass: HomeAssistant, entry: NikoHomeControlConfigEntry ) -> bool: """Set Niko Home Control from a config entry.""" + controller = NHCController(entry.data[CONF_HOST]) try: - controller = NikoHomeControl({"ip": entry.data[CONF_HOST], "port": 8000}) - niko_data = NikoHomeControlData(hass, controller) - await niko_data.async_update() + await controller.connect() except NetcatError as err: raise ConfigEntryNotReady("cannot connect to controller.") from err except OSError as err: @@ -38,46 +32,45 @@ async def async_setup_entry( "unknown error while connecting to controller." 
) from err - entry.runtime_data = niko_data + entry.runtime_data = controller await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True +async def async_migrate_entry( + hass: HomeAssistant, config_entry: NikoHomeControlConfigEntry +) -> bool: + """Migrate old entry.""" + _LOGGER.debug( + "Migrating configuration from version %s.%s", + config_entry.version, + config_entry.minor_version, + ) + + if config_entry.minor_version < 2: + registry = er.async_get(hass) + entries = er.async_entries_for_config_entry(registry, config_entry.entry_id) + + for entry in entries: + if entry.unique_id.startswith("light-"): + action_id = entry.unique_id.split("-")[-1] + new_unique_id = f"{config_entry.entry_id}-{action_id}" + registry.async_update_entity( + entry.entity_id, new_unique_id=new_unique_id + ) + + hass.config_entries.async_update_entry(config_entry, minor_version=2) + + _LOGGER.debug( + "Migration to configuration version %s.%s successful", + config_entry.version, + config_entry.minor_version, + ) + return True + + async def async_unload_entry( hass: HomeAssistant, entry: NikoHomeControlConfigEntry ) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - -class NikoHomeControlData: - """The class for handling data retrieval.""" - - def __init__(self, hass, nhc): - """Set up Niko Home Control Data object.""" - self.nhc = nhc - self.hass = hass - self.available = True - self.data = {} - self._system_info = None - - @Throttle(MIN_TIME_BETWEEN_UPDATES) - async def async_update(self): - """Get the latest data from the NikoHomeControl API.""" - _LOGGER.debug("Fetching async state in bulk") - try: - self.data = await self.hass.async_add_executor_job( - self.nhc.list_actions_raw - ) - self.available = True - except OSError as ex: - _LOGGER.error("Unable to retrieve data from Niko, %s", str(ex)) - self.available = False - - def get_state(self, aid): - """Find and filter state based on action id.""" - for state in self.data: - if state["id"] == aid: - return state["value1"] - _LOGGER.error("Failed to retrieve state off unknown light") - return None diff --git a/homeassistant/components/niko_home_control/config_flow.py b/homeassistant/components/niko_home_control/config_flow.py index 9174a932534..f37e5e9248a 100644 --- a/homeassistant/components/niko_home_control/config_flow.py +++ b/homeassistant/components/niko_home_control/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations from typing import Any -from nikohomecontrol import NikoHomeControlConnection +from nhc.controller import NHCController import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -19,10 +19,12 @@ DATA_SCHEMA = vol.Schema( ) -def test_connection(host: str) -> str | None: +async def test_connection(host: str) -> str | None: """Test if we can connect to the Niko Home Control controller.""" + + controller = NHCController(host, 8000) try: - NikoHomeControlConnection(host, 8000) + await controller.connect() except Exception: # noqa: BLE001 return "cannot_connect" return None @@ -31,7 +33,7 @@ def test_connection(host: str) -> str | None: class NikoHomeControlConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Niko Home Control.""" - VERSION = 1 + MINOR_VERSION = 2 async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -41,7 +43,7 @@ class NikoHomeControlConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: self._async_abort_entries_match({CONF_HOST: 
user_input[CONF_HOST]}) - error = test_connection(user_input[CONF_HOST]) + error = await test_connection(user_input[CONF_HOST]) if not error: return self.async_create_entry( title="Niko Home Control", @@ -56,7 +58,7 @@ class NikoHomeControlConfigFlow(ConfigFlow, domain=DOMAIN): async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult: """Import a config entry.""" self._async_abort_entries_match({CONF_HOST: import_info[CONF_HOST]}) - error = test_connection(import_info[CONF_HOST]) + error = await test_connection(import_info[CONF_HOST]) if not error: return self.async_create_entry( diff --git a/homeassistant/components/niko_home_control/const.py b/homeassistant/components/niko_home_control/const.py index 202b031b9a2..82b7ce7ed38 100644 --- a/homeassistant/components/niko_home_control/const.py +++ b/homeassistant/components/niko_home_control/const.py @@ -1,3 +1,6 @@ """Constants for niko_home_control integration.""" +import logging + DOMAIN = "niko_home_control" +_LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/niko_home_control/light.py b/homeassistant/components/niko_home_control/light.py index f2bf302eab7..29b952fcb77 100644 --- a/homeassistant/components/niko_home_control/light.py +++ b/homeassistant/components/niko_home_control/light.py @@ -2,10 +2,9 @@ from __future__ import annotations -from datetime import timedelta -import logging from typing import Any +from nhc.light import NHCLight import voluptuous as vol from homeassistant.components.light import ( @@ -24,12 +23,9 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import NikoHomeControlConfigEntry +from . 
import NHCController, NikoHomeControlConfigEntry from .const import DOMAIN -_LOGGER = logging.getLogger(__name__) -SCAN_INTERVAL = timedelta(seconds=30) - # delete after 2025.7.0 PLATFORM_SCHEMA = LIGHT_PLATFORM_SCHEMA.extend({vol.Required(CONF_HOST): cv.string}) @@ -87,43 +83,52 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Niko Home Control light entry.""" - niko_data = entry.runtime_data + controller = entry.runtime_data async_add_entities( - NikoHomeControlLight(light, niko_data) for light in niko_data.nhc.list_actions() + NikoHomeControlLight(light, controller, entry.entry_id) + for light in controller.lights ) class NikoHomeControlLight(LightEntity): - """Representation of an Niko Light.""" + """Representation of a Niko Light.""" - def __init__(self, light, data): + def __init__( + self, action: NHCLight, controller: NHCController, unique_id: str + ) -> None: """Set up the Niko Home Control light platform.""" - self._data = data - self._light = light - self._attr_unique_id = f"light-{light.id}" - self._attr_name = light.name - self._attr_is_on = light.is_on + self._controller = controller + self._action = action + self._attr_unique_id = f"{unique_id}-{action.id}" + self._attr_name = action.name + self._attr_is_on = action.is_on self._attr_color_mode = ColorMode.ONOFF self._attr_supported_color_modes = {ColorMode.ONOFF} - if light._state["type"] == 2: # noqa: SLF001 + self._attr_should_poll = False + if action.is_dimmable: self._attr_color_mode = ColorMode.BRIGHTNESS self._attr_supported_color_modes = {ColorMode.BRIGHTNESS} + async def async_added_to_hass(self) -> None: + """Subscribe to updates.""" + self.async_on_remove( + self._controller.register_callback( + self._action.id, self.async_update_callback + ) + ) + def turn_on(self, **kwargs: Any) -> None: """Instruct the light to turn on.""" - _LOGGER.debug("Turn on: %s", self.name) - self._light.turn_on(kwargs.get(ATTR_BRIGHTNESS, 255) / 2.55) + self._action.turn_on(kwargs.get(ATTR_BRIGHTNESS, 255) / 2.55) def turn_off(self, **kwargs: Any) -> None: """Instruct the light to turn off.""" - _LOGGER.debug("Turn off: %s", self.name) - self._light.turn_off() + self._action.turn_off() - async def async_update(self) -> None: - """Get the latest data from NikoHomeControl API.""" - await self._data.async_update() - state = self._data.get_state(self._light.id) - self._attr_is_on = state != 0 + async def async_update_callback(self, state: int) -> None: + """Handle updates from the controller.""" + self._attr_is_on = state > 0 if brightness_supported(self.supported_color_modes): - self._attr_brightness = state * 2.55 + self._attr_brightness = round(state * 2.55) + self.async_write_ha_state() diff --git a/homeassistant/components/niko_home_control/manifest.json b/homeassistant/components/niko_home_control/manifest.json index 194596d534f..d252a11b38e 100644 --- a/homeassistant/components/niko_home_control/manifest.json +++ b/homeassistant/components/niko_home_control/manifest.json @@ -4,7 +4,7 @@ "codeowners": ["@VandeurenGlenn"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/niko_home_control", - "iot_class": "local_polling", + "iot_class": "local_push", "loggers": ["nikohomecontrol"], - "requirements": ["niko-home-control==0.2.1"] + "requirements": ["nhc==0.3.2"] } diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index f037b8d7ce6..ad4af2f024c 100644 --- a/homeassistant/generated/integrations.json +++ 
b/homeassistant/generated/integrations.json @@ -4160,7 +4160,7 @@ "name": "Niko Home Control", "integration_type": "hub", "config_flow": true, - "iot_class": "local_polling" + "iot_class": "local_push" }, "nilu": { "name": "Norwegian Institute for Air Research (NILU)", diff --git a/requirements_all.txt b/requirements_all.txt index b1aa085ee52..4cf22eaf153 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1463,15 +1463,15 @@ nextcord==2.6.0 # homeassistant.components.nextdns nextdns==4.0.0 +# homeassistant.components.niko_home_control +nhc==0.3.2 + # homeassistant.components.nibe_heatpump nibe==2.14.0 # homeassistant.components.nice_go nice-go==1.0.0 -# homeassistant.components.niko_home_control -niko-home-control==0.2.1 - # homeassistant.components.nilu niluclient==0.1.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3fdd84009fc..747594117e6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1226,15 +1226,15 @@ nextcord==2.6.0 # homeassistant.components.nextdns nextdns==4.0.0 +# homeassistant.components.niko_home_control +nhc==0.3.2 + # homeassistant.components.nibe_heatpump nibe==2.14.0 # homeassistant.components.nice_go nice-go==1.0.0 -# homeassistant.components.niko_home_control -niko-home-control==0.2.1 - # homeassistant.components.nfandroidtv notifications-android-tv==0.1.5 diff --git a/tests/components/niko_home_control/conftest.py b/tests/components/niko_home_control/conftest.py index 932480ac710..63307a88e8a 100644 --- a/tests/components/niko_home_control/conftest.py +++ b/tests/components/niko_home_control/conftest.py @@ -26,7 +26,7 @@ def mock_niko_home_control_connection() -> Generator[AsyncMock]: """Mock a NHC client.""" with ( patch( - "homeassistant.components.niko_home_control.config_flow.NikoHomeControlConnection", + "homeassistant.components.niko_home_control.config_flow.NHCController", autospec=True, ) as mock_client, ): diff --git a/tests/components/niko_home_control/test_config_flow.py b/tests/components/niko_home_control/test_config_flow.py index 8220ee15e02..f911f4ebb1a 100644 --- a/tests/components/niko_home_control/test_config_flow.py +++ b/tests/components/niko_home_control/test_config_flow.py @@ -46,7 +46,7 @@ async def test_cannot_connect(hass: HomeAssistant, mock_setup_entry: AsyncMock) assert result["errors"] == {} with patch( - "homeassistant.components.niko_home_control.config_flow.NikoHomeControlConnection", + "homeassistant.components.niko_home_control.config_flow.NHCController.connect", side_effect=Exception, ): result = await hass.config_entries.flow.async_configure( @@ -58,7 +58,7 @@ async def test_cannot_connect(hass: HomeAssistant, mock_setup_entry: AsyncMock) assert result["errors"] == {"base": "cannot_connect"} with patch( - "homeassistant.components.niko_home_control.config_flow.NikoHomeControlConnection" + "homeassistant.components.niko_home_control.config_flow.NHCController.connect", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -114,7 +114,7 @@ async def test_import_cannot_connect( """Test the cannot connect error.""" with patch( - "homeassistant.components.niko_home_control.config_flow.NikoHomeControlConnection", + "homeassistant.components.niko_home_control.config_flow.NHCController.connect", side_effect=Exception, ): result = await hass.config_entries.flow.async_init( diff --git a/tests/components/niko_home_control/test_init.py b/tests/components/niko_home_control/test_init.py new file mode 100644 index 00000000000..422b7d7c30c --- /dev/null 
+++ b/tests/components/niko_home_control/test_init.py @@ -0,0 +1,36 @@ +"""Test init.""" + +from unittest.mock import AsyncMock + +from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN +from homeassistant.components.niko_home_control.const import DOMAIN +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry + + +async def test_migrate_entry( + hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_setup_entry: AsyncMock +) -> None: + """Validate that the unique_id is migrated to the new unique_id.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + minor_version=1, + data={CONF_HOST: "192.168.0.123"}, + ) + config_entry.add_to_hass(hass) + entity_entry = entity_registry.async_get_or_create( + LIGHT_DOMAIN, DOMAIN, "light-1", config_entry=config_entry + ) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entity_entry = entity_registry.async_get(entity_entry.entity_id) + + assert config_entry.minor_version == 2 + assert ( + entity_registry.async_get(entity_entry.entity_id).unique_id + == f"{config_entry.entry_id}-1" + ) From f2df57e230fef61d5d404c9cd93096429d825da6 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sat, 21 Dec 2024 19:36:13 +0100 Subject: [PATCH 0978/1198] Add DHCP discovery to Withings (#133737) --- .../components/withings/manifest.json | 5 ++ homeassistant/generated/dhcp.py | 4 ++ tests/components/withings/test_config_flow.py | 65 ++++++++++++++++++- 3 files changed, 73 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/withings/manifest.json b/homeassistant/components/withings/manifest.json index 886eb66f5e0..ad9b9a6fe71 100644 --- a/homeassistant/components/withings/manifest.json +++ b/homeassistant/components/withings/manifest.json @@ -5,6 +5,11 @@ "codeowners": ["@joostlek"], "config_flow": true, "dependencies": ["application_credentials", "http", "webhook"], + "dhcp": [ + { + "macaddress": "0024E4*" + } + ], "documentation": "https://www.home-assistant.io/integrations/withings", "iot_class": "cloud_push", "loggers": ["aiowithings"], diff --git a/homeassistant/generated/dhcp.py b/homeassistant/generated/dhcp.py index 22a09945a80..67531ceced8 100644 --- a/homeassistant/generated/dhcp.py +++ b/homeassistant/generated/dhcp.py @@ -1119,6 +1119,10 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "domain": "vicare", "macaddress": "B87424*", }, + { + "domain": "withings", + "macaddress": "0024E4*", + }, { "domain": "wiz", "registered_devices": True, diff --git a/tests/components/withings/test_config_flow.py b/tests/components/withings/test_config_flow.py index 39c8340a78e..d0ad5b2659a 100644 --- a/tests/components/withings/test_config_flow.py +++ b/tests/components/withings/test_config_flow.py @@ -4,8 +4,9 @@ from unittest.mock import AsyncMock, patch import pytest +from homeassistant.components.dhcp import DhcpServiceInfo from homeassistant.components.withings.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_DHCP, SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -293,3 +294,65 @@ async def test_config_flow_with_invalid_credentials( assert result assert result["type"] is FlowResultType.ABORT assert result["reason"] == "oauth_error" + + 
+@pytest.mark.usefixtures("current_request_with_host") +async def test_dhcp( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, +) -> None: + """Check DHCP discovery.""" + + service_info = DhcpServiceInfo( + hostname="device", + ip="192.168.0.1", + macaddress="0024e4bd30de", + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_DHCP}, data=service_info + ) + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + + assert result["type"] is FlowResultType.EXTERNAL_STEP + assert result["url"] == ( + "https://account.withings.com/oauth2_user/authorize2?" + f"response_type=code&client_id={CLIENT_ID}&" + "redirect_uri=https://example.com/auth/external/callback&" + f"state={state}" + "&scope=user.info,user.metrics,user.activity,user.sleepevents" + ) + + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.clear_requests() + aioclient_mock.post( + "https://wbsapi.withings.net/v2/oauth2", + json={ + "body": { + "refresh_token": "mock-refresh-token", + "access_token": "mock-access-token", + "type": "Bearer", + "expires_in": 60, + "userid": 600, + }, + }, + ) + with patch( + "homeassistant.components.withings.async_setup_entry", return_value=True + ) as mock_setup: + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert len(mock_setup.mock_calls) == 1 + + assert result["type"] is FlowResultType.CREATE_ENTRY From 04276d352317fff120d2c98c446e83d85bb24cd0 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 20:16:18 +0100 Subject: [PATCH 0979/1198] Add number platform to Peblar Rocksolid EV Chargers integration (#133739) --- homeassistant/components/peblar/__init__.py | 7 +- .../components/peblar/coordinator.py | 27 ++++- .../components/peblar/diagnostics.py | 3 +- homeassistant/components/peblar/icons.json | 5 + homeassistant/components/peblar/number.py | 104 ++++++++++++++++++ homeassistant/components/peblar/sensor.py | 42 ++++--- homeassistant/components/peblar/strings.json | 5 + tests/components/peblar/conftest.py | 4 + .../peblar/fixtures/ev_interface.json | 7 ++ .../peblar/snapshots/test_diagnostics.ambr | 7 ++ .../peblar/snapshots/test_number.ambr | 58 ++++++++++ tests/components/peblar/test_number.py | 35 ++++++ 12 files changed, 273 insertions(+), 31 deletions(-) create mode 100644 homeassistant/components/peblar/number.py create mode 100644 tests/components/peblar/fixtures/ev_interface.json create mode 100644 tests/components/peblar/snapshots/test_number.ambr create mode 100644 tests/components/peblar/test_number.py diff --git a/homeassistant/components/peblar/__init__.py b/homeassistant/components/peblar/__init__.py index 79ffd236f32..2ab255037ac 100644 --- a/homeassistant/components/peblar/__init__.py +++ b/homeassistant/components/peblar/__init__.py @@ -22,13 +22,14 @@ from homeassistant.helpers.aiohttp_client import async_create_clientsession from .const import DOMAIN from .coordinator import ( PeblarConfigEntry, - PeblarMeterDataUpdateCoordinator, + PeblarDataUpdateCoordinator, PeblarRuntimeData, PeblarUserConfigurationDataUpdateCoordinator, PeblarVersionDataUpdateCoordinator, ) PLATFORMS = [ + 
Platform.NUMBER, Platform.SELECT, Platform.SENSOR, Platform.UPDATE, @@ -57,7 +58,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: PeblarConfigEntry) -> bo ) from err # Setup the data coordinators - meter_coordinator = PeblarMeterDataUpdateCoordinator(hass, entry, api) + meter_coordinator = PeblarDataUpdateCoordinator(hass, entry, api) user_configuration_coordinator = PeblarUserConfigurationDataUpdateCoordinator( hass, entry, peblar ) @@ -70,7 +71,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: PeblarConfigEntry) -> bo # Store the runtime data entry.runtime_data = PeblarRuntimeData( - meter_coordinator=meter_coordinator, + data_coordinator=meter_coordinator, system_information=system_information, user_configuraton_coordinator=user_configuration_coordinator, version_coordinator=version_coordinator, diff --git a/homeassistant/components/peblar/coordinator.py b/homeassistant/components/peblar/coordinator.py index a01e3d6b41a..33c66266e47 100644 --- a/homeassistant/components/peblar/coordinator.py +++ b/homeassistant/components/peblar/coordinator.py @@ -9,6 +9,7 @@ from peblar import ( Peblar, PeblarApi, PeblarError, + PeblarEVInterface, PeblarMeter, PeblarUserConfiguration, PeblarVersions, @@ -26,7 +27,7 @@ from .const import LOGGER class PeblarRuntimeData: """Class to hold runtime data.""" - meter_coordinator: PeblarMeterDataUpdateCoordinator + data_coordinator: PeblarDataUpdateCoordinator system_information: PeblarSystemInformation user_configuraton_coordinator: PeblarUserConfigurationDataUpdateCoordinator version_coordinator: PeblarVersionDataUpdateCoordinator @@ -43,6 +44,19 @@ class PeblarVersionInformation: available: PeblarVersions +@dataclass(kw_only=True) +class PeblarData: + """Class to hold active charging related information of Peblar. + + This is data that needs to be polled and updated at a relatively high + frequency in order for this integration to function correctly. + All this data is updated at the same time by a single coordinator. 
+ """ + + ev: PeblarEVInterface + meter: PeblarMeter + + class PeblarVersionDataUpdateCoordinator( DataUpdateCoordinator[PeblarVersionInformation] ): @@ -72,8 +86,8 @@ class PeblarVersionDataUpdateCoordinator( raise UpdateFailed(err) from err -class PeblarMeterDataUpdateCoordinator(DataUpdateCoordinator[PeblarMeter]): - """Class to manage fetching Peblar meter data.""" +class PeblarDataUpdateCoordinator(DataUpdateCoordinator[PeblarData]): + """Class to manage fetching Peblar active data.""" def __init__( self, hass: HomeAssistant, entry: PeblarConfigEntry, api: PeblarApi @@ -88,10 +102,13 @@ class PeblarMeterDataUpdateCoordinator(DataUpdateCoordinator[PeblarMeter]): update_interval=timedelta(seconds=10), ) - async def _async_update_data(self) -> PeblarMeter: + async def _async_update_data(self) -> PeblarData: """Fetch data from the Peblar device.""" try: - return await self.api.meter() + return PeblarData( + ev=await self.api.ev_interface(), + meter=await self.api.meter(), + ) except PeblarError as err: raise UpdateFailed(err) from err diff --git a/homeassistant/components/peblar/diagnostics.py b/homeassistant/components/peblar/diagnostics.py index 6c4531c0e09..ab18956ecbb 100644 --- a/homeassistant/components/peblar/diagnostics.py +++ b/homeassistant/components/peblar/diagnostics.py @@ -16,7 +16,8 @@ async def async_get_config_entry_diagnostics( return { "system_information": entry.runtime_data.system_information.to_dict(), "user_configuration": entry.runtime_data.user_configuraton_coordinator.data.to_dict(), - "meter": entry.runtime_data.meter_coordinator.data.to_dict(), + "ev": entry.runtime_data.data_coordinator.data.ev.to_dict(), + "meter": entry.runtime_data.data_coordinator.data.meter.to_dict(), "versions": { "available": entry.runtime_data.version_coordinator.data.available.to_dict(), "current": entry.runtime_data.version_coordinator.data.current.to_dict(), diff --git a/homeassistant/components/peblar/icons.json b/homeassistant/components/peblar/icons.json index b052eb6de4d..3ead366f4bf 100644 --- a/homeassistant/components/peblar/icons.json +++ b/homeassistant/components/peblar/icons.json @@ -1,5 +1,10 @@ { "entity": { + "number": { + "charge_current_limit": { + "default": "mdi:speedometer" + } + }, "select": { "smart_charging": { "default": "mdi:lightning-bolt", diff --git a/homeassistant/components/peblar/number.py b/homeassistant/components/peblar/number.py new file mode 100644 index 00000000000..72c7b02a6e0 --- /dev/null +++ b/homeassistant/components/peblar/number.py @@ -0,0 +1,104 @@ +"""Support for Peblar numbers.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from typing import Any + +from peblar import PeblarApi + +from homeassistant.components.number import ( + NumberDeviceClass, + NumberEntity, + NumberEntityDescription, +) +from homeassistant.const import EntityCategory, UnitOfElectricCurrent +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import ( + PeblarConfigEntry, + PeblarData, + PeblarDataUpdateCoordinator, + PeblarRuntimeData, +) + + +@dataclass(frozen=True, kw_only=True) +class PeblarNumberEntityDescription(NumberEntityDescription): + """Describe a Peblar number.""" + + native_max_value_fn: Callable[[PeblarRuntimeData], int] + 
set_value_fn: Callable[[PeblarApi, float], Awaitable[Any]] + value_fn: Callable[[PeblarData], int | None] + + +DESCRIPTIONS = [ + PeblarNumberEntityDescription( + key="charge_current_limit", + translation_key="charge_current_limit", + device_class=NumberDeviceClass.CURRENT, + entity_category=EntityCategory.CONFIG, + native_step=1, + native_min_value=6, + native_max_value_fn=lambda x: x.system_information.hardware_max_current, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + set_value_fn=lambda x, v: x.ev_interface(charge_current_limit=int(v) * 1000), + value_fn=lambda x: round(x.ev.charge_current_limit_actual / 1000), + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: PeblarConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Peblar number based on a config entry.""" + async_add_entities( + PeblarNumberEntity( + entry=entry, + description=description, + ) + for description in DESCRIPTIONS + ) + + +class PeblarNumberEntity(CoordinatorEntity[PeblarDataUpdateCoordinator], NumberEntity): + """Defines a Peblar number.""" + + entity_description: PeblarNumberEntityDescription + + _attr_has_entity_name = True + + def __init__( + self, + entry: PeblarConfigEntry, + description: PeblarNumberEntityDescription, + ) -> None: + """Initialize the Peblar entity.""" + super().__init__(entry.runtime_data.data_coordinator) + self.entity_description = description + self._attr_unique_id = f"{entry.unique_id}_{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={ + (DOMAIN, entry.runtime_data.system_information.product_serial_number) + }, + ) + self._attr_native_max_value = description.native_max_value_fn( + entry.runtime_data + ) + + @property + def native_value(self) -> int | None: + """Return the number value.""" + return self.entity_description.value_fn(self.coordinator.data) + + async def async_set_native_value(self, value: float) -> None: + """Change to new number value.""" + await self.entity_description.set_value_fn(self.coordinator.api, value) + await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/peblar/sensor.py b/homeassistant/components/peblar/sensor.py index bb9fe9d4937..285a8dd5ea0 100644 --- a/homeassistant/components/peblar/sensor.py +++ b/homeassistant/components/peblar/sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from peblar import PeblarMeter, PeblarUserConfiguration +from peblar import PeblarUserConfiguration from homeassistant.components.sensor import ( SensorDeviceClass, @@ -26,15 +26,15 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN -from .coordinator import PeblarConfigEntry, PeblarMeterDataUpdateCoordinator +from .coordinator import PeblarConfigEntry, PeblarData, PeblarDataUpdateCoordinator @dataclass(frozen=True, kw_only=True) class PeblarSensorDescription(SensorEntityDescription): - """Describe an Peblar sensor.""" + """Describe a Peblar sensor.""" has_fn: Callable[[PeblarUserConfiguration], bool] = lambda _: True - value_fn: Callable[[PeblarMeter], int | None] + value_fn: Callable[[PeblarData], int | None] DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( @@ -48,7 +48,7 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] 
= ( state_class=SensorStateClass.MEASUREMENT, suggested_display_precision=1, suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - value_fn=lambda x: x.current_phase_1, + value_fn=lambda x: x.meter.current_phase_1, ), PeblarSensorDescription( key="current_phase_1", @@ -61,7 +61,7 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( state_class=SensorStateClass.MEASUREMENT, suggested_display_precision=1, suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - value_fn=lambda x: x.current_phase_1, + value_fn=lambda x: x.meter.current_phase_1, ), PeblarSensorDescription( key="current_phase_2", @@ -74,7 +74,7 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( state_class=SensorStateClass.MEASUREMENT, suggested_display_precision=1, suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - value_fn=lambda x: x.current_phase_2, + value_fn=lambda x: x.meter.current_phase_2, ), PeblarSensorDescription( key="current_phase_3", @@ -87,7 +87,7 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( state_class=SensorStateClass.MEASUREMENT, suggested_display_precision=1, suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - value_fn=lambda x: x.current_phase_3, + value_fn=lambda x: x.meter.current_phase_3, ), PeblarSensorDescription( key="energy_session", @@ -97,7 +97,7 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( state_class=SensorStateClass.TOTAL_INCREASING, suggested_display_precision=2, suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, - value_fn=lambda x: x.energy_session, + value_fn=lambda x: x.meter.energy_session, ), PeblarSensorDescription( key="energy_total", @@ -108,14 +108,14 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( state_class=SensorStateClass.TOTAL_INCREASING, suggested_display_precision=2, suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, - value_fn=lambda x: x.energy_total, + value_fn=lambda x: x.meter.energy_total, ), PeblarSensorDescription( key="power_total", device_class=SensorDeviceClass.POWER, native_unit_of_measurement=UnitOfPower.WATT, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda x: x.power_total, + value_fn=lambda x: x.meter.power_total, ), PeblarSensorDescription( key="power_phase_1", @@ -126,7 +126,7 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( has_fn=lambda x: x.connected_phases >= 2, native_unit_of_measurement=UnitOfPower.WATT, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda x: x.power_phase_1, + value_fn=lambda x: x.meter.power_phase_1, ), PeblarSensorDescription( key="power_phase_2", @@ -137,7 +137,7 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( has_fn=lambda x: x.connected_phases >= 2, native_unit_of_measurement=UnitOfPower.WATT, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda x: x.power_phase_2, + value_fn=lambda x: x.meter.power_phase_2, ), PeblarSensorDescription( key="power_phase_3", @@ -148,7 +148,7 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( has_fn=lambda x: x.connected_phases == 3, native_unit_of_measurement=UnitOfPower.WATT, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda x: x.power_phase_3, + value_fn=lambda x: x.meter.power_phase_3, ), PeblarSensorDescription( key="voltage", @@ -158,7 +158,7 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] 
= ( has_fn=lambda x: x.connected_phases == 1, native_unit_of_measurement=UnitOfElectricPotential.VOLT, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda x: x.voltage_phase_1, + value_fn=lambda x: x.meter.voltage_phase_1, ), PeblarSensorDescription( key="voltage_phase_1", @@ -169,7 +169,7 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( has_fn=lambda x: x.connected_phases >= 2, native_unit_of_measurement=UnitOfElectricPotential.VOLT, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda x: x.voltage_phase_1, + value_fn=lambda x: x.meter.voltage_phase_1, ), PeblarSensorDescription( key="voltage_phase_2", @@ -180,7 +180,7 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( has_fn=lambda x: x.connected_phases >= 2, native_unit_of_measurement=UnitOfElectricPotential.VOLT, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda x: x.voltage_phase_2, + value_fn=lambda x: x.meter.voltage_phase_2, ), PeblarSensorDescription( key="voltage_phase_3", @@ -191,7 +191,7 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( has_fn=lambda x: x.connected_phases == 3, native_unit_of_measurement=UnitOfElectricPotential.VOLT, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda x: x.voltage_phase_3, + value_fn=lambda x: x.meter.voltage_phase_3, ), ) @@ -209,9 +209,7 @@ async def async_setup_entry( ) -class PeblarSensorEntity( - CoordinatorEntity[PeblarMeterDataUpdateCoordinator], SensorEntity -): +class PeblarSensorEntity(CoordinatorEntity[PeblarDataUpdateCoordinator], SensorEntity): """Defines a Peblar sensor.""" entity_description: PeblarSensorDescription @@ -224,7 +222,7 @@ class PeblarSensorEntity( description: PeblarSensorDescription, ) -> None: """Initialize the Peblar entity.""" - super().__init__(entry.runtime_data.meter_coordinator) + super().__init__(entry.runtime_data.data_coordinator) self.entity_description = description self._attr_unique_id = f"{entry.unique_id}_{description.key}" self._attr_device_info = DeviceInfo( diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index 02aee0eacc9..e4311df17cd 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -33,6 +33,11 @@ } }, "entity": { + "number": { + "charge_current_limit": { + "name": "Charge limit" + } + }, "select": { "smart_charging": { "name": "Smart charging", diff --git a/tests/components/peblar/conftest.py b/tests/components/peblar/conftest.py index 8831697f74e..b8e77da08cd 100644 --- a/tests/components/peblar/conftest.py +++ b/tests/components/peblar/conftest.py @@ -7,6 +7,7 @@ from contextlib import nullcontext from unittest.mock import MagicMock, patch from peblar import ( + PeblarEVInterface, PeblarMeter, PeblarSystemInformation, PeblarUserConfiguration, @@ -64,6 +65,9 @@ def mock_peblar() -> Generator[MagicMock]: ) api = peblar.rest_api.return_value + api.ev_interface.return_value = PeblarEVInterface.from_json( + load_fixture("ev_interface.json", DOMAIN) + ) api.meter.return_value = PeblarMeter.from_json( load_fixture("meter.json", DOMAIN) ) diff --git a/tests/components/peblar/fixtures/ev_interface.json b/tests/components/peblar/fixtures/ev_interface.json new file mode 100644 index 00000000000..901807a7068 --- /dev/null +++ b/tests/components/peblar/fixtures/ev_interface.json @@ -0,0 +1,7 @@ +{ + "ChargeCurrentLimit": 16000, + "ChargeCurrentLimitActual": 6000, + "ChargeCurrentLimitSource": "Current limiter", + "CpState": "State C", + "Force1Phase": false +} diff --git 
a/tests/components/peblar/snapshots/test_diagnostics.ambr b/tests/components/peblar/snapshots/test_diagnostics.ambr index 08d4d3ac6c6..625bb196402 100644 --- a/tests/components/peblar/snapshots/test_diagnostics.ambr +++ b/tests/components/peblar/snapshots/test_diagnostics.ambr @@ -1,6 +1,13 @@ # serializer version: 1 # name: test_diagnostics dict({ + 'ev': dict({ + 'ChargeCurrentLimit': 16000, + 'ChargeCurrentLimitActual': 6000, + 'ChargeCurrentLimitSource': 'Current limiter', + 'CpState': 'State C', + 'Force1Phase': False, + }), 'meter': dict({ 'CurrentPhase1': 14242, 'CurrentPhase2': 0, diff --git a/tests/components/peblar/snapshots/test_number.ambr b/tests/components/peblar/snapshots/test_number.ambr new file mode 100644 index 00000000000..50b44583d1c --- /dev/null +++ b/tests/components/peblar/snapshots/test_number.ambr @@ -0,0 +1,58 @@ +# serializer version: 1 +# name: test_entities[number][number.peblar_ev_charger_charge_limit-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 16, + 'min': 6, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.peblar_ev_charger_charge_limit', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charge limit', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_current_limit', + 'unique_id': '23-45-A4O-MOF_charge_current_limit', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[number][number.peblar_ev_charger_charge_limit-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Peblar EV Charger Charge limit', + 'max': 16, + 'min': 6, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.peblar_ev_charger_charge_limit', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6', + }) +# --- diff --git a/tests/components/peblar/test_number.py b/tests/components/peblar/test_number.py new file mode 100644 index 00000000000..4c2ff928210 --- /dev/null +++ b/tests/components/peblar/test_number.py @@ -0,0 +1,35 @@ +"""Tests for the Peblar number platform.""" + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.peblar.const import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.parametrize("init_integration", [Platform.NUMBER], indirect=True) +@pytest.mark.usefixtures("init_integration") +async def test_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the number entities.""" + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + # Ensure all entities are correctly assigned to the Peblar device + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, "23-45-A4O-MOF")} + ) + assert device_entry + entity_entries = er.async_entries_for_config_entry( + entity_registry, 
mock_config_entry.entry_id + ) + for entity_entry in entity_entries: + assert entity_entry.device_id == device_entry.id From c67e2047e327910325fb5e7ea832f1b1adc638d9 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 20:28:55 +0100 Subject: [PATCH 0980/1198] Add switch platform to Peblar Rocksolid EV Chargers integration (#133749) --- homeassistant/components/peblar/__init__.py | 1 + .../components/peblar/coordinator.py | 3 + .../components/peblar/diagnostics.py | 1 + homeassistant/components/peblar/icons.json | 5 + homeassistant/components/peblar/strings.json | 5 + homeassistant/components/peblar/switch.py | 102 ++++++++++++++++++ tests/components/peblar/conftest.py | 4 + tests/components/peblar/fixtures/system.json | 12 +++ .../peblar/snapshots/test_diagnostics.ambr | 12 +++ .../peblar/snapshots/test_switch.ambr | 47 ++++++++ tests/components/peblar/test_switch.py | 35 ++++++ 11 files changed, 227 insertions(+) create mode 100644 homeassistant/components/peblar/switch.py create mode 100644 tests/components/peblar/fixtures/system.json create mode 100644 tests/components/peblar/snapshots/test_switch.ambr create mode 100644 tests/components/peblar/test_switch.py diff --git a/homeassistant/components/peblar/__init__.py b/homeassistant/components/peblar/__init__.py index 2ab255037ac..854565081e8 100644 --- a/homeassistant/components/peblar/__init__.py +++ b/homeassistant/components/peblar/__init__.py @@ -32,6 +32,7 @@ PLATFORMS = [ Platform.NUMBER, Platform.SELECT, Platform.SENSOR, + Platform.SWITCH, Platform.UPDATE, ] diff --git a/homeassistant/components/peblar/coordinator.py b/homeassistant/components/peblar/coordinator.py index 33c66266e47..e2b16e1e62a 100644 --- a/homeassistant/components/peblar/coordinator.py +++ b/homeassistant/components/peblar/coordinator.py @@ -11,6 +11,7 @@ from peblar import ( PeblarError, PeblarEVInterface, PeblarMeter, + PeblarSystem, PeblarUserConfiguration, PeblarVersions, ) @@ -55,6 +56,7 @@ class PeblarData: ev: PeblarEVInterface meter: PeblarMeter + system: PeblarSystem class PeblarVersionDataUpdateCoordinator( @@ -108,6 +110,7 @@ class PeblarDataUpdateCoordinator(DataUpdateCoordinator[PeblarData]): return PeblarData( ev=await self.api.ev_interface(), meter=await self.api.meter(), + system=await self.api.system(), ) except PeblarError as err: raise UpdateFailed(err) from err diff --git a/homeassistant/components/peblar/diagnostics.py b/homeassistant/components/peblar/diagnostics.py index ab18956ecbb..32716148c3f 100644 --- a/homeassistant/components/peblar/diagnostics.py +++ b/homeassistant/components/peblar/diagnostics.py @@ -18,6 +18,7 @@ async def async_get_config_entry_diagnostics( "user_configuration": entry.runtime_data.user_configuraton_coordinator.data.to_dict(), "ev": entry.runtime_data.data_coordinator.data.ev.to_dict(), "meter": entry.runtime_data.data_coordinator.data.meter.to_dict(), + "system": entry.runtime_data.data_coordinator.data.system.to_dict(), "versions": { "available": entry.runtime_data.version_coordinator.data.available.to_dict(), "current": entry.runtime_data.version_coordinator.data.current.to_dict(), diff --git a/homeassistant/components/peblar/icons.json b/homeassistant/components/peblar/icons.json index 3ead366f4bf..2da03b00519 100644 --- a/homeassistant/components/peblar/icons.json +++ b/homeassistant/components/peblar/icons.json @@ -16,6 +16,11 @@ } } }, + "switch": { + "force_single_phase": { + "default": "mdi:power-cycle" + } + }, "update": { "customization": { "default": "mdi:palette" diff --git 
a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index e4311df17cd..e7e531f3bf7 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -85,6 +85,11 @@ "name": "Voltage phase 3" } }, + "switch": { + "force_single_phase": { + "name": "Force single phase" + } + }, "update": { "customization": { "name": "Customization" diff --git a/homeassistant/components/peblar/switch.py b/homeassistant/components/peblar/switch.py new file mode 100644 index 00000000000..9a6788a62be --- /dev/null +++ b/homeassistant/components/peblar/switch.py @@ -0,0 +1,102 @@ +"""Support for Peblar selects.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from typing import Any + +from peblar import PeblarApi + +from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import ( + PeblarConfigEntry, + PeblarData, + PeblarDataUpdateCoordinator, + PeblarRuntimeData, +) + + +@dataclass(frozen=True, kw_only=True) +class PeblarSwitchEntityDescription(SwitchEntityDescription): + """Class describing Peblar switch entities.""" + + has_fn: Callable[[PeblarRuntimeData], bool] = lambda x: True + is_on_fn: Callable[[PeblarData], bool] + set_fn: Callable[[PeblarApi, bool], Awaitable[Any]] + + +DESCRIPTIONS = [ + PeblarSwitchEntityDescription( + key="force_single_phase", + translation_key="force_single_phase", + entity_category=EntityCategory.CONFIG, + has_fn=lambda x: ( + x.data_coordinator.data.system.force_single_phase_allowed + and x.user_configuraton_coordinator.data.connected_phases > 1 + ), + is_on_fn=lambda x: x.ev.force_single_phase, + set_fn=lambda x, on: x.ev_interface(force_single_phase=on), + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: PeblarConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Peblar switch based on a config entry.""" + async_add_entities( + PeblarSwitchEntity( + entry=entry, + description=description, + ) + for description in DESCRIPTIONS + if description.has_fn(entry.runtime_data) + ) + + +class PeblarSwitchEntity(CoordinatorEntity[PeblarDataUpdateCoordinator], SwitchEntity): + """Defines a Peblar switch entity.""" + + entity_description: PeblarSwitchEntityDescription + + _attr_has_entity_name = True + + def __init__( + self, + entry: PeblarConfigEntry, + description: PeblarSwitchEntityDescription, + ) -> None: + """Initialize the select entity.""" + super().__init__(entry.runtime_data.data_coordinator) + self.entity_description = description + self._attr_unique_id = f"{entry.unique_id}-{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={ + (DOMAIN, entry.runtime_data.system_information.product_serial_number) + }, + ) + + @property + def is_on(self) -> bool: + """Return state of the switch.""" + return self.entity_description.is_on_fn(self.coordinator.data) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the entity on.""" + await self.entity_description.set_fn(self.coordinator.api, True) + await self.coordinator.async_request_refresh() + + async def async_turn_off(self, 
**kwargs: Any) -> None: + """Turn the entity off.""" + await self.entity_description.set_fn(self.coordinator.api, False) + await self.coordinator.async_request_refresh() diff --git a/tests/components/peblar/conftest.py b/tests/components/peblar/conftest.py index b8e77da08cd..95daad545b5 100644 --- a/tests/components/peblar/conftest.py +++ b/tests/components/peblar/conftest.py @@ -9,6 +9,7 @@ from unittest.mock import MagicMock, patch from peblar import ( PeblarEVInterface, PeblarMeter, + PeblarSystem, PeblarSystemInformation, PeblarUserConfiguration, PeblarVersions, @@ -71,6 +72,9 @@ def mock_peblar() -> Generator[MagicMock]: api.meter.return_value = PeblarMeter.from_json( load_fixture("meter.json", DOMAIN) ) + api.system.return_value = PeblarSystem.from_json( + load_fixture("system.json", DOMAIN) + ) yield peblar diff --git a/tests/components/peblar/fixtures/system.json b/tests/components/peblar/fixtures/system.json new file mode 100644 index 00000000000..87bb60575da --- /dev/null +++ b/tests/components/peblar/fixtures/system.json @@ -0,0 +1,12 @@ +{ + "ActiveErrorCodes": [], + "ActiveWarningCodes": [], + "CellularSignalStrength": null, + "FirmwareVersion": "1.6.1+1+WL-1", + "Force1PhaseAllowed": true, + "PhaseCount": 3, + "ProductPn": "6004-2300-8002", + "ProductSn": "23-45-A4O-MOF", + "Uptime": 322094, + "WlanSignalStrength": null +} diff --git a/tests/components/peblar/snapshots/test_diagnostics.ambr b/tests/components/peblar/snapshots/test_diagnostics.ambr index 625bb196402..e33a2f557de 100644 --- a/tests/components/peblar/snapshots/test_diagnostics.ambr +++ b/tests/components/peblar/snapshots/test_diagnostics.ambr @@ -20,6 +20,18 @@ 'PowerTotal': 3185, 'VoltagePhase1': 223, }), + 'system': dict({ + 'ActiveErrorCodes': list([ + ]), + 'ActiveWarningCodes': list([ + ]), + 'FirmwareVersion': '1.6.1+1+WL-1', + 'Force1PhaseAllowed': True, + 'PhaseCount': 3, + 'ProductPn': '6004-2300-8002', + 'ProductSn': '23-45-A4O-MOF', + 'Uptime': 322094, + }), 'system_information': dict({ 'BopCalIGainA': 264625, 'BopCalIGainB': 267139, diff --git a/tests/components/peblar/snapshots/test_switch.ambr b/tests/components/peblar/snapshots/test_switch.ambr new file mode 100644 index 00000000000..f4fc768030f --- /dev/null +++ b/tests/components/peblar/snapshots/test_switch.ambr @@ -0,0 +1,47 @@ +# serializer version: 1 +# name: test_entities[switch][switch.peblar_ev_charger_force_single_phase-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.peblar_ev_charger_force_single_phase', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Force single phase', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'force_single_phase', + 'unique_id': '23-45-A4O-MOF-force_single_phase', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[switch][switch.peblar_ev_charger_force_single_phase-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Peblar EV Charger Force single phase', + }), + 'context': , + 'entity_id': 'switch.peblar_ev_charger_force_single_phase', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git 
a/tests/components/peblar/test_switch.py b/tests/components/peblar/test_switch.py new file mode 100644 index 00000000000..7a8fcf7705b --- /dev/null +++ b/tests/components/peblar/test_switch.py @@ -0,0 +1,35 @@ +"""Tests for the Peblar switch platform.""" + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.peblar.const import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.parametrize("init_integration", [Platform.SWITCH], indirect=True) +@pytest.mark.usefixtures("init_integration") +async def test_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the switch entities.""" + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + # Ensure all entities are correctly assigned to the Peblar device + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, "23-45-A4O-MOF")} + ) + assert device_entry + entity_entries = er.async_entries_for_config_entry( + entity_registry, mock_config_entry.entry_id + ) + for entity_entry in entity_entries: + assert entity_entry.device_id == device_entry.id From 81ce5f45059e2d819efd5ef1fad6df239652999e Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 21:26:40 +0100 Subject: [PATCH 0981/1198] Update peblar to v0.3.0 (#133751) --- homeassistant/components/peblar/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/peblar/manifest.json b/homeassistant/components/peblar/manifest.json index 1ae2a491ba9..0e3a66dd256 100644 --- a/homeassistant/components/peblar/manifest.json +++ b/homeassistant/components/peblar/manifest.json @@ -7,6 +7,6 @@ "integration_type": "device", "iot_class": "local_polling", "quality_scale": "bronze", - "requirements": ["peblar==0.2.1"], + "requirements": ["peblar==0.3.0"], "zeroconf": [{ "type": "_http._tcp.local.", "name": "pblr-*" }] } diff --git a/requirements_all.txt b/requirements_all.txt index 4cf22eaf153..e7f4aadfe05 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1600,7 +1600,7 @@ panasonic-viera==0.4.2 pdunehd==1.3.2 # homeassistant.components.peblar -peblar==0.2.1 +peblar==0.3.0 # homeassistant.components.peco peco==0.0.30 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 747594117e6..84fc0f11967 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1327,7 +1327,7 @@ panasonic-viera==0.4.2 pdunehd==1.3.2 # homeassistant.components.peblar -peblar==0.2.1 +peblar==0.3.0 # homeassistant.components.peco peco==0.0.30 From 85519a312c0e8ecfbc2c1c219aa00627f2f4ee31 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 22:23:36 +0100 Subject: [PATCH 0982/1198] Fix Peblar current limit user setting value (#133753) --- homeassistant/components/peblar/number.py | 2 +- tests/components/peblar/snapshots/test_number.ambr | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/peblar/number.py b/homeassistant/components/peblar/number.py index 72c7b02a6e0..a5e926714d9 100644 --- a/homeassistant/components/peblar/number.py +++ 
b/homeassistant/components/peblar/number.py @@ -48,7 +48,7 @@ DESCRIPTIONS = [ native_max_value_fn=lambda x: x.system_information.hardware_max_current, native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, set_value_fn=lambda x, v: x.ev_interface(charge_current_limit=int(v) * 1000), - value_fn=lambda x: round(x.ev.charge_current_limit_actual / 1000), + value_fn=lambda x: round(x.ev.charge_current_limit / 1000), ), ] diff --git a/tests/components/peblar/snapshots/test_number.ambr b/tests/components/peblar/snapshots/test_number.ambr index 50b44583d1c..d78067849f3 100644 --- a/tests/components/peblar/snapshots/test_number.ambr +++ b/tests/components/peblar/snapshots/test_number.ambr @@ -53,6 +53,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '6', + 'state': '16', }) # --- From 5e4e1ce5a7962d71ac78db62772db69cab27e045 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 22:29:11 +0100 Subject: [PATCH 0983/1198] Add binary sensor platform to Peblar Rocksolid EV Chargers integration (#133755) --- homeassistant/components/peblar/__init__.py | 1 + .../components/peblar/binary_sensor.py | 89 +++++++++++++++++ homeassistant/components/peblar/icons.json | 8 ++ homeassistant/components/peblar/strings.json | 8 ++ .../peblar/snapshots/test_binary_sensor.ambr | 95 +++++++++++++++++++ tests/components/peblar/test_binary_sensor.py | 35 +++++++ 6 files changed, 236 insertions(+) create mode 100644 homeassistant/components/peblar/binary_sensor.py create mode 100644 tests/components/peblar/snapshots/test_binary_sensor.ambr create mode 100644 tests/components/peblar/test_binary_sensor.py diff --git a/homeassistant/components/peblar/__init__.py b/homeassistant/components/peblar/__init__.py index 854565081e8..43c48e28bd0 100644 --- a/homeassistant/components/peblar/__init__.py +++ b/homeassistant/components/peblar/__init__.py @@ -29,6 +29,7 @@ from .coordinator import ( ) PLATFORMS = [ + Platform.BINARY_SENSOR, Platform.NUMBER, Platform.SELECT, Platform.SENSOR, diff --git a/homeassistant/components/peblar/binary_sensor.py b/homeassistant/components/peblar/binary_sensor.py new file mode 100644 index 00000000000..f28a02422a9 --- /dev/null +++ b/homeassistant/components/peblar/binary_sensor.py @@ -0,0 +1,89 @@ +"""Support for Peblar binary sensors.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import PeblarConfigEntry, PeblarData, PeblarDataUpdateCoordinator + + +@dataclass(frozen=True, kw_only=True) +class PeblarBinarySensorEntityDescription(BinarySensorEntityDescription): + """Class describing Peblar binary sensor entities.""" + + is_on_fn: Callable[[PeblarData], bool] + + +DESCRIPTIONS = [ + PeblarBinarySensorEntityDescription( + key="active_error_codes", + translation_key="active_error_codes", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + is_on_fn=lambda x: bool(x.system.active_error_codes), + ), + PeblarBinarySensorEntityDescription( 
+ key="active_warning_codes", + translation_key="active_warning_codes", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + is_on_fn=lambda x: bool(x.system.active_warning_codes), + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: PeblarConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Peblar binary sensor based on a config entry.""" + async_add_entities( + PeblarBinarySensorEntity(entry=entry, description=description) + for description in DESCRIPTIONS + ) + + +class PeblarBinarySensorEntity( + CoordinatorEntity[PeblarDataUpdateCoordinator], BinarySensorEntity +): + """Defines a Peblar binary sensor entity.""" + + entity_description: PeblarBinarySensorEntityDescription + + _attr_has_entity_name = True + + def __init__( + self, + entry: PeblarConfigEntry, + description: PeblarBinarySensorEntityDescription, + ) -> None: + """Initialize the binary sensor entity.""" + super().__init__(entry.runtime_data.data_coordinator) + self.entity_description = description + self._attr_unique_id = f"{entry.unique_id}-{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={ + (DOMAIN, entry.runtime_data.system_information.product_serial_number) + }, + ) + + @property + def is_on(self) -> bool: + """Return state of the binary sensor.""" + return self.entity_description.is_on_fn(self.coordinator.data) diff --git a/homeassistant/components/peblar/icons.json b/homeassistant/components/peblar/icons.json index 2da03b00519..2b24bf71ebc 100644 --- a/homeassistant/components/peblar/icons.json +++ b/homeassistant/components/peblar/icons.json @@ -1,5 +1,13 @@ { "entity": { + "binary_sensor": { + "active_error_codes": { + "default": "mdi:alert" + }, + "active_warning_codes": { + "default": "mdi:alert" + } + }, "number": { "charge_current_limit": { "default": "mdi:speedometer" diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index e7e531f3bf7..0632fa31dd0 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -33,6 +33,14 @@ } }, "entity": { + "binary_sensor": { + "active_error_codes": { + "name": "Active errors" + }, + "active_warning_codes": { + "name": "Active warnings" + } + }, "number": { "charge_current_limit": { "name": "Charge limit" diff --git a/tests/components/peblar/snapshots/test_binary_sensor.ambr b/tests/components/peblar/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..5dd008dd320 --- /dev/null +++ b/tests/components/peblar/snapshots/test_binary_sensor.ambr @@ -0,0 +1,95 @@ +# serializer version: 1 +# name: test_entities[binary_sensor][binary_sensor.peblar_ev_charger_active_errors-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.peblar_ev_charger_active_errors', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Active errors', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'active_error_codes', + 'unique_id': '23-45-A4O-MOF-active_error_codes', + 'unit_of_measurement': None, + }) 
+# --- +# name: test_entities[binary_sensor][binary_sensor.peblar_ev_charger_active_errors-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Peblar EV Charger Active errors', + }), + 'context': , + 'entity_id': 'binary_sensor.peblar_ev_charger_active_errors', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entities[binary_sensor][binary_sensor.peblar_ev_charger_active_warnings-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.peblar_ev_charger_active_warnings', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Active warnings', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'active_warning_codes', + 'unique_id': '23-45-A4O-MOF-active_warning_codes', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[binary_sensor][binary_sensor.peblar_ev_charger_active_warnings-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Peblar EV Charger Active warnings', + }), + 'context': , + 'entity_id': 'binary_sensor.peblar_ev_charger_active_warnings', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/peblar/test_binary_sensor.py b/tests/components/peblar/test_binary_sensor.py new file mode 100644 index 00000000000..670b5b67145 --- /dev/null +++ b/tests/components/peblar/test_binary_sensor.py @@ -0,0 +1,35 @@ +"""Tests for the Peblar binary sensor platform.""" + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.peblar.const import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.parametrize("init_integration", [Platform.BINARY_SENSOR], indirect=True) +@pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration") +async def test_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the binary sensors entities.""" + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + # Ensure all entities are correctly assigned to the Peblar device + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, "23-45-A4O-MOF")} + ) + assert device_entry + entity_entries = er.async_entries_for_config_entry( + entity_registry, mock_config_entry.entry_id + ) + for entity_entry in entity_entries: + assert entity_entry.device_id == device_entry.id From 9dc20b5709b7bfe2c11e1e518a9d86eb81d8f143 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 22:40:15 +0100 Subject: [PATCH 0984/1198] Add more sensors to Peblar Rocksolid EV Chargers integration (#133754) Co-authored-by: Joost Lekkerkerker --- homeassistant/components/peblar/const.py | 32 +++ 
homeassistant/components/peblar/icons.json | 11 + homeassistant/components/peblar/sensor.py | 44 ++- homeassistant/components/peblar/strings.json | 61 ++++- .../peblar/snapshots/test_sensor.ambr | 256 ++++++++++++++++++ tests/components/peblar/test_sensor.py | 1 + 6 files changed, 386 insertions(+), 19 deletions(-) diff --git a/homeassistant/components/peblar/const.py b/homeassistant/components/peblar/const.py index b986c866d16..d7d7c2fa5b5 100644 --- a/homeassistant/components/peblar/const.py +++ b/homeassistant/components/peblar/const.py @@ -5,6 +5,38 @@ from __future__ import annotations import logging from typing import Final +from peblar import ChargeLimiter, CPState + DOMAIN: Final = "peblar" LOGGER = logging.getLogger(__package__) + +PEBLAR_CHARGE_LIMITER_TO_HOME_ASSISTANT = { + ChargeLimiter.CHARGING_CABLE: "charging_cable", + ChargeLimiter.CURRENT_LIMITER: "current_limiter", + ChargeLimiter.DYNAMIC_LOAD_BALANCING: "dynamic_load_balancing", + ChargeLimiter.EXTERNAL_POWER_LIMIT: "external_power_limit", + ChargeLimiter.GROUP_LOAD_BALANCING: "group_load_balancing", + ChargeLimiter.HARDWARE_LIMITATION: "hardware_limitation", + ChargeLimiter.HIGH_TEMPERATURE: "high_temperature", + ChargeLimiter.HOUSEHOLD_POWER_LIMIT: "household_power_limit", + ChargeLimiter.INSTALLATION_LIMIT: "installation_limit", + ChargeLimiter.LOCAL_MODBUS_API: "local_modbus_api", + ChargeLimiter.LOCAL_REST_API: "local_rest_api", + ChargeLimiter.LOCAL_SCHEDULED: "local_scheduled", + ChargeLimiter.OCPP_SMART_CHARGING: "ocpp_smart_charging", + ChargeLimiter.OVERCURRENT_PROTECTION: "overcurrent_protection", + ChargeLimiter.PHASE_IMBALANCE: "phase_imbalance", + ChargeLimiter.POWER_FACTOR: "power_factor", + ChargeLimiter.SOLAR_CHARGING: "solar_charging", +} + +PEBLAR_CP_STATE_TO_HOME_ASSISTANT = { + CPState.CHARGING_SUSPENDED: "suspended", + CPState.CHARGING_VENTILATION: "charging", + CPState.CHARGING: "charging", + CPState.ERROR: "error", + CPState.FAULT: "fault", + CPState.INVALID: "invalid", + CPState.NO_EV_CONNECTED: "no_ev_connected", +} diff --git a/homeassistant/components/peblar/icons.json b/homeassistant/components/peblar/icons.json index 2b24bf71ebc..6244945077b 100644 --- a/homeassistant/components/peblar/icons.json +++ b/homeassistant/components/peblar/icons.json @@ -24,6 +24,17 @@ } } }, + "sensor": { + "cp_state": { + "default": "mdi:ev-plug-type2" + }, + "charge_current_limit_source": { + "default": "mdi:arrow-collapse-up" + }, + "uptime": { + "default": "mdi:timer" + } + }, "switch": { "force_single_phase": { "default": "mdi:power-cycle" diff --git a/homeassistant/components/peblar/sensor.py b/homeassistant/components/peblar/sensor.py index 285a8dd5ea0..233417051cb 100644 --- a/homeassistant/components/peblar/sensor.py +++ b/homeassistant/components/peblar/sensor.py @@ -4,6 +4,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass +from datetime import datetime, timedelta from peblar import PeblarUserConfiguration @@ -24,8 +25,13 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity +from homeassistant.util.dt import utcnow -from .const import DOMAIN +from .const import ( + DOMAIN, + PEBLAR_CHARGE_LIMITER_TO_HOME_ASSISTANT, + PEBLAR_CP_STATE_TO_HOME_ASSISTANT, +) from .coordinator import PeblarConfigEntry, PeblarData, PeblarDataUpdateCoordinator @@ -34,21 
+40,37 @@ class PeblarSensorDescription(SensorEntityDescription): """Describe a Peblar sensor.""" has_fn: Callable[[PeblarUserConfiguration], bool] = lambda _: True - value_fn: Callable[[PeblarData], int | None] + value_fn: Callable[[PeblarData], datetime | int | str | None] DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( PeblarSensorDescription( - key="current", + key="cp_state", + translation_key="cp_state", + device_class=SensorDeviceClass.ENUM, + options=list(PEBLAR_CP_STATE_TO_HOME_ASSISTANT.values()), + value_fn=lambda x: PEBLAR_CP_STATE_TO_HOME_ASSISTANT[x.ev.cp_state], + ), + PeblarSensorDescription( + key="charge_current_limit_source", + translation_key="charge_current_limit_source", + device_class=SensorDeviceClass.ENUM, + entity_category=EntityCategory.DIAGNOSTIC, + options=list(PEBLAR_CHARGE_LIMITER_TO_HOME_ASSISTANT.values()), + value_fn=lambda x: PEBLAR_CHARGE_LIMITER_TO_HOME_ASSISTANT[ + x.ev.charge_current_limit_source + ], + ), + PeblarSensorDescription( + key="current_total", device_class=SensorDeviceClass.CURRENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, - has_fn=lambda x: x.connected_phases == 1, native_unit_of_measurement=UnitOfElectricCurrent.MILLIAMPERE, state_class=SensorStateClass.MEASUREMENT, suggested_display_precision=1, suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - value_fn=lambda x: x.meter.current_phase_1, + value_fn=lambda x: x.meter.current_total, ), PeblarSensorDescription( key="current_phase_1", @@ -193,6 +215,16 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( state_class=SensorStateClass.MEASUREMENT, value_fn=lambda x: x.meter.voltage_phase_3, ), + PeblarSensorDescription( + key="uptime", + translation_key="uptime", + device_class=SensorDeviceClass.TIMESTAMP, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + value_fn=lambda x: ( + utcnow().replace(microsecond=0) - timedelta(seconds=x.system.uptime) + ), + ), ) @@ -232,6 +264,6 @@ class PeblarSensorEntity(CoordinatorEntity[PeblarDataUpdateCoordinator], SensorE ) @property - def native_value(self) -> int | None: + def native_value(self) -> datetime | int | str | None: """Return the state of the sensor.""" return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index 0632fa31dd0..01022a19c38 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -1,8 +1,16 @@ { "config": { + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "no_serial_number": "The discovered Peblar device did not provide a serial number." 
+ }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, "step": { "user": { - "description": "Set up your Peblar EV charger to integrate with Home Assistant.\n\nTo do so, you will need to get the IP address of your Peblar charger and the password you use to log into the Peblar device' web interface.\n\nHome Assistant will automatically configure your Peblar charger for use with Home Assistant.", "data": { "host": "[%key:common::config_flow::data::host%]", "password": "[%key:common::config_flow::data::password%]" @@ -10,26 +18,18 @@ "data_description": { "host": "The hostname or IP address of your Peblar charger on your home network.", "password": "The same password as you use to log in to the Peblar device' local web interface." - } + }, + "description": "Set up your Peblar EV charger to integrate with Home Assistant.\n\nTo do so, you will need to get the IP address of your Peblar charger and the password you use to log into the Peblar device' web interface.\n\nHome Assistant will automatically configure your Peblar charger for use with Home Assistant." }, "zeroconf_confirm": { - "description": "Set up your Peblar EV charger to integrate with Home Assistant.\n\nTo do so, you will need the password you use to log into the Peblar device' web interface.\n\nHome Assistant will automatically configure your Peblar charger for use with Home Assistant.", "data": { "password": "[%key:common::config_flow::data::password%]" }, "data_description": { "password": "[%key:component::peblar::config::step::user::data_description::password%]" - } + }, + "description": "Set up your Peblar EV charger to integrate with Home Assistant.\n\nTo do so, you will need the password you use to log into the Peblar device' web interface.\n\nHome Assistant will automatically configure your Peblar charger for use with Home Assistant." } - }, - "error": { - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "unknown": "[%key:common::config_flow::error::unknown%]" - }, - "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "no_serial_number": "The discovered Peblar device did not provide a serial number." 
} }, "entity": { @@ -59,6 +59,38 @@ } }, "sensor": { + "charge_current_limit_source": { + "name": "Limit source", + "state": { + "charging_cable": "Charging cable", + "current_limiter": "Current limiter", + "dynamic_load_balancing": "Dynamic load balancing", + "external_power_limit": "External power limit", + "group_load_balancing": "Group load balancing", + "hardware_limitation": "Hardware limitation", + "high_temperature": "High temperature", + "household_power_limit": "Household power limit", + "installation_limit": "Installation limit", + "local_modbus_api": "Modbus API", + "local_rest_api": "REST API", + "ocpp_smart_charging": "OCPP smart charging", + "overcurrent_protection": "Overcurrent protection", + "phase_imbalance": "Phase imbalance", + "power_factor": "Power factor", + "solar_charging": "Solar charging" + } + }, + "cp_state": { + "name": "State", + "state": { + "charging": "Charging", + "error": "Error", + "fault": "Fault", + "invalid": "Invalid", + "no_ev_connected": "No EV connected", + "suspended": "Suspended" + } + }, "current_phase_1": { "name": "Current phase 1" }, @@ -83,6 +115,9 @@ "power_phase_3": { "name": "Power phase 3" }, + "uptime": { + "name": "Uptime" + }, "voltage_phase_1": { "name": "Voltage phase 1" }, diff --git a/tests/components/peblar/snapshots/test_sensor.ambr b/tests/components/peblar/snapshots/test_sensor.ambr index c3020b60078..da17a4661ee 100644 --- a/tests/components/peblar/snapshots/test_sensor.ambr +++ b/tests/components/peblar/snapshots/test_sensor.ambr @@ -1,4 +1,61 @@ # serializer version: 1 +# name: test_entities[sensor][sensor.peblar_ev_charger_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '23-45-A4O-MOF_current_total', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Peblar EV Charger Current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '14.242', + }) +# --- # name: test_entities[sensor][sensor.peblar_ev_charger_current_phase_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -227,6 +284,92 @@ 'state': '880.703', }) # --- +# name: test_entities[sensor][sensor.peblar_ev_charger_limit_source-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'charging_cable', + 'current_limiter', + 'dynamic_load_balancing', + 'external_power_limit', + 'group_load_balancing', + 'hardware_limitation', + 'high_temperature', + 'household_power_limit', + 'installation_limit', + 'local_modbus_api', + 'local_rest_api', + 'local_scheduled', 
+ 'ocpp_smart_charging', + 'overcurrent_protection', + 'phase_imbalance', + 'power_factor', + 'solar_charging', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_limit_source', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Limit source', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_current_limit_source', + 'unique_id': '23-45-A4O-MOF_charge_current_limit_source', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_limit_source-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Peblar EV Charger Limit source', + 'options': list([ + 'charging_cable', + 'current_limiter', + 'dynamic_load_balancing', + 'external_power_limit', + 'group_load_balancing', + 'hardware_limitation', + 'high_temperature', + 'household_power_limit', + 'installation_limit', + 'local_modbus_api', + 'local_rest_api', + 'local_scheduled', + 'ocpp_smart_charging', + 'overcurrent_protection', + 'phase_imbalance', + 'power_factor', + 'solar_charging', + ]), + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_limit_source', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'current_limiter', + }) +# --- # name: test_entities[sensor][sensor.peblar_ev_charger_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -488,6 +631,119 @@ 'state': '0.381', }) # --- +# name: test_entities[sensor][sensor.peblar_ev_charger_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'suspended', + 'charging', + 'charging', + 'error', + 'fault', + 'invalid', + 'no_ev_connected', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.peblar_ev_charger_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'State', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cp_state', + 'unique_id': '23-45-A4O-MOF_cp_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Peblar EV Charger State', + 'options': list([ + 'suspended', + 'charging', + 'charging', + 'error', + 'fault', + 'invalid', + 'no_ev_connected', + ]), + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'charging', + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 
'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'uptime', + 'unique_id': '23-45-A4O-MOF_uptime', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Peblar EV Charger Uptime', + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-12-18T04:16:46+00:00', + }) +# --- # name: test_entities[sensor][sensor.peblar_ev_charger_voltage_phase_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/peblar/test_sensor.py b/tests/components/peblar/test_sensor.py index 97402206d33..bad81486838 100644 --- a/tests/components/peblar/test_sensor.py +++ b/tests/components/peblar/test_sensor.py @@ -11,6 +11,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from tests.common import MockConfigEntry, snapshot_platform +@pytest.mark.freeze_time("2024-12-21 21:45:00") @pytest.mark.parametrize("init_integration", [Platform.SENSOR], indirect=True) @pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration") async def test_entities( From 9fcf8f22d2cd2b88fa4ce34382e999565eb00f61 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 23:00:29 +0100 Subject: [PATCH 0985/1198] Add reauthentication support to Peblar Rocksolid EV Chargers integration (#133757) --- homeassistant/components/peblar/__init__.py | 4 +- .../components/peblar/config_flow.py | 51 +++++++++++++ .../components/peblar/quality_scale.yaml | 2 +- homeassistant/components/peblar/strings.json | 12 +++- tests/components/peblar/test_config_flow.py | 72 +++++++++++++++++++ tests/components/peblar/test_init.py | 13 +++- 6 files changed, 149 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/peblar/__init__.py b/homeassistant/components/peblar/__init__.py index 43c48e28bd0..a055a1a02c8 100644 --- a/homeassistant/components/peblar/__init__.py +++ b/homeassistant/components/peblar/__init__.py @@ -15,7 +15,7 @@ from peblar import ( from homeassistant.const import CONF_HOST, CONF_PASSWORD, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import device_registry as dr from homeassistant.helpers.aiohttp_client import async_create_clientsession @@ -53,7 +53,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: PeblarConfigEntry) -> bo except PeblarConnectionError as err: raise ConfigEntryNotReady("Could not connect to Peblar charger") from err except PeblarAuthenticationError as err: - raise ConfigEntryError("Could not login to Peblar charger") from err + raise ConfigEntryAuthFailed from err except PeblarError as err: raise ConfigEntryNotReady( "Unknown error occurred while connecting to Peblar charger" diff --git a/homeassistant/components/peblar/config_flow.py b/homeassistant/components/peblar/config_flow.py index a9cfb7d89b9..809cb13746e 100644 --- a/homeassistant/components/peblar/config_flow.py +++ b/homeassistant/components/peblar/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations 
+from collections.abc import Mapping from typing import Any from aiohttp import CookieJar @@ -129,3 +130,53 @@ class PeblarFlowHandler(ConfigFlow, domain=DOMAIN): ), errors=errors, ) + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle initiation of re-authentication with a Peblar device.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle re-authentication with a Peblar device.""" + errors = {} + + if user_input is not None: + reauth_entry = self._get_reauth_entry() + peblar = Peblar( + host=reauth_entry.data[CONF_HOST], + session=async_create_clientsession( + self.hass, cookie_jar=CookieJar(unsafe=True) + ), + ) + try: + await peblar.login(password=user_input[CONF_PASSWORD]) + except PeblarAuthenticationError: + errors[CONF_PASSWORD] = "invalid_auth" + except PeblarConnectionError: + errors["base"] = "cannot_connect" + except Exception: # noqa: BLE001 + LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + return self.async_update_reload_and_abort( + reauth_entry, + data={ + CONF_HOST: reauth_entry.data[CONF_HOST], + CONF_PASSWORD: user_input[CONF_PASSWORD], + }, + ) + + return self.async_show_form( + step_id="reauth_confirm", + data_schema=vol.Schema( + { + vol.Required(CONF_PASSWORD): TextSelector( + TextSelectorConfig(type=TextSelectorType.PASSWORD) + ), + } + ), + errors=errors, + ) diff --git a/homeassistant/components/peblar/quality_scale.yaml b/homeassistant/components/peblar/quality_scale.yaml index 3dc470ce76b..2b0684793a8 100644 --- a/homeassistant/components/peblar/quality_scale.yaml +++ b/homeassistant/components/peblar/quality_scale.yaml @@ -36,7 +36,7 @@ rules: integration-owner: done log-when-unavailable: done parallel-updates: todo - reauthentication-flow: todo + reauthentication-flow: done test-coverage: todo # Gold devices: todo diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index 01022a19c38..0cce7ed8191 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -2,7 +2,8 @@ "config": { "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "no_serial_number": "The discovered Peblar device did not provide a serial number." + "no_serial_number": "The discovered Peblar device did not provide a serial number.", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", @@ -10,6 +11,15 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "step": { + "reauth_confirm": { + "data": { + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "[%key:component::peblar::config::step::user::data_description::password%]" + }, + "description": "Reauthenticate with your Peblar EV charger.\n\nTo do so, you will need to enter the new password you use to log into the Peblar device's web interface."
+ }, "user": { "data": { "host": "[%key:common::config_flow::data::host%]", diff --git a/tests/components/peblar/test_config_flow.py b/tests/components/peblar/test_config_flow.py index 4e3ab008047..a4a461b6bba 100644 --- a/tests/components/peblar/test_config_flow.py +++ b/tests/components/peblar/test_config_flow.py @@ -319,3 +319,75 @@ async def test_user_flow_with_zeroconf_in_progress(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY assert not hass.config_entries.flow.async_progress() + + +@pytest.mark.usefixtures("mock_peblar") +async def test_reauth_flow( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the reauthentication configuration flow.""" + mock_config_entry.add_to_hass(hass) + assert mock_config_entry.data[CONF_PASSWORD] == "OMGSPIDERS" + + result = await mock_config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_PASSWORD: "OMGPUPPIES"}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + + assert mock_config_entry.data == { + CONF_HOST: "127.0.0.127", + CONF_PASSWORD: "OMGPUPPIES", + } + + +@pytest.mark.parametrize( + ("side_effect", "expected_error"), + [ + (PeblarConnectionError, {"base": "cannot_connect"}), + (PeblarAuthenticationError, {CONF_PASSWORD: "invalid_auth"}), + (Exception, {"base": "unknown"}), + ], +) +async def test_reauth_flow_errors( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_peblar: MagicMock, + side_effect: Exception, + expected_error: dict[str, str], +) -> None: + """Test we show form on a error.""" + mock_config_entry.add_to_hass(hass) + mock_peblar.login.side_effect = side_effect + + result = await mock_config_entry.start_reauth_flow(hass) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PASSWORD: "OMGPUPPIES", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"] == expected_error + + mock_peblar.login.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PASSWORD: "OMGPUPPIES", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" diff --git a/tests/components/peblar/test_init.py b/tests/components/peblar/test_init.py index ca7b0d88c24..6e6a9c2af05 100644 --- a/tests/components/peblar/test_init.py +++ b/tests/components/peblar/test_init.py @@ -7,7 +7,7 @@ import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.peblar.const import DOMAIN -from homeassistant.config_entries import ConfigEntryState +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr @@ -70,6 +70,17 @@ async def test_config_entry_authentication_failed( assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow["step_id"] == "reauth_confirm" + assert flow["handler"] == DOMAIN + + assert "context" in flow + assert flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == mock_config_entry.entry_id + 
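# A minimal sketch (not taken from this patch; names and logic are placeholders) of the
# behaviour the assertions above rely on: raising ConfigEntryAuthFailed while setting up
# a config entry, as the __init__.py change in this commit now does, puts the entry in
# SETUP_ERROR and makes Home Assistant open a config flow with source SOURCE_REAUTH,
# which the reauth_confirm step added above then handles.
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed

async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Hypothetical setup illustrating the reauth trigger path."""
    credentials_ok = False  # stand-in for a real login attempt against the device
    if not credentials_ok:
        # Raising this (instead of ConfigEntryError) is what makes Home Assistant
        # create the reauth flow that the test assertions above check for.
        raise ConfigEntryAuthFailed("Could not login to Peblar charger")
    return True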
@pytest.mark.usefixtures("init_integration") async def test_peblar_device_entry( From 662dea28eddb9b7b28dd328f4c40398224e780ad Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 21 Dec 2024 17:25:48 -1000 Subject: [PATCH 0986/1198] Replace queries using distinct with correlated scalar subqueries to significantly improve purge performance (#133748) Replace queries using distinct with correlated scalar subqueries, like #133553 and #133699. PostgreSQL does not support skip/loose index scan https://wiki.postgresql.org/wiki/Loose_indexscan This makes the `distinct` query (see the section `Selecting Distinct Values` in the wiki above) used to find the unused ids very expensive. We can replace them with correlated scalar subqueries, as done in #133553, to avoid the `distinct`. --- homeassistant/components/recorder/queries.py | 73 ++++++++++++++++---- 1 file changed, 58 insertions(+), 15 deletions(-) diff --git a/homeassistant/components/recorder/queries.py b/homeassistant/components/recorder/queries.py index 7ac4c19bc94..71e50cf13d6 100644 --- a/homeassistant/components/recorder/queries.py +++ b/homeassistant/components/recorder/queries.py @@ -5,7 +5,16 @@ from __future__ import annotations from collections.abc import Iterable from datetime import datetime -from sqlalchemy import delete, distinct, func, lambda_stmt, select, union_all, update +from sqlalchemy import ( + and_, + delete, + distinct, + func, + lambda_stmt, + select, + union_all, + update, +) from sqlalchemy.sql.lambdas import StatementLambdaElement from sqlalchemy.sql.selectable import Select @@ -838,16 +847,33 @@ def get_migration_changes() -> StatementLambdaElement: def find_event_types_to_purge() -> StatementLambdaElement: - """Find event_type_ids to purge.""" + """Find event_type_ids to purge. + + PostgreSQL does not support skip/loose index scan + https://wiki.postgresql.org/wiki/Loose_indexscan + + To avoid using distinct, we use a subquery to get the latest time_fired_ts + for each event_type. This is then used to filter out the event_type_ids + that no longer exist in the Events table. + + This query is fast for SQLite, MariaDB, MySQL, and PostgreSQL. + """ return lambda_stmt( lambda: select(EventTypes.event_type_id, EventTypes.event_type).where( EventTypes.event_type_id.not_in( - select(EventTypes.event_type_id).join( - used_event_type_ids := select( - distinct(Events.event_type_id).label("used_event_type_id") - ).subquery(), - EventTypes.event_type_id - == used_event_type_ids.c.used_event_type_id, + select(EventTypes.event_type_id) + .select_from(EventTypes) + .join( + Events, + and_( + EventTypes.event_type_id == Events.event_type_id, + Events.time_fired_ts + == select(Events.time_fired_ts) + .where(Events.event_type_id == EventTypes.event_type_id) + .limit(1) + .scalar_subquery() + .correlate(EventTypes), + ), ) ) ) @@ -855,16 +881,33 @@ def find_event_types_to_purge() -> StatementLambdaElement: def find_entity_ids_to_purge() -> StatementLambdaElement: - """Find entity_ids to purge.""" + """Find metadata_ids for each entity_id to purge. + + PostgreSQL does not support skip/loose index scan + https://wiki.postgresql.org/wiki/Loose_indexscan + + To avoid using distinct, we use a subquery to get the latest last_updated_ts + for each entity_id. This is then used to filter out the metadata_ids + that no longer exist in the States table. + + This query is fast for SQLite, MariaDB, MySQL, and PostgreSQL.
+ """ return lambda_stmt( lambda: select(StatesMeta.metadata_id, StatesMeta.entity_id).where( StatesMeta.metadata_id.not_in( - select(StatesMeta.metadata_id).join( - used_states_metadata_id := select( - distinct(States.metadata_id).label("used_states_metadata_id") - ).subquery(), - StatesMeta.metadata_id - == used_states_metadata_id.c.used_states_metadata_id, + select(StatesMeta.metadata_id) + .select_from(StatesMeta) + .join( + States, + and_( + StatesMeta.metadata_id == States.metadata_id, + States.last_updated_ts + == select(States.last_updated_ts) + .where(States.metadata_id == StatesMeta.metadata_id) + .limit(1) + .scalar_subquery() + .correlate(StatesMeta), + ), ) ) ) From c2a9b0ff527aa69ef55a26f05c3a0abef5f1041a Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 21 Dec 2024 19:38:11 -1000 Subject: [PATCH 0987/1198] Reduce complexity to find unused data_ids and attributes_ids for db engines with slow range select (#133752) --- homeassistant/components/recorder/purge.py | 85 +--- homeassistant/components/recorder/queries.py | 475 ++----------------- 2 files changed, 61 insertions(+), 499 deletions(-) diff --git a/homeassistant/components/recorder/purge.py b/homeassistant/components/recorder/purge.py index 881952c390d..ea2b93efba7 100644 --- a/homeassistant/components/recorder/purge.py +++ b/homeassistant/components/recorder/purge.py @@ -4,7 +4,6 @@ from __future__ import annotations from collections.abc import Callable from datetime import datetime -from itertools import zip_longest import logging import time from typing import TYPE_CHECKING @@ -297,68 +296,18 @@ def _select_unused_attributes_ids( seen_ids: set[int] = set() if not database_engine.optimizer.slow_range_in_select: - # + query = attributes_ids_exist_in_states_with_fast_in_distinct # SQLite has a superior query optimizer for the distinct query below as it uses # the covering index without having to examine the rows directly for both of the # queries below. - # - # We use the distinct query for SQLite since the query in the other branch can - # generate more than 500 unions which SQLite does not support. - # - # How MariaDB's query optimizer handles this query: - # > explain select distinct attributes_id from states where attributes_id in - # (136723); - # ...Using index - # - for attributes_ids_chunk in chunked_or_all( - attributes_ids, instance.max_bind_vars - ): - seen_ids.update( - state[0] - for state in session.execute( - attributes_ids_exist_in_states_with_fast_in_distinct( - attributes_ids_chunk - ) - ).all() - ) else: - # + query = attributes_ids_exist_in_states # This branch is for DBMS that cannot optimize the distinct query well and has # to examine all the rows that match. - # - # This branch uses a union of simple queries, as each query is optimized away - # as the answer to the query can be found in the index. - # - # The below query works for SQLite as long as there are no more than 500 - # attributes_id to be selected. We currently do not have MySQL or PostgreSQL - # servers running in the test suite; we test this path using SQLite when there - # are less than 500 attributes_id. 
- # - # How MariaDB's query optimizer handles this query: - # > explain select min(attributes_id) from states where attributes_id = 136723; - # ...Select tables optimized away - # - # We used to generate a query based on how many attribute_ids to find but - # that meant sqlalchemy Transparent SQL Compilation Caching was working against - # us by cached up to max_bind_vars different statements which could be - # up to 500MB for large database due to the complexity of the ORM objects. - # - # We now break the query into groups of 100 and use a lambda_stmt to ensure - # that the query is only cached once. - # - # PostgreSQL also suffers from the same issue as older MariaDB with the distinct query - # when the database gets large because it doesn't support skip/loose index scan. - # https://wiki.postgresql.org/wiki/Loose_indexscan - # https://github.com/home-assistant/core/issues/126084 - groups = [iter(attributes_ids)] * 100 - for attr_ids in zip_longest(*groups, fillvalue=None): - seen_ids |= { - attrs_id[0] - for attrs_id in session.execute( - attributes_ids_exist_in_states(*attr_ids) # type: ignore[arg-type] - ).all() - if attrs_id[0] is not None - } + for attributes_ids_chunk in chunked_or_all(attributes_ids, instance.max_bind_vars): + seen_ids.update( + state[0] for state in session.execute(query(attributes_ids_chunk)).all() + ) to_remove = attributes_ids - seen_ids _LOGGER.debug( "Selected %s shared attributes to remove", @@ -395,23 +344,13 @@ def _select_unused_event_data_ids( # See _select_unused_attributes_ids for why this function # branches for non-sqlite databases. if not database_engine.optimizer.slow_range_in_select: - for data_ids_chunk in chunked_or_all(data_ids, instance.max_bind_vars): - seen_ids.update( - state[0] - for state in session.execute( - data_ids_exist_in_events_with_fast_in_distinct(data_ids_chunk) - ).all() - ) + query = data_ids_exist_in_events_with_fast_in_distinct else: - groups = [iter(data_ids)] * 100 - for data_ids_group in zip_longest(*groups, fillvalue=None): - seen_ids |= { - data_id[0] - for data_id in session.execute( - data_ids_exist_in_events(*data_ids_group) # type: ignore[arg-type] - ).all() - if data_id[0] is not None - } + query = data_ids_exist_in_events + for data_ids_chunk in chunked_or_all(data_ids, instance.max_bind_vars): + seen_ids.update( + state[0] for state in session.execute(query(data_ids_chunk)).all() + ) to_remove = data_ids - seen_ids _LOGGER.debug("Selected %s shared event data to remove", len(to_remove)) return to_remove diff --git a/homeassistant/components/recorder/queries.py b/homeassistant/components/recorder/queries.py index 71e50cf13d6..eb681f86702 100644 --- a/homeassistant/components/recorder/queries.py +++ b/homeassistant/components/recorder/queries.py @@ -5,16 +5,7 @@ from __future__ import annotations from collections.abc import Iterable from datetime import datetime -from sqlalchemy import ( - and_, - delete, - distinct, - func, - lambda_stmt, - select, - union_all, - update, -) +from sqlalchemy import and_, delete, distinct, func, lambda_stmt, select, update from sqlalchemy.sql.lambdas import StatementLambdaElement from sqlalchemy.sql.selectable import Select @@ -85,11 +76,6 @@ def find_states_metadata_ids(entity_ids: Iterable[str]) -> StatementLambdaElemen ) -def _state_attrs_exist(attr: int | None) -> Select: - """Check if a state attributes id exists in the states table.""" - return select(States.attributes_id).where(States.attributes_id == attr).limit(1) - - def 
attributes_ids_exist_in_states_with_fast_in_distinct( attributes_ids: Iterable[int], ) -> StatementLambdaElement: @@ -102,214 +88,35 @@ def attributes_ids_exist_in_states_with_fast_in_distinct( def attributes_ids_exist_in_states( - attr1: int, - attr2: int | None, - attr3: int | None, - attr4: int | None, - attr5: int | None, - attr6: int | None, - attr7: int | None, - attr8: int | None, - attr9: int | None, - attr10: int | None, - attr11: int | None, - attr12: int | None, - attr13: int | None, - attr14: int | None, - attr15: int | None, - attr16: int | None, - attr17: int | None, - attr18: int | None, - attr19: int | None, - attr20: int | None, - attr21: int | None, - attr22: int | None, - attr23: int | None, - attr24: int | None, - attr25: int | None, - attr26: int | None, - attr27: int | None, - attr28: int | None, - attr29: int | None, - attr30: int | None, - attr31: int | None, - attr32: int | None, - attr33: int | None, - attr34: int | None, - attr35: int | None, - attr36: int | None, - attr37: int | None, - attr38: int | None, - attr39: int | None, - attr40: int | None, - attr41: int | None, - attr42: int | None, - attr43: int | None, - attr44: int | None, - attr45: int | None, - attr46: int | None, - attr47: int | None, - attr48: int | None, - attr49: int | None, - attr50: int | None, - attr51: int | None, - attr52: int | None, - attr53: int | None, - attr54: int | None, - attr55: int | None, - attr56: int | None, - attr57: int | None, - attr58: int | None, - attr59: int | None, - attr60: int | None, - attr61: int | None, - attr62: int | None, - attr63: int | None, - attr64: int | None, - attr65: int | None, - attr66: int | None, - attr67: int | None, - attr68: int | None, - attr69: int | None, - attr70: int | None, - attr71: int | None, - attr72: int | None, - attr73: int | None, - attr74: int | None, - attr75: int | None, - attr76: int | None, - attr77: int | None, - attr78: int | None, - attr79: int | None, - attr80: int | None, - attr81: int | None, - attr82: int | None, - attr83: int | None, - attr84: int | None, - attr85: int | None, - attr86: int | None, - attr87: int | None, - attr88: int | None, - attr89: int | None, - attr90: int | None, - attr91: int | None, - attr92: int | None, - attr93: int | None, - attr94: int | None, - attr95: int | None, - attr96: int | None, - attr97: int | None, - attr98: int | None, - attr99: int | None, - attr100: int | None, + attributes_ids: Iterable[int], ) -> StatementLambdaElement: - """Generate the find attributes select only once. + """Find attributes ids that exist in the states table. - https://docs.sqlalchemy.org/en/14/core/connections.html#quick-guidelines-for-lambdas + PostgreSQL does not support skip/loose index scan + https://wiki.postgresql.org/wiki/Loose_indexscan + + To avoid using distinct, we use a subquery to get the latest last_updated_ts + for each attributes_id. This is then used to filter out the attributes_id + that no longer exist in the States table. + + This query is fast for older MariaDB, older MySQL, and PostgreSQL. 
""" return lambda_stmt( - lambda: union_all( - _state_attrs_exist(attr1), - _state_attrs_exist(attr2), - _state_attrs_exist(attr3), - _state_attrs_exist(attr4), - _state_attrs_exist(attr5), - _state_attrs_exist(attr6), - _state_attrs_exist(attr7), - _state_attrs_exist(attr8), - _state_attrs_exist(attr9), - _state_attrs_exist(attr10), - _state_attrs_exist(attr11), - _state_attrs_exist(attr12), - _state_attrs_exist(attr13), - _state_attrs_exist(attr14), - _state_attrs_exist(attr15), - _state_attrs_exist(attr16), - _state_attrs_exist(attr17), - _state_attrs_exist(attr18), - _state_attrs_exist(attr19), - _state_attrs_exist(attr20), - _state_attrs_exist(attr21), - _state_attrs_exist(attr22), - _state_attrs_exist(attr23), - _state_attrs_exist(attr24), - _state_attrs_exist(attr25), - _state_attrs_exist(attr26), - _state_attrs_exist(attr27), - _state_attrs_exist(attr28), - _state_attrs_exist(attr29), - _state_attrs_exist(attr30), - _state_attrs_exist(attr31), - _state_attrs_exist(attr32), - _state_attrs_exist(attr33), - _state_attrs_exist(attr34), - _state_attrs_exist(attr35), - _state_attrs_exist(attr36), - _state_attrs_exist(attr37), - _state_attrs_exist(attr38), - _state_attrs_exist(attr39), - _state_attrs_exist(attr40), - _state_attrs_exist(attr41), - _state_attrs_exist(attr42), - _state_attrs_exist(attr43), - _state_attrs_exist(attr44), - _state_attrs_exist(attr45), - _state_attrs_exist(attr46), - _state_attrs_exist(attr47), - _state_attrs_exist(attr48), - _state_attrs_exist(attr49), - _state_attrs_exist(attr50), - _state_attrs_exist(attr51), - _state_attrs_exist(attr52), - _state_attrs_exist(attr53), - _state_attrs_exist(attr54), - _state_attrs_exist(attr55), - _state_attrs_exist(attr56), - _state_attrs_exist(attr57), - _state_attrs_exist(attr58), - _state_attrs_exist(attr59), - _state_attrs_exist(attr60), - _state_attrs_exist(attr61), - _state_attrs_exist(attr62), - _state_attrs_exist(attr63), - _state_attrs_exist(attr64), - _state_attrs_exist(attr65), - _state_attrs_exist(attr66), - _state_attrs_exist(attr67), - _state_attrs_exist(attr68), - _state_attrs_exist(attr69), - _state_attrs_exist(attr70), - _state_attrs_exist(attr71), - _state_attrs_exist(attr72), - _state_attrs_exist(attr73), - _state_attrs_exist(attr74), - _state_attrs_exist(attr75), - _state_attrs_exist(attr76), - _state_attrs_exist(attr77), - _state_attrs_exist(attr78), - _state_attrs_exist(attr79), - _state_attrs_exist(attr80), - _state_attrs_exist(attr81), - _state_attrs_exist(attr82), - _state_attrs_exist(attr83), - _state_attrs_exist(attr84), - _state_attrs_exist(attr85), - _state_attrs_exist(attr86), - _state_attrs_exist(attr87), - _state_attrs_exist(attr88), - _state_attrs_exist(attr89), - _state_attrs_exist(attr90), - _state_attrs_exist(attr91), - _state_attrs_exist(attr92), - _state_attrs_exist(attr93), - _state_attrs_exist(attr94), - _state_attrs_exist(attr95), - _state_attrs_exist(attr96), - _state_attrs_exist(attr97), - _state_attrs_exist(attr98), - _state_attrs_exist(attr99), - _state_attrs_exist(attr100), + lambda: select(StateAttributes.attributes_id) + .select_from(StateAttributes) + .join( + States, + and_( + States.attributes_id == StateAttributes.attributes_id, + States.last_updated_ts + == select(States.last_updated_ts) + .where(States.attributes_id == StateAttributes.attributes_id) + .limit(1) + .scalar_subquery() + .correlate(StateAttributes), + ), ) + .where(StateAttributes.attributes_id.in_(attributes_ids)) ) @@ -322,220 +129,36 @@ def data_ids_exist_in_events_with_fast_in_distinct( ) -def 
_event_data_id_exist(data_id: int | None) -> Select: - """Check if a event data id exists in the events table.""" - return select(Events.data_id).where(Events.data_id == data_id).limit(1) - - def data_ids_exist_in_events( - id1: int, - id2: int | None, - id3: int | None, - id4: int | None, - id5: int | None, - id6: int | None, - id7: int | None, - id8: int | None, - id9: int | None, - id10: int | None, - id11: int | None, - id12: int | None, - id13: int | None, - id14: int | None, - id15: int | None, - id16: int | None, - id17: int | None, - id18: int | None, - id19: int | None, - id20: int | None, - id21: int | None, - id22: int | None, - id23: int | None, - id24: int | None, - id25: int | None, - id26: int | None, - id27: int | None, - id28: int | None, - id29: int | None, - id30: int | None, - id31: int | None, - id32: int | None, - id33: int | None, - id34: int | None, - id35: int | None, - id36: int | None, - id37: int | None, - id38: int | None, - id39: int | None, - id40: int | None, - id41: int | None, - id42: int | None, - id43: int | None, - id44: int | None, - id45: int | None, - id46: int | None, - id47: int | None, - id48: int | None, - id49: int | None, - id50: int | None, - id51: int | None, - id52: int | None, - id53: int | None, - id54: int | None, - id55: int | None, - id56: int | None, - id57: int | None, - id58: int | None, - id59: int | None, - id60: int | None, - id61: int | None, - id62: int | None, - id63: int | None, - id64: int | None, - id65: int | None, - id66: int | None, - id67: int | None, - id68: int | None, - id69: int | None, - id70: int | None, - id71: int | None, - id72: int | None, - id73: int | None, - id74: int | None, - id75: int | None, - id76: int | None, - id77: int | None, - id78: int | None, - id79: int | None, - id80: int | None, - id81: int | None, - id82: int | None, - id83: int | None, - id84: int | None, - id85: int | None, - id86: int | None, - id87: int | None, - id88: int | None, - id89: int | None, - id90: int | None, - id91: int | None, - id92: int | None, - id93: int | None, - id94: int | None, - id95: int | None, - id96: int | None, - id97: int | None, - id98: int | None, - id99: int | None, - id100: int | None, + data_ids: Iterable[int], ) -> StatementLambdaElement: - """Generate the find event data select only once. + """Find data ids that exist in the events table. - https://docs.sqlalchemy.org/en/14/core/connections.html#quick-guidelines-for-lambdas + PostgreSQL does not support skip/loose index scan + https://wiki.postgresql.org/wiki/Loose_indexscan + + To avoid using distinct, we use a subquery to get the latest time_fired_ts + for each data_id. This is then used to filter out the data_id + that no longer exist in the Events table. + + This query is fast for older MariaDB, older MySQL, and PostgreSQL. 
""" return lambda_stmt( - lambda: union_all( - _event_data_id_exist(id1), - _event_data_id_exist(id2), - _event_data_id_exist(id3), - _event_data_id_exist(id4), - _event_data_id_exist(id5), - _event_data_id_exist(id6), - _event_data_id_exist(id7), - _event_data_id_exist(id8), - _event_data_id_exist(id9), - _event_data_id_exist(id10), - _event_data_id_exist(id11), - _event_data_id_exist(id12), - _event_data_id_exist(id13), - _event_data_id_exist(id14), - _event_data_id_exist(id15), - _event_data_id_exist(id16), - _event_data_id_exist(id17), - _event_data_id_exist(id18), - _event_data_id_exist(id19), - _event_data_id_exist(id20), - _event_data_id_exist(id21), - _event_data_id_exist(id22), - _event_data_id_exist(id23), - _event_data_id_exist(id24), - _event_data_id_exist(id25), - _event_data_id_exist(id26), - _event_data_id_exist(id27), - _event_data_id_exist(id28), - _event_data_id_exist(id29), - _event_data_id_exist(id30), - _event_data_id_exist(id31), - _event_data_id_exist(id32), - _event_data_id_exist(id33), - _event_data_id_exist(id34), - _event_data_id_exist(id35), - _event_data_id_exist(id36), - _event_data_id_exist(id37), - _event_data_id_exist(id38), - _event_data_id_exist(id39), - _event_data_id_exist(id40), - _event_data_id_exist(id41), - _event_data_id_exist(id42), - _event_data_id_exist(id43), - _event_data_id_exist(id44), - _event_data_id_exist(id45), - _event_data_id_exist(id46), - _event_data_id_exist(id47), - _event_data_id_exist(id48), - _event_data_id_exist(id49), - _event_data_id_exist(id50), - _event_data_id_exist(id51), - _event_data_id_exist(id52), - _event_data_id_exist(id53), - _event_data_id_exist(id54), - _event_data_id_exist(id55), - _event_data_id_exist(id56), - _event_data_id_exist(id57), - _event_data_id_exist(id58), - _event_data_id_exist(id59), - _event_data_id_exist(id60), - _event_data_id_exist(id61), - _event_data_id_exist(id62), - _event_data_id_exist(id63), - _event_data_id_exist(id64), - _event_data_id_exist(id65), - _event_data_id_exist(id66), - _event_data_id_exist(id67), - _event_data_id_exist(id68), - _event_data_id_exist(id69), - _event_data_id_exist(id70), - _event_data_id_exist(id71), - _event_data_id_exist(id72), - _event_data_id_exist(id73), - _event_data_id_exist(id74), - _event_data_id_exist(id75), - _event_data_id_exist(id76), - _event_data_id_exist(id77), - _event_data_id_exist(id78), - _event_data_id_exist(id79), - _event_data_id_exist(id80), - _event_data_id_exist(id81), - _event_data_id_exist(id82), - _event_data_id_exist(id83), - _event_data_id_exist(id84), - _event_data_id_exist(id85), - _event_data_id_exist(id86), - _event_data_id_exist(id87), - _event_data_id_exist(id88), - _event_data_id_exist(id89), - _event_data_id_exist(id90), - _event_data_id_exist(id91), - _event_data_id_exist(id92), - _event_data_id_exist(id93), - _event_data_id_exist(id94), - _event_data_id_exist(id95), - _event_data_id_exist(id96), - _event_data_id_exist(id97), - _event_data_id_exist(id98), - _event_data_id_exist(id99), - _event_data_id_exist(id100), + lambda: select(EventData.data_id) + .select_from(EventData) + .join( + Events, + and_( + Events.data_id == EventData.data_id, + Events.time_fired_ts + == select(Events.time_fired_ts) + .where(Events.data_id == EventData.data_id) + .limit(1) + .scalar_subquery() + .correlate(EventData), + ), ) + .where(EventData.data_id.in_(data_ids)) ) From d322398d066cca19ebbf7dc3e3b6e94aef015b25 Mon Sep 17 00:00:00 2001 From: Austin Mroczek Date: Sat, 21 Dec 2024 23:59:54 -0800 Subject: [PATCH 0988/1198] TotalConnect use 
entry.runtime_data (#133756) * use entry.runtime_data * type the entry * update quality scale * recommended fixes * Update homeassistant/components/totalconnect/alarm_control_panel.py * Update homeassistant/components/totalconnect/binary_sensor.py * Update homeassistant/components/totalconnect/button.py --------- Co-authored-by: Joost Lekkerkerker --- .../components/totalconnect/__init__.py | 25 ++++++++++--------- .../totalconnect/alarm_control_panel.py | 2 +- .../components/totalconnect/binary_sensor.py | 3 +-- .../components/totalconnect/button.py | 3 +-- .../components/totalconnect/diagnostics.py | 4 +-- .../totalconnect/quality_scale.yaml | 2 +- 6 files changed, 18 insertions(+), 21 deletions(-) diff --git a/homeassistant/components/totalconnect/__init__.py b/homeassistant/components/totalconnect/__init__.py index 0d8b915770a..9f291ea15a6 100644 --- a/homeassistant/components/totalconnect/__init__.py +++ b/homeassistant/components/totalconnect/__init__.py @@ -8,13 +8,17 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed -from .const import AUTO_BYPASS, CONF_USERCODES, DOMAIN +from .const import AUTO_BYPASS, CONF_USERCODES from .coordinator import TotalConnectDataUpdateCoordinator PLATFORMS = [Platform.ALARM_CONTROL_PANEL, Platform.BINARY_SENSOR, Platform.BUTTON] +type TotalConnectConfigEntry = ConfigEntry[TotalConnectDataUpdateCoordinator] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + +async def async_setup_entry( + hass: HomeAssistant, entry: TotalConnectConfigEntry +) -> bool: """Set up upon config entry in user interface.""" conf = entry.data username = conf[CONF_USERNAME] @@ -40,8 +44,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = TotalConnectDataUpdateCoordinator(hass, client) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {}) - hass.data[DOMAIN][entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) entry.async_on_unload(entry.add_update_listener(update_listener)) @@ -49,18 +52,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: TotalConnectConfigEntry +) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) -async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def update_listener(hass: HomeAssistant, entry: TotalConnectConfigEntry) -> None: """Update listener.""" bypass = entry.options.get(AUTO_BYPASS, False) - client = hass.data[DOMAIN][entry.entry_id].client + client = entry.runtime_data.client for location_id in client.locations: client.locations[location_id].auto_bypass_low_battery = bypass diff --git a/homeassistant/components/totalconnect/alarm_control_panel.py b/homeassistant/components/totalconnect/alarm_control_panel.py index bc33129a741..48ba78acc92 100644 --- a/homeassistant/components/totalconnect/alarm_control_panel.py +++ b/homeassistant/components/totalconnect/alarm_control_panel.py @@ -30,7 +30,7 @@ async def 
async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up TotalConnect alarm panels based on a config entry.""" - coordinator: TotalConnectDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data code_required = entry.options.get(CODE_REQUIRED, False) async_add_entities( diff --git a/homeassistant/components/totalconnect/binary_sensor.py b/homeassistant/components/totalconnect/binary_sensor.py index 3126efff88a..9a3c2558999 100644 --- a/homeassistant/components/totalconnect/binary_sensor.py +++ b/homeassistant/components/totalconnect/binary_sensor.py @@ -17,7 +17,6 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN from .coordinator import TotalConnectDataUpdateCoordinator from .entity import TotalConnectLocationEntity, TotalConnectZoneEntity @@ -125,7 +124,7 @@ async def async_setup_entry( """Set up TotalConnect device sensors based on a config entry.""" sensors: list = [] - coordinator: TotalConnectDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data client_locations = coordinator.client.locations diff --git a/homeassistant/components/totalconnect/button.py b/homeassistant/components/totalconnect/button.py index fc5b5e89587..e228f03ec6b 100644 --- a/homeassistant/components/totalconnect/button.py +++ b/homeassistant/components/totalconnect/button.py @@ -12,7 +12,6 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN from .coordinator import TotalConnectDataUpdateCoordinator from .entity import TotalConnectLocationEntity, TotalConnectZoneEntity @@ -43,7 +42,7 @@ async def async_setup_entry( ) -> None: """Set up TotalConnect buttons based on a config entry.""" buttons: list = [] - coordinator: TotalConnectDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data for location_id, location in coordinator.client.locations.items(): buttons.extend( diff --git a/homeassistant/components/totalconnect/diagnostics.py b/homeassistant/components/totalconnect/diagnostics.py index b590c54e2ba..85f52ccc670 100644 --- a/homeassistant/components/totalconnect/diagnostics.py +++ b/homeassistant/components/totalconnect/diagnostics.py @@ -8,8 +8,6 @@ from homeassistant.components.diagnostics import async_redact_data from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from .const import DOMAIN - TO_REDACT = [ "username", "Password", @@ -27,7 +25,7 @@ async def async_get_config_entry_diagnostics( hass: HomeAssistant, config_entry: ConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - client = hass.data[DOMAIN][config_entry.entry_id].client + client = config_entry.runtime_data.client data: dict[str, Any] = {} data["client"] = { diff --git a/homeassistant/components/totalconnect/quality_scale.yaml b/homeassistant/components/totalconnect/quality_scale.yaml index a8e5b60f7ee..fb0f1e5098a 100644 --- a/homeassistant/components/totalconnect/quality_scale.yaml +++ b/homeassistant/components/totalconnect/quality_scale.yaml @@ -4,7 +4,7 @@ rules: test-before-configure: done unique-config-entry: done config-flow-test-coverage: todo - runtime-data: todo + runtime-data: done test-before-setup: todo 
appropriate-polling: done entity-unique-id: done From cef182c596c7d77441f3f3fb188659c511621c28 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Sun, 22 Dec 2024 09:02:58 +0100 Subject: [PATCH 0989/1198] Bump pyOverkiz to 1.15.4 (#133769) Bump pyoverkiz to 1.15.4 --- homeassistant/components/overkiz/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/overkiz/manifest.json b/homeassistant/components/overkiz/manifest.json index 9ab901d5005..84fdc11ae47 100644 --- a/homeassistant/components/overkiz/manifest.json +++ b/homeassistant/components/overkiz/manifest.json @@ -20,7 +20,7 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["boto3", "botocore", "pyhumps", "pyoverkiz", "s3transfer"], - "requirements": ["pyoverkiz==1.15.3"], + "requirements": ["pyoverkiz==1.15.4"], "zeroconf": [ { "type": "_kizbox._tcp.local.", diff --git a/requirements_all.txt b/requirements_all.txt index e7f4aadfe05..56255fc997e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2162,7 +2162,7 @@ pyotgw==2.2.2 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.15.3 +pyoverkiz==1.15.4 # homeassistant.components.onewire pyownet==0.10.0.post1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 84fc0f11967..d80ad1320f5 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1758,7 +1758,7 @@ pyotgw==2.2.2 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.15.3 +pyoverkiz==1.15.4 # homeassistant.components.onewire pyownet==0.10.0.post1 From 284ccbc778edc92a63bf1cfe63d4321a7451fd58 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Sun, 22 Dec 2024 09:40:06 +0100 Subject: [PATCH 0990/1198] Add additional Hitachi sensors to Overkiz (#133772) Add additional Hitachi sensors --- homeassistant/components/overkiz/sensor.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/homeassistant/components/overkiz/sensor.py b/homeassistant/components/overkiz/sensor.py index 184b4938fef..8b20d817921 100644 --- a/homeassistant/components/overkiz/sensor.py +++ b/homeassistant/components/overkiz/sensor.py @@ -458,6 +458,24 @@ SENSOR_DESCRIPTIONS: list[OverkizSensorDescription] = [ state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=UnitOfTemperature.CELSIUS, ), + # HitachiHeatingSystem/HitachiAirToWaterHeatingZone + OverkizSensorDescription( + key=OverkizState.MODBUS_ROOM_AMBIENT_TEMPERATURE_STATUS_ZONE_1, + name="Room ambient temperature", + native_value=lambda value: cast(float, value), + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + ), + # HitachiHeatingSystem/HitachiAirToWaterMainComponent + OverkizSensorDescription( + key=OverkizState.MODBUS_OUTDOOR_AMBIENT_TEMPERATURE, + name="Outdoor ambient temperature", + native_value=lambda value: cast(int, value), + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + ), ] SUPPORTED_STATES = {description.key: description for description in SENSOR_DESCRIPTIONS} From 0c24afec6c32b8b6b0eac613425804dd6e302d74 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sun, 22 Dec 2024 10:03:16 +0100 Subject: [PATCH 0991/1198] Update integration quality scale for Peblar Rocksolid EV Chargers (#133764) --- .../components/peblar/quality_scale.yaml | 23 +++++++++++-------- 1 
file changed, 13 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/peblar/quality_scale.yaml b/homeassistant/components/peblar/quality_scale.yaml index 2b0684793a8..78ec3718caf 100644 --- a/homeassistant/components/peblar/quality_scale.yaml +++ b/homeassistant/components/peblar/quality_scale.yaml @@ -30,8 +30,11 @@ rules: # Silver action-exceptions: todo config-entry-unloading: done - docs-configuration-parameters: todo - docs-installation-parameters: todo + docs-configuration-parameters: + status: exempt + comment: | + This integration does not have any configuration parameters. + docs-installation-parameters: done entity-unavailable: done integration-owner: done log-when-unavailable: done @@ -39,10 +42,10 @@ rules: reauthentication-flow: done test-coverage: todo # Gold - devices: todo + devices: done diagnostics: done - discovery-update-info: todo - discovery: todo + discovery-update-info: done + discovery: done docs-data-update: todo docs-examples: todo docs-known-limitations: todo @@ -54,15 +57,15 @@ rules: status: exempt comment: | This integration connects to a single device. - entity-category: todo - entity-device-class: todo - entity-disabled-by-default: todo - entity-translations: todo + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done exception-translations: status: exempt comment: | The coordinator needs translation when the update failed. - icon-translations: todo + icon-translations: done reconfiguration-flow: todo repair-issues: status: exempt From cd6da9d9e88eee5565814fa2f81f9ee2cee1824e Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Sun, 22 Dec 2024 10:07:35 +0100 Subject: [PATCH 0992/1198] Merge similar tests to parameterized tests for enphase_envoy (#133740) --- .../enphase_envoy/quality_scale.yaml | 2 - .../enphase_envoy/test_config_flow.py | 103 +++++------------- 2 files changed, 29 insertions(+), 76 deletions(-) diff --git a/homeassistant/components/enphase_envoy/quality_scale.yaml b/homeassistant/components/enphase_envoy/quality_scale.yaml index 2b9350ed944..171c07e9474 100644 --- a/homeassistant/components/enphase_envoy/quality_scale.yaml +++ b/homeassistant/components/enphase_envoy/quality_scale.yaml @@ -11,8 +11,6 @@ rules: config-flow-test-coverage: status: todo comment: | - - Let's have every test result in either CREATE_ENTRY or ABORT (like test_form_invalid_auth or test_form_cannot_connect, they can be parametrized) - - test_zeroconf_token_firmware and test_zeroconf_pre_token_firmware can also be parametrized I think - test_zero_conf_malformed_serial_property - with pytest.raises(KeyError) as ex:: I don't believe this should be able to raise a KeyError Shouldn't we abort the flow? 
config-flow: diff --git a/tests/components/enphase_envoy/test_config_flow.py b/tests/components/enphase_envoy/test_config_flow.py index c20e73d774b..121c2583050 100644 --- a/tests/components/enphase_envoy/test_config_flow.py +++ b/tests/components/enphase_envoy/test_config_flow.py @@ -90,47 +90,23 @@ async def test_user_no_serial_number( } -async def test_form_invalid_auth( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, -) -> None: - """Test we handle invalid auth.""" - mock_envoy.authenticate.side_effect = EnvoyAuthenticationError( - "fail authentication" - ) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - ) - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "invalid_auth"} - - @pytest.mark.parametrize( ("exception", "error"), [ + (EnvoyAuthenticationError("fail authentication"), "invalid_auth"), (EnvoyError, "cannot_connect"), + (Exception, "unknown"), (ValueError, "unknown"), ], ) -async def test_form_cannot_connect( +async def test_form_errors( hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_envoy: AsyncMock, exception: Exception, error: str, ) -> None: - """Test we handle cannot connect error.""" + """Test we handle form errors.""" mock_envoy.setup.side_effect = exception result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} @@ -148,41 +124,8 @@ async def test_form_cannot_connect( assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": error} - -def _get_schema_default(schema, key_name): - """Iterate schema to find a key.""" - for schema_key in schema: - if schema_key == key_name: - return schema_key.default() - raise KeyError(f"{key_name} not found in schema") - - -async def test_zeroconf_pre_token_firmware( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, -) -> None: - """Test we can setup from zeroconf.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=zeroconf.ZeroconfServiceInfo( - ip_address=ip_address("1.1.1.1"), - ip_addresses=[ip_address("1.1.1.1")], - hostname="mock_hostname", - name="mock_name", - port=None, - properties={"serialnum": "1234", "protovers": "3.0.0"}, - type="mock_type", - ), - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - assert ( - _get_schema_default(result["data_schema"].schema, CONF_USERNAME) == "installer" - ) - + mock_envoy.setup.side_effect = None + # mock successful authentication and update of credentials result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -192,20 +135,29 @@ async def test_zeroconf_pre_token_firmware( }, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Envoy 1234" - assert result["result"].unique_id == "1234" - assert result["data"] == { - CONF_HOST: "1.1.1.1", - CONF_NAME: "Envoy 1234", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - } -async def test_zeroconf_token_firmware( +def _get_schema_default(schema, key_name): + """Iterate schema to find a key.""" + for schema_key in schema: + if schema_key == key_name: + return schema_key.default() + raise KeyError(f"{key_name} not found in schema") + + 
+@pytest.mark.parametrize( + ("version", "schema_username"), + [ + ("7.0.0", ""), + ("3.0.0", "installer"), + ], +) +async def test_zeroconf( hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_envoy: AsyncMock, + version: str, + schema_username: str, ) -> None: """Test we can setup from zeroconf.""" result = await hass.config_entries.flow.async_init( @@ -217,13 +169,16 @@ async def test_zeroconf_token_firmware( hostname="mock_hostname", name="mock_name", port=None, - properties={"serialnum": "1234", "protovers": "7.0.0"}, + properties={"serialnum": "1234", "protovers": version}, type="mock_type", ), ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - assert _get_schema_default(result["data_schema"].schema, CONF_USERNAME) == "" + assert ( + _get_schema_default(result["data_schema"].schema, CONF_USERNAME) + == schema_username + ) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], From c3d0a01776cf679eab25b15319bbdb0751bda5f1 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Sun, 22 Dec 2024 10:25:59 +0100 Subject: [PATCH 0993/1198] Migrate to runtime data in Overkiz (#133760) * Migrate to runtime data * Revert * Improve typing --- homeassistant/components/overkiz/__init__.py | 21 +++++----- .../components/overkiz/alarm_control_panel.py | 8 ++-- .../components/overkiz/binary_sensor.py | 5 +-- homeassistant/components/overkiz/button.py | 9 ++-- .../components/overkiz/climate/__init__.py | 8 ++-- .../components/overkiz/cover/__init__.py | 10 ++--- .../components/overkiz/diagnostics.py | 15 +++---- homeassistant/components/overkiz/light.py | 8 ++-- homeassistant/components/overkiz/lock.py | 8 ++-- homeassistant/components/overkiz/number.py | 9 ++-- homeassistant/components/overkiz/scene.py | 8 ++-- homeassistant/components/overkiz/select.py | 9 ++-- homeassistant/components/overkiz/sensor.py | 7 ++-- homeassistant/components/overkiz/siren.py | 8 ++-- homeassistant/components/overkiz/switch.py | 8 ++-- .../components/overkiz/water_heater.py | 42 ------------------- .../overkiz/water_heater/__init__.py | 8 ++-- 17 files changed, 63 insertions(+), 128 deletions(-) delete mode 100644 homeassistant/components/overkiz/water_heater.py diff --git a/homeassistant/components/overkiz/__init__.py b/homeassistant/components/overkiz/__init__.py index ce877e15261..2b4a0367bf7 100644 --- a/homeassistant/components/overkiz/__init__.py +++ b/homeassistant/components/overkiz/__init__.py @@ -47,14 +47,17 @@ from .coordinator import OverkizDataUpdateCoordinator @dataclass class HomeAssistantOverkizData: - """Overkiz data stored in the Home Assistant data object.""" + """Overkiz data stored in the runtime data object.""" coordinator: OverkizDataUpdateCoordinator platforms: defaultdict[Platform, list[Device]] scenarios: list[Scenario] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +type OverkizDataConfigEntry = ConfigEntry[HomeAssistantOverkizData] + + +async def async_setup_entry(hass: HomeAssistant, entry: OverkizDataConfigEntry) -> bool: """Set up Overkiz from a config entry.""" client: OverkizClient | None = None api_type = entry.data.get(CONF_API_TYPE, APIType.CLOUD) @@ -123,7 +126,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: platforms: defaultdict[Platform, list[Device]] = defaultdict(list) - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = HomeAssistantOverkizData( + entry.runtime_data = HomeAssistantOverkizData( coordinator=coordinator, platforms=platforms, 
scenarios=scenarios ) @@ -162,17 +165,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: OverkizDataConfigEntry +) -> bool: """Unload a config entry.""" - - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) async def _async_migrate_entries( - hass: HomeAssistant, config_entry: ConfigEntry + hass: HomeAssistant, config_entry: OverkizDataConfigEntry ) -> bool: """Migrate old entries to new unique IDs.""" entity_registry = er.async_get(hass) diff --git a/homeassistant/components/overkiz/alarm_control_panel.py b/homeassistant/components/overkiz/alarm_control_panel.py index bdbf4d0cc8d..90c135291c3 100644 --- a/homeassistant/components/overkiz/alarm_control_panel.py +++ b/homeassistant/components/overkiz/alarm_control_panel.py @@ -16,14 +16,12 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntityFeature, AlarmControlPanelState, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HomeAssistantOverkizData -from .const import DOMAIN +from . import OverkizDataConfigEntry from .coordinator import OverkizDataUpdateCoordinator from .entity import OverkizDescriptiveEntity @@ -210,11 +208,11 @@ SUPPORTED_DEVICES = {description.key: description for description in ALARM_DESCR async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz alarm control panel from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data async_add_entities( OverkizAlarmControlPanel( diff --git a/homeassistant/components/overkiz/binary_sensor.py b/homeassistant/components/overkiz/binary_sensor.py index 57df3cd4e09..7d0fee6f70e 100644 --- a/homeassistant/components/overkiz/binary_sensor.py +++ b/homeassistant/components/overkiz/binary_sensor.py @@ -18,8 +18,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import HomeAssistantOverkizData -from .const import DOMAIN, IGNORED_OVERKIZ_DEVICES +from .const import IGNORED_OVERKIZ_DEVICES from .entity import OverkizDescriptiveEntity @@ -147,7 +146,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz binary sensors from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data entities: list[OverkizBinarySensor] = [] for device in data.coordinator.data.values(): diff --git a/homeassistant/components/overkiz/button.py b/homeassistant/components/overkiz/button.py index c34be5cde84..92711ac8ca8 100644 --- a/homeassistant/components/overkiz/button.py +++ b/homeassistant/components/overkiz/button.py @@ -12,13 +12,12 @@ from homeassistant.components.button import ( ButtonEntity, ButtonEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HomeAssistantOverkizData -from .const import DOMAIN, IGNORED_OVERKIZ_DEVICES +from . import OverkizDataConfigEntry +from .const import IGNORED_OVERKIZ_DEVICES from .entity import OverkizDescriptiveEntity @@ -100,11 +99,11 @@ SUPPORTED_COMMANDS = { async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz button from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data entities: list[ButtonEntity] = [] for device in data.coordinator.data.values(): diff --git a/homeassistant/components/overkiz/climate/__init__.py b/homeassistant/components/overkiz/climate/__init__.py index 97840df7a41..77ca23b9ae1 100644 --- a/homeassistant/components/overkiz/climate/__init__.py +++ b/homeassistant/components/overkiz/climate/__init__.py @@ -7,14 +7,12 @@ from enum import StrEnum, unique from pyoverkiz.enums import Protocol from pyoverkiz.enums.ui import UIWidget -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .. import HomeAssistantOverkizData -from ..const import DOMAIN +from .. import OverkizDataConfigEntry from .atlantic_electrical_heater import AtlanticElectricalHeater from .atlantic_electrical_heater_with_adjustable_temperature_setpoint import ( AtlanticElectricalHeaterWithAdjustableTemperatureSetpoint, @@ -79,11 +77,11 @@ WIDGET_AND_PROTOCOL_TO_CLIMATE_ENTITY = { async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz climate from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data # Match devices based on the widget. 
entities_based_on_widget: list[Entity] = [ diff --git a/homeassistant/components/overkiz/cover/__init__.py b/homeassistant/components/overkiz/cover/__init__.py index f9df3256253..38c02eba1bb 100644 --- a/homeassistant/components/overkiz/cover/__init__.py +++ b/homeassistant/components/overkiz/cover/__init__.py @@ -2,23 +2,23 @@ from pyoverkiz.enums import OverkizCommand, UIClass -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .. import HomeAssistantOverkizData -from ..const import DOMAIN +from .. import OverkizDataConfigEntry from .awning import Awning from .generic_cover import OverkizGenericCover from .vertical_cover import LowSpeedCover, VerticalCover async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: OverkizDataConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz covers from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data entities: list[OverkizGenericCover] = [ Awning(device.device_url, data.coordinator) diff --git a/homeassistant/components/overkiz/diagnostics.py b/homeassistant/components/overkiz/diagnostics.py index 427230b9c82..dae0c6c59cf 100644 --- a/homeassistant/components/overkiz/diagnostics.py +++ b/homeassistant/components/overkiz/diagnostics.py @@ -7,20 +7,18 @@ from typing import Any from pyoverkiz.enums import APIType from pyoverkiz.obfuscate import obfuscate_id -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntry -from . import HomeAssistantOverkizData -from .const import CONF_API_TYPE, CONF_HUB, DOMAIN +from . import OverkizDataConfigEntry +from .const import CONF_API_TYPE, CONF_HUB async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: OverkizDataConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - entry_data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] - client = entry_data.coordinator.client + client = entry.runtime_data.coordinator.client data = { "setup": await client.get_diagnostic_data(), @@ -39,11 +37,10 @@ async def async_get_config_entry_diagnostics( async def async_get_device_diagnostics( - hass: HomeAssistant, entry: ConfigEntry, device: DeviceEntry + hass: HomeAssistant, entry: OverkizDataConfigEntry, device: DeviceEntry ) -> dict[str, Any]: """Return diagnostics for a device entry.""" - entry_data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] - client = entry_data.coordinator.client + client = entry.runtime_data.coordinator.client device_url = min(device.identifiers)[1] diff --git a/homeassistant/components/overkiz/light.py b/homeassistant/components/overkiz/light.py index 18d724dd63a..933d4cf695b 100644 --- a/homeassistant/components/overkiz/light.py +++ b/homeassistant/components/overkiz/light.py @@ -12,24 +12,22 @@ from homeassistant.components.light import ( ColorMode, LightEntity, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HomeAssistantOverkizData -from .const import DOMAIN +from . 
import OverkizDataConfigEntry from .coordinator import OverkizDataUpdateCoordinator from .entity import OverkizEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz lights from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data async_add_entities( OverkizLight(device.device_url, data.coordinator) diff --git a/homeassistant/components/overkiz/lock.py b/homeassistant/components/overkiz/lock.py index 2494903d076..1c073d2f9aa 100644 --- a/homeassistant/components/overkiz/lock.py +++ b/homeassistant/components/overkiz/lock.py @@ -7,23 +7,21 @@ from typing import Any from pyoverkiz.enums import OverkizCommand, OverkizCommandParam, OverkizState from homeassistant.components.lock import LockEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HomeAssistantOverkizData -from .const import DOMAIN +from . import OverkizDataConfigEntry from .entity import OverkizEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz locks from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data async_add_entities( OverkizLock(device.device_url, data.coordinator) diff --git a/homeassistant/components/overkiz/number.py b/homeassistant/components/overkiz/number.py index 494d430c393..0e03e822424 100644 --- a/homeassistant/components/overkiz/number.py +++ b/homeassistant/components/overkiz/number.py @@ -14,13 +14,12 @@ from homeassistant.components.number import ( NumberEntity, NumberEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HomeAssistantOverkizData -from .const import DOMAIN, IGNORED_OVERKIZ_DEVICES +from . import OverkizDataConfigEntry +from .const import IGNORED_OVERKIZ_DEVICES from .coordinator import OverkizDataUpdateCoordinator from .entity import OverkizDescriptiveEntity @@ -191,11 +190,11 @@ SUPPORTED_STATES = {description.key: description for description in NUMBER_DESCR async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz number from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data entities: list[OverkizNumber] = [] for device in data.coordinator.data.values(): diff --git a/homeassistant/components/overkiz/scene.py b/homeassistant/components/overkiz/scene.py index 8cbbb9dbe5d..4533ed3245c 100644 --- a/homeassistant/components/overkiz/scene.py +++ b/homeassistant/components/overkiz/scene.py @@ -8,21 +8,19 @@ from pyoverkiz.client import OverkizClient from pyoverkiz.models import Scenario from homeassistant.components.scene import Scene -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import HomeAssistantOverkizData -from .const import DOMAIN +from . import OverkizDataConfigEntry async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz scenes from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data async_add_entities( OverkizScene(scene, data.coordinator.client) for scene in data.scenarios diff --git a/homeassistant/components/overkiz/select.py b/homeassistant/components/overkiz/select.py index 83cdc9c4f2b..ac467eaaa7a 100644 --- a/homeassistant/components/overkiz/select.py +++ b/homeassistant/components/overkiz/select.py @@ -8,13 +8,12 @@ from dataclasses import dataclass from pyoverkiz.enums import OverkizCommand, OverkizCommandParam, OverkizState from homeassistant.components.select import SelectEntity, SelectEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HomeAssistantOverkizData -from .const import DOMAIN, IGNORED_OVERKIZ_DEVICES +from . import OverkizDataConfigEntry +from .const import IGNORED_OVERKIZ_DEVICES from .entity import OverkizDescriptiveEntity @@ -129,11 +128,11 @@ SUPPORTED_STATES = {description.key: description for description in SELECT_DESCR async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz select from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data entities: list[OverkizSelect] = [] for device in data.coordinator.data.values(): diff --git a/homeassistant/components/overkiz/sensor.py b/homeassistant/components/overkiz/sensor.py index 8b20d817921..84d25b01d24 100644 --- a/homeassistant/components/overkiz/sensor.py +++ b/homeassistant/components/overkiz/sensor.py @@ -15,7 +15,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONCENTRATION_PARTS_PER_MILLION, LIGHT_LUX, @@ -34,7 +33,7 @@ from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from . import HomeAssistantOverkizData +from . 
import OverkizDataConfigEntry from .const import ( DOMAIN, IGNORED_OVERKIZ_DEVICES, @@ -483,11 +482,11 @@ SUPPORTED_STATES = {description.key: description for description in SENSOR_DESCR async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz sensors from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data entities: list[SensorEntity] = [] for device in data.coordinator.data.values(): diff --git a/homeassistant/components/overkiz/siren.py b/homeassistant/components/overkiz/siren.py index a7ba41e2fef..f7246e50ec0 100644 --- a/homeassistant/components/overkiz/siren.py +++ b/homeassistant/components/overkiz/siren.py @@ -10,23 +10,21 @@ from homeassistant.components.siren import ( SirenEntity, SirenEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HomeAssistantOverkizData -from .const import DOMAIN +from . import OverkizDataConfigEntry from .entity import OverkizEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz sirens from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data async_add_entities( OverkizSiren(device.device_url, data.coordinator) diff --git a/homeassistant/components/overkiz/switch.py b/homeassistant/components/overkiz/switch.py index ac3ea351559..c921dbab776 100644 --- a/homeassistant/components/overkiz/switch.py +++ b/homeassistant/components/overkiz/switch.py @@ -15,13 +15,11 @@ from homeassistant.components.switch import ( SwitchEntity, SwitchEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HomeAssistantOverkizData -from .const import DOMAIN +from . import OverkizDataConfigEntry from .entity import OverkizDescriptiveEntity @@ -111,11 +109,11 @@ SUPPORTED_DEVICES = { async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz switch from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data async_add_entities( OverkizSwitch( diff --git a/homeassistant/components/overkiz/water_heater.py b/homeassistant/components/overkiz/water_heater.py deleted file mode 100644 index 99bfb279e4c..00000000000 --- a/homeassistant/components/overkiz/water_heater.py +++ /dev/null @@ -1,42 +0,0 @@ -"""Support for Overkiz water heater devices.""" - -from __future__ import annotations - -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from . 
import HomeAssistantOverkizData -from .const import DOMAIN -from .entity import OverkizEntity -from .water_heater_entities import ( - CONTROLLABLE_NAME_TO_WATER_HEATER_ENTITY, - WIDGET_TO_WATER_HEATER_ENTITY, -) - - -async def async_setup_entry( - hass: HomeAssistant, - entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up the Overkiz DHW from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] - entities: list[OverkizEntity] = [] - - for device in data.platforms[Platform.WATER_HEATER]: - if device.controllable_name in CONTROLLABLE_NAME_TO_WATER_HEATER_ENTITY: - entities.append( - CONTROLLABLE_NAME_TO_WATER_HEATER_ENTITY[device.controllable_name]( - device.device_url, data.coordinator - ) - ) - elif device.widget in WIDGET_TO_WATER_HEATER_ENTITY: - entities.append( - WIDGET_TO_WATER_HEATER_ENTITY[device.widget]( - device.device_url, data.coordinator - ) - ) - - async_add_entities(entities) diff --git a/homeassistant/components/overkiz/water_heater/__init__.py b/homeassistant/components/overkiz/water_heater/__init__.py index 1fb5e5696bd..1dd1d596a33 100644 --- a/homeassistant/components/overkiz/water_heater/__init__.py +++ b/homeassistant/components/overkiz/water_heater/__init__.py @@ -4,13 +4,11 @@ from __future__ import annotations from pyoverkiz.enums.ui import UIWidget -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .. import HomeAssistantOverkizData -from ..const import DOMAIN +from .. import OverkizDataConfigEntry from ..entity import OverkizEntity from .atlantic_domestic_hot_water_production_mlb_component import ( AtlanticDomesticHotWaterProductionMBLComponent, @@ -22,11 +20,11 @@ from .hitachi_dhw import HitachiDHW async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz DHW from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data entities: list[OverkizEntity] = [] for device in data.platforms[Platform.WATER_HEATER]: From 3f1acff6521d457c9c40ff7f09c82e3fd5aa1db2 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Sun, 22 Dec 2024 10:31:09 +0100 Subject: [PATCH 0994/1198] Add support for HitachiAirToWaterHeatingZone in Overkiz (#133768) * Add support for HitachiAirToWaterHeatingZone in Overkiz * Clean up * Fix typing * Fix typing * Fix typing * Adapt to new PyOverkiz --- .../components/overkiz/climate/__init__.py | 2 + .../hitachi_air_to_water_heating_zone.py | 123 ++++++++++++++++++ homeassistant/components/overkiz/const.py | 1 + 3 files changed, 126 insertions(+) create mode 100644 homeassistant/components/overkiz/climate/hitachi_air_to_water_heating_zone.py diff --git a/homeassistant/components/overkiz/climate/__init__.py b/homeassistant/components/overkiz/climate/__init__.py index 77ca23b9ae1..1398bb7c25a 100644 --- a/homeassistant/components/overkiz/climate/__init__.py +++ b/homeassistant/components/overkiz/climate/__init__.py @@ -27,6 +27,7 @@ from .atlantic_pass_apc_zone_control import AtlanticPassAPCZoneControl from .atlantic_pass_apc_zone_control_zone import AtlanticPassAPCZoneControlZone from .hitachi_air_to_air_heat_pump_hlrrwifi import HitachiAirToAirHeatPumpHLRRWIFI from .hitachi_air_to_air_heat_pump_ovp import HitachiAirToAirHeatPumpOVP +from 
.hitachi_air_to_water_heating_zone import HitachiAirToWaterHeatingZone from .somfy_heating_temperature_interface import SomfyHeatingTemperatureInterface from .somfy_thermostat import SomfyThermostat from .valve_heating_temperature_interface import ValveHeatingTemperatureInterface @@ -51,6 +52,7 @@ WIDGET_TO_CLIMATE_ENTITY = { UIWidget.ATLANTIC_HEAT_RECOVERY_VENTILATION: AtlanticHeatRecoveryVentilation, UIWidget.ATLANTIC_PASS_APC_HEATING_ZONE: AtlanticPassAPCHeatingZone, UIWidget.ATLANTIC_PASS_APC_ZONE_CONTROL: AtlanticPassAPCZoneControl, + UIWidget.HITACHI_AIR_TO_WATER_HEATING_ZONE: HitachiAirToWaterHeatingZone, UIWidget.SOMFY_HEATING_TEMPERATURE_INTERFACE: SomfyHeatingTemperatureInterface, UIWidget.SOMFY_THERMOSTAT: SomfyThermostat, UIWidget.VALVE_HEATING_TEMPERATURE_INTERFACE: ValveHeatingTemperatureInterface, diff --git a/homeassistant/components/overkiz/climate/hitachi_air_to_water_heating_zone.py b/homeassistant/components/overkiz/climate/hitachi_air_to_water_heating_zone.py new file mode 100644 index 00000000000..8410e50873d --- /dev/null +++ b/homeassistant/components/overkiz/climate/hitachi_air_to_water_heating_zone.py @@ -0,0 +1,123 @@ +"""Support for HitachiAirToWaterHeatingZone.""" + +from __future__ import annotations + +from typing import Any, cast + +from pyoverkiz.enums import OverkizCommand, OverkizCommandParam, OverkizState + +from homeassistant.components.climate import ( + PRESET_COMFORT, + PRESET_ECO, + PRESET_NONE, + ClimateEntity, + ClimateEntityFeature, + HVACMode, +) +from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature + +from ..const import DOMAIN +from ..entity import OverkizDataUpdateCoordinator, OverkizEntity + +OVERKIZ_TO_HVAC_MODE: dict[str, HVACMode] = { + OverkizCommandParam.MANU: HVACMode.HEAT, + OverkizCommandParam.AUTO: HVACMode.AUTO, +} + +HVAC_MODE_TO_OVERKIZ = {v: k for k, v in OVERKIZ_TO_HVAC_MODE.items()} + +OVERKIZ_TO_PRESET_MODE: dict[str, str] = { + OverkizCommandParam.COMFORT: PRESET_COMFORT, + OverkizCommandParam.ECO: PRESET_ECO, +} + +PRESET_MODE_TO_OVERKIZ = {v: k for k, v in OVERKIZ_TO_PRESET_MODE.items()} + + +class HitachiAirToWaterHeatingZone(OverkizEntity, ClimateEntity): + """Representation of HitachiAirToWaterHeatingZone.""" + + _attr_hvac_modes = [*HVAC_MODE_TO_OVERKIZ] + _attr_preset_modes = [*PRESET_MODE_TO_OVERKIZ] + _attr_supported_features = ( + ClimateEntityFeature.PRESET_MODE | ClimateEntityFeature.TARGET_TEMPERATURE + ) + _attr_min_temp = 5.0 + _attr_max_temp = 35.0 + _attr_precision = 0.1 + _attr_target_temperature_step = 0.5 + _attr_temperature_unit = UnitOfTemperature.CELSIUS + _attr_translation_key = DOMAIN + + def __init__( + self, device_url: str, coordinator: OverkizDataUpdateCoordinator + ) -> None: + """Init method.""" + super().__init__(device_url, coordinator) + + if self._attr_device_info: + self._attr_device_info["manufacturer"] = "Hitachi" + + @property + def hvac_mode(self) -> HVACMode: + """Return hvac operation ie. 
heat, cool mode.""" + if ( + state := self.device.states[OverkizState.MODBUS_AUTO_MANU_MODE_ZONE_1] + ) and state.value_as_str: + return OVERKIZ_TO_HVAC_MODE[state.value_as_str] + + return HVACMode.OFF + + async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: + """Set new target hvac mode.""" + await self.executor.async_execute_command( + OverkizCommand.SET_AUTO_MANU_MODE, HVAC_MODE_TO_OVERKIZ[hvac_mode] + ) + + @property + def preset_mode(self) -> str | None: + """Return the current preset mode, e.g., home, away, temp.""" + if ( + state := self.device.states[OverkizState.MODBUS_YUTAKI_TARGET_MODE] + ) and state.value_as_str: + return OVERKIZ_TO_PRESET_MODE[state.value_as_str] + + return PRESET_NONE + + async def async_set_preset_mode(self, preset_mode: str) -> None: + """Set new preset mode.""" + await self.executor.async_execute_command( + OverkizCommand.SET_TARGET_MODE, PRESET_MODE_TO_OVERKIZ[preset_mode] + ) + + @property + def current_temperature(self) -> float | None: + """Return the current temperature.""" + current_temperature = self.device.states[ + OverkizState.MODBUS_ROOM_AMBIENT_TEMPERATURE_STATUS_ZONE_1 + ] + + if current_temperature: + return current_temperature.value_as_float + + return None + + @property + def target_temperature(self) -> float | None: + """Return the temperature we try to reach.""" + target_temperature = self.device.states[ + OverkizState.MODBUS_THERMOSTAT_SETTING_CONTROL_ZONE_1 + ] + + if target_temperature: + return target_temperature.value_as_float + + return None + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set new target temperature.""" + temperature = cast(float, kwargs.get(ATTR_TEMPERATURE)) + + await self.executor.async_execute_command( + OverkizCommand.SET_THERMOSTAT_SETTING_CONTROL_ZONE_1, int(temperature) + ) diff --git a/homeassistant/components/overkiz/const.py b/homeassistant/components/overkiz/const.py index a90260e0f0f..e596b566717 100644 --- a/homeassistant/components/overkiz/const.py +++ b/homeassistant/components/overkiz/const.py @@ -102,6 +102,7 @@ OVERKIZ_DEVICE_TO_PLATFORM: dict[UIClass | UIWidget, Platform | None] = { UIWidget.DOMESTIC_HOT_WATER_PRODUCTION: Platform.WATER_HEATER, # widgetName, uiClass is WaterHeatingSystem (not supported) UIWidget.DOMESTIC_HOT_WATER_TANK: Platform.SWITCH, # widgetName, uiClass is WaterHeatingSystem (not supported) UIWidget.HITACHI_AIR_TO_AIR_HEAT_PUMP: Platform.CLIMATE, # widgetName, uiClass is HeatingSystem (not supported) + UIWidget.HITACHI_AIR_TO_WATER_HEATING_ZONE: Platform.CLIMATE, # widgetName, uiClass is HeatingSystem (not supported) UIWidget.HITACHI_DHW: Platform.WATER_HEATER, # widgetName, uiClass is HitachiHeatingSystem (not supported) UIWidget.MY_FOX_ALARM_CONTROLLER: Platform.ALARM_CONTROL_PANEL, # widgetName, uiClass is Alarm (not supported) UIWidget.MY_FOX_SECURITY_CAMERA: Platform.SWITCH, # widgetName, uiClass is Camera (not supported) From 619aed39b70fb6eb4712cb4fad643e461b16f765 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Sun, 22 Dec 2024 10:36:07 +0100 Subject: [PATCH 0995/1198] Use new UnitOfEnergy constants in Overkiz (#133778) --- homeassistant/components/overkiz/const.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/overkiz/const.py b/homeassistant/components/overkiz/const.py index e596b566717..1a89fecf9c0 100644 --- a/homeassistant/components/overkiz/const.py +++ b/homeassistant/components/overkiz/const.py @@ -142,8 +142,8 @@ OVERKIZ_UNIT_TO_HA: dict[str, str] = { 
MeasuredValueType.ELECTRICAL_POWER_IN_W: UnitOfPower.WATT, MeasuredValueType.ELECTRIC_CURRENT_IN_AMPERE: UnitOfElectricCurrent.AMPERE, MeasuredValueType.ELECTRIC_CURRENT_IN_MILLI_AMPERE: UnitOfElectricCurrent.MILLIAMPERE, - MeasuredValueType.ENERGY_IN_CAL: "cal", - MeasuredValueType.ENERGY_IN_KCAL: "kcal", + MeasuredValueType.ENERGY_IN_CAL: UnitOfEnergy.CALORIE, + MeasuredValueType.ENERGY_IN_KCAL: UnitOfEnergy.KILO_CALORIE, MeasuredValueType.FLOW_IN_LITRE_PER_SECOND: f"{UnitOfVolume.LITERS}/{UnitOfTime.SECONDS}", MeasuredValueType.FLOW_IN_METER_CUBE_PER_HOUR: UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR, MeasuredValueType.FLOW_IN_METER_CUBE_PER_SECOND: f"{UnitOfVolume.CUBIC_METERS}/{UnitOfTime.SECONDS}", From 84d359c0d94b4188c139253ec7d6cd93fbb793e6 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Sun, 22 Dec 2024 11:33:32 +0100 Subject: [PATCH 0996/1198] Fix binary_sensor typing in Overkiz (#133782) --- homeassistant/components/overkiz/binary_sensor.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/overkiz/binary_sensor.py b/homeassistant/components/overkiz/binary_sensor.py index 7d0fee6f70e..3a75cd77c2f 100644 --- a/homeassistant/components/overkiz/binary_sensor.py +++ b/homeassistant/components/overkiz/binary_sensor.py @@ -14,10 +14,10 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import OverkizDataConfigEntry from .const import IGNORED_OVERKIZ_DEVICES from .entity import OverkizDescriptiveEntity @@ -142,7 +142,7 @@ SUPPORTED_STATES = { async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz binary sensors from a config entry.""" From 31c6443a9bb51faeae1164db771fd8a18eb31682 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sun, 22 Dec 2024 11:51:01 +0100 Subject: [PATCH 0997/1198] Add button platform to Peblar Rocksolid EV Chargers integration (#133780) --- homeassistant/components/peblar/__init__.py | 1 + homeassistant/components/peblar/button.py | 92 ++++++++++++++++++ .../peblar/snapshots/test_button.ambr | 95 +++++++++++++++++++ tests/components/peblar/test_button.py | 36 +++++++ 4 files changed, 224 insertions(+) create mode 100644 homeassistant/components/peblar/button.py create mode 100644 tests/components/peblar/snapshots/test_button.ambr create mode 100644 tests/components/peblar/test_button.py diff --git a/homeassistant/components/peblar/__init__.py b/homeassistant/components/peblar/__init__.py index a055a1a02c8..c185a0e2550 100644 --- a/homeassistant/components/peblar/__init__.py +++ b/homeassistant/components/peblar/__init__.py @@ -30,6 +30,7 @@ from .coordinator import ( PLATFORMS = [ Platform.BINARY_SENSOR, + Platform.BUTTON, Platform.NUMBER, Platform.SELECT, Platform.SENSOR, diff --git a/homeassistant/components/peblar/button.py b/homeassistant/components/peblar/button.py new file mode 100644 index 00000000000..0b0f12be1b3 --- /dev/null +++ b/homeassistant/components/peblar/button.py @@ -0,0 +1,92 @@ +"""Support for Peblar button.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from typing import Any + +from peblar import Peblar + +from homeassistant.components.button import ( + 
ButtonDeviceClass, + ButtonEntity, + ButtonEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import PeblarConfigEntry, PeblarUserConfigurationDataUpdateCoordinator + + +@dataclass(frozen=True, kw_only=True) +class PeblarButtonEntityDescription(ButtonEntityDescription): + """Describe a Peblar button.""" + + press_fn: Callable[[Peblar], Awaitable[Any]] + + +DESCRIPTIONS = [ + PeblarButtonEntityDescription( + key="identify", + device_class=ButtonDeviceClass.IDENTIFY, + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + press_fn=lambda x: x.identify(), + ), + PeblarButtonEntityDescription( + key="reboot", + device_class=ButtonDeviceClass.RESTART, + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + press_fn=lambda x: x.reboot(), + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: PeblarConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Peblar buttons based on a config entry.""" + async_add_entities( + PeblarButtonEntity( + entry=entry, + description=description, + ) + for description in DESCRIPTIONS + ) + + +class PeblarButtonEntity( + CoordinatorEntity[PeblarUserConfigurationDataUpdateCoordinator], ButtonEntity +): + """Defines an Peblar button.""" + + entity_description: PeblarButtonEntityDescription + + _attr_has_entity_name = True + + def __init__( + self, + entry: PeblarConfigEntry, + description: PeblarButtonEntityDescription, + ) -> None: + """Initialize the button entity.""" + super().__init__(coordinator=entry.runtime_data.user_configuraton_coordinator) + self.entity_description = description + self._attr_unique_id = f"{entry.unique_id}_{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={ + (DOMAIN, entry.runtime_data.system_information.product_serial_number) + }, + ) + + async def async_press(self) -> None: + """Trigger button press on the Peblar device.""" + await self.entity_description.press_fn(self.coordinator.peblar) diff --git a/tests/components/peblar/snapshots/test_button.ambr b/tests/components/peblar/snapshots/test_button.ambr new file mode 100644 index 00000000000..96aab5c93ef --- /dev/null +++ b/tests/components/peblar/snapshots/test_button.ambr @@ -0,0 +1,95 @@ +# serializer version: 1 +# name: test_entities[button][button.peblar_ev_charger_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.peblar_ev_charger_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '23-45-A4O-MOF_identify', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[button][button.peblar_ev_charger_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Peblar EV 
Charger Identify', + }), + 'context': , + 'entity_id': 'button.peblar_ev_charger_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entities[button][button.peblar_ev_charger_restart-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.peblar_ev_charger_restart', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Restart', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '23-45-A4O-MOF_reboot', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[button][button.peblar_ev_charger_restart-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'Peblar EV Charger Restart', + }), + 'context': , + 'entity_id': 'button.peblar_ev_charger_restart', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/peblar/test_button.py b/tests/components/peblar/test_button.py new file mode 100644 index 00000000000..7b271d3747a --- /dev/null +++ b/tests/components/peblar/test_button.py @@ -0,0 +1,36 @@ +"""Tests for the Peblar button platform.""" + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.peblar.const import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.freeze_time("2024-12-21 21:45:00") +@pytest.mark.parametrize("init_integration", [Platform.BUTTON], indirect=True) +@pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration") +async def test_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the button entities.""" + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + # Ensure all entities are correctly assigned to the Peblar device + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, "23-45-A4O-MOF")} + ) + assert device_entry + entity_entries = er.async_entries_for_config_entry( + entity_registry, mock_config_entry.entry_id + ) + for entity_entry in entity_entries: + assert entity_entry.device_id == device_entry.id From 7be3cad1db91b0cab526cfda1764ca23935b6e14 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sun, 22 Dec 2024 12:00:24 +0100 Subject: [PATCH 0998/1198] Refactor Twinkly tests (#133725) --- homeassistant/components/twinkly/strings.json | 2 +- tests/components/twinkly/__init__.py | 121 +---- tests/components/twinkly/conftest.py | 89 ++-- tests/components/twinkly/const.py | 5 + .../twinkly/fixtures/get_current_movie.json | 3 + .../twinkly/fixtures/get_details.json | 23 + .../fixtures/get_firmware_version.json | 1 + .../twinkly/fixtures/get_saved_movies.json | 4 + .../twinkly/snapshots/test_diagnostics.ambr | 55 ++- .../twinkly/snapshots/test_light.ambr | 75 ++++ 
tests/components/twinkly/test_config_flow.py | 306 ++++++------- tests/components/twinkly/test_diagnostics.py | 22 +- tests/components/twinkly/test_init.py | 80 ++-- tests/components/twinkly/test_light.py | 412 ++++++++---------- 14 files changed, 575 insertions(+), 623 deletions(-) create mode 100644 tests/components/twinkly/const.py create mode 100644 tests/components/twinkly/fixtures/get_current_movie.json create mode 100644 tests/components/twinkly/fixtures/get_details.json create mode 100644 tests/components/twinkly/fixtures/get_firmware_version.json create mode 100644 tests/components/twinkly/fixtures/get_saved_movies.json create mode 100644 tests/components/twinkly/snapshots/test_light.ambr diff --git a/homeassistant/components/twinkly/strings.json b/homeassistant/components/twinkly/strings.json index 88bc67abbbd..d27de8a75de 100644 --- a/homeassistant/components/twinkly/strings.json +++ b/homeassistant/components/twinkly/strings.json @@ -17,7 +17,7 @@ "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" }, "abort": { - "device_exists": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" } } } diff --git a/tests/components/twinkly/__init__.py b/tests/components/twinkly/__init__.py index 192a5c0e220..7b0ca20fbe1 100644 --- a/tests/components/twinkly/__init__.py +++ b/tests/components/twinkly/__init__.py @@ -1,120 +1,13 @@ """Constants and mock for the twinkly component tests.""" -from aiohttp.client_exceptions import ClientConnectionError +from homeassistant.core import HomeAssistant -from homeassistant.components.twinkly.const import DEV_NAME - -TEST_HOST = "test.twinkly.com" -TEST_ID = "twinkly_test_device_id" -TEST_UID = "4c8fccf5-e08a-4173-92d5-49bf479252a2" -TEST_MAC = "aa:bb:cc:dd:ee:ff" -TEST_NAME = "twinkly_test_device_name" -TEST_NAME_ORIGINAL = "twinkly_test_original_device_name" # the original (deprecated) name stored in the conf -TEST_MODEL = "twinkly_test_device_model" +from tests.common import MockConfigEntry -class ClientMock: - """A mock of the ttls.client.Twinkly.""" +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) - def __init__(self) -> None: - """Create a mocked client.""" - self.is_offline = False - self.state = True - self.brightness = {"mode": "enabled", "value": 10} - self.color = None - self.movies = [{"id": 1, "name": "Rainbow"}, {"id": 2, "name": "Flare"}] - self.current_movie = {} - self.default_mode = "movie" - self.mode = None - self.version = "2.8.10" - - self.id = TEST_UID - self.device_info = { - "uuid": self.id, - "device_name": TEST_NAME, - "mac": TEST_MAC, - "product_code": TEST_MODEL, - } - - @property - def host(self) -> str: - """Get the mocked host.""" - return TEST_HOST - - async def get_details(self): - """Get the mocked device info.""" - if self.is_offline: - raise ClientConnectionError - return self.device_info - - async def is_on(self) -> bool: - """Get the mocked on/off state.""" - if self.is_offline: - raise ClientConnectionError - return self.state - - async def turn_on(self) -> None: - """Set the mocked on state.""" - if self.is_offline: - raise ClientConnectionError - self.state = True - self.mode = self.default_mode - - async def turn_off(self) -> None: - """Set the mocked off state.""" - if self.is_offline: - raise ClientConnectionError - self.state = False - - async def get_brightness(self) -> int: 
- """Get the mocked brightness.""" - if self.is_offline: - raise ClientConnectionError - return self.brightness - - async def set_brightness(self, brightness: int) -> None: - """Set the mocked brightness.""" - if self.is_offline: - raise ClientConnectionError - self.brightness = {"mode": "enabled", "value": brightness} - - def change_name(self, new_name: str) -> None: - """Change the name of this virtual device.""" - self.device_info[DEV_NAME] = new_name - - async def set_static_colour(self, colour) -> None: - """Set static color.""" - self.color = colour - self.default_mode = "color" - - async def set_cycle_colours(self, colour) -> None: - """Set static color.""" - self.color = colour - self.default_mode = "movie" - - async def interview(self) -> None: - """Interview.""" - - async def get_saved_movies(self) -> dict: - """Get saved movies.""" - return self.movies - - async def get_current_movie(self) -> dict: - """Get current movie.""" - return self.current_movie - - async def set_current_movie(self, movie_id: int) -> dict: - """Set current movie.""" - self.current_movie = {"id": movie_id} - - async def set_mode(self, mode: str) -> None: - """Set mode.""" - if mode == "off": - await self.turn_off() - else: - await self.turn_on() - self.mode = mode - - async def get_firmware_version(self) -> dict: - """Get firmware version.""" - return {"version": self.version} + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/twinkly/conftest.py b/tests/components/twinkly/conftest.py index 19361af2003..6b32c786c99 100644 --- a/tests/components/twinkly/conftest.py +++ b/tests/components/twinkly/conftest.py @@ -1,55 +1,74 @@ """Configure tests for the Twinkly integration.""" -from collections.abc import Awaitable, Callable, Coroutine -from typing import Any -from unittest.mock import patch +from collections.abc import Generator +from unittest.mock import AsyncMock, patch import pytest -from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component +from homeassistant.components.twinkly import DOMAIN +from homeassistant.const import CONF_HOST, CONF_ID, CONF_MODEL, CONF_NAME -from . 
import TEST_MODEL, TEST_NAME, TEST_UID, ClientMock +from .const import TEST_MAC, TEST_MODEL, TEST_NAME -from tests.common import MockConfigEntry - -type ComponentSetup = Callable[[], Awaitable[ClientMock]] - -DOMAIN = "twinkly" -TITLE = "Twinkly" +from tests.common import ( + MockConfigEntry, + load_json_array_fixture, + load_json_object_fixture, +) -@pytest.fixture(name="config_entry") +@pytest.fixture def mock_config_entry() -> MockConfigEntry: """Create Twinkly entry in Home Assistant.""" - client = ClientMock() return MockConfigEntry( domain=DOMAIN, - title=TITLE, - unique_id=TEST_UID, - entry_id=TEST_UID, + title="Twinkly", + unique_id=TEST_MAC, data={ - "host": client.host, - "id": client.id, - "name": TEST_NAME, - "model": TEST_MODEL, - "device_name": TEST_NAME, + CONF_HOST: "192.168.0.123", + CONF_ID: "497dcba3-ecbf-4587-a2dd-5eb0665e6880", + CONF_NAME: TEST_NAME, + CONF_MODEL: TEST_MODEL, }, + entry_id="01JFMME2P6RA38V5AMPCJ2JYYV", + minor_version=2, ) -@pytest.fixture(name="setup_integration") -async def mock_setup_integration( - hass: HomeAssistant, config_entry: MockConfigEntry -) -> Callable[[], Coroutine[Any, Any, ClientMock]]: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) +@pytest.fixture +def mock_twinkly_client() -> Generator[AsyncMock]: + """Mock the Twinkly client.""" + with ( + patch( + "homeassistant.components.twinkly.Twinkly", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.twinkly.config_flow.Twinkly", + new=mock_client, + ), + ): + client = mock_client.return_value + client.get_details.return_value = load_json_object_fixture( + "get_details.json", DOMAIN + ) + client.get_firmware_version.return_value = load_json_object_fixture( + "get_firmware_version.json", DOMAIN + ) + client.get_saved_movies.return_value = load_json_array_fixture( + "get_saved_movies.json", DOMAIN + ) + client.get_current_movie.return_value = load_json_object_fixture( + "get_current_movie.json", DOMAIN + ) + client.is_on.return_value = True + client.get_brightness.return_value = {"mode": "enabled", "value": 10} + client.host = "192.168.0.123" + yield client - async def func() -> ClientMock: - mock = ClientMock() - with patch("homeassistant.components.twinkly.Twinkly", return_value=mock): - assert await async_setup_component(hass, DOMAIN, {}) - await hass.async_block_till_done() - return mock - return func +@pytest.fixture +def mock_setup_entry() -> Generator[None]: + """Mock setting up a config entry.""" + with patch("homeassistant.components.twinkly.async_setup_entry", return_value=True): + yield diff --git a/tests/components/twinkly/const.py b/tests/components/twinkly/const.py new file mode 100644 index 00000000000..c2530f1a19d --- /dev/null +++ b/tests/components/twinkly/const.py @@ -0,0 +1,5 @@ +"""Constants for the Twinkly tests.""" + +TEST_MAC = "00:2d:13:3b:aa:bb" +TEST_NAME = "Tree 1" +TEST_MODEL = "TW2016" diff --git a/tests/components/twinkly/fixtures/get_current_movie.json b/tests/components/twinkly/fixtures/get_current_movie.json new file mode 100644 index 00000000000..2572ae5fe7c --- /dev/null +++ b/tests/components/twinkly/fixtures/get_current_movie.json @@ -0,0 +1,3 @@ +{ + "id": 1 +} diff --git a/tests/components/twinkly/fixtures/get_details.json b/tests/components/twinkly/fixtures/get_details.json new file mode 100644 index 00000000000..1519520b0b9 --- /dev/null +++ b/tests/components/twinkly/fixtures/get_details.json @@ -0,0 +1,23 @@ +{ + "product_name": "Twinkly", + "product_version": "1", + "hardware_version": "1", + 
"flash_size": 4, + "led_type": 1, + "led_version": "1", + "product_code": "TW2016", + "device_name": "Tree 1", + "uptime": "4087441", + "rssi": -78, + "hw_id": "002d133b", + "mac": "00:2d:13:3b:aa:bb", + "uuid": "00000000-0000-0000-0000-000000000000", + "max_supported_led": 100, + "base_leds_number": 100, + "number_of_led": 100, + "led_profile": "RGB", + "frame_rate": 14, + "movie_capacity": 708, + "copyright": "LEDWORKS 2017", + "code": 1000 +} diff --git a/tests/components/twinkly/fixtures/get_firmware_version.json b/tests/components/twinkly/fixtures/get_firmware_version.json new file mode 100644 index 00000000000..4f3df8b9ed1 --- /dev/null +++ b/tests/components/twinkly/fixtures/get_firmware_version.json @@ -0,0 +1 @@ +{ "version": "2.7.2" } diff --git a/tests/components/twinkly/fixtures/get_saved_movies.json b/tests/components/twinkly/fixtures/get_saved_movies.json new file mode 100644 index 00000000000..0ee21f3254d --- /dev/null +++ b/tests/components/twinkly/fixtures/get_saved_movies.json @@ -0,0 +1,4 @@ +[ + { "id": 1, "name": "Rainbow" }, + { "id": 2, "name": "Flare" } +] diff --git a/tests/components/twinkly/snapshots/test_diagnostics.ambr b/tests/components/twinkly/snapshots/test_diagnostics.ambr index abd923dcb83..e9c89754ab7 100644 --- a/tests/components/twinkly/snapshots/test_diagnostics.ambr +++ b/tests/components/twinkly/snapshots/test_diagnostics.ambr @@ -3,35 +3,64 @@ dict({ 'attributes': dict({ 'brightness': 26, - 'color_mode': 'brightness', + 'color_mode': 'rgb', 'effect': None, 'effect_list': list([ ]), - 'friendly_name': 'twinkly_test_device_name', + 'friendly_name': 'Tree 1', + 'hs_color': list([ + 0.0, + 0.0, + ]), + 'rgb_color': list([ + 255, + 255, + 255, + ]), 'supported_color_modes': list([ - 'brightness', + 'rgb', ]), 'supported_features': 4, + 'xy_color': list([ + 0.323, + 0.329, + ]), }), 'device_info': dict({ - 'device_name': 'twinkly_test_device_name', + 'base_leds_number': 100, + 'code': 1000, + 'copyright': 'LEDWORKS 2017', + 'device_name': 'Tree 1', + 'flash_size': 4, + 'frame_rate': 14, + 'hardware_version': '1', + 'hw_id': '002d133b', + 'led_profile': 'RGB', + 'led_type': 1, + 'led_version': '1', 'mac': '**REDACTED**', - 'product_code': 'twinkly_test_device_model', - 'uuid': '4c8fccf5-e08a-4173-92d5-49bf479252a2', + 'max_supported_led': 100, + 'movie_capacity': 708, + 'number_of_led': 100, + 'product_code': 'TW2016', + 'product_name': 'Twinkly', + 'product_version': '1', + 'rssi': -78, + 'uptime': '4087441', + 'uuid': '00000000-0000-0000-0000-000000000000', }), 'entry': dict({ 'data': dict({ - 'device_name': 'twinkly_test_device_name', 'host': '**REDACTED**', - 'id': '4c8fccf5-e08a-4173-92d5-49bf479252a2', - 'model': 'twinkly_test_device_model', - 'name': 'twinkly_test_device_name', + 'id': '497dcba3-ecbf-4587-a2dd-5eb0665e6880', + 'model': 'TW2016', + 'name': 'Tree 1', }), 'disabled_by': None, 'discovery_keys': dict({ }), 'domain': 'twinkly', - 'entry_id': '4c8fccf5-e08a-4173-92d5-49bf479252a2', + 'entry_id': '01JFMME2P6RA38V5AMPCJ2JYYV', 'minor_version': 2, 'options': dict({ }), @@ -39,9 +68,9 @@ 'pref_disable_polling': False, 'source': 'user', 'title': 'Twinkly', - 'unique_id': 'aa:bb:cc:dd:ee:ff', + 'unique_id': '00:2d:13:3b:aa:bb', 'version': 1, }), - 'sw_version': '2.8.10', + 'sw_version': '2.7.2', }) # --- diff --git a/tests/components/twinkly/snapshots/test_light.ambr b/tests/components/twinkly/snapshots/test_light.ambr new file mode 100644 index 00000000000..ac4e275a0a1 --- /dev/null +++ b/tests/components/twinkly/snapshots/test_light.ambr @@ 
-0,0 +1,75 @@ +# serializer version: 1 +# name: test_entities[light.tree_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.tree_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'twinkly', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'light', + 'unique_id': '00:2d:13:3b:aa:bb', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[light.tree_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 26, + 'color_mode': , + 'effect': None, + 'effect_list': list([ + ]), + 'friendly_name': 'Tree 1', + 'hs_color': tuple( + 0.0, + 0.0, + ), + 'rgb_color': tuple( + 255, + 255, + 255, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.323, + 0.329, + ), + }), + 'context': , + 'entity_id': 'light.tree_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/twinkly/test_config_flow.py b/tests/components/twinkly/test_config_flow.py index 8d8e955291e..2b61b26fe0c 100644 --- a/tests/components/twinkly/test_config_flow.py +++ b/tests/components/twinkly/test_config_flow.py @@ -1,196 +1,170 @@ """Tests for the config_flow of the twinly component.""" -from unittest.mock import patch +from unittest.mock import AsyncMock + +import pytest -from homeassistant import config_entries from homeassistant.components import dhcp -from homeassistant.components.twinkly.const import DOMAIN as TWINKLY_DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.components.twinkly.const import DOMAIN +from homeassistant.config_entries import SOURCE_DHCP, SOURCE_USER from homeassistant.const import CONF_HOST, CONF_ID, CONF_MODEL, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from . 
import TEST_MODEL, TEST_NAME, ClientMock +from .const import TEST_MAC, TEST_MODEL, TEST_NAME from tests.common import MockConfigEntry -async def test_invalid_host(hass: HomeAssistant) -> None: - """Test the failure when invalid host provided.""" - client = ClientMock() - client.is_offline = True - with patch( - "homeassistant.components.twinkly.config_flow.Twinkly", return_value=client - ): - result = await hass.config_entries.flow.async_init( - TWINKLY_DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: "dummy"}, - ) +@pytest.mark.usefixtures("mock_twinkly_client", "mock_setup_entry") +async def test_full_flow(hass: HomeAssistant) -> None: + """Test the full flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.0.123"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == TEST_NAME + assert result["data"] == { + CONF_HOST: "192.168.0.123", + CONF_ID: "00000000-0000-0000-0000-000000000000", + CONF_NAME: TEST_NAME, + CONF_MODEL: TEST_MODEL, + } + assert result["result"].unique_id == TEST_MAC + + +@pytest.mark.usefixtures("mock_setup_entry") +async def test_exceptions(hass: HomeAssistant, mock_twinkly_client: AsyncMock) -> None: + """Test the failure when raising exceptions.""" + mock_twinkly_client.get_details.side_effect = TimeoutError + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.0.123"}, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert result["errors"] == {CONF_HOST: "cannot_connect"} + mock_twinkly_client.get_details.side_effect = None -async def test_success_flow(hass: HomeAssistant) -> None: - """Test that an entity is created when the flow completes.""" - client = ClientMock() - with ( - patch( - "homeassistant.components.twinkly.config_flow.Twinkly", return_value=client - ), - patch("homeassistant.components.twinkly.async_setup_entry", return_value=True), - ): - result = await hass.config_entries.flow.async_init( - TWINKLY_DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: "dummy"}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == TEST_NAME - assert result["data"] == { - CONF_HOST: "dummy", - CONF_ID: client.id, - CONF_NAME: TEST_NAME, - CONF_MODEL: TEST_MODEL, - } - - -async def test_dhcp_can_confirm(hass: HomeAssistant) -> None: - """Test DHCP discovery flow can confirm right away.""" - client = ClientMock() - with patch( - "homeassistant.components.twinkly.config_flow.Twinkly", return_value=client - ): - result = await hass.config_entries.flow.async_init( - TWINKLY_DOMAIN, - context={"source": config_entries.SOURCE_DHCP}, - data=dhcp.DhcpServiceInfo( - 
hostname="Twinkly_XYZ", - ip="1.2.3.4", - macaddress="aabbccddeeff", - ), - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "discovery_confirm" - - -async def test_dhcp_success(hass: HomeAssistant) -> None: - """Test DHCP discovery flow success.""" - client = ClientMock() - with ( - patch( - "homeassistant.components.twinkly.config_flow.Twinkly", return_value=client - ), - patch("homeassistant.components.twinkly.async_setup_entry", return_value=True), - ): - result = await hass.config_entries.flow.async_init( - TWINKLY_DOMAIN, - context={"source": config_entries.SOURCE_DHCP}, - data=dhcp.DhcpServiceInfo( - hostname="Twinkly_XYZ", - ip="1.2.3.4", - macaddress="aabbccddeeff", - ), - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "discovery_confirm" - - result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == TEST_NAME - assert result["data"] == { - CONF_HOST: "1.2.3.4", - CONF_ID: client.id, - CONF_NAME: TEST_NAME, - CONF_MODEL: TEST_MODEL, - } - - -async def test_dhcp_already_exists(hass: HomeAssistant) -> None: - """Test DHCP discovery flow that fails to connect.""" - client = ClientMock() - - entry = MockConfigEntry( - domain=TWINKLY_DOMAIN, - data={ - CONF_HOST: "1.2.3.4", - CONF_ID: client.id, - CONF_NAME: TEST_NAME, - CONF_MODEL: TEST_MODEL, - }, - unique_id=client.id, + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.0.123"}, ) - entry.add_to_hass(hass) + assert result["type"] is FlowResultType.CREATE_ENTRY - with patch( - "homeassistant.components.twinkly.config_flow.Twinkly", return_value=client - ): - result = await hass.config_entries.flow.async_init( - TWINKLY_DOMAIN, - context={"source": config_entries.SOURCE_DHCP}, - data=dhcp.DhcpServiceInfo( - hostname="Twinkly_XYZ", - ip="1.2.3.4", - macaddress="aabbccddeeff", - ), - ) - await hass.async_block_till_done() +@pytest.mark.usefixtures("mock_twinkly_client", "mock_setup_entry") +async def test_already_configured( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test the device is already configured.""" + mock_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: "192.168.0.123"} + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" +@pytest.mark.usefixtures("mock_twinkly_client", "mock_setup_entry") +async def test_dhcp_full_flow(hass: HomeAssistant) -> None: + """Test DHCP discovery flow can confirm right away.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_DHCP}, + data=dhcp.DhcpServiceInfo( + hostname="Twinkly_XYZ", + ip="1.2.3.4", + macaddress="002d133baabb", + ), + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "discovery_confirm" + + result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == TEST_NAME + assert result["data"] == { + CONF_HOST: "1.2.3.4", + CONF_ID: 
"00000000-0000-0000-0000-000000000000", + CONF_NAME: TEST_NAME, + CONF_MODEL: TEST_MODEL, + } + assert result["result"].unique_id == TEST_MAC + + +@pytest.mark.usefixtures("mock_twinkly_client") +async def test_dhcp_already_configured( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test DHCP discovery flow aborts if entry already setup.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_DHCP}, + data=dhcp.DhcpServiceInfo( + hostname="Twinkly_XYZ", + ip="1.2.3.4", + macaddress="002d133baabb", + ), + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + assert mock_config_entry.data[CONF_HOST] == "1.2.3.4" + + +@pytest.mark.usefixtures("mock_twinkly_client", "mock_setup_entry") async def test_user_flow_works_discovery(hass: HomeAssistant) -> None: """Test user flow can continue after discovery happened.""" - client = ClientMock() - with ( - patch( - "homeassistant.components.twinkly.config_flow.Twinkly", return_value=client + await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_DHCP}, + data=dhcp.DhcpServiceInfo( + hostname="Twinkly_XYZ", + ip="1.2.3.4", + macaddress="002d133baabb", ), - patch("homeassistant.components.twinkly.async_setup_entry", return_value=True), - ): - await hass.config_entries.flow.async_init( - TWINKLY_DOMAIN, - context={"source": config_entries.SOURCE_DHCP}, - data=dhcp.DhcpServiceInfo( - hostname="Twinkly_XYZ", - ip="1.2.3.4", - macaddress="aabbccddeeff", - ), - ) - result = await hass.config_entries.flow.async_init( - TWINKLY_DOMAIN, - context={"source": SOURCE_USER}, - ) - assert len(hass.config_entries.flow.async_progress(TWINKLY_DOMAIN)) == 2 - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert len(hass.config_entries.flow.async_progress(DOMAIN)) == 2 + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: "10.0.0.131"}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "10.0.0.131"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY - # Verify the discovery flow was aborted - assert not hass.config_entries.flow.async_progress(TWINKLY_DOMAIN) + # Verify the discovery flow was aborted + assert not hass.config_entries.flow.async_progress(DOMAIN) diff --git a/tests/components/twinkly/test_diagnostics.py b/tests/components/twinkly/test_diagnostics.py index f9cf0bc562c..d7ef4dd9b11 100644 --- a/tests/components/twinkly/test_diagnostics.py +++ b/tests/components/twinkly/test_diagnostics.py @@ -1,32 +1,28 @@ """Tests for the diagnostics of the twinkly component.""" -from collections.abc import Awaitable, Callable - +import pytest from syrupy import SnapshotAssertion from syrupy.filters import props from homeassistant.core import HomeAssistant -from . import ClientMock +from . 
import setup_integration +from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator -type ComponentSetup = Callable[[], Awaitable[ClientMock]] - -DOMAIN = "twinkly" - +@pytest.mark.usefixtures("mock_twinkly_client") async def test_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, - setup_integration: ComponentSetup, + mock_config_entry: MockConfigEntry, snapshot: SnapshotAssertion, ) -> None: """Test diagnostics.""" - await setup_integration() - entry = hass.config_entries.async_entries(DOMAIN)[0] + await setup_integration(hass, mock_config_entry) - assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( - exclude=props("created_at", "modified_at") - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/twinkly/test_init.py b/tests/components/twinkly/test_init.py index 60ebe65b445..0a76a399b63 100644 --- a/tests/components/twinkly/test_init.py +++ b/tests/components/twinkly/test_init.py @@ -1,7 +1,9 @@ """Tests of the initialization of the twinkly integration.""" -from unittest.mock import patch -from uuid import uuid4 +from unittest.mock import AsyncMock + +from aiohttp import ClientConnectionError +import pytest from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN from homeassistant.components.twinkly.const import DOMAIN @@ -10,82 +12,55 @@ from homeassistant.const import CONF_HOST, CONF_ID, CONF_MODEL, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from . import TEST_HOST, TEST_MAC, TEST_MODEL, TEST_NAME_ORIGINAL, ClientMock +from . 
import setup_integration +from .const import TEST_MAC, TEST_MODEL from tests.common import MockConfigEntry -async def test_load_unload_entry(hass: HomeAssistant) -> None: - """Validate that setup entry also configure the client.""" - client = ClientMock() +@pytest.mark.usefixtures("mock_twinkly_client") +async def test_load_unload_entry( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test the load/unload of the config entry.""" - device_id = str(uuid4()) - config_entry = MockConfigEntry( - domain=DOMAIN, - data={ - CONF_HOST: TEST_HOST, - CONF_ID: device_id, - CONF_NAME: TEST_NAME_ORIGINAL, - CONF_MODEL: TEST_MODEL, - }, - entry_id=device_id, - unique_id=TEST_MAC, - minor_version=2, - ) + await setup_integration(hass, mock_config_entry) - config_entry.add_to_hass(hass) + assert mock_config_entry.state is ConfigEntryState.LOADED - with patch("homeassistant.components.twinkly.Twinkly", return_value=client): - await hass.config_entries.async_setup(config_entry.entry_id) + await hass.config_entries.async_unload(mock_config_entry.entry_id) - assert config_entry.state is ConfigEntryState.LOADED - - await hass.config_entries.async_unload(config_entry.entry_id) - - assert config_entry.state is ConfigEntryState.NOT_LOADED + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED -async def test_config_entry_not_ready(hass: HomeAssistant) -> None: +async def test_config_entry_not_ready( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_twinkly_client: AsyncMock, +) -> None: """Validate that config entry is retried.""" - client = ClientMock() - client.is_offline = True + mock_twinkly_client.get_details.side_effect = ClientConnectionError - config_entry = MockConfigEntry( - domain=DOMAIN, - data={ - CONF_HOST: TEST_HOST, - CONF_ID: id, - CONF_NAME: TEST_NAME_ORIGINAL, - CONF_MODEL: TEST_MODEL, - }, - minor_version=2, - unique_id=TEST_MAC, - ) + await setup_integration(hass, mock_config_entry) - config_entry.add_to_hass(hass) - - with patch("homeassistant.components.twinkly.Twinkly", return_value=client): - await hass.config_entries.async_setup(config_entry.entry_id) - - assert config_entry.state is ConfigEntryState.SETUP_RETRY + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY +@pytest.mark.usefixtures("mock_twinkly_client") async def test_mac_migration( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, ) -> None: """Validate that the unique_id is migrated to the MAC address.""" - client = ClientMock() - config_entry = MockConfigEntry( domain=DOMAIN, minor_version=1, unique_id="unique_id", data={ - CONF_HOST: TEST_HOST, + CONF_HOST: "192.168.0.123", CONF_ID: id, - CONF_NAME: TEST_NAME_ORIGINAL, + CONF_NAME: "Tree 1", CONF_MODEL: TEST_MODEL, }, ) @@ -100,8 +75,7 @@ async def test_mac_migration( identifiers={(DOMAIN, config_entry.unique_id)}, ) - with patch("homeassistant.components.twinkly.Twinkly", return_value=client): - await hass.config_entries.async_setup(config_entry.entry_id) + await hass.config_entries.async_setup(config_entry.entry_id) assert config_entry.state is ConfigEntryState.LOADED diff --git a/tests/components/twinkly/test_light.py b/tests/components/twinkly/test_light.py index 26df83aebe0..c008ab51ef7 100644 --- a/tests/components/twinkly/test_light.py +++ b/tests/components/twinkly/test_light.py @@ -3,290 +3,287 @@ from __future__ import annotations from datetime import timedelta -from unittest.mock import patch +from typing import Any +from unittest.mock import AsyncMock, 
patch from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import SnapshotAssertion -from homeassistant.components.light import ATTR_BRIGHTNESS, LightEntityFeature -from homeassistant.components.twinkly.const import DOMAIN as TWINKLY_DOMAIN -from homeassistant.const import CONF_HOST, CONF_ID, CONF_MODEL, CONF_NAME +from homeassistant.components.light import ( + ATTR_BRIGHTNESS, + ATTR_EFFECT, + ATTR_RGB_COLOR, + ATTR_RGBW_COLOR, + DOMAIN as LIGHT_DOMAIN, + LightEntityFeature, +) +from homeassistant.components.twinkly import DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + ATTR_SUPPORTED_FEATURES, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_OFF, + Platform, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.helpers.device_registry import DeviceEntry -from homeassistant.helpers.entity_registry import RegistryEntry -from . import TEST_MAC, TEST_MODEL, TEST_NAME, TEST_NAME_ORIGINAL, ClientMock +from . import setup_integration +from .const import TEST_MAC -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform -async def test_initial_state(hass: HomeAssistant) -> None: - """Validate that entity and device states are updated on startup.""" - entity, device, _, _ = await _create_entries(hass) +@pytest.mark.usefixtures("mock_twinkly_client") +async def test_entities( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the created entities.""" + with patch("homeassistant.components.twinkly.PLATFORMS", [Platform.LIGHT]): + await setup_integration(hass, mock_config_entry) - state = hass.states.get(entity.entity_id) - - # Basic state properties - assert state.name == TEST_NAME - assert state.state == "on" - assert state.attributes[ATTR_BRIGHTNESS] == 26 - assert state.attributes["friendly_name"] == TEST_NAME - - assert device.name == TEST_NAME - assert device.model == TEST_MODEL - assert device.manufacturer == "LEDWORKS" + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) -async def test_turn_on_off(hass: HomeAssistant) -> None: +async def test_turn_on_off( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_twinkly_client: AsyncMock, +) -> None: """Test support of the light.turn_on service.""" - client = ClientMock() - client.state = False - client.brightness = {"mode": "enabled", "value": 20} - entity, _, _, _ = await _create_entries(hass, client) + mock_twinkly_client.is_on.return_value = False - assert hass.states.get(entity.entity_id).state == "off" + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("light.tree_1").state == STATE_OFF await hass.services.async_call( - "light", "turn_on", service_data={"entity_id": entity.entity_id}, blocking=True + LIGHT_DOMAIN, + SERVICE_TURN_ON, + service_data={ATTR_ENTITY_ID: "light.tree_1"}, + blocking=True, ) - state = hass.states.get(entity.entity_id) - - assert state.state == "on" - assert state.attributes[ATTR_BRIGHTNESS] == 51 + mock_twinkly_client.turn_on.assert_called_once_with() -async def test_turn_on_with_brightness(hass: HomeAssistant) -> None: +async def test_turn_on_with_brightness( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_twinkly_client: AsyncMock, +) -> None: """Test support of the light.turn_on service with a 
brightness parameter.""" - client = ClientMock() - client.state = False - client.brightness = {"mode": "enabled", "value": 20} - entity, _, _, _ = await _create_entries(hass, client) + mock_twinkly_client.is_on.return_value = False - assert hass.states.get(entity.entity_id).state == "off" + await setup_integration(hass, mock_config_entry) await hass.services.async_call( - "light", - "turn_on", - service_data={"entity_id": entity.entity_id, "brightness": 255}, + LIGHT_DOMAIN, + SERVICE_TURN_ON, + service_data={ATTR_ENTITY_ID: "light.tree_1", ATTR_BRIGHTNESS: 255}, blocking=True, ) - state = hass.states.get(entity.entity_id) + mock_twinkly_client.set_brightness.assert_called_once_with(100) + mock_twinkly_client.turn_on.assert_called_once_with() - assert state.state == "on" - assert state.attributes[ATTR_BRIGHTNESS] == 255 + +async def test_brightness_to_zero( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_twinkly_client: AsyncMock, +) -> None: + """Test support of the light.turn_on service with a brightness parameter.""" + await setup_integration(hass, mock_config_entry) await hass.services.async_call( - "light", - "turn_on", - service_data={"entity_id": entity.entity_id, "brightness": 1}, + LIGHT_DOMAIN, + SERVICE_TURN_ON, + service_data={ATTR_ENTITY_ID: "light.tree_1", ATTR_BRIGHTNESS: 1}, blocking=True, ) - state = hass.states.get(entity.entity_id) - - assert state.state == "off" + mock_twinkly_client.set_brightness.assert_not_called() + mock_twinkly_client.turn_off.assert_called_once_with() -async def test_turn_on_with_color_rgbw(hass: HomeAssistant) -> None: +async def test_turn_on_with_color_rgbw( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_twinkly_client: AsyncMock, +) -> None: """Test support of the light.turn_on service with a rgbw parameter.""" - client = ClientMock() - client.state = False - client.device_info["led_profile"] = "RGBW" - client.brightness = {"mode": "enabled", "value": 255} - entity, _, _, _ = await _create_entries(hass, client) + mock_twinkly_client.is_on.return_value = False + mock_twinkly_client.get_details.return_value["led_profile"] = "RGBW" - assert hass.states.get(entity.entity_id).state == "off" + await setup_integration(hass, mock_config_entry) assert ( LightEntityFeature.EFFECT - & hass.states.get(entity.entity_id).attributes["supported_features"] + & hass.states.get("light.tree_1").attributes[ATTR_SUPPORTED_FEATURES] ) await hass.services.async_call( - "light", - "turn_on", - service_data={"entity_id": entity.entity_id, "rgbw_color": (128, 64, 32, 0)}, + LIGHT_DOMAIN, + SERVICE_TURN_ON, + service_data={ + ATTR_ENTITY_ID: "light.tree_1", + ATTR_RGBW_COLOR: (128, 64, 32, 0), + }, blocking=True, ) - state = hass.states.get(entity.entity_id) - - assert state.state == "on" - assert client.color == (128, 64, 32) - assert client.default_mode == "color" - assert client.mode == "color" + mock_twinkly_client.interview.assert_called_once_with() + mock_twinkly_client.set_static_colour.assert_called_once_with((128, 64, 32)) + mock_twinkly_client.set_mode.assert_called_once_with("color") + assert mock_twinkly_client.default_mode == "color" -async def test_turn_on_with_color_rgb(hass: HomeAssistant) -> None: +async def test_turn_on_with_color_rgb( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_twinkly_client: AsyncMock, +) -> None: """Test support of the light.turn_on service with a rgb parameter.""" - client = ClientMock() - client.state = False - client.device_info["led_profile"] = "RGB" - 
client.brightness = {"mode": "enabled", "value": 255} - entity, _, _, _ = await _create_entries(hass, client) + mock_twinkly_client.is_on.return_value = False + mock_twinkly_client.get_details.return_value["led_profile"] = "RGB" - assert hass.states.get(entity.entity_id).state == "off" + await setup_integration(hass, mock_config_entry) assert ( LightEntityFeature.EFFECT - & hass.states.get(entity.entity_id).attributes["supported_features"] + & hass.states.get("light.tree_1").attributes[ATTR_SUPPORTED_FEATURES] ) await hass.services.async_call( - "light", - "turn_on", - service_data={"entity_id": entity.entity_id, "rgb_color": (128, 64, 32)}, + LIGHT_DOMAIN, + SERVICE_TURN_ON, + service_data={ATTR_ENTITY_ID: "light.tree_1", ATTR_RGB_COLOR: (128, 64, 32)}, blocking=True, ) - state = hass.states.get(entity.entity_id) - - assert state.state == "on" - assert client.color == (128, 64, 32) - assert client.default_mode == "color" - assert client.mode == "color" + mock_twinkly_client.interview.assert_called_once_with() + mock_twinkly_client.set_static_colour.assert_called_once_with((128, 64, 32)) + mock_twinkly_client.set_mode.assert_called_once_with("color") + assert mock_twinkly_client.default_mode == "color" -async def test_turn_on_with_effect(hass: HomeAssistant) -> None: +async def test_turn_on_with_effect( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_twinkly_client: AsyncMock, +) -> None: """Test support of the light.turn_on service with effects.""" - client = ClientMock() - client.state = False - client.device_info["led_profile"] = "RGB" - client.brightness = {"mode": "enabled", "value": 255} - entity, _, _, _ = await _create_entries(hass, client) + mock_twinkly_client.is_on.return_value = False + mock_twinkly_client.get_details.return_value["led_profile"] = "RGB" - assert hass.states.get(entity.entity_id).state == "off" - assert not client.current_movie + await setup_integration(hass, mock_config_entry) assert ( LightEntityFeature.EFFECT - & hass.states.get(entity.entity_id).attributes["supported_features"] + & hass.states.get("light.tree_1").attributes[ATTR_SUPPORTED_FEATURES] ) await hass.services.async_call( - "light", - "turn_on", - service_data={"entity_id": entity.entity_id, "effect": "1 Rainbow"}, + LIGHT_DOMAIN, + SERVICE_TURN_ON, + service_data={ATTR_ENTITY_ID: "light.tree_1", ATTR_EFFECT: "2 Rainbow"}, blocking=True, ) - state = hass.states.get(entity.entity_id) - - assert state.state == "on" - assert client.current_movie["id"] == 1 - assert client.default_mode == "movie" - assert client.mode == "movie" + mock_twinkly_client.interview.assert_called_once_with() + mock_twinkly_client.set_current_movie.assert_called_once_with(2) + mock_twinkly_client.set_mode.assert_called_once_with("movie") + assert mock_twinkly_client.default_mode == "movie" -async def test_turn_on_with_color_rgbw_and_missing_effect(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("data"), + [ + {ATTR_RGBW_COLOR: (128, 64, 32, 0)}, + {ATTR_RGB_COLOR: (128, 64, 32)}, + ], +) +async def test_turn_on_with_missing_effect( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_twinkly_client: AsyncMock, + data: dict[str, Any], +) -> None: """Test support of the light.turn_on service with rgbw color and missing effect support.""" - client = ClientMock() - client.state = False - client.device_info["led_profile"] = "RGBW" - client.brightness = {"mode": "enabled", "value": 255} - client.version = "2.7.0" - entity, _, _, _ = await _create_entries(hass, client) + 
mock_twinkly_client.is_on.return_value = False + mock_twinkly_client.get_firmware_version.return_value["version"] = "2.7.0" - assert hass.states.get(entity.entity_id).state == "off" + await setup_integration(hass, mock_config_entry) assert ( - not LightEntityFeature.EFFECT - & hass.states.get(entity.entity_id).attributes["supported_features"] + LightEntityFeature.EFFECT + ^ hass.states.get("light.tree_1").attributes[ATTR_SUPPORTED_FEATURES] ) await hass.services.async_call( - "light", - "turn_on", - service_data={"entity_id": entity.entity_id, "rgbw_color": (128, 64, 32, 0)}, + LIGHT_DOMAIN, + SERVICE_TURN_ON, + service_data={ATTR_ENTITY_ID: "light.tree_1"} | data, blocking=True, ) - state = hass.states.get(entity.entity_id) - - assert state.state == "on" - assert client.color == (0, 128, 64, 32) - assert client.mode == "movie" - assert client.default_mode == "movie" + mock_twinkly_client.interview.assert_called_once_with() + mock_twinkly_client.set_cycle_colours.assert_called_once_with((128, 64, 32)) + mock_twinkly_client.set_mode.assert_called_once_with("movie") + assert mock_twinkly_client.default_mode == "movie" + mock_twinkly_client.set_current_movie.assert_not_called() -async def test_turn_on_with_color_rgb_and_missing_effect(hass: HomeAssistant) -> None: - """Test support of the light.turn_on service with rgb color and missing effect support.""" - client = ClientMock() - client.state = False - client.device_info["led_profile"] = "RGB" - client.brightness = {"mode": "enabled", "value": 255} - client.version = "2.7.0" - entity, _, _, _ = await _create_entries(hass, client) +async def test_turn_on_with_color_rgbw_and_missing_effect( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_twinkly_client: AsyncMock, +) -> None: + """Test support of the light.turn_on service with missing effect support.""" + mock_twinkly_client.is_on.return_value = False + mock_twinkly_client.get_firmware_version.return_value["version"] = "2.7.0" - assert hass.states.get(entity.entity_id).state == "off" + await setup_integration(hass, mock_config_entry) assert ( - not LightEntityFeature.EFFECT - & hass.states.get(entity.entity_id).attributes["supported_features"] + LightEntityFeature.EFFECT + ^ hass.states.get("light.tree_1").attributes[ATTR_SUPPORTED_FEATURES] ) await hass.services.async_call( - "light", - "turn_on", - service_data={"entity_id": entity.entity_id, "rgb_color": (128, 64, 32)}, + LIGHT_DOMAIN, + SERVICE_TURN_ON, + service_data={ATTR_ENTITY_ID: "light.tree_1", ATTR_EFFECT: "2 Rainbow"}, blocking=True, ) - state = hass.states.get(entity.entity_id) - - assert state.state == "on" - assert client.color == (128, 64, 32) - assert client.mode == "movie" - assert client.default_mode == "movie" + mock_twinkly_client.set_current_movie.assert_not_called() -async def test_turn_on_with_effect_missing_effects(hass: HomeAssistant) -> None: - """Test support of the light.turn_on service with effect set even if effects are not supported.""" - client = ClientMock() - client.state = False - client.device_info["led_profile"] = "RGB" - client.brightness = {"mode": "enabled", "value": 255} - client.version = "2.7.0" - entity, _, _, _ = await _create_entries(hass, client) - - assert hass.states.get(entity.entity_id).state == "off" - assert not client.current_movie - assert ( - not LightEntityFeature.EFFECT - & hass.states.get(entity.entity_id).attributes["supported_features"] - ) - - await hass.services.async_call( - "light", - "turn_on", - service_data={"entity_id": entity.entity_id, "effect": "1 
Rainbow"}, - blocking=True, - ) - - state = hass.states.get(entity.entity_id) - - assert state.state == "on" - assert not client.current_movie - assert client.default_mode == "movie" - assert client.mode == "movie" - - -async def test_turn_off(hass: HomeAssistant) -> None: +async def test_turn_off( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_twinkly_client: AsyncMock, +) -> None: """Test support of the light.turn_off service.""" - entity, _, _, _ = await _create_entries(hass) - - assert hass.states.get(entity.entity_id).state == "on" + await setup_integration(hass, mock_config_entry) await hass.services.async_call( - "light", "turn_off", service_data={"entity_id": entity.entity_id}, blocking=True + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + service_data={ATTR_ENTITY_ID: "light.tree_1"}, + blocking=True, ) - - state = hass.states.get(entity.entity_id) - - assert state.state == "off" + mock_twinkly_client.turn_off.assert_called_once_with() async def test_update_name( hass: HomeAssistant, device_registry: dr.DeviceRegistry, freezer: FrozenDateTimeFactory, + mock_config_entry: MockConfigEntry, + mock_twinkly_client: AsyncMock, ) -> None: """Validate device's name update behavior. @@ -294,56 +291,15 @@ async def test_update_name( then the name of the entity is updated and it's also persisted, so it can be restored when starting HA while Twinkly is offline. """ - entity, _, client, config_entry = await _create_entries(hass) - client.change_name("new_device_name") + await setup_integration(hass, mock_config_entry) + + mock_twinkly_client.get_details.return_value["device_name"] = "new_device_name" + freezer.tick(timedelta(seconds=30)) async_fire_time_changed(hass) await hass.async_block_till_done() - dev_entry = device_registry.async_get_device({(TWINKLY_DOMAIN, TEST_MAC)}) + dev_entry = device_registry.async_get_device({(DOMAIN, TEST_MAC)}) assert dev_entry.name == "new_device_name" - assert config_entry.data[CONF_NAME] == "new_device_name" - - -async def test_unload(hass: HomeAssistant) -> None: - """Validate that entities can be unloaded from the UI.""" - - _, _, _, entry = await _create_entries(hass) - - assert await hass.config_entries.async_unload(entry.entry_id) - - -async def _create_entries( - hass: HomeAssistant, client=None -) -> tuple[RegistryEntry, DeviceEntry, ClientMock]: - client = ClientMock() if client is None else client - - with patch("homeassistant.components.twinkly.Twinkly", return_value=client): - config_entry = MockConfigEntry( - domain=TWINKLY_DOMAIN, - data={ - CONF_HOST: client, - CONF_ID: client.id, - CONF_NAME: TEST_NAME_ORIGINAL, - CONF_MODEL: TEST_MODEL, - }, - unique_id=TEST_MAC, - minor_version=2, - ) - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - device_registry = dr.async_get(hass) - entity_registry = er.async_get(hass) - - entity_id = entity_registry.async_get_entity_id("light", TWINKLY_DOMAIN, TEST_MAC) - entity_entry = entity_registry.async_get(entity_id) - device = device_registry.async_get_device(identifiers={(TWINKLY_DOMAIN, TEST_MAC)}) - - assert entity_entry is not None - assert device is not None - - return entity_entry, device, client, config_entry From 88eb550ec165177d358685b1cfea125052ba1e03 Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Sun, 22 Dec 2024 12:01:13 +0100 Subject: [PATCH 0999/1198] Update quality-scale status for enphase_envoy config_flow missing data descriptions (#133726) 
--- homeassistant/components/enphase_envoy/quality_scale.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/homeassistant/components/enphase_envoy/quality_scale.yaml b/homeassistant/components/enphase_envoy/quality_scale.yaml index 171c07e9474..210491c031c 100644 --- a/homeassistant/components/enphase_envoy/quality_scale.yaml +++ b/homeassistant/components/enphase_envoy/quality_scale.yaml @@ -17,7 +17,6 @@ rules: status: todo comment: | - async_step_reaut L160: I believe that the unique is already set when starting a reauth flow - - The config flow is missing data descriptions for the other fields dependency-transparency: done docs-actions: status: done From cdd73a5c5a0a0573182ce5e72294ee3551d5d4a0 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sun, 22 Dec 2024 12:16:08 +0100 Subject: [PATCH 1000/1198] Set parallel updates for Peblar Rocksolid EV Chargers integration (#133786) --- homeassistant/components/peblar/binary_sensor.py | 2 ++ homeassistant/components/peblar/button.py | 2 ++ homeassistant/components/peblar/number.py | 2 ++ homeassistant/components/peblar/quality_scale.yaml | 2 +- homeassistant/components/peblar/select.py | 2 ++ homeassistant/components/peblar/sensor.py | 2 ++ homeassistant/components/peblar/switch.py | 2 ++ homeassistant/components/peblar/update.py | 2 ++ 8 files changed, 15 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/peblar/binary_sensor.py b/homeassistant/components/peblar/binary_sensor.py index f28a02422a9..5b65a8e976d 100644 --- a/homeassistant/components/peblar/binary_sensor.py +++ b/homeassistant/components/peblar/binary_sensor.py @@ -19,6 +19,8 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN from .coordinator import PeblarConfigEntry, PeblarData, PeblarDataUpdateCoordinator +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class PeblarBinarySensorEntityDescription(BinarySensorEntityDescription): diff --git a/homeassistant/components/peblar/button.py b/homeassistant/components/peblar/button.py index 0b0f12be1b3..68d871c8298 100644 --- a/homeassistant/components/peblar/button.py +++ b/homeassistant/components/peblar/button.py @@ -22,6 +22,8 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN from .coordinator import PeblarConfigEntry, PeblarUserConfigurationDataUpdateCoordinator +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class PeblarButtonEntityDescription(ButtonEntityDescription): diff --git a/homeassistant/components/peblar/number.py b/homeassistant/components/peblar/number.py index a5e926714d9..32647199b8b 100644 --- a/homeassistant/components/peblar/number.py +++ b/homeassistant/components/peblar/number.py @@ -27,6 +27,8 @@ from .coordinator import ( PeblarRuntimeData, ) +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class PeblarNumberEntityDescription(NumberEntityDescription): diff --git a/homeassistant/components/peblar/quality_scale.yaml b/homeassistant/components/peblar/quality_scale.yaml index 78ec3718caf..aea83da25ab 100644 --- a/homeassistant/components/peblar/quality_scale.yaml +++ b/homeassistant/components/peblar/quality_scale.yaml @@ -38,7 +38,7 @@ rules: entity-unavailable: done integration-owner: done log-when-unavailable: done - parallel-updates: todo + parallel-updates: done reauthentication-flow: done test-coverage: todo # Gold diff --git a/homeassistant/components/peblar/select.py b/homeassistant/components/peblar/select.py index 95a87248804..d2c74f482c6 100644 --- 
a/homeassistant/components/peblar/select.py +++ b/homeassistant/components/peblar/select.py @@ -18,6 +18,8 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN from .coordinator import PeblarConfigEntry, PeblarUserConfigurationDataUpdateCoordinator +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class PeblarSelectEntityDescription(SelectEntityDescription): diff --git a/homeassistant/components/peblar/sensor.py b/homeassistant/components/peblar/sensor.py index 233417051cb..df8cac13bcc 100644 --- a/homeassistant/components/peblar/sensor.py +++ b/homeassistant/components/peblar/sensor.py @@ -34,6 +34,8 @@ from .const import ( ) from .coordinator import PeblarConfigEntry, PeblarData, PeblarDataUpdateCoordinator +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class PeblarSensorDescription(SensorEntityDescription): diff --git a/homeassistant/components/peblar/switch.py b/homeassistant/components/peblar/switch.py index 9a6788a62be..3b7ab591508 100644 --- a/homeassistant/components/peblar/switch.py +++ b/homeassistant/components/peblar/switch.py @@ -23,6 +23,8 @@ from .coordinator import ( PeblarRuntimeData, ) +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class PeblarSwitchEntityDescription(SwitchEntityDescription): diff --git a/homeassistant/components/peblar/update.py b/homeassistant/components/peblar/update.py index cc0f1ee0c79..37f20722b98 100644 --- a/homeassistant/components/peblar/update.py +++ b/homeassistant/components/peblar/update.py @@ -22,6 +22,8 @@ from .coordinator import ( PeblarVersionInformation, ) +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class PeblarUpdateEntityDescription(UpdateEntityDescription): From 56b58cec3e26f3485026f47bb45ec744e62636d0 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Sun, 22 Dec 2024 12:17:09 +0100 Subject: [PATCH 1001/1198] Fix errors in HitachiDHW in Overkiz (#133765) * Small changes to fix errors in DHW * Update * Bugfix in float/int mistake * Fix typing * Fix code style * Fix mypy --- .../components/overkiz/water_heater/hitachi_dhw.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/overkiz/water_heater/hitachi_dhw.py b/homeassistant/components/overkiz/water_heater/hitachi_dhw.py index dc2a93a8d2f..988c66afdb0 100644 --- a/homeassistant/components/overkiz/water_heater/hitachi_dhw.py +++ b/homeassistant/components/overkiz/water_heater/hitachi_dhw.py @@ -48,8 +48,10 @@ class HitachiDHW(OverkizEntity, WaterHeaterEntity): def current_temperature(self) -> float | None: """Return the current temperature.""" current_temperature = self.device.states[OverkizState.CORE_DHW_TEMPERATURE] - if current_temperature: - return current_temperature.value_as_float + + if current_temperature and current_temperature.value_as_int: + return float(current_temperature.value_as_int) + return None @property @@ -58,13 +60,14 @@ class HitachiDHW(OverkizEntity, WaterHeaterEntity): target_temperature = self.device.states[ OverkizState.MODBUS_CONTROL_DHW_SETTING_TEMPERATURE ] - if target_temperature: - return target_temperature.value_as_float + + if target_temperature and target_temperature.value_as_int: + return float(target_temperature.value_as_int) + return None async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" - await self.executor.async_execute_command( OverkizCommand.SET_CONTROL_DHW_SETTING_TEMPERATURE, int(kwargs[ATTR_TEMPERATURE]), From 
5ef3901b440503941f73b1e71df718e3c9b2d60c Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sun, 22 Dec 2024 13:32:15 +0100 Subject: [PATCH 1002/1198] Add base entity for Niko Home Control (#133744) --- .../components/niko_home_control/entity.py | 50 +++++++++++++++++++ .../components/niko_home_control/light.py | 25 +++------- 2 files changed, 58 insertions(+), 17 deletions(-) create mode 100644 homeassistant/components/niko_home_control/entity.py diff --git a/homeassistant/components/niko_home_control/entity.py b/homeassistant/components/niko_home_control/entity.py new file mode 100644 index 00000000000..fe14e09d957 --- /dev/null +++ b/homeassistant/components/niko_home_control/entity.py @@ -0,0 +1,50 @@ +"""Base class for Niko Home Control entities.""" + +from abc import abstractmethod + +from nhc.action import NHCAction +from nhc.controller import NHCController + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity + +from .const import DOMAIN + + +class NikoHomeControlEntity(Entity): + """Base class for Niko Home Control entities.""" + + _attr_has_entity_name = True + _attr_should_poll = False + + def __init__( + self, action: NHCAction, controller: NHCController, unique_id: str + ) -> None: + """Set up the Niko Home Control entity.""" + self._controller = controller + self._action = action + self._attr_unique_id = unique_id = f"{unique_id}-{action.id}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, unique_id)}, + manufacturer="Niko", + name=action.name, + suggested_area=action.suggested_area, + ) + self.update_state() + + async def async_added_to_hass(self) -> None: + """Subscribe to updates.""" + self.async_on_remove( + self._controller.register_callback( + self._action.id, self.async_update_callback + ) + ) + + async def async_update_callback(self, state: int) -> None: + """Handle updates from the controller.""" + self.update_state() + self.async_write_ha_state() + + @abstractmethod + def update_state(self) -> None: + """Update the state of the entity.""" diff --git a/homeassistant/components/niko_home_control/light.py b/homeassistant/components/niko_home_control/light.py index 29b952fcb77..c9902cbf11b 100644 --- a/homeassistant/components/niko_home_control/light.py +++ b/homeassistant/components/niko_home_control/light.py @@ -25,6 +25,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . 
import NHCController, NikoHomeControlConfigEntry from .const import DOMAIN +from .entity import NikoHomeControlEntity # delete after 2025.7.0 PLATFORM_SCHEMA = LIGHT_PLATFORM_SCHEMA.extend({vol.Required(CONF_HOST): cv.string}) @@ -91,33 +92,23 @@ async def async_setup_entry( ) -class NikoHomeControlLight(LightEntity): +class NikoHomeControlLight(NikoHomeControlEntity, LightEntity): """Representation of a Niko Light.""" + _attr_name = None + _action: NHCLight + def __init__( self, action: NHCLight, controller: NHCController, unique_id: str ) -> None: """Set up the Niko Home Control light platform.""" - self._controller = controller - self._action = action - self._attr_unique_id = f"{unique_id}-{action.id}" - self._attr_name = action.name - self._attr_is_on = action.is_on + super().__init__(action, controller, unique_id) self._attr_color_mode = ColorMode.ONOFF self._attr_supported_color_modes = {ColorMode.ONOFF} - self._attr_should_poll = False if action.is_dimmable: self._attr_color_mode = ColorMode.BRIGHTNESS self._attr_supported_color_modes = {ColorMode.BRIGHTNESS} - async def async_added_to_hass(self) -> None: - """Subscribe to updates.""" - self.async_on_remove( - self._controller.register_callback( - self._action.id, self.async_update_callback - ) - ) - def turn_on(self, **kwargs: Any) -> None: """Instruct the light to turn on.""" self._action.turn_on(kwargs.get(ATTR_BRIGHTNESS, 255) / 2.55) @@ -126,9 +117,9 @@ class NikoHomeControlLight(LightEntity): """Instruct the light to turn off.""" self._action.turn_off() - async def async_update_callback(self, state: int) -> None: + def update_state(self) -> None: """Handle updates from the controller.""" + state = self._action.state self._attr_is_on = state > 0 if brightness_supported(self.supported_color_modes): self._attr_brightness = round(state * 2.55) - self.async_write_ha_state() From 1e68ae1bb8f0811de4402e65f631dc0e1c989171 Mon Sep 17 00:00:00 2001 From: PierreAronnax Date: Sun, 22 Dec 2024 13:35:36 +0100 Subject: [PATCH 1003/1198] Remove myself from govee_ble codeowners (#133790) --- CODEOWNERS | 4 ++-- homeassistant/components/govee_ble/manifest.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CODEOWNERS b/CODEOWNERS index 0e2934b1f49..8ab0994cdac 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -578,8 +578,8 @@ build.json @home-assistant/supervisor /tests/components/google_tasks/ @allenporter /homeassistant/components/google_travel_time/ @eifinger /tests/components/google_travel_time/ @eifinger -/homeassistant/components/govee_ble/ @bdraco @PierreAronnax -/tests/components/govee_ble/ @bdraco @PierreAronnax +/homeassistant/components/govee_ble/ @bdraco +/tests/components/govee_ble/ @bdraco /homeassistant/components/govee_light_local/ @Galorhallen /tests/components/govee_light_local/ @Galorhallen /homeassistant/components/gpsd/ @fabaff @jrieger diff --git a/homeassistant/components/govee_ble/manifest.json b/homeassistant/components/govee_ble/manifest.json index d9827e9155c..39a66ad36a7 100644 --- a/homeassistant/components/govee_ble/manifest.json +++ b/homeassistant/components/govee_ble/manifest.json @@ -122,7 +122,7 @@ "connectable": false } ], - "codeowners": ["@bdraco", "@PierreAronnax"], + "codeowners": ["@bdraco"], "config_flow": true, "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/govee_ble", From 075f95b9c4e653f3ae1730a145ed03086fa1a473 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sun, 22 Dec 2024 14:01:31 +0100 Subject: [PATCH 1004/1198] Add base 
entity to Peblar Rocksolid EV Chargers integration (#133794) --- homeassistant/components/peblar/__init__.py | 23 +------- .../components/peblar/binary_sensor.py | 30 +++------- homeassistant/components/peblar/button.py | 25 ++------- .../components/peblar/coordinator.py | 2 +- .../components/peblar/diagnostics.py | 2 +- homeassistant/components/peblar/entity.py | 55 +++++++++++++++++++ homeassistant/components/peblar/number.py | 22 +++----- homeassistant/components/peblar/select.py | 27 ++------- homeassistant/components/peblar/sensor.py | 31 +++-------- homeassistant/components/peblar/switch.py | 29 +++------- homeassistant/components/peblar/update.py | 31 +++-------- .../peblar/snapshots/test_binary_sensor.ambr | 4 +- .../peblar/snapshots/test_select.ambr | 2 +- .../peblar/snapshots/test_switch.ambr | 2 +- 14 files changed, 111 insertions(+), 174 deletions(-) create mode 100644 homeassistant/components/peblar/entity.py diff --git a/homeassistant/components/peblar/__init__.py b/homeassistant/components/peblar/__init__.py index c185a0e2550..bf1b3ef7e66 100644 --- a/homeassistant/components/peblar/__init__.py +++ b/homeassistant/components/peblar/__init__.py @@ -16,10 +16,8 @@ from peblar import ( from homeassistant.const import CONF_HOST, CONF_PASSWORD, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import device_registry as dr from homeassistant.helpers.aiohttp_client import async_create_clientsession -from .const import DOMAIN from .coordinator import ( PeblarConfigEntry, PeblarDataUpdateCoordinator, @@ -76,29 +74,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: PeblarConfigEntry) -> bo entry.runtime_data = PeblarRuntimeData( data_coordinator=meter_coordinator, system_information=system_information, - user_configuraton_coordinator=user_configuration_coordinator, + user_configuration_coordinator=user_configuration_coordinator, version_coordinator=version_coordinator, ) - # Peblar is a single device integration. Setting up the device directly - # during setup. This way we only have to reference it in all entities. 
- device_registry = dr.async_get(hass) - device_registry.async_get_or_create( - config_entry_id=entry.entry_id, - configuration_url=f"http://{entry.data[CONF_HOST]}", - connections={ - (dr.CONNECTION_NETWORK_MAC, system_information.ethernet_mac_address), - (dr.CONNECTION_NETWORK_MAC, system_information.wlan_mac_address), - }, - identifiers={(DOMAIN, system_information.product_serial_number)}, - manufacturer=system_information.product_vendor_name, - model_id=system_information.product_number, - model=system_information.product_model_name, - name="Peblar EV Charger", - serial_number=system_information.product_serial_number, - sw_version=version_coordinator.data.current.firmware, - ) - # Forward the setup to the platforms await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/peblar/binary_sensor.py b/homeassistant/components/peblar/binary_sensor.py index 5b65a8e976d..e8e5095f050 100644 --- a/homeassistant/components/peblar/binary_sensor.py +++ b/homeassistant/components/peblar/binary_sensor.py @@ -12,12 +12,10 @@ from homeassistant.components.binary_sensor import ( ) from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN from .coordinator import PeblarConfigEntry, PeblarData, PeblarDataUpdateCoordinator +from .entity import PeblarEntity PARALLEL_UPDATES = 0 @@ -56,35 +54,23 @@ async def async_setup_entry( ) -> None: """Set up Peblar binary sensor based on a config entry.""" async_add_entities( - PeblarBinarySensorEntity(entry=entry, description=description) + PeblarBinarySensorEntity( + entry=entry, + coordinator=entry.runtime_data.data_coordinator, + description=description, + ) for description in DESCRIPTIONS ) class PeblarBinarySensorEntity( - CoordinatorEntity[PeblarDataUpdateCoordinator], BinarySensorEntity + PeblarEntity[PeblarDataUpdateCoordinator], + BinarySensorEntity, ): """Defines a Peblar binary sensor entity.""" entity_description: PeblarBinarySensorEntityDescription - _attr_has_entity_name = True - - def __init__( - self, - entry: PeblarConfigEntry, - description: PeblarBinarySensorEntityDescription, - ) -> None: - """Initialize the binary sensor entity.""" - super().__init__(entry.runtime_data.data_coordinator) - self.entity_description = description - self._attr_unique_id = f"{entry.unique_id}-{description.key}" - self._attr_device_info = DeviceInfo( - identifiers={ - (DOMAIN, entry.runtime_data.system_information.product_serial_number) - }, - ) - @property def is_on(self) -> bool: """Return state of the binary sensor.""" diff --git a/homeassistant/components/peblar/button.py b/homeassistant/components/peblar/button.py index 68d871c8298..66411daa228 100644 --- a/homeassistant/components/peblar/button.py +++ b/homeassistant/components/peblar/button.py @@ -15,12 +15,10 @@ from homeassistant.components.button import ( ) from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN from .coordinator import PeblarConfigEntry, PeblarUserConfigurationDataUpdateCoordinator +from .entity import PeblarEntity 
PARALLEL_UPDATES = 1 @@ -59,6 +57,7 @@ async def async_setup_entry( async_add_entities( PeblarButtonEntity( entry=entry, + coordinator=entry.runtime_data.user_configuration_coordinator, description=description, ) for description in DESCRIPTIONS @@ -66,29 +65,13 @@ async def async_setup_entry( class PeblarButtonEntity( - CoordinatorEntity[PeblarUserConfigurationDataUpdateCoordinator], ButtonEntity + PeblarEntity[PeblarUserConfigurationDataUpdateCoordinator], + ButtonEntity, ): """Defines an Peblar button.""" entity_description: PeblarButtonEntityDescription - _attr_has_entity_name = True - - def __init__( - self, - entry: PeblarConfigEntry, - description: PeblarButtonEntityDescription, - ) -> None: - """Initialize the button entity.""" - super().__init__(coordinator=entry.runtime_data.user_configuraton_coordinator) - self.entity_description = description - self._attr_unique_id = f"{entry.unique_id}_{description.key}" - self._attr_device_info = DeviceInfo( - identifiers={ - (DOMAIN, entry.runtime_data.system_information.product_serial_number) - }, - ) - async def async_press(self) -> None: """Trigger button press on the Peblar device.""" await self.entity_description.press_fn(self.coordinator.peblar) diff --git a/homeassistant/components/peblar/coordinator.py b/homeassistant/components/peblar/coordinator.py index e2b16e1e62a..4afc544cc1d 100644 --- a/homeassistant/components/peblar/coordinator.py +++ b/homeassistant/components/peblar/coordinator.py @@ -30,7 +30,7 @@ class PeblarRuntimeData: data_coordinator: PeblarDataUpdateCoordinator system_information: PeblarSystemInformation - user_configuraton_coordinator: PeblarUserConfigurationDataUpdateCoordinator + user_configuration_coordinator: PeblarUserConfigurationDataUpdateCoordinator version_coordinator: PeblarVersionDataUpdateCoordinator diff --git a/homeassistant/components/peblar/diagnostics.py b/homeassistant/components/peblar/diagnostics.py index 32716148c3f..a8c7423f79a 100644 --- a/homeassistant/components/peblar/diagnostics.py +++ b/homeassistant/components/peblar/diagnostics.py @@ -15,7 +15,7 @@ async def async_get_config_entry_diagnostics( """Return diagnostics for a config entry.""" return { "system_information": entry.runtime_data.system_information.to_dict(), - "user_configuration": entry.runtime_data.user_configuraton_coordinator.data.to_dict(), + "user_configuration": entry.runtime_data.user_configuration_coordinator.data.to_dict(), "ev": entry.runtime_data.data_coordinator.data.ev.to_dict(), "meter": entry.runtime_data.data_coordinator.data.meter.to_dict(), "system": entry.runtime_data.data_coordinator.data.system.to_dict(), diff --git a/homeassistant/components/peblar/entity.py b/homeassistant/components/peblar/entity.py new file mode 100644 index 00000000000..ecfd3e8232b --- /dev/null +++ b/homeassistant/components/peblar/entity.py @@ -0,0 +1,55 @@ +"""Base entity for the Peblar integration.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.const import CONF_HOST +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import ( + CoordinatorEntity, + DataUpdateCoordinator, +) + +from .const import DOMAIN +from .coordinator import PeblarConfigEntry + + +class PeblarEntity[_DataUpdateCoordinatorT: DataUpdateCoordinator[Any]]( + CoordinatorEntity[_DataUpdateCoordinatorT] +): + """Defines a Peblar entity.""" + + 
_attr_has_entity_name = True + + def __init__( + self, + *, + entry: PeblarConfigEntry, + coordinator: _DataUpdateCoordinatorT, + description: EntityDescription, + ) -> None: + """Initialize the Peblar entity.""" + super().__init__(coordinator=coordinator) + self.entity_description = description + self._attr_unique_id = f"{entry.unique_id}_{description.key}" + + system_information = entry.runtime_data.system_information + self._attr_device_info = DeviceInfo( + configuration_url=f"http://{entry.data[CONF_HOST]}", + connections={ + (dr.CONNECTION_NETWORK_MAC, system_information.ethernet_mac_address), + (dr.CONNECTION_NETWORK_MAC, system_information.wlan_mac_address), + }, + identifiers={ + (DOMAIN, entry.runtime_data.system_information.product_serial_number) + }, + manufacturer=system_information.product_vendor_name, + model=system_information.product_model_name, + model_id=system_information.product_number, + name="Peblar EV Charger", + serial_number=system_information.product_serial_number, + sw_version=entry.runtime_data.version_coordinator.data.current.firmware, + ) diff --git a/homeassistant/components/peblar/number.py b/homeassistant/components/peblar/number.py index 32647199b8b..d17ff09eb94 100644 --- a/homeassistant/components/peblar/number.py +++ b/homeassistant/components/peblar/number.py @@ -15,17 +15,15 @@ from homeassistant.components.number import ( ) from homeassistant.const import EntityCategory, UnitOfElectricCurrent from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN from .coordinator import ( PeblarConfigEntry, PeblarData, PeblarDataUpdateCoordinator, PeblarRuntimeData, ) +from .entity import PeblarEntity PARALLEL_UPDATES = 1 @@ -64,33 +62,29 @@ async def async_setup_entry( async_add_entities( PeblarNumberEntity( entry=entry, + coordinator=entry.runtime_data.data_coordinator, description=description, ) for description in DESCRIPTIONS ) -class PeblarNumberEntity(CoordinatorEntity[PeblarDataUpdateCoordinator], NumberEntity): +class PeblarNumberEntity( + PeblarEntity[PeblarDataUpdateCoordinator], + NumberEntity, +): """Defines a Peblar number.""" entity_description: PeblarNumberEntityDescription - _attr_has_entity_name = True - def __init__( self, entry: PeblarConfigEntry, + coordinator: PeblarDataUpdateCoordinator, description: PeblarNumberEntityDescription, ) -> None: """Initialize the Peblar entity.""" - super().__init__(entry.runtime_data.data_coordinator) - self.entity_description = description - self._attr_unique_id = f"{entry.unique_id}_{description.key}" - self._attr_device_info = DeviceInfo( - identifiers={ - (DOMAIN, entry.runtime_data.system_information.product_serial_number) - }, - ) + super().__init__(entry=entry, coordinator=coordinator, description=description) self._attr_native_max_value = description.native_max_value_fn( entry.runtime_data ) diff --git a/homeassistant/components/peblar/select.py b/homeassistant/components/peblar/select.py index d2c74f482c6..e9c7da77bec 100644 --- a/homeassistant/components/peblar/select.py +++ b/homeassistant/components/peblar/select.py @@ -11,12 +11,10 @@ from peblar import Peblar, PeblarUserConfiguration, SmartChargingMode from homeassistant.components.select import SelectEntity, SelectEntityDescription from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from 
homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN from .coordinator import PeblarConfigEntry, PeblarUserConfigurationDataUpdateCoordinator +from .entity import PeblarEntity PARALLEL_UPDATES = 1 @@ -56,6 +54,7 @@ async def async_setup_entry( async_add_entities( PeblarSelectEntity( entry=entry, + coordinator=entry.runtime_data.user_configuration_coordinator, description=description, ) for description in DESCRIPTIONS @@ -63,29 +62,13 @@ async def async_setup_entry( class PeblarSelectEntity( - CoordinatorEntity[PeblarUserConfigurationDataUpdateCoordinator], SelectEntity + PeblarEntity[PeblarUserConfigurationDataUpdateCoordinator], + SelectEntity, ): - """Defines a peblar select entity.""" + """Defines a Peblar select entity.""" entity_description: PeblarSelectEntityDescription - _attr_has_entity_name = True - - def __init__( - self, - entry: PeblarConfigEntry, - description: PeblarSelectEntityDescription, - ) -> None: - """Initialize the select entity.""" - super().__init__(entry.runtime_data.user_configuraton_coordinator) - self.entity_description = description - self._attr_unique_id = f"{entry.unique_id}-{description.key}" - self._attr_device_info = DeviceInfo( - identifiers={ - (DOMAIN, entry.runtime_data.system_information.product_serial_number) - }, - ) - @property def current_option(self) -> str | None: """Return the selected entity option to represent the entity state.""" diff --git a/homeassistant/components/peblar/sensor.py b/homeassistant/components/peblar/sensor.py index df8cac13bcc..e655253d75c 100644 --- a/homeassistant/components/peblar/sensor.py +++ b/homeassistant/components/peblar/sensor.py @@ -22,17 +22,15 @@ from homeassistant.const import ( UnitOfPower, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util.dt import utcnow from .const import ( - DOMAIN, PEBLAR_CHARGE_LIMITER_TO_HOME_ASSISTANT, PEBLAR_CP_STATE_TO_HOME_ASSISTANT, ) from .coordinator import PeblarConfigEntry, PeblarData, PeblarDataUpdateCoordinator +from .entity import PeblarEntity PARALLEL_UPDATES = 0 @@ -237,34 +235,21 @@ async def async_setup_entry( ) -> None: """Set up Peblar sensors based on a config entry.""" async_add_entities( - PeblarSensorEntity(entry, description) + PeblarSensorEntity( + entry=entry, + coordinator=entry.runtime_data.data_coordinator, + description=description, + ) for description in DESCRIPTIONS - if description.has_fn(entry.runtime_data.user_configuraton_coordinator.data) + if description.has_fn(entry.runtime_data.user_configuration_coordinator.data) ) -class PeblarSensorEntity(CoordinatorEntity[PeblarDataUpdateCoordinator], SensorEntity): +class PeblarSensorEntity(PeblarEntity[PeblarDataUpdateCoordinator], SensorEntity): """Defines a Peblar sensor.""" entity_description: PeblarSensorDescription - _attr_has_entity_name = True - - def __init__( - self, - entry: PeblarConfigEntry, - description: PeblarSensorDescription, - ) -> None: - """Initialize the Peblar entity.""" - super().__init__(entry.runtime_data.data_coordinator) - self.entity_description = description - self._attr_unique_id = f"{entry.unique_id}_{description.key}" - self._attr_device_info = DeviceInfo( - identifiers={ - (DOMAIN, 
entry.runtime_data.system_information.product_serial_number) - }, - ) - @property def native_value(self) -> datetime | int | str | None: """Return the state of the sensor.""" diff --git a/homeassistant/components/peblar/switch.py b/homeassistant/components/peblar/switch.py index 3b7ab591508..88f52d01e3a 100644 --- a/homeassistant/components/peblar/switch.py +++ b/homeassistant/components/peblar/switch.py @@ -11,17 +11,15 @@ from peblar import PeblarApi from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN from .coordinator import ( PeblarConfigEntry, PeblarData, PeblarDataUpdateCoordinator, PeblarRuntimeData, ) +from .entity import PeblarEntity PARALLEL_UPDATES = 1 @@ -42,7 +40,7 @@ DESCRIPTIONS = [ entity_category=EntityCategory.CONFIG, has_fn=lambda x: ( x.data_coordinator.data.system.force_single_phase_allowed - and x.user_configuraton_coordinator.data.connected_phases > 1 + and x.user_configuration_coordinator.data.connected_phases > 1 ), is_on_fn=lambda x: x.ev.force_single_phase, set_fn=lambda x, on: x.ev_interface(force_single_phase=on), @@ -59,6 +57,7 @@ async def async_setup_entry( async_add_entities( PeblarSwitchEntity( entry=entry, + coordinator=entry.runtime_data.data_coordinator, description=description, ) for description in DESCRIPTIONS @@ -66,28 +65,14 @@ async def async_setup_entry( ) -class PeblarSwitchEntity(CoordinatorEntity[PeblarDataUpdateCoordinator], SwitchEntity): +class PeblarSwitchEntity( + PeblarEntity[PeblarDataUpdateCoordinator], + SwitchEntity, +): """Defines a Peblar switch entity.""" entity_description: PeblarSwitchEntityDescription - _attr_has_entity_name = True - - def __init__( - self, - entry: PeblarConfigEntry, - description: PeblarSwitchEntityDescription, - ) -> None: - """Initialize the select entity.""" - super().__init__(entry.runtime_data.data_coordinator) - self.entity_description = description - self._attr_unique_id = f"{entry.unique_id}-{description.key}" - self._attr_device_info = DeviceInfo( - identifiers={ - (DOMAIN, entry.runtime_data.system_information.product_serial_number) - }, - ) - @property def is_on(self) -> bool: """Return state of the switch.""" diff --git a/homeassistant/components/peblar/update.py b/homeassistant/components/peblar/update.py index 37f20722b98..67ce30a89a6 100644 --- a/homeassistant/components/peblar/update.py +++ b/homeassistant/components/peblar/update.py @@ -11,16 +11,14 @@ from homeassistant.components.update import ( UpdateEntityDescription, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN from .coordinator import ( PeblarConfigEntry, PeblarVersionDataUpdateCoordinator, PeblarVersionInformation, ) +from .entity import PeblarEntity PARALLEL_UPDATES = 1 @@ -56,34 +54,23 @@ async def async_setup_entry( ) -> None: """Set up Peblar update based on a config entry.""" async_add_entities( - PeblarUpdateEntity(entry, description) for description in DESCRIPTIONS + PeblarUpdateEntity( + entry=entry, + coordinator=entry.runtime_data.version_coordinator, 
+ description=description, + ) + for description in DESCRIPTIONS ) class PeblarUpdateEntity( - CoordinatorEntity[PeblarVersionDataUpdateCoordinator], UpdateEntity + PeblarEntity[PeblarVersionDataUpdateCoordinator], + UpdateEntity, ): """Defines a Peblar update entity.""" entity_description: PeblarUpdateEntityDescription - _attr_has_entity_name = True - - def __init__( - self, - entry: PeblarConfigEntry, - description: PeblarUpdateEntityDescription, - ) -> None: - """Initialize the update entity.""" - super().__init__(entry.runtime_data.version_coordinator) - self.entity_description = description - self._attr_unique_id = f"{entry.unique_id}_{description.key}" - self._attr_device_info = DeviceInfo( - identifiers={ - (DOMAIN, entry.runtime_data.system_information.product_serial_number) - }, - ) - @property def installed_version(self) -> str | None: """Version currently installed and in use.""" diff --git a/tests/components/peblar/snapshots/test_binary_sensor.ambr b/tests/components/peblar/snapshots/test_binary_sensor.ambr index 5dd008dd320..72c3ac78a12 100644 --- a/tests/components/peblar/snapshots/test_binary_sensor.ambr +++ b/tests/components/peblar/snapshots/test_binary_sensor.ambr @@ -28,7 +28,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_error_codes', - 'unique_id': '23-45-A4O-MOF-active_error_codes', + 'unique_id': '23-45-A4O-MOF_active_error_codes', 'unit_of_measurement': None, }) # --- @@ -75,7 +75,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_warning_codes', - 'unique_id': '23-45-A4O-MOF-active_warning_codes', + 'unique_id': '23-45-A4O-MOF_active_warning_codes', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/peblar/snapshots/test_select.ambr b/tests/components/peblar/snapshots/test_select.ambr index 9f0852d7cf4..62e09325601 100644 --- a/tests/components/peblar/snapshots/test_select.ambr +++ b/tests/components/peblar/snapshots/test_select.ambr @@ -36,7 +36,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'smart_charging', - 'unique_id': '23-45-A4O-MOF-smart_charging', + 'unique_id': '23-45-A4O-MOF_smart_charging', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/peblar/snapshots/test_switch.ambr b/tests/components/peblar/snapshots/test_switch.ambr index f4fc768030f..53829278593 100644 --- a/tests/components/peblar/snapshots/test_switch.ambr +++ b/tests/components/peblar/snapshots/test_switch.ambr @@ -28,7 +28,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'force_single_phase', - 'unique_id': '23-45-A4O-MOF-force_single_phase', + 'unique_id': '23-45-A4O-MOF_force_single_phase', 'unit_of_measurement': None, }) # --- From 959f20c523705f71bf72640bd2987c03f03a64bb Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sun, 22 Dec 2024 14:23:12 +0100 Subject: [PATCH 1005/1198] Add reconfigure flow to Peblar Rocksolid EV Chargers integration (#133785) --- .../components/peblar/config_flow.py | 51 ++++++++ .../components/peblar/quality_scale.yaml | 2 +- homeassistant/components/peblar/strings.json | 15 ++- tests/components/peblar/test_config_flow.py | 109 ++++++++++++++++++ 4 files changed, 175 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/peblar/config_flow.py b/homeassistant/components/peblar/config_flow.py index 809cb13746e..29bf456b7ea 100644 --- a/homeassistant/components/peblar/config_flow.py +++ b/homeassistant/components/peblar/config_flow.py @@ -76,6 +76,57 @@ class 
PeblarFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of a Peblar device.""" + errors = {} + reconfigure_entry = self._get_reconfigure_entry() + + if user_input is not None: + peblar = Peblar( + host=user_input[CONF_HOST], + session=async_create_clientsession( + self.hass, cookie_jar=CookieJar(unsafe=True) + ), + ) + try: + await peblar.login(password=user_input[CONF_PASSWORD]) + info = await peblar.system_information() + except PeblarAuthenticationError: + errors[CONF_PASSWORD] = "invalid_auth" + except PeblarConnectionError: + errors[CONF_HOST] = "cannot_connect" + except Exception: # noqa: BLE001 + LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + await self.async_set_unique_id(info.product_serial_number) + self._abort_if_unique_id_mismatch(reason="different_device") + return self.async_update_reload_and_abort( + reconfigure_entry, + data_updates=user_input, + ) + + host = reconfigure_entry.data[CONF_HOST] + if user_input is not None: + host = user_input[CONF_HOST] + + return self.async_show_form( + step_id="reconfigure", + data_schema=vol.Schema( + { + vol.Required(CONF_HOST, default=host): TextSelector( + TextSelectorConfig(autocomplete="off") + ), + vol.Required(CONF_PASSWORD): TextSelector( + TextSelectorConfig(type=TextSelectorType.PASSWORD) + ), + } + ), + errors=errors, + ) + async def async_step_zeroconf( self, discovery_info: zeroconf.ZeroconfServiceInfo ) -> ConfigFlowResult: diff --git a/homeassistant/components/peblar/quality_scale.yaml b/homeassistant/components/peblar/quality_scale.yaml index aea83da25ab..9de0031373f 100644 --- a/homeassistant/components/peblar/quality_scale.yaml +++ b/homeassistant/components/peblar/quality_scale.yaml @@ -66,7 +66,7 @@ rules: comment: | The coordinator needs translation when the update failed. icon-translations: done - reconfiguration-flow: todo + reconfiguration-flow: done repair-issues: status: exempt comment: | diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index 0cce7ed8191..f09a156dd1e 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -2,8 +2,10 @@ "config": { "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "different_device": "The information entered is from a different Peblar EV charger.", "no_serial_number": "The discovered Peblar device did not provide a serial number.", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", @@ -20,6 +22,17 @@ }, "description": "Reauthenticate with your Peblar RV charger.\n\nTo do so, you will need to enter your new password you use to log into Peblar's device web interface." 
}, + "reconfigure": { + "data": { + "host": "[%key:common::config_flow::data::host%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "host": "[%key:component::peblar::config::step::user::data_description::host%]", + "password": "[%key:component::peblar::config::step::user::data_description::password%]" + }, + "description": "Reconfigure your Peblar EV charger.\n\nThis allows you to change the IP address of your Peblar charger and the password you use to log into the Peblar device' web interface." + }, "user": { "data": { "host": "[%key:common::config_flow::data::host%]", diff --git a/tests/components/peblar/test_config_flow.py b/tests/components/peblar/test_config_flow.py index a4a461b6bba..a97e8d3b564 100644 --- a/tests/components/peblar/test_config_flow.py +++ b/tests/components/peblar/test_config_flow.py @@ -117,6 +117,115 @@ async def test_user_flow_already_configured( assert result["reason"] == "already_configured" +@pytest.mark.usefixtures("mock_peblar") +async def test_reconfigure_flow( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test the full happy path reconfigure flow from start to finish.""" + mock_config_entry.add_to_hass(hass) + + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + assert mock_config_entry.data == { + CONF_HOST: "127.0.0.127", + CONF_PASSWORD: "OMGSPIDERS", + } + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "127.0.0.1", + CONF_PASSWORD: "OMGPUPPIES", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + assert mock_config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_PASSWORD: "OMGPUPPIES", + } + + +@pytest.mark.usefixtures("mock_peblar") +async def test_reconfigure_to_different_device( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfiguring to a different device doesn't work.""" + mock_config_entry.add_to_hass(hass) + + # Change the unique ID of the entry, so we have a mismatch + hass.config_entries.async_update_entry(mock_config_entry, unique_id="mismatch") + + result = await mock_config_entry.start_reconfigure_flow(hass) + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "127.0.0.1", + CONF_PASSWORD: "OMGPUPPIES", + }, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "different_device" + + +@pytest.mark.parametrize( + ("side_effect", "expected_error"), + [ + (PeblarConnectionError, {CONF_HOST: "cannot_connect"}), + (PeblarAuthenticationError, {CONF_PASSWORD: "invalid_auth"}), + (Exception, {"base": "unknown"}), + ], +) +async def test_reconfigure_flow_errors( + hass: HomeAssistant, + mock_peblar: MagicMock, + mock_config_entry: MockConfigEntry, + side_effect: Exception, + expected_error: dict[str, str], +) -> None: + """Test we show user form on a connection error.""" + mock_config_entry.add_to_hass(hass) + mock_peblar.login.side_effect = side_effect + + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "127.0.0.1", + CONF_PASSWORD: "OMGPUPPIES", + }, + ) + + assert result["type"] is FlowResultType.FORM + 
assert result["errors"] == expected_error + + mock_peblar.login.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "127.0.0.2", + CONF_PASSWORD: "OMGPUPPIES", + }, + ) + assert result["type"] is FlowResultType.ABORT + + assert mock_config_entry.data == { + CONF_HOST: "127.0.0.2", + CONF_PASSWORD: "OMGPUPPIES", + } + + @pytest.mark.usefixtures("mock_peblar") async def test_zeroconf_flow(hass: HomeAssistant) -> None: """Test the zeroconf happy flow from start to finish.""" From 26d5c55d110c3675faf1497836acec0ed236b06e Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sun, 22 Dec 2024 15:35:45 +0100 Subject: [PATCH 1006/1198] Add button error handling for Peblar Rocksolid EV Chargers (#133802) --- homeassistant/components/peblar/button.py | 2 + homeassistant/components/peblar/helpers.py | 55 ++++++++ homeassistant/components/peblar/strings.json | 11 ++ tests/components/peblar/test_button.py | 125 ++++++++++++++++++- 4 files changed, 189 insertions(+), 4 deletions(-) create mode 100644 homeassistant/components/peblar/helpers.py diff --git a/homeassistant/components/peblar/button.py b/homeassistant/components/peblar/button.py index 66411daa228..22150c82649 100644 --- a/homeassistant/components/peblar/button.py +++ b/homeassistant/components/peblar/button.py @@ -19,6 +19,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .coordinator import PeblarConfigEntry, PeblarUserConfigurationDataUpdateCoordinator from .entity import PeblarEntity +from .helpers import peblar_exception_handler PARALLEL_UPDATES = 1 @@ -72,6 +73,7 @@ class PeblarButtonEntity( entity_description: PeblarButtonEntityDescription + @peblar_exception_handler async def async_press(self) -> None: """Trigger button press on the Peblar device.""" await self.entity_description.press_fn(self.coordinator.peblar) diff --git a/homeassistant/components/peblar/helpers.py b/homeassistant/components/peblar/helpers.py new file mode 100644 index 00000000000..cc1eb228803 --- /dev/null +++ b/homeassistant/components/peblar/helpers.py @@ -0,0 +1,55 @@ +"""Helpers for Peblar.""" + +from __future__ import annotations + +from collections.abc import Callable, Coroutine +from typing import Any, Concatenate + +from peblar import PeblarAuthenticationError, PeblarConnectionError, PeblarError + +from homeassistant.exceptions import HomeAssistantError + +from .const import DOMAIN +from .entity import PeblarEntity + + +def peblar_exception_handler[_PeblarEntityT: PeblarEntity, **_P]( + func: Callable[Concatenate[_PeblarEntityT, _P], Coroutine[Any, Any, Any]], +) -> Callable[Concatenate[_PeblarEntityT, _P], Coroutine[Any, Any, None]]: + """Decorate Peblar calls to handle exceptions. + + A decorator that wraps the passed in function, catches Peblar errors. 
+ """ + + async def handler( + self: _PeblarEntityT, *args: _P.args, **kwargs: _P.kwargs + ) -> None: + try: + await func(self, *args, **kwargs) + self.coordinator.async_update_listeners() + + except PeblarAuthenticationError as error: + # Reload the config entry to trigger reauth flow + self.hass.config_entries.async_schedule_reload( + self.coordinator.config_entry.entry_id + ) + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="authentication_error", + ) from error + + except PeblarConnectionError as error: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="communication_error", + translation_placeholders={"error": str(error)}, + ) from error + + except PeblarError as error: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="unknown_error", + translation_placeholders={"error": str(error)}, + ) from error + + return handler diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index f09a156dd1e..a6fa3acf457 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -161,5 +161,16 @@ "name": "Customization" } } + }, + "exceptions": { + "authentication_error": { + "message": "An authentication failure occurred while communicating with the Peblar device." + }, + "communication_error": { + "message": "An error occurred while communicating with the Peblar device: {error}" + }, + "unknown_error": { + "message": "An unknown error occurred while communicating with the Peblar device: {error}" + } } } diff --git a/tests/components/peblar/test_button.py b/tests/components/peblar/test_button.py index 7b271d3747a..e9ab377db67 100644 --- a/tests/components/peblar/test_button.py +++ b/tests/components/peblar/test_button.py @@ -1,19 +1,29 @@ """Tests for the Peblar button platform.""" +from unittest.mock import MagicMock + +from peblar import PeblarAuthenticationError, PeblarConnectionError, PeblarError import pytest from syrupy.assertion import SnapshotAssertion +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.components.peblar.const import DOMAIN -from homeassistant.const import Platform +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er from tests.common import MockConfigEntry, snapshot_platform +pytestmark = [ + pytest.mark.freeze_time("2024-12-21 21:45:00"), + pytest.mark.parametrize("init_integration", [Platform.BUTTON], indirect=True), + pytest.mark.usefixtures("init_integration"), +] -@pytest.mark.freeze_time("2024-12-21 21:45:00") -@pytest.mark.parametrize("init_integration", [Platform.BUTTON], indirect=True) -@pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration") + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_entities( hass: HomeAssistant, snapshot: SnapshotAssertion, @@ -34,3 +44,110 @@ async def test_entities( ) for entity_entry in entity_entries: assert entity_entry.device_id == device_entry.id + + +@pytest.mark.parametrize( + ("entity_id", "method"), + [ + ("button.peblar_ev_charger_identify", "identify"), + ("button.peblar_ev_charger_restart", "reboot"), + ], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def 
test_buttons( + hass: HomeAssistant, + mock_peblar: MagicMock, + mock_config_entry: MockConfigEntry, + entity_id: str, + method: str, +) -> None: + """Test the Peblar EV charger buttons.""" + mocked_method = getattr(mock_peblar, method) + + # Test normal happy path button press + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert len(mocked_method.mock_calls) == 1 + mocked_method.assert_called_with() + + # Test connection error handling + mocked_method.side_effect = PeblarConnectionError("Could not connect") + with pytest.raises( + HomeAssistantError, + match=( + r"An error occurred while communicating " + r"with the Peblar device: Could not connect" + ), + ) as excinfo: + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert excinfo.value.translation_domain == DOMAIN + assert excinfo.value.translation_key == "communication_error" + assert excinfo.value.translation_placeholders == {"error": "Could not connect"} + + # Test unknown error handling + mocked_method.side_effect = PeblarError("Unknown error") + with pytest.raises( + HomeAssistantError, + match=( + r"An unknown error occurred while communicating " + r"with the Peblar device: Unknown error" + ), + ) as excinfo: + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert excinfo.value.translation_domain == DOMAIN + assert excinfo.value.translation_key == "unknown_error" + assert excinfo.value.translation_placeholders == {"error": "Unknown error"} + + # Test authentication error handling + mocked_method.side_effect = PeblarAuthenticationError("Authentication error") + mock_peblar.login.side_effect = PeblarAuthenticationError("Authentication error") + with pytest.raises( + HomeAssistantError, + match=( + r"An authentication failure occurred while communicating " + r"with the Peblar device" + ), + ) as excinfo: + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert excinfo.value.translation_domain == DOMAIN + assert excinfo.value.translation_key == "authentication_error" + assert not excinfo.value.translation_placeholders + + # Ensure the device is reloaded on authentication error and triggers + # a reauthentication flow. + await hass.async_block_till_done() + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow["step_id"] == "reauth_confirm" + assert flow["handler"] == DOMAIN + + assert "context" in flow + assert flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == mock_config_entry.entry_id From 3cc75c3cf62958488cdb64f4c23d659fa01c0e2a Mon Sep 17 00:00:00 2001 From: "Steven B." 
<51370195+sdb9696@users.noreply.github.com> Date: Sun, 22 Dec 2024 15:17:23 +0000 Subject: [PATCH 1007/1198] Use feature checks in tplink integration (#133795) Clean up to use new upstream API: * Use Feature attributes to check for supported * Use color_temp range and update tests --- homeassistant/components/tplink/light.py | 28 +++-- tests/components/tplink/__init__.py | 30 +++-- .../components/tplink/fixtures/features.json | 4 +- .../tplink/snapshots/test_climate.ambr | 8 +- .../tplink/snapshots/test_number.ambr | 16 +-- tests/components/tplink/test_init.py | 19 ++- tests/components/tplink/test_light.py | 117 ++++++++++++++---- tests/components/tplink/test_sensor.py | 2 +- 8 files changed, 157 insertions(+), 67 deletions(-) diff --git a/homeassistant/components/tplink/light.py b/homeassistant/components/tplink/light.py index 8d6ec27f81c..f3207d754f3 100644 --- a/homeassistant/components/tplink/light.py +++ b/homeassistant/components/tplink/light.py @@ -200,14 +200,13 @@ class TPLinkLightEntity(CoordinatedTPLinkEntity, LightEntity): # If _attr_name is None the entity name will be the device name self._attr_name = None if parent is None else device.alias modes: set[ColorMode] = {ColorMode.ONOFF} - if light_module.is_variable_color_temp: + if color_temp_feat := light_module.get_feature("color_temp"): modes.add(ColorMode.COLOR_TEMP) - temp_range = light_module.valid_temperature_range - self._attr_min_color_temp_kelvin = temp_range.min - self._attr_max_color_temp_kelvin = temp_range.max - if light_module.is_color: + self._attr_min_color_temp_kelvin = color_temp_feat.minimum_value + self._attr_max_color_temp_kelvin = color_temp_feat.maximum_value + if light_module.has_feature("hsv"): modes.add(ColorMode.HS) - if light_module.is_dimmable: + if light_module.has_feature("brightness"): modes.add(ColorMode.BRIGHTNESS) self._attr_supported_color_modes = filter_supported_color_modes(modes) if len(self._attr_supported_color_modes) == 1: @@ -270,15 +269,17 @@ class TPLinkLightEntity(CoordinatedTPLinkEntity, LightEntity): self, color_temp: float, brightness: int | None, transition: int | None ) -> None: light_module = self._light_module - valid_temperature_range = light_module.valid_temperature_range + color_temp_feat = light_module.get_feature("color_temp") + assert color_temp_feat + requested_color_temp = round(color_temp) # Clamp color temp to valid range # since if the light in a group we will # get requests for color temps for the range # of the group and not the light clamped_color_temp = min( - valid_temperature_range.max, - max(valid_temperature_range.min, requested_color_temp), + color_temp_feat.maximum_value, + max(color_temp_feat.minimum_value, requested_color_temp), ) await light_module.set_color_temp( clamped_color_temp, @@ -325,8 +326,11 @@ class TPLinkLightEntity(CoordinatedTPLinkEntity, LightEntity): # The light supports only a single color mode, return it return self._fixed_color_mode - # The light supports both color temp and color, determine which on is active - if self._light_module.is_variable_color_temp and self._light_module.color_temp: + # The light supports both color temp and color, determine which one is active + if ( + self._light_module.has_feature("color_temp") + and self._light_module.color_temp + ): return ColorMode.COLOR_TEMP return ColorMode.HS @@ -335,7 +339,7 @@ class TPLinkLightEntity(CoordinatedTPLinkEntity, LightEntity): """Update the entity's attributes.""" light_module = self._light_module self._attr_is_on = light_module.state.light_on is True - if 
light_module.is_dimmable: + if light_module.has_feature("brightness"): self._attr_brightness = round((light_module.brightness * 255.0) / 100.0) color_mode = self._determine_color_mode() self._attr_color_mode = color_mode diff --git a/tests/components/tplink/__init__.py b/tests/components/tplink/__init__.py index 809ab3bfd78..fdef5c35bfa 100644 --- a/tests/components/tplink/__init__.py +++ b/tests/components/tplink/__init__.py @@ -257,20 +257,27 @@ def _mocked_device( for module_name in modules } + device_features = {} if features: - device.features = { + device_features = { feature_id: _mocked_feature(feature_id, require_fixture=True) for feature_id in features if isinstance(feature_id, str) } - device.features.update( + device_features.update( { feature.id: feature for feature in features if isinstance(feature, Feature) } ) + device.features = device_features + + for mod in device.modules.values(): + mod.get_feature.side_effect = device_features.get + mod.has_feature.side_effect = lambda id: id in device_features + device.children = [] if children: for child in children: @@ -289,6 +296,7 @@ def _mocked_device( device.protocol = _mock_protocol() device.config = device_config device.credentials_hash = credentials_hash + return device @@ -303,8 +311,8 @@ def _mocked_feature( precision_hint=None, choices=None, unit=None, - minimum_value=0, - maximum_value=2**16, # Arbitrary max + minimum_value=None, + maximum_value=None, ) -> Feature: """Get a mocked feature. @@ -334,11 +342,14 @@ def _mocked_feature( feature.unit = unit or fixture.get("unit") # number - feature.minimum_value = minimum_value or fixture.get("minimum_value") - feature.maximum_value = maximum_value or fixture.get("maximum_value") + min_val = minimum_value or fixture.get("minimum_value") + feature.minimum_value = 0 if min_val is None else min_val + max_val = maximum_value or fixture.get("maximum_value") + feature.maximum_value = 2**16 if max_val is None else max_val # select feature.choices = choices or fixture.get("choices") + return feature @@ -350,13 +361,7 @@ def _mocked_light_module(device) -> Light: light.state = LightState( light_on=True, brightness=light.brightness, color_temp=light.color_temp ) - light.is_color = True - light.is_variable_color_temp = True - light.is_dimmable = True - light.is_brightness = True - light.has_effects = False light.hsv = (10, 30, 5) - light.valid_temperature_range = ColorTempRange(min=4000, max=9000) light.hw_info = {"sw_ver": "1.0.0", "hw_ver": "1.0.0"} async def _set_state(state, *_, **__): @@ -389,7 +394,6 @@ def _mocked_light_module(device) -> Light: def _mocked_light_effect_module(device) -> LightEffect: effect = MagicMock(spec=LightEffect, name="Mocked light effect") - effect.has_effects = True effect.has_custom_effects = True effect.effect = "Effect1" effect.effect_list = ["Off", "Effect1", "Effect2"] diff --git a/tests/components/tplink/fixtures/features.json b/tests/components/tplink/fixtures/features.json index f60132fd2c2..d822bfc9b57 100644 --- a/tests/components/tplink/fixtures/features.json +++ b/tests/components/tplink/fixtures/features.json @@ -267,7 +267,9 @@ "target_temperature": { "value": false, "type": "Number", - "category": "Primary" + "category": "Primary", + "minimum_value": 5, + "maximum_value": 30 }, "fan_speed_level": { "value": 2, diff --git a/tests/components/tplink/snapshots/test_climate.ambr b/tests/components/tplink/snapshots/test_climate.ambr index 8236f332046..6823c373b68 100644 --- a/tests/components/tplink/snapshots/test_climate.ambr +++ 
b/tests/components/tplink/snapshots/test_climate.ambr @@ -9,8 +9,8 @@ , , ]), - 'max_temp': 65536, - 'min_temp': None, + 'max_temp': 30, + 'min_temp': 5, }), 'config_entry_id': , 'device_class': None, @@ -49,8 +49,8 @@ , , ]), - 'max_temp': 65536, - 'min_temp': None, + 'max_temp': 30, + 'min_temp': 5, 'supported_features': , 'temperature': 22.2, }), diff --git a/tests/components/tplink/snapshots/test_number.ambr b/tests/components/tplink/snapshots/test_number.ambr index 977d2098fb9..dbb58bac01b 100644 --- a/tests/components/tplink/snapshots/test_number.ambr +++ b/tests/components/tplink/snapshots/test_number.ambr @@ -41,7 +41,7 @@ }), 'area_id': None, 'capabilities': dict({ - 'max': 65536, + 'max': 60, 'min': 0, 'mode': , 'step': 1.0, @@ -77,7 +77,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'my_device Smooth off', - 'max': 65536, + 'max': 60, 'min': 0, 'mode': , 'step': 1.0, @@ -96,7 +96,7 @@ }), 'area_id': None, 'capabilities': dict({ - 'max': 65536, + 'max': 60, 'min': 0, 'mode': , 'step': 1.0, @@ -132,7 +132,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'my_device Smooth on', - 'max': 65536, + 'max': 60, 'min': 0, 'mode': , 'step': 1.0, @@ -151,7 +151,7 @@ }), 'area_id': None, 'capabilities': dict({ - 'max': 65536, + 'max': 10, 'min': -10, 'mode': , 'step': 1.0, @@ -187,7 +187,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'my_device Temperature offset', - 'max': 65536, + 'max': 10, 'min': -10, 'mode': , 'step': 1.0, @@ -206,7 +206,7 @@ }), 'area_id': None, 'capabilities': dict({ - 'max': 65536, + 'max': 60, 'min': 0, 'mode': , 'step': 1.0, @@ -242,7 +242,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'my_device Turn off in', - 'max': 65536, + 'max': 60, 'min': 0, 'mode': , 'step': 1.0, diff --git a/tests/components/tplink/test_init.py b/tests/components/tplink/test_init.py index 766e6784c8b..dd967e0e0d6 100644 --- a/tests/components/tplink/test_init.py +++ b/tests/components/tplink/test_init.py @@ -54,6 +54,7 @@ from . 
import ( MAC_ADDRESS, MODEL, _mocked_device, + _mocked_feature, _patch_connect, _patch_discovery, _patch_single_discovery, @@ -335,7 +336,14 @@ async def test_update_attrs_fails_in_init( domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) config_entry.add_to_hass(hass) - light = _mocked_device(modules=[Module.Light], alias="my_light") + features = [ + _mocked_feature("brightness", value=50), + _mocked_feature("hsv", value=(10, 30, 5)), + _mocked_feature( + "color_temp", value=4000, minimum_value=4000, maximum_value=9000 + ), + ] + light = _mocked_device(modules=[Module.Light], alias="my_light", features=features) light_module = light.modules[Module.Light] p = PropertyMock(side_effect=KasaException) type(light_module).color_temp = p @@ -363,7 +371,14 @@ async def test_update_attrs_fails_on_update( domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) config_entry.add_to_hass(hass) - light = _mocked_device(modules=[Module.Light], alias="my_light") + features = [ + _mocked_feature("brightness", value=50), + _mocked_feature("hsv", value=(10, 30, 5)), + _mocked_feature( + "color_temp", value=4000, minimum_value=4000, maximum_value=9000 + ), + ] + light = _mocked_device(modules=[Module.Light], alias="my_light", features=features) light_module = light.modules[Module.Light] with _patch_discovery(device=light), _patch_connect(device=light): diff --git a/tests/components/tplink/test_light.py b/tests/components/tplink/test_light.py index b7f4ed6b8f4..6549711b7fc 100644 --- a/tests/components/tplink/test_light.py +++ b/tests/components/tplink/test_light.py @@ -54,6 +54,7 @@ from . import ( DEVICE_ID, MAC_ADDRESS, _mocked_device, + _mocked_feature, _patch_connect, _patch_discovery, _patch_single_discovery, @@ -118,8 +119,32 @@ async def test_legacy_dimmer_unique_id(hass: HomeAssistant) -> None: @pytest.mark.parametrize( ("device", "transition"), [ - (_mocked_device(modules=[Module.Light]), 2.0), - (_mocked_device(modules=[Module.Light, Module.LightEffect]), None), + ( + _mocked_device( + modules=[Module.Light], + features=[ + _mocked_feature("brightness", value=50), + _mocked_feature("hsv", value=(10, 30, 5)), + _mocked_feature( + "color_temp", value=4000, minimum_value=4000, maximum_value=9000 + ), + ], + ), + 2.0, + ), + ( + _mocked_device( + modules=[Module.Light, Module.LightEffect], + features=[ + _mocked_feature("brightness", value=50), + _mocked_feature("hsv", value=(10, 30, 5)), + _mocked_feature( + "color_temp", value=4000, minimum_value=4000, maximum_value=9000 + ), + ], + ), + None, + ), ], ) async def test_color_light( @@ -131,7 +156,10 @@ async def test_color_light( ) already_migrated_config_entry.add_to_hass(hass) light = device.modules[Module.Light] + + # Setting color_temp to None emulates a device with active effects light.color_temp = None + with _patch_discovery(device=device), _patch_connect(device=device): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() @@ -220,9 +248,14 @@ async def test_color_light_no_temp(hass: HomeAssistant) -> None: domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - device = _mocked_device(modules=[Module.Light], alias="my_light") + features = [ + _mocked_feature("brightness", value=50), + _mocked_feature("hsv", value=(10, 30, 5)), + ] + + device = _mocked_device(modules=[Module.Light], alias="my_light", features=features) light = device.modules[Module.Light] - light.is_variable_color_temp = False + 
type(light).color_temp = PropertyMock(side_effect=Exception) with _patch_discovery(device=device), _patch_connect(device=device): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) @@ -272,25 +305,47 @@ async def test_color_light_no_temp(hass: HomeAssistant) -> None: @pytest.mark.parametrize( - ("bulb", "is_color"), + ("device", "is_color"), [ - (_mocked_device(modules=[Module.Light], alias="my_light"), True), - (_mocked_device(modules=[Module.Light], alias="my_light"), False), + ( + _mocked_device( + modules=[Module.Light], + alias="my_light", + features=[ + _mocked_feature("brightness", value=50), + _mocked_feature("hsv", value=(10, 30, 5)), + _mocked_feature( + "color_temp", value=4000, minimum_value=4000, maximum_value=9000 + ), + ], + ), + True, + ), + ( + _mocked_device( + modules=[Module.Light], + alias="my_light", + features=[ + _mocked_feature("brightness", value=50), + _mocked_feature( + "color_temp", value=4000, minimum_value=4000, maximum_value=9000 + ), + ], + ), + False, + ), ], ) async def test_color_temp_light( - hass: HomeAssistant, bulb: MagicMock, is_color: bool + hass: HomeAssistant, device: MagicMock, is_color: bool ) -> None: """Test a light.""" already_migrated_config_entry = MockConfigEntry( domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - device = _mocked_device(modules=[Module.Light], alias="my_light") + # device = _mocked_device(modules=[Module.Light], alias="my_light") light = device.modules[Module.Light] - light.is_color = is_color - light.color_temp = 4000 - light.is_variable_color_temp = True with _patch_discovery(device=device), _patch_connect(device=device): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) @@ -303,7 +358,7 @@ async def test_color_temp_light( attributes = state.attributes assert attributes[ATTR_BRIGHTNESS] == 128 assert attributes[ATTR_COLOR_MODE] == "color_temp" - if light.is_color: + if is_color: assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp", "hs"] else: assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp"] @@ -368,10 +423,11 @@ async def test_brightness_only_light(hass: HomeAssistant) -> None: domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - device = _mocked_device(modules=[Module.Light], alias="my_light") + features = [ + _mocked_feature("brightness", value=50), + ] + device = _mocked_device(modules=[Module.Light], alias="my_light", features=features) light = device.modules[Module.Light] - light.is_color = False - light.is_variable_color_temp = False with _patch_discovery(device=device), _patch_connect(device=device): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) @@ -414,11 +470,8 @@ async def test_on_off_light(hass: HomeAssistant) -> None: domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - device = _mocked_device(modules=[Module.Light], alias="my_light") + device = _mocked_device(modules=[Module.Light], alias="my_light", features=[]) light = device.modules[Module.Light] - light.is_color = False - light.is_variable_color_temp = False - light.is_dimmable = False with _patch_discovery(device=device), _patch_connect(device=device): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) @@ -450,11 +503,9 @@ async def test_off_at_start_light(hass: HomeAssistant) -> None: domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, 
unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - device = _mocked_device(modules=[Module.Light], alias="my_light") + device = _mocked_device(modules=[Module.Light], alias="my_light", features=[]) light = device.modules[Module.Light] - light.is_color = False - light.is_variable_color_temp = False - light.is_dimmable = False + light.state = LightState(light_on=False) with _patch_discovery(device=device), _patch_connect(device=device): @@ -513,8 +564,15 @@ async def test_smart_strip_effects( domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) + features = [ + _mocked_feature("brightness", value=50), + _mocked_feature("hsv", value=(10, 30, 5)), + _mocked_feature( + "color_temp", value=4000, minimum_value=4000, maximum_value=9000 + ), + ] device = _mocked_device( - modules=[Module.Light, Module.LightEffect], alias="my_light" + modules=[Module.Light, Module.LightEffect], alias="my_light", features=features ) light = device.modules[Module.Light] light_effect = device.modules[Module.LightEffect] @@ -977,8 +1035,15 @@ async def test_scene_effect_light( domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) + features = [ + _mocked_feature("brightness", value=50), + _mocked_feature("hsv", value=(10, 30, 5)), + _mocked_feature( + "color_temp", value=4000, minimum_value=4000, maximum_value=9000 + ), + ] device = _mocked_device( - modules=[Module.Light, Module.LightEffect], alias="my_light" + modules=[Module.Light, Module.LightEffect], alias="my_light", features=features ) light_effect = device.modules[Module.LightEffect] light_effect.effect = LightEffect.LIGHT_EFFECTS_OFF diff --git a/tests/components/tplink/test_sensor.py b/tests/components/tplink/test_sensor.py index dda43c52430..a53b59df0dc 100644 --- a/tests/components/tplink/test_sensor.py +++ b/tests/components/tplink/test_sensor.py @@ -129,7 +129,7 @@ async def test_color_light_no_emeter(hass: HomeAssistant) -> None: ) already_migrated_config_entry.add_to_hass(hass) bulb = _mocked_device(alias="my_bulb", modules=[Module.Light]) - bulb.has_emeter = False + with _patch_discovery(device=bulb), _patch_connect(device=bulb): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() From 484f149e617bad189aa4774160573165a2868acc Mon Sep 17 00:00:00 2001 From: Dave T <17680170+davet2001@users.noreply.github.com> Date: Sun, 22 Dec 2024 16:31:03 +0000 Subject: [PATCH 1008/1198] Add config flow stream preview to generic camera (#122563) Co-authored-by: Allen Porter --- homeassistant/components/generic/camera.py | 5 +- .../components/generic/config_flow.py | 172 +++++++++++++----- .../components/generic/manifest.json | 2 +- homeassistant/components/generic/strings.json | 15 +- tests/components/generic/conftest.py | 8 +- tests/components/generic/test_config_flow.py | 134 ++++++++++---- 6 files changed, 243 insertions(+), 93 deletions(-) diff --git a/homeassistant/components/generic/camera.py b/homeassistant/components/generic/camera.py index 3aac5145ca5..edefbc55ca6 100644 --- a/homeassistant/components/generic/camera.py +++ b/homeassistant/components/generic/camera.py @@ -96,10 +96,9 @@ class GenericCamera(Camera): self._stream_source = device_info.get(CONF_STREAM_SOURCE) if self._stream_source: self._stream_source = Template(self._stream_source, hass) - self._limit_refetch = device_info[CONF_LIMIT_REFETCH_TO_URL_CHANGE] - self._attr_frame_interval = 1 / 
device_info[CONF_FRAMERATE] - if self._stream_source: self._attr_supported_features = CameraEntityFeature.STREAM + self._limit_refetch = device_info.get(CONF_LIMIT_REFETCH_TO_URL_CHANGE, False) + self._attr_frame_interval = 1 / device_info[CONF_FRAMERATE] self.content_type = device_info[CONF_CONTENT_TYPE] self.verify_ssl = device_info[CONF_VERIFY_SSL] if device_info.get(CONF_RTSP_TRANSPORT): diff --git a/homeassistant/components/generic/config_flow.py b/homeassistant/components/generic/config_flow.py index 84243101bd6..83894b489f0 100644 --- a/homeassistant/components/generic/config_flow.py +++ b/homeassistant/components/generic/config_flow.py @@ -5,7 +5,7 @@ from __future__ import annotations import asyncio from collections.abc import Mapping import contextlib -from datetime import datetime +from datetime import datetime, timedelta from errno import EHOSTUNREACH, EIO import io import logging @@ -17,18 +17,21 @@ import PIL.Image import voluptuous as vol import yarl +from homeassistant.components import websocket_api from homeassistant.components.camera import ( CAMERA_IMAGE_TIMEOUT, + DOMAIN as CAMERA_DOMAIN, DynamicStreamSettings, _async_get_image, ) -from homeassistant.components.http import HomeAssistantView +from homeassistant.components.http.view import HomeAssistantView from homeassistant.components.stream import ( CONF_RTSP_TRANSPORT, CONF_USE_WALLCLOCK_AS_TIMESTAMPS, HLS_PROVIDER, RTSP_TRANSPORTS, SOURCE_TIMEOUT, + Stream, create_stream, ) from homeassistant.config_entries import ( @@ -49,7 +52,9 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, TemplateError from homeassistant.helpers import config_validation as cv, template as template_helper +from homeassistant.helpers.entity_platform import EntityPlatform from homeassistant.helpers.httpx_client import get_async_client +from homeassistant.setup import async_prepare_setup_platform from homeassistant.util import slugify from .camera import GenericCamera, generate_auth @@ -79,6 +84,15 @@ SUPPORTED_IMAGE_TYPES = {"png", "jpeg", "gif", "svg+xml", "webp"} IMAGE_PREVIEWS_ACTIVE = "previews" +class InvalidStreamException(HomeAssistantError): + """Error to indicate an invalid stream.""" + + def __init__(self, error: str, details: str | None = None) -> None: + """Initialize the error.""" + super().__init__(error) + self.details = details + + def build_schema( user_input: Mapping[str, Any], is_options_flow: bool = False, @@ -231,12 +245,16 @@ def slug( return None -async def async_test_stream( +async def async_test_and_preview_stream( hass: HomeAssistant, info: Mapping[str, Any] -) -> dict[str, str]: - """Verify that the stream is valid before we create an entity.""" +) -> Stream | None: + """Verify that the stream is valid before we create an entity. + + Returns the stream object if valid. Raises InvalidStreamException if not. + The stream object is used to preview the video in the UI. 
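+
+    Illustrative call pattern, as used by the config flow steps in this
+    module (the raised error code becomes a form error):
+
+        try:
+            stream = await async_test_and_preview_stream(hass, user_input)
+        except InvalidStreamException as err:
+            errors[CONF_STREAM_SOURCE] = str(err)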
+ """ if not (stream_source := info.get(CONF_STREAM_SOURCE)): - return {} + return None # Import from stream.worker as stream cannot reexport from worker # without forcing the av dependency on default_config # pylint: disable-next=import-outside-toplevel @@ -248,7 +266,7 @@ async def async_test_stream( stream_source = stream_source.async_render(parse_result=False) except TemplateError as err: _LOGGER.warning("Problem rendering template %s: %s", stream_source, err) - return {CONF_STREAM_SOURCE: "template_error"} + raise InvalidStreamException("template_error") from err stream_options: dict[str, str | bool | float] = {} if rtsp_transport := info.get(CONF_RTSP_TRANSPORT): stream_options[CONF_RTSP_TRANSPORT] = rtsp_transport @@ -257,10 +275,10 @@ async def async_test_stream( try: url = yarl.URL(stream_source) - except ValueError: - return {CONF_STREAM_SOURCE: "malformed_url"} + except ValueError as err: + raise InvalidStreamException("malformed_url") from err if not url.is_absolute(): - return {CONF_STREAM_SOURCE: "relative_url"} + raise InvalidStreamException("relative_url") if not url.user and not url.password: username = info.get(CONF_USERNAME) password = info.get(CONF_PASSWORD) @@ -273,29 +291,28 @@ async def async_test_stream( stream_source, stream_options, DynamicStreamSettings(), - "test_stream", + f"{DOMAIN}.test_stream", ) hls_provider = stream.add_provider(HLS_PROVIDER) - await stream.start() - if not await hls_provider.part_recv(timeout=SOURCE_TIMEOUT): - hass.async_create_task(stream.stop()) - return {CONF_STREAM_SOURCE: "timeout"} - await stream.stop() except StreamWorkerError as err: - return {CONF_STREAM_SOURCE: "unknown_with_details", "error_details": str(err)} - except PermissionError: - return {CONF_STREAM_SOURCE: "stream_not_permitted"} + raise InvalidStreamException("unknown_with_details", str(err)) from err + except PermissionError as err: + raise InvalidStreamException("stream_not_permitted") from err except OSError as err: if err.errno == EHOSTUNREACH: - return {CONF_STREAM_SOURCE: "stream_no_route_to_host"} + raise InvalidStreamException("stream_no_route_to_host") from err if err.errno == EIO: # input/output error - return {CONF_STREAM_SOURCE: "stream_io_error"} + raise InvalidStreamException("stream_io_error") from err raise except HomeAssistantError as err: if "Stream integration is not set up" in str(err): - return {CONF_STREAM_SOURCE: "stream_not_set_up"} + raise InvalidStreamException("stream_not_set_up") from err raise - return {} + await stream.start() + if not await hls_provider.part_recv(timeout=SOURCE_TIMEOUT): + hass.async_create_task(stream.stop()) + raise InvalidStreamException("timeout") + return stream def register_preview(hass: HomeAssistant) -> None: @@ -316,6 +333,7 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize Generic ConfigFlow.""" self.preview_cam: dict[str, Any] = {} + self.preview_stream: Stream | None = None self.user_input: dict[str, Any] = {} self.title = "" @@ -326,14 +344,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): """Get the options flow for this handler.""" return GenericOptionsFlowHandler() - def check_for_existing(self, options: dict[str, Any]) -> bool: - """Check whether an existing entry is using the same URLs.""" - return any( - entry.options.get(CONF_STILL_IMAGE_URL) == options.get(CONF_STILL_IMAGE_URL) - and entry.options.get(CONF_STREAM_SOURCE) == options.get(CONF_STREAM_SOURCE) - for entry in self._async_current_entries() - ) - async def async_step_user( 
self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -349,10 +359,17 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "no_still_image_or_stream_url" else: errors, still_format = await async_test_still(hass, user_input) - errors = errors | await async_test_stream(hass, user_input) + try: + self.preview_stream = await async_test_and_preview_stream( + hass, user_input + ) + except InvalidStreamException as err: + errors[CONF_STREAM_SOURCE] = str(err) + if err.details: + errors["error_details"] = err.details + self.preview_stream = None if not errors: user_input[CONF_CONTENT_TYPE] = still_format - user_input[CONF_LIMIT_REFETCH_TO_URL_CHANGE] = False still_url = user_input.get(CONF_STILL_IMAGE_URL) stream_url = user_input.get(CONF_STREAM_SOURCE) name = ( @@ -365,14 +382,9 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): user_input[CONF_CONTENT_TYPE] = "image/jpeg" self.user_input = user_input self.title = name - - if still_url is None: - return self.async_create_entry( - title=self.title, data={}, options=self.user_input - ) # temporary preview for user to check the image self.preview_cam = user_input - return await self.async_step_user_confirm_still() + return await self.async_step_user_confirm() if "error_details" in errors: description_placeholders["error"] = errors.pop("error_details") elif self.user_input: @@ -386,11 +398,14 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_user_confirm_still( + async def async_step_user_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle user clicking confirm after still preview.""" if user_input: + if ha_stream := self.preview_stream: + # Kill off the temp stream we created. 
+ await ha_stream.stop() if not user_input.get(CONF_CONFIRMED_OK): return await self.async_step_user() return self.async_create_entry( @@ -399,7 +414,7 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): register_preview(self.hass) preview_url = f"/api/generic/preview_flow_image/{self.flow_id}?t={datetime.now().isoformat()}" return self.async_show_form( - step_id="user_confirm_still", + step_id="user_confirm", data_schema=vol.Schema( { vol.Required(CONF_CONFIRMED_OK, default=False): bool, @@ -407,8 +422,14 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): ), description_placeholders={"preview_url": preview_url}, errors=None, + preview="generic_camera", ) + @staticmethod + async def async_setup_preview(hass: HomeAssistant) -> None: + """Set up preview WS API.""" + websocket_api.async_register_command(hass, ws_start_preview) + class GenericOptionsFlowHandler(OptionsFlow): """Handle Generic IP Camera options.""" @@ -423,13 +444,21 @@ class GenericOptionsFlowHandler(OptionsFlow): ) -> ConfigFlowResult: """Manage Generic IP Camera options.""" errors: dict[str, str] = {} + description_placeholders = {} hass = self.hass if user_input is not None: errors, still_format = await async_test_still( hass, self.config_entry.options | user_input ) - errors = errors | await async_test_stream(hass, user_input) + try: + await async_test_and_preview_stream(hass, user_input) + except InvalidStreamException as err: + errors[CONF_STREAM_SOURCE] = str(err) + if err.details: + errors["error_details"] = err.details + # Stream preview during options flow not yet implemented + still_url = user_input.get(CONF_STILL_IMAGE_URL) if not errors: if still_url is None: @@ -449,6 +478,8 @@ class GenericOptionsFlowHandler(OptionsFlow): # temporary preview for user to check the image self.preview_cam = data return await self.async_step_confirm_still() + if "error_details" in errors: + description_placeholders["error"] = errors.pop("error_details") return self.async_show_form( step_id="init", data_schema=build_schema( @@ -456,6 +487,7 @@ class GenericOptionsFlowHandler(OptionsFlow): True, self.show_advanced_options, ), + description_placeholders=description_placeholders, errors=errors, ) @@ -518,3 +550,59 @@ class CameraImagePreview(HomeAssistantView): CAMERA_IMAGE_TIMEOUT, ) return web.Response(body=image.content, content_type=image.content_type) + + +@websocket_api.websocket_command( + { + vol.Required("type"): "generic_camera/start_preview", + vol.Required("flow_id"): str, + vol.Optional("flow_type"): vol.Any("config_flow"), + vol.Optional("user_input"): dict, + } +) +@websocket_api.async_response +async def ws_start_preview( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Generate websocket handler for the camera still/stream preview.""" + _LOGGER.debug("Generating websocket handler for generic camera preview") + + flow_id = msg["flow_id"] + flow = cast( + GenericIPCamConfigFlow, + hass.config_entries.flow._progress.get(flow_id), # noqa: SLF001 + ) + user_input = flow.preview_cam + + # Create an EntityPlatform, needed for name translations + platform = await async_prepare_setup_platform(hass, {}, CAMERA_DOMAIN, DOMAIN) + entity_platform = EntityPlatform( + hass=hass, + logger=_LOGGER, + domain=CAMERA_DOMAIN, + platform_name=DOMAIN, + platform=platform, + scan_interval=timedelta(seconds=3600), + entity_namespace=None, + ) + await entity_platform.async_load_translations() + + ha_still_url = None + ha_stream_url = None + + if 
user_input.get(CONF_STILL_IMAGE_URL): + ha_still_url = f"/api/generic/preview_flow_image/{msg['flow_id']}?t={datetime.now().isoformat()}" + _LOGGER.debug("Got preview still URL: %s", ha_still_url) + + if ha_stream := flow.preview_stream: + ha_stream_url = ha_stream.endpoint_url(HLS_PROVIDER) + _LOGGER.debug("Got preview stream URL: %s", ha_stream_url) + + connection.send_message( + websocket_api.event_message( + msg["id"], + {"attributes": {"still_url": ha_still_url, "stream_url": ha_stream_url}}, + ) + ) diff --git a/homeassistant/components/generic/manifest.json b/homeassistant/components/generic/manifest.json index c1fbc16d9be..0b6f07e8205 100644 --- a/homeassistant/components/generic/manifest.json +++ b/homeassistant/components/generic/manifest.json @@ -3,7 +3,7 @@ "name": "Generic Camera", "codeowners": ["@davet2001"], "config_flow": true, - "dependencies": ["http"], + "dependencies": ["http", "stream"], "documentation": "https://www.home-assistant.io/integrations/generic", "integration_type": "device", "iot_class": "local_push", diff --git a/homeassistant/components/generic/strings.json b/homeassistant/components/generic/strings.json index 94360a5b7c2..b3ecadacba5 100644 --- a/homeassistant/components/generic/strings.json +++ b/homeassistant/components/generic/strings.json @@ -39,11 +39,11 @@ "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" } }, - "user_confirm_still": { - "title": "Preview", - "description": "![Camera Still Image Preview]({preview_url})", + "user_confirm": { + "title": "Confirmation", + "description": "Please wait for previews to load...", "data": { - "confirmed_ok": "This image looks good." + "confirmed_ok": "Everything looks good." } } } @@ -68,15 +68,16 @@ } }, "confirm_still": { - "title": "[%key:component::generic::config::step::user_confirm_still::title%]", - "description": "[%key:component::generic::config::step::user_confirm_still::description%]", + "title": "Preview", + "description": "![Camera Still Image Preview]({preview_url})", "data": { - "confirmed_ok": "[%key:component::generic::config::step::user_confirm_still::data::confirmed_ok%]" + "confirmed_ok": "This image looks good." 
} } }, "error": { "unknown": "[%key:common::config_flow::error::unknown%]", + "unknown_with_details": "[%key:common::config_flow::error::unknown_with_details]", "already_exists": "[%key:component::generic::config::error::already_exists%]", "unable_still_load": "[%key:component::generic::config::error::unable_still_load%]", "unable_still_load_auth": "[%key:component::generic::config::error::unable_still_load_auth%]", diff --git a/tests/components/generic/conftest.py b/tests/components/generic/conftest.py index 69e6cc6b696..cdea83b599c 100644 --- a/tests/components/generic/conftest.py +++ b/tests/components/generic/conftest.py @@ -71,16 +71,18 @@ def fakeimg_gif(fakeimgbytes_gif: bytes) -> Generator[None]: respx.pop("fake_img") -@pytest.fixture(scope="package") -def mock_create_stream() -> _patch[MagicMock]: +@pytest.fixture +def mock_create_stream(hass: HomeAssistant) -> _patch[MagicMock]: """Mock create stream.""" - mock_stream = Mock() + mock_stream = MagicMock() + mock_stream.hass = hass mock_provider = Mock() mock_provider.part_recv = AsyncMock() mock_provider.part_recv.return_value = True mock_stream.add_provider.return_value = mock_provider mock_stream.start = AsyncMock() mock_stream.stop = AsyncMock() + mock_stream.endpoint_url.return_value = "http://127.0.0.1/nothing" return patch( "homeassistant.components.generic.config_flow.create_stream", return_value=mock_stream, diff --git a/tests/components/generic/test_config_flow.py b/tests/components/generic/test_config_flow.py index a882ca4cd8d..f121b210c0c 100644 --- a/tests/components/generic/test_config_flow.py +++ b/tests/components/generic/test_config_flow.py @@ -9,6 +9,7 @@ import os.path from pathlib import Path from unittest.mock import AsyncMock, MagicMock, PropertyMock, _patch, patch +from freezegun.api import FrozenDateTimeFactory import httpx import pytest import respx @@ -44,8 +45,8 @@ from homeassistant.data_entry_flow import FlowResultType from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from tests.common import MockConfigEntry -from tests.typing import ClientSessionGenerator +from tests.common import MockConfigEntry, async_fire_time_changed +from tests.typing import ClientSessionGenerator, WebSocketGenerator TESTDATA = { CONF_STILL_IMAGE_URL: "http://127.0.0.1/testurl/1", @@ -75,6 +76,7 @@ async def test_form( hass_client: ClientSessionGenerator, user_flow: ConfigFlowResult, mock_create_stream: _patch[MagicMock], + hass_ws_client: WebSocketGenerator, ) -> None: """Test the form with a normal set of settings.""" @@ -90,18 +92,29 @@ async def test_form( TESTDATA, ) assert result1["type"] is FlowResultType.FORM - assert result1["step_id"] == "user_confirm_still" + assert result1["step_id"] == "user_confirm" client = await hass_client() preview_url = result1["description_placeholders"]["preview_url"] # Check the preview image works. resp = await client.get(preview_url) assert resp.status == HTTPStatus.OK assert await resp.read() == fakeimgbytes_png + + # HA should now be serving a WS connection for a preview stream. 
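+    # (Editor note, inferred from this diff: the confirm form was shown with
+    #  preview="generic_camera", so the "generic_camera/start_preview"
+    #  websocket command registered by async_setup_preview should now answer
+    #  with the temporary still/stream preview URLs for this flow.)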
+ ws_client = await hass_ws_client() + flow_id = user_flow["flow_id"] + await ws_client.send_json_auto_id( + { + "type": "generic_camera/start_preview", + "flow_id": flow_id, + }, + ) + _ = await ws_client.receive_json() + result2 = await hass.config_entries.flow.async_configure( result1["flow_id"], user_input={CONF_CONFIRMED_OK: True}, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "127_0_0_1" assert result2["options"] == { @@ -110,13 +123,11 @@ async def test_form( CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION, CONF_USERNAME: "fred_flintstone", CONF_PASSWORD: "bambam", - CONF_LIMIT_REFETCH_TO_URL_CHANGE: False, CONF_CONTENT_TYPE: "image/png", - CONF_FRAMERATE: 5, + CONF_FRAMERATE: 5.0, CONF_VERIFY_SSL: False, } - await hass.async_block_till_done() # Check that the preview image is disabled after. resp = await client.get(preview_url) assert resp.status == HTTPStatus.NOT_FOUND @@ -145,7 +156,7 @@ async def test_form_only_stillimage( ) await hass.async_block_till_done() assert result1["type"] is FlowResultType.FORM - assert result1["step_id"] == "user_confirm_still" + assert result1["step_id"] == "user_confirm" result2 = await hass.config_entries.flow.async_configure( result1["flow_id"], user_input={CONF_CONFIRMED_OK: True}, @@ -157,9 +168,8 @@ async def test_form_only_stillimage( CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION, CONF_USERNAME: "fred_flintstone", CONF_PASSWORD: "bambam", - CONF_LIMIT_REFETCH_TO_URL_CHANGE: False, CONF_CONTENT_TYPE: "image/png", - CONF_FRAMERATE: 5, + CONF_FRAMERATE: 5.0, CONF_VERIFY_SSL: False, } @@ -167,13 +177,13 @@ async def test_form_only_stillimage( @respx.mock -async def test_form_reject_still_preview( +async def test_form_reject_preview( hass: HomeAssistant, fakeimgbytes_png: bytes, mock_create_stream: _patch[MagicMock], user_flow: ConfigFlowResult, ) -> None: - """Test we go back to the config screen if the user rejects the still preview.""" + """Test we go back to the config screen if the user rejects the preview.""" respx.get("http://127.0.0.1/testurl/1").respond(stream=fakeimgbytes_png) with mock_create_stream: result1 = await hass.config_entries.flow.async_configure( @@ -181,7 +191,7 @@ async def test_form_reject_still_preview( TESTDATA, ) assert result1["type"] is FlowResultType.FORM - assert result1["step_id"] == "user_confirm_still" + assert result1["step_id"] == "user_confirm" result2 = await hass.config_entries.flow.async_configure( result1["flow_id"], user_input={CONF_CONFIRMED_OK: False}, @@ -211,7 +221,7 @@ async def test_form_still_preview_cam_off( TESTDATA, ) assert result1["type"] is FlowResultType.FORM - assert result1["step_id"] == "user_confirm_still" + assert result1["step_id"] == "user_confirm" preview_url = result1["description_placeholders"]["preview_url"] # Try to view the image, should be unavailable. 
client = await hass_client() @@ -233,7 +243,7 @@ async def test_form_only_stillimage_gif( data, ) assert result1["type"] is FlowResultType.FORM - assert result1["step_id"] == "user_confirm_still" + assert result1["step_id"] == "user_confirm" result2 = await hass.config_entries.flow.async_configure( result1["flow_id"], user_input={CONF_CONFIRMED_OK: True}, @@ -258,7 +268,7 @@ async def test_form_only_svg_whitespace( data, ) assert result1["type"] is FlowResultType.FORM - assert result1["step_id"] == "user_confirm_still" + assert result1["step_id"] == "user_confirm" result2 = await hass.config_entries.flow.async_configure( result1["flow_id"], user_input={CONF_CONFIRMED_OK: True}, @@ -293,7 +303,7 @@ async def test_form_only_still_sample( data, ) assert result1["type"] is FlowResultType.FORM - assert result1["step_id"] == "user_confirm_still" + assert result1["step_id"] == "user_confirm" result2 = await hass.config_entries.flow.async_configure( result1["flow_id"], user_input={CONF_CONFIRMED_OK: True}, @@ -310,13 +320,13 @@ async def test_form_only_still_sample( ( "http://localhost:812{{3}}/static/icons/favicon-apple-180x180.png", "http://localhost:8123/static/icons/favicon-apple-180x180.png", - "user_confirm_still", + "user_confirm", None, ), ( "{% if 1 %}https://bla{% else %}https://yo{% endif %}", "https://bla/", - "user_confirm_still", + "user_confirm", None, ), ( @@ -385,7 +395,7 @@ async def test_form_rtsp_mode( user_flow["flow_id"], data ) assert result1["type"] is FlowResultType.FORM - assert result1["step_id"] == "user_confirm_still" + assert result1["step_id"] == "user_confirm" result2 = await hass.config_entries.flow.async_configure( result1["flow_id"], user_input={CONF_CONFIRMED_OK: True}, @@ -399,13 +409,11 @@ async def test_form_rtsp_mode( CONF_RTSP_TRANSPORT: "tcp", CONF_USERNAME: "fred_flintstone", CONF_PASSWORD: "bambam", - CONF_LIMIT_REFETCH_TO_URL_CHANGE: False, CONF_CONTENT_TYPE: "image/png", - CONF_FRAMERATE: 5, + CONF_FRAMERATE: 5.0, CONF_VERIFY_SSL: False, } - await hass.async_block_till_done() assert len(mock_setup.mock_calls) == 1 @@ -419,33 +427,36 @@ async def test_form_only_stream( data = TESTDATA.copy() data.pop(CONF_STILL_IMAGE_URL) data[CONF_STREAM_SOURCE] = "rtsp://user:pass@127.0.0.1/testurl/2" - with mock_create_stream as mock_setup: + with mock_create_stream: result1 = await hass.config_entries.flow.async_configure( user_flow["flow_id"], data, ) - assert result1["type"] is FlowResultType.CREATE_ENTRY - assert result1["title"] == "127_0_0_1" - assert result1["options"] == { + + assert result1["type"] is FlowResultType.FORM + with mock_create_stream: + result2 = await hass.config_entries.flow.async_configure( + result1["flow_id"], + user_input={CONF_CONFIRMED_OK: True}, + ) + + assert result2["title"] == "127_0_0_1" + assert result2["options"] == { CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION, CONF_STREAM_SOURCE: "rtsp://user:pass@127.0.0.1/testurl/2", CONF_USERNAME: "fred_flintstone", CONF_PASSWORD: "bambam", - CONF_LIMIT_REFETCH_TO_URL_CHANGE: False, CONF_CONTENT_TYPE: "image/jpeg", - CONF_FRAMERATE: 5, + CONF_FRAMERATE: 5.0, CONF_VERIFY_SSL: False, } - await hass.async_block_till_done() - with patch( "homeassistant.components.camera._async_get_stream_image", return_value=fakeimgbytes_jpg, ): image_obj = await async_get_image(hass, "camera.127_0_0_1") assert image_obj.content == fakeimgbytes_jpg - assert len(mock_setup.mock_calls) == 1 async def test_form_still_and_stream_not_provided( @@ -512,7 +523,6 @@ async def test_form_image_http_exceptions( 
user_flow["flow_id"], TESTDATA, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM assert result2["errors"] == expected_message @@ -531,7 +541,6 @@ async def test_form_stream_invalidimage( user_flow["flow_id"], TESTDATA, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"still_image_url": "invalid_still_image"} @@ -550,7 +559,6 @@ async def test_form_stream_invalidimage2( user_flow["flow_id"], TESTDATA, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"still_image_url": "unable_still_load_no_image"} @@ -569,7 +577,6 @@ async def test_form_stream_invalidimage3( user_flow["flow_id"], TESTDATA, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"still_image_url": "invalid_still_image"} @@ -585,6 +592,8 @@ async def test_form_stream_timeout( "homeassistant.components.generic.config_flow.create_stream" ) as create_stream: create_stream.return_value.start = AsyncMock() + create_stream.return_value.stop = AsyncMock() + create_stream.return_value.hass = hass create_stream.return_value.add_provider.return_value.part_recv = AsyncMock() create_stream.return_value.add_provider.return_value.part_recv.return_value = ( False @@ -727,6 +736,37 @@ async def test_form_oserror(hass: HomeAssistant, user_flow: ConfigFlowResult) -> ) +@respx.mock +async def test_form_stream_preview_auto_timeout( + hass: HomeAssistant, + user_flow: ConfigFlowResult, + mock_create_stream: _patch[MagicMock], + freezer: FrozenDateTimeFactory, + fakeimgbytes_png: bytes, +) -> None: + """Test that the stream preview times out after 10mins.""" + respx.get("http://fred_flintstone:bambam@127.0.0.1/testurl/2").respond( + stream=fakeimgbytes_png + ) + data = TESTDATA.copy() + data.pop(CONF_STILL_IMAGE_URL) + + with mock_create_stream as mock_stream: + result1 = await hass.config_entries.flow.async_configure( + user_flow["flow_id"], + data, + ) + assert result1["type"] is FlowResultType.FORM + assert result1["step_id"] == "user_confirm" + + freezer.tick(600 + 12) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_str = mock_stream.return_value + mock_str.start.assert_awaited_once() + + @respx.mock async def test_options_template_error( hass: HomeAssistant, fakeimgbytes_png: bytes, mock_create_stream: _patch[MagicMock] @@ -842,7 +882,6 @@ async def test_options_only_stream( ) mock_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_entry.entry_id) - await hass.async_block_till_done() result = await hass.config_entries.options.async_init(mock_entry.entry_id) assert result["type"] is FlowResultType.FORM @@ -864,6 +903,27 @@ async def test_options_only_stream( assert result3["data"][CONF_CONTENT_TYPE] == "image/jpeg" +@respx.mock +@pytest.mark.usefixtures("fakeimg_png") +async def test_form_options_stream_worker_error( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: + """Test we handle a StreamWorkerError and pass the message through.""" + + result = await hass.config_entries.options.async_init(config_entry.entry_id) + with patch( + "homeassistant.components.generic.config_flow.create_stream", + side_effect=StreamWorkerError("Some message"), + ): + result2 = await hass.config_entries.options.async_configure( + result["flow_id"], + TESTDATA, + ) + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"stream_source": "unknown_with_details"} + 
assert result2["description_placeholders"] == {"error": "Some message"} + + @pytest.mark.usefixtures("fakeimg_png") async def test_unload_entry(hass: HomeAssistant) -> None: """Test unloading the generic IP Camera entry.""" From 93c0eb73d29540afc24ee6fd8ebb68e14f269498 Mon Sep 17 00:00:00 2001 From: jesperraemaekers <146726232+jesperraemaekers@users.noreply.github.com> Date: Sun, 22 Dec 2024 17:44:15 +0100 Subject: [PATCH 1009/1198] Bump Weheat to 2024.12.22 (#133796) --- homeassistant/components/weheat/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/weheat/manifest.json b/homeassistant/components/weheat/manifest.json index 61d6a110dbd..1c6242de29c 100644 --- a/homeassistant/components/weheat/manifest.json +++ b/homeassistant/components/weheat/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/weheat", "iot_class": "cloud_polling", - "requirements": ["weheat==2024.11.26"] + "requirements": ["weheat==2024.12.22"] } diff --git a/requirements_all.txt b/requirements_all.txt index 56255fc997e..c0f40a189d1 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3006,7 +3006,7 @@ webio-api==0.1.11 webmin-xmlrpc==0.0.2 # homeassistant.components.weheat -weheat==2024.11.26 +weheat==2024.12.22 # homeassistant.components.whirlpool whirlpool-sixth-sense==0.18.8 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d80ad1320f5..928a887b08b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2410,7 +2410,7 @@ webio-api==0.1.11 webmin-xmlrpc==0.0.2 # homeassistant.components.weheat -weheat==2024.11.26 +weheat==2024.12.22 # homeassistant.components.whirlpool whirlpool-sixth-sense==0.18.8 From 6179da43215d94b38a91f802acd79af69b61f628 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 22 Dec 2024 07:16:47 -1000 Subject: [PATCH 1010/1198] Bump PySwitchbot to 0.55.3 (#133812) Co-authored-by: Joost Lekkerkerker --- homeassistant/components/switchbot/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/switchbot/manifest.json b/homeassistant/components/switchbot/manifest.json index 5c91a6e20a5..3153e181af9 100644 --- a/homeassistant/components/switchbot/manifest.json +++ b/homeassistant/components/switchbot/manifest.json @@ -39,5 +39,5 @@ "documentation": "https://www.home-assistant.io/integrations/switchbot", "iot_class": "local_push", "loggers": ["switchbot"], - "requirements": ["PySwitchbot==0.55.2"] + "requirements": ["PySwitchbot==0.55.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index c0f40a189d1..41f0515c83f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -84,7 +84,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.55.2 +PySwitchbot==0.55.3 # homeassistant.components.switchmate PySwitchmate==0.5.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 928a887b08b..4eaa5cdadec 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -81,7 +81,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.55.2 +PySwitchbot==0.55.3 # homeassistant.components.syncthru PySyncThru==0.7.10 From 0ba32e1d3ad45b7189d4ff05a70a6f27b2531de5 Mon Sep 17 00:00:00 2001 From: Steven Looman Date: Sun, 22 Dec 2024 18:18:05 +0100 Subject: [PATCH 1011/1198] Bump async-upnp-client to 0.42.0 (#133806) --- homeassistant/components/dlna_dmr/manifest.json | 2 +- homeassistant/components/dlna_dms/manifest.json | 2 +- homeassistant/components/samsungtv/manifest.json | 2 +- homeassistant/components/ssdp/manifest.json | 2 +- homeassistant/components/upnp/manifest.json | 2 +- homeassistant/components/yeelight/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 9 files changed, 9 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/dlna_dmr/manifest.json b/homeassistant/components/dlna_dmr/manifest.json index 84024d5bde1..af16379e9c9 100644 --- a/homeassistant/components/dlna_dmr/manifest.json +++ b/homeassistant/components/dlna_dmr/manifest.json @@ -8,7 +8,7 @@ "documentation": "https://www.home-assistant.io/integrations/dlna_dmr", "iot_class": "local_push", "loggers": ["async_upnp_client"], - "requirements": ["async-upnp-client==0.41.0", "getmac==0.9.4"], + "requirements": ["async-upnp-client==0.42.0", "getmac==0.9.4"], "ssdp": [ { "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1", diff --git a/homeassistant/components/dlna_dms/manifest.json b/homeassistant/components/dlna_dms/manifest.json index 1913bb9d5d7..ac5bf3719e3 100644 --- a/homeassistant/components/dlna_dms/manifest.json +++ b/homeassistant/components/dlna_dms/manifest.json @@ -7,7 +7,7 @@ "dependencies": ["ssdp"], "documentation": "https://www.home-assistant.io/integrations/dlna_dms", "iot_class": "local_polling", - "requirements": ["async-upnp-client==0.41.0"], + "requirements": ["async-upnp-client==0.42.0"], "ssdp": [ { "deviceType": "urn:schemas-upnp-org:device:MediaServer:1", diff --git a/homeassistant/components/samsungtv/manifest.json b/homeassistant/components/samsungtv/manifest.json index 1a6b5ed5313..a1fda25589e 100644 --- 
a/homeassistant/components/samsungtv/manifest.json +++ b/homeassistant/components/samsungtv/manifest.json @@ -39,7 +39,7 @@ "samsungctl[websocket]==0.7.1", "samsungtvws[async,encrypted]==2.7.2", "wakeonlan==2.1.0", - "async-upnp-client==0.41.0" + "async-upnp-client==0.42.0" ], "ssdp": [ { diff --git a/homeassistant/components/ssdp/manifest.json b/homeassistant/components/ssdp/manifest.json index e9d4f57d5fb..2632e37aa98 100644 --- a/homeassistant/components/ssdp/manifest.json +++ b/homeassistant/components/ssdp/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["async_upnp_client"], "quality_scale": "internal", - "requirements": ["async-upnp-client==0.41.0"] + "requirements": ["async-upnp-client==0.42.0"] } diff --git a/homeassistant/components/upnp/manifest.json b/homeassistant/components/upnp/manifest.json index b0b4fe35b39..08e0be2d712 100644 --- a/homeassistant/components/upnp/manifest.json +++ b/homeassistant/components/upnp/manifest.json @@ -8,7 +8,7 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["async_upnp_client"], - "requirements": ["async-upnp-client==0.41.0", "getmac==0.9.4"], + "requirements": ["async-upnp-client==0.42.0", "getmac==0.9.4"], "ssdp": [ { "st": "urn:schemas-upnp-org:device:InternetGatewayDevice:1" diff --git a/homeassistant/components/yeelight/manifest.json b/homeassistant/components/yeelight/manifest.json index 4da2e0cfc3e..eba970dc2db 100644 --- a/homeassistant/components/yeelight/manifest.json +++ b/homeassistant/components/yeelight/manifest.json @@ -16,7 +16,7 @@ }, "iot_class": "local_push", "loggers": ["async_upnp_client", "yeelight"], - "requirements": ["yeelight==0.7.14", "async-upnp-client==0.41.0"], + "requirements": ["yeelight==0.7.14", "async-upnp-client==0.42.0"], "zeroconf": [ { "type": "_miio._udp.local.", diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index bfa479b9c13..d5731041d08 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -10,7 +10,7 @@ aiohttp_cors==0.7.0 aiozoneinfo==0.2.1 astral==2.2 async-interrupt==1.2.0 -async-upnp-client==0.41.0 +async-upnp-client==0.42.0 atomicwrites-homeassistant==1.4.1 attrs==24.2.0 audioop-lts==0.2.1;python_version>='3.13' diff --git a/requirements_all.txt b/requirements_all.txt index 41f0515c83f..2381e18a42d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -499,7 +499,7 @@ asmog==0.0.6 # homeassistant.components.ssdp # homeassistant.components.upnp # homeassistant.components.yeelight -async-upnp-client==0.41.0 +async-upnp-client==0.42.0 # homeassistant.components.arve asyncarve==0.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4eaa5cdadec..db166894f00 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -463,7 +463,7 @@ arcam-fmj==1.5.2 # homeassistant.components.ssdp # homeassistant.components.upnp # homeassistant.components.yeelight -async-upnp-client==0.41.0 +async-upnp-client==0.42.0 # homeassistant.components.arve asyncarve==0.1.1 From feca7c28cfd056aa0b5de715d92c5cc023aa8be6 Mon Sep 17 00:00:00 2001 From: "Barry vd. 
Heuvel" Date: Sun, 22 Dec 2024 18:45:33 +0100 Subject: [PATCH 1012/1198] Add Compressor, Inside Unit and Energy Output fields to Weheat (#129632) --- homeassistant/components/weheat/icons.json | 6 + homeassistant/components/weheat/sensor.py | 30 +++- homeassistant/components/weheat/strings.json | 9 ++ tests/components/weheat/conftest.py | 3 + .../weheat/snapshots/test_sensor.ambr | 151 ++++++++++++++++++ tests/components/weheat/test_sensor.py | 2 +- 6 files changed, 199 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/weheat/icons.json b/homeassistant/components/weheat/icons.json index 6fdae84cfff..7efd13b0dfb 100644 --- a/homeassistant/components/weheat/icons.json +++ b/homeassistant/components/weheat/icons.json @@ -27,6 +27,12 @@ }, "electricity_used": { "default": "mdi:flash" + }, + "compressor_rpm": { + "default": "mdi:fan" + }, + "compressor_percentage": { + "default": "mdi:fan" } } } diff --git a/homeassistant/components/weheat/sensor.py b/homeassistant/components/weheat/sensor.py index ef5be9030b9..3e5d9376c34 100644 --- a/homeassistant/components/weheat/sensor.py +++ b/homeassistant/components/weheat/sensor.py @@ -11,7 +11,13 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.const import UnitOfEnergy, UnitOfPower, UnitOfTemperature +from homeassistant.const import ( + PERCENTAGE, + REVOLUTIONS_PER_MINUTE, + UnitOfEnergy, + UnitOfPower, + UnitOfTemperature, +) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType @@ -142,6 +148,28 @@ SENSORS = [ state_class=SensorStateClass.TOTAL_INCREASING, value_fn=lambda status: status.energy_total, ), + WeHeatSensorEntityDescription( + translation_key="energy_output", + key="energy_output", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda status: status.energy_output, + ), + WeHeatSensorEntityDescription( + translation_key="compressor_rpm", + key="compressor_rpm", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, + value_fn=lambda status: status.compressor_rpm, + ), + WeHeatSensorEntityDescription( + translation_key="compressor_percentage", + key="compressor_percentage", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + value_fn=lambda status: status.compressor_percentage, + ), ] diff --git a/homeassistant/components/weheat/strings.json b/homeassistant/components/weheat/strings.json index 0733024cbed..c993a6beefe 100644 --- a/homeassistant/components/weheat/strings.json +++ b/homeassistant/components/weheat/strings.json @@ -84,6 +84,15 @@ }, "electricity_used": { "name": "Electricity used" + }, + "energy_output": { + "name": "Total energy output" + }, + "compressor_rpm": { + "name": "Compressor speed" + }, + "compressor_percentage": { + "name": "Compressor usage" } } } diff --git a/tests/components/weheat/conftest.py b/tests/components/weheat/conftest.py index 6ecb64ffdf4..7169a3b56c8 100644 --- a/tests/components/weheat/conftest.py +++ b/tests/components/weheat/conftest.py @@ -121,6 +121,9 @@ def mock_weheat_heat_pump_instance() -> MagicMock: mock_heat_pump_instance.cop = 4.5 mock_heat_pump_instance.heat_pump_state = HeatPump.State.HEATING mock_heat_pump_instance.energy_total = 12345 + mock_heat_pump_instance.energy_output = 56789 + 
mock_heat_pump_instance.compressor_rpm = 4500 + mock_heat_pump_instance.compressor_percentage = 100 return mock_heat_pump_instance diff --git a/tests/components/weheat/snapshots/test_sensor.ambr b/tests/components/weheat/snapshots/test_sensor.ambr index 3bd4a254598..1a54711d6c5 100644 --- a/tests/components/weheat/snapshots/test_sensor.ambr +++ b/tests/components/weheat/snapshots/test_sensor.ambr @@ -123,6 +123,106 @@ 'state': '33', }) # --- +# name: test_all_entities[sensor.test_model_compressor_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_model_compressor_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Compressor speed', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'compressor_rpm', + 'unique_id': '0000-1111-2222-3333_compressor_rpm', + 'unit_of_measurement': 'rpm', + }) +# --- +# name: test_all_entities[sensor.test_model_compressor_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Model Compressor speed', + 'state_class': , + 'unit_of_measurement': 'rpm', + }), + 'context': , + 'entity_id': 'sensor.test_model_compressor_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4500', + }) +# --- +# name: test_all_entities[sensor.test_model_compressor_usage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_model_compressor_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Compressor usage', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'compressor_percentage', + 'unique_id': '0000-1111-2222-3333_compressor_percentage', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[sensor.test_model_compressor_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Model Compressor usage', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_model_compressor_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- # name: test_all_entities[sensor.test_model_cop-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -604,6 +704,57 @@ 'state': '21', }) # --- +# name: test_all_entities[sensor.test_model_total_energy_output-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_model_total_energy_output', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy output', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_output', + 'unique_id': '0000-1111-2222-3333_energy_output', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.test_model_total_energy_output-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Test Model Total energy output', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_model_total_energy_output', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '56789', + }) +# --- # name: test_all_entities[sensor.test_model_water_inlet_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/weheat/test_sensor.py b/tests/components/weheat/test_sensor.py index d9055addc67..062b84d0423 100644 --- a/tests/components/weheat/test_sensor.py +++ b/tests/components/weheat/test_sensor.py @@ -34,7 +34,7 @@ async def test_all_entities( await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) -@pytest.mark.parametrize(("has_dhw", "nr_of_entities"), [(False, 12), (True, 14)]) +@pytest.mark.parametrize(("has_dhw", "nr_of_entities"), [(False, 15), (True, 17)]) async def test_create_entities( hass: HomeAssistant, mock_weheat_discover: AsyncMock, From 0e9965150e7cc0fad81531b1bff84355b524608a Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sun, 22 Dec 2024 19:00:49 +0100 Subject: [PATCH 1013/1198] Show device name in Twinkly discovery (#133814) --- homeassistant/components/twinkly/config_flow.py | 3 +++ homeassistant/components/twinkly/strings.json | 1 + 2 files changed, 4 insertions(+) diff --git a/homeassistant/components/twinkly/config_flow.py b/homeassistant/components/twinkly/config_flow.py index 4dec8809f07..53ba8f084c3 100644 --- a/homeassistant/components/twinkly/config_flow.py +++ b/homeassistant/components/twinkly/config_flow.py @@ -80,6 +80,9 @@ class TwinklyConfigFlow(ConfigFlow, domain=DOMAIN): return self._create_entry_from_device(device_info, host) self._set_confirm_only() + self.context["title_placeholders"] = { + "name": device_info[DEV_NAME], + } placeholders = { "model": device_info[DEV_MODEL], "name": device_info[DEV_NAME], diff --git a/homeassistant/components/twinkly/strings.json b/homeassistant/components/twinkly/strings.json index d27de8a75de..bbc3d67373d 100644 --- a/homeassistant/components/twinkly/strings.json +++ b/homeassistant/components/twinkly/strings.json @@ -1,5 +1,6 @@ { "config": { + "flow_title": "{name}", "step": { "user": { "data": { From d9948847260d22f25d7c144c10d09ba02a91d4f4 Mon Sep 17 00:00:00 2001 From: "Barry vd. 
Heuvel" Date: Sun, 22 Dec 2024 19:07:01 +0100 Subject: [PATCH 1014/1198] Add binary states for Weheat indoor unit (#133811) Co-authored-by: Joost Lekkerkerker --- homeassistant/components/weheat/__init__.py | 2 +- .../components/weheat/binary_sensor.py | 100 ++++++++++ homeassistant/components/weheat/icons.json | 17 ++ homeassistant/components/weheat/strings.json | 17 ++ tests/components/weheat/conftest.py | 5 + .../weheat/snapshots/test_binary_sensor.ambr | 188 ++++++++++++++++++ tests/components/weheat/test_binary_sensor.py | 52 +++++ 7 files changed, 380 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/weheat/binary_sensor.py create mode 100644 tests/components/weheat/snapshots/test_binary_sensor.ambr create mode 100644 tests/components/weheat/test_binary_sensor.py diff --git a/homeassistant/components/weheat/__init__.py b/homeassistant/components/weheat/__init__.py index d924d6ceaab..a043a3a6845 100644 --- a/homeassistant/components/weheat/__init__.py +++ b/homeassistant/components/weheat/__init__.py @@ -17,7 +17,7 @@ from homeassistant.helpers.config_entry_oauth2_flow import ( from .const import API_URL, LOGGER from .coordinator import WeheatDataUpdateCoordinator -PLATFORMS: list[Platform] = [Platform.SENSOR] +PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR] type WeheatConfigEntry = ConfigEntry[list[WeheatDataUpdateCoordinator]] diff --git a/homeassistant/components/weheat/binary_sensor.py b/homeassistant/components/weheat/binary_sensor.py new file mode 100644 index 00000000000..ea939227e77 --- /dev/null +++ b/homeassistant/components/weheat/binary_sensor.py @@ -0,0 +1,100 @@ +"""Binary sensor platform for Weheat integration.""" + +from collections.abc import Callable +from dataclasses import dataclass + +from weheat.abstractions.heat_pump import HeatPump + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType + +from . 
import WeheatConfigEntry +from .coordinator import WeheatDataUpdateCoordinator +from .entity import WeheatEntity + + +@dataclass(frozen=True, kw_only=True) +class WeHeatBinarySensorEntityDescription(BinarySensorEntityDescription): + """Describes Weheat binary sensor entity.""" + + value_fn: Callable[[HeatPump], StateType] + + +BINARY_SENSORS = [ + WeHeatBinarySensorEntityDescription( + translation_key="indoor_unit_water_pump_state", + key="indoor_unit_water_pump_state", + device_class=BinarySensorDeviceClass.RUNNING, + value_fn=lambda status: status.indoor_unit_water_pump_state, + ), + WeHeatBinarySensorEntityDescription( + translation_key="indoor_unit_auxiliary_pump_state", + key="indoor_unit_auxiliary_pump_state", + device_class=BinarySensorDeviceClass.RUNNING, + value_fn=lambda status: status.indoor_unit_auxiliary_pump_state, + ), + WeHeatBinarySensorEntityDescription( + translation_key="indoor_unit_dhw_valve_or_pump_state", + key="indoor_unit_dhw_valve_or_pump_state", + device_class=BinarySensorDeviceClass.RUNNING, + value_fn=lambda status: status.indoor_unit_dhw_valve_or_pump_state, + ), + WeHeatBinarySensorEntityDescription( + translation_key="indoor_unit_gas_boiler_state", + key="indoor_unit_gas_boiler_state", + value_fn=lambda status: status.indoor_unit_gas_boiler_state, + ), + WeHeatBinarySensorEntityDescription( + translation_key="indoor_unit_electric_heater_state", + key="indoor_unit_electric_heater_state", + device_class=BinarySensorDeviceClass.RUNNING, + value_fn=lambda status: status.indoor_unit_electric_heater_state, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: WeheatConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the sensors for weheat heat pump.""" + entities = [ + WeheatHeatPumpBinarySensor(coordinator, entity_description) + for entity_description in BINARY_SENSORS + for coordinator in entry.runtime_data + if entity_description.value_fn(coordinator.data) is not None + ] + + async_add_entities(entities) + + +class WeheatHeatPumpBinarySensor(WeheatEntity, BinarySensorEntity): + """Defines a Weheat heat pump binary sensor.""" + + coordinator: WeheatDataUpdateCoordinator + entity_description: WeHeatBinarySensorEntityDescription + + def __init__( + self, + coordinator: WeheatDataUpdateCoordinator, + entity_description: WeHeatBinarySensorEntityDescription, + ) -> None: + """Pass coordinator to CoordinatorEntity.""" + super().__init__(coordinator) + + self.entity_description = entity_description + + self._attr_unique_id = f"{coordinator.heatpump_id}_{entity_description.key}" + + @property + def is_on(self) -> bool | None: + """Return True if the binary sensor is on.""" + value = self.entity_description.value_fn(self.coordinator.data) + return bool(value) if value is not None else None diff --git a/homeassistant/components/weheat/icons.json b/homeassistant/components/weheat/icons.json index 7efd13b0dfb..e7f54b478c6 100644 --- a/homeassistant/components/weheat/icons.json +++ b/homeassistant/components/weheat/icons.json @@ -1,5 +1,22 @@ { "entity": { + "binary_sensor": { + "indoor_unit_water_pump_state": { + "default": "mdi:pump" + }, + "indoor_unit_auxiliary_pump_state": { + "default": "mdi:pump" + }, + "indoor_unit_dhw_valve_or_pump_state": { + "default": "mdi:pump" + }, + "indoor_unit_gas_boiler_state": { + "default": "mdi:toggle-switch" + }, + "indoor_unit_electric_heater_state": { + "default": "mdi:heating-coil" + } + }, "sensor": { "power_output": { "default": "mdi:heat-wave" diff --git 
a/homeassistant/components/weheat/strings.json b/homeassistant/components/weheat/strings.json index c993a6beefe..2a208c2f8ca 100644 --- a/homeassistant/components/weheat/strings.json +++ b/homeassistant/components/weheat/strings.json @@ -32,6 +32,23 @@ } }, "entity": { + "binary_sensor": { + "indoor_unit_water_pump_state": { + "name": "Indoor unit water pump" + }, + "indoor_unit_auxiliary_pump_state": { + "name": "Indoor unit auxilary water pump" + }, + "indoor_unit_dhw_valve_or_pump_state": { + "name": "Indoor unit DHW valve or water pump" + }, + "indoor_unit_gas_boiler_state": { + "name": "Indoor unit gas boiler heating allowed" + }, + "indoor_unit_electric_heater_state": { + "name": "Indoor unit electric heater" + } + }, "sensor": { "power_output": { "name": "Output power" diff --git a/tests/components/weheat/conftest.py b/tests/components/weheat/conftest.py index 7169a3b56c8..1bbe91fc573 100644 --- a/tests/components/weheat/conftest.py +++ b/tests/components/weheat/conftest.py @@ -124,6 +124,11 @@ def mock_weheat_heat_pump_instance() -> MagicMock: mock_heat_pump_instance.energy_output = 56789 mock_heat_pump_instance.compressor_rpm = 4500 mock_heat_pump_instance.compressor_percentage = 100 + mock_heat_pump_instance.indoor_unit_water_pump_state = False + mock_heat_pump_instance.indoor_unit_auxiliary_pump_state = False + mock_heat_pump_instance.indoor_unit_dhw_valve_or_pump_state = None + mock_heat_pump_instance.indoor_unit_gas_boiler_state = False + mock_heat_pump_instance.indoor_unit_electric_heater_state = True return mock_heat_pump_instance diff --git a/tests/components/weheat/snapshots/test_binary_sensor.ambr b/tests/components/weheat/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..08d609ca610 --- /dev/null +++ b/tests/components/weheat/snapshots/test_binary_sensor.ambr @@ -0,0 +1,188 @@ +# serializer version: 1 +# name: test_binary_entities[binary_sensor.test_model_indoor_unit_auxilary_water_pump-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_model_indoor_unit_auxilary_water_pump', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Indoor unit auxilary water pump', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'indoor_unit_auxiliary_pump_state', + 'unique_id': '0000-1111-2222-3333_indoor_unit_auxiliary_pump_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_entities[binary_sensor.test_model_indoor_unit_auxilary_water_pump-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Test Model Indoor unit auxilary water pump', + }), + 'context': , + 'entity_id': 'binary_sensor.test_model_indoor_unit_auxilary_water_pump', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_entities[binary_sensor.test_model_indoor_unit_electric_heater-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 
'entity_id': 'binary_sensor.test_model_indoor_unit_electric_heater', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Indoor unit electric heater', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'indoor_unit_electric_heater_state', + 'unique_id': '0000-1111-2222-3333_indoor_unit_electric_heater_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_entities[binary_sensor.test_model_indoor_unit_electric_heater-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Test Model Indoor unit electric heater', + }), + 'context': , + 'entity_id': 'binary_sensor.test_model_indoor_unit_electric_heater', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_entities[binary_sensor.test_model_indoor_unit_gas_boiler_heating_allowed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_model_indoor_unit_gas_boiler_heating_allowed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Indoor unit gas boiler heating allowed', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'indoor_unit_gas_boiler_state', + 'unique_id': '0000-1111-2222-3333_indoor_unit_gas_boiler_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_entities[binary_sensor.test_model_indoor_unit_gas_boiler_heating_allowed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Model Indoor unit gas boiler heating allowed', + }), + 'context': , + 'entity_id': 'binary_sensor.test_model_indoor_unit_gas_boiler_heating_allowed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_entities[binary_sensor.test_model_indoor_unit_water_pump-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_model_indoor_unit_water_pump', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Indoor unit water pump', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'indoor_unit_water_pump_state', + 'unique_id': '0000-1111-2222-3333_indoor_unit_water_pump_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_entities[binary_sensor.test_model_indoor_unit_water_pump-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Test Model Indoor unit water pump', + }), + 'context': , + 'entity_id': 'binary_sensor.test_model_indoor_unit_water_pump', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/weheat/test_binary_sensor.py b/tests/components/weheat/test_binary_sensor.py new file mode 100644 index 00000000000..e75cb282e24 --- /dev/null +++ b/tests/components/weheat/test_binary_sensor.py @@ -0,0 +1,52 @@ +"""Tests for the weheat sensor platform.""" + +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import SnapshotAssertion +from weheat.abstractions.discovery import HeatPumpDiscovery + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_binary_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_weheat_discover: AsyncMock, + mock_weheat_heat_pump: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.weheat.PLATFORMS", [Platform.BINARY_SENSOR]): + await setup_integration(hass, mock_config_entry) + + await hass.async_block_till_done() + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_create_binary_entities( + hass: HomeAssistant, + mock_weheat_discover: AsyncMock, + mock_weheat_heat_pump: AsyncMock, + mock_heat_pump_info: HeatPumpDiscovery.HeatPumpInfo, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test creating entities.""" + mock_weheat_discover.return_value = [mock_heat_pump_info] + + with patch("homeassistant.components.weheat.PLATFORMS", [Platform.BINARY_SENSOR]): + await setup_integration(hass, mock_config_entry) + + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 4 From 84a3a9d495e80ccbacd0d601c9153c0ede718a2e Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sun, 22 Dec 2024 19:25:55 +0100 Subject: [PATCH 1015/1198] Add select error handling for Peblar Rocksolid EV Chargers (#133804) --- homeassistant/components/peblar/select.py | 2 + tests/components/peblar/test_select.py | 155 +++++++++++++++++++++- 2 files changed, 154 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/peblar/select.py b/homeassistant/components/peblar/select.py index e9c7da77bec..a2a0997a797 100644 --- a/homeassistant/components/peblar/select.py +++ b/homeassistant/components/peblar/select.py @@ -15,6 +15,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .coordinator import PeblarConfigEntry, PeblarUserConfigurationDataUpdateCoordinator from .entity import PeblarEntity +from .helpers import peblar_exception_handler PARALLEL_UPDATES = 1 @@ -74,6 +75,7 @@ class PeblarSelectEntity( """Return the selected entity option to represent the entity state.""" return self.entity_description.current_fn(self.coordinator.data) + @peblar_exception_handler async def async_select_option(self, option: str) -> None: """Change the selected option.""" await self.entity_description.select_fn(self.coordinator.peblar, option) diff --git a/tests/components/peblar/test_select.py b/tests/components/peblar/test_select.py index e20d84da755..5e4ab4609d4 100644 --- a/tests/components/peblar/test_select.py +++ b/tests/components/peblar/test_select.py @@ -1,18 +1,36 @@ """Tests for the Peblar select platform.""" +from 
unittest.mock import MagicMock + +from peblar import ( + PeblarAuthenticationError, + PeblarConnectionError, + PeblarError, + SmartChargingMode, +) import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.peblar.const import DOMAIN -from homeassistant.const import Platform +from homeassistant.components.select import ( + ATTR_OPTION, + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er from tests.common import MockConfigEntry, snapshot_platform +pytestmark = [ + pytest.mark.parametrize("init_integration", [Platform.SELECT], indirect=True), + pytest.mark.usefixtures("init_integration"), +] + -@pytest.mark.parametrize("init_integration", [Platform.SELECT], indirect=True) -@pytest.mark.usefixtures("init_integration") async def test_entities( hass: HomeAssistant, snapshot: SnapshotAssertion, @@ -33,3 +51,134 @@ async def test_entities( ) for entity_entry in entity_entries: assert entity_entry.device_id == device_entry.id + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_select_option( + hass: HomeAssistant, + mock_peblar: MagicMock, +) -> None: + """Test the Peblar EV charger selects.""" + entity_id = "select.peblar_ev_charger_smart_charging" + mocked_method = mock_peblar.smart_charging + mocked_method.reset_mock() + + # Test normal happy path for changing the select option + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: entity_id, + ATTR_OPTION: "default", + }, + blocking=True, + ) + + assert len(mocked_method.mock_calls) == 1 + mocked_method.assert_called_with(SmartChargingMode.DEFAULT) + + +@pytest.mark.parametrize( + ("error", "error_match", "translation_key", "translation_placeholders"), + [ + ( + PeblarConnectionError("Could not connect"), + ( + r"An error occurred while communicating " + r"with the Peblar device: Could not connect" + ), + "communication_error", + {"error": "Could not connect"}, + ), + ( + PeblarError("Unknown error"), + ( + r"An unknown error occurred while communicating " + r"with the Peblar device: Unknown error" + ), + "unknown_error", + {"error": "Unknown error"}, + ), + ], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_select_option_communication_error( + hass: HomeAssistant, + mock_peblar: MagicMock, + mock_config_entry: MockConfigEntry, + error: Exception, + error_match: str, + translation_key: str, + translation_placeholders: dict, +) -> None: + """Test the Peblar EV charger when a communication error occurs.""" + entity_id = "select.peblar_ev_charger_smart_charging" + mock_peblar.smart_charging.side_effect = error + + with pytest.raises( + HomeAssistantError, + match=error_match, + ) as excinfo: + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: entity_id, + ATTR_OPTION: "default", + }, + blocking=True, + ) + + assert excinfo.value.translation_domain == DOMAIN + assert excinfo.value.translation_key == translation_key + assert excinfo.value.translation_placeholders == translation_placeholders + + +async def test_select_option_authentication_error( + hass: HomeAssistant, + mock_peblar: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test 
the Peblar EV charger when an authentication error occurs.""" + entity_id = "select.peblar_ev_charger_smart_charging" + mock_peblar.smart_charging.side_effect = PeblarAuthenticationError( + "Authentication error" + ) + mock_peblar.login.side_effect = PeblarAuthenticationError("Authentication error") + + with pytest.raises( + HomeAssistantError, + match=( + r"An authentication failure occurred while communicating " + r"with the Peblar device" + ), + ) as excinfo: + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: entity_id, + ATTR_OPTION: "default", + }, + blocking=True, + ) + + assert excinfo.value.translation_domain == DOMAIN + assert excinfo.value.translation_key == "authentication_error" + assert not excinfo.value.translation_placeholders + + # Ensure the device is reloaded on authentication error and triggers + # a reauthentication flow. + await hass.async_block_till_done() + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow["step_id"] == "reauth_confirm" + assert flow["handler"] == DOMAIN + + assert "context" in flow + assert flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == mock_config_entry.entry_id From a2aba77973acd4de93b3cee354b19ae951606933 Mon Sep 17 00:00:00 2001 From: Simon <80467011+sorgfresser@users.noreply.github.com> Date: Sun, 22 Dec 2024 18:26:15 +0000 Subject: [PATCH 1016/1198] Fix typo in ElevenLabs (#133819) --- homeassistant/components/elevenlabs/__init__.py | 10 ++++------ homeassistant/components/elevenlabs/config_flow.py | 6 +++--- homeassistant/components/elevenlabs/tts.py | 4 ++-- 3 files changed, 9 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/elevenlabs/__init__.py b/homeassistant/components/elevenlabs/__init__.py index 84b2b61b8ed..e8a378d56c6 100644 --- a/homeassistant/components/elevenlabs/__init__.py +++ b/homeassistant/components/elevenlabs/__init__.py @@ -35,10 +35,10 @@ class ElevenLabsData: model: Model -type EleventLabsConfigEntry = ConfigEntry[ElevenLabsData] +type ElevenLabsConfigEntry = ConfigEntry[ElevenLabsData] -async def async_setup_entry(hass: HomeAssistant, entry: EleventLabsConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: ElevenLabsConfigEntry) -> bool: """Set up ElevenLabs text-to-speech from a config entry.""" entry.add_update_listener(update_listener) httpx_client = get_async_client(hass) @@ -60,15 +60,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: EleventLabsConfigEntry) return True -async def async_unload_entry( - hass: HomeAssistant, entry: EleventLabsConfigEntry -) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: ElevenLabsConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) async def update_listener( - hass: HomeAssistant, config_entry: EleventLabsConfigEntry + hass: HomeAssistant, config_entry: ElevenLabsConfigEntry ) -> None: """Handle options update.""" await hass.config_entries.async_reload(config_entry.entry_id) diff --git a/homeassistant/components/elevenlabs/config_flow.py b/homeassistant/components/elevenlabs/config_flow.py index 60df79d6eaa..227749bf82c 100644 --- a/homeassistant/components/elevenlabs/config_flow.py +++ b/homeassistant/components/elevenlabs/config_flow.py @@ -19,7 +19,7 @@ from homeassistant.helpers.selector import ( SelectSelectorConfig, ) 
-from . import EleventLabsConfigEntry +from . import ElevenLabsConfigEntry from .const import ( CONF_CONFIGURE_VOICE, CONF_MODEL, @@ -92,7 +92,7 @@ class ElevenLabsConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod def async_get_options_flow( - config_entry: EleventLabsConfigEntry, + config_entry: ElevenLabsConfigEntry, ) -> OptionsFlow: """Create the options flow.""" return ElevenLabsOptionsFlow(config_entry) @@ -101,7 +101,7 @@ class ElevenLabsConfigFlow(ConfigFlow, domain=DOMAIN): class ElevenLabsOptionsFlow(OptionsFlow): """ElevenLabs options flow.""" - def __init__(self, config_entry: EleventLabsConfigEntry) -> None: + def __init__(self, config_entry: ElevenLabsConfigEntry) -> None: """Initialize options flow.""" self.api_key: str = config_entry.data[CONF_API_KEY] # id -> name diff --git a/homeassistant/components/elevenlabs/tts.py b/homeassistant/components/elevenlabs/tts.py index c96a7161b72..b89e966593f 100644 --- a/homeassistant/components/elevenlabs/tts.py +++ b/homeassistant/components/elevenlabs/tts.py @@ -22,7 +22,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import EleventLabsConfigEntry +from . import ElevenLabsConfigEntry from .const import ( CONF_OPTIMIZE_LATENCY, CONF_SIMILARITY, @@ -56,7 +56,7 @@ def to_voice_settings(options: MappingProxyType[str, Any]) -> VoiceSettings: async def async_setup_entry( hass: HomeAssistant, - config_entry: EleventLabsConfigEntry, + config_entry: ElevenLabsConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up ElevenLabs tts platform via config entry.""" From c6789d70a4e2d5a01d3938277577115c5e931bfd Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Sun, 22 Dec 2024 19:26:35 +0100 Subject: [PATCH 1017/1198] Remove unneeded type for enphase_envoy coordinator in async_unload_entry (#133817) --- homeassistant/components/enphase_envoy/__init__.py | 2 +- homeassistant/components/enphase_envoy/quality_scale.yaml | 6 +----- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/enphase_envoy/__init__.py b/homeassistant/components/enphase_envoy/__init__.py index f4fe4aff2cb..cdbb7080674 100644 --- a/homeassistant/components/enphase_envoy/__init__.py +++ b/homeassistant/components/enphase_envoy/__init__.py @@ -77,7 +77,7 @@ async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: async def async_unload_entry(hass: HomeAssistant, entry: EnphaseConfigEntry) -> bool: """Unload a config entry.""" - coordinator: EnphaseUpdateCoordinator = entry.runtime_data + coordinator = entry.runtime_data coordinator.async_cancel_token_refresh() return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/enphase_envoy/quality_scale.yaml b/homeassistant/components/enphase_envoy/quality_scale.yaml index 210491c031c..a7038b4e0da 100644 --- a/homeassistant/components/enphase_envoy/quality_scale.yaml +++ b/homeassistant/components/enphase_envoy/quality_scale.yaml @@ -35,11 +35,7 @@ rules: comment: no events used. 
entity-unique-id: done has-entity-name: done - runtime-data: - status: done - comment: | - async_unload_entry- coordinator: EnphaseUpdateCoordinator = entry.runtime_data - You can remove the EnphaseUpdateCoordinator as the type can now be inferred thanks to the typed config entry + runtime-data: done test-before-configure: done test-before-setup: done unique-config-entry: done From de5a49363e20b04bf459b9fb221e6e4adbf52463 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sun, 22 Dec 2024 19:47:31 +0100 Subject: [PATCH 1018/1198] Add switch error handling for Peblar Rocksolid EV Chargers (#133805) --- homeassistant/components/peblar/switch.py | 3 + tests/components/peblar/test_switch.py | 154 +++++++++++++++++++++- 2 files changed, 154 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/peblar/switch.py b/homeassistant/components/peblar/switch.py index 88f52d01e3a..e56c2fcdaec 100644 --- a/homeassistant/components/peblar/switch.py +++ b/homeassistant/components/peblar/switch.py @@ -20,6 +20,7 @@ from .coordinator import ( PeblarRuntimeData, ) from .entity import PeblarEntity +from .helpers import peblar_exception_handler PARALLEL_UPDATES = 1 @@ -78,11 +79,13 @@ class PeblarSwitchEntity( """Return state of the switch.""" return self.entity_description.is_on_fn(self.coordinator.data) + @peblar_exception_handler async def async_turn_on(self, **kwargs: Any) -> None: """Turn the entity on.""" await self.entity_description.set_fn(self.coordinator.api, True) await self.coordinator.async_request_refresh() + @peblar_exception_handler async def async_turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" await self.entity_description.set_fn(self.coordinator.api, False) diff --git a/tests/components/peblar/test_switch.py b/tests/components/peblar/test_switch.py index 7a8fcf7705b..6436ac78109 100644 --- a/tests/components/peblar/test_switch.py +++ b/tests/components/peblar/test_switch.py @@ -1,18 +1,31 @@ """Tests for the Peblar switch platform.""" +from unittest.mock import MagicMock + +from peblar import PeblarAuthenticationError, PeblarConnectionError, PeblarError import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.peblar.const import DOMAIN -from homeassistant.const import Platform +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er from tests.common import MockConfigEntry, snapshot_platform +pytestmark = [ + pytest.mark.parametrize("init_integration", [Platform.SWITCH], indirect=True), + pytest.mark.usefixtures("init_integration"), +] + -@pytest.mark.parametrize("init_integration", [Platform.SWITCH], indirect=True) -@pytest.mark.usefixtures("init_integration") async def test_entities( hass: HomeAssistant, snapshot: SnapshotAssertion, @@ -33,3 +46,138 @@ async def test_entities( ) for entity_entry in entity_entries: assert entity_entry.device_id == device_entry.id + + +@pytest.mark.parametrize( + ("service", "force_single_phase"), + [ + (SERVICE_TURN_ON, True), + (SERVICE_TURN_OFF, False), + ], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_switch( + hass: HomeAssistant, + mock_peblar: MagicMock, + service: str, + 
force_single_phase: bool, +) -> None: + """Test the Peblar EV charger switches.""" + entity_id = "switch.peblar_ev_charger_force_single_phase" + mocked_method = mock_peblar.rest_api.return_value.ev_interface + mocked_method.reset_mock() + + # Test normal happy path for changing the switch state + await hass.services.async_call( + SWITCH_DOMAIN, + service, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert len(mocked_method.mock_calls) == 2 + mocked_method.mock_calls[0].assert_called_with( + {"force_single_phase": force_single_phase} + ) + + +@pytest.mark.parametrize( + ("error", "error_match", "translation_key", "translation_placeholders"), + [ + ( + PeblarConnectionError("Could not connect"), + ( + r"An error occurred while communicating " + r"with the Peblar device: Could not connect" + ), + "communication_error", + {"error": "Could not connect"}, + ), + ( + PeblarError("Unknown error"), + ( + r"An unknown error occurred while communicating " + r"with the Peblar device: Unknown error" + ), + "unknown_error", + {"error": "Unknown error"}, + ), + ], +) +@pytest.mark.parametrize("service", [SERVICE_TURN_ON, SERVICE_TURN_OFF]) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_switch_communication_error( + hass: HomeAssistant, + mock_peblar: MagicMock, + error: Exception, + error_match: str, + translation_key: str, + translation_placeholders: dict, + service: str, +) -> None: + """Test the Peblar EV charger when a communication error occurs.""" + entity_id = "switch.peblar_ev_charger_force_single_phase" + mock_peblar.rest_api.return_value.ev_interface.side_effect = error + with pytest.raises( + HomeAssistantError, + match=error_match, + ) as excinfo: + await hass.services.async_call( + SWITCH_DOMAIN, + service, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert excinfo.value.translation_domain == DOMAIN + assert excinfo.value.translation_key == translation_key + assert excinfo.value.translation_placeholders == translation_placeholders + + +@pytest.mark.parametrize("service", [SERVICE_TURN_ON, SERVICE_TURN_OFF]) +async def test_switch_authentication_error( + hass: HomeAssistant, + mock_peblar: MagicMock, + mock_config_entry: MockConfigEntry, + service: str, +) -> None: + """Test the Peblar EV charger when an authentication error occurs.""" + entity_id = "switch.peblar_ev_charger_force_single_phase" + mock_peblar.rest_api.return_value.ev_interface.side_effect = ( + PeblarAuthenticationError("Authentication error") + ) + mock_peblar.login.side_effect = PeblarAuthenticationError("Authentication error") + + with pytest.raises( + HomeAssistantError, + match=( + r"An authentication failure occurred while communicating " + r"with the Peblar device" + ), + ) as excinfo: + await hass.services.async_call( + SWITCH_DOMAIN, + service, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert excinfo.value.translation_domain == DOMAIN + assert excinfo.value.translation_key == "authentication_error" + assert not excinfo.value.translation_placeholders + + # Ensure the device is reloaded on authentication error and triggers + # a reauthentication flow. 
+ await hass.async_block_till_done() + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow["step_id"] == "reauth_confirm" + assert flow["handler"] == DOMAIN + + assert "context" in flow + assert flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == mock_config_entry.entry_id From 0f1835139f04d75d6de56fc8b279e41743180e7f Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sun, 22 Dec 2024 19:53:00 +0100 Subject: [PATCH 1019/1198] Add number error handling for Peblar Rocksolid EV Chargers (#133803) --- homeassistant/components/peblar/number.py | 2 + tests/components/peblar/test_number.py | 148 +++++++++++++++++++++- 2 files changed, 147 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/peblar/number.py b/homeassistant/components/peblar/number.py index d17ff09eb94..d2983438a91 100644 --- a/homeassistant/components/peblar/number.py +++ b/homeassistant/components/peblar/number.py @@ -24,6 +24,7 @@ from .coordinator import ( PeblarRuntimeData, ) from .entity import PeblarEntity +from .helpers import peblar_exception_handler PARALLEL_UPDATES = 1 @@ -94,6 +95,7 @@ class PeblarNumberEntity( """Return the number value.""" return self.entity_description.value_fn(self.coordinator.data) + @peblar_exception_handler async def async_set_native_value(self, value: float) -> None: """Change to new number value.""" await self.entity_description.set_value_fn(self.coordinator.api, value) diff --git a/tests/components/peblar/test_number.py b/tests/components/peblar/test_number.py index 4c2ff928210..2a8fca46e91 100644 --- a/tests/components/peblar/test_number.py +++ b/tests/components/peblar/test_number.py @@ -1,18 +1,31 @@ """Tests for the Peblar number platform.""" +from unittest.mock import MagicMock + +from peblar import PeblarAuthenticationError, PeblarConnectionError, PeblarError import pytest from syrupy.assertion import SnapshotAssertion +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) from homeassistant.components.peblar.const import DOMAIN -from homeassistant.const import Platform +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er from tests.common import MockConfigEntry, snapshot_platform +pytestmark = [ + pytest.mark.parametrize("init_integration", [Platform.NUMBER], indirect=True), + pytest.mark.usefixtures("init_integration"), +] + -@pytest.mark.parametrize("init_integration", [Platform.NUMBER], indirect=True) -@pytest.mark.usefixtures("init_integration") async def test_entities( hass: HomeAssistant, snapshot: SnapshotAssertion, @@ -33,3 +46,132 @@ async def test_entities( ) for entity_entry in entity_entries: assert entity_entry.device_id == device_entry.id + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_number_set_value( + hass: HomeAssistant, + mock_peblar: MagicMock, +) -> None: + """Test the Peblar EV charger numbers.""" + entity_id = "number.peblar_ev_charger_charge_limit" + mocked_method = mock_peblar.rest_api.return_value.ev_interface + mocked_method.reset_mock() + + # Test normal happy path number value change + await hass.services.async_call( + NUMBER_DOMAIN, + 
SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_VALUE: 10, + }, + blocking=True, + ) + + assert len(mocked_method.mock_calls) == 2 + mocked_method.mock_calls[0].assert_called_with({"charge_current_limit": 10}) + + +@pytest.mark.parametrize( + ("error", "error_match", "translation_key", "translation_placeholders"), + [ + ( + PeblarConnectionError("Could not connect"), + ( + r"An error occurred while communicating " + r"with the Peblar device: Could not connect" + ), + "communication_error", + {"error": "Could not connect"}, + ), + ( + PeblarError("Unknown error"), + ( + r"An unknown error occurred while communicating " + r"with the Peblar device: Unknown error" + ), + "unknown_error", + {"error": "Unknown error"}, + ), + ], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_number_set_value_communication_error( + hass: HomeAssistant, + mock_peblar: MagicMock, + error: Exception, + error_match: str, + translation_key: str, + translation_placeholders: dict, +) -> None: + """Test the Peblar EV charger when a communication error occurs.""" + entity_id = "number.peblar_ev_charger_charge_limit" + mock_peblar.rest_api.return_value.ev_interface.side_effect = error + + with pytest.raises( + HomeAssistantError, + match=error_match, + ) as excinfo: + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_VALUE: 10, + }, + blocking=True, + ) + + assert excinfo.value.translation_domain == DOMAIN + assert excinfo.value.translation_key == translation_key + assert excinfo.value.translation_placeholders == translation_placeholders + + +async def test_number_set_value_authentication_error( + hass: HomeAssistant, + mock_peblar: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Peblar EV charger when an authentication error occurs.""" + entity_id = "number.peblar_ev_charger_charge_limit" + mock_peblar.rest_api.return_value.ev_interface.side_effect = ( + PeblarAuthenticationError("Authentication error") + ) + mock_peblar.login.side_effect = PeblarAuthenticationError("Authentication error") + with pytest.raises( + HomeAssistantError, + match=( + r"An authentication failure occurred while communicating " + r"with the Peblar device" + ), + ) as excinfo: + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_VALUE: 10, + }, + blocking=True, + ) + + assert excinfo.value.translation_domain == DOMAIN + assert excinfo.value.translation_key == "authentication_error" + assert not excinfo.value.translation_placeholders + + # Ensure the device is reloaded on authentication error and triggers + # a reauthentication flow. + await hass.async_block_till_done() + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow["step_id"] == "reauth_confirm" + assert flow["handler"] == DOMAIN + + assert "context" in flow + assert flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == mock_config_entry.entry_id From 6c70586f7e82d3d40d92881acd17e51f5b57f129 Mon Sep 17 00:00:00 2001 From: Raphael Hehl <7577984+RaHehl@users.noreply.github.com> Date: Sun, 22 Dec 2024 19:54:14 +0100 Subject: [PATCH 1020/1198] Add get_user_keyring_info service to UniFi Protect integration (#133138) Co-authored-by: J. 
Nick Koston --- .../components/unifiprotect/const.py | 7 ++ .../components/unifiprotect/icons.json | 3 + .../components/unifiprotect/services.py | 81 ++++++++++++++++++- .../components/unifiprotect/services.yaml | 7 ++ .../components/unifiprotect/strings.json | 10 +++ .../components/unifiprotect/test_services.py | 68 +++++++++++++++- 6 files changed, 171 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/unifiprotect/const.py b/homeassistant/components/unifiprotect/const.py index d607f87b76a..d041b713125 100644 --- a/homeassistant/components/unifiprotect/const.py +++ b/homeassistant/components/unifiprotect/const.py @@ -83,3 +83,10 @@ EVENT_TYPE_FINGERPRINT_IDENTIFIED: Final = "identified" EVENT_TYPE_FINGERPRINT_NOT_IDENTIFIED: Final = "not_identified" EVENT_TYPE_NFC_SCANNED: Final = "scanned" EVENT_TYPE_DOORBELL_RING: Final = "ring" + +KEYRINGS_ULP_ID: Final = "ulp_id" +KEYRINGS_USER_STATUS: Final = "user_status" +KEYRINGS_USER_FULL_NAME: Final = "full_name" +KEYRINGS_KEY_TYPE: Final = "key_type" +KEYRINGS_KEY_TYPE_ID_FINGERPRINT: Final = "fingerprint_id" +KEYRINGS_KEY_TYPE_ID_NFC: Final = "nfc_id" diff --git a/homeassistant/components/unifiprotect/icons.json b/homeassistant/components/unifiprotect/icons.json index 5e80e3095b3..b5e8277d82a 100644 --- a/homeassistant/components/unifiprotect/icons.json +++ b/homeassistant/components/unifiprotect/icons.json @@ -11,6 +11,9 @@ }, "remove_privacy_zone": { "service": "mdi:eye-minus" + }, + "get_user_keyring_info": { + "service": "mdi:key-chain" } } } diff --git a/homeassistant/components/unifiprotect/services.py b/homeassistant/components/unifiprotect/services.py index 35713efdf3d..6a1daef178e 100644 --- a/homeassistant/components/unifiprotect/services.py +++ b/homeassistant/components/unifiprotect/services.py @@ -13,7 +13,13 @@ import voluptuous as vol from homeassistant.components.binary_sensor import BinarySensorDeviceClass from homeassistant.const import ATTR_DEVICE_ID, ATTR_NAME, Platform -from homeassistant.core import HomeAssistant, ServiceCall, callback +from homeassistant.core import ( + HomeAssistant, + ServiceCall, + ServiceResponse, + SupportsResponse, + callback, +) from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import ( config_validation as cv, @@ -21,9 +27,19 @@ from homeassistant.helpers import ( entity_registry as er, ) from homeassistant.helpers.service import async_extract_referenced_entity_ids +from homeassistant.util.json import JsonValueType from homeassistant.util.read_only_dict import ReadOnlyDict -from .const import ATTR_MESSAGE, DOMAIN +from .const import ( + ATTR_MESSAGE, + DOMAIN, + KEYRINGS_KEY_TYPE, + KEYRINGS_KEY_TYPE_ID_FINGERPRINT, + KEYRINGS_KEY_TYPE_ID_NFC, + KEYRINGS_ULP_ID, + KEYRINGS_USER_FULL_NAME, + KEYRINGS_USER_STATUS, +) from .data import async_ufp_instance_for_config_entry_ids SERVICE_ADD_DOORBELL_TEXT = "add_doorbell_text" @@ -31,12 +47,14 @@ SERVICE_REMOVE_DOORBELL_TEXT = "remove_doorbell_text" SERVICE_SET_PRIVACY_ZONE = "set_privacy_zone" SERVICE_REMOVE_PRIVACY_ZONE = "remove_privacy_zone" SERVICE_SET_CHIME_PAIRED = "set_chime_paired_doorbells" +SERVICE_GET_USER_KEYRING_INFO = "get_user_keyring_info" ALL_GLOBAL_SERIVCES = [ SERVICE_ADD_DOORBELL_TEXT, SERVICE_REMOVE_DOORBELL_TEXT, SERVICE_SET_CHIME_PAIRED, SERVICE_REMOVE_PRIVACY_ZONE, + SERVICE_GET_USER_KEYRING_INFO, ] DOORBELL_TEXT_SCHEMA = vol.All( @@ -69,6 +87,15 @@ REMOVE_PRIVACY_ZONE_SCHEMA = vol.All( cv.has_at_least_one_key(ATTR_DEVICE_ID), ) 
+GET_USER_KEYRING_INFO_SCHEMA = vol.All( + vol.Schema( + { + **cv.ENTITY_SERVICE_FIELDS, + }, + ), + cv.has_at_least_one_key(ATTR_DEVICE_ID), +) + @callback def _async_get_ufp_instance(hass: HomeAssistant, device_id: str) -> ProtectApiClient: @@ -205,26 +232,70 @@ async def set_chime_paired_doorbells(call: ServiceCall) -> None: await chime.save_device(data_before_changed) +async def get_user_keyring_info(call: ServiceCall) -> ServiceResponse: + """Get the user keyring info.""" + camera = _async_get_ufp_camera(call) + ulp_users = camera.api.bootstrap.ulp_users.as_list() + user_keyrings: list[JsonValueType] = [ + { + KEYRINGS_USER_FULL_NAME: user.full_name, + KEYRINGS_USER_STATUS: user.status, + KEYRINGS_ULP_ID: user.ulp_id, + "keys": [ + { + KEYRINGS_KEY_TYPE: key.registry_type, + **( + {KEYRINGS_KEY_TYPE_ID_FINGERPRINT: key.registry_id} + if key.registry_type == "fingerprint" + else {} + ), + **( + {KEYRINGS_KEY_TYPE_ID_NFC: key.registry_id} + if key.registry_type == "nfc" + else {} + ), + } + for key in camera.api.bootstrap.keyrings.as_list() + if key.ulp_user == user.ulp_id + ], + } + for user in ulp_users + ] + + response: ServiceResponse = {"users": user_keyrings} + return response + + SERVICES = [ ( SERVICE_ADD_DOORBELL_TEXT, add_doorbell_text, DOORBELL_TEXT_SCHEMA, + SupportsResponse.NONE, ), ( SERVICE_REMOVE_DOORBELL_TEXT, remove_doorbell_text, DOORBELL_TEXT_SCHEMA, + SupportsResponse.NONE, ), ( SERVICE_SET_CHIME_PAIRED, set_chime_paired_doorbells, CHIME_PAIRED_SCHEMA, + SupportsResponse.NONE, ), ( SERVICE_REMOVE_PRIVACY_ZONE, remove_privacy_zone, REMOVE_PRIVACY_ZONE_SCHEMA, + SupportsResponse.NONE, + ), + ( + SERVICE_GET_USER_KEYRING_INFO, + get_user_keyring_info, + GET_USER_KEYRING_INFO_SCHEMA, + SupportsResponse.ONLY, ), ] @@ -232,5 +303,7 @@ SERVICES = [ def async_setup_services(hass: HomeAssistant) -> None: """Set up the global UniFi Protect services.""" - for name, method, schema in SERVICES: - hass.services.async_register(DOMAIN, name, method, schema=schema) + for name, method, schema, supports_response in SERVICES: + hass.services.async_register( + DOMAIN, name, method, schema=schema, supports_response=supports_response + ) diff --git a/homeassistant/components/unifiprotect/services.yaml b/homeassistant/components/unifiprotect/services.yaml index 192dfd0566f..b620c195fc2 100644 --- a/homeassistant/components/unifiprotect/services.yaml +++ b/homeassistant/components/unifiprotect/services.yaml @@ -53,3 +53,10 @@ remove_privacy_zone: required: true selector: text: +get_user_keyring_info: + fields: + device_id: + required: true + selector: + device: + integration: unifiprotect diff --git a/homeassistant/components/unifiprotect/strings.json b/homeassistant/components/unifiprotect/strings.json index 8ecb4076409..cde8c88d169 100644 --- a/homeassistant/components/unifiprotect/strings.json +++ b/homeassistant/components/unifiprotect/strings.json @@ -225,6 +225,16 @@ "description": "The name of the zone to remove." } } + }, + "get_user_keyring_info": { + "name": "Retrieve Keyring Details for Users", + "description": "Fetch a detailed list of users with NFC and fingerprint associations for automations.", + "fields": { + "device_id": { + "name": "UniFi Protect NVR", + "description": "Any device from the UniFi Protect instance you want to retrieve keyring details. This is useful for systems with multiple Protect instances." 
+ } + } } } } diff --git a/tests/components/unifiprotect/test_services.py b/tests/components/unifiprotect/test_services.py index 84e0e74a492..efc9d1ace9e 100644 --- a/tests/components/unifiprotect/test_services.py +++ b/tests/components/unifiprotect/test_services.py @@ -9,9 +9,19 @@ from uiprotect.data import Camera, Chime, Color, Light, ModelType from uiprotect.data.devices import CameraZone from uiprotect.exceptions import BadRequest -from homeassistant.components.unifiprotect.const import ATTR_MESSAGE, DOMAIN +from homeassistant.components.unifiprotect.const import ( + ATTR_MESSAGE, + DOMAIN, + KEYRINGS_KEY_TYPE, + KEYRINGS_KEY_TYPE_ID_FINGERPRINT, + KEYRINGS_KEY_TYPE_ID_NFC, + KEYRINGS_ULP_ID, + KEYRINGS_USER_FULL_NAME, + KEYRINGS_USER_STATUS, +) from homeassistant.components.unifiprotect.services import ( SERVICE_ADD_DOORBELL_TEXT, + SERVICE_GET_USER_KEYRING_INFO, SERVICE_REMOVE_DOORBELL_TEXT, SERVICE_REMOVE_PRIVACY_ZONE, SERVICE_SET_CHIME_PAIRED, @@ -249,3 +259,59 @@ async def test_remove_privacy_zone( ) ufp.api.update_device.assert_called() assert not doorbell.privacy_zones + + +@pytest.mark.asyncio +async def test_get_doorbell_user( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + ufp: MockUFPFixture, + doorbell: Camera, +) -> None: + """Test get_doorbell_user service.""" + + ulp_user = Mock(full_name="Test User", status="active", ulp_id="user_ulp_id") + keyring = Mock( + registry_type="nfc", + registry_id="123456", + ulp_user="user_ulp_id", + ) + keyring_2 = Mock( + registry_type="fingerprint", + registry_id="2", + ulp_user="user_ulp_id", + ) + ufp.api.bootstrap.ulp_users.as_list = Mock(return_value=[ulp_user]) + ufp.api.bootstrap.keyrings.as_list = Mock(return_value=[keyring, keyring_2]) + + await init_entry(hass, ufp, [doorbell]) + + camera_entry = entity_registry.async_get("binary_sensor.test_camera_doorbell") + + response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_USER_KEYRING_INFO, + {ATTR_DEVICE_ID: camera_entry.device_id}, + blocking=True, + return_response=True, + ) + + assert response == { + "users": [ + { + KEYRINGS_USER_FULL_NAME: "Test User", + "keys": [ + { + KEYRINGS_KEY_TYPE: "nfc", + KEYRINGS_KEY_TYPE_ID_NFC: "123456", + }, + { + KEYRINGS_KEY_TYPE_ID_FINGERPRINT: "2", + KEYRINGS_KEY_TYPE: "fingerprint", + }, + ], + KEYRINGS_USER_STATUS: "active", + KEYRINGS_ULP_ID: "user_ulp_id", + }, + ], + } From 344a03d9ce060e240552fc358912e0d3b6e0642c Mon Sep 17 00:00:00 2001 From: Andre Lengwenus Date: Sun, 22 Dec 2024 19:55:45 +0100 Subject: [PATCH 1021/1198] Remove unused fixture from LCN tests (#133821) --- tests/components/lcn/conftest.py | 10 -- tests/components/lcn/fixtures/config.json | 165 ---------------------- 2 files changed, 175 deletions(-) delete mode 100644 tests/components/lcn/fixtures/config.json diff --git a/tests/components/lcn/conftest.py b/tests/components/lcn/conftest.py index 3c5979c3c36..d8dee472946 100644 --- a/tests/components/lcn/conftest.py +++ b/tests/components/lcn/conftest.py @@ -16,7 +16,6 @@ from homeassistant.components.lcn.helpers import AddressType, generate_unique_id from homeassistant.const import CONF_ADDRESS, CONF_DEVICES, CONF_ENTITIES, CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, load_fixture @@ -134,15 +133,6 @@ async def init_integration( return lcn_connection -async def setup_component(hass: HomeAssistant) -> None: - """Set up the LCN 
component.""" - fixture_filename = "lcn/config.json" - config_data = json.loads(load_fixture(fixture_filename)) - - await async_setup_component(hass, DOMAIN, config_data) - await hass.async_block_till_done() - - def get_device( hass: HomeAssistant, entry: MockConfigEntry, address: AddressType ) -> dr.DeviceEntry: diff --git a/tests/components/lcn/fixtures/config.json b/tests/components/lcn/fixtures/config.json deleted file mode 100644 index ed3e3500900..00000000000 --- a/tests/components/lcn/fixtures/config.json +++ /dev/null @@ -1,165 +0,0 @@ -{ - "lcn": { - "connections": [ - { - "host": "192.168.2.41", - "port": 4114, - "username": "lcn", - "password": "lcn", - "sk_num_tries": 0, - "dim_mode": "steps200", - "name": "pchk" - }, - { - "name": "myhome", - "host": "192.168.2.42", - "port": 4114, - "username": "lcn", - "password": "lcn", - "sk_num_tries": 0, - "dim_mode": "steps200" - } - ], - "lights": [ - { - "name": "Light_Output1", - "address": "pchk.s0.m7", - "output": "output1", - "dimmable": true, - "transition": 5 - }, - { - "name": "Light_Output2", - "address": "pchk.s0.m7", - "output": "output2", - "dimmable": false, - "transition": 0 - }, - { - "name": "Light_Relay1", - "address": "s0.m7", - "output": "relay1" - }, - { - "name": "Light_Relay3", - "address": "myhome.s0.m7", - "output": "relay3" - }, - { - "name": "Light_Relay4", - "address": "myhome.s0.m7", - "output": "relay4" - } - ], - "switches": [ - { - "name": "Switch_Output1", - "address": "s0.m7", - "output": "output1" - }, - { - "name": "Switch_Output2", - "address": "s0.m7", - "output": "output2" - }, - { - "name": "Switch_Relay1", - "address": "s0.m7", - "output": "relay1" - }, - { - "name": "Switch_Relay2", - "address": "s0.m7", - "output": "relay2" - }, - { - "name": "Switch_Group5", - "address": "s0.g5", - "output": "relay1" - } - ], - "covers": [ - { - "name": "Cover_Ouputs", - "address": "s0.m7", - "motor": "outputs", - "reverse_time": "rt1200" - }, - { - "name": "Cover_Relays", - "address": "s0.m7", - "motor": "motor1" - } - ], - "climates": [ - { - "name": "Climate1", - "address": "s0.m7", - "source": "var1", - "setpoint": "r1varsetpoint", - "lockable": true, - "min_temp": 0, - "max_temp": 40, - "unit_of_measurement": "°C" - } - ], - "scenes": [ - { - "name": "Romantic", - "address": "s0.m7", - "register": 0, - "scene": 0, - "outputs": ["output1", "output2", "relay1"] - }, - { - "name": "Romantic Transition", - "address": "s0.m7", - "register": 0, - "scene": 1, - "outputs": ["output1", "output2", "relay1"], - "transition": 10 - } - ], - "binary_sensors": [ - { - "name": "Sensor_LockRegulator1", - "address": "s0.m7", - "source": "r1varsetpoint" - }, - { - "name": "Binary_Sensor1", - "address": "s0.m7", - "source": "binsensor1" - }, - { - "name": "Sensor_KeyLock", - "address": "s0.m7", - "source": "a5" - } - ], - "sensors": [ - { - "name": "Sensor_Var1", - "address": "s0.m7", - "source": "var1", - "unit_of_measurement": "°C" - }, - { - "name": "Sensor_Setpoint1", - "address": "s0.m7", - "source": "r1varsetpoint", - "unit_of_measurement": "°C" - }, - { - "name": "Sensor_Led6", - "address": "s0.m7", - "source": "led6" - }, - { - "name": "Sensor_LogicOp1", - "address": "s0.m7", - "source": "logicop1" - } - ] - } -} From 475f19c1409f0e07e40a88b27e5545cf8e9d90a1 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 22 Dec 2024 08:56:09 -1000 Subject: [PATCH 1022/1198] Bump flux_led to 1.1.0 (#133818) --- homeassistant/components/flux_led/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/flux_led/manifest.json b/homeassistant/components/flux_led/manifest.json index a55ae028342..962098a0bf8 100644 --- a/homeassistant/components/flux_led/manifest.json +++ b/homeassistant/components/flux_led/manifest.json @@ -53,5 +53,5 @@ "documentation": "https://www.home-assistant.io/integrations/flux_led", "iot_class": "local_push", "loggers": ["flux_led"], - "requirements": ["flux-led==1.0.4"] + "requirements": ["flux-led==1.1.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 2381e18a42d..321f104fa1f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -924,7 +924,7 @@ flexit_bacnet==2.2.1 flipr-api==1.6.1 # homeassistant.components.flux_led -flux-led==1.0.4 +flux-led==1.1.0 # homeassistant.components.homekit # homeassistant.components.recorder diff --git a/requirements_test_all.txt b/requirements_test_all.txt index db166894f00..58004e5bd8b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -783,7 +783,7 @@ flexit_bacnet==2.2.1 flipr-api==1.6.1 # homeassistant.components.flux_led -flux-led==1.0.4 +flux-led==1.1.0 # homeassistant.components.homekit # homeassistant.components.recorder From b1f6563fb2c3bfe6d75a4c2b26af102b6b7069ae Mon Sep 17 00:00:00 2001 From: "Steven B." <51370195+sdb9696@users.noreply.github.com> Date: Sun, 22 Dec 2024 18:56:33 +0000 Subject: [PATCH 1023/1198] Add camera platform to tplink integration (#129180) Co-authored-by: Teemu R. --- homeassistant/components/tplink/__init__.py | 13 +- homeassistant/components/tplink/camera.py | 220 +++++ .../components/tplink/config_flow.py | 257 ++++- homeassistant/components/tplink/const.py | 3 + homeassistant/components/tplink/entity.py | 14 + homeassistant/components/tplink/manifest.json | 2 +- homeassistant/components/tplink/models.py | 4 + homeassistant/components/tplink/strings.json | 27 +- tests/components/tplink/__init__.py | 71 +- tests/components/tplink/conftest.py | 94 +- .../components/tplink/fixtures/features.json | 30 + .../tplink/snapshots/test_camera.ambr | 87 ++ tests/components/tplink/test_camera.py | 431 +++++++++ tests/components/tplink/test_config_flow.py | 899 ++++++++++++++++-- tests/components/tplink/test_init.py | 10 +- 15 files changed, 2012 insertions(+), 150 deletions(-) create mode 100644 homeassistant/components/tplink/camera.py create mode 100644 tests/components/tplink/snapshots/test_camera.ambr create mode 100644 tests/components/tplink/test_camera.py diff --git a/homeassistant/components/tplink/__init__.py b/homeassistant/components/tplink/__init__.py index a7ffce686be..e2a2f99517f 100644 --- a/homeassistant/components/tplink/__init__.py +++ b/homeassistant/components/tplink/__init__.py @@ -47,10 +47,12 @@ from homeassistant.helpers.typing import ConfigType from .const import ( CONF_AES_KEYS, + CONF_CAMERA_CREDENTIALS, CONF_CONFIG_ENTRY_MINOR_VERSION, CONF_CONNECTION_PARAMETERS, CONF_CREDENTIALS_HASH, CONF_DEVICE_CONFIG, + CONF_LIVE_VIEW, CONF_USES_HTTP, CONNECT_TIMEOUT, DISCOVERY_TIMEOUT, @@ -226,7 +228,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: TPLinkConfigEntry) -> bo for child in device.children ] - entry.runtime_data = TPLinkData(parent_coordinator, child_coordinators) + camera_creds: Credentials | None = None 
+ if camera_creds_dict := entry.data.get(CONF_CAMERA_CREDENTIALS): + camera_creds = Credentials( + camera_creds_dict[CONF_USERNAME], camera_creds_dict[CONF_PASSWORD] + ) + live_view = entry.data.get(CONF_LIVE_VIEW) + + entry.runtime_data = TPLinkData( + parent_coordinator, child_coordinators, camera_creds, live_view + ) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True diff --git a/homeassistant/components/tplink/camera.py b/homeassistant/components/tplink/camera.py new file mode 100644 index 00000000000..5ed279909d6 --- /dev/null +++ b/homeassistant/components/tplink/camera.py @@ -0,0 +1,220 @@ +"""Support for TPLink camera entities.""" + +import asyncio +from dataclasses import dataclass +import logging +import time + +from aiohttp import web +from haffmpeg.camera import CameraMjpeg +from kasa import Credentials, Device, Module +from kasa.smartcam.modules import Camera as CameraModule + +from homeassistant.components import ffmpeg, stream +from homeassistant.components.camera import ( + Camera, + CameraEntityDescription, + CameraEntityFeature, +) +from homeassistant.config_entries import ConfigFlowContext +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.aiohttp_client import async_aiohttp_proxy_stream +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import TPLinkConfigEntry, legacy_device_id +from .const import CONF_CAMERA_CREDENTIALS +from .coordinator import TPLinkDataUpdateCoordinator +from .entity import CoordinatedTPLinkEntity, TPLinkModuleEntityDescription + +_LOGGER = logging.getLogger(__name__) + + +@dataclass(frozen=True, kw_only=True) +class TPLinkCameraEntityDescription( + CameraEntityDescription, TPLinkModuleEntityDescription +): + """Base class for camera entity description.""" + + +CAMERA_DESCRIPTIONS: tuple[TPLinkCameraEntityDescription, ...] 
= ( + TPLinkCameraEntityDescription( + key="live_view", + translation_key="live_view", + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: TPLinkConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up camera entities.""" + data = config_entry.runtime_data + parent_coordinator = data.parent_coordinator + device = parent_coordinator.device + camera_credentials = data.camera_credentials + live_view = data.live_view + ffmpeg_manager = ffmpeg.get_ffmpeg_manager(hass) + + async_add_entities( + TPLinkCameraEntity( + device, + parent_coordinator, + description, + camera_module=camera_module, + parent=None, + ffmpeg_manager=ffmpeg_manager, + camera_credentials=camera_credentials, + ) + for description in CAMERA_DESCRIPTIONS + if (camera_module := device.modules.get(Module.Camera)) and live_view + ) + + +class TPLinkCameraEntity(CoordinatedTPLinkEntity, Camera): + """Representation of a TPLink camera.""" + + IMAGE_INTERVAL = 5 * 60 + + _attr_supported_features = CameraEntityFeature.STREAM | CameraEntityFeature.ON_OFF + + entity_description: TPLinkCameraEntityDescription + + def __init__( + self, + device: Device, + coordinator: TPLinkDataUpdateCoordinator, + description: TPLinkCameraEntityDescription, + *, + camera_module: CameraModule, + parent: Device | None = None, + ffmpeg_manager: ffmpeg.FFmpegManager, + camera_credentials: Credentials | None, + ) -> None: + """Initialize a TPlink camera.""" + self.entity_description = description + self._camera_module = camera_module + self._video_url = camera_module.stream_rtsp_url(camera_credentials) + self._image: bytes | None = None + super().__init__(device, coordinator, parent=parent) + Camera.__init__(self) + self._ffmpeg_manager = ffmpeg_manager + self._image_lock = asyncio.Lock() + self._last_update: float = 0 + self._camera_credentials = camera_credentials + self._can_stream = True + self._http_mpeg_stream_running = False + + def _get_unique_id(self) -> str: + """Return unique ID for the entity.""" + return f"{legacy_device_id(self._device)}-{self.entity_description}" + + @callback + def _async_update_attrs(self) -> None: + """Update the entity's attributes.""" + self._attr_is_on = self._camera_module.is_on + + async def stream_source(self) -> str | None: + """Return the source of the stream.""" + return self._video_url + + async def _async_check_stream_auth(self, video_url: str) -> None: + """Check for an auth error and start reauth flow.""" + try: + await stream.async_check_stream_client_error(self.hass, video_url) + except stream.StreamOpenClientError as ex: + if ex.stream_client_error is stream.StreamClientError.Unauthorized: + _LOGGER.debug( + "Camera stream failed authentication for %s", + self._device.host, + ) + self._can_stream = False + self.coordinator.config_entry.async_start_reauth( + self.hass, + ConfigFlowContext( + reauth_source=CONF_CAMERA_CREDENTIALS, # type: ignore[typeddict-unknown-key] + ), + {"device": self._device}, + ) + + async def async_camera_image( + self, width: int | None = None, height: int | None = None + ) -> bytes | None: + """Return a still image response from the camera.""" + now = time.monotonic() + + if self._image and now - self._last_update < self.IMAGE_INTERVAL: + return self._image + + # Don't try to capture a new image if a stream is running + if (self.stream and self.stream.available) or self._http_mpeg_stream_running: + return self._image + + if self._can_stream and (video_url := self._video_url): + # Sometimes the front end makes multiple image requests 
+ async with self._image_lock: + if self._image and (now - self._last_update) < self.IMAGE_INTERVAL: + return self._image + + _LOGGER.debug("Updating camera image for %s", self._device.host) + image = await ffmpeg.async_get_image( + self.hass, + video_url, + width=width, + height=height, + ) + if image: + self._image = image + self._last_update = now + _LOGGER.debug("Updated camera image for %s", self._device.host) + # This coroutine is called by camera with an asyncio.timeout + # so image could be None whereas an auth issue returns b'' + elif image == b"": + _LOGGER.debug( + "Empty camera image returned for %s", self._device.host + ) + # image could be empty if a stream is running so check for explicit auth error + await self._async_check_stream_auth(video_url) + else: + _LOGGER.debug( + "None camera image returned for %s", self._device.host + ) + + return self._image + + async def handle_async_mjpeg_stream( + self, request: web.Request + ) -> web.StreamResponse | None: + """Generate an HTTP MJPEG stream from the camera. + + The frontend falls back to calling this method if the HLS + stream fails. + """ + _LOGGER.debug("Starting http mjpeg stream for %s", self._device.host) + if self._video_url is None or self._can_stream is False: + return None + + mjpeg_stream = CameraMjpeg(self._ffmpeg_manager.binary) + await mjpeg_stream.open_camera(self._video_url) + self._http_mpeg_stream_running = True + try: + stream_reader = await mjpeg_stream.get_reader() + return await async_aiohttp_proxy_stream( + self.hass, + request, + stream_reader, + self._ffmpeg_manager.ffmpeg_stream_content_type, + ) + finally: + self._http_mpeg_stream_running = False + await mjpeg_stream.close() + _LOGGER.debug("Stopped http mjpeg stream for %s", self._device.host) + + async def async_turn_on(self) -> None: + """Turn on camera.""" + await self._camera_module.set_state(True) + + async def async_turn_off(self) -> None: + """Turn off camera.""" + await self._camera_module.set_state(False) diff --git a/homeassistant/components/tplink/config_flow.py b/homeassistant/components/tplink/config_flow.py index 63f1b4e125b..db6f9a58ba5 100644 --- a/homeassistant/components/tplink/config_flow.py +++ b/homeassistant/components/tplink/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations from collections.abc import Mapping import logging -from typing import TYPE_CHECKING, Any, Self +from typing import TYPE_CHECKING, Any, Self, cast from kasa import ( AuthenticationError, @@ -13,13 +13,15 @@ from kasa import ( DeviceConfig, Discover, KasaException, + Module, TimeoutError, ) import voluptuous as vol -from homeassistant.components import dhcp +from homeassistant.components import dhcp, ffmpeg, stream from homeassistant.config_entries import ( SOURCE_REAUTH, + SOURCE_RECONFIGURE, ConfigEntry, ConfigEntryState, ConfigFlow, @@ -31,6 +33,7 @@ from homeassistant.const import ( CONF_HOST, CONF_MAC, CONF_MODEL, + CONF_NAME, CONF_PASSWORD, CONF_PORT, CONF_USERNAME, @@ -48,9 +51,11 @@ from . 
import ( ) from .const import ( CONF_AES_KEYS, + CONF_CAMERA_CREDENTIALS, CONF_CONFIG_ENTRY_MINOR_VERSION, CONF_CONNECTION_PARAMETERS, CONF_CREDENTIALS_HASH, + CONF_LIVE_VIEW, CONF_USES_HTTP, CONNECT_TIMEOUT, DOMAIN, @@ -62,6 +67,16 @@ STEP_AUTH_DATA_SCHEMA = vol.Schema( {vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str} ) +STEP_RECONFIGURE_DATA_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str}) + +STEP_CAMERA_AUTH_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_LIVE_VIEW): bool, + vol.Optional(CONF_USERNAME): str, + vol.Optional(CONF_PASSWORD): str, + } +) + class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for tplink.""" @@ -227,7 +242,12 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): self.hass.async_create_task( self._async_reload_requires_auth_entries(), eager_start=False ) - return self._async_create_entry_from_device(self._discovered_device) + if self._async_supports_camera_credentials(device): + return await self.async_step_camera_auth_confirm() + + return self._async_create_or_update_entry_from_device( + self._discovered_device + ) self.context["title_placeholders"] = placeholders return self.async_show_form( @@ -253,7 +273,12 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): """Confirm discovery.""" assert self._discovered_device is not None if user_input is not None: - return self._async_create_entry_from_device(self._discovered_device) + if self._async_supports_camera_credentials(self._discovered_device): + return await self.async_step_camera_auth_confirm() + + return self._async_create_or_update_entry_from_device( + self._discovered_device + ) self._set_confirm_only() placeholders = self._async_make_placeholders_from_discovery() @@ -282,6 +307,13 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): return host, port + def _async_supports_camera_credentials(self, device: Device) -> bool: + """Return True if device could have separate camera credentials.""" + if camera_module := device.modules.get(Module.Camera): + self._discovered_device = device + return bool(camera_module.stream_rtsp_url()) + return False + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -324,7 +356,11 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): else: if not device: return await self.async_step_user_auth_confirm() - return self._async_create_entry_from_device(device) + + if self._async_supports_camera_credentials(device): + return await self.async_step_camera_auth_confirm() + + return self._async_create_or_update_entry_from_device(device) return self.async_show_form( step_id="user", @@ -375,7 +411,10 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): self.hass.async_create_task( self._async_reload_requires_auth_entries(), eager_start=False ) - return self._async_create_entry_from_device(device) + if self._async_supports_camera_credentials(device): + return await self.async_step_camera_auth_confirm() + + return self._async_create_or_update_entry_from_device(device) return self.async_show_form( step_id="user_auth_confirm", @@ -384,6 +423,104 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): description_placeholders=placeholders, ) + def _create_camera_entry( + self, device: Device, un: str, pw: str + ) -> ConfigFlowResult: + entry_data: dict[str, bool | dict[str, str]] = {CONF_LIVE_VIEW: True} + entry_data[CONF_CAMERA_CREDENTIALS] = { + CONF_USERNAME: un, + CONF_PASSWORD: pw, + } + _LOGGER.debug("Creating camera account entry for device %s", device.host) + return 
self._async_create_or_update_entry_from_device( + device, camera_data=entry_data + ) + + async def async_step_camera_auth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Dialog that gives the user option to set camera credentials.""" + errors: dict[str, str] = {} + placeholders: dict[str, str] = {} + device = self._discovered_device + assert device + + if user_input: + live_view = user_input[CONF_LIVE_VIEW] + if not live_view: + return self._async_create_or_update_entry_from_device( + device, camera_data={CONF_LIVE_VIEW: False} + ) + + un = user_input.get(CONF_USERNAME) + pw = user_input.get(CONF_PASSWORD) + + if user_input and un and pw: + camera_creds = Credentials(un, cast(str, pw)) + + camera_module = device.modules[Module.Camera] + rtsp_url = camera_module.stream_rtsp_url(camera_creds) + assert rtsp_url + + # If camera fails to create HLS stream via 'stream' then try + # ffmpeg.async_get_image as some cameras do not work with HLS + # and the frontend will fallback to mpeg on error + try: + await stream.async_check_stream_client_error(self.hass, rtsp_url) + except stream.StreamOpenClientError as ex: + if ex.stream_client_error is stream.StreamClientError.Unauthorized: + errors["base"] = "invalid_camera_auth" + else: + _LOGGER.debug( + "Device %s client error checking stream: %s", device.host, ex + ) + if await ffmpeg.async_get_image(self.hass, rtsp_url): + return self._create_camera_entry(device, un, pw) + + errors["base"] = "cannot_connect_camera" + placeholders["error"] = str(ex) + except Exception as ex: # noqa: BLE001 + _LOGGER.debug("Device %s error checking stream: %s", device.host, ex) + if await ffmpeg.async_get_image(self.hass, rtsp_url): + return self._create_camera_entry(device, un, pw) + + errors["base"] = "cannot_connect_camera" + placeholders["error"] = str(ex) + else: + return self._create_camera_entry(device, un, pw) + + elif user_input: + errors["base"] = "camera_creds" + + entry = None + if self.source == SOURCE_RECONFIGURE: + entry = self._get_reconfigure_entry() + elif self.source == SOURCE_REAUTH: + entry = self._get_reauth_entry() + + if entry: + placeholders[CONF_NAME] = entry.data[CONF_ALIAS] + placeholders[CONF_MODEL] = entry.data[CONF_MODEL] + placeholders[CONF_HOST] = entry.data[CONF_HOST] + + if user_input: + form_data = {**user_input} + elif entry: + form_data = {**entry.data.get(CONF_CAMERA_CREDENTIALS, {})} + form_data[CONF_LIVE_VIEW] = entry.data.get(CONF_LIVE_VIEW, False) + else: + form_data = {} + + self.context["title_placeholders"] = placeholders + return self.async_show_form( + step_id="camera_auth_confirm", + data_schema=self.add_suggested_values_to_schema( + STEP_CAMERA_AUTH_DATA_SCHEMA, form_data + ), + errors=errors, + description_placeholders=placeholders, + ) + async def async_step_pick_device( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -403,7 +540,11 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_user_auth_confirm() except KasaException: return self.async_abort(reason="cannot_connect") - return self._async_create_entry_from_device(device) + + if self._async_supports_camera_credentials(device): + return await self.async_step_camera_auth_confirm() + + return self._async_create_or_update_entry_from_device(device) configured_devices = { entry.unique_id for entry in self._async_current_entries() @@ -444,11 +585,19 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): _config_entries.flow.async_abort(flow["flow_id"]) @callback - def 
_async_create_entry_from_device(self, device: Device) -> ConfigFlowResult: + def _async_create_or_update_entry_from_device( + self, device: Device, *, camera_data: dict | None = None + ) -> ConfigFlowResult: """Create a config entry from a smart device.""" - # This is only ever called after a successful device update so we know that - # the credential_hash is correct and should be saved. - self._abort_if_unique_id_configured(updates={CONF_HOST: device.host}) + entry = None + if self.source == SOURCE_RECONFIGURE: + entry = self._get_reconfigure_entry() + elif self.source == SOURCE_REAUTH: + entry = self._get_reauth_entry() + + if not entry: + self._abort_if_unique_id_configured(updates={CONF_HOST: device.host}) + data: dict[str, Any] = { CONF_HOST: device.host, CONF_ALIAS: device.alias, @@ -456,16 +605,28 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): CONF_CONNECTION_PARAMETERS: device.config.connection_type.to_dict(), CONF_USES_HTTP: device.config.uses_http, } + if camera_data is not None: + data[CONF_LIVE_VIEW] = camera_data[CONF_LIVE_VIEW] + if camera_creds := camera_data.get(CONF_CAMERA_CREDENTIALS): + data[CONF_CAMERA_CREDENTIALS] = camera_creds + if device.config.aes_keys: data[CONF_AES_KEYS] = device.config.aes_keys + + # This is only ever called after a successful device update so we know that + # the credential_hash is correct and should be saved. if device.credentials_hash: data[CONF_CREDENTIALS_HASH] = device.credentials_hash if port := device.config.port_override: data[CONF_PORT] = port - return self.async_create_entry( - title=f"{device.alias} {device.model}", - data=data, - ) + + if not entry: + return self.async_create_entry( + title=f"{device.alias} {device.model}", + data=data, + ) + + return self.async_update_reload_and_abort(entry, data=data) async def _async_try_connect_all( self, @@ -546,7 +707,8 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): credentials: Credentials | None, ) -> Device: """Try to connect.""" - self._async_abort_entries_match({CONF_HOST: discovered_device.host}) + if self.source not in {SOURCE_RECONFIGURE, SOURCE_REAUTH}: + self._async_abort_entries_match({CONF_HOST: discovered_device.host}) config = discovered_device.config if credentials: @@ -566,6 +728,10 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Start the reauthentication flow if the device needs updated credentials.""" + if self.context.get("reauth_source") == CONF_CAMERA_CREDENTIALS: + self._discovered_device = entry_data["device"] + return await self.async_step_camera_auth_confirm() + return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -634,3 +800,62 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, description_placeholders=placeholders, ) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Trigger a reconfiguration flow.""" + errors: dict[str, str] = {} + placeholders: dict[str, str] = {} + + reconfigure_entry = self._get_reconfigure_entry() + assert reconfigure_entry.unique_id + await self.async_set_unique_id(reconfigure_entry.unique_id) + + host = reconfigure_entry.data[CONF_HOST] + port = reconfigure_entry.data.get(CONF_PORT) + + if user_input is not None: + host, port = self._async_get_host_port(host) + + self.host = host + credentials = await get_credentials(self.hass) + try: + device = await self._async_try_discover_and_update( + host, + credentials, + raise_on_progress=False, + 
raise_on_timeout=False, + port=port, + ) or await self._async_try_connect_all( + host, + credentials=credentials, + raise_on_progress=False, + port=port, + ) + except AuthenticationError: # Error from the update() + return await self.async_step_user_auth_confirm() + except KasaException as ex: + errors["base"] = "cannot_connect" + placeholders["error"] = str(ex) + else: + if not device: + return await self.async_step_user_auth_confirm() + + if self._async_supports_camera_credentials(device): + return await self.async_step_camera_auth_confirm() + + return self._async_create_or_update_entry_from_device(device) + + return self.async_show_form( + step_id="reconfigure", + data_schema=self.add_suggested_values_to_schema( + STEP_RECONFIGURE_DATA_SCHEMA, + {CONF_HOST: f"{host}:{port}" if port else host}, + ), + errors=errors, + description_placeholders={ + **placeholders, + CONF_MAC: reconfigure_entry.unique_id, + }, + ) diff --git a/homeassistant/components/tplink/const.py b/homeassistant/components/tplink/const.py index 28e4b04bcf9..61c1bf1cb9b 100644 --- a/homeassistant/components/tplink/const.py +++ b/homeassistant/components/tplink/const.py @@ -24,12 +24,15 @@ CONF_CREDENTIALS_HASH: Final = "credentials_hash" CONF_CONNECTION_PARAMETERS: Final = "connection_parameters" CONF_USES_HTTP: Final = "uses_http" CONF_AES_KEYS: Final = "aes_keys" +CONF_CAMERA_CREDENTIALS = "camera_credentials" +CONF_LIVE_VIEW = "live_view" CONF_CONFIG_ENTRY_MINOR_VERSION: Final = 5 PLATFORMS: Final = [ Platform.BINARY_SENSOR, Platform.BUTTON, + Platform.CAMERA, Platform.CLIMATE, Platform.FAN, Platform.LIGHT, diff --git a/homeassistant/components/tplink/entity.py b/homeassistant/components/tplink/entity.py index ef9e2ad5eee..60d066012a2 100644 --- a/homeassistant/components/tplink/entity.py +++ b/homeassistant/components/tplink/entity.py @@ -73,6 +73,13 @@ EXCLUDED_FEATURES = { "check_latest_firmware", # siren "alarm", + # camera + "pan_left", + "pan_right", + "pan_step", + "tilt_up", + "tilt_down", + "tilt_step", } @@ -91,6 +98,13 @@ class TPLinkFeatureEntityDescription(EntityDescription): deprecated_info: DeprecatedInfo | None = None +@dataclass(frozen=True, kw_only=True) +class TPLinkModuleEntityDescription(EntityDescription): + """Base class for a TPLink module based entity description.""" + + deprecated_info: DeprecatedInfo | None = None + + def async_refresh_after[_T: CoordinatedTPLinkEntity, **_P]( func: Callable[Concatenate[_T, _P], Awaitable[None]], ) -> Callable[Concatenate[_T, _P], Coroutine[Any, Any, None]]: diff --git a/homeassistant/components/tplink/manifest.json b/homeassistant/components/tplink/manifest.json index 65061882027..7797f0a36a3 100644 --- a/homeassistant/components/tplink/manifest.json +++ b/homeassistant/components/tplink/manifest.json @@ -3,7 +3,7 @@ "name": "TP-Link Smart Home", "codeowners": ["@rytilahti", "@bdraco", "@sdb9696"], "config_flow": true, - "dependencies": ["network"], + "dependencies": ["network", "ffmpeg", "stream"], "dhcp": [ { "registered_devices": true diff --git a/homeassistant/components/tplink/models.py b/homeassistant/components/tplink/models.py index ced58d3d21f..389260a388b 100644 --- a/homeassistant/components/tplink/models.py +++ b/homeassistant/components/tplink/models.py @@ -4,6 +4,8 @@ from __future__ import annotations from dataclasses import dataclass +from kasa import Credentials + from .coordinator import TPLinkDataUpdateCoordinator @@ -13,3 +15,5 @@ class TPLinkData: parent_coordinator: TPLinkDataUpdateCoordinator children_coordinators: 
list[TPLinkDataUpdateCoordinator] + camera_credentials: Credentials | None + live_view: bool | None diff --git a/homeassistant/components/tplink/strings.json b/homeassistant/components/tplink/strings.json index 8e5118c2720..7443636c3c0 100644 --- a/homeassistant/components/tplink/strings.json +++ b/homeassistant/components/tplink/strings.json @@ -42,16 +42,36 @@ "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" } + }, + "reconfigure": { + "title": "Reconfigure TPLink entry", + "description": "Update your configuration for device {mac}", + "data": { + "host": "[%key:common::config_flow::data::host%]" + } + }, + "camera_auth_confirm": { + "title": "Set camera account credentials", + "description": "Input device camera account credentials. Leave blank if they are the same as your TPLink cloud credentials.", + "data": { + "live_view": "Enable camera live view", + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + } } }, "error": { "cannot_connect": "Connection error: {error}", - "invalid_auth": "Invalid authentication: {error}" + "invalid_auth": "Unable to authenticate: {error}", + "invalid_camera_auth": "Camera stream authentication failed", + "cannot_connect_camera": "Unable to access the camera stream, verify that you have set up the camera account: {error}", + "camera_creds": "You have to set both username and password" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" } }, @@ -102,6 +122,11 @@ "name": "Stop alarm" } }, + "camera": { + "live_view": { + "name": "Live view" + } + }, "select": { "light_preset": { "name": "Light preset" diff --git a/tests/components/tplink/__init__.py b/tests/components/tplink/__init__.py index fdef5c35bfa..e322cf9f5de 100644 --- a/tests/components/tplink/__init__.py +++ b/tests/components/tplink/__init__.py @@ -1,6 +1,7 @@ """Tests for the TP-Link component.""" from collections import namedtuple +from dataclasses import replace from datetime import datetime from typing import Any from unittest.mock import AsyncMock, MagicMock, patch @@ -19,15 +20,18 @@ from kasa import ( ) from kasa.interfaces import Fan, Light, LightEffect, LightState from kasa.smart.modules.alarm import Alarm +from kasa.smartcam.modules.camera import LOCAL_STREAMING_PORT, Camera from syrupy import SnapshotAssertion from homeassistant.components.automation import DOMAIN as AUTOMATION_DOMAIN from homeassistant.components.tplink import ( CONF_AES_KEYS, CONF_ALIAS, + CONF_CAMERA_CREDENTIALS, CONF_CONNECTION_PARAMETERS, CONF_CREDENTIALS_HASH, CONF_HOST, + CONF_LIVE_VIEW, CONF_MODEL, CONF_USES_HTTP, Credentials, @@ -49,14 +53,19 @@ MODULE = "homeassistant.components.tplink" MODULE_CONFIG_FLOW = "homeassistant.components.tplink.config_flow" IP_ADDRESS = "127.0.0.1" IP_ADDRESS2 = "127.0.0.2" +IP_ADDRESS3 = "127.0.0.3" ALIAS = "My Bulb" +ALIAS_CAMERA = "My Camera" MODEL = "HS100" +MODEL_CAMERA = "C210" MAC_ADDRESS = "aa:bb:cc:dd:ee:ff" DEVICE_ID = "123456789ABCDEFGH" DEVICE_ID_MAC = "AA:BB:CC:DD:EE:FF" DHCP_FORMATTED_MAC_ADDRESS = MAC_ADDRESS.replace(":", "") MAC_ADDRESS2 = "11:22:33:44:55:66" 
+MAC_ADDRESS3 = "66:55:44:33:22:11" DEFAULT_ENTRY_TITLE = f"{ALIAS} {MODEL}" +DEFAULT_ENTRY_TITLE_CAMERA = f"{ALIAS_CAMERA} {MODEL_CAMERA}" CREDENTIALS_HASH_LEGACY = "" CONN_PARAMS_LEGACY = DeviceConnectionParameters( DeviceFamily.IotSmartPlugSwitch, DeviceEncryptionType.Xor @@ -80,7 +89,26 @@ DEVICE_CONFIG_KLAP = DeviceConfig( CONN_PARAMS_AES = DeviceConnectionParameters( DeviceFamily.SmartTapoPlug, DeviceEncryptionType.Aes ) -AES_KEYS = {"private": "foo", "public": "bar"} +_test_privkey = ( + "MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAKLJKmBWGj6WYo9sewI8vkqar" + "Ed5H1JUr8Jj/LEWLTtV6+Mm4mfyEk6YKFHSmIG4AGgrVsGK/EbEkTZk9CwtixNQpBVc36oN2R" + "vuWWV38YnP4vI63mNxTA/gQonCsahjN4HfwE87pM7O5z39aeunoYm6Be663t33DbJH1ZUbZjm" + "tAgMBAAECgYB1Bn1KaFvRprcQOIJt51E9vNghQbf8rhj0fIEKpdC6mVhNIoUdCO+URNqnh+hP" + "SQIx4QYreUlHbsSeABFxOQSDJm6/kqyQsp59nCVDo/bXTtlvcSJ/sU3riqJNxYqEU1iJ0xMvU" + "N1VKKTmik89J8e5sN9R0AFfUSJIk7MpdOoD2QJBANTbV27nenyvbqee/ul4frdt2rrPGcGpcV" + "QmY87qbbrZgqgL5LMHHD7T/v/I8D1wRog1sBz/AiZGcnv/ox8dHKsCQQDDx8DCGPySSVqKVua" + "yUkBNpglN83wiCXZjyEtWIt+aB1A2n5ektE/o8oHnnOuvMdooxvtid7Mdapi2VLHV7VMHAkAE" + "d0GjWwnv2cJpk+VnQpbuBEkFiFjS/loZWODZM4Pv2qZqHi3DL9AA5XPBLBcWQufH7dBvG06RP" + "QMj5N4oRfUXAkEAuJJkVliqHNvM4OkGewzyFII4+WVYHNqg43dcFuuvtA27AJQ6qYtYXrvp3k" + "phI3yzOIhHTNCea1goepSkR5ODFwJBAJCTRbB+P47aEr/xA51ZFHE6VefDBJG9yg6yK4jcOxg" + "5ficXEpx8442okNtlzwa+QHpm/L3JOFrHwiEeVqXtiqY=" +) +_test_pubkey = ( + "MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCiySpgVho+lmKPbHsCPL5KmqxHeR9SVK/CY" + "/yxFi07VevjJuJn8hJOmChR0piBuABoK1bBivxGxJE2ZPQsLYsTUKQVXN+qDdkb7llld/GJz+" + "LyOt5jcUwP4EKJwrGoYzeB38BPO6TOzuc9/Wnrp6GJugXuut7d9w2yR9WVG2Y5rQIDAQAB" +) +AES_KEYS = {"private": _test_privkey, "public": _test_pubkey} DEVICE_CONFIG_AES = DeviceConfig( IP_ADDRESS2, credentials=CREDENTIALS, @@ -88,6 +116,16 @@ DEVICE_CONFIG_AES = DeviceConfig( uses_http=True, aes_keys=AES_KEYS, ) +CONN_PARAMS_AES_CAMERA = DeviceConnectionParameters( + DeviceFamily.SmartIpCamera, DeviceEncryptionType.Aes, https=True, login_version=2 +) +DEVICE_CONFIG_AES_CAMERA = DeviceConfig( + IP_ADDRESS3, + credentials=CREDENTIALS, + connection_type=CONN_PARAMS_AES_CAMERA, + uses_http=True, +) + DEVICE_CONFIG_DICT_KLAP = { k: v for k, v in DEVICE_CONFIG_KLAP.to_dict().items() if k != "credentials" } @@ -119,6 +157,22 @@ CREATE_ENTRY_DATA_AES = { CONF_USES_HTTP: True, CONF_AES_KEYS: AES_KEYS, } +CREATE_ENTRY_DATA_AES_CAMERA = { + CONF_HOST: IP_ADDRESS3, + CONF_ALIAS: ALIAS_CAMERA, + CONF_MODEL: MODEL_CAMERA, + CONF_CREDENTIALS_HASH: CREDENTIALS_HASH_AES, + CONF_CONNECTION_PARAMETERS: CONN_PARAMS_AES_CAMERA.to_dict(), + CONF_USES_HTTP: True, + CONF_LIVE_VIEW: True, + CONF_CAMERA_CREDENTIALS: {"username": "camuser", "password": "campass"}, +} +SMALLEST_VALID_JPEG = ( + "ffd8ffe000104a46494600010101004800480000ffdb00430003020202020203020202030303030406040404040408060" + "6050609080a0a090809090a0c0f0c0a0b0e0b09090d110d0e0f101011100a0c12131210130f101010ffc9000b08000100" + "0101011100ffcc000600101005ffda0008010100003f00d2cf20ffd9" +) +SMALLEST_VALID_JPEG_BYTES = bytes.fromhex(SMALLEST_VALID_JPEG) def _load_feature_fixtures(): @@ -245,6 +299,9 @@ def _mocked_device( device.modules = {} device.features = {} + # replace device_config to prevent changes affecting between tests + device_config = replace(device_config) + if not ip_address: ip_address = IP_ADDRESS else: @@ -429,6 +486,17 @@ def _mocked_alarm_module(device): return alarm +def _mocked_camera_module(device): + camera = MagicMock(auto_spec=Camera, name="Mocked camera") + camera.is_on = True + 
camera.set_state = AsyncMock() + camera.stream_rtsp_url.return_value = ( + f"rtsp://user:pass@{device.host}:{LOCAL_STREAMING_PORT}/stream1" + ) + + return camera + + def _mocked_strip_children(features=None, alias=None) -> list[Device]: plug0 = _mocked_device( alias="Plug0" if alias is None else alias, @@ -496,6 +564,7 @@ MODULE_TO_MOCK_GEN = { Module.LightEffect: _mocked_light_effect_module, Module.Fan: _mocked_fan_module, Module.Alarm: _mocked_alarm_module, + Module.Camera: _mocked_camera_module, } diff --git a/tests/components/tplink/conftest.py b/tests/components/tplink/conftest.py index 25a4bd20270..f1bbb80b80c 100644 --- a/tests/components/tplink/conftest.py +++ b/tests/components/tplink/conftest.py @@ -1,30 +1,73 @@ """tplink conftest.""" from collections.abc import Generator +from contextlib import contextmanager from unittest.mock import DEFAULT, AsyncMock, patch -from kasa import DeviceConfig +from kasa import DeviceConfig, Module import pytest from homeassistant.components.tplink import DOMAIN from homeassistant.core import HomeAssistant from . import ( + ALIAS_CAMERA, + CREATE_ENTRY_DATA_AES_CAMERA, CREATE_ENTRY_DATA_LEGACY, CREDENTIALS_HASH_AES, CREDENTIALS_HASH_KLAP, DEVICE_CONFIG_AES, + DEVICE_CONFIG_AES_CAMERA, DEVICE_CONFIG_KLAP, IP_ADDRESS, IP_ADDRESS2, + IP_ADDRESS3, MAC_ADDRESS, MAC_ADDRESS2, + MAC_ADDRESS3, + MODEL_CAMERA, _mocked_device, ) from tests.common import MockConfigEntry +@contextmanager +def override_side_effect(mock: AsyncMock, effect): + """Temporarily override a mock side effect and replace afterwards.""" + try: + default_side_effect = mock.side_effect + mock.side_effect = effect + yield mock + finally: + mock.side_effect = default_side_effect + + +def _get_mock_devices(): + return { + IP_ADDRESS: _mocked_device( + device_config=DeviceConfig.from_dict(DEVICE_CONFIG_KLAP.to_dict()), + credentials_hash=CREDENTIALS_HASH_KLAP, + ip_address=IP_ADDRESS, + ), + IP_ADDRESS2: _mocked_device( + device_config=DeviceConfig.from_dict(DEVICE_CONFIG_AES.to_dict()), + credentials_hash=CREDENTIALS_HASH_AES, + mac=MAC_ADDRESS2, + ip_address=IP_ADDRESS2, + ), + IP_ADDRESS3: _mocked_device( + device_config=DeviceConfig.from_dict(DEVICE_CONFIG_AES_CAMERA.to_dict()), + credentials_hash=CREDENTIALS_HASH_AES, + mac=MAC_ADDRESS3, + ip_address=IP_ADDRESS3, + modules=[Module.Camera], + alias=ALIAS_CAMERA, + model=MODEL_CAMERA, + ), + } + + @pytest.fixture def mock_discovery(): """Mock python-kasa discovery.""" @@ -34,22 +77,15 @@ def mock_discovery(): discover_single=DEFAULT, try_connect_all=DEFAULT, ) as mock_discovery: - device = _mocked_device( - device_config=DeviceConfig.from_dict(DEVICE_CONFIG_KLAP.to_dict()), - credentials_hash=CREDENTIALS_HASH_KLAP, - alias="My Bulb", - ) - devices = { - "127.0.0.1": _mocked_device( - device_config=DeviceConfig.from_dict(DEVICE_CONFIG_KLAP.to_dict()), - credentials_hash=CREDENTIALS_HASH_KLAP, - alias=None, - ) - } + devices = _get_mock_devices() + + def get_device(host, **kwargs): + return devices[host] + mock_discovery["discover"].return_value = devices - mock_discovery["discover_single"].return_value = device - mock_discovery["try_connect_all"].return_value = device - mock_discovery["mock_device"] = device + mock_discovery["discover_single"].side_effect = get_device + mock_discovery["try_connect_all"].side_effect = get_device + mock_discovery["mock_devices"] = devices yield mock_discovery @@ -57,22 +93,9 @@ def mock_discovery(): def mock_connect(): """Mock python-kasa connect.""" with 
patch("homeassistant.components.tplink.Device.connect") as mock_connect: - devices = { - IP_ADDRESS: _mocked_device( - device_config=DeviceConfig.from_dict(DEVICE_CONFIG_KLAP.to_dict()), - credentials_hash=CREDENTIALS_HASH_KLAP, - ip_address=IP_ADDRESS, - ), - IP_ADDRESS2: _mocked_device( - device_config=DeviceConfig.from_dict(DEVICE_CONFIG_AES.to_dict()), - credentials_hash=CREDENTIALS_HASH_AES, - mac=MAC_ADDRESS2, - ip_address=IP_ADDRESS2, - ), - } + devices = _get_mock_devices() def get_device(config): - nonlocal devices return devices[config.host] mock_connect.side_effect = get_device @@ -117,6 +140,17 @@ def mock_config_entry() -> MockConfigEntry: ) +@pytest.fixture +def mock_camera_config_entry() -> MockConfigEntry: + """Mock camera ConfigEntry.""" + return MockConfigEntry( + title="TPLink", + domain=DOMAIN, + data={**CREATE_ENTRY_DATA_AES_CAMERA}, + unique_id=MAC_ADDRESS3, + ) + + @pytest.fixture async def mock_added_config_entry( hass: HomeAssistant, diff --git a/tests/components/tplink/fixtures/features.json b/tests/components/tplink/fixtures/features.json index d822bfc9b57..a54edf56c62 100644 --- a/tests/components/tplink/fixtures/features.json +++ b/tests/components/tplink/fixtures/features.json @@ -320,5 +320,35 @@ "type": "Sensor", "category": "Info", "value": "2024-06-24 10:03:11.046643+01:00" + }, + "pan_left": { + "value": "", + "type": "Action", + "category": "Config" + }, + "pan_right": { + "value": "", + "type": "Action", + "category": "Config" + }, + "pan_step": { + "value": 10, + "type": "Number", + "category": "Config" + }, + "tilt_up": { + "value": "", + "type": "Action", + "category": "Config" + }, + "tilt_down": { + "value": "", + "type": "Action", + "category": "Config" + }, + "tilt_step": { + "value": 10, + "type": "Number", + "category": "Config" } } diff --git a/tests/components/tplink/snapshots/test_camera.ambr b/tests/components/tplink/snapshots/test_camera.ambr new file mode 100644 index 00000000000..4ce1813d704 --- /dev/null +++ b/tests/components/tplink/snapshots/test_camera.ambr @@ -0,0 +1,87 @@ +# serializer version: 1 +# name: test_states[camera.my_camera_live_view-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'camera', + 'entity_category': None, + 'entity_id': 'camera.my_camera_live_view', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Live view', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'live_view', + 'unique_id': "123456789ABCDEFGH-TPLinkCameraEntityDescription(key='live_view', device_class=None, entity_category=None, entity_registry_enabled_default=True, entity_registry_visible_default=True, force_update=False, icon=None, has_entity_name=False, name=, translation_key='live_view', translation_placeholders=None, unit_of_measurement=None, deprecated_info=None)", + 'unit_of_measurement': None, + }) +# --- +# name: test_states[camera.my_camera_live_view-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'access_token': '1caab5c3b3', + 'entity_picture': '/api/camera_proxy/camera.my_camera_live_view?token=1caab5c3b3', + 'friendly_name': 'my_camera Live view', + 'frontend_stream_type': , + 'supported_features': , + }), + 'context': , + 'entity_id': 
'camera.my_camera_live_view', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_states[my_camera-entry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '66:55:44:33:22:11', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '1.0.0', + 'id': , + 'identifiers': set({ + tuple( + 'tplink', + '123456789ABCDEFGH', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'TP-Link', + 'model': 'HS100', + 'model_id': None, + 'name': 'my_camera', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '1.0.0', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/tplink/test_camera.py b/tests/components/tplink/test_camera.py new file mode 100644 index 00000000000..d8b0f82e32a --- /dev/null +++ b/tests/components/tplink/test_camera.py @@ -0,0 +1,431 @@ +"""The tests for the tplink camera platform.""" + +import asyncio +from unittest.mock import AsyncMock, patch + +from aiohttp.test_utils import make_mocked_request +from freezegun.api import FrozenDateTimeFactory +from kasa import Module +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components import stream +from homeassistant.components.camera import ( + CameraEntityFeature, + StreamType, + async_get_image, + async_get_mjpeg_stream, + get_camera_from_entity_id, +) +from homeassistant.components.tplink.camera import TPLinkCameraEntity +from homeassistant.components.websocket_api import TYPE_RESULT +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant, HomeAssistantError +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from . 
import ( + IP_ADDRESS3, + MAC_ADDRESS3, + SMALLEST_VALID_JPEG_BYTES, + _mocked_device, + setup_platform_for_device, + snapshot_platform, +) + +from tests.common import MockConfigEntry, async_fire_time_changed +from tests.typing import WebSocketGenerator + + +async def test_states( + hass: HomeAssistant, + mock_camera_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test states.""" + mock_camera_config_entry.add_to_hass(hass) + + mock_device = _mocked_device( + modules=[Module.Camera], + alias="my_camera", + ip_address=IP_ADDRESS3, + mac=MAC_ADDRESS3, + ) + + # Patch getrandbits so the access_token doesn't change on camera attributes + with patch("random.SystemRandom.getrandbits", return_value=123123123123): + await setup_platform_for_device( + hass, mock_camera_config_entry, Platform.CAMERA, mock_device + ) + + await snapshot_platform( + hass, + entity_registry, + device_registry, + snapshot, + mock_camera_config_entry.entry_id, + ) + + +async def test_handle_mjpeg_stream( + hass: HomeAssistant, + mock_camera_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test handle_async_mjpeg_stream.""" + mock_device = _mocked_device( + modules=[Module.Camera], + alias="my_camera", + ip_address=IP_ADDRESS3, + mac=MAC_ADDRESS3, + ) + + await setup_platform_for_device( + hass, mock_camera_config_entry, Platform.CAMERA, mock_device + ) + + state = hass.states.get("camera.my_camera_live_view") + assert state is not None + + mock_request = make_mocked_request("GET", "/", headers={"token": "x"}) + stream = await async_get_mjpeg_stream( + hass, mock_request, "camera.my_camera_live_view" + ) + assert stream is not None + + +async def test_handle_mjpeg_stream_not_supported( + hass: HomeAssistant, + mock_camera_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test handle_async_mjpeg_stream.""" + mock_device = _mocked_device( + modules=[Module.Camera], + alias="my_camera", + ip_address=IP_ADDRESS3, + mac=MAC_ADDRESS3, + ) + mock_camera = mock_device.modules[Module.Camera] + + mock_camera.stream_rtsp_url.return_value = None + + await setup_platform_for_device( + hass, mock_camera_config_entry, Platform.CAMERA, mock_device + ) + + mock_request = make_mocked_request("GET", "/", headers={"token": "x"}) + stream = await async_get_mjpeg_stream( + hass, mock_request, "camera.my_camera_live_view" + ) + assert stream is None + + +async def test_camera_image( + hass: HomeAssistant, + mock_camera_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test async_get_image.""" + mock_device = _mocked_device( + modules=[Module.Camera], + alias="my_camera", + ip_address=IP_ADDRESS3, + mac=MAC_ADDRESS3, + ) + + await setup_platform_for_device( + hass, mock_camera_config_entry, Platform.CAMERA, mock_device + ) + + state = hass.states.get("camera.my_camera_live_view") + assert state is not None + + with patch( + "homeassistant.components.ffmpeg.async_get_image", + return_value=SMALLEST_VALID_JPEG_BYTES, + ) as mock_get_image: + image = await async_get_image(hass, "camera.my_camera_live_view") + assert image + assert image.content == SMALLEST_VALID_JPEG_BYTES + mock_get_image.assert_called_once() + + mock_get_image.reset_mock() + image = await async_get_image(hass, "camera.my_camera_live_view") + mock_get_image.assert_not_called() + + freezer.tick(TPLinkCameraEntity.IMAGE_INTERVAL) + 
async_fire_time_changed(hass)
+        await hass.async_block_till_done(wait_background_tasks=True)
+
+        mock_get_image.reset_mock()
+        image = await async_get_image(hass, "camera.my_camera_live_view")
+        mock_get_image.assert_called_once()
+
+    freezer.tick(TPLinkCameraEntity.IMAGE_INTERVAL)
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done(wait_background_tasks=True)
+
+    # Test image returns None
+    with patch(
+        "homeassistant.components.ffmpeg.async_get_image",
+        return_value=None,
+    ) as mock_get_image:
+        msg = f"None camera image returned for {IP_ADDRESS3}"
+        assert msg not in caplog.text
+
+        mock_get_image.reset_mock()
+        image = await async_get_image(hass, "camera.my_camera_live_view")
+        mock_get_image.assert_called_once()
+
+        assert msg in caplog.text
+
+
+async def test_no_camera_image_when_streaming(
+    hass: HomeAssistant,
+    mock_camera_config_entry: MockConfigEntry,
+    freezer: FrozenDateTimeFactory,
+) -> None:
+    """Test that no image is fetched while an MJPEG stream is being served."""
+    mock_device = _mocked_device(
+        modules=[Module.Camera],
+        alias="my_camera",
+        ip_address=IP_ADDRESS3,
+        mac=MAC_ADDRESS3,
+    )
+
+    await setup_platform_for_device(
+        hass, mock_camera_config_entry, Platform.CAMERA, mock_device
+    )
+
+    state = hass.states.get("camera.my_camera_live_view")
+    assert state is not None
+
+    with patch(
+        "homeassistant.components.ffmpeg.async_get_image",
+        return_value=SMALLEST_VALID_JPEG_BYTES,
+    ) as mock_get_image:
+        await async_get_image(hass, "camera.my_camera_live_view")
+        mock_get_image.assert_called_once()
+
+        freezer.tick(TPLinkCameraEntity.IMAGE_INTERVAL)
+        async_fire_time_changed(hass)
+        await hass.async_block_till_done(wait_background_tasks=True)
+
+        start_event = asyncio.Event()
+        finish_event = asyncio.Event()
+
+        async def _waiter(*_, **__):
+            start_event.set()
+            await finish_event.wait()
+
+        async def _get_stream():
+            mock_request = make_mocked_request("GET", "/", headers={"token": "x"})
+            await async_get_mjpeg_stream(
+                hass, mock_request, "camera.my_camera_live_view"
+            )
+
+        mock_get_image.reset_mock()
+        with patch(
+            "homeassistant.components.tplink.camera.async_aiohttp_proxy_stream",
+            new=_waiter,
+        ):
+            task = asyncio.create_task(_get_stream())
+            await start_event.wait()
+            await async_get_image(hass, "camera.my_camera_live_view")
+            finish_event.set()
+            await task
+
+        mock_get_image.assert_not_called()
+
+
+async def test_no_concurrent_camera_image(
+    hass: HomeAssistant,
+    mock_camera_config_entry: MockConfigEntry,
+    freezer: FrozenDateTimeFactory,
+) -> None:
+    """Test that simultaneous image requests only fetch the camera image once."""
+    mock_device = _mocked_device(
+        modules=[Module.Camera],
+        alias="my_camera",
+        ip_address=IP_ADDRESS3,
+        mac=MAC_ADDRESS3,
+    )
+
+    await setup_platform_for_device(
+        hass, mock_camera_config_entry, Platform.CAMERA, mock_device
+    )
+
+    state = hass.states.get("camera.my_camera_live_view")
+    assert state is not None
+
+    finish_event = asyncio.Event()
+    call_count = 0
+
+    async def _waiter(*_, **__):
+        nonlocal call_count
+        call_count += 1
+        await finish_event.wait()
+        return SMALLEST_VALID_JPEG_BYTES
+
+    with patch(
+        "homeassistant.components.ffmpeg.async_get_image",
+        new=_waiter,
+    ):
+        tasks = asyncio.gather(
+            async_get_image(hass, "camera.my_camera_live_view"),
+            async_get_image(hass, "camera.my_camera_live_view"),
+        )
+        # Sleep to give both tasks a chance to reach the asyncio.Lock()
+        await asyncio.sleep(0)
+        finish_event.set()
+        results = await tasks
+        assert len(results) == 2
+        assert all(img and img.content == SMALLEST_VALID_JPEG_BYTES for img in results)
+        assert call_count == 1
+
+
+async 
def test_camera_image_auth_error(
+    hass: HomeAssistant,
+    mock_camera_config_entry: MockConfigEntry,
+    mock_connect: AsyncMock,
+    mock_discovery: AsyncMock,
+) -> None:
+    """Test that an auth error when getting the image starts a reauth flow."""
+    mock_device = _mocked_device(
+        modules=[Module.Camera],
+        alias="my_camera",
+        ip_address=IP_ADDRESS3,
+        mac=MAC_ADDRESS3,
+    )
+
+    await setup_platform_for_device(
+        hass, mock_camera_config_entry, Platform.CAMERA, mock_device
+    )
+
+    state = hass.states.get("camera.my_camera_live_view")
+    assert state is not None
+    flows = hass.config_entries.flow.async_progress()
+    assert len(flows) == 0
+
+    with (
+        patch(
+            "homeassistant.components.ffmpeg.async_get_image",
+            return_value=b"",
+        ),
+        patch(
+            "homeassistant.components.stream.async_check_stream_client_error",
+            side_effect=stream.StreamOpenClientError(
+                stream_client_error=stream.StreamClientError.Unauthorized
+            ),
+        ),
+        pytest.raises(HomeAssistantError),
+    ):
+        await async_get_image(hass, "camera.my_camera_live_view")
+    await hass.async_block_till_done()
+
+    flows = hass.config_entries.flow.async_progress()
+    assert len(flows) == 1
+    [result] = flows
+
+    assert result["step_id"] == "camera_auth_confirm"
+
+
+async def test_camera_stream_source(
+    hass: HomeAssistant,
+    mock_camera_config_entry: MockConfigEntry,
+    hass_ws_client: WebSocketGenerator,
+) -> None:
+    """Test camera stream source.
+
+    This test would fail if the integration didn't properly
+    put stream in the dependencies.
+    """
+    mock_device = _mocked_device(
+        modules=[Module.Camera],
+        alias="my_camera",
+        ip_address=IP_ADDRESS3,
+        mac=MAC_ADDRESS3,
+    )
+
+    await setup_platform_for_device(
+        hass, mock_camera_config_entry, Platform.CAMERA, mock_device
+    )
+
+    state = hass.states.get("camera.my_camera_live_view")
+    assert state is not None
+
+    client = await hass_ws_client(hass)
+    await client.send_json_auto_id(
+        {"type": "camera/stream", "entity_id": "camera.my_camera_live_view"}
+    )
+    msg = await client.receive_json()
+
+    # Assert WebSocket response
+    assert msg["type"] == TYPE_RESULT
+    assert msg["success"]
+    assert "url" in msg["result"]
+
+
+async def test_camera_stream_attributes(
+    hass: HomeAssistant,
+    mock_camera_config_entry: MockConfigEntry,
+) -> None:
+    """Test stream attributes."""
+    mock_device = _mocked_device(
+        modules=[Module.Camera],
+        alias="my_camera",
+        ip_address=IP_ADDRESS3,
+        mac=MAC_ADDRESS3,
+    )
+
+    await setup_platform_for_device(
+        hass, mock_camera_config_entry, Platform.CAMERA, mock_device
+    )
+
+    state = hass.states.get("camera.my_camera_live_view")
+    assert state is not None
+
+    supported_features = state.attributes.get("supported_features")
+    assert supported_features is CameraEntityFeature.STREAM | CameraEntityFeature.ON_OFF
+    camera = get_camera_from_entity_id(hass, "camera.my_camera_live_view")
+    assert camera.camera_capabilities.frontend_stream_types == {StreamType.HLS}
+
+
+async def test_camera_turn_on_off(
+    hass: HomeAssistant,
+    mock_camera_config_entry: MockConfigEntry,
+) -> None:
+    """Test camera turn on and off."""
+    mock_device = _mocked_device(
+        modules=[Module.Camera],
+        alias="my_camera",
+        ip_address=IP_ADDRESS3,
+        mac=MAC_ADDRESS3,
+    )
+    mock_camera = mock_device.modules[Module.Camera]
+
+    await setup_platform_for_device(
+        hass, mock_camera_config_entry, Platform.CAMERA, mock_device
+    )
+
+    state = hass.states.get("camera.my_camera_live_view")
+    assert state is not None
+
+    await hass.services.async_call(
+        "camera",
+        "turn_on",
+        {"entity_id": "camera.my_camera_live_view"},
+        blocking=True,
+    )
+    
mock_camera.set_state.assert_called_with(True) + + await hass.services.async_call( + "camera", + "turn_off", + {"entity_id": "camera.my_camera_live_view"}, + blocking=True, + ) + mock_camera.set_state.assert_called_with(False) diff --git a/tests/components/tplink/test_config_flow.py b/tests/components/tplink/test_config_flow.py index 2697696c667..980fd0a3f51 100644 --- a/tests/components/tplink/test_config_flow.py +++ b/tests/components/tplink/test_config_flow.py @@ -1,14 +1,13 @@ """Test the tplink config flow.""" -from contextlib import contextmanager import logging from unittest.mock import ANY, AsyncMock, patch -from kasa import TimeoutError +from kasa import Module, TimeoutError import pytest from homeassistant import config_entries -from homeassistant.components import dhcp +from homeassistant.components import dhcp, stream from homeassistant.components.tplink import ( DOMAIN, AuthenticationError, @@ -19,9 +18,11 @@ from homeassistant.components.tplink import ( ) from homeassistant.components.tplink.config_flow import TPLinkConfigFlow from homeassistant.components.tplink.const import ( + CONF_CAMERA_CREDENTIALS, CONF_CONNECTION_PARAMETERS, CONF_CREDENTIALS_HASH, CONF_DEVICE_CONFIG, + CONF_LIVE_VIEW, ) from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.const import ( @@ -39,44 +40,43 @@ from homeassistant.data_entry_flow import FlowResultType from . import ( AES_KEYS, ALIAS, + ALIAS_CAMERA, CONN_PARAMS_AES, CONN_PARAMS_KLAP, CONN_PARAMS_LEGACY, CREATE_ENTRY_DATA_AES, + CREATE_ENTRY_DATA_AES_CAMERA, CREATE_ENTRY_DATA_KLAP, CREATE_ENTRY_DATA_LEGACY, CREDENTIALS_HASH_AES, CREDENTIALS_HASH_KLAP, DEFAULT_ENTRY_TITLE, + DEFAULT_ENTRY_TITLE_CAMERA, DEVICE_CONFIG_AES, + DEVICE_CONFIG_AES_CAMERA, DEVICE_CONFIG_DICT_KLAP, DEVICE_CONFIG_KLAP, DEVICE_CONFIG_LEGACY, DHCP_FORMATTED_MAC_ADDRESS, IP_ADDRESS, + IP_ADDRESS2, + IP_ADDRESS3, MAC_ADDRESS, MAC_ADDRESS2, + MAC_ADDRESS3, + MODEL_CAMERA, MODULE, + SMALLEST_VALID_JPEG_BYTES, _mocked_device, _patch_connect, _patch_discovery, _patch_single_discovery, ) +from .conftest import override_side_effect from tests.common import MockConfigEntry -@contextmanager -def override_side_effect(mock: AsyncMock, effect): - """Temporarily override a mock side effect and replace afterwards.""" - try: - default_side_effect = mock.side_effect - mock.side_effect = effect - yield mock - finally: - mock.side_effect = default_side_effect - - @pytest.mark.parametrize( ("device_config", "expected_entry_data", "credentials_hash"), [ @@ -98,6 +98,7 @@ async def test_discovery( device_config=device_config, credentials_hash=credentials_hash, ip_address=ip_address, + mac=MAC_ADDRESS, ) with ( _patch_discovery(device, ip_address=ip_address), @@ -143,7 +144,7 @@ async def test_discovery( result["flow_id"], {CONF_DEVICE: MAC_ADDRESS}, ) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == DEFAULT_ENTRY_TITLE @@ -167,13 +168,142 @@ async def test_discovery( assert result2["reason"] == "no_devices_found" +async def test_discovery_camera( + hass: HomeAssistant, mock_discovery: AsyncMock, mock_connect: AsyncMock, mock_init +) -> None: + """Test authenticated discovery for camera with stream.""" + mock_device = _mocked_device( + alias=ALIAS_CAMERA, + ip_address=IP_ADDRESS3, + mac=MAC_ADDRESS3, + model=MODEL_CAMERA, + device_config=DEVICE_CONFIG_AES_CAMERA, + credentials_hash=CREDENTIALS_HASH_AES, + 
modules=[Module.Camera], + ) + + with override_side_effect(mock_connect["connect"], lambda *_, **__: mock_device): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, + data={ + CONF_HOST: IP_ADDRESS3, + CONF_MAC: MAC_ADDRESS3, + CONF_ALIAS: ALIAS, + CONF_DEVICE: mock_device, + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "discovery_confirm" + assert not result["errors"] + + with override_side_effect(mock_connect["connect"], lambda *_, **__: mock_device): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "camera_auth_confirm" + assert not result["errors"] + + with patch( + "homeassistant.components.stream.async_check_stream_client_error", + return_value=None, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_LIVE_VIEW: True, + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == DEFAULT_ENTRY_TITLE_CAMERA + assert result["data"] == CREATE_ENTRY_DATA_AES_CAMERA + assert result["context"]["unique_id"] == MAC_ADDRESS3 + + +async def test_discovery_pick_device_camera( + hass: HomeAssistant, mock_discovery: AsyncMock, mock_connect: AsyncMock, mock_init +) -> None: + """Test authenticated discovery for camera with stream.""" + mock_device = _mocked_device( + alias=ALIAS_CAMERA, + ip_address=IP_ADDRESS3, + mac=MAC_ADDRESS3, + model=MODEL_CAMERA, + device_config=DEVICE_CONFIG_AES_CAMERA, + credentials_hash=CREDENTIALS_HASH_AES, + modules=[Module.Camera], + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert not result["errors"] + + with override_side_effect( + mock_discovery["discover"], lambda *_, **__: {IP_ADDRESS3: mock_device} + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "pick_device" + assert not result["errors"] + + with override_side_effect(mock_connect["connect"], lambda *_, **__: mock_device): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_DEVICE: MAC_ADDRESS3}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "camera_auth_confirm" + assert not result["errors"] + + with patch( + "homeassistant.components.stream.async_check_stream_client_error", + return_value=None, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_LIVE_VIEW: True, + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == DEFAULT_ENTRY_TITLE_CAMERA + assert result["data"] == CREATE_ENTRY_DATA_AES_CAMERA + assert result["context"]["unique_id"] == MAC_ADDRESS3 + + async def test_discovery_auth( hass: HomeAssistant, mock_discovery: AsyncMock, mock_connect: 
AsyncMock, mock_init ) -> None: """Test authenticated discovery.""" - - mock_device = mock_connect["mock_devices"][IP_ADDRESS] - assert mock_device.config == DEVICE_CONFIG_KLAP + mock_device = _mocked_device( + alias=ALIAS, + ip_address=IP_ADDRESS, + mac=MAC_ADDRESS, + device_config=DEVICE_CONFIG_KLAP, + credentials_hash=CREDENTIALS_HASH_KLAP, + ) with override_side_effect(mock_connect["connect"], AuthenticationError): result = await hass.config_entries.flow.async_init( @@ -191,13 +321,14 @@ async def test_discovery_auth( assert result["step_id"] == "discovery_auth_confirm" assert not result["errors"] - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_USERNAME: "fake_username", - CONF_PASSWORD: "fake_password", - }, - ) + with override_side_effect(mock_connect["connect"], lambda *_, **__: mock_device): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, + ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == DEFAULT_ENTRY_TITLE @@ -205,6 +336,69 @@ async def test_discovery_auth( assert result2["context"]["unique_id"] == MAC_ADDRESS +async def test_discovery_auth_camera( + hass: HomeAssistant, mock_discovery: AsyncMock, mock_connect: AsyncMock, mock_init +) -> None: + """Test authenticated discovery for camera with stream.""" + mock_device = _mocked_device( + alias=ALIAS_CAMERA, + ip_address=IP_ADDRESS3, + mac=MAC_ADDRESS3, + model=MODEL_CAMERA, + device_config=DEVICE_CONFIG_AES_CAMERA, + credentials_hash=CREDENTIALS_HASH_AES, + modules=[Module.Camera], + ) + + with override_side_effect(mock_connect["connect"], AuthenticationError): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, + data={ + CONF_HOST: IP_ADDRESS3, + CONF_MAC: MAC_ADDRESS3, + CONF_ALIAS: ALIAS, + CONF_DEVICE: mock_device, + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "discovery_auth_confirm" + assert not result["errors"] + + with override_side_effect(mock_connect["connect"], lambda *_, **__: mock_device): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "camera_auth_confirm" + assert not result["errors"] + + with patch( + "homeassistant.components.stream.async_check_stream_client_error", + return_value=None, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_LIVE_VIEW: True, + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == DEFAULT_ENTRY_TITLE_CAMERA + assert result["data"] == CREATE_ENTRY_DATA_AES_CAMERA + assert result["context"]["unique_id"] == MAC_ADDRESS3 + + @pytest.mark.parametrize( ("error_type", "errors_msg", "error_placement"), [ @@ -385,7 +579,7 @@ async def test_discovery_new_credentials_invalid( async def test_discovery_with_existing_device_present(hass: HomeAssistant) -> None: """Test setting up discovery.""" config_entry = MockConfigEntry( - domain=DOMAIN, data={CONF_HOST: "127.0.0.2"}, unique_id="dd:dd:dd:dd:dd:dd" + domain=DOMAIN, 
data={CONF_HOST: IP_ADDRESS2}, unique_id="dd:dd:dd:dd:dd:dd" ) config_entry.add_to_hass(hass) @@ -535,6 +729,227 @@ async def test_manual(hass: HomeAssistant) -> None: assert result2["reason"] == "already_configured" +async def test_manual_camera( + hass: HomeAssistant, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, +) -> None: + """Test manual camera.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert not result["errors"] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: IP_ADDRESS3} + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "camera_auth_confirm" + + # Test no username or pass + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_LIVE_VIEW: True, + CONF_USERNAME: "camuser", + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "camera_auth_confirm" + assert result["errors"] == {"base": "camera_creds"} + + # Test unknown error + with ( + patch( + "homeassistant.components.stream.async_check_stream_client_error", + side_effect=stream.StreamOpenClientError( + stream_client_error=stream.StreamClientError.NotFound + ), + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_LIVE_VIEW: True, + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "camera_auth_confirm" + assert result["errors"] == {"base": "cannot_connect_camera"} + assert "error" in result["description_placeholders"] + + # Test unknown error + with ( + patch( + "homeassistant.components.stream.async_check_stream_client_error", + side_effect=stream.StreamOpenClientError( + stream_client_error=stream.StreamClientError.Unauthorized + ), + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_LIVE_VIEW: True, + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "camera_auth_confirm" + assert result["errors"] == {"base": "invalid_camera_auth"} + + with patch( + "homeassistant.components.stream.async_check_stream_client_error", + return_value=None, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_LIVE_VIEW: True, + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"][CONF_CAMERA_CREDENTIALS] == { + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + } + assert result["data"][CONF_LIVE_VIEW] is True + + +@pytest.mark.parametrize( + "exception", + [ + pytest.param( + stream.StreamOpenClientError( + stream_client_error=stream.StreamClientError.NotFound + ), + id="open_client_error", + ), + pytest.param(Exception(), id="other_error"), + ], +) +async def test_manual_camera_no_hls( + hass: HomeAssistant, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, + exception: Exception, +) -> None: + """Test manual camera when hls stream fails but mpeg stream works.""" + result = await 
hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert not result["errors"] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: IP_ADDRESS3} + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "camera_auth_confirm" + + # Test stream error + with ( + patch( + "homeassistant.components.stream.async_check_stream_client_error", + side_effect=exception, + ), + patch("homeassistant.components.ffmpeg.async_get_image", return_value=None), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_LIVE_VIEW: True, + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "camera_auth_confirm" + assert result["errors"] == {"base": "cannot_connect_camera"} + assert "error" in result["description_placeholders"] + + # async_get_image will succeed + with ( + patch( + "homeassistant.components.stream.async_check_stream_client_error", + side_effect=exception, + ), + patch( + "homeassistant.components.ffmpeg.async_get_image", + return_value=SMALLEST_VALID_JPEG_BYTES, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_LIVE_VIEW: True, + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"][CONF_CAMERA_CREDENTIALS] == { + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + } + assert result["data"][CONF_LIVE_VIEW] is True + + +async def test_manual_camera_no_live_view( + hass: HomeAssistant, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, +) -> None: + """Test manual camera.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert not result["errors"] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: IP_ADDRESS3} + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "camera_auth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_LIVE_VIEW: False, + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert CONF_CAMERA_CREDENTIALS not in result["data"] + assert result["data"][CONF_LIVE_VIEW] is False + + async def test_manual_no_capabilities(hass: HomeAssistant) -> None: """Test manually setup without successful get_capabilities.""" result = await hass.config_entries.flow.async_init( @@ -575,7 +990,7 @@ async def test_manual_auth( assert result["step_id"] == "user" assert not result["errors"] - mock_discovery["mock_device"].update.side_effect = AuthenticationError + mock_discovery["mock_devices"][IP_ADDRESS].update.side_effect = AuthenticationError result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_HOST: IP_ADDRESS} @@ -586,7 +1001,7 @@ async def test_manual_auth( assert result2["step_id"] == "user_auth_confirm" assert not result2["errors"] - 
mock_discovery["mock_device"].update.reset_mock(side_effect=True) + mock_discovery["mock_devices"][IP_ADDRESS].update.reset_mock(side_effect=True) result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], @@ -602,6 +1017,63 @@ async def test_manual_auth( assert result3["context"]["unique_id"] == MAC_ADDRESS +async def test_manual_auth_camera( + hass: HomeAssistant, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, +) -> None: + """Test manual camera.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert not result["errors"] + + with override_side_effect( + mock_discovery["mock_devices"][IP_ADDRESS3].update, AuthenticationError + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: IP_ADDRESS3} + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user_auth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "foobar", + CONF_PASSWORD: "foobar", + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "camera_auth_confirm" + + with patch( + "homeassistant.components.stream.async_check_stream_client_error", + return_value=None, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_LIVE_VIEW: True, + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"][CONF_CAMERA_CREDENTIALS] == { + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + } + assert result["data"][CONF_LIVE_VIEW] is True + + @pytest.mark.parametrize( ("error_type", "errors_msg", "error_placement"), [ @@ -627,7 +1099,7 @@ async def test_manual_auth_errors( assert result["step_id"] == "user" assert not result["errors"] - mock_discovery["mock_device"].update.side_effect = AuthenticationError + mock_discovery["mock_devices"][IP_ADDRESS].update.side_effect = AuthenticationError with override_side_effect(mock_connect["connect"], error_type): result2 = await hass.config_entries.flow.async_configure( @@ -682,11 +1154,27 @@ async def test_manual_port_override( port, ) -> None: """Test manually setup.""" - mock_discovery["mock_device"].config.port_override = port - mock_discovery["mock_device"].host = host - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + config = DeviceConfig( + host, + credentials=None, + port_override=port, + uses_http=True, + connection_type=CONN_PARAMS_KLAP, ) + mock_device = _mocked_device( + alias=ALIAS, + ip_address=host, + mac=MAC_ADDRESS, + device_config=config, + credentials_hash=CREDENTIALS_HASH_KLAP, + ) + + with override_side_effect( + mock_discovery["try_connect_all"], lambda *_, **__: mock_device + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert not result["errors"] @@ -696,23 +1184,29 @@ async def test_manual_port_override( mock_discovery["discover_single"].side_effect = TimeoutError mock_connect["connect"].side_effect = AuthenticationError - result2 = await 
hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_HOST: host_str} - ) - await hass.async_block_till_done() + with override_side_effect( + mock_discovery["try_connect_all"], lambda *_, **__: mock_device + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: host_str} + ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM assert result2["step_id"] == "user_auth_confirm" assert not result2["errors"] creds = Credentials("fake_username", "fake_password") - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - user_input={ - CONF_USERNAME: "fake_username", - CONF_PASSWORD: "fake_password", - }, - ) + with override_side_effect( + mock_discovery["try_connect_all"], lambda *_, **__: mock_device + ): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, + ) await hass.async_block_till_done() mock_discovery["try_connect_all"].assert_called_once_with( host, credentials=creds, port=port, http_client=ANY @@ -744,7 +1238,7 @@ async def test_manual_port_override_invalid( await hass.async_block_till_done() mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=None, port=None + IP_ADDRESS, credentials=None, port=None ) assert result2["type"] is FlowResultType.CREATE_ENTRY @@ -941,7 +1435,7 @@ async def test_integration_discovery_with_ip_change( mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_LEGACY.to_dict() ) - assert mock_config_entry.data[CONF_HOST] == "127.0.0.1" + assert mock_config_entry.data[CONF_HOST] == IP_ADDRESS mocked_device = _mocked_device(device_config=DEVICE_CONFIG_KLAP) with override_side_effect(mock_connect["connect"], lambda *_, **__: mocked_device): @@ -949,7 +1443,7 @@ async def test_integration_discovery_with_ip_change( DOMAIN, context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, data={ - CONF_HOST: "127.0.0.2", + CONF_HOST: IP_ADDRESS2, CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, CONF_DEVICE: mocked_device, @@ -961,7 +1455,7 @@ async def test_integration_discovery_with_ip_change( assert ( mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_KLAP.to_dict() ) - assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" + assert mock_config_entry.data[CONF_HOST] == IP_ADDRESS2 config = DeviceConfig.from_dict(DEVICE_CONFIG_DICT_KLAP) @@ -984,8 +1478,8 @@ async def test_integration_discovery_with_ip_change( await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.LOADED # Check that init set the new host correctly before calling connect - assert config.host == "127.0.0.1" - config.host = "127.0.0.2" + assert config.host == IP_ADDRESS + config.host = IP_ADDRESS2 config.uses_http = False # Not passed in to new config class config.http_client = "Foo" mock_connect["connect"].assert_awaited_once_with(config=config) @@ -1024,7 +1518,7 @@ async def test_integration_discovery_with_connection_change( ) == 0 ) - assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" + assert mock_config_entry.data[CONF_HOST] == IP_ADDRESS2 assert ( mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_AES.to_dict() ) @@ -1034,7 +1528,7 @@ async def test_integration_discovery_with_connection_change( NEW_DEVICE_CONFIG = { **DEVICE_CONFIG_DICT_KLAP, "connection_type": CONN_PARAMS_KLAP.to_dict(), - CONF_HOST: "127.0.0.2", + CONF_HOST: IP_ADDRESS2, } config = 
DeviceConfig.from_dict(NEW_DEVICE_CONFIG) # Reset the connect mock so when the config flow reloads the entry it succeeds @@ -1055,7 +1549,7 @@ async def test_integration_discovery_with_connection_change( DOMAIN, context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, data={ - CONF_HOST: "127.0.0.2", + CONF_HOST: IP_ADDRESS2, CONF_MAC: MAC_ADDRESS2, CONF_ALIAS: ALIAS, CONF_DEVICE: bulb, @@ -1067,12 +1561,12 @@ async def test_integration_discovery_with_connection_change( assert ( mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_KLAP.to_dict() ) - assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" + assert mock_config_entry.data[CONF_HOST] == IP_ADDRESS2 assert CREDENTIALS_HASH_AES not in mock_config_entry.data assert mock_config_entry.state is ConfigEntryState.LOADED - config.host = "127.0.0.2" + config.host = IP_ADDRESS2 config.uses_http = False # Not passed in to new config class config.http_client = "Foo" config.aes_keys = AES_KEYS @@ -1097,18 +1591,18 @@ async def test_dhcp_discovery_with_ip_change( flows = hass.config_entries.flow.async_progress() assert len(flows) == 0 - assert mock_config_entry.data[CONF_HOST] == "127.0.0.1" + assert mock_config_entry.data[CONF_HOST] == IP_ADDRESS discovery_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=dhcp.DhcpServiceInfo( - ip="127.0.0.2", macaddress=DHCP_FORMATTED_MAC_ADDRESS, hostname=ALIAS + ip=IP_ADDRESS2, macaddress=DHCP_FORMATTED_MAC_ADDRESS, hostname=ALIAS ), ) assert discovery_result["type"] is FlowResultType.ABORT assert discovery_result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" + assert mock_config_entry.data[CONF_HOST] == IP_ADDRESS2 async def test_dhcp_discovery_discover_fail( @@ -1121,14 +1615,14 @@ async def test_dhcp_discovery_discover_fail( flows = hass.config_entries.flow.async_progress() assert len(flows) == 0 - assert mock_config_entry.data[CONF_HOST] == "127.0.0.1" + assert mock_config_entry.data[CONF_HOST] == IP_ADDRESS with override_side_effect(mock_discovery["discover_single"], TimeoutError): discovery_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=dhcp.DhcpServiceInfo( - ip="127.0.0.2", macaddress=DHCP_FORMATTED_MAC_ADDRESS, hostname=ALIAS + ip=IP_ADDRESS2, macaddress=DHCP_FORMATTED_MAC_ADDRESS, hostname=ALIAS ), ) assert discovery_result["type"] is FlowResultType.ABORT @@ -1160,15 +1654,58 @@ async def test_reauth( ) credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials, port=None + IP_ADDRESS, credentials=credentials, port=None ) - mock_discovery["mock_device"].update.assert_called_once_with() + mock_discovery["mock_devices"][IP_ADDRESS].update.assert_called_once_with() assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "reauth_successful" await hass.async_block_till_done() +async def test_reauth_camera( + hass: HomeAssistant, + mock_camera_config_entry: MockConfigEntry, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, +) -> None: + """Test async_get_image.""" + mock_device = mock_connect["mock_devices"][IP_ADDRESS3] + mock_camera_config_entry.add_to_hass(hass) + mock_camera_config_entry.async_start_reauth( + hass, + config_entries.ConfigFlowContext( + reauth_source=CONF_CAMERA_CREDENTIALS, # type: ignore[typeddict-unknown-key] + ), + {"device": mock_device}, + ) + await 
hass.async_block_till_done() + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + [result] = flows + + assert result["step_id"] == "camera_auth_confirm" + + with patch( + "homeassistant.components.stream.async_check_stream_client_error", + return_value=None, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_LIVE_VIEW: True, + CONF_USERNAME: "camuser2", + CONF_PASSWORD: "campass2", + }, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert dict(mock_camera_config_entry.data) == { + **CREATE_ENTRY_DATA_AES_CAMERA, + CONF_CAMERA_CREDENTIALS: {CONF_USERNAME: "camuser2", CONF_PASSWORD: "campass2"}, + } + + async def test_reauth_try_connect_all( hass: HomeAssistant, mock_added_config_entry: MockConfigEntry, @@ -1195,7 +1732,7 @@ async def test_reauth_try_connect_all( ) credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials, port=None + IP_ADDRESS, credentials=credentials, port=None ) mock_discovery["try_connect_all"].assert_called_once() assert result2["type"] is FlowResultType.ABORT @@ -1233,7 +1770,7 @@ async def test_reauth_try_connect_all_fail( ) credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials, port=None + IP_ADDRESS, credentials=credentials, port=None ) mock_discovery["try_connect_all"].assert_called_once() assert result2["errors"] == {"base": "cannot_connect"} @@ -1278,40 +1815,48 @@ async def test_reauth_update_with_encryption_change( assert CONF_CREDENTIALS_HASH not in mock_config_entry.data new_config = DeviceConfig( - "127.0.0.2", + IP_ADDRESS2, credentials=None, connection_type=Device.ConnectionParameters( Device.Family.SmartTapoPlug, Device.EncryptionType.Klap ), uses_http=True, ) - mock_discovery["mock_device"].host = "127.0.0.2" - mock_discovery["mock_device"].config = new_config - mock_discovery["mock_device"].credentials_hash = None - mock_connect["mock_devices"]["127.0.0.2"].config = new_config - mock_connect["mock_devices"]["127.0.0.2"].credentials_hash = CREDENTIALS_HASH_KLAP - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_USERNAME: "fake_username", - CONF_PASSWORD: "fake_password", - }, + mock_device = _mocked_device( + alias="my_device", + ip_address=IP_ADDRESS2, + mac=MAC_ADDRESS2, + device_config=new_config, + credentials_hash=CREDENTIALS_HASH_KLAP, ) - await hass.async_block_till_done(wait_background_tasks=True) + + with ( + override_side_effect( + mock_discovery["discover_single"], lambda *_, **__: mock_device + ), + override_side_effect(mock_connect["connect"], lambda *_, **__: mock_device), + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, + ) + await hass.async_block_till_done(wait_background_tasks=True) assert "Connection type changed for 127.0.0.2" in caplog.text credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.2", credentials=credentials, port=None + IP_ADDRESS2, credentials=credentials, port=None ) - mock_discovery["mock_device"].update.assert_called_once_with() + mock_device.update.assert_called_once_with() assert result2["type"] is FlowResultType.ABORT assert 
result2["reason"] == "reauth_successful" assert mock_config_entry.state is ConfigEntryState.LOADED assert ( mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_KLAP.to_dict() ) - assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" + assert mock_config_entry.data[CONF_HOST] == IP_ADDRESS2 assert mock_config_entry.data[CONF_CREDENTIALS_HASH] == CREDENTIALS_HASH_KLAP @@ -1398,7 +1943,7 @@ async def test_reauth_update_from_discovery_with_ip_change( DOMAIN, context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, data={ - CONF_HOST: "127.0.0.2", + CONF_HOST: IP_ADDRESS2, CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, CONF_DEVICE: device, @@ -1410,7 +1955,7 @@ async def test_reauth_update_from_discovery_with_ip_change( assert ( mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_KLAP.to_dict() ) - assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" + assert mock_config_entry.data[CONF_HOST] == IP_ADDRESS2 async def test_reauth_no_update_if_config_and_ip_the_same( @@ -1493,26 +2038,27 @@ async def test_reauth_errors( [result] = flows assert result["step_id"] == "reauth_confirm" - mock_discovery["mock_device"].update.side_effect = error_type - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_USERNAME: "fake_username", - CONF_PASSWORD: "fake_password", - }, - ) + mock_device = mock_discovery["mock_devices"][IP_ADDRESS] + with override_side_effect(mock_device.update, error_type): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, + ) credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials, port=None + IP_ADDRESS, credentials=credentials, port=None ) - mock_discovery["mock_device"].update.assert_called_once_with() + mock_device.update.assert_called_once_with() assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {error_placement: errors_msg} assert result2["description_placeholders"]["error"] == str(error_type) mock_discovery["discover_single"].reset_mock() - mock_discovery["mock_device"].update.reset_mock(side_effect=True) + mock_device.update.reset_mock(side_effect=True) result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], user_input={ @@ -1522,9 +2068,9 @@ async def test_reauth_errors( ) mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials, port=None + IP_ADDRESS, credentials=credentials, port=None ) - mock_discovery["mock_device"].update.assert_called_once_with() + mock_device.update.assert_called_once_with() assert result3["type"] is FlowResultType.ABORT assert result3["reason"] == "reauth_successful" @@ -1731,12 +2277,169 @@ async def test_reauth_update_other_flows( ) credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials, port=None + IP_ADDRESS, credentials=credentials, port=None ) - mock_discovery["mock_device"].update.assert_called_once_with() + mock_discovery["mock_devices"][IP_ADDRESS].update.assert_called_once_with() assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "reauth_successful" await hass.async_block_till_done() flows = hass.config_entries.flow.async_progress() assert len(flows) == 0 + + +async def test_reconfigure( + hass: HomeAssistant, + 
mock_added_config_entry: MockConfigEntry, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, +) -> None: + """Test reconfigure flow.""" + result = await mock_added_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: IP_ADDRESS, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + +async def test_reconfigure_auth_discovered( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, +) -> None: + """Test reconfigure auth flow for device that's discovered.""" + result = await mock_added_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + # Simulate a bad host + with ( + override_side_effect( + mock_discovery["mock_devices"][IP_ADDRESS].update, KasaException + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "WRONG_IP", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + assert result["errors"] == {"base": "cannot_connect"} + assert "error" in result["description_placeholders"] + + with ( + override_side_effect( + mock_discovery["mock_devices"][IP_ADDRESS].update, AuthenticationError + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: IP_ADDRESS, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user_auth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "user", + CONF_PASSWORD: "pass", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + +async def test_reconfigure_auth_try_connect_all( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, +) -> None: + """Test reconfigure auth flow for device that's not discovered.""" + result = await mock_added_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + with ( + override_side_effect(mock_discovery["discover_single"], TimeoutError), + override_side_effect(mock_connect["connect"], KasaException), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: IP_ADDRESS, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user_auth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "user", + CONF_PASSWORD: "pass", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + +async def test_reconfigure_camera( + hass: HomeAssistant, + mock_camera_config_entry: MockConfigEntry, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, +) -> None: + """Test reconfigure flow.""" + mock_camera_config_entry.add_to_hass(hass) + result = await mock_camera_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + result = await 
hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: IP_ADDRESS3, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "camera_auth_confirm" + + with patch( + "homeassistant.components.stream.async_check_stream_client_error", + return_value=None, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_LIVE_VIEW: True, + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + }, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" diff --git a/tests/components/tplink/test_init.py b/tests/components/tplink/test_init.py index dd967e0e0d6..8dad8881b9b 100644 --- a/tests/components/tplink/test_init.py +++ b/tests/components/tplink/test_init.py @@ -59,6 +59,7 @@ from . import ( _patch_discovery, _patch_single_discovery, ) +from .conftest import override_side_effect from tests.common import MockConfigEntry, async_fire_time_changed @@ -70,6 +71,7 @@ async def test_configuring_tplink_causes_discovery( with ( patch("homeassistant.components.tplink.Discover.discover") as discover, patch("homeassistant.components.tplink.Discover.discover_single"), + patch("homeassistant.components.tplink.Device.connect"), ): discover.return_value = {MagicMock(): MagicMock()} await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) @@ -221,8 +223,12 @@ async def test_config_entry_with_stored_credentials( hass.data.setdefault(DOMAIN, {})[CONF_AUTHENTICATION] = auth mock_config_entry.add_to_hass(hass) - with patch( - "homeassistant.components.tplink.async_create_clientsession", return_value="Foo" + with ( + patch( + "homeassistant.components.tplink.async_create_clientsession", + return_value="Foo", + ), + override_side_effect(mock_discovery["discover"], lambda *_, **__: {}), ): await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() From 07322c69925dacf66016250e7963d06bb38e2ba8 Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Sun, 22 Dec 2024 19:57:34 +0100 Subject: [PATCH 1024/1198] Add reconfigure flow to slide_local (#133669) --- .../components/slide_local/config_flow.py | 41 ++++++++++++++++++- .../components/slide_local/quality_scale.yaml | 4 +- .../components/slide_local/strings.json | 15 ++++++- .../slide_local/test_config_flow.py | 30 ++++++++++++++ 4 files changed, 86 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/slide_local/config_flow.py b/homeassistant/components/slide_local/config_flow.py index 3ccc89be375..23c509a02dc 100644 --- a/homeassistant/components/slide_local/config_flow.py +++ b/homeassistant/components/slide_local/config_flow.py @@ -103,7 +103,7 @@ class SlideConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the user step.""" - errors = {} + errors: dict[str, str] = {} if user_input is not None: if not (errors := await self.async_test_connection(user_input)): await self.async_set_unique_id(self._mac) @@ -136,6 +136,45 @@ class SlideConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of the integration.""" + errors: dict[str, str] = {} + + if user_input is not None: + if not (errors := await self.async_test_connection(user_input)): + await self.async_set_unique_id(self._mac) + 
self._abort_if_unique_id_mismatch( + description_placeholders={CONF_MAC: self._mac} + ) + user_input |= { + CONF_API_VERSION: self._api_version, + } + + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), + data_updates=user_input, + ) + + entry: SlideConfigEntry = self._get_reconfigure_entry() + + return self.async_show_form( + step_id="reconfigure", + data_schema=self.add_suggested_values_to_schema( + vol.Schema( + { + vol.Required(CONF_HOST): str, + } + ), + { + CONF_HOST: entry.data[CONF_HOST], + CONF_PASSWORD: entry.data.get(CONF_PASSWORD, ""), + }, + ), + errors=errors, + ) + async def async_step_zeroconf( self, discovery_info: ZeroconfServiceInfo ) -> ConfigFlowResult: diff --git a/homeassistant/components/slide_local/quality_scale.yaml b/homeassistant/components/slide_local/quality_scale.yaml index 7a2be591927..54dfd87d98c 100644 --- a/homeassistant/components/slide_local/quality_scale.yaml +++ b/homeassistant/components/slide_local/quality_scale.yaml @@ -50,12 +50,12 @@ rules: diagnostics: done exception-translations: done icon-translations: done - reconfiguration-flow: todo + reconfiguration-flow: done dynamic-devices: status: exempt comment: | Slide_local represents a single physical device, no dynamic changes of devices possible (besides removal of instance itself). - discovery-update-info: todo + discovery-update-info: done repair-issues: todo docs-use-cases: done docs-supported-devices: done diff --git a/homeassistant/components/slide_local/strings.json b/homeassistant/components/slide_local/strings.json index 6aeda9f92fd..b5fe88255a7 100644 --- a/homeassistant/components/slide_local/strings.json +++ b/homeassistant/components/slide_local/strings.json @@ -12,6 +12,17 @@ "password": "The device code of your Slide (inside of the Slide or in the box, length is 8 characters). If your Slide runs firmware version 2 this is optional, as it is not used by the local API." } }, + "reconfigure": { + "description": "Reconfigure the information for your Slide device", + "data": { + "host": "[%key:common::config_flow::data::host%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "host": "[%key:component::slide_local::config::step::user::data_description::host%]", + "password": "[%key:component::slide_local::config::step::user::data_description::password%]" + } + }, "zeroconf_confirm": { "title": "Confirm setup for Slide", "description": "Do you want to setup {host}?" @@ -19,7 +30,9 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "discovery_connection_failed": "The setup of the discovered device failed with the following error: {error}. Please try to set it up manually." + "discovery_connection_failed": "The setup of the discovered device failed with the following error: {error}. Please try to set it up manually.", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "unique_id_mismatch": "The mac address of the device ({mac}) does not match the previous mac address." 
}, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", diff --git a/tests/components/slide_local/test_config_flow.py b/tests/components/slide_local/test_config_flow.py index 48be7dd7850..9f2923988ca 100644 --- a/tests/components/slide_local/test_config_flow.py +++ b/tests/components/slide_local/test_config_flow.py @@ -282,6 +282,36 @@ async def test_abort_if_already_setup( assert result["reason"] == "already_configured" +async def test_reconfigure( + hass: HomeAssistant, + mock_slide_api: AsyncMock, + mock_config_entry: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test reconfigure flow options.""" + + mock_config_entry.add_to_hass(hass) + + result = await mock_config_entry.start_reconfigure_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "127.0.0.3", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert len(mock_setup_entry.mock_calls) == 1 + + entry = hass.config_entries.async_get_entry(mock_config_entry.entry_id) + assert entry + assert entry.data[CONF_HOST] == "127.0.0.3" + + async def test_zeroconf( hass: HomeAssistant, mock_slide_api: AsyncMock, mock_setup_entry: AsyncMock ) -> None: From 0ad9af0febb080da2698e3951383e837d896627b Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Sun, 22 Dec 2024 20:23:55 +0100 Subject: [PATCH 1025/1198] Add already exists config flow tests for Ecovacs (#133572) Co-authored-by: Michael <35783820+mib1185@users.noreply.github.com> Co-authored-by: Franck Nijhof --- tests/components/ecovacs/test_config_flow.py | 120 +++++++++++-------- 1 file changed, 72 insertions(+), 48 deletions(-) diff --git a/tests/components/ecovacs/test_config_flow.py b/tests/components/ecovacs/test_config_flow.py index 5bf1144db0b..3a0cb188146 100644 --- a/tests/components/ecovacs/test_config_flow.py +++ b/tests/components/ecovacs/test_config_flow.py @@ -1,6 +1,7 @@ """Test Ecovacs config flow.""" from collections.abc import Awaitable, Callable +from dataclasses import dataclass, field import ssl from typing import Any from unittest.mock import AsyncMock, Mock, patch @@ -28,15 +29,20 @@ from .const import ( VALID_ENTRY_DATA_SELF_HOSTED_WITH_VALIDATE_CERT, ) +from tests.common import MockConfigEntry + _USER_STEP_SELF_HOSTED = {CONF_MODE: InstanceMode.SELF_HOSTED} -_TEST_FN_AUTH_ARG = "user_input_auth" -_TEST_FN_USER_ARG = "user_input_user" + +@dataclass +class _TestFnUserInput: + auth: dict[str, Any] + user: dict[str, Any] = field(default_factory=dict) async def _test_user_flow( hass: HomeAssistant, - user_input_auth: dict[str, Any], + user_input: _TestFnUserInput, ) -> dict[str, Any]: """Test config flow.""" result = await hass.config_entries.flow.async_init( @@ -50,15 +56,13 @@ async def _test_user_flow( return await hass.config_entries.flow.async_configure( result["flow_id"], - user_input=user_input_auth, + user_input=user_input.auth, ) async def _test_user_flow_show_advanced_options( hass: HomeAssistant, - *, - user_input_auth: dict[str, Any], - user_input_user: dict[str, Any] | None = None, + user_input: _TestFnUserInput, ) -> dict[str, Any]: """Test config flow.""" result = await hass.config_entries.flow.async_init( @@ -72,7 +76,7 @@ async def _test_user_flow_show_advanced_options( result = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input=user_input_user or {}, + 
user_input=user_input.user, ) assert result["type"] is FlowResultType.FORM @@ -81,29 +85,26 @@ async def _test_user_flow_show_advanced_options( return await hass.config_entries.flow.async_configure( result["flow_id"], - user_input=user_input_auth, + user_input=user_input.auth, ) @pytest.mark.parametrize( - ("test_fn", "test_fn_args", "entry_data"), + ("test_fn", "test_fn_user_input", "entry_data"), [ ( _test_user_flow_show_advanced_options, - {_TEST_FN_AUTH_ARG: VALID_ENTRY_DATA_CLOUD}, + _TestFnUserInput(VALID_ENTRY_DATA_CLOUD), VALID_ENTRY_DATA_CLOUD, ), ( _test_user_flow_show_advanced_options, - { - _TEST_FN_AUTH_ARG: VALID_ENTRY_DATA_SELF_HOSTED, - _TEST_FN_USER_ARG: _USER_STEP_SELF_HOSTED, - }, + _TestFnUserInput(VALID_ENTRY_DATA_SELF_HOSTED, _USER_STEP_SELF_HOSTED), VALID_ENTRY_DATA_SELF_HOSTED, ), ( _test_user_flow, - {_TEST_FN_AUTH_ARG: VALID_ENTRY_DATA_CLOUD}, + _TestFnUserInput(VALID_ENTRY_DATA_CLOUD), VALID_ENTRY_DATA_CLOUD, ), ], @@ -114,18 +115,12 @@ async def test_user_flow( mock_setup_entry: AsyncMock, mock_authenticator_authenticate: AsyncMock, mock_mqtt_client: Mock, - test_fn: Callable[[HomeAssistant, dict[str, Any]], Awaitable[dict[str, Any]]] - | Callable[ - [HomeAssistant, dict[str, Any], dict[str, Any]], Awaitable[dict[str, Any]] - ], - test_fn_args: dict[str, Any], + test_fn: Callable[[HomeAssistant, _TestFnUserInput], Awaitable[dict[str, Any]]], + test_fn_user_input: _TestFnUserInput, entry_data: dict[str, Any], ) -> None: """Test the user config flow.""" - result = await test_fn( - hass, - **test_fn_args, - ) + result = await test_fn(hass, test_fn_user_input) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == entry_data[CONF_USERNAME] assert result["data"] == entry_data @@ -161,24 +156,21 @@ def _cannot_connect_error(user_input: dict[str, Any]) -> str: ids=["cannot_connect", "invalid_auth", "unknown"], ) @pytest.mark.parametrize( - ("test_fn", "test_fn_args", "entry_data"), + ("test_fn", "test_fn_user_input", "entry_data"), [ ( _test_user_flow_show_advanced_options, - {_TEST_FN_AUTH_ARG: VALID_ENTRY_DATA_CLOUD}, + _TestFnUserInput(VALID_ENTRY_DATA_CLOUD), VALID_ENTRY_DATA_CLOUD, ), ( _test_user_flow_show_advanced_options, - { - _TEST_FN_AUTH_ARG: VALID_ENTRY_DATA_SELF_HOSTED, - _TEST_FN_USER_ARG: _USER_STEP_SELF_HOSTED, - }, + _TestFnUserInput(VALID_ENTRY_DATA_SELF_HOSTED, _USER_STEP_SELF_HOSTED), VALID_ENTRY_DATA_SELF_HOSTED_WITH_VALIDATE_CERT, ), ( _test_user_flow, - {_TEST_FN_AUTH_ARG: VALID_ENTRY_DATA_CLOUD}, + _TestFnUserInput(VALID_ENTRY_DATA_CLOUD), VALID_ENTRY_DATA_CLOUD, ), ], @@ -193,22 +185,16 @@ async def test_user_flow_raise_error( reason_rest: str, side_effect_mqtt: Exception, errors_mqtt: Callable[[dict[str, Any]], str], - test_fn: Callable[[HomeAssistant, dict[str, Any]], Awaitable[dict[str, Any]]] - | Callable[ - [HomeAssistant, dict[str, Any], dict[str, Any]], Awaitable[dict[str, Any]] - ], - test_fn_args: dict[str, Any], + test_fn: Callable[[HomeAssistant, _TestFnUserInput], Awaitable[dict[str, Any]]], + test_fn_user_input: _TestFnUserInput, entry_data: dict[str, Any], ) -> None: """Test handling error on library calls.""" - user_input_auth = test_fn_args[_TEST_FN_AUTH_ARG] + user_input_auth = test_fn_user_input.auth # Authenticator raises error mock_authenticator_authenticate.side_effect = side_effect_rest - result = await test_fn( - hass, - **test_fn_args, - ) + result = await test_fn(hass, test_fn_user_input) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "auth" assert result["errors"] == 
{"base": reason_rest} @@ -256,12 +242,14 @@ async def test_user_flow_self_hosted_error( result = await _test_user_flow_show_advanced_options( hass, - user_input_auth=VALID_ENTRY_DATA_SELF_HOSTED - | { - CONF_OVERRIDE_REST_URL: "bla://localhost:8000", - CONF_OVERRIDE_MQTT_URL: "mqtt://", - }, - user_input_user=_USER_STEP_SELF_HOSTED, + _TestFnUserInput( + VALID_ENTRY_DATA_SELF_HOSTED + | { + CONF_OVERRIDE_REST_URL: "bla://localhost:8000", + CONF_OVERRIDE_MQTT_URL: "mqtt://", + }, + _USER_STEP_SELF_HOSTED, + ), ) assert result["type"] is FlowResultType.FORM @@ -298,3 +286,39 @@ async def test_user_flow_self_hosted_error( mock_setup_entry.assert_called() mock_authenticator_authenticate.assert_called() mock_mqtt_client.verify_config.assert_called() + + +@pytest.mark.parametrize( + ("test_fn", "test_fn_user_input"), + [ + ( + _test_user_flow_show_advanced_options, + _TestFnUserInput(VALID_ENTRY_DATA_CLOUD), + ), + ( + _test_user_flow_show_advanced_options, + _TestFnUserInput(VALID_ENTRY_DATA_SELF_HOSTED, _USER_STEP_SELF_HOSTED), + ), + ( + _test_user_flow, + _TestFnUserInput(VALID_ENTRY_DATA_CLOUD), + ), + ], + ids=["advanced_cloud", "advanced_self_hosted", "cloud"], +) +async def test_already_exists( + hass: HomeAssistant, + test_fn: Callable[[HomeAssistant, _TestFnUserInput], Awaitable[dict[str, Any]]], + test_fn_user_input: _TestFnUserInput, +) -> None: + """Test we don't allow duplicated config entries.""" + MockConfigEntry(domain=DOMAIN, data=test_fn_user_input.auth).add_to_hass(hass) + + result = await test_fn( + hass, + test_fn_user_input, + ) + + assert result + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" From 26180486e75186ddec03130088bd8b405814126b Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Sun, 22 Dec 2024 15:05:07 -0500 Subject: [PATCH 1026/1198] Add media browsing to Cambridge Audio (#129106) * Add media browsing to Cambridge Audio * Remove one folder logic * Remove class mapping for presets --- .../cambridge_audio/media_browser.py | 85 +++++++++++++++++++ .../cambridge_audio/media_player.py | 16 +++- .../fixtures/get_presets_list.json | 2 +- .../snapshots/test_diagnostics.ambr | 2 +- .../snapshots/test_media_browser.ambr | 39 +++++++++ .../cambridge_audio/test_media_browser.py | 61 +++++++++++++ 6 files changed, 201 insertions(+), 4 deletions(-) create mode 100644 homeassistant/components/cambridge_audio/media_browser.py create mode 100644 tests/components/cambridge_audio/snapshots/test_media_browser.ambr create mode 100644 tests/components/cambridge_audio/test_media_browser.py diff --git a/homeassistant/components/cambridge_audio/media_browser.py b/homeassistant/components/cambridge_audio/media_browser.py new file mode 100644 index 00000000000..efe55ee792e --- /dev/null +++ b/homeassistant/components/cambridge_audio/media_browser.py @@ -0,0 +1,85 @@ +"""Support for media browsing.""" + +from aiostreammagic import StreamMagicClient +from aiostreammagic.models import Preset + +from homeassistant.components.media_player import BrowseMedia, MediaClass +from homeassistant.core import HomeAssistant + + +async def async_browse_media( + hass: HomeAssistant, + client: StreamMagicClient, + media_content_id: str | None, + media_content_type: str | None, +) -> BrowseMedia: + """Browse media.""" + + if media_content_type == "presets": + return await _presets_payload(client.preset_list.presets) + + return await _root_payload( + hass, + client, + ) + + +async def _root_payload( + hass: 
HomeAssistant, + client: StreamMagicClient, +) -> BrowseMedia: + """Return root payload for Cambridge Audio.""" + children: list[BrowseMedia] = [] + + if client.preset_list.presets: + children.append( + BrowseMedia( + title="Presets", + media_class=MediaClass.DIRECTORY, + media_content_id="", + media_content_type="presets", + thumbnail="https://brands.home-assistant.io/_/cambridge_audio/logo.png", + can_play=False, + can_expand=True, + ) + ) + + return BrowseMedia( + title="Cambridge Audio", + media_class=MediaClass.DIRECTORY, + media_content_id="", + media_content_type="root", + can_play=False, + can_expand=True, + children=children, + ) + + +async def _presets_payload(presets: list[Preset]) -> BrowseMedia: + """Create payload to list presets.""" + + children: list[BrowseMedia] = [] + for preset in presets: + if preset.state != "OK": + continue + children.append( + BrowseMedia( + title=preset.name, + media_class=MediaClass.MUSIC, + media_content_id=str(preset.preset_id), + media_content_type="preset", + can_play=True, + can_expand=False, + thumbnail=preset.art_url, + ) + ) + + return BrowseMedia( + title="Presets", + media_class=MediaClass.DIRECTORY, + media_content_id="", + media_content_type="presets", + can_play=False, + can_expand=True, + children=children, + ) diff --git a/homeassistant/components/cambridge_audio/media_player.py b/homeassistant/components/cambridge_audio/media_player.py index 9896effb07d..042178d5781 100644 --- a/homeassistant/components/cambridge_audio/media_player.py +++ b/homeassistant/components/cambridge_audio/media_player.py @@ -13,6 +13,7 @@ from aiostreammagic import ( ) from homeassistant.components.media_player import ( + BrowseMedia, MediaPlayerDeviceClass, MediaPlayerEntity, MediaPlayerEntityFeature, @@ -24,7 +25,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import CambridgeAudioConfigEntry +from . 
import CambridgeAudioConfigEntry, media_browser from .const import ( CAMBRIDGE_MEDIA_TYPE_AIRABLE, CAMBRIDGE_MEDIA_TYPE_INTERNET_RADIO, @@ -34,7 +35,8 @@ from .const import ( from .entity import CambridgeAudioEntity, command BASE_FEATURES = ( - MediaPlayerEntityFeature.SELECT_SOURCE + MediaPlayerEntityFeature.BROWSE_MEDIA + | MediaPlayerEntityFeature.SELECT_SOURCE | MediaPlayerEntityFeature.TURN_OFF | MediaPlayerEntityFeature.TURN_ON | MediaPlayerEntityFeature.PLAY_MEDIA @@ -338,3 +340,13 @@ class CambridgeAudioDevice(CambridgeAudioEntity, MediaPlayerEntity): if media_type == CAMBRIDGE_MEDIA_TYPE_INTERNET_RADIO: await self.client.play_radio_url("Radio", media_id) + + async def async_browse_media( + self, + media_content_type: MediaType | str | None = None, + media_content_id: str | None = None, + ) -> BrowseMedia: + """Implement the media browsing helper.""" + return await media_browser.async_browse_media( + self.hass, self.client, media_content_id, media_content_type + ) diff --git a/tests/components/cambridge_audio/fixtures/get_presets_list.json b/tests/components/cambridge_audio/fixtures/get_presets_list.json index 87d49e9fd30..6443b7dfbcf 100644 --- a/tests/components/cambridge_audio/fixtures/get_presets_list.json +++ b/tests/components/cambridge_audio/fixtures/get_presets_list.json @@ -28,7 +28,7 @@ "name": "Unknown Preset Type", "type": "Unknown", "class": "stream.unknown", - "state": "OK" + "state": "UNAVAILABLE" } ] } diff --git a/tests/components/cambridge_audio/snapshots/test_diagnostics.ambr b/tests/components/cambridge_audio/snapshots/test_diagnostics.ambr index 1ba9c4093f6..8de3ccea746 100644 --- a/tests/components/cambridge_audio/snapshots/test_diagnostics.ambr +++ b/tests/components/cambridge_audio/snapshots/test_diagnostics.ambr @@ -78,7 +78,7 @@ 'name': 'Unknown Preset Type', 'preset_class': 'stream.unknown', 'preset_id': 3, - 'state': 'OK', + 'state': 'UNAVAILABLE', 'type': 'Unknown', }), ]), diff --git a/tests/components/cambridge_audio/snapshots/test_media_browser.ambr b/tests/components/cambridge_audio/snapshots/test_media_browser.ambr new file mode 100644 index 00000000000..180d5ed1bb0 --- /dev/null +++ b/tests/components/cambridge_audio/snapshots/test_media_browser.ambr @@ -0,0 +1,39 @@ +# serializer version: 1 +# name: test_browse_media_root + list([ + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': None, + 'media_class': 'directory', + 'media_content_id': '', + 'media_content_type': 'presets', + 'thumbnail': 'https://brands.home-assistant.io/_/cambridge_audio/logo.png', + 'title': 'Presets', + }), + ]) +# --- +# name: test_browse_presets + list([ + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': 'music', + 'media_content_id': '1', + 'media_content_type': 'preset', + 'thumbnail': 'https://static.airable.io/43/68/432868.png', + 'title': 'Chicago House Radio', + }), + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': 'music', + 'media_content_id': '2', + 'media_content_type': 'preset', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b27325a5a1ed28871e8e53e62d59', + 'title': 'Spotify: Good & Evil', + }), + ]) +# --- diff --git a/tests/components/cambridge_audio/test_media_browser.py b/tests/components/cambridge_audio/test_media_browser.py new file mode 100644 index 00000000000..da72cfab534 --- /dev/null +++ b/tests/components/cambridge_audio/test_media_browser.py @@ -0,0 +1,61 @@ +"""Tests for the Cambridge Audio media browser.""" + +from 
unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from . import setup_integration +from .const import ENTITY_ID + +from tests.common import MockConfigEntry +from tests.typing import WebSocketGenerator + + +async def test_browse_media_root( + hass: HomeAssistant, + mock_stream_magic_client: AsyncMock, + mock_config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test the root browse page.""" + await setup_integration(hass, mock_config_entry) + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": ENTITY_ID, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"]["children"] == snapshot + + +async def test_browse_presets( + hass: HomeAssistant, + mock_stream_magic_client: AsyncMock, + mock_config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test the presets browse page.""" + await setup_integration(hass, mock_config_entry) + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": ENTITY_ID, + "media_content_type": "presets", + "media_content_id": "", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"]["children"] == snapshot From 368e958457a192a03fc1202f1e360659ef8c2afe Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sun, 22 Dec 2024 21:10:12 +0100 Subject: [PATCH 1027/1198] Load data for multiple days in Nord Pool (#133371) * Load data for multiple days in Nord Pool * Fix current day * Fix tests * Fix services * Fix fixtures * Mod get_data_current_day * Mods * simplify further --- .../components/nordpool/coordinator.py | 31 +- homeassistant/components/nordpool/sensor.py | 86 +- tests/components/nordpool/conftest.py | 93 +- ...period.json => delivery_period_today.json} | 0 .../fixtures/delivery_period_tomorrow.json | 272 +++++ .../fixtures/delivery_period_yesterday.json | 272 +++++ .../nordpool/snapshots/test_diagnostics.ambr | 1048 +++++++++++++---- tests/components/nordpool/test_config_flow.py | 80 +- tests/components/nordpool/test_coordinator.py | 57 +- tests/components/nordpool/test_diagnostics.py | 6 +- tests/components/nordpool/test_init.py | 15 +- tests/components/nordpool/test_sensor.py | 15 +- tests/components/nordpool/test_services.py | 69 +- 13 files changed, 1582 insertions(+), 462 deletions(-) rename tests/components/nordpool/fixtures/{delivery_period.json => delivery_period_today.json} (100%) create mode 100644 tests/components/nordpool/fixtures/delivery_period_tomorrow.json create mode 100644 tests/components/nordpool/fixtures/delivery_period_yesterday.json diff --git a/homeassistant/components/nordpool/coordinator.py b/homeassistant/components/nordpool/coordinator.py index e6b36f7deee..0c9a7e9f337 100644 --- a/homeassistant/components/nordpool/coordinator.py +++ b/homeassistant/components/nordpool/coordinator.py @@ -10,6 +10,8 @@ from typing import TYPE_CHECKING from pynordpool import ( Currency, DeliveryPeriodData, + DeliveryPeriodEntry, + DeliveryPeriodsData, NordPoolClient, NordPoolEmptyResponseError, NordPoolError, @@ -29,7 +31,7 @@ if TYPE_CHECKING: from . 
import NordPoolConfigEntry -class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodData]): +class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodsData]): """A Nord Pool Data Update Coordinator.""" config_entry: NordPoolConfigEntry @@ -74,12 +76,16 @@ class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodData]): if data: self.async_set_updated_data(data) - async def api_call(self, retry: int = 3) -> DeliveryPeriodData | None: + async def api_call(self, retry: int = 3) -> DeliveryPeriodsData | None: """Make api call to retrieve data with retry if failure.""" data = None try: - data = await self.client.async_get_delivery_period( - dt_util.now(), + data = await self.client.async_get_delivery_periods( + [ + dt_util.now() - timedelta(days=1), + dt_util.now(), + dt_util.now() + timedelta(days=1), + ], Currency(self.config_entry.data[CONF_CURRENCY]), self.config_entry.data[CONF_AREAS], ) @@ -97,3 +103,20 @@ class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodData]): self.async_set_update_error(error) return data + + def merge_price_entries(self) -> list[DeliveryPeriodEntry]: + """Return the merged price entries.""" + merged_entries: list[DeliveryPeriodEntry] = [] + for del_period in self.data.entries: + merged_entries.extend(del_period.entries) + return merged_entries + + def get_data_current_day(self) -> DeliveryPeriodData: + """Return the current day data.""" + current_day = dt_util.utcnow().strftime("%Y-%m-%d") + delivery_period: DeliveryPeriodData = self.data.entries[0] + for del_period in self.data.entries: + if del_period.requested_date == current_day: + delivery_period = del_period + break + return delivery_period diff --git a/homeassistant/components/nordpool/sensor.py b/homeassistant/components/nordpool/sensor.py index fe966e99168..30910f8e5f6 100644 --- a/homeassistant/components/nordpool/sensor.py +++ b/homeassistant/components/nordpool/sensor.py @@ -6,8 +6,6 @@ from collections.abc import Callable from dataclasses import dataclass from datetime import datetime, timedelta -from pynordpool import DeliveryPeriodData - from homeassistant.components.sensor import ( EntityCategory, SensorDeviceClass, @@ -29,34 +27,34 @@ PARALLEL_UPDATES = 0 def validate_prices( func: Callable[ - [DeliveryPeriodData], dict[str, tuple[float | None, float, float | None]] + [NordpoolPriceSensor], dict[str, tuple[float | None, float, float | None]] ], - data: DeliveryPeriodData, + entity: NordpoolPriceSensor, area: str, index: int, ) -> float | None: """Validate and return.""" - if result := func(data)[area][index]: + if result := func(entity)[area][index]: return result / 1000 return None def get_prices( - data: DeliveryPeriodData, + entity: NordpoolPriceSensor, ) -> dict[str, tuple[float | None, float, float | None]]: """Return previous, current and next prices. 
Output: {"SE3": (10.0, 10.5, 12.1)} """ + data = entity.coordinator.merge_price_entries() last_price_entries: dict[str, float] = {} current_price_entries: dict[str, float] = {} next_price_entries: dict[str, float] = {} current_time = dt_util.utcnow() previous_time = current_time - timedelta(hours=1) next_time = current_time + timedelta(hours=1) - price_data = data.entries - LOGGER.debug("Price data: %s", price_data) - for entry in price_data: + LOGGER.debug("Price data: %s", data) + for entry in data: if entry.start <= current_time <= entry.end: current_price_entries = entry.entry if entry.start <= previous_time <= entry.end: @@ -82,11 +80,12 @@ def get_prices( def get_min_max_price( - data: DeliveryPeriodData, - area: str, + entity: NordpoolPriceSensor, func: Callable[[float, float], float], ) -> tuple[float, datetime, datetime]: """Get the lowest price from the data.""" + data = entity.coordinator.get_data_current_day() + area = entity.area price_data = data.entries price: float = price_data[0].entry[area] start: datetime = price_data[0].start @@ -102,12 +101,13 @@ def get_min_max_price( def get_blockprices( - data: DeliveryPeriodData, + entity: NordpoolBlockPriceSensor, ) -> dict[str, dict[str, tuple[datetime, datetime, float, float, float]]]: """Return average, min and max for block prices. Output: {"SE3": {"Off-peak 1": (_datetime_, _datetime_, 9.3, 10.5, 12.1)}} """ + data = entity.coordinator.get_data_current_day() result: dict[str, dict[str, tuple[datetime, datetime, float, float, float]]] = {} block_prices = data.block_prices for entry in block_prices: @@ -130,15 +130,15 @@ def get_blockprices( class NordpoolDefaultSensorEntityDescription(SensorEntityDescription): """Describes Nord Pool default sensor entity.""" - value_fn: Callable[[DeliveryPeriodData], str | float | datetime | None] + value_fn: Callable[[NordpoolSensor], str | float | datetime | None] @dataclass(frozen=True, kw_only=True) class NordpoolPricesSensorEntityDescription(SensorEntityDescription): """Describes Nord Pool prices sensor entity.""" - value_fn: Callable[[DeliveryPeriodData, str], float | None] - extra_fn: Callable[[DeliveryPeriodData, str], dict[str, str] | None] + value_fn: Callable[[NordpoolPriceSensor], float | None] + extra_fn: Callable[[NordpoolPriceSensor], dict[str, str] | None] @dataclass(frozen=True, kw_only=True) @@ -155,19 +155,19 @@ DEFAULT_SENSOR_TYPES: tuple[NordpoolDefaultSensorEntityDescription, ...] = ( key="updated_at", translation_key="updated_at", device_class=SensorDeviceClass.TIMESTAMP, - value_fn=lambda data: data.updated_at, + value_fn=lambda entity: entity.coordinator.get_data_current_day().updated_at, entity_category=EntityCategory.DIAGNOSTIC, ), NordpoolDefaultSensorEntityDescription( key="currency", translation_key="currency", - value_fn=lambda data: data.currency, + value_fn=lambda entity: entity.coordinator.get_data_current_day().currency, entity_category=EntityCategory.DIAGNOSTIC, ), NordpoolDefaultSensorEntityDescription( key="exchange_rate", translation_key="exchange_rate", - value_fn=lambda data: data.exchange_rate, + value_fn=lambda entity: entity.coordinator.get_data_current_day().exchange_rate, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, entity_category=EntityCategory.DIAGNOSTIC, @@ -177,42 +177,42 @@ PRICES_SENSOR_TYPES: tuple[NordpoolPricesSensorEntityDescription, ...] 
= ( NordpoolPricesSensorEntityDescription( key="current_price", translation_key="current_price", - value_fn=lambda data, area: validate_prices(get_prices, data, area, 1), - extra_fn=lambda data, area: None, + value_fn=lambda entity: validate_prices(get_prices, entity, entity.area, 1), + extra_fn=lambda entity: None, state_class=SensorStateClass.MEASUREMENT, suggested_display_precision=2, ), NordpoolPricesSensorEntityDescription( key="last_price", translation_key="last_price", - value_fn=lambda data, area: validate_prices(get_prices, data, area, 0), - extra_fn=lambda data, area: None, + value_fn=lambda entity: validate_prices(get_prices, entity, entity.area, 0), + extra_fn=lambda entity: None, suggested_display_precision=2, ), NordpoolPricesSensorEntityDescription( key="next_price", translation_key="next_price", - value_fn=lambda data, area: validate_prices(get_prices, data, area, 2), - extra_fn=lambda data, area: None, + value_fn=lambda entity: validate_prices(get_prices, entity, entity.area, 2), + extra_fn=lambda entity: None, suggested_display_precision=2, ), NordpoolPricesSensorEntityDescription( key="lowest_price", translation_key="lowest_price", - value_fn=lambda data, area: get_min_max_price(data, area, min)[0] / 1000, - extra_fn=lambda data, area: { - "start": get_min_max_price(data, area, min)[1].isoformat(), - "end": get_min_max_price(data, area, min)[2].isoformat(), + value_fn=lambda entity: get_min_max_price(entity, min)[0] / 1000, + extra_fn=lambda entity: { + "start": get_min_max_price(entity, min)[1].isoformat(), + "end": get_min_max_price(entity, min)[2].isoformat(), }, suggested_display_precision=2, ), NordpoolPricesSensorEntityDescription( key="highest_price", translation_key="highest_price", - value_fn=lambda data, area: get_min_max_price(data, area, max)[0] / 1000, - extra_fn=lambda data, area: { - "start": get_min_max_price(data, area, max)[1].isoformat(), - "end": get_min_max_price(data, area, max)[2].isoformat(), + value_fn=lambda entity: get_min_max_price(entity, max)[0] / 1000, + extra_fn=lambda entity: { + "start": get_min_max_price(entity, max)[1].isoformat(), + "end": get_min_max_price(entity, max)[2].isoformat(), }, suggested_display_precision=2, ), @@ -276,11 +276,12 @@ async def async_setup_entry( """Set up Nord Pool sensor platform.""" coordinator = entry.runtime_data + current_day_data = entry.runtime_data.get_data_current_day() entities: list[NordpoolBaseEntity] = [] - currency = entry.runtime_data.data.currency + currency = current_day_data.currency - for area in get_prices(entry.runtime_data.data): + for area in current_day_data.area_average: LOGGER.debug("Setting up base sensors for area %s", area) entities.extend( NordpoolSensor(coordinator, description, area) @@ -297,16 +298,16 @@ async def async_setup_entry( NordpoolDailyAveragePriceSensor(coordinator, description, area, currency) for description in DAILY_AVERAGE_PRICES_SENSOR_TYPES ) - for block_name in get_blockprices(coordinator.data)[area]: + for block_prices in entry.runtime_data.get_data_current_day().block_prices: LOGGER.debug( "Setting up block price sensors for area %s with currency %s in block %s", area, currency, - block_name, + block_prices.name, ) entities.extend( NordpoolBlockPriceSensor( - coordinator, description, area, currency, block_name + coordinator, description, area, currency, block_prices.name ) for description in BLOCK_PRICES_SENSOR_TYPES ) @@ -321,7 +322,7 @@ class NordpoolSensor(NordpoolBaseEntity, SensorEntity): @property def native_value(self) -> str | float | datetime | 
None: """Return value of sensor.""" - return self.entity_description.value_fn(self.coordinator.data) + return self.entity_description.value_fn(self) class NordpoolPriceSensor(NordpoolBaseEntity, SensorEntity): @@ -343,12 +344,12 @@ class NordpoolPriceSensor(NordpoolBaseEntity, SensorEntity): @property def native_value(self) -> float | None: """Return value of sensor.""" - return self.entity_description.value_fn(self.coordinator.data, self.area) + return self.entity_description.value_fn(self) @property def extra_state_attributes(self) -> dict[str, str] | None: """Return the extra state attributes.""" - return self.entity_description.extra_fn(self.coordinator.data, self.area) + return self.entity_description.extra_fn(self) class NordpoolBlockPriceSensor(NordpoolBaseEntity, SensorEntity): @@ -376,7 +377,7 @@ class NordpoolBlockPriceSensor(NordpoolBaseEntity, SensorEntity): def native_value(self) -> float | datetime | None: """Return value of sensor.""" return self.entity_description.value_fn( - get_blockprices(self.coordinator.data)[self.area][self.block_name] + get_blockprices(self)[self.area][self.block_name] ) @@ -399,4 +400,5 @@ class NordpoolDailyAveragePriceSensor(NordpoolBaseEntity, SensorEntity): @property def native_value(self) -> float | None: """Return value of sensor.""" - return self.coordinator.data.area_average[self.area] / 1000 + data = self.coordinator.get_data_current_day() + return data.area_average[self.area] / 1000 diff --git a/tests/components/nordpool/conftest.py b/tests/components/nordpool/conftest.py index 9b7ab4b2afa..1c26c7f84eb 100644 --- a/tests/components/nordpool/conftest.py +++ b/tests/components/nordpool/conftest.py @@ -3,20 +3,16 @@ from __future__ import annotations from collections.abc import AsyncGenerator -from datetime import datetime import json from typing import Any from unittest.mock import patch -from pynordpool import NordPoolClient -from pynordpool.const import Currency -from pynordpool.model import DeliveryPeriodData +from pynordpool import API, NordPoolClient import pytest from homeassistant.components.nordpool.const import DOMAIN from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant -from homeassistant.util import dt as dt_util from . 
import ENTRY_CONFIG @@ -32,9 +28,7 @@ async def no_sleep() -> AsyncGenerator[None]: @pytest.fixture -async def load_int( - hass: HomeAssistant, get_data: DeliveryPeriodData -) -> MockConfigEntry: +async def load_int(hass: HomeAssistant, get_client: NordPoolClient) -> MockConfigEntry: """Set up the Nord Pool integration in Home Assistant.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -44,40 +38,83 @@ async def load_int( config_entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ), - ): - await hass.config_entries.async_setup(config_entry.entry_id) + await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() return config_entry -@pytest.fixture(name="get_data") +@pytest.fixture(name="get_client") async def get_data_from_library( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, load_json: dict[str, Any] -) -> DeliveryPeriodData: + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + load_json: list[dict[str, Any]], +) -> AsyncGenerator[NordPoolClient]: """Retrieve data from Nord Pool library.""" - + aioclient_mock.request( + "GET", + url=API + "/DayAheadPrices", + params={ + "date": "2024-11-05", + "market": "DayAhead", + "deliveryArea": "SE3,SE4", + "currency": "SEK", + }, + json=load_json[0], + ) + aioclient_mock.request( + "GET", + url=API + "/DayAheadPrices", + params={ + "date": "2024-11-05", + "market": "DayAhead", + "deliveryArea": "SE3", + "currency": "EUR", + }, + json=load_json[0], + ) + aioclient_mock.request( + "GET", + url=API + "/DayAheadPrices", + params={ + "date": "2024-11-04", + "market": "DayAhead", + "deliveryArea": "SE3,SE4", + "currency": "SEK", + }, + json=load_json[1], + ) + aioclient_mock.request( + "GET", + url=API + "/DayAheadPrices", + params={ + "date": "2024-11-06", + "market": "DayAhead", + "deliveryArea": "SE3,SE4", + "currency": "SEK", + }, + json=load_json[2], + ) client = NordPoolClient(aioclient_mock.create_session(hass.loop)) - with patch("pynordpool.NordPoolClient._get", return_value=load_json): - output = await client.async_get_delivery_period( - datetime(2024, 11, 5, 13, tzinfo=dt_util.UTC), Currency.SEK, ["SE3", "SE4"] - ) + yield client await client._session.close() - return output @pytest.fixture(name="load_json") -def load_json_from_fixture(load_data: str) -> dict[str, Any]: +def load_json_from_fixture(load_data: list[str, str, str]) -> list[dict[str, Any]]: """Load fixture with json data and return.""" - return json.loads(load_data) + return [ + json.loads(load_data[0]), + json.loads(load_data[1]), + json.loads(load_data[2]), + ] @pytest.fixture(name="load_data", scope="package") -def load_data_from_fixture() -> str: +def load_data_from_fixture() -> list[str, str, str]: """Load fixture with fixture data and return.""" - return load_fixture("delivery_period.json", DOMAIN) + return [ + load_fixture("delivery_period_today.json", DOMAIN), + load_fixture("delivery_period_yesterday.json", DOMAIN), + load_fixture("delivery_period_tomorrow.json", DOMAIN), + ] diff --git a/tests/components/nordpool/fixtures/delivery_period.json b/tests/components/nordpool/fixtures/delivery_period_today.json similarity index 100% rename from tests/components/nordpool/fixtures/delivery_period.json rename to tests/components/nordpool/fixtures/delivery_period_today.json diff --git a/tests/components/nordpool/fixtures/delivery_period_tomorrow.json 
b/tests/components/nordpool/fixtures/delivery_period_tomorrow.json new file mode 100644 index 00000000000..abaa24e93ed --- /dev/null +++ b/tests/components/nordpool/fixtures/delivery_period_tomorrow.json @@ -0,0 +1,272 @@ +{ + "deliveryDateCET": "2024-11-06", + "version": 3, + "updatedAt": "2024-11-05T12:12:51.9853434Z", + "deliveryAreas": ["SE3", "SE4"], + "market": "DayAhead", + "multiAreaEntries": [ + { + "deliveryStart": "2024-11-05T23:00:00Z", + "deliveryEnd": "2024-11-06T00:00:00Z", + "entryPerArea": { + "SE3": 126.66, + "SE4": 275.6 + } + }, + { + "deliveryStart": "2024-11-06T00:00:00Z", + "deliveryEnd": "2024-11-06T01:00:00Z", + "entryPerArea": { + "SE3": 74.06, + "SE4": 157.34 + } + }, + { + "deliveryStart": "2024-11-06T01:00:00Z", + "deliveryEnd": "2024-11-06T02:00:00Z", + "entryPerArea": { + "SE3": 78.38, + "SE4": 165.62 + } + }, + { + "deliveryStart": "2024-11-06T02:00:00Z", + "deliveryEnd": "2024-11-06T03:00:00Z", + "entryPerArea": { + "SE3": 92.37, + "SE4": 196.17 + } + }, + { + "deliveryStart": "2024-11-06T03:00:00Z", + "deliveryEnd": "2024-11-06T04:00:00Z", + "entryPerArea": { + "SE3": 99.14, + "SE4": 190.58 + } + }, + { + "deliveryStart": "2024-11-06T04:00:00Z", + "deliveryEnd": "2024-11-06T05:00:00Z", + "entryPerArea": { + "SE3": 447.51, + "SE4": 932.93 + } + }, + { + "deliveryStart": "2024-11-06T05:00:00Z", + "deliveryEnd": "2024-11-06T06:00:00Z", + "entryPerArea": { + "SE3": 641.47, + "SE4": 1284.69 + } + }, + { + "deliveryStart": "2024-11-06T06:00:00Z", + "deliveryEnd": "2024-11-06T07:00:00Z", + "entryPerArea": { + "SE3": 1820.5, + "SE4": 2449.96 + } + }, + { + "deliveryStart": "2024-11-06T07:00:00Z", + "deliveryEnd": "2024-11-06T08:00:00Z", + "entryPerArea": { + "SE3": 1723.0, + "SE4": 2244.22 + } + }, + { + "deliveryStart": "2024-11-06T08:00:00Z", + "deliveryEnd": "2024-11-06T09:00:00Z", + "entryPerArea": { + "SE3": 1298.57, + "SE4": 1643.45 + } + }, + { + "deliveryStart": "2024-11-06T09:00:00Z", + "deliveryEnd": "2024-11-06T10:00:00Z", + "entryPerArea": { + "SE3": 1099.25, + "SE4": 1507.23 + } + }, + { + "deliveryStart": "2024-11-06T10:00:00Z", + "deliveryEnd": "2024-11-06T11:00:00Z", + "entryPerArea": { + "SE3": 903.31, + "SE4": 1362.84 + } + }, + { + "deliveryStart": "2024-11-06T11:00:00Z", + "deliveryEnd": "2024-11-06T12:00:00Z", + "entryPerArea": { + "SE3": 959.99, + "SE4": 1376.13 + } + }, + { + "deliveryStart": "2024-11-06T12:00:00Z", + "deliveryEnd": "2024-11-06T13:00:00Z", + "entryPerArea": { + "SE3": 1186.61, + "SE4": 1449.96 + } + }, + { + "deliveryStart": "2024-11-06T13:00:00Z", + "deliveryEnd": "2024-11-06T14:00:00Z", + "entryPerArea": { + "SE3": 1307.67, + "SE4": 1608.35 + } + }, + { + "deliveryStart": "2024-11-06T14:00:00Z", + "deliveryEnd": "2024-11-06T15:00:00Z", + "entryPerArea": { + "SE3": 1385.46, + "SE4": 2110.8 + } + }, + { + "deliveryStart": "2024-11-06T15:00:00Z", + "deliveryEnd": "2024-11-06T16:00:00Z", + "entryPerArea": { + "SE3": 1366.8, + "SE4": 3031.25 + } + }, + { + "deliveryStart": "2024-11-06T16:00:00Z", + "deliveryEnd": "2024-11-06T17:00:00Z", + "entryPerArea": { + "SE3": 2366.57, + "SE4": 5511.77 + } + }, + { + "deliveryStart": "2024-11-06T17:00:00Z", + "deliveryEnd": "2024-11-06T18:00:00Z", + "entryPerArea": { + "SE3": 1481.92, + "SE4": 3351.64 + } + }, + { + "deliveryStart": "2024-11-06T18:00:00Z", + "deliveryEnd": "2024-11-06T19:00:00Z", + "entryPerArea": { + "SE3": 1082.69, + "SE4": 2484.95 + } + }, + { + "deliveryStart": "2024-11-06T19:00:00Z", + "deliveryEnd": "2024-11-06T20:00:00Z", + "entryPerArea": { + "SE3": 716.82, + 
"SE4": 1624.33 + } + }, + { + "deliveryStart": "2024-11-06T20:00:00Z", + "deliveryEnd": "2024-11-06T21:00:00Z", + "entryPerArea": { + "SE3": 583.16, + "SE4": 1306.27 + } + }, + { + "deliveryStart": "2024-11-06T21:00:00Z", + "deliveryEnd": "2024-11-06T22:00:00Z", + "entryPerArea": { + "SE3": 523.09, + "SE4": 1142.99 + } + }, + { + "deliveryStart": "2024-11-06T22:00:00Z", + "deliveryEnd": "2024-11-06T23:00:00Z", + "entryPerArea": { + "SE3": 250.64, + "SE4": 539.42 + } + } + ], + "blockPriceAggregates": [ + { + "blockName": "Off-peak 1", + "deliveryStart": "2024-11-05T23:00:00Z", + "deliveryEnd": "2024-11-06T07:00:00Z", + "averagePricePerArea": { + "SE3": { + "average": 422.51, + "min": 74.06, + "max": 1820.5 + }, + "SE4": { + "average": 706.61, + "min": 157.34, + "max": 2449.96 + } + } + }, + { + "blockName": "Peak", + "deliveryStart": "2024-11-06T07:00:00Z", + "deliveryEnd": "2024-11-06T19:00:00Z", + "averagePricePerArea": { + "SE3": { + "average": 1346.82, + "min": 903.31, + "max": 2366.57 + }, + "SE4": { + "average": 2306.88, + "min": 1362.84, + "max": 5511.77 + } + } + }, + { + "blockName": "Off-peak 2", + "deliveryStart": "2024-11-06T19:00:00Z", + "deliveryEnd": "2024-11-06T23:00:00Z", + "averagePricePerArea": { + "SE3": { + "average": 518.43, + "min": 250.64, + "max": 716.82 + }, + "SE4": { + "average": 1153.25, + "min": 539.42, + "max": 1624.33 + } + } + } + ], + "currency": "SEK", + "exchangeRate": 11.66314, + "areaStates": [ + { + "state": "Final", + "areas": ["SE3", "SE4"] + } + ], + "areaAverages": [ + { + "areaCode": "SE3", + "price": 900.65 + }, + { + "areaCode": "SE4", + "price": 1581.19 + } + ] +} diff --git a/tests/components/nordpool/fixtures/delivery_period_yesterday.json b/tests/components/nordpool/fixtures/delivery_period_yesterday.json new file mode 100644 index 00000000000..bc79aeb99f0 --- /dev/null +++ b/tests/components/nordpool/fixtures/delivery_period_yesterday.json @@ -0,0 +1,272 @@ +{ + "deliveryDateCET": "2024-11-04", + "version": 3, + "updatedAt": "2024-11-04T08:09:11.1931991Z", + "deliveryAreas": ["SE3", "SE4"], + "market": "DayAhead", + "multiAreaEntries": [ + { + "deliveryStart": "2024-11-03T23:00:00Z", + "deliveryEnd": "2024-11-04T00:00:00Z", + "entryPerArea": { + "SE3": 66.13, + "SE4": 78.59 + } + }, + { + "deliveryStart": "2024-11-04T00:00:00Z", + "deliveryEnd": "2024-11-04T01:00:00Z", + "entryPerArea": { + "SE3": 72.54, + "SE4": 86.51 + } + }, + { + "deliveryStart": "2024-11-04T01:00:00Z", + "deliveryEnd": "2024-11-04T02:00:00Z", + "entryPerArea": { + "SE3": 73.12, + "SE4": 84.88 + } + }, + { + "deliveryStart": "2024-11-04T02:00:00Z", + "deliveryEnd": "2024-11-04T03:00:00Z", + "entryPerArea": { + "SE3": 171.97, + "SE4": 217.26 + } + }, + { + "deliveryStart": "2024-11-04T03:00:00Z", + "deliveryEnd": "2024-11-04T04:00:00Z", + "entryPerArea": { + "SE3": 181.05, + "SE4": 227.74 + } + }, + { + "deliveryStart": "2024-11-04T04:00:00Z", + "deliveryEnd": "2024-11-04T05:00:00Z", + "entryPerArea": { + "SE3": 360.71, + "SE4": 414.61 + } + }, + { + "deliveryStart": "2024-11-04T05:00:00Z", + "deliveryEnd": "2024-11-04T06:00:00Z", + "entryPerArea": { + "SE3": 917.83, + "SE4": 1439.33 + } + }, + { + "deliveryStart": "2024-11-04T06:00:00Z", + "deliveryEnd": "2024-11-04T07:00:00Z", + "entryPerArea": { + "SE3": 1426.17, + "SE4": 1695.95 + } + }, + { + "deliveryStart": "2024-11-04T07:00:00Z", + "deliveryEnd": "2024-11-04T08:00:00Z", + "entryPerArea": { + "SE3": 1350.96, + "SE4": 1605.13 + } + }, + { + "deliveryStart": "2024-11-04T08:00:00Z", + "deliveryEnd": 
"2024-11-04T09:00:00Z", + "entryPerArea": { + "SE3": 1195.06, + "SE4": 1393.46 + } + }, + { + "deliveryStart": "2024-11-04T09:00:00Z", + "deliveryEnd": "2024-11-04T10:00:00Z", + "entryPerArea": { + "SE3": 992.35, + "SE4": 1126.71 + } + }, + { + "deliveryStart": "2024-11-04T10:00:00Z", + "deliveryEnd": "2024-11-04T11:00:00Z", + "entryPerArea": { + "SE3": 976.63, + "SE4": 1107.97 + } + }, + { + "deliveryStart": "2024-11-04T11:00:00Z", + "deliveryEnd": "2024-11-04T12:00:00Z", + "entryPerArea": { + "SE3": 952.76, + "SE4": 1085.73 + } + }, + { + "deliveryStart": "2024-11-04T12:00:00Z", + "deliveryEnd": "2024-11-04T13:00:00Z", + "entryPerArea": { + "SE3": 1029.37, + "SE4": 1177.71 + } + }, + { + "deliveryStart": "2024-11-04T13:00:00Z", + "deliveryEnd": "2024-11-04T14:00:00Z", + "entryPerArea": { + "SE3": 1043.35, + "SE4": 1194.59 + } + }, + { + "deliveryStart": "2024-11-04T14:00:00Z", + "deliveryEnd": "2024-11-04T15:00:00Z", + "entryPerArea": { + "SE3": 1359.57, + "SE4": 1561.12 + } + }, + { + "deliveryStart": "2024-11-04T15:00:00Z", + "deliveryEnd": "2024-11-04T16:00:00Z", + "entryPerArea": { + "SE3": 1848.35, + "SE4": 2145.84 + } + }, + { + "deliveryStart": "2024-11-04T16:00:00Z", + "deliveryEnd": "2024-11-04T17:00:00Z", + "entryPerArea": { + "SE3": 2812.53, + "SE4": 3313.53 + } + }, + { + "deliveryStart": "2024-11-04T17:00:00Z", + "deliveryEnd": "2024-11-04T18:00:00Z", + "entryPerArea": { + "SE3": 2351.69, + "SE4": 2751.87 + } + }, + { + "deliveryStart": "2024-11-04T18:00:00Z", + "deliveryEnd": "2024-11-04T19:00:00Z", + "entryPerArea": { + "SE3": 1553.08, + "SE4": 1842.77 + } + }, + { + "deliveryStart": "2024-11-04T19:00:00Z", + "deliveryEnd": "2024-11-04T20:00:00Z", + "entryPerArea": { + "SE3": 1165.02, + "SE4": 1398.35 + } + }, + { + "deliveryStart": "2024-11-04T20:00:00Z", + "deliveryEnd": "2024-11-04T21:00:00Z", + "entryPerArea": { + "SE3": 1007.48, + "SE4": 1172.35 + } + }, + { + "deliveryStart": "2024-11-04T21:00:00Z", + "deliveryEnd": "2024-11-04T22:00:00Z", + "entryPerArea": { + "SE3": 792.09, + "SE4": 920.28 + } + }, + { + "deliveryStart": "2024-11-04T22:00:00Z", + "deliveryEnd": "2024-11-04T23:00:00Z", + "entryPerArea": { + "SE3": 465.38, + "SE4": 528.83 + } + } + ], + "blockPriceAggregates": [ + { + "blockName": "Off-peak 1", + "deliveryStart": "2024-11-03T23:00:00Z", + "deliveryEnd": "2024-11-04T07:00:00Z", + "averagePricePerArea": { + "SE3": { + "average": 408.69, + "min": 66.13, + "max": 1426.17 + }, + "SE4": { + "average": 530.61, + "min": 78.59, + "max": 1695.95 + } + } + }, + { + "blockName": "Peak", + "deliveryStart": "2024-11-04T07:00:00Z", + "deliveryEnd": "2024-11-04T19:00:00Z", + "averagePricePerArea": { + "SE3": { + "average": 1455.48, + "min": 952.76, + "max": 2812.53 + }, + "SE4": { + "average": 1692.2, + "min": 1085.73, + "max": 3313.53 + } + } + }, + { + "blockName": "Off-peak 2", + "deliveryStart": "2024-11-04T19:00:00Z", + "deliveryEnd": "2024-11-04T23:00:00Z", + "averagePricePerArea": { + "SE3": { + "average": 857.49, + "min": 465.38, + "max": 1165.02 + }, + "SE4": { + "average": 1004.95, + "min": 528.83, + "max": 1398.35 + } + } + } + ], + "currency": "SEK", + "exchangeRate": 11.64318, + "areaStates": [ + { + "state": "Final", + "areas": ["SE3", "SE4"] + } + ], + "areaAverages": [ + { + "areaCode": "SE3", + "price": 1006.88 + }, + { + "areaCode": "SE4", + "price": 1190.46 + } + ] +} diff --git a/tests/components/nordpool/snapshots/test_diagnostics.ambr b/tests/components/nordpool/snapshots/test_diagnostics.ambr index dde2eca0022..76a3dd96405 100644 --- 
a/tests/components/nordpool/snapshots/test_diagnostics.ambr +++ b/tests/components/nordpool/snapshots/test_diagnostics.ambr @@ -2,282 +2,840 @@ # name: test_diagnostics dict({ 'raw': dict({ - 'areaAverages': list([ - dict({ - 'areaCode': 'SE3', - 'price': 900.74, - }), - dict({ - 'areaCode': 'SE4', - 'price': 1166.12, - }), - ]), - 'areaStates': list([ - dict({ - 'areas': list([ - 'SE3', - 'SE4', - ]), - 'state': 'Final', - }), - ]), - 'blockPriceAggregates': list([ - dict({ - 'averagePricePerArea': dict({ - 'SE3': dict({ - 'average': 422.87, - 'max': 1406.14, - 'min': 61.69, + '2024-11-04': dict({ + 'areaAverages': list([ + dict({ + 'areaCode': 'SE3', + 'price': 1006.88, + }), + dict({ + 'areaCode': 'SE4', + 'price': 1190.46, + }), + ]), + 'areaStates': list([ + dict({ + 'areas': list([ + 'SE3', + 'SE4', + ]), + 'state': 'Final', + }), + ]), + 'blockPriceAggregates': list([ + dict({ + 'averagePricePerArea': dict({ + 'SE3': dict({ + 'average': 408.69, + 'max': 1426.17, + 'min': 66.13, + }), + 'SE4': dict({ + 'average': 530.61, + 'max': 1695.95, + 'min': 78.59, + }), }), - 'SE4': dict({ - 'average': 497.97, - 'max': 1648.25, - 'min': 65.19, + 'blockName': 'Off-peak 1', + 'deliveryEnd': '2024-11-04T07:00:00Z', + 'deliveryStart': '2024-11-03T23:00:00Z', + }), + dict({ + 'averagePricePerArea': dict({ + 'SE3': dict({ + 'average': 1455.48, + 'max': 2812.53, + 'min': 952.76, + }), + 'SE4': dict({ + 'average': 1692.2, + 'max': 3313.53, + 'min': 1085.73, + }), + }), + 'blockName': 'Peak', + 'deliveryEnd': '2024-11-04T19:00:00Z', + 'deliveryStart': '2024-11-04T07:00:00Z', + }), + dict({ + 'averagePricePerArea': dict({ + 'SE3': dict({ + 'average': 857.49, + 'max': 1165.02, + 'min': 465.38, + }), + 'SE4': dict({ + 'average': 1004.95, + 'max': 1398.35, + 'min': 528.83, + }), + }), + 'blockName': 'Off-peak 2', + 'deliveryEnd': '2024-11-04T23:00:00Z', + 'deliveryStart': '2024-11-04T19:00:00Z', + }), + ]), + 'currency': 'SEK', + 'deliveryAreas': list([ + 'SE3', + 'SE4', + ]), + 'deliveryDateCET': '2024-11-04', + 'exchangeRate': 11.64318, + 'market': 'DayAhead', + 'multiAreaEntries': list([ + dict({ + 'deliveryEnd': '2024-11-04T00:00:00Z', + 'deliveryStart': '2024-11-03T23:00:00Z', + 'entryPerArea': dict({ + 'SE3': 66.13, + 'SE4': 78.59, }), }), - 'blockName': 'Off-peak 1', - 'deliveryEnd': '2024-11-05T07:00:00Z', - 'deliveryStart': '2024-11-04T23:00:00Z', - }), - dict({ - 'averagePricePerArea': dict({ - 'SE3': dict({ - 'average': 1315.97, - 'max': 2512.65, - 'min': 925.05, - }), - 'SE4': dict({ - 'average': 1735.59, - 'max': 3533.03, - 'min': 1081.72, + dict({ + 'deliveryEnd': '2024-11-04T01:00:00Z', + 'deliveryStart': '2024-11-04T00:00:00Z', + 'entryPerArea': dict({ + 'SE3': 72.54, + 'SE4': 86.51, }), }), - 'blockName': 'Peak', - 'deliveryEnd': '2024-11-05T19:00:00Z', - 'deliveryStart': '2024-11-05T07:00:00Z', - }), - dict({ - 'averagePricePerArea': dict({ - 'SE3': dict({ - 'average': 610.79, - 'max': 835.53, - 'min': 289.14, - }), - 'SE4': dict({ - 'average': 793.98, - 'max': 1112.57, - 'min': 349.21, + dict({ + 'deliveryEnd': '2024-11-04T02:00:00Z', + 'deliveryStart': '2024-11-04T01:00:00Z', + 'entryPerArea': dict({ + 'SE3': 73.12, + 'SE4': 84.88, }), }), - 'blockName': 'Off-peak 2', - 'deliveryEnd': '2024-11-05T23:00:00Z', - 'deliveryStart': '2024-11-05T19:00:00Z', - }), - ]), - 'currency': 'SEK', - 'deliveryAreas': list([ - 'SE3', - 'SE4', - ]), - 'deliveryDateCET': '2024-11-05', - 'exchangeRate': 11.6402, - 'market': 'DayAhead', - 'multiAreaEntries': list([ - dict({ - 'deliveryEnd': 
'2024-11-05T00:00:00Z', - 'deliveryStart': '2024-11-04T23:00:00Z', - 'entryPerArea': dict({ - 'SE3': 250.73, - 'SE4': 283.79, + dict({ + 'deliveryEnd': '2024-11-04T03:00:00Z', + 'deliveryStart': '2024-11-04T02:00:00Z', + 'entryPerArea': dict({ + 'SE3': 171.97, + 'SE4': 217.26, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T01:00:00Z', - 'deliveryStart': '2024-11-05T00:00:00Z', - 'entryPerArea': dict({ - 'SE3': 76.36, - 'SE4': 81.36, + dict({ + 'deliveryEnd': '2024-11-04T04:00:00Z', + 'deliveryStart': '2024-11-04T03:00:00Z', + 'entryPerArea': dict({ + 'SE3': 181.05, + 'SE4': 227.74, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T02:00:00Z', - 'deliveryStart': '2024-11-05T01:00:00Z', - 'entryPerArea': dict({ - 'SE3': 73.92, - 'SE4': 79.15, + dict({ + 'deliveryEnd': '2024-11-04T05:00:00Z', + 'deliveryStart': '2024-11-04T04:00:00Z', + 'entryPerArea': dict({ + 'SE3': 360.71, + 'SE4': 414.61, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T03:00:00Z', - 'deliveryStart': '2024-11-05T02:00:00Z', - 'entryPerArea': dict({ - 'SE3': 61.69, - 'SE4': 65.19, + dict({ + 'deliveryEnd': '2024-11-04T06:00:00Z', + 'deliveryStart': '2024-11-04T05:00:00Z', + 'entryPerArea': dict({ + 'SE3': 917.83, + 'SE4': 1439.33, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T04:00:00Z', - 'deliveryStart': '2024-11-05T03:00:00Z', - 'entryPerArea': dict({ - 'SE3': 64.6, - 'SE4': 68.44, + dict({ + 'deliveryEnd': '2024-11-04T07:00:00Z', + 'deliveryStart': '2024-11-04T06:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1426.17, + 'SE4': 1695.95, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T05:00:00Z', - 'deliveryStart': '2024-11-05T04:00:00Z', - 'entryPerArea': dict({ - 'SE3': 453.27, - 'SE4': 516.71, + dict({ + 'deliveryEnd': '2024-11-04T08:00:00Z', + 'deliveryStart': '2024-11-04T07:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1350.96, + 'SE4': 1605.13, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T06:00:00Z', - 'deliveryStart': '2024-11-05T05:00:00Z', - 'entryPerArea': dict({ - 'SE3': 996.28, - 'SE4': 1240.85, + dict({ + 'deliveryEnd': '2024-11-04T09:00:00Z', + 'deliveryStart': '2024-11-04T08:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1195.06, + 'SE4': 1393.46, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T07:00:00Z', - 'deliveryStart': '2024-11-05T06:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1406.14, - 'SE4': 1648.25, + dict({ + 'deliveryEnd': '2024-11-04T10:00:00Z', + 'deliveryStart': '2024-11-04T09:00:00Z', + 'entryPerArea': dict({ + 'SE3': 992.35, + 'SE4': 1126.71, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T08:00:00Z', - 'deliveryStart': '2024-11-05T07:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1346.54, - 'SE4': 1570.5, + dict({ + 'deliveryEnd': '2024-11-04T11:00:00Z', + 'deliveryStart': '2024-11-04T10:00:00Z', + 'entryPerArea': dict({ + 'SE3': 976.63, + 'SE4': 1107.97, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T09:00:00Z', - 'deliveryStart': '2024-11-05T08:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1150.28, - 'SE4': 1345.37, + dict({ + 'deliveryEnd': '2024-11-04T12:00:00Z', + 'deliveryStart': '2024-11-04T11:00:00Z', + 'entryPerArea': dict({ + 'SE3': 952.76, + 'SE4': 1085.73, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T10:00:00Z', - 'deliveryStart': '2024-11-05T09:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1031.32, - 'SE4': 1206.51, + dict({ + 'deliveryEnd': '2024-11-04T13:00:00Z', + 'deliveryStart': '2024-11-04T12:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1029.37, + 'SE4': 1177.71, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T11:00:00Z', 
- 'deliveryStart': '2024-11-05T10:00:00Z', - 'entryPerArea': dict({ - 'SE3': 927.37, - 'SE4': 1085.8, + dict({ + 'deliveryEnd': '2024-11-04T14:00:00Z', + 'deliveryStart': '2024-11-04T13:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1043.35, + 'SE4': 1194.59, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T12:00:00Z', - 'deliveryStart': '2024-11-05T11:00:00Z', - 'entryPerArea': dict({ - 'SE3': 925.05, - 'SE4': 1081.72, + dict({ + 'deliveryEnd': '2024-11-04T15:00:00Z', + 'deliveryStart': '2024-11-04T14:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1359.57, + 'SE4': 1561.12, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T13:00:00Z', - 'deliveryStart': '2024-11-05T12:00:00Z', - 'entryPerArea': dict({ - 'SE3': 949.49, - 'SE4': 1130.38, + dict({ + 'deliveryEnd': '2024-11-04T16:00:00Z', + 'deliveryStart': '2024-11-04T15:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1848.35, + 'SE4': 2145.84, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T14:00:00Z', - 'deliveryStart': '2024-11-05T13:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1042.03, - 'SE4': 1256.91, + dict({ + 'deliveryEnd': '2024-11-04T17:00:00Z', + 'deliveryStart': '2024-11-04T16:00:00Z', + 'entryPerArea': dict({ + 'SE3': 2812.53, + 'SE4': 3313.53, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T15:00:00Z', - 'deliveryStart': '2024-11-05T14:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1258.89, - 'SE4': 1765.82, + dict({ + 'deliveryEnd': '2024-11-04T18:00:00Z', + 'deliveryStart': '2024-11-04T17:00:00Z', + 'entryPerArea': dict({ + 'SE3': 2351.69, + 'SE4': 2751.87, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T16:00:00Z', - 'deliveryStart': '2024-11-05T15:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1816.45, - 'SE4': 2522.55, + dict({ + 'deliveryEnd': '2024-11-04T19:00:00Z', + 'deliveryStart': '2024-11-04T18:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1553.08, + 'SE4': 1842.77, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T17:00:00Z', - 'deliveryStart': '2024-11-05T16:00:00Z', - 'entryPerArea': dict({ - 'SE3': 2512.65, - 'SE4': 3533.03, + dict({ + 'deliveryEnd': '2024-11-04T20:00:00Z', + 'deliveryStart': '2024-11-04T19:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1165.02, + 'SE4': 1398.35, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T18:00:00Z', - 'deliveryStart': '2024-11-05T17:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1819.83, - 'SE4': 2524.06, + dict({ + 'deliveryEnd': '2024-11-04T21:00:00Z', + 'deliveryStart': '2024-11-04T20:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1007.48, + 'SE4': 1172.35, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T19:00:00Z', - 'deliveryStart': '2024-11-05T18:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1011.77, - 'SE4': 1804.46, + dict({ + 'deliveryEnd': '2024-11-04T22:00:00Z', + 'deliveryStart': '2024-11-04T21:00:00Z', + 'entryPerArea': dict({ + 'SE3': 792.09, + 'SE4': 920.28, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T20:00:00Z', - 'deliveryStart': '2024-11-05T19:00:00Z', - 'entryPerArea': dict({ - 'SE3': 835.53, - 'SE4': 1112.57, + dict({ + 'deliveryEnd': '2024-11-04T23:00:00Z', + 'deliveryStart': '2024-11-04T22:00:00Z', + 'entryPerArea': dict({ + 'SE3': 465.38, + 'SE4': 528.83, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T21:00:00Z', - 'deliveryStart': '2024-11-05T20:00:00Z', - 'entryPerArea': dict({ - 'SE3': 796.19, - 'SE4': 1051.69, + ]), + 'updatedAt': '2024-11-04T08:09:11.1931991Z', + 'version': 3, + }), + '2024-11-05': dict({ + 'areaAverages': list([ + dict({ + 'areaCode': 'SE3', + 'price': 900.74, }), - }), - dict({ - 'deliveryEnd': 
'2024-11-05T22:00:00Z', - 'deliveryStart': '2024-11-05T21:00:00Z', - 'entryPerArea': dict({ - 'SE3': 522.3, - 'SE4': 662.44, + dict({ + 'areaCode': 'SE4', + 'price': 1166.12, }), - }), - dict({ - 'deliveryEnd': '2024-11-05T23:00:00Z', - 'deliveryStart': '2024-11-05T22:00:00Z', - 'entryPerArea': dict({ - 'SE3': 289.14, - 'SE4': 349.21, + ]), + 'areaStates': list([ + dict({ + 'areas': list([ + 'SE3', + 'SE4', + ]), + 'state': 'Final', }), - }), - ]), - 'updatedAt': '2024-11-04T12:15:03.9456464Z', - 'version': 3, + ]), + 'blockPriceAggregates': list([ + dict({ + 'averagePricePerArea': dict({ + 'SE3': dict({ + 'average': 422.87, + 'max': 1406.14, + 'min': 61.69, + }), + 'SE4': dict({ + 'average': 497.97, + 'max': 1648.25, + 'min': 65.19, + }), + }), + 'blockName': 'Off-peak 1', + 'deliveryEnd': '2024-11-05T07:00:00Z', + 'deliveryStart': '2024-11-04T23:00:00Z', + }), + dict({ + 'averagePricePerArea': dict({ + 'SE3': dict({ + 'average': 1315.97, + 'max': 2512.65, + 'min': 925.05, + }), + 'SE4': dict({ + 'average': 1735.59, + 'max': 3533.03, + 'min': 1081.72, + }), + }), + 'blockName': 'Peak', + 'deliveryEnd': '2024-11-05T19:00:00Z', + 'deliveryStart': '2024-11-05T07:00:00Z', + }), + dict({ + 'averagePricePerArea': dict({ + 'SE3': dict({ + 'average': 610.79, + 'max': 835.53, + 'min': 289.14, + }), + 'SE4': dict({ + 'average': 793.98, + 'max': 1112.57, + 'min': 349.21, + }), + }), + 'blockName': 'Off-peak 2', + 'deliveryEnd': '2024-11-05T23:00:00Z', + 'deliveryStart': '2024-11-05T19:00:00Z', + }), + ]), + 'currency': 'SEK', + 'deliveryAreas': list([ + 'SE3', + 'SE4', + ]), + 'deliveryDateCET': '2024-11-05', + 'exchangeRate': 11.6402, + 'market': 'DayAhead', + 'multiAreaEntries': list([ + dict({ + 'deliveryEnd': '2024-11-05T00:00:00Z', + 'deliveryStart': '2024-11-04T23:00:00Z', + 'entryPerArea': dict({ + 'SE3': 250.73, + 'SE4': 283.79, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T01:00:00Z', + 'deliveryStart': '2024-11-05T00:00:00Z', + 'entryPerArea': dict({ + 'SE3': 76.36, + 'SE4': 81.36, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T02:00:00Z', + 'deliveryStart': '2024-11-05T01:00:00Z', + 'entryPerArea': dict({ + 'SE3': 73.92, + 'SE4': 79.15, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T03:00:00Z', + 'deliveryStart': '2024-11-05T02:00:00Z', + 'entryPerArea': dict({ + 'SE3': 61.69, + 'SE4': 65.19, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T04:00:00Z', + 'deliveryStart': '2024-11-05T03:00:00Z', + 'entryPerArea': dict({ + 'SE3': 64.6, + 'SE4': 68.44, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T05:00:00Z', + 'deliveryStart': '2024-11-05T04:00:00Z', + 'entryPerArea': dict({ + 'SE3': 453.27, + 'SE4': 516.71, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T06:00:00Z', + 'deliveryStart': '2024-11-05T05:00:00Z', + 'entryPerArea': dict({ + 'SE3': 996.28, + 'SE4': 1240.85, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T07:00:00Z', + 'deliveryStart': '2024-11-05T06:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1406.14, + 'SE4': 1648.25, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T08:00:00Z', + 'deliveryStart': '2024-11-05T07:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1346.54, + 'SE4': 1570.5, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T09:00:00Z', + 'deliveryStart': '2024-11-05T08:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1150.28, + 'SE4': 1345.37, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T10:00:00Z', + 'deliveryStart': '2024-11-05T09:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1031.32, + 'SE4': 1206.51, + }), + }), + dict({ + 'deliveryEnd': 
'2024-11-05T11:00:00Z', + 'deliveryStart': '2024-11-05T10:00:00Z', + 'entryPerArea': dict({ + 'SE3': 927.37, + 'SE4': 1085.8, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T12:00:00Z', + 'deliveryStart': '2024-11-05T11:00:00Z', + 'entryPerArea': dict({ + 'SE3': 925.05, + 'SE4': 1081.72, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T13:00:00Z', + 'deliveryStart': '2024-11-05T12:00:00Z', + 'entryPerArea': dict({ + 'SE3': 949.49, + 'SE4': 1130.38, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T14:00:00Z', + 'deliveryStart': '2024-11-05T13:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1042.03, + 'SE4': 1256.91, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T15:00:00Z', + 'deliveryStart': '2024-11-05T14:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1258.89, + 'SE4': 1765.82, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T16:00:00Z', + 'deliveryStart': '2024-11-05T15:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1816.45, + 'SE4': 2522.55, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T17:00:00Z', + 'deliveryStart': '2024-11-05T16:00:00Z', + 'entryPerArea': dict({ + 'SE3': 2512.65, + 'SE4': 3533.03, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T18:00:00Z', + 'deliveryStart': '2024-11-05T17:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1819.83, + 'SE4': 2524.06, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T19:00:00Z', + 'deliveryStart': '2024-11-05T18:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1011.77, + 'SE4': 1804.46, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T20:00:00Z', + 'deliveryStart': '2024-11-05T19:00:00Z', + 'entryPerArea': dict({ + 'SE3': 835.53, + 'SE4': 1112.57, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T21:00:00Z', + 'deliveryStart': '2024-11-05T20:00:00Z', + 'entryPerArea': dict({ + 'SE3': 796.19, + 'SE4': 1051.69, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T22:00:00Z', + 'deliveryStart': '2024-11-05T21:00:00Z', + 'entryPerArea': dict({ + 'SE3': 522.3, + 'SE4': 662.44, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T23:00:00Z', + 'deliveryStart': '2024-11-05T22:00:00Z', + 'entryPerArea': dict({ + 'SE3': 289.14, + 'SE4': 349.21, + }), + }), + ]), + 'updatedAt': '2024-11-04T12:15:03.9456464Z', + 'version': 3, + }), + '2024-11-06': dict({ + 'areaAverages': list([ + dict({ + 'areaCode': 'SE3', + 'price': 900.65, + }), + dict({ + 'areaCode': 'SE4', + 'price': 1581.19, + }), + ]), + 'areaStates': list([ + dict({ + 'areas': list([ + 'SE3', + 'SE4', + ]), + 'state': 'Final', + }), + ]), + 'blockPriceAggregates': list([ + dict({ + 'averagePricePerArea': dict({ + 'SE3': dict({ + 'average': 422.51, + 'max': 1820.5, + 'min': 74.06, + }), + 'SE4': dict({ + 'average': 706.61, + 'max': 2449.96, + 'min': 157.34, + }), + }), + 'blockName': 'Off-peak 1', + 'deliveryEnd': '2024-11-06T07:00:00Z', + 'deliveryStart': '2024-11-05T23:00:00Z', + }), + dict({ + 'averagePricePerArea': dict({ + 'SE3': dict({ + 'average': 1346.82, + 'max': 2366.57, + 'min': 903.31, + }), + 'SE4': dict({ + 'average': 2306.88, + 'max': 5511.77, + 'min': 1362.84, + }), + }), + 'blockName': 'Peak', + 'deliveryEnd': '2024-11-06T19:00:00Z', + 'deliveryStart': '2024-11-06T07:00:00Z', + }), + dict({ + 'averagePricePerArea': dict({ + 'SE3': dict({ + 'average': 518.43, + 'max': 716.82, + 'min': 250.64, + }), + 'SE4': dict({ + 'average': 1153.25, + 'max': 1624.33, + 'min': 539.42, + }), + }), + 'blockName': 'Off-peak 2', + 'deliveryEnd': '2024-11-06T23:00:00Z', + 'deliveryStart': '2024-11-06T19:00:00Z', + }), + ]), + 'currency': 'SEK', + 'deliveryAreas': list([ + 'SE3', + 'SE4', + ]), + 
'deliveryDateCET': '2024-11-06', + 'exchangeRate': 11.66314, + 'market': 'DayAhead', + 'multiAreaEntries': list([ + dict({ + 'deliveryEnd': '2024-11-06T00:00:00Z', + 'deliveryStart': '2024-11-05T23:00:00Z', + 'entryPerArea': dict({ + 'SE3': 126.66, + 'SE4': 275.6, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T01:00:00Z', + 'deliveryStart': '2024-11-06T00:00:00Z', + 'entryPerArea': dict({ + 'SE3': 74.06, + 'SE4': 157.34, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T02:00:00Z', + 'deliveryStart': '2024-11-06T01:00:00Z', + 'entryPerArea': dict({ + 'SE3': 78.38, + 'SE4': 165.62, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T03:00:00Z', + 'deliveryStart': '2024-11-06T02:00:00Z', + 'entryPerArea': dict({ + 'SE3': 92.37, + 'SE4': 196.17, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T04:00:00Z', + 'deliveryStart': '2024-11-06T03:00:00Z', + 'entryPerArea': dict({ + 'SE3': 99.14, + 'SE4': 190.58, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T05:00:00Z', + 'deliveryStart': '2024-11-06T04:00:00Z', + 'entryPerArea': dict({ + 'SE3': 447.51, + 'SE4': 932.93, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T06:00:00Z', + 'deliveryStart': '2024-11-06T05:00:00Z', + 'entryPerArea': dict({ + 'SE3': 641.47, + 'SE4': 1284.69, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T07:00:00Z', + 'deliveryStart': '2024-11-06T06:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1820.5, + 'SE4': 2449.96, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T08:00:00Z', + 'deliveryStart': '2024-11-06T07:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1723.0, + 'SE4': 2244.22, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T09:00:00Z', + 'deliveryStart': '2024-11-06T08:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1298.57, + 'SE4': 1643.45, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T10:00:00Z', + 'deliveryStart': '2024-11-06T09:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1099.25, + 'SE4': 1507.23, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T11:00:00Z', + 'deliveryStart': '2024-11-06T10:00:00Z', + 'entryPerArea': dict({ + 'SE3': 903.31, + 'SE4': 1362.84, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T12:00:00Z', + 'deliveryStart': '2024-11-06T11:00:00Z', + 'entryPerArea': dict({ + 'SE3': 959.99, + 'SE4': 1376.13, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T13:00:00Z', + 'deliveryStart': '2024-11-06T12:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1186.61, + 'SE4': 1449.96, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T14:00:00Z', + 'deliveryStart': '2024-11-06T13:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1307.67, + 'SE4': 1608.35, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T15:00:00Z', + 'deliveryStart': '2024-11-06T14:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1385.46, + 'SE4': 2110.8, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T16:00:00Z', + 'deliveryStart': '2024-11-06T15:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1366.8, + 'SE4': 3031.25, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T17:00:00Z', + 'deliveryStart': '2024-11-06T16:00:00Z', + 'entryPerArea': dict({ + 'SE3': 2366.57, + 'SE4': 5511.77, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T18:00:00Z', + 'deliveryStart': '2024-11-06T17:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1481.92, + 'SE4': 3351.64, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T19:00:00Z', + 'deliveryStart': '2024-11-06T18:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1082.69, + 'SE4': 2484.95, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T20:00:00Z', + 'deliveryStart': '2024-11-06T19:00:00Z', + 'entryPerArea': dict({ + 'SE3': 716.82, + 'SE4': 
1624.33, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T21:00:00Z', + 'deliveryStart': '2024-11-06T20:00:00Z', + 'entryPerArea': dict({ + 'SE3': 583.16, + 'SE4': 1306.27, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T22:00:00Z', + 'deliveryStart': '2024-11-06T21:00:00Z', + 'entryPerArea': dict({ + 'SE3': 523.09, + 'SE4': 1142.99, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T23:00:00Z', + 'deliveryStart': '2024-11-06T22:00:00Z', + 'entryPerArea': dict({ + 'SE3': 250.64, + 'SE4': 539.42, + }), + }), + ]), + 'updatedAt': '2024-11-05T12:12:51.9853434Z', + 'version': 3, + }), }), }) # --- diff --git a/tests/components/nordpool/test_config_flow.py b/tests/components/nordpool/test_config_flow.py index cfdfc63aca7..1f0e99b65ff 100644 --- a/tests/components/nordpool/test_config_flow.py +++ b/tests/components/nordpool/test_config_flow.py @@ -2,10 +2,11 @@ from __future__ import annotations +from typing import Any from unittest.mock import patch from pynordpool import ( - DeliveryPeriodData, + NordPoolClient, NordPoolConnectionError, NordPoolEmptyResponseError, NordPoolError, @@ -22,10 +23,11 @@ from homeassistant.data_entry_flow import FlowResultType from . import ENTRY_CONFIG from tests.common import MockConfigEntry +from tests.test_util.aiohttp import AiohttpClientMocker @pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") -async def test_form(hass: HomeAssistant, get_data: DeliveryPeriodData) -> None: +async def test_form(hass: HomeAssistant, get_client: NordPoolClient) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -34,17 +36,11 @@ async def test_form(hass: HomeAssistant, get_data: DeliveryPeriodData) -> None: assert result["step_id"] == "user" assert result["type"] is FlowResultType.FORM - with ( - patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - ENTRY_CONFIG, - ) - await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + ENTRY_CONFIG, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["version"] == 1 @@ -54,7 +50,7 @@ async def test_form(hass: HomeAssistant, get_data: DeliveryPeriodData) -> None: @pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") async def test_single_config_entry( - hass: HomeAssistant, load_int: None, get_data: DeliveryPeriodData + hass: HomeAssistant, load_int: None, get_client: NordPoolClient ) -> None: """Test abort for single config entry.""" @@ -77,7 +73,7 @@ async def test_single_config_entry( ) async def test_cannot_connect( hass: HomeAssistant, - get_data: DeliveryPeriodData, + get_client: NordPoolClient, error_message: Exception, p_error: str, ) -> None: @@ -101,14 +97,10 @@ async def test_cannot_connect( assert result["errors"] == {"base": p_error} - with patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input=ENTRY_CONFIG, - ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=ENTRY_CONFIG, + ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Nord Pool" @@ -119,25 +111,18 @@ async def test_cannot_connect( async def test_reconfigure( hass: HomeAssistant, load_int: MockConfigEntry, - get_data: 
DeliveryPeriodData, ) -> None: """Test reconfiguration.""" result = await load_int.start_reconfigure_flow(hass) - with ( - patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_AREAS: ["SE3"], - CONF_CURRENCY: "EUR", - }, - ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_AREAS: ["SE3"], + CONF_CURRENCY: "EUR", + }, + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reconfigure_successful" @@ -162,7 +147,8 @@ async def test_reconfigure( async def test_reconfigure_cannot_connect( hass: HomeAssistant, load_int: MockConfigEntry, - get_data: DeliveryPeriodData, + aioclient_mock: AiohttpClientMocker, + load_json: list[dict[str, Any]], error_message: Exception, p_error: str, ) -> None: @@ -184,17 +170,13 @@ async def test_reconfigure_cannot_connect( assert result["errors"] == {"base": p_error} - with patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_AREAS: ["SE3"], - CONF_CURRENCY: "EUR", - }, - ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_AREAS: ["SE3"], + CONF_CURRENCY: "EUR", + }, + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reconfigure_successful" diff --git a/tests/components/nordpool/test_coordinator.py b/tests/components/nordpool/test_coordinator.py index 68534237dee..7647fe4bdfe 100644 --- a/tests/components/nordpool/test_coordinator.py +++ b/tests/components/nordpool/test_coordinator.py @@ -7,8 +7,8 @@ from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory from pynordpool import ( - DeliveryPeriodData, NordPoolAuthenticationError, + NordPoolClient, NordPoolEmptyResponseError, NordPoolError, NordPoolResponseError, @@ -28,7 +28,7 @@ from tests.common import MockConfigEntry, async_fire_time_changed @pytest.mark.freeze_time("2024-11-05T10:00:00+00:00") async def test_coordinator( hass: HomeAssistant, - get_data: DeliveryPeriodData, + get_client: NordPoolClient, freezer: FrozenDateTimeFactory, caplog: pytest.LogCaptureFixture, ) -> None: @@ -41,30 +41,31 @@ async def test_coordinator( config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + state = hass.states.get("sensor.nord_pool_se3_current_price") + assert state.state == "0.92737" + with ( patch( "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + side_effect=NordPoolError("error"), ) as mock_data, ): - mock_data.return_value = get_data - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - mock_data.assert_called_once() - state = hass.states.get("sensor.nord_pool_se3_current_price") - assert state.state == "0.92737" - mock_data.reset_mock() - - mock_data.side_effect = NordPoolError("error") freezer.tick(timedelta(hours=1)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) assert mock_data.call_count == 4 state = hass.states.get("sensor.nord_pool_se3_current_price") assert state.state == STATE_UNAVAILABLE - mock_data.reset_mock() + with ( + patch( + 
"homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + side_effect=NordPoolAuthenticationError("Authentication error"), + ) as mock_data, + ): assert "Authentication error" not in caplog.text - mock_data.side_effect = NordPoolAuthenticationError("Authentication error") freezer.tick(timedelta(hours=1)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) @@ -72,10 +73,14 @@ async def test_coordinator( state = hass.states.get("sensor.nord_pool_se3_current_price") assert state.state == STATE_UNAVAILABLE assert "Authentication error" in caplog.text - mock_data.reset_mock() + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + side_effect=NordPoolEmptyResponseError("Empty response"), + ) as mock_data, + ): assert "Empty response" not in caplog.text - mock_data.side_effect = NordPoolEmptyResponseError("Empty response") freezer.tick(timedelta(hours=1)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) @@ -83,10 +88,14 @@ async def test_coordinator( state = hass.states.get("sensor.nord_pool_se3_current_price") assert state.state == STATE_UNAVAILABLE assert "Empty response" in caplog.text - mock_data.reset_mock() + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + side_effect=NordPoolResponseError("Response error"), + ) as mock_data, + ): assert "Response error" not in caplog.text - mock_data.side_effect = NordPoolResponseError("Response error") freezer.tick(timedelta(hours=1)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) @@ -94,13 +103,9 @@ async def test_coordinator( state = hass.states.get("sensor.nord_pool_se3_current_price") assert state.state == STATE_UNAVAILABLE assert "Response error" in caplog.text - mock_data.reset_mock() - mock_data.return_value = get_data - mock_data.side_effect = None - freezer.tick(timedelta(hours=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - mock_data.assert_called_once() - state = hass.states.get("sensor.nord_pool_se3_current_price") - assert state.state == "1.81645" + freezer.tick(timedelta(hours=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + state = hass.states.get("sensor.nord_pool_se3_current_price") + assert state.state == "1.81645" diff --git a/tests/components/nordpool/test_diagnostics.py b/tests/components/nordpool/test_diagnostics.py index 4639186ecf1..a9dfdd5eca5 100644 --- a/tests/components/nordpool/test_diagnostics.py +++ b/tests/components/nordpool/test_diagnostics.py @@ -2,19 +2,21 @@ from __future__ import annotations +import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant +from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator +@pytest.mark.freeze_time("2024-11-05T10:00:00+00:00") async def test_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, - load_int: ConfigEntry, + load_int: MockConfigEntry, snapshot: SnapshotAssertion, ) -> None: """Test generating diagnostics for a config entry.""" diff --git a/tests/components/nordpool/test_init.py b/tests/components/nordpool/test_init.py index ebebb8b60c1..3b1fc1fd8ec 100644 --- a/tests/components/nordpool/test_init.py +++ 
b/tests/components/nordpool/test_init.py @@ -5,7 +5,7 @@ from __future__ import annotations from unittest.mock import patch from pynordpool import ( - DeliveryPeriodData, + NordPoolClient, NordPoolConnectionError, NordPoolEmptyResponseError, NordPoolError, @@ -22,7 +22,8 @@ from . import ENTRY_CONFIG from tests.common import MockConfigEntry -async def test_unload_entry(hass: HomeAssistant, get_data: DeliveryPeriodData) -> None: +@pytest.mark.freeze_time("2024-11-05T10:00:00+00:00") +async def test_unload_entry(hass: HomeAssistant, get_client: NordPoolClient) -> None: """Test load and unload an entry.""" entry = MockConfigEntry( domain=DOMAIN, @@ -31,13 +32,7 @@ async def test_unload_entry(hass: HomeAssistant, get_data: DeliveryPeriodData) - ) entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ), - ): - await hass.config_entries.async_setup(entry.entry_id) + await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done(wait_background_tasks=True) assert entry.state is ConfigEntryState.LOADED @@ -56,7 +51,7 @@ async def test_unload_entry(hass: HomeAssistant, get_data: DeliveryPeriodData) - ], ) async def test_initial_startup_fails( - hass: HomeAssistant, get_data: DeliveryPeriodData, error: Exception + hass: HomeAssistant, get_client: NordPoolClient, error: Exception ) -> None: """Test load and unload an entry.""" entry = MockConfigEntry( diff --git a/tests/components/nordpool/test_sensor.py b/tests/components/nordpool/test_sensor.py index 5c2d138cb34..a1a27b5feec 100644 --- a/tests/components/nordpool/test_sensor.py +++ b/tests/components/nordpool/test_sensor.py @@ -6,7 +6,6 @@ import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -38,12 +37,12 @@ async def test_sensor_no_next_price(hass: HomeAssistant, load_int: ConfigEntry) assert current_price is not None assert last_price is not None assert next_price is not None - assert current_price.state == "0.28914" - assert last_price.state == "0.28914" - assert next_price.state == STATE_UNKNOWN + assert current_price.state == "0.12666" # SE3 2024-11-05T23:00:00Z + assert last_price.state == "0.28914" # SE3 2024-11-05T22:00:00Z + assert next_price.state == "0.07406" # SE3 2024-11-06T00:00:00Z" -@pytest.mark.freeze_time("2024-11-05T00:00:00+01:00") +@pytest.mark.freeze_time("2024-11-06T00:00:00+01:00") @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_sensor_no_previous_price( hass: HomeAssistant, load_int: ConfigEntry @@ -57,6 +56,6 @@ async def test_sensor_no_previous_price( assert current_price is not None assert last_price is not None assert next_price is not None - assert current_price.state == "0.25073" - assert last_price.state == STATE_UNKNOWN - assert next_price.state == "0.07636" + assert current_price.state == "0.12666" # SE3 2024-11-05T23:00:00Z + assert last_price.state == "0.28914" # SE3 2024-11-05T22:00:00Z + assert next_price.state == "0.07406" # SE3 2024-11-06T00:00:00Z diff --git a/tests/components/nordpool/test_services.py b/tests/components/nordpool/test_services.py index 224b4bc9981..6d6af685d28 100644 --- a/tests/components/nordpool/test_services.py +++ b/tests/components/nordpool/test_services.py @@ -3,7 +3,6 @@ from unittest.mock import patch from pynordpool 
import ( - DeliveryPeriodData, NordPoolAuthenticationError, NordPoolEmptyResponseError, NordPoolError, @@ -28,7 +27,7 @@ TEST_SERVICE_DATA = { ATTR_CONFIG_ENTRY: "to_replace", ATTR_DATE: "2024-11-05", ATTR_AREAS: "SE3", - ATTR_CURRENCY: "SEK", + ATTR_CURRENCY: "EUR", } TEST_SERVICE_DATA_USE_DEFAULTS = { ATTR_CONFIG_ENTRY: "to_replace", @@ -40,45 +39,32 @@ TEST_SERVICE_DATA_USE_DEFAULTS = { async def test_service_call( hass: HomeAssistant, load_int: MockConfigEntry, - get_data: DeliveryPeriodData, snapshot: SnapshotAssertion, ) -> None: """Test get_prices_for_date service call.""" - with ( - patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ), - ): - service_data = TEST_SERVICE_DATA.copy() - service_data[ATTR_CONFIG_ENTRY] = load_int.entry_id - response = await hass.services.async_call( - DOMAIN, - SERVICE_GET_PRICES_FOR_DATE, - service_data, - blocking=True, - return_response=True, - ) + service_data = TEST_SERVICE_DATA.copy() + service_data[ATTR_CONFIG_ENTRY] = load_int.entry_id + response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_PRICES_FOR_DATE, + service_data, + blocking=True, + return_response=True, + ) assert response == snapshot price_value = response["SE3"][0]["price"] - with ( - patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ), - ): - service_data = TEST_SERVICE_DATA_USE_DEFAULTS.copy() - service_data[ATTR_CONFIG_ENTRY] = load_int.entry_id - response = await hass.services.async_call( - DOMAIN, - SERVICE_GET_PRICES_FOR_DATE, - service_data, - blocking=True, - return_response=True, - ) + service_data = TEST_SERVICE_DATA_USE_DEFAULTS.copy() + service_data[ATTR_CONFIG_ENTRY] = load_int.entry_id + response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_PRICES_FOR_DATE, + service_data, + blocking=True, + return_response=True, + ) assert "SE3" in response assert response["SE3"][0]["price"] == price_value @@ -124,17 +110,10 @@ async def test_service_call_failures( async def test_service_call_config_entry_bad_state( hass: HomeAssistant, load_int: MockConfigEntry, - get_data: DeliveryPeriodData, ) -> None: """Test get_prices_for_date service call when config entry bad state.""" - with ( - patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ), - pytest.raises(ServiceValidationError) as err, - ): + with pytest.raises(ServiceValidationError) as err: await hass.services.async_call( DOMAIN, SERVICE_GET_PRICES_FOR_DATE, @@ -149,13 +128,7 @@ async def test_service_call_config_entry_bad_state( await hass.config_entries.async_unload(load_int.entry_id) await hass.async_block_till_done() - with ( - patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ), - pytest.raises(ServiceValidationError) as err, - ): + with pytest.raises(ServiceValidationError) as err: await hass.services.async_call( DOMAIN, SERVICE_GET_PRICES_FOR_DATE, From 0f18f128fda6384dacc71588db267cb6c934cc21 Mon Sep 17 00:00:00 2001 From: Raphael Hehl <7577984+RaHehl@users.noreply.github.com> Date: Sun, 22 Dec 2024 21:50:30 +0100 Subject: [PATCH 1028/1198] Unifiprotect Add user information retrieval for NFC and fingerprint events (#132604) Co-authored-by: J. 
Nick Koston --- .../components/unifiprotect/event.py | 62 ++- tests/components/unifiprotect/test_event.py | 368 +++++++++++++++++- 2 files changed, 417 insertions(+), 13 deletions(-) diff --git a/homeassistant/components/unifiprotect/event.py b/homeassistant/components/unifiprotect/event.py index f126920fb18..c8bce183e34 100644 --- a/homeassistant/components/unifiprotect/event.py +++ b/homeassistant/components/unifiprotect/event.py @@ -4,8 +4,6 @@ from __future__ import annotations import dataclasses -from uiprotect.data import Camera, EventType, ProtectAdoptableDeviceModel - from homeassistant.components.event import ( EventDeviceClass, EventEntity, @@ -14,17 +12,43 @@ from homeassistant.components.event import ( from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import Bootstrap from .const import ( ATTR_EVENT_ID, EVENT_TYPE_DOORBELL_RING, EVENT_TYPE_FINGERPRINT_IDENTIFIED, EVENT_TYPE_FINGERPRINT_NOT_IDENTIFIED, EVENT_TYPE_NFC_SCANNED, + KEYRINGS_KEY_TYPE_ID_NFC, + KEYRINGS_ULP_ID, + KEYRINGS_USER_FULL_NAME, + KEYRINGS_USER_STATUS, +) +from .data import ( + Camera, + EventType, + ProtectAdoptableDeviceModel, + ProtectData, + ProtectDeviceType, + UFPConfigEntry, ) -from .data import ProtectData, ProtectDeviceType, UFPConfigEntry from .entity import EventEntityMixin, ProtectDeviceEntity, ProtectEventMixin +def _add_ulp_user_infos( + bootstrap: Bootstrap, event_data: dict[str, str], ulp_id: str +) -> None: + """Add ULP user information to the event data.""" + if ulp_usr := bootstrap.ulp_users.by_ulp_id(ulp_id): + event_data.update( + { + KEYRINGS_ULP_ID: ulp_usr.ulp_id, + KEYRINGS_USER_FULL_NAME: ulp_usr.full_name, + KEYRINGS_USER_STATUS: ulp_usr.status, + } + ) + + @dataclasses.dataclass(frozen=True, kw_only=True) class ProtectEventEntityDescription(ProtectEventMixin, EventEntityDescription): """Describes UniFi Protect event entity.""" @@ -78,9 +102,22 @@ class ProtectDeviceNFCEventEntity(EventEntityMixin, ProtectDeviceEntity, EventEn and not self._event_already_ended(prev_event, prev_event_end) and event.type is EventType.NFC_CARD_SCANNED ): - event_data = {ATTR_EVENT_ID: event.id} + event_data = { + ATTR_EVENT_ID: event.id, + KEYRINGS_USER_FULL_NAME: "", + KEYRINGS_ULP_ID: "", + KEYRINGS_USER_STATUS: "", + KEYRINGS_KEY_TYPE_ID_NFC: "", + } + if event.metadata and event.metadata.nfc and event.metadata.nfc.nfc_id: - event_data["nfc_id"] = event.metadata.nfc.nfc_id + nfc_id = event.metadata.nfc.nfc_id + event_data[KEYRINGS_KEY_TYPE_ID_NFC] = nfc_id + keyring = self.data.api.bootstrap.keyrings.by_registry_id(nfc_id) + if keyring and keyring.ulp_user: + _add_ulp_user_infos( + self.data.api.bootstrap, event_data, keyring.ulp_user + ) self._trigger_event(EVENT_TYPE_NFC_SCANNED, event_data) self.async_write_ha_state() @@ -109,17 +146,22 @@ class ProtectDeviceFingerprintEventEntity( and not self._event_already_ended(prev_event, prev_event_end) and event.type is EventType.FINGERPRINT_IDENTIFIED ): - event_data = {ATTR_EVENT_ID: event.id} + event_data = { + ATTR_EVENT_ID: event.id, + KEYRINGS_USER_FULL_NAME: "", + KEYRINGS_ULP_ID: "", + } + event_identified = EVENT_TYPE_FINGERPRINT_NOT_IDENTIFIED if ( event.metadata and event.metadata.fingerprint and event.metadata.fingerprint.ulp_id ): - event_data["ulp_id"] = event.metadata.fingerprint.ulp_id event_identified = EVENT_TYPE_FINGERPRINT_IDENTIFIED - else: - event_data["ulp_id"] = "" - event_identified = EVENT_TYPE_FINGERPRINT_NOT_IDENTIFIED + ulp_id = 
event.metadata.fingerprint.ulp_id + if ulp_id: + event_data[KEYRINGS_ULP_ID] = ulp_id + _add_ulp_user_infos(self.data.api.bootstrap, event_data, ulp_id) self._trigger_event(event_identified, event_data) self.async_write_ha_state() diff --git a/tests/components/unifiprotect/test_event.py b/tests/components/unifiprotect/test_event.py index 6a26738f5e8..f674e14b519 100644 --- a/tests/components/unifiprotect/test_event.py +++ b/tests/components/unifiprotect/test_event.py @@ -175,6 +175,10 @@ async def test_doorbell_nfc_scanned( Platform.EVENT, doorbell, EVENT_DESCRIPTIONS[1] ) + ulp_id = "ulp_id" + test_user_full_name = "Test User" + test_nfc_id = "test_nfc_id" + unsub = async_track_state_change_event(hass, entity_id, _capture_event) event = Event( model=ModelType.EVENT, @@ -187,7 +191,224 @@ async def test_doorbell_nfc_scanned( smart_detect_event_ids=[], camera_id=doorbell.id, api=ufp.api, - metadata={"nfc": {"nfc_id": "test_nfc_id", "user_id": "test_user_id"}}, + metadata={"nfc": {"nfc_id": test_nfc_id, "user_id": "test_user_id"}}, + ) + + new_camera = doorbell.copy() + new_camera.last_nfc_card_scanned_event_id = "test_event_id" + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_keyring = Mock() + mock_keyring.registry_id = test_nfc_id + mock_keyring.registry_type = "nfc" + mock_keyring.ulp_user = ulp_id + ufp.api.bootstrap.keyrings.add(mock_keyring) + + mock_ulp_user = Mock() + mock_ulp_user.ulp_id = ulp_id + mock_ulp_user.full_name = test_user_full_name + mock_ulp_user.status = "ACTIVE" + ufp.api.bootstrap.ulp_users.add(mock_ulp_user) + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + assert len(events) == 1 + state = events[0].data["new_state"] + assert state + assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION + assert state.attributes[ATTR_EVENT_ID] == "test_event_id" + assert state.attributes["nfc_id"] == "test_nfc_id" + assert state.attributes["full_name"] == test_user_full_name + + unsub() + + +async def test_doorbell_nfc_scanned_ulpusr_deactivated( + hass: HomeAssistant, + ufp: MockUFPFixture, + doorbell: Camera, + unadopted_camera: Camera, + fixed_now: datetime, +) -> None: + """Test a doorbell NFC scanned event.""" + + await init_entry(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.EVENT, 3, 3) + events: list[HAEvent] = [] + + @callback + def _capture_event(event: HAEvent) -> None: + events.append(event) + + _, entity_id = ids_from_device_description( + Platform.EVENT, doorbell, EVENT_DESCRIPTIONS[1] + ) + + ulp_id = "ulp_id" + test_user_full_name = "Test User" + test_nfc_id = "test_nfc_id" + + unsub = async_track_state_change_event(hass, entity_id, _capture_event) + event = Event( + model=ModelType.EVENT, + id="test_event_id", + type=EventType.NFC_CARD_SCANNED, + start=fixed_now - timedelta(seconds=1), + end=None, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + metadata={"nfc": {"nfc_id": test_nfc_id, "user_id": "test_user_id"}}, + ) + + new_camera = doorbell.copy() + new_camera.last_nfc_card_scanned_event_id = "test_event_id" + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_keyring = Mock() + mock_keyring.registry_id = test_nfc_id + mock_keyring.registry_type = "nfc" + mock_keyring.ulp_user = ulp_id + ufp.api.bootstrap.keyrings.add(mock_keyring) + + 
mock_ulp_user = Mock() + mock_ulp_user.ulp_id = ulp_id + mock_ulp_user.full_name = test_user_full_name + mock_ulp_user.status = "DEACTIVATED" + ufp.api.bootstrap.ulp_users.add(mock_ulp_user) + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + assert len(events) == 1 + state = events[0].data["new_state"] + assert state + assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION + assert state.attributes[ATTR_EVENT_ID] == "test_event_id" + assert state.attributes["nfc_id"] == "test_nfc_id" + assert state.attributes["full_name"] == "Test User" + assert state.attributes["user_status"] == "DEACTIVATED" + + unsub() + + +async def test_doorbell_nfc_scanned_no_ulpusr( + hass: HomeAssistant, + ufp: MockUFPFixture, + doorbell: Camera, + unadopted_camera: Camera, + fixed_now: datetime, +) -> None: + """Test a doorbell NFC scanned event.""" + + await init_entry(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.EVENT, 3, 3) + events: list[HAEvent] = [] + + @callback + def _capture_event(event: HAEvent) -> None: + events.append(event) + + _, entity_id = ids_from_device_description( + Platform.EVENT, doorbell, EVENT_DESCRIPTIONS[1] + ) + + ulp_id = "ulp_id" + test_nfc_id = "test_nfc_id" + + unsub = async_track_state_change_event(hass, entity_id, _capture_event) + event = Event( + model=ModelType.EVENT, + id="test_event_id", + type=EventType.NFC_CARD_SCANNED, + start=fixed_now - timedelta(seconds=1), + end=None, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + metadata={"nfc": {"nfc_id": test_nfc_id, "user_id": "test_user_id"}}, + ) + + new_camera = doorbell.copy() + new_camera.last_nfc_card_scanned_event_id = "test_event_id" + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_keyring = Mock() + mock_keyring.registry_id = test_nfc_id + mock_keyring.registry_type = "nfc" + mock_keyring.ulp_user = ulp_id + ufp.api.bootstrap.keyrings.add(mock_keyring) + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + assert len(events) == 1 + state = events[0].data["new_state"] + assert state + assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION + assert state.attributes[ATTR_EVENT_ID] == "test_event_id" + assert state.attributes["nfc_id"] == "test_nfc_id" + assert state.attributes["full_name"] == "" + + unsub() + + +async def test_doorbell_nfc_scanned_no_keyring( + hass: HomeAssistant, + ufp: MockUFPFixture, + doorbell: Camera, + unadopted_camera: Camera, + fixed_now: datetime, +) -> None: + """Test a doorbell NFC scanned event.""" + + await init_entry(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.EVENT, 3, 3) + events: list[HAEvent] = [] + + @callback + def _capture_event(event: HAEvent) -> None: + events.append(event) + + _, entity_id = ids_from_device_description( + Platform.EVENT, doorbell, EVENT_DESCRIPTIONS[1] + ) + + test_nfc_id = "test_nfc_id" + + unsub = async_track_state_change_event(hass, entity_id, _capture_event) + event = Event( + model=ModelType.EVENT, + id="test_event_id", + type=EventType.NFC_CARD_SCANNED, + start=fixed_now - timedelta(seconds=1), + end=None, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + metadata={"nfc": {"nfc_id": test_nfc_id, "user_id": 
"test_user_id"}}, ) new_camera = doorbell.model_copy() @@ -208,6 +429,7 @@ async def test_doorbell_nfc_scanned( assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION assert state.attributes[ATTR_EVENT_ID] == "test_event_id" assert state.attributes["nfc_id"] == "test_nfc_id" + assert state.attributes["full_name"] == "" unsub() @@ -233,6 +455,9 @@ async def test_doorbell_fingerprint_identified( Platform.EVENT, doorbell, EVENT_DESCRIPTIONS[2] ) + ulp_id = "ulp_id" + test_user_full_name = "Test User" + unsub = async_track_state_change_event(hass, entity_id, _capture_event) event = Event( model=ModelType.EVENT, @@ -245,7 +470,143 @@ async def test_doorbell_fingerprint_identified( smart_detect_event_ids=[], camera_id=doorbell.id, api=ufp.api, - metadata={"fingerprint": {"ulp_id": "test_ulp_id"}}, + metadata={"fingerprint": {"ulp_id": ulp_id}}, + ) + + new_camera = doorbell.copy() + new_camera.last_fingerprint_identified_event_id = "test_event_id" + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_ulp_user = Mock() + mock_ulp_user.ulp_id = ulp_id + mock_ulp_user.full_name = test_user_full_name + mock_ulp_user.status = "ACTIVE" + ufp.api.bootstrap.ulp_users.add(mock_ulp_user) + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + assert len(events) == 1 + state = events[0].data["new_state"] + assert state + assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION + assert state.attributes[ATTR_EVENT_ID] == "test_event_id" + assert state.attributes["ulp_id"] == ulp_id + assert state.attributes["full_name"] == test_user_full_name + + unsub() + + +async def test_doorbell_fingerprint_identified_user_deactivated( + hass: HomeAssistant, + ufp: MockUFPFixture, + doorbell: Camera, + unadopted_camera: Camera, + fixed_now: datetime, +) -> None: + """Test a doorbell fingerprint identified event.""" + + await init_entry(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.EVENT, 3, 3) + events: list[HAEvent] = [] + + @callback + def _capture_event(event: HAEvent) -> None: + events.append(event) + + _, entity_id = ids_from_device_description( + Platform.EVENT, doorbell, EVENT_DESCRIPTIONS[2] + ) + + ulp_id = "ulp_id" + test_user_full_name = "Test User" + + unsub = async_track_state_change_event(hass, entity_id, _capture_event) + event = Event( + model=ModelType.EVENT, + id="test_event_id", + type=EventType.FINGERPRINT_IDENTIFIED, + start=fixed_now - timedelta(seconds=1), + end=None, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + metadata={"fingerprint": {"ulp_id": ulp_id}}, + ) + + new_camera = doorbell.copy() + new_camera.last_fingerprint_identified_event_id = "test_event_id" + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_ulp_user = Mock() + mock_ulp_user.ulp_id = ulp_id + mock_ulp_user.full_name = test_user_full_name + mock_ulp_user.status = "DEACTIVATED" + ufp.api.bootstrap.ulp_users.add(mock_ulp_user) + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + assert len(events) == 1 + state = events[0].data["new_state"] + assert state + assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION + assert state.attributes[ATTR_EVENT_ID] == "test_event_id" + assert state.attributes["ulp_id"] == 
ulp_id + assert state.attributes["full_name"] == "Test User" + assert state.attributes["user_status"] == "DEACTIVATED" + + unsub() + + +async def test_doorbell_fingerprint_identified_no_user( + hass: HomeAssistant, + ufp: MockUFPFixture, + doorbell: Camera, + unadopted_camera: Camera, + fixed_now: datetime, +) -> None: + """Test a doorbell fingerprint identified event.""" + + await init_entry(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.EVENT, 3, 3) + events: list[HAEvent] = [] + + @callback + def _capture_event(event: HAEvent) -> None: + events.append(event) + + _, entity_id = ids_from_device_description( + Platform.EVENT, doorbell, EVENT_DESCRIPTIONS[2] + ) + + ulp_id = "ulp_id" + + unsub = async_track_state_change_event(hass, entity_id, _capture_event) + event = Event( + model=ModelType.EVENT, + id="test_event_id", + type=EventType.FINGERPRINT_IDENTIFIED, + start=fixed_now - timedelta(seconds=1), + end=None, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + metadata={"fingerprint": {"ulp_id": ulp_id}}, ) new_camera = doorbell.model_copy() @@ -265,7 +626,8 @@ async def test_doorbell_fingerprint_identified( assert state assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION assert state.attributes[ATTR_EVENT_ID] == "test_event_id" - assert state.attributes["ulp_id"] == "test_ulp_id" + assert state.attributes["ulp_id"] == ulp_id + assert state.attributes["full_name"] == "" unsub() From ebcb478f5251ae3beb7960905b96d3bc2c4284f7 Mon Sep 17 00:00:00 2001 From: "Steven B." <51370195+sdb9696@users.noreply.github.com> Date: Sun, 22 Dec 2024 20:53:14 +0000 Subject: [PATCH 1029/1198] Add pan/tilt features to tplink integration (#133829) --- homeassistant/components/tplink/button.py | 12 ++ homeassistant/components/tplink/entity.py | 7 - homeassistant/components/tplink/icons.json | 18 ++ homeassistant/components/tplink/number.py | 8 + homeassistant/components/tplink/strings.json | 18 ++ .../tplink/snapshots/test_button.ambr | 184 ++++++++++++++++++ .../tplink/snapshots/test_number.ambr | 110 +++++++++++ 7 files changed, 350 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/tplink/button.py b/homeassistant/components/tplink/button.py index 131325e489d..6e0d34864d9 100644 --- a/homeassistant/components/tplink/button.py +++ b/homeassistant/components/tplink/button.py @@ -50,6 +50,18 @@ BUTTON_DESCRIPTIONS: Final = [ key="reboot", device_class=ButtonDeviceClass.RESTART, ), + TPLinkButtonEntityDescription( + key="pan_left", + ), + TPLinkButtonEntityDescription( + key="pan_right", + ), + TPLinkButtonEntityDescription( + key="tilt_up", + ), + TPLinkButtonEntityDescription( + key="tilt_down", + ), ] BUTTON_DESCRIPTIONS_MAP = {desc.key: desc for desc in BUTTON_DESCRIPTIONS} diff --git a/homeassistant/components/tplink/entity.py b/homeassistant/components/tplink/entity.py index 60d066012a2..d7b02b80177 100644 --- a/homeassistant/components/tplink/entity.py +++ b/homeassistant/components/tplink/entity.py @@ -73,13 +73,6 @@ EXCLUDED_FEATURES = { "check_latest_firmware", # siren "alarm", - # camera - "pan_left", - "pan_right", - "pan_step", - "tilt_up", - "tilt_down", - "tilt_step", } diff --git a/homeassistant/components/tplink/icons.json b/homeassistant/components/tplink/icons.json index 0abd68543c5..3f3a3b1233b 100644 --- a/homeassistant/components/tplink/icons.json +++ b/homeassistant/components/tplink/icons.json @@ -20,6 +20,18 @@ }, "stop_alarm": { "default": "mdi:bell-cancel" + }, + 
"pan_left": { + "default": "mdi:chevron-left" + }, + "pan_right": { + "default": "mdi:chevron-right" + }, + "tilt_up": { + "default": "mdi:chevron-up" + }, + "tilt_down": { + "default": "mdi:chevron-down" } }, "select": { @@ -117,6 +129,12 @@ }, "target_temperature": { "default": "mdi:thermometer" + }, + "pan_step": { + "default": "mdi:unfold-more-vertical" + }, + "tilt_step": { + "default": "mdi:unfold-more-horizontal" } } }, diff --git a/homeassistant/components/tplink/number.py b/homeassistant/components/tplink/number.py index b51c00db7c0..489805029ea 100644 --- a/homeassistant/components/tplink/number.py +++ b/homeassistant/components/tplink/number.py @@ -51,6 +51,14 @@ NUMBER_DESCRIPTIONS: Final = ( key="temperature_offset", mode=NumberMode.BOX, ), + TPLinkNumberEntityDescription( + key="pan_step", + mode=NumberMode.BOX, + ), + TPLinkNumberEntityDescription( + key="tilt_step", + mode=NumberMode.BOX, + ), ) NUMBER_DESCRIPTIONS_MAP = {desc.key: desc for desc in NUMBER_DESCRIPTIONS} diff --git a/homeassistant/components/tplink/strings.json b/homeassistant/components/tplink/strings.json index 7443636c3c0..5aa7c37d612 100644 --- a/homeassistant/components/tplink/strings.json +++ b/homeassistant/components/tplink/strings.json @@ -120,6 +120,18 @@ }, "stop_alarm": { "name": "Stop alarm" + }, + "pan_left": { + "name": "Pan left" + }, + "pan_right": { + "name": "Pan right" + }, + "tilt_up": { + "name": "Tilt up" + }, + "tilt_down": { + "name": "Tilt down" } }, "camera": { @@ -235,6 +247,12 @@ }, "temperature_offset": { "name": "Temperature offset" + }, + "pan_step": { + "name": "Pan degrees" + }, + "tilt_step": { + "name": "Tilt degrees" } } }, diff --git a/tests/components/tplink/snapshots/test_button.ambr b/tests/components/tplink/snapshots/test_button.ambr index bb75f4642e1..de626cd5818 100644 --- a/tests/components/tplink/snapshots/test_button.ambr +++ b/tests/components/tplink/snapshots/test_button.ambr @@ -1,4 +1,96 @@ # serializer version: 1 +# name: test_states[button.my_device_pan_left-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.my_device_pan_left', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pan left', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pan_left', + 'unique_id': '123456789ABCDEFGH_pan_left', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[button.my_device_pan_left-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Pan left', + }), + 'context': , + 'entity_id': 'button.my_device_pan_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_states[button.my_device_pan_right-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.my_device_pan_right', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 
'original_icon': None, + 'original_name': 'Pan right', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pan_right', + 'unique_id': '123456789ABCDEFGH_pan_right', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[button.my_device_pan_right-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Pan right', + }), + 'context': , + 'entity_id': 'button.my_device_pan_right', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_states[button.my_device_restart-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -124,6 +216,98 @@ 'state': 'unknown', }) # --- +# name: test_states[button.my_device_tilt_down-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.my_device_tilt_down', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tilt down', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tilt_down', + 'unique_id': '123456789ABCDEFGH_tilt_down', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[button.my_device_tilt_down-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Tilt down', + }), + 'context': , + 'entity_id': 'button.my_device_tilt_down', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_states[button.my_device_tilt_up-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.my_device_tilt_up', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tilt up', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tilt_up', + 'unique_id': '123456789ABCDEFGH_tilt_up', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[button.my_device_tilt_up-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Tilt up', + }), + 'context': , + 'entity_id': 'button.my_device_tilt_up', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_states[my_device-entry] DeviceRegistryEntrySnapshot({ 'area_id': None, diff --git a/tests/components/tplink/snapshots/test_number.ambr b/tests/components/tplink/snapshots/test_number.ambr index dbb58bac01b..df5ef71bf44 100644 --- a/tests/components/tplink/snapshots/test_number.ambr +++ b/tests/components/tplink/snapshots/test_number.ambr @@ -35,6 +35,61 @@ 'via_device_id': None, }) # --- +# name: test_states[number.my_device_pan_degrees-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65536, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, 
+ 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.my_device_pan_degrees', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pan degrees', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pan_step', + 'unique_id': '123456789ABCDEFGH_pan_step', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[number.my_device_pan_degrees-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Pan degrees', + 'max': 65536, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.my_device_pan_degrees', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- # name: test_states[number.my_device_smooth_off-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -200,6 +255,61 @@ 'state': 'False', }) # --- +# name: test_states[number.my_device_tilt_degrees-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65536, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.my_device_tilt_degrees', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tilt degrees', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tilt_step', + 'unique_id': '123456789ABCDEFGH_tilt_step', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[number.my_device_tilt_degrees-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Tilt degrees', + 'max': 65536, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.my_device_tilt_degrees', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- # name: test_states[number.my_device_turn_off_in-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ From 2d2b979c7d7a5cc04f27ad19f72c7ac3127dd7a0 Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Sun, 22 Dec 2024 21:55:04 +0100 Subject: [PATCH 1030/1198] Bump pylamarzocco to 1.4.2 (#133826) --- homeassistant/components/lamarzocco/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lamarzocco/manifest.json b/homeassistant/components/lamarzocco/manifest.json index b34df6d6917..309b858c77c 100644 --- a/homeassistant/components/lamarzocco/manifest.json +++ b/homeassistant/components/lamarzocco/manifest.json @@ -37,5 +37,5 @@ "iot_class": "cloud_polling", "loggers": ["pylamarzocco"], "quality_scale": "platinum", - "requirements": ["pylamarzocco==1.4.1"] + "requirements": ["pylamarzocco==1.4.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 321f104fa1f..3133044fbe0 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2040,7 +2040,7 @@ pykwb==0.0.8 pylacrosse==0.4 # homeassistant.components.lamarzocco -pylamarzocco==1.4.1 +pylamarzocco==1.4.2 # homeassistant.components.lastfm pylast==5.1.0 diff --git a/requirements_test_all.txt 
b/requirements_test_all.txt index 58004e5bd8b..3e024e0d40f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1654,7 +1654,7 @@ pykrakenapi==0.1.8 pykulersky==0.5.2 # homeassistant.components.lamarzocco -pylamarzocco==1.4.1 +pylamarzocco==1.4.2 # homeassistant.components.lastfm pylast==5.1.0 From c2358d51586741628a32db02d75466ffbb23e9d8 Mon Sep 17 00:00:00 2001 From: Lucas Gasenzer Date: Sun, 22 Dec 2024 22:37:57 +0100 Subject: [PATCH 1031/1198] Add Switchbot Water Leak Detector (BLE) (#133799) Co-authored-by: J. Nick Koston --- .../components/switchbot/__init__.py | 1 + .../components/switchbot/binary_sensor.py | 5 +++ homeassistant/components/switchbot/const.py | 2 + tests/components/switchbot/__init__.py | 24 +++++++++++ tests/components/switchbot/test_sensor.py | 43 +++++++++++++++++++ 5 files changed, 75 insertions(+) diff --git a/homeassistant/components/switchbot/__init__.py b/homeassistant/components/switchbot/__init__.py index 522258c2a55..499a5073872 100644 --- a/homeassistant/components/switchbot/__init__.py +++ b/homeassistant/components/switchbot/__init__.py @@ -64,6 +64,7 @@ PLATFORMS_BY_TYPE = { SupportedModels.HUB2.value: [Platform.SENSOR], SupportedModels.RELAY_SWITCH_1PM.value: [Platform.SWITCH, Platform.SENSOR], SupportedModels.RELAY_SWITCH_1.value: [Platform.SWITCH], + SupportedModels.LEAK.value: [Platform.BINARY_SENSOR, Platform.SENSOR], } CLASS_BY_DEVICE = { SupportedModels.CEILING_LIGHT.value: switchbot.SwitchbotCeilingLight, diff --git a/homeassistant/components/switchbot/binary_sensor.py b/homeassistant/components/switchbot/binary_sensor.py index a545ffd01ce..144872ff315 100644 --- a/homeassistant/components/switchbot/binary_sensor.py +++ b/homeassistant/components/switchbot/binary_sensor.py @@ -64,6 +64,11 @@ BINARY_SENSOR_TYPES: dict[str, BinarySensorEntityDescription] = { translation_key="door_auto_lock_paused", entity_category=EntityCategory.DIAGNOSTIC, ), + "leak": BinarySensorEntityDescription( + key="leak", + name=None, + device_class=BinarySensorDeviceClass.MOISTURE, + ), } diff --git a/homeassistant/components/switchbot/const.py b/homeassistant/components/switchbot/const.py index 383fd6b03b6..854ab32b657 100644 --- a/homeassistant/components/switchbot/const.py +++ b/homeassistant/components/switchbot/const.py @@ -33,6 +33,7 @@ class SupportedModels(StrEnum): HUB2 = "hub2" RELAY_SWITCH_1PM = "relay_switch_1pm" RELAY_SWITCH_1 = "relay_switch_1" + LEAK = "leak" CONNECTABLE_SUPPORTED_MODEL_TYPES = { @@ -58,6 +59,7 @@ NON_CONNECTABLE_SUPPORTED_MODEL_TYPES = { SwitchbotModel.METER_PRO_C: SupportedModels.HYGROMETER_CO2, SwitchbotModel.CONTACT_SENSOR: SupportedModels.CONTACT, SwitchbotModel.MOTION_SENSOR: SupportedModels.MOTION, + SwitchbotModel.LEAK: SupportedModels.LEAK, } SUPPORTED_MODEL_TYPES = ( diff --git a/tests/components/switchbot/__init__.py b/tests/components/switchbot/__init__.py index c5ecebf21b3..9ecffd395a3 100644 --- a/tests/components/switchbot/__init__.py +++ b/tests/components/switchbot/__init__.py @@ -250,3 +250,27 @@ WORELAY_SWITCH_1PM_SERVICE_INFO = BluetoothServiceInfoBleak( connectable=True, tx_power=-127, ) + +LEAK_SERVICE_INFO = BluetoothServiceInfoBleak( + name="Any", + manufacturer_data={ + 2409: b"\xd6407D1\x02V\x90\x00\x00\x00\x00\x1e\x05\x00\x00\x00\x00" + }, + service_data={"0000fd3d-0000-1000-8000-00805f9b34fb": b"&\\x00V"}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + address="AA:BB:CC:DD:EE:FF", + rssi=-60, + source="local", + advertisement=generate_advertisement_data( + local_name="Any", + 
manufacturer_data={ + 2409: b"\xd6407D1\x02V\x90\x00\x00\x00\x00\x1e\x05\x00\x00\x00\x00" + }, + service_data={"0000fd3d-0000-1000-8000-00805f9b34fb": b"&\\x00V"}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + ), + device=generate_ble_device("AA:BB:CC:DD:EE:FF", "Any"), + time=0, + connectable=False, + tx_power=-127, +) diff --git a/tests/components/switchbot/test_sensor.py b/tests/components/switchbot/test_sensor.py index 205bb739508..acf1bacc054 100644 --- a/tests/components/switchbot/test_sensor.py +++ b/tests/components/switchbot/test_sensor.py @@ -22,6 +22,7 @@ from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from . import ( + LEAK_SERVICE_INFO, WOHAND_SERVICE_INFO, WOMETERTHPC_SERVICE_INFO, WORELAY_SWITCH_1PM_SERVICE_INFO, @@ -151,3 +152,45 @@ async def test_relay_switch_1pm_power_sensor(hass: HomeAssistant) -> None: assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_leak_sensor(hass: HomeAssistant) -> None: + """Test setting up the leak detector.""" + await async_setup_component(hass, DOMAIN, {}) + inject_bluetooth_service_info(hass, LEAK_SERVICE_INFO) + + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", + CONF_NAME: "test-name", + CONF_SENSOR_TYPE: "leak", + }, + unique_id="aabbccddeeaa", + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + battery_sensor = hass.states.get("sensor.test_name_battery") + battery_sensor_attrs = battery_sensor.attributes + assert battery_sensor.state == "86" + assert battery_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Battery" + assert battery_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "%" + assert battery_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + rssi_sensor = hass.states.get("sensor.test_name_bluetooth_signal") + rssi_sensor_attrs = rssi_sensor.attributes + assert rssi_sensor.state == "-60" + assert rssi_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Bluetooth signal" + assert rssi_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "dBm" + + leak_sensor = hass.states.get("binary_sensor.test_name") + leak_sensor_attrs = leak_sensor.attributes + assert leak_sensor.state == "off" + assert leak_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name" + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() From c9ad87d4643a0d3f000ed6f83c8f442bb49a35a9 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sun, 22 Dec 2024 22:44:15 +0100 Subject: [PATCH 1032/1198] Add light tests for Niko Home Control (#133750) --- .../components/niko_home_control/light.py | 1 + .../components/niko_home_control/conftest.py | 44 +++++- .../snapshots/test_light.ambr | 112 ++++++++++++++ .../niko_home_control/test_light.py | 138 ++++++++++++++++++ 4 files changed, 291 insertions(+), 4 deletions(-) create mode 100644 tests/components/niko_home_control/snapshots/test_light.ambr create mode 100644 tests/components/niko_home_control/test_light.py diff --git a/homeassistant/components/niko_home_control/light.py b/homeassistant/components/niko_home_control/light.py index c9902cbf11b..69d4e71c755 100644 --- a/homeassistant/components/niko_home_control/light.py +++ b/homeassistant/components/niko_home_control/light.py @@ -108,6 +108,7 @@ class NikoHomeControlLight(NikoHomeControlEntity, LightEntity): if action.is_dimmable: 
self._attr_color_mode = ColorMode.BRIGHTNESS self._attr_supported_color_modes = {ColorMode.BRIGHTNESS} + self._attr_brightness = round(action.state * 2.55) def turn_on(self, **kwargs: Any) -> None: """Instruct the light to turn on.""" diff --git a/tests/components/niko_home_control/conftest.py b/tests/components/niko_home_control/conftest.py index 63307a88e8a..b3dedd0c182 100644 --- a/tests/components/niko_home_control/conftest.py +++ b/tests/components/niko_home_control/conftest.py @@ -3,6 +3,7 @@ from collections.abc import Generator from unittest.mock import AsyncMock, patch +from nhc.light import NHCLight import pytest from homeassistant.components.niko_home_control.const import DOMAIN @@ -22,16 +23,48 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture -def mock_niko_home_control_connection() -> Generator[AsyncMock]: +def light() -> NHCLight: + """Return a light mock.""" + mock = AsyncMock(spec=NHCLight) + mock.id = 1 + mock.type = 1 + mock.is_dimmable = False + mock.name = "light" + mock.suggested_area = "room" + mock.state = 100 + return mock + + +@pytest.fixture +def dimmable_light() -> NHCLight: + """Return a dimmable light mock.""" + mock = AsyncMock(spec=NHCLight) + mock.id = 2 + mock.type = 2 + mock.is_dimmable = True + mock.name = "dimmable light" + mock.suggested_area = "room" + mock.state = 100 + return mock + + +@pytest.fixture +def mock_niko_home_control_connection( + light: NHCLight, dimmable_light: NHCLight +) -> Generator[AsyncMock]: """Mock a NHC client.""" with ( patch( - "homeassistant.components.niko_home_control.config_flow.NHCController", + "homeassistant.components.niko_home_control.NHCController", autospec=True, ) as mock_client, + patch( + "homeassistant.components.niko_home_control.config_flow.NHCController", + new=mock_client, + ), ): client = mock_client.return_value - client.return_value = True + client.lights = [light, dimmable_light] yield client @@ -39,5 +72,8 @@ def mock_niko_home_control_connection() -> Generator[AsyncMock]: def mock_config_entry() -> MockConfigEntry: """Return the default mocked config entry.""" return MockConfigEntry( - domain=DOMAIN, title="Niko Home Control", data={CONF_HOST: "192.168.0.123"} + domain=DOMAIN, + title="Niko Home Control", + data={CONF_HOST: "192.168.0.123"}, + entry_id="01JFN93M7KRA38V5AMPCJ2JYYV", ) diff --git a/tests/components/niko_home_control/snapshots/test_light.ambr b/tests/components/niko_home_control/snapshots/test_light.ambr new file mode 100644 index 00000000000..702b7326ee2 --- /dev/null +++ b/tests/components/niko_home_control/snapshots/test_light.ambr @@ -0,0 +1,112 @@ +# serializer version: 1 +# name: test_entities[light.dimmable_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.dimmable_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'niko_home_control', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01JFN93M7KRA38V5AMPCJ2JYYV-2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[light.dimmable_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'brightness': 255, + 'color_mode': , + 'friendly_name': 'dimmable light', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.dimmable_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entities[light.light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'niko_home_control', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01JFN93M7KRA38V5AMPCJ2JYYV-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[light.light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'color_mode': , + 'friendly_name': 'light', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/niko_home_control/test_light.py b/tests/components/niko_home_control/test_light.py new file mode 100644 index 00000000000..801bdf6a296 --- /dev/null +++ b/tests/components/niko_home_control/test_light.py @@ -0,0 +1,138 @@ +"""Tests for the Niko Home Control Light platform.""" + +from typing import Any +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.light import ATTR_BRIGHTNESS, DOMAIN as LIGHT_DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_OFF, + STATE_ON, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_niko_home_control_connection: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch( + "homeassistant.components.niko_home_control.PLATFORMS", [Platform.LIGHT] + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.parametrize( + ("light_id", "data", "set_brightness"), + [ + (0, {ATTR_ENTITY_ID: "light.light"}, 100.0), + ( + 1, + {ATTR_ENTITY_ID: "light.dimmable_light", ATTR_BRIGHTNESS: 50}, + 19.607843137254903, + ), + ], +) +async def test_turning_on( + hass: HomeAssistant, + mock_niko_home_control_connection: AsyncMock, + mock_config_entry: MockConfigEntry, + light_id: int, + data: dict[str, Any], + set_brightness: int, +) -> None: + """Test turning on the light.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + data, + blocking=True, + ) + mock_niko_home_control_connection.lights[light_id].turn_on.assert_called_once_with( + set_brightness + ) + + +@pytest.mark.parametrize( + ("light_id", "entity_id"), + [ + (0, "light.light"), + (1, "light.dimmable_light"), + ], +) +async def test_turning_off( + hass: HomeAssistant, + mock_niko_home_control_connection: AsyncMock, + mock_config_entry: MockConfigEntry, + light_id: int, + entity_id: str, +) -> None: + """Test turning on the light.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mock_niko_home_control_connection.lights[ + light_id + ].turn_off.assert_called_once_with() + + +async def test_updating( + hass: HomeAssistant, + mock_niko_home_control_connection: AsyncMock, + mock_config_entry: MockConfigEntry, + light: AsyncMock, + dimmable_light: AsyncMock, +) -> None: + """Test turning on the light.""" + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("light.light").state == STATE_ON + + light.state = 0 + await mock_niko_home_control_connection.register_callback.call_args_list[0][0][1](0) + await hass.async_block_till_done() + + assert hass.states.get("light.light").state == STATE_OFF + + assert hass.states.get("light.dimmable_light").state == STATE_ON + assert hass.states.get("light.dimmable_light").attributes[ATTR_BRIGHTNESS] == 255 + + dimmable_light.state = 80 + await mock_niko_home_control_connection.register_callback.call_args_list[1][0][1]( + 80 + ) + await hass.async_block_till_done() + + assert hass.states.get("light.dimmable_light").state == STATE_ON + assert hass.states.get("light.dimmable_light").attributes[ATTR_BRIGHTNESS] == 204 + + dimmable_light.state = 0 + await mock_niko_home_control_connection.register_callback.call_args_list[1][0][1](0) + await hass.async_block_till_done() + + assert hass.states.get("light.dimmable_light").state == STATE_OFF + assert hass.states.get("light.dimmable_light").attributes[ATTR_BRIGHTNESS] is None From 8eebbd45bdc441225cd0439e447e4b8c4603512d Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Sun, 22 Dec 2024 22:52:35 +0100 Subject: [PATCH 1033/1198] Bump pyOverkiz to 1.15.5 (#133835) --- homeassistant/components/overkiz/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt 
| 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/overkiz/manifest.json b/homeassistant/components/overkiz/manifest.json index 84fdc11ae47..3b093eb06ac 100644 --- a/homeassistant/components/overkiz/manifest.json +++ b/homeassistant/components/overkiz/manifest.json @@ -20,7 +20,7 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["boto3", "botocore", "pyhumps", "pyoverkiz", "s3transfer"], - "requirements": ["pyoverkiz==1.15.4"], + "requirements": ["pyoverkiz==1.15.5"], "zeroconf": [ { "type": "_kizbox._tcp.local.", diff --git a/requirements_all.txt b/requirements_all.txt index 3133044fbe0..a02fe7f33ff 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2162,7 +2162,7 @@ pyotgw==2.2.2 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.15.4 +pyoverkiz==1.15.5 # homeassistant.components.onewire pyownet==0.10.0.post1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3e024e0d40f..bbf04fbf2d6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1758,7 +1758,7 @@ pyotgw==2.2.2 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.15.4 +pyoverkiz==1.15.5 # homeassistant.components.onewire pyownet==0.10.0.post1 From 0560b634e39a87ba2543e6b2b114488f44c04a50 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Sun, 22 Dec 2024 23:14:01 +0100 Subject: [PATCH 1034/1198] Make To-do action names and descriptions consistent with HA standard (#133734) --- homeassistant/components/todo/strings.json | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/todo/strings.json b/homeassistant/components/todo/strings.json index 245e5c82fc8..cffb22e89f0 100644 --- a/homeassistant/components/todo/strings.json +++ b/homeassistant/components/todo/strings.json @@ -7,8 +7,8 @@ }, "services": { "get_items": { - "name": "Get to-do list items", - "description": "Get items on a to-do list.", + "name": "Get items", + "description": "Gets items on a to-do list.", "fields": { "status": { "name": "Status", @@ -17,8 +17,8 @@ } }, "add_item": { - "name": "Add to-do list item", - "description": "Add a new to-do list item.", + "name": "Add item", + "description": "Adds a new to-do list item.", "fields": { "item": { "name": "Item name", @@ -39,8 +39,8 @@ } }, "update_item": { - "name": "Update to-do list item", - "description": "Update an existing to-do list item based on its name.", + "name": "Update item", + "description": "Updates an existing to-do list item based on its name.", "fields": { "item": { "name": "Item name", @@ -69,12 +69,12 @@ } }, "remove_completed_items": { - "name": "Remove all completed to-do list items", - "description": "Remove all to-do list items that have been completed." + "name": "Remove completed items", + "description": "Removes all to-do list items that have been completed." 
}, "remove_item": { - "name": "Remove a to-do list item", - "description": "Remove an existing to-do list item by its name.", + "name": "Remove item", + "description": "Removes an existing to-do list item by its name.", "fields": { "item": { "name": "Item name", From 74b425a06e54e7c86ff482f7e928dd2fbc7c5395 Mon Sep 17 00:00:00 2001 From: Andre Lengwenus Date: Sun, 22 Dec 2024 23:20:01 +0100 Subject: [PATCH 1035/1198] Reload on connection lost for LCN integration (#133638) --- homeassistant/components/lcn/__init__.py | 28 ++++++++++++++++++++++++ tests/components/lcn/test_init.py | 17 ++++++++++++++ 2 files changed, 45 insertions(+) diff --git a/homeassistant/components/lcn/__init__.py b/homeassistant/components/lcn/__init__.py index a10d08ad073..7fbe7e7ac0e 100644 --- a/homeassistant/components/lcn/__init__.py +++ b/homeassistant/components/lcn/__init__.py @@ -14,6 +14,7 @@ from pypck.connection import ( PchkLcnNotConnectedError, PchkLicenseError, ) +from pypck.lcn_defs import LcnEvent from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -124,10 +125,12 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b # register for LCN bus messages device_registry = dr.async_get(hass) + event_received = partial(async_host_event_received, hass, config_entry) input_received = partial( async_host_input_received, hass, config_entry, device_registry ) + lcn_connection.register_for_events(event_received) lcn_connection.register_for_inputs(input_received) return True @@ -183,6 +186,31 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> return unload_ok +def async_host_event_received( + hass: HomeAssistant, config_entry: ConfigEntry, event: pypck.lcn_defs.LcnEvent +) -> None: + """Process received event from LCN.""" + lcn_connection = hass.data[DOMAIN][config_entry.entry_id][CONNECTION] + + async def reload_config_entry() -> None: + """Close connection and schedule config entry for reload.""" + await lcn_connection.async_close() + hass.config_entries.async_schedule_reload(config_entry.entry_id) + + if event in ( + LcnEvent.CONNECTION_LOST, + LcnEvent.PING_TIMEOUT, + ): + _LOGGER.info('The connection to host "%s" has been lost', config_entry.title) + hass.async_create_task(reload_config_entry()) + elif event == LcnEvent.BUS_DISCONNECTED: + _LOGGER.info( + 'The connection to the LCN bus via host "%s" has been disconnected', + config_entry.title, + ) + hass.async_create_task(reload_config_entry()) + + def async_host_input_received( hass: HomeAssistant, config_entry: ConfigEntry, diff --git a/tests/components/lcn/test_init.py b/tests/components/lcn/test_init.py index bffa91d14ef..4bb8d023d3f 100644 --- a/tests/components/lcn/test_init.py +++ b/tests/components/lcn/test_init.py @@ -9,6 +9,7 @@ from pypck.connection import ( PchkLcnNotConnectedError, PchkLicenseError, ) +from pypck.lcn_defs import LcnEvent import pytest from homeassistant import config_entries @@ -116,6 +117,22 @@ async def test_async_setup_entry_fails( assert entry.state is ConfigEntryState.SETUP_RETRY +@pytest.mark.parametrize( + "event", + [LcnEvent.CONNECTION_LOST, LcnEvent.PING_TIMEOUT, LcnEvent.BUS_DISCONNECTED], +) +async def test_async_entry_reload_on_host_event_received( + hass: HomeAssistant, entry: MockConfigEntry, event: LcnEvent +) -> None: + """Test for config entry reload on certain host event received.""" + lcn_connection = await init_integration(hass, entry) + with patch( + 
"homeassistant.config_entries.ConfigEntries.async_schedule_reload" + ) as async_schedule_reload: + lcn_connection.fire_event(event) + async_schedule_reload.assert_called_with(entry.entry_id) + + @patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) async def test_migrate_1_1(hass: HomeAssistant, entry) -> None: """Test migration config entry.""" From a3657a0fef0fca03ff497bf306eda1bbee917b30 Mon Sep 17 00:00:00 2001 From: jb101010-2 <168106462+jb101010-2@users.noreply.github.com> Date: Sun, 22 Dec 2024 23:21:52 +0100 Subject: [PATCH 1036/1198] Suez_water: fix yesterday sensor extra_state invalid typing (#133425) --- .../components/suez_water/coordinator.py | 30 ++++++++++--------- homeassistant/components/suez_water/sensor.py | 10 +++---- tests/components/suez_water/conftest.py | 17 ++++++----- tests/components/suez_water/test_sensor.py | 8 +++++ 4 files changed, 38 insertions(+), 27 deletions(-) diff --git a/homeassistant/components/suez_water/coordinator.py b/homeassistant/components/suez_water/coordinator.py index 72da68c0f5d..aab1ba110b7 100644 --- a/homeassistant/components/suez_water/coordinator.py +++ b/homeassistant/components/suez_water/coordinator.py @@ -1,9 +1,7 @@ """Suez water update coordinator.""" -from collections.abc import Mapping from dataclasses import dataclass from datetime import date -from typing import Any from pysuez import PySuezError, SuezClient @@ -20,11 +18,11 @@ from .const import CONF_COUNTER_ID, DATA_REFRESH_INTERVAL, DOMAIN class SuezWaterAggregatedAttributes: """Class containing aggregated sensor extra attributes.""" - this_month_consumption: dict[date, float] - previous_month_consumption: dict[date, float] + this_month_consumption: dict[str, float] + previous_month_consumption: dict[str, float] last_year_overall: dict[str, float] this_year_overall: dict[str, float] - history: dict[date, float] + history: dict[str, float] highest_monthly_consumption: float @@ -33,7 +31,7 @@ class SuezWaterData: """Class used to hold all fetch data from suez api.""" aggregated_value: float - aggregated_attr: Mapping[str, Any] + aggregated_attr: SuezWaterAggregatedAttributes price: float @@ -68,18 +66,22 @@ class SuezWaterCoordinator(DataUpdateCoordinator[SuezWaterData]): async def _async_update_data(self) -> SuezWaterData: """Fetch data from API endpoint.""" + + def map_dict(param: dict[date, float]) -> dict[str, float]: + return {str(key): value for key, value in param.items()} + try: aggregated = await self._suez_client.fetch_aggregated_data() data = SuezWaterData( aggregated_value=aggregated.value, - aggregated_attr={ - "this_month_consumption": aggregated.current_month, - "previous_month_consumption": aggregated.previous_month, - "highest_monthly_consumption": aggregated.highest_monthly_consumption, - "last_year_overall": aggregated.previous_year, - "this_year_overall": aggregated.current_year, - "history": aggregated.history, - }, + aggregated_attr=SuezWaterAggregatedAttributes( + this_month_consumption=map_dict(aggregated.current_month), + previous_month_consumption=map_dict(aggregated.previous_month), + highest_monthly_consumption=aggregated.highest_monthly_consumption, + last_year_overall=aggregated.previous_year, + this_year_overall=aggregated.current_year, + history=map_dict(aggregated.history), + ), price=(await self._suez_client.get_price()).price, ) except PySuezError as err: diff --git a/homeassistant/components/suez_water/sensor.py b/homeassistant/components/suez_water/sensor.py index e4e53dd7f6d..1152ebd551b 100644 --- 
a/homeassistant/components/suez_water/sensor.py +++ b/homeassistant/components/suez_water/sensor.py @@ -2,8 +2,8 @@ from __future__ import annotations -from collections.abc import Callable, Mapping -from dataclasses import dataclass +from collections.abc import Callable +from dataclasses import asdict, dataclass from typing import Any from pysuez.const import ATTRIBUTION @@ -28,7 +28,7 @@ class SuezWaterSensorEntityDescription(SensorEntityDescription): """Describes Suez water sensor entity.""" value_fn: Callable[[SuezWaterData], float | str | None] - attr_fn: Callable[[SuezWaterData], Mapping[str, Any] | None] = lambda _: None + attr_fn: Callable[[SuezWaterData], dict[str, Any] | None] = lambda _: None SENSORS: tuple[SuezWaterSensorEntityDescription, ...] = ( @@ -38,7 +38,7 @@ SENSORS: tuple[SuezWaterSensorEntityDescription, ...] = ( native_unit_of_measurement=UnitOfVolume.LITERS, device_class=SensorDeviceClass.WATER, value_fn=lambda suez_data: suez_data.aggregated_value, - attr_fn=lambda suez_data: suez_data.aggregated_attr, + attr_fn=lambda suez_data: asdict(suez_data.aggregated_attr), ), SuezWaterSensorEntityDescription( key="water_price", @@ -93,6 +93,6 @@ class SuezWaterSensor(CoordinatorEntity[SuezWaterCoordinator], SensorEntity): return self.entity_description.value_fn(self.coordinator.data) @property - def extra_state_attributes(self) -> Mapping[str, Any] | None: + def extra_state_attributes(self) -> dict[str, Any] | None: """Return extra state of the sensor.""" return self.entity_description.attr_fn(self.coordinator.data) diff --git a/tests/components/suez_water/conftest.py b/tests/components/suez_water/conftest.py index f634a053c65..b034d9b00fa 100644 --- a/tests/components/suez_water/conftest.py +++ b/tests/components/suez_water/conftest.py @@ -1,6 +1,7 @@ """Common fixtures for the Suez Water tests.""" from collections.abc import Generator +from datetime import date from unittest.mock import AsyncMock, patch from pysuez import AggregatedData, PriceResult @@ -56,22 +57,22 @@ def mock_suez_client() -> Generator[AsyncMock]: result = AggregatedData( value=160, current_month={ - "2024-01-01": 130, - "2024-01-02": 145, + date.fromisoformat("2024-01-01"): 130, + date.fromisoformat("2024-01-02"): 145, }, previous_month={ - "2024-12-01": 154, - "2024-12-02": 166, + date.fromisoformat("2024-12-01"): 154, + date.fromisoformat("2024-12-02"): 166, }, current_year=1500, previous_year=1000, attribution=ATTRIBUTION, highest_monthly_consumption=2558, history={ - "2024-01-01": 130, - "2024-01-02": 145, - "2024-12-01": 154, - "2024-12-02": 166, + date.fromisoformat("2024-01-01"): 130, + date.fromisoformat("2024-01-02"): 145, + date.fromisoformat("2024-12-01"): 154, + date.fromisoformat("2024-12-02"): 166, }, ) diff --git a/tests/components/suez_water/test_sensor.py b/tests/components/suez_water/test_sensor.py index cb578432f62..950d5d8393d 100644 --- a/tests/components/suez_water/test_sensor.py +++ b/tests/components/suez_water/test_sensor.py @@ -1,5 +1,6 @@ """Test Suez_water sensor platform.""" +from datetime import date from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory @@ -32,6 +33,13 @@ async def test_sensors_valid_state( assert mock_config_entry.state is ConfigEntryState.LOADED await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + state = hass.states.get("sensor.suez_mock_device_water_usage_yesterday") + assert state + previous: dict = state.attributes["previous_month_consumption"] + assert previous + assert 
previous.get(date.fromisoformat("2024-12-01")) is None + assert previous.get(str(date.fromisoformat("2024-12-01"))) == 154 + @pytest.mark.parametrize("method", [("fetch_aggregated_data"), ("get_price")]) async def test_sensors_failed_update( From 4ed0c21a4a605f23c18ee0dd24fb01437dec74b1 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Sun, 22 Dec 2024 23:35:50 +0100 Subject: [PATCH 1037/1198] Add data descriptions to Config Flow in Overkiz (#133758) --- homeassistant/components/overkiz/strings.json | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/overkiz/strings.json b/homeassistant/components/overkiz/strings.json index 1595cd52aeb..0c564a003d6 100644 --- a/homeassistant/components/overkiz/strings.json +++ b/homeassistant/components/overkiz/strings.json @@ -6,12 +6,18 @@ "description": "Select your server. The Overkiz platform is used by various vendors like Somfy (Connexoon / TaHoma), Hitachi (Hi Kumo) and Atlantic (Cozytouch).", "data": { "hub": "Server" + }, + "data_description": { + "hub": "Select the mobile app that you use to control your devices." } }, "local_or_cloud": { - "description": "Choose between local or cloud API. Local API supports TaHoma Connexoon, TaHoma v2, and TaHoma Switch. Climate devices and scenarios are not supported in local API.", + "description": "Choose how you want to connect to your gateway.", "data": { "api_type": "API type" + }, + "data_description": { + "api_type": "Local API is only supported by TaHoma Connexoon, TaHoma v2, and TaHoma Switch. Climate devices and scenarios are **not** available via the local API." } }, "cloud": { @@ -19,6 +25,10 @@ "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "username": "The username of your cloud account (app).", + "password": "The password of your cloud account (app)." } }, "local": { @@ -28,6 +38,12 @@ "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" + }, + "data_description": { + "host": "The hostname or IP address of your Overkiz hub.", + "username": "The username of your cloud account (app).", + "password": "The password of your cloud account (app).", + "verify_ssl": "Verify the SSL certificate. Select this only if you are connecting via the hostname." } } }, From 8ab936b87c9f83c69849d14ca00294a2094d40cf Mon Sep 17 00:00:00 2001 From: "Steven B." 
<51370195+sdb9696@users.noreply.github.com> Date: Sun, 22 Dec 2024 22:54:44 +0000 Subject: [PATCH 1038/1198] Add detection switches to tplink integration (#133828) --- homeassistant/components/tplink/icons.json | 24 +++ homeassistant/components/tplink/strings.json | 12 ++ homeassistant/components/tplink/switch.py | 12 ++ .../components/tplink/fixtures/features.json | 20 ++ .../tplink/snapshots/test_switch.ambr | 184 ++++++++++++++++++ 5 files changed, 252 insertions(+) diff --git a/homeassistant/components/tplink/icons.json b/homeassistant/components/tplink/icons.json index 3f3a3b1233b..9cc0326b59f 100644 --- a/homeassistant/components/tplink/icons.json +++ b/homeassistant/components/tplink/icons.json @@ -89,6 +89,30 @@ "state": { "on": "mdi:motion-sensor" } + }, + "motion_detection": { + "default": "mdi:motion-sensor-off", + "state": { + "on": "mdi:motion-sensor" + } + }, + "person_detection": { + "default": "mdi:account-off", + "state": { + "on": "mdi:account" + } + }, + "tamper_detection": { + "default": "mdi:shield-off", + "state": { + "on": "mdi:shield" + } + }, + "baby_cry_detection": { + "default": "mdi:baby-face-outline", + "state": { + "on": "mdi:baby-face" + } } }, "sensor": { diff --git a/homeassistant/components/tplink/strings.json b/homeassistant/components/tplink/strings.json index 5aa7c37d612..664d52c16af 100644 --- a/homeassistant/components/tplink/strings.json +++ b/homeassistant/components/tplink/strings.json @@ -233,6 +233,18 @@ }, "pir_enabled": { "name": "Motion sensor" + }, + "motion_detection": { + "name": "Motion detection" + }, + "person_detection": { + "name": "Person detection" + }, + "tamper_detection": { + "name": "Tamper detection" + }, + "baby_cry_detection": { + "name": "Baby cry detection" } }, "number": { diff --git a/homeassistant/components/tplink/switch.py b/homeassistant/components/tplink/switch.py index 7e223752665..28dedc7e7a1 100644 --- a/homeassistant/components/tplink/switch.py +++ b/homeassistant/components/tplink/switch.py @@ -54,6 +54,18 @@ SWITCH_DESCRIPTIONS: tuple[TPLinkSwitchEntityDescription, ...] 
= ( TPLinkSwitchEntityDescription( key="pir_enabled", ), + TPLinkSwitchEntityDescription( + key="motion_detection", + ), + TPLinkSwitchEntityDescription( + key="person_detection", + ), + TPLinkSwitchEntityDescription( + key="tamper_detection", + ), + TPLinkSwitchEntityDescription( + key="baby_cry_detection", + ), ) SWITCH_DESCRIPTIONS_MAP = {desc.key: desc for desc in SWITCH_DESCRIPTIONS} diff --git a/tests/components/tplink/fixtures/features.json b/tests/components/tplink/fixtures/features.json index a54edf56c62..3d27e63b06a 100644 --- a/tests/components/tplink/fixtures/features.json +++ b/tests/components/tplink/fixtures/features.json @@ -44,6 +44,26 @@ "type": "Switch", "category": "Config" }, + "motion_detection": { + "value": true, + "type": "Switch", + "category": "Primary" + }, + "person_detection": { + "value": true, + "type": "Switch", + "category": "Primary" + }, + "tamper_detection": { + "value": true, + "type": "Switch", + "category": "Primary" + }, + "baby_cry_detection": { + "value": true, + "type": "Switch", + "category": "Primary" + }, "current_consumption": { "value": 5.23, "type": "Sensor", diff --git a/tests/components/tplink/snapshots/test_switch.ambr b/tests/components/tplink/snapshots/test_switch.ambr index 36c630474c8..7adda900c02 100644 --- a/tests/components/tplink/snapshots/test_switch.ambr +++ b/tests/components/tplink/snapshots/test_switch.ambr @@ -173,6 +173,52 @@ 'state': 'on', }) # --- +# name: test_states[switch.my_device_baby_cry_detection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.my_device_baby_cry_detection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Baby cry detection', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'baby_cry_detection', + 'unique_id': '123456789ABCDEFGH_baby_cry_detection', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[switch.my_device_baby_cry_detection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Baby cry detection', + }), + 'context': , + 'entity_id': 'switch.my_device_baby_cry_detection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_states[switch.my_device_child_lock-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -311,6 +357,52 @@ 'state': 'on', }) # --- +# name: test_states[switch.my_device_motion_detection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.my_device_motion_detection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Motion detection', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'motion_detection', + 'unique_id': '123456789ABCDEFGH_motion_detection', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_states[switch.my_device_motion_detection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Motion detection', + }), + 'context': , + 'entity_id': 'switch.my_device_motion_detection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_states[switch.my_device_motion_sensor-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -357,6 +449,52 @@ 'state': 'on', }) # --- +# name: test_states[switch.my_device_person_detection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.my_device_person_detection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Person detection', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'person_detection', + 'unique_id': '123456789ABCDEFGH_person_detection', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[switch.my_device_person_detection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Person detection', + }), + 'context': , + 'entity_id': 'switch.my_device_person_detection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_states[switch.my_device_smooth_transitions-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -403,3 +541,49 @@ 'state': 'on', }) # --- +# name: test_states[switch.my_device_tamper_detection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.my_device_tamper_detection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tamper detection', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tamper_detection', + 'unique_id': '123456789ABCDEFGH_tamper_detection', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[switch.my_device_tamper_detection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Tamper detection', + }), + 'context': , + 'entity_id': 'switch.my_device_tamper_detection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- From df261660474dbdec780aa534fa0f764d1a2ba3c3 Mon Sep 17 00:00:00 2001 From: Raphael Hehl <7577984+RaHehl@users.noreply.github.com> Date: Sun, 22 Dec 2024 23:58:13 +0100 Subject: [PATCH 1039/1198] Unifiprotect: add error message if the get_user_keyring_info permissions are not sufficient (#133841) --- .../components/unifiprotect/services.py | 3 ++ .../components/unifiprotect/test_services.py | 31 +++++++++++++++++-- 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/unifiprotect/services.py b/homeassistant/components/unifiprotect/services.py index 6a1daef178e..402aae2eeba 100644 --- a/homeassistant/components/unifiprotect/services.py 
+++ b/homeassistant/components/unifiprotect/services.py @@ -236,6 +236,9 @@ async def get_user_keyring_info(call: ServiceCall) -> ServiceResponse: """Get the user keyring info.""" camera = _async_get_ufp_camera(call) ulp_users = camera.api.bootstrap.ulp_users.as_list() + if not ulp_users: + raise HomeAssistantError("No users found, please check Protect permissions.") + user_keyrings: list[JsonValueType] = [ { KEYRINGS_USER_FULL_NAME: user.full_name, diff --git a/tests/components/unifiprotect/test_services.py b/tests/components/unifiprotect/test_services.py index efc9d1ace9e..9697d1f11a4 100644 --- a/tests/components/unifiprotect/test_services.py +++ b/tests/components/unifiprotect/test_services.py @@ -262,13 +262,13 @@ async def test_remove_privacy_zone( @pytest.mark.asyncio -async def test_get_doorbell_user( +async def get_user_keyring_info( hass: HomeAssistant, entity_registry: er.EntityRegistry, ufp: MockUFPFixture, doorbell: Camera, ) -> None: - """Test get_doorbell_user service.""" + """Test get_user_keyring_info service.""" ulp_user = Mock(full_name="Test User", status="active", ulp_id="user_ulp_id") keyring = Mock( @@ -315,3 +315,30 @@ async def test_get_doorbell_user( }, ], } + + +async def test_get_user_keyring_info_no_users( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + ufp: MockUFPFixture, + doorbell: Camera, +) -> None: + """Test get_user_keyring_info service with no users.""" + + ufp.api.bootstrap.ulp_users.as_list = Mock(return_value=[]) + ufp.api.bootstrap.keyrings.as_list = Mock(return_value=[]) + + await init_entry(hass, ufp, [doorbell]) + + camera_entry = entity_registry.async_get("binary_sensor.test_camera_doorbell") + + with pytest.raises( + HomeAssistantError, match="No users found, please check Protect permissions." + ): + await hass.services.async_call( + DOMAIN, + SERVICE_GET_USER_KEYRING_INFO, + {ATTR_DEVICE_ID: camera_entry.device_id}, + blocking=True, + return_response=True, + ) From 00a1ae0eeb83eeafd81c424391a28c2c5c788c6c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 22 Dec 2024 12:58:39 -1000 Subject: [PATCH 1040/1198] Bump protobuf to 5.29.2 (#133839) --- homeassistant/package_constraints.txt | 2 +- requirements_test.txt | 2 +- script/gen_requirements_all.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index d5731041d08..b149c4dafb8 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -145,7 +145,7 @@ pyOpenSSL>=24.0.0 # protobuf must be in package constraints for the wheel # builder to build binary wheels -protobuf==5.28.3 +protobuf==5.29.2 # faust-cchardet: Ensure we have a version we can build wheels # 2.1.18 is the first version that works with our wheel builder diff --git a/requirements_test.txt b/requirements_test.txt index e8561eba0a5..2a6841ada2a 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -43,7 +43,7 @@ types-chardet==0.1.5 types-decorator==5.1.8.20240310 types-paho-mqtt==1.6.0.20240321 types-pillow==10.2.0.20240822 -types-protobuf==5.28.3.20241030 +types-protobuf==5.29.1.20241207 types-psutil==6.1.0.20241102 types-python-dateutil==2.9.0.20241003 types-python-slugify==8.0.2.20240310 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index 71229d0b57d..c447c64f655 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -178,7 +178,7 @@ pyOpenSSL>=24.0.0 # protobuf must be in package constraints for the wheel # builder to build binary wheels -protobuf==5.28.3 +protobuf==5.29.2 # faust-cchardet: Ensure we have a version we can build wheels # 2.1.18 is the first version that works with our wheel builder From 353f0854748356697bd85d65aa0e8038cd7b4269 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 22 Dec 2024 13:05:51 -1000 Subject: [PATCH 1041/1198] Bump anyio to 4.7.0 (#133842) --- homeassistant/package_constraints.txt | 2 +- script/gen_requirements_all.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index b149c4dafb8..6863da50af3 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -106,7 +106,7 @@ uuid==1000000000.0.0 # these requirements are quite loose. As the entire stack has some outstanding issues, and # even newer versions seem to introduce new issues, it's useful for us to pin all these # requirements so we can directly link HA versions to these library versions. -anyio==4.6.2.post1 +anyio==4.7.0 h11==0.14.0 httpcore==1.0.5 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index c447c64f655..86179ac228f 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -139,7 +139,7 @@ uuid==1000000000.0.0 # these requirements are quite loose. As the entire stack has some outstanding issues, and # even newer versions seem to introduce new issues, it's useful for us to pin all these # requirements so we can directly link HA versions to these library versions. 
-anyio==4.6.2.post1 +anyio==4.7.0 h11==0.14.0 httpcore==1.0.5 From 67f0de441b489890efa802a325f187b761098ad6 Mon Sep 17 00:00:00 2001 From: Christopher Fenner <9592452+CFenner@users.noreply.github.com> Date: Mon, 23 Dec 2024 00:06:01 +0100 Subject: [PATCH 1042/1198] Fulfill IQS rule runtime-data in ViCare integration (#133633) --- homeassistant/components/vicare/__init__.py | 63 ++++++------------- .../components/vicare/binary_sensor.py | 10 +-- homeassistant/components/vicare/button.py | 10 +-- homeassistant/components/vicare/climate.py | 12 ++-- .../components/vicare/config_flow.py | 8 +-- homeassistant/components/vicare/const.py | 2 +- .../components/vicare/diagnostics.py | 9 ++- homeassistant/components/vicare/fan.py | 11 +--- homeassistant/components/vicare/number.py | 15 ++--- .../components/vicare/quality_scale.yaml | 4 +- homeassistant/components/vicare/sensor.py | 11 +--- homeassistant/components/vicare/types.py | 13 ++++ homeassistant/components/vicare/utils.py | 40 ++++++++++-- .../components/vicare/water_heater.py | 10 +-- tests/components/vicare/conftest.py | 4 +- tests/components/vicare/test_binary_sensor.py | 2 +- tests/components/vicare/test_button.py | 2 +- tests/components/vicare/test_climate.py | 2 +- tests/components/vicare/test_config_flow.py | 12 ++-- tests/components/vicare/test_fan.py | 2 +- tests/components/vicare/test_init.py | 2 +- tests/components/vicare/test_number.py | 2 +- tests/components/vicare/test_sensor.py | 4 +- tests/components/vicare/test_water_heater.py | 2 +- 24 files changed, 121 insertions(+), 131 deletions(-) diff --git a/homeassistant/components/vicare/__init__.py b/homeassistant/components/vicare/__init__.py index d6b9e4b923a..9c331f0e9ec 100644 --- a/homeassistant/components/vicare/__init__.py +++ b/homeassistant/components/vicare/__init__.py @@ -2,11 +2,9 @@ from __future__ import annotations -from collections.abc import Mapping from contextlib import suppress import logging import os -from typing import Any from PyViCare.PyViCare import PyViCare from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig @@ -16,8 +14,6 @@ from PyViCare.PyViCareUtils import ( ) from homeassistant.components.climate import DOMAIN as DOMAIN_CLIMATE -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_CLIENT_ID, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -25,31 +21,28 @@ from homeassistant.helpers.storage import STORAGE_DIR from .const import ( DEFAULT_CACHE_DURATION, - DEVICE_LIST, DOMAIN, PLATFORMS, UNSUPPORTED_DEVICES, + VICARE_TOKEN_FILENAME, ) -from .types import ViCareDevice -from .utils import get_device, get_device_serial +from .types import ViCareConfigEntry, ViCareData, ViCareDevice +from .utils import get_device, get_device_serial, login _LOGGER = logging.getLogger(__name__) -_TOKEN_FILENAME = "vicare_token.save" -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: ViCareConfigEntry) -> bool: """Set up from config entry.""" _LOGGER.debug("Setting up ViCare component") - - hass.data[DOMAIN] = {} - hass.data[DOMAIN][entry.entry_id] = {} - try: - await hass.async_add_executor_job(setup_vicare_api, hass, entry) + entry.runtime_data = await hass.async_add_executor_job( + setup_vicare_api, hass, entry + ) except (PyViCareInvalidConfigurationError, 
PyViCareInvalidCredentialsError) as err: raise ConfigEntryAuthFailed("Authentication failed") from err - for device in hass.data[DOMAIN][entry.entry_id][DEVICE_LIST]: + for device in entry.runtime_data.devices: # Migration can be removed in 2025.4.0 await async_migrate_devices_and_entities(hass, entry, device) @@ -58,28 +51,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -def vicare_login( - hass: HomeAssistant, - entry_data: Mapping[str, Any], - cache_duration=DEFAULT_CACHE_DURATION, -) -> PyViCare: - """Login via PyVicare API.""" - vicare_api = PyViCare() - vicare_api.setCacheDuration(cache_duration) - vicare_api.initWithCredentials( - entry_data[CONF_USERNAME], - entry_data[CONF_PASSWORD], - entry_data[CONF_CLIENT_ID], - hass.config.path(STORAGE_DIR, _TOKEN_FILENAME), - ) - return vicare_api - - -def setup_vicare_api(hass: HomeAssistant, entry: ConfigEntry) -> None: +def setup_vicare_api(hass: HomeAssistant, entry: ViCareConfigEntry) -> PyViCare: """Set up PyVicare API.""" - vicare_api = vicare_login(hass, entry.data) + client = login(hass, entry.data) - device_config_list = get_supported_devices(vicare_api.devices) + device_config_list = get_supported_devices(client.devices) + + # increase cache duration to fit rate limit to number of devices if (number_of_devices := len(device_config_list)) > 1: cache_duration = DEFAULT_CACHE_DURATION * number_of_devices _LOGGER.debug( @@ -87,36 +65,35 @@ def setup_vicare_api(hass: HomeAssistant, entry: ConfigEntry) -> None: number_of_devices, cache_duration, ) - vicare_api = vicare_login(hass, entry.data, cache_duration) - device_config_list = get_supported_devices(vicare_api.devices) + client = login(hass, entry.data, cache_duration) + device_config_list = get_supported_devices(client.devices) for device in device_config_list: _LOGGER.debug( "Found device: %s (online: %s)", device.getModel(), str(device.isOnline()) ) - hass.data[DOMAIN][entry.entry_id][DEVICE_LIST] = [ + devices = [ ViCareDevice(config=device_config, api=get_device(entry, device_config)) for device_config in device_config_list ] + return ViCareData(client=client, devices=devices) -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: ViCareConfigEntry) -> bool: """Unload ViCare config entry.""" unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) with suppress(FileNotFoundError): await hass.async_add_executor_job( - os.remove, hass.config.path(STORAGE_DIR, _TOKEN_FILENAME) + os.remove, hass.config.path(STORAGE_DIR, VICARE_TOKEN_FILENAME) ) return unload_ok async def async_migrate_devices_and_entities( - hass: HomeAssistant, entry: ConfigEntry, device: ViCareDevice + hass: HomeAssistant, entry: ViCareConfigEntry, device: ViCareDevice ) -> None: """Migrate old entry.""" device_registry = dr.async_get(hass) diff --git a/homeassistant/components/vicare/binary_sensor.py b/homeassistant/components/vicare/binary_sensor.py index 55f0ab96ed0..ced02dae97e 100644 --- a/homeassistant/components/vicare/binary_sensor.py +++ b/homeassistant/components/vicare/binary_sensor.py @@ -24,13 +24,11 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import 
DEVICE_LIST, DOMAIN from .entity import ViCareEntity -from .types import ViCareDevice, ViCareRequiredKeysMixin +from .types import ViCareConfigEntry, ViCareDevice, ViCareRequiredKeysMixin from .utils import ( get_burners, get_circuits, @@ -152,16 +150,14 @@ def _build_entities( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ViCareConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Create the ViCare binary sensor devices.""" - device_list = hass.data[DOMAIN][config_entry.entry_id][DEVICE_LIST] - async_add_entities( await hass.async_add_executor_job( _build_entities, - device_list, + config_entry.runtime_data.devices, ) ) diff --git a/homeassistant/components/vicare/button.py b/homeassistant/components/vicare/button.py index 49d142c1edb..ad7d600eba3 100644 --- a/homeassistant/components/vicare/button.py +++ b/homeassistant/components/vicare/button.py @@ -16,14 +16,12 @@ from PyViCare.PyViCareUtils import ( import requests from homeassistant.components.button import ButtonEntity, ButtonEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DEVICE_LIST, DOMAIN from .entity import ViCareEntity -from .types import ViCareDevice, ViCareRequiredKeysMixinWithSet +from .types import ViCareConfigEntry, ViCareDevice, ViCareRequiredKeysMixinWithSet from .utils import get_device_serial, is_supported _LOGGER = logging.getLogger(__name__) @@ -67,16 +65,14 @@ def _build_entities( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ViCareConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Create the ViCare button entities.""" - device_list = hass.data[DOMAIN][config_entry.entry_id][DEVICE_LIST] - async_add_entities( await hass.async_add_executor_job( _build_entities, - device_list, + config_entry.runtime_data.devices, ) ) diff --git a/homeassistant/components/vicare/climate.py b/homeassistant/components/vicare/climate.py index 67330bf201d..62231a4e2fe 100644 --- a/homeassistant/components/vicare/climate.py +++ b/homeassistant/components/vicare/climate.py @@ -24,7 +24,6 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_TEMPERATURE, PRECISION_TENTHS, @@ -37,9 +36,9 @@ from homeassistant.helpers import entity_platform import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DEVICE_LIST, DOMAIN +from .const import DOMAIN from .entity import ViCareEntity -from .types import HeatingProgram, ViCareDevice +from .types import HeatingProgram, ViCareConfigEntry, ViCareDevice from .utils import get_burners, get_circuits, get_compressors, get_device_serial _LOGGER = logging.getLogger(__name__) @@ -99,25 +98,22 @@ def _build_entities( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ViCareConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the ViCare climate platform.""" platform = entity_platform.async_get_current_platform() - platform.async_register_entity_service( SERVICE_SET_VICARE_MODE, {vol.Required(SERVICE_SET_VICARE_MODE_ATTR_MODE): cv.string}, "set_vicare_mode", ) - device_list = 
hass.data[DOMAIN][config_entry.entry_id][DEVICE_LIST] - async_add_entities( await hass.async_add_executor_job( _build_entities, - device_list, + config_entry.runtime_data.devices, ) ) diff --git a/homeassistant/components/vicare/config_flow.py b/homeassistant/components/vicare/config_flow.py index c711cc06074..6594e6ec9e4 100644 --- a/homeassistant/components/vicare/config_flow.py +++ b/homeassistant/components/vicare/config_flow.py @@ -18,7 +18,6 @@ from homeassistant.const import CONF_CLIENT_ID, CONF_PASSWORD, CONF_USERNAME import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import format_mac -from . import vicare_login from .const import ( CONF_HEATING_TYPE, DEFAULT_HEATING_TYPE, @@ -26,6 +25,7 @@ from .const import ( VICARE_NAME, HeatingType, ) +from .utils import login _LOGGER = logging.getLogger(__name__) @@ -62,9 +62,7 @@ class ViCareConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: try: - await self.hass.async_add_executor_job( - vicare_login, self.hass, user_input - ) + await self.hass.async_add_executor_job(login, self.hass, user_input) except (PyViCareInvalidConfigurationError, PyViCareInvalidCredentialsError): errors["base"] = "invalid_auth" else: @@ -96,7 +94,7 @@ class ViCareConfigFlow(ConfigFlow, domain=DOMAIN): } try: - await self.hass.async_add_executor_job(vicare_login, self.hass, data) + await self.hass.async_add_executor_job(login, self.hass, data) except (PyViCareInvalidConfigurationError, PyViCareInvalidCredentialsError): errors["base"] = "invalid_auth" else: diff --git a/homeassistant/components/vicare/const.py b/homeassistant/components/vicare/const.py index 828a879927d..bcf41223d3f 100644 --- a/homeassistant/components/vicare/const.py +++ b/homeassistant/components/vicare/const.py @@ -25,8 +25,8 @@ UNSUPPORTED_DEVICES = [ "E3_RoomControl_One_522", ] -DEVICE_LIST = "device_list" VICARE_NAME = "ViCare" +VICARE_TOKEN_FILENAME = "vicare_token.save" CONF_CIRCUIT = "circuit" CONF_HEATING_TYPE = "heating_type" diff --git a/homeassistant/components/vicare/diagnostics.py b/homeassistant/components/vicare/diagnostics.py index 9182e96509f..7695c304451 100644 --- a/homeassistant/components/vicare/diagnostics.py +++ b/homeassistant/components/vicare/diagnostics.py @@ -6,25 +6,24 @@ import json from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_CLIENT_ID, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant -from .const import DEVICE_LIST, DOMAIN +from .types import ViCareConfigEntry TO_REDACT = {CONF_CLIENT_ID, CONF_PASSWORD, CONF_USERNAME} async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: ViCareConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" def dump_devices() -> list[dict[str, Any]]: """Dump devices.""" return [ - json.loads(device.config.dump_secure()) - for device in hass.data[DOMAIN][entry.entry_id][DEVICE_LIST] + json.loads(device.dump_secure()) + for device in entry.runtime_data.client.devices ] return { diff --git a/homeassistant/components/vicare/fan.py b/homeassistant/components/vicare/fan.py index 6e8513a1f7e..69aa8396fea 100644 --- a/homeassistant/components/vicare/fan.py +++ b/homeassistant/components/vicare/fan.py @@ -19,7 +19,6 @@ from PyViCare.PyViCareVentilationDevice import ( from requests.exceptions import ConnectionError as RequestConnectionError from 
homeassistant.components.fan import FanEntity, FanEntityFeature -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.percentage import ( @@ -27,9 +26,8 @@ from homeassistant.util.percentage import ( percentage_to_ordered_list_item, ) -from .const import DEVICE_LIST, DOMAIN from .entity import ViCareEntity -from .types import ViCareDevice +from .types import ViCareConfigEntry, ViCareDevice from .utils import get_device_serial _LOGGER = logging.getLogger(__name__) @@ -104,17 +102,14 @@ def _build_entities( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ViCareConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the ViCare fan platform.""" - - device_list = hass.data[DOMAIN][config_entry.entry_id][DEVICE_LIST] - async_add_entities( await hass.async_add_executor_job( _build_entities, - device_list, + config_entry.runtime_data.devices, ) ) diff --git a/homeassistant/components/vicare/number.py b/homeassistant/components/vicare/number.py index f9af9636941..8ffaa727634 100644 --- a/homeassistant/components/vicare/number.py +++ b/homeassistant/components/vicare/number.py @@ -25,14 +25,17 @@ from homeassistant.components.number import ( NumberEntity, NumberEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DEVICE_LIST, DOMAIN from .entity import ViCareEntity -from .types import HeatingProgram, ViCareDevice, ViCareRequiredKeysMixin +from .types import ( + HeatingProgram, + ViCareConfigEntry, + ViCareDevice, + ViCareRequiredKeysMixin, +) from .utils import get_circuits, get_device_serial, is_supported _LOGGER = logging.getLogger(__name__) @@ -370,16 +373,14 @@ def _build_entities( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ViCareConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Create the ViCare number devices.""" - device_list = hass.data[DOMAIN][config_entry.entry_id][DEVICE_LIST] - async_add_entities( await hass.async_add_executor_job( _build_entities, - device_list, + config_entry.runtime_data.devices, ) ) diff --git a/homeassistant/components/vicare/quality_scale.yaml b/homeassistant/components/vicare/quality_scale.yaml index 959e2e90583..35a1e7b0adb 100644 --- a/homeassistant/components/vicare/quality_scale.yaml +++ b/homeassistant/components/vicare/quality_scale.yaml @@ -6,9 +6,7 @@ rules: status: todo comment: Uniqueness is not checked yet. config-flow-test-coverage: done - runtime-data: - status: todo - comment: runtime_data is not used yet. 
+ runtime-data: done test-before-setup: done appropriate-polling: done entity-unique-id: done diff --git a/homeassistant/components/vicare/sensor.py b/homeassistant/components/vicare/sensor.py index 57b7c0bec9a..3386c849f74 100644 --- a/homeassistant/components/vicare/sensor.py +++ b/homeassistant/components/vicare/sensor.py @@ -25,7 +25,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( PERCENTAGE, EntityCategory, @@ -40,8 +39,6 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import ( - DEVICE_LIST, - DOMAIN, VICARE_CUBIC_METER, VICARE_KW, VICARE_KWH, @@ -50,7 +47,7 @@ from .const import ( VICARE_WH, ) from .entity import ViCareEntity -from .types import ViCareDevice, ViCareRequiredKeysMixin +from .types import ViCareConfigEntry, ViCareDevice, ViCareRequiredKeysMixin from .utils import ( get_burners, get_circuits, @@ -968,16 +965,14 @@ def _build_entities( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ViCareConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Create the ViCare sensor devices.""" - device_list = hass.data[DOMAIN][config_entry.entry_id][DEVICE_LIST] - async_add_entities( await hass.async_add_executor_job( _build_entities, - device_list, + config_entry.runtime_data.devices, ), # run update to have device_class set depending on unit_of_measurement True, diff --git a/homeassistant/components/vicare/types.py b/homeassistant/components/vicare/types.py index 98d1c0566ce..65ae2a53c3e 100644 --- a/homeassistant/components/vicare/types.py +++ b/homeassistant/components/vicare/types.py @@ -6,6 +6,7 @@ from dataclasses import dataclass import enum from typing import Any +from PyViCare.PyViCare import PyViCare from PyViCare.PyViCareDevice import Device as PyViCareDevice from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig @@ -15,6 +16,7 @@ from homeassistant.components.climate import ( PRESET_HOME, PRESET_SLEEP, ) +from homeassistant.config_entries import ConfigEntry class HeatingProgram(enum.StrEnum): @@ -80,6 +82,17 @@ class ViCareDevice: api: PyViCareDevice +@dataclass(frozen=True) +class ViCareData: + """ViCare data class.""" + + client: PyViCare + devices: list[ViCareDevice] + + +type ViCareConfigEntry = ConfigEntry[ViCareData] + + @dataclass(frozen=True) class ViCareRequiredKeysMixin: """Mixin for required keys.""" diff --git a/homeassistant/components/vicare/utils.py b/homeassistant/components/vicare/utils.py index 5156ea4a41e..120dad83113 100644 --- a/homeassistant/components/vicare/utils.py +++ b/homeassistant/components/vicare/utils.py @@ -1,7 +1,12 @@ """ViCare helpers functions.""" -import logging +from __future__ import annotations +from collections.abc import Mapping +import logging +from typing import Any + +from PyViCare.PyViCare import PyViCare from PyViCare.PyViCareDevice import Device as PyViCareDevice from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig from PyViCare.PyViCareHeatingDevice import ( @@ -14,16 +19,41 @@ from PyViCare.PyViCareUtils import ( ) import requests -from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_CLIENT_ID, CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant +from homeassistant.helpers.storage import STORAGE_DIR -from .const import CONF_HEATING_TYPE, HEATING_TYPE_TO_CREATOR_METHOD, 
HeatingType -from .types import ViCareRequiredKeysMixin +from .const import ( + CONF_HEATING_TYPE, + DEFAULT_CACHE_DURATION, + HEATING_TYPE_TO_CREATOR_METHOD, + VICARE_TOKEN_FILENAME, + HeatingType, +) +from .types import ViCareConfigEntry, ViCareRequiredKeysMixin _LOGGER = logging.getLogger(__name__) +def login( + hass: HomeAssistant, + entry_data: Mapping[str, Any], + cache_duration=DEFAULT_CACHE_DURATION, +) -> PyViCare: + """Login via PyVicare API.""" + vicare_api = PyViCare() + vicare_api.setCacheDuration(cache_duration) + vicare_api.initWithCredentials( + entry_data[CONF_USERNAME], + entry_data[CONF_PASSWORD], + entry_data[CONF_CLIENT_ID], + hass.config.path(STORAGE_DIR, VICARE_TOKEN_FILENAME), + ) + return vicare_api + + def get_device( - entry: ConfigEntry, device_config: PyViCareDeviceConfig + entry: ViCareConfigEntry, device_config: PyViCareDeviceConfig ) -> PyViCareDevice: """Get device for device config.""" return getattr( diff --git a/homeassistant/components/vicare/water_heater.py b/homeassistant/components/vicare/water_heater.py index 5e241c9a3be..114ff620c3f 100644 --- a/homeassistant/components/vicare/water_heater.py +++ b/homeassistant/components/vicare/water_heater.py @@ -20,14 +20,12 @@ from homeassistant.components.water_heater import ( WaterHeaterEntity, WaterHeaterEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, PRECISION_TENTHS, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DEVICE_LIST, DOMAIN from .entity import ViCareEntity -from .types import ViCareDevice +from .types import ViCareConfigEntry, ViCareDevice from .utils import get_circuits, get_device_serial _LOGGER = logging.getLogger(__name__) @@ -81,16 +79,14 @@ def _build_entities( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ViCareConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the ViCare water heater platform.""" - device_list = hass.data[DOMAIN][config_entry.entry_id][DEVICE_LIST] - async_add_entities( await hass.async_add_executor_job( _build_entities, - device_list, + config_entry.runtime_data.devices, ) ) diff --git a/tests/components/vicare/conftest.py b/tests/components/vicare/conftest.py index aadf85e7081..8e10d2f1a25 100644 --- a/tests/components/vicare/conftest.py +++ b/tests/components/vicare/conftest.py @@ -84,7 +84,7 @@ async def mock_vicare_gas_boiler( """Return a mocked ViCare API representing a single gas boiler device.""" fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] with patch( - f"{MODULE}.vicare_login", + f"{MODULE}.login", return_value=MockPyViCare(fixtures), ): await setup_integration(hass, mock_config_entry) @@ -102,7 +102,7 @@ async def mock_vicare_room_sensors( Fixture({"type:climateSensor"}, "vicare/RoomSensor2.json"), ] with patch( - f"{MODULE}.vicare_login", + f"{MODULE}.login", return_value=MockPyViCare(fixtures), ): await setup_integration(hass, mock_config_entry) diff --git a/tests/components/vicare/test_binary_sensor.py b/tests/components/vicare/test_binary_sensor.py index b9b8a57a59b..44612673a11 100644 --- a/tests/components/vicare/test_binary_sensor.py +++ b/tests/components/vicare/test_binary_sensor.py @@ -43,7 +43,7 @@ async def test_all_entities( """Test all entities.""" fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] with ( - 
patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.login", return_value=MockPyViCare(fixtures)), patch(f"{MODULE}.PLATFORMS", [Platform.BINARY_SENSOR]), ): await setup_integration(hass, mock_config_entry) diff --git a/tests/components/vicare/test_button.py b/tests/components/vicare/test_button.py index c024af41d78..cdc47e3833d 100644 --- a/tests/components/vicare/test_button.py +++ b/tests/components/vicare/test_button.py @@ -25,7 +25,7 @@ async def test_all_entities( """Test all entities.""" fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.login", return_value=MockPyViCare(fixtures)), patch(f"{MODULE}.PLATFORMS", [Platform.BUTTON]), ): await setup_integration(hass, mock_config_entry) diff --git a/tests/components/vicare/test_climate.py b/tests/components/vicare/test_climate.py index 44df87276e7..f48a8988cf0 100644 --- a/tests/components/vicare/test_climate.py +++ b/tests/components/vicare/test_climate.py @@ -25,7 +25,7 @@ async def test_all_entities( """Test all entities.""" fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.login", return_value=MockPyViCare(fixtures)), patch(f"{MODULE}.PLATFORMS", [Platform.CLIMATE]), ): await setup_integration(hass, mock_config_entry) diff --git a/tests/components/vicare/test_config_flow.py b/tests/components/vicare/test_config_flow.py index a522cf75d5d..d44fd1b9fed 100644 --- a/tests/components/vicare/test_config_flow.py +++ b/tests/components/vicare/test_config_flow.py @@ -49,7 +49,7 @@ async def test_user_create_entry( # test PyViCareInvalidConfigurationError with patch( - f"{MODULE}.config_flow.vicare_login", + f"{MODULE}.config_flow.login", side_effect=PyViCareInvalidConfigurationError( {"error": "foo", "error_description": "bar"} ), @@ -65,7 +65,7 @@ async def test_user_create_entry( # test PyViCareInvalidCredentialsError with patch( - f"{MODULE}.config_flow.vicare_login", + f"{MODULE}.config_flow.login", side_effect=PyViCareInvalidCredentialsError, ): result = await hass.config_entries.flow.async_configure( @@ -79,7 +79,7 @@ async def test_user_create_entry( # test success with patch( - f"{MODULE}.config_flow.vicare_login", + f"{MODULE}.config_flow.login", return_value=None, ): result = await hass.config_entries.flow.async_configure( @@ -110,7 +110,7 @@ async def test_step_reauth(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> # test PyViCareInvalidConfigurationError with patch( - f"{MODULE}.config_flow.vicare_login", + f"{MODULE}.config_flow.login", side_effect=PyViCareInvalidConfigurationError( {"error": "foo", "error_description": "bar"} ), @@ -125,7 +125,7 @@ async def test_step_reauth(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> # test success with patch( - f"{MODULE}.config_flow.vicare_login", + f"{MODULE}.config_flow.login", return_value=None, ): result = await hass.config_entries.flow.async_configure( @@ -160,7 +160,7 @@ async def test_form_dhcp( assert result["errors"] == {} with patch( - f"{MODULE}.config_flow.vicare_login", + f"{MODULE}.config_flow.login", return_value=None, ): result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/vicare/test_fan.py b/tests/components/vicare/test_fan.py index ba5db6e42c7..aaf6a968ffd 100644 --- a/tests/components/vicare/test_fan.py +++ 
b/tests/components/vicare/test_fan.py @@ -25,7 +25,7 @@ async def test_all_entities( """Test all entities.""" fixtures: list[Fixture] = [Fixture({"type:ventilation"}, "vicare/ViAir300F.json")] with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.login", return_value=MockPyViCare(fixtures)), patch(f"{MODULE}.PLATFORMS", [Platform.FAN]), ): await setup_integration(hass, mock_config_entry) diff --git a/tests/components/vicare/test_init.py b/tests/components/vicare/test_init.py index 62bec7f50c5..d553f2758b8 100644 --- a/tests/components/vicare/test_init.py +++ b/tests/components/vicare/test_init.py @@ -26,7 +26,7 @@ async def test_device_and_entity_migration( Fixture({"type:boiler"}, "vicare/dummy-device-no-serial.json"), ] with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.login", return_value=MockPyViCare(fixtures)), patch(f"{MODULE}.PLATFORMS", [Platform.CLIMATE]), ): mock_config_entry.add_to_hass(hass) diff --git a/tests/components/vicare/test_number.py b/tests/components/vicare/test_number.py index c3aa66a86f6..7b9c1915b95 100644 --- a/tests/components/vicare/test_number.py +++ b/tests/components/vicare/test_number.py @@ -25,7 +25,7 @@ async def test_all_entities( """Test all entities.""" fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.login", return_value=MockPyViCare(fixtures)), patch(f"{MODULE}.PLATFORMS", [Platform.NUMBER]), ): await setup_integration(hass, mock_config_entry) diff --git a/tests/components/vicare/test_sensor.py b/tests/components/vicare/test_sensor.py index 06c8b963680..afd3232478a 100644 --- a/tests/components/vicare/test_sensor.py +++ b/tests/components/vicare/test_sensor.py @@ -27,7 +27,7 @@ async def test_all_entities( Fixture({"type:boiler"}, "vicare/Vitodens300W.json"), ] with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.login", return_value=MockPyViCare(fixtures)), patch(f"{MODULE}.PLATFORMS", [Platform.SENSOR]), ): await setup_integration(hass, mock_config_entry) @@ -48,7 +48,7 @@ async def test_room_sensors( Fixture({"type:climateSensor"}, "vicare/RoomSensor2.json"), ] with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.login", return_value=MockPyViCare(fixtures)), patch(f"{MODULE}.PLATFORMS", [Platform.SENSOR]), ): await setup_integration(hass, mock_config_entry) diff --git a/tests/components/vicare/test_water_heater.py b/tests/components/vicare/test_water_heater.py index fbb5863cf7a..f9ca431af6d 100644 --- a/tests/components/vicare/test_water_heater.py +++ b/tests/components/vicare/test_water_heater.py @@ -25,7 +25,7 @@ async def test_all_entities( """Test all entities.""" fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.login", return_value=MockPyViCare(fixtures)), patch(f"{MODULE}.PLATFORMS", [Platform.WATER_HEATER]), ): await setup_integration(hass, mock_config_entry) From 29fa40a5cf276509160f3564d920a54c02294d76 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 22 Dec 2024 18:07:05 -1000 Subject: [PATCH 1043/1198] Add backup the list of integrations platforms to preload (#133856) `backup` is now at the top of the startup time list. This will help reduce it. 
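The mechanism is just Python's module cache: once a platform module has been imported ahead of time in an executor thread, any later import executed on the event loop resolves from sys.modules instead of doing blocking file I/O. A minimal, simplified sketch of that pattern (illustrative only, not the actual loader code; the module name here is arbitrary):

    import asyncio
    import importlib
    import sys

    async def preload(module_name: str) -> None:
        # Run the potentially slow, blocking import in a worker thread.
        loop = asyncio.get_running_loop()
        await loop.run_in_executor(None, importlib.import_module, module_name)

    async def main() -> None:
        await preload("json")          # blocking work happens off the event loop
        assert "json" in sys.modules   # later imports are plain cache lookups
        import json  # noqa: F401  - effectively free at this point

    asyncio.run(main())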
--- homeassistant/loader.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/loader.py b/homeassistant/loader.py index 1fa9d0cd49d..78c89b94765 100644 --- a/homeassistant/loader.py +++ b/homeassistant/loader.py @@ -78,6 +78,7 @@ BASE_PRELOAD_PLATFORMS = [ "repairs", "system_health", "trigger", + "backup", ] From de1b6a0dfcd2b679d020f8163db9c83ffb6c8bf0 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 22 Dec 2024 18:17:13 -1000 Subject: [PATCH 1044/1198] Add backup to the list of storage preloads (#133855) --- homeassistant/bootstrap.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/bootstrap.py b/homeassistant/bootstrap.py index 1034223051c..78c7d91fae0 100644 --- a/homeassistant/bootstrap.py +++ b/homeassistant/bootstrap.py @@ -252,6 +252,7 @@ PRELOAD_STORAGE = [ "assist_pipeline.pipelines", "core.analytics", "auth_module.totp", + "backup", ] From dcc9be02ca8cf5b024b7ec79e60e2504d941692c Mon Sep 17 00:00:00 2001 From: TheJulianJES Date: Mon, 23 Dec 2024 05:19:05 +0100 Subject: [PATCH 1045/1198] Bump ZHA to 0.0.43 (#133854) * Bump ZHA to 0.0.43 * Add strings for v2 quirk entities --- homeassistant/components/zha/manifest.json | 2 +- homeassistant/components/zha/strings.json | 108 +++++++++++++++++++++ requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 111 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index 3a301be9b02..e396c8776e7 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -21,7 +21,7 @@ "zha", "universal_silabs_flasher" ], - "requirements": ["universal-silabs-flasher==0.0.25", "zha==0.0.42"], + "requirements": ["universal-silabs-flasher==0.0.25", "zha==0.0.43"], "usb": [ { "vid": "10C4", diff --git a/homeassistant/components/zha/strings.json b/homeassistant/components/zha/strings.json index 4706e204872..8e4d3f78eb4 100644 --- a/homeassistant/components/zha/strings.json +++ b/homeassistant/components/zha/strings.json @@ -586,6 +586,12 @@ }, "preheat_status": { "name": "Pre-heat status" + }, + "open_window_detection_status": { + "name": "Open window detection status" + }, + "window_detection": { + "name": "Open window detection" } }, "button": { @@ -822,6 +828,57 @@ }, "approach_distance": { "name": "Approach distance" + }, + "fixed_load_demand": { + "name": "Fixed load demand" + }, + "display_brightness": { + "name": "Display brightness" + }, + "display_inactive_brightness": { + "name": "Display inactive brightness" + }, + "display_activity_timeout": { + "name": "Display activity timeout" + }, + "open_window_detection_threshold": { + "name": "Open window detection threshold" + }, + "open_window_event_duration": { + "name": "Open window event duration" + }, + "open_window_detection_guard_period": { + "name": "Open window detection guard period" + }, + "fallback_timeout": { + "name": "Fallback timeout" + }, + "boost_amount": { + "name": "Boost amount" + }, + "ambient_sensor_correction": { + "name": "Ambient sensor correction" + }, + "external_sensor_correction": { + "name": "External sensor correction" + }, + "move_sensitivity": { + "name": "Motion sensitivity" + }, + "detection_distance_min": { + "name": "Minimum range" + }, + "detection_distance_max": { + "name": "Maximum range" + }, + "presence_sensitivity": { + "name": "Presence sensitivity" + }, + "presence_timeout": { + "name": "Fade time" + }, + "regulator_set_point": { + "name": "Regulator set point" } }, "select": { 
@@ -926,6 +983,45 @@ }, "external_trigger_mode": { "name": "External trigger mode" + }, + "local_temperature_source": { + "name": "Local temperature source" + }, + "control_type": { + "name": "Control type" + }, + "thermostat_application": { + "name": "Thermostat application" + }, + "heating_fuel": { + "name": "Heating fuel" + }, + "heat_transfer_medium": { + "name": "Heat transfer medium" + }, + "heating_emitter_type": { + "name": "Heating emitter type" + }, + "external_temperature_sensor_type": { + "name": "External temperature sensor type" + }, + "preset_mode": { + "name": "Preset mode" + }, + "sensor_mode": { + "name": "Sensor mode" + }, + "thermostat_mode": { + "name": "Thermostat mode" + }, + "regulator_period": { + "name": "Regulator period" + }, + "click_mode": { + "name": "Click mode" + }, + "operation_mode": { + "name": "Operation mode" } }, "sensor": { @@ -1132,6 +1228,15 @@ }, "motion_distance": { "name": "Motion distance" + }, + "control_status": { + "name": "Control status" + }, + "distance": { + "name": "Target distance" + }, + "local_temperature_floor": { + "name": "Floor temperature" } }, "switch": { @@ -1257,6 +1362,9 @@ }, "enable_siren": { "name": "Enable siren" + }, + "find_switch": { + "name": "Distance switch" } } } diff --git a/requirements_all.txt b/requirements_all.txt index a02fe7f33ff..b194f249770 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3097,7 +3097,7 @@ zeroconf==0.136.2 zeversolar==0.3.2 # homeassistant.components.zha -zha==0.0.42 +zha==0.0.43 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.13 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index bbf04fbf2d6..2576bdeedf7 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2486,7 +2486,7 @@ zeroconf==0.136.2 zeversolar==0.3.2 # homeassistant.components.zha -zha==0.0.42 +zha==0.0.43 # homeassistant.components.zwave_js zwave-js-server-python==0.60.0 From 3658cdba4c865dc21977281f871edd35a024aed2 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 22 Dec 2024 18:19:44 -1000 Subject: [PATCH 1046/1198] Ensure late import in backup of hassio.backup does not block the event loop (#133857) * Ensure late import in backup of components.hassio.backup does not block the event loop Preload backup when loading hassio to ensure it happens in the executor https://github.com/home-assistant/core/blob/67f0de441b489890efa802a325f187b761098ad6/homeassistant/components/backup/__init__.py#L57 * improve comment --- homeassistant/components/hassio/__init__.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/homeassistant/components/hassio/__init__.py b/homeassistant/components/hassio/__init__.py index a2a9d8ff028..fec84737e78 100644 --- a/homeassistant/components/hassio/__init__.py +++ b/homeassistant/components/hassio/__init__.py @@ -64,7 +64,10 @@ from homeassistant.util.dt import now # config_flow, diagnostics, system_health, and entity platforms are imported to # ensure other dependencies that wait for hassio are not waiting # for hassio to import its platforms +# backup is pre-imported to ensure that the backup integration does not load +# it from the event loop from . 
import ( # noqa: F401 + backup, binary_sensor, config_flow, diagnostics, From cf45c670556d4d61b022b171f42ca3a6226f8747 Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Mon, 23 Dec 2024 05:26:11 +0100 Subject: [PATCH 1047/1198] Fix TypeError in maxcube climate action inference logic (#133853) The maxcube-api library initializes the valve_position as a None value, so that during initialization if the cube does not respond quickly enough the comparison fails to compare a None-Type to an integer. --- homeassistant/components/maxcube/climate.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/maxcube/climate.py b/homeassistant/components/maxcube/climate.py index da5a9f34dda..296da4f0ab4 100644 --- a/homeassistant/components/maxcube/climate.py +++ b/homeassistant/components/maxcube/climate.py @@ -171,8 +171,8 @@ class MaxCubeClimate(ClimateEntity): else: return None - # Assume heating when valve is open - if valve > 0: + # Assume heating when valve is open. + if valve: return HVACAction.HEATING return HVACAction.OFF if self.hvac_mode == HVACMode.OFF else HVACAction.IDLE From 6cdbdadc244f9257db5c5379ccf3032013ec5ec7 Mon Sep 17 00:00:00 2001 From: "Teemu R." Date: Mon, 23 Dec 2024 06:38:10 +0100 Subject: [PATCH 1048/1198] Ignore devices (bravias) with 'video' service_type for songpal discovery (#133724) --- homeassistant/components/songpal/config_flow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/songpal/config_flow.py b/homeassistant/components/songpal/config_flow.py index 41cc0763642..1c13013108f 100644 --- a/homeassistant/components/songpal/config_flow.py +++ b/homeassistant/components/songpal/config_flow.py @@ -116,7 +116,7 @@ class SongpalConfigFlow(ConfigFlow, domain=DOMAIN): ] # Ignore Bravia TVs - if "videoScreen" in service_types: + if "videoScreen" in service_types or "video" in service_types: return self.async_abort(reason="not_songpal_device") if TYPE_CHECKING: From ad0ee8f2d6cddaff48544b51f32d5d41df2d2781 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Dec 2024 08:18:23 +0100 Subject: [PATCH 1049/1198] Bump github/codeql-action from 3.27.9 to 3.28.0 (#133862) --- .github/workflows/codeql.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index d3efa8ebaa3..511ec963db3 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -24,11 +24,11 @@ jobs: uses: actions/checkout@v4.2.2 - name: Initialize CodeQL - uses: github/codeql-action/init@v3.27.9 + uses: github/codeql-action/init@v3.28.0 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3.27.9 + uses: github/codeql-action/analyze@v3.28.0 with: category: "/language:python" From 4321d27ed349583b2c4e18763dd51286010c8c0f Mon Sep 17 00:00:00 2001 From: jon6fingrs <53415122+jon6fingrs@users.noreply.github.com> Date: Mon, 23 Dec 2024 02:39:43 -0500 Subject: [PATCH 1050/1198] Ensure icalendar==6.1.0 is installed for caldav integration (#133541) --- homeassistant/components/caldav/manifest.json | 2 +- requirements_all.txt | 3 +++ requirements_test_all.txt | 3 +++ 3 files changed, 7 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/caldav/manifest.json b/homeassistant/components/caldav/manifest.json index e0d598e6493..5c1334c8029 100644 --- a/homeassistant/components/caldav/manifest.json +++ 
b/homeassistant/components/caldav/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/caldav", "iot_class": "cloud_polling", "loggers": ["caldav", "vobject"], - "requirements": ["caldav==1.3.9"] + "requirements": ["caldav==1.3.9", "icalendar==6.1.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index b194f249770..65d9ca63667 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1177,6 +1177,9 @@ ibmiotf==0.3.4 # homeassistant.components.local_todo ical==8.2.0 +# homeassistant.components.caldav +icalendar==6.1.0 + # homeassistant.components.ping icmplib==3.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 2576bdeedf7..b4d0fbf0432 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -997,6 +997,9 @@ ibeacon-ble==1.2.0 # homeassistant.components.local_todo ical==8.2.0 +# homeassistant.components.caldav +icalendar==6.1.0 + # homeassistant.components.ping icmplib==3.0 From ddb3edca5dd2dd56435d6377e6381478e9fcc75f Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 22 Dec 2024 21:44:01 -1000 Subject: [PATCH 1051/1198] Bump PySwitchbot to 0.55.4 (#133861) --- homeassistant/components/switchbot/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/switchbot/manifest.json b/homeassistant/components/switchbot/manifest.json index 3153e181af9..1b80da43e16 100644 --- a/homeassistant/components/switchbot/manifest.json +++ b/homeassistant/components/switchbot/manifest.json @@ -39,5 +39,5 @@ "documentation": "https://www.home-assistant.io/integrations/switchbot", "iot_class": "local_push", "loggers": ["switchbot"], - "requirements": ["PySwitchbot==0.55.3"] + "requirements": ["PySwitchbot==0.55.4"] } diff --git a/requirements_all.txt b/requirements_all.txt index 65d9ca63667..661571b2cb9 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -84,7 +84,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.55.3 +PySwitchbot==0.55.4 # homeassistant.components.switchmate PySwitchmate==0.5.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b4d0fbf0432..9ff8ca7c990 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -81,7 +81,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.55.3 +PySwitchbot==0.55.4 # homeassistant.components.syncthru PySyncThru==0.7.10 From 9e1ba004d4a880916c1cd38fa79579c52ed54829 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Mon, 23 Dec 2024 09:17:52 +0100 Subject: [PATCH 1052/1198] Add translated enum entity for Fronius error code (#133394) --- homeassistant/components/fronius/const.py | 161 +++ homeassistant/components/fronius/sensor.py | 10 + homeassistant/components/fronius/strings.json | 101 ++ .../fronius/snapshots/test_sensor.ambr | 976 ++++++++++++++++++ tests/components/fronius/test_sensor.py | 18 +- 5 files changed, 1257 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/fronius/const.py b/homeassistant/components/fronius/const.py index 273f1acab41..e8b2fa6c2e8 100644 --- a/homeassistant/components/fronius/const.py +++ b/homeassistant/components/fronius/const.py @@ -68,6 +68,167 @@ def get_inverter_status_message(code: StateType) -> InverterStatusCodeOption | N return _INVERTER_STATUS_CODES.get(code) # type: ignore[arg-type] +INVERTER_ERROR_CODES: Final[dict[int, str]] = { + 0: "no_error", + 
102: "ac_voltage_too_high", + 103: "ac_voltage_too_low", + 105: "ac_frequency_too_high", + 106: "ac_frequency_too_low", + 107: "ac_grid_outside_permissible_limits", + 108: "stand_alone_operation_detected", + 112: "rcmu_error", + 240: "arc_detection_triggered", + 241: "arc_detection_triggered", + 242: "arc_detection_triggered", + 243: "arc_detection_triggered", + 301: "overcurrent_ac", + 302: "overcurrent_dc", + 303: "dc_module_over_temperature", + 304: "ac_module_over_temperature", + 305: "no_power_fed_in_despite_closed_relay", + 306: "pv_output_too_low_for_feeding_energy_into_the_grid", + 307: "low_pv_voltage_dc_input_voltage_too_low", + 308: "intermediate_circuit_voltage_too_high", + 309: "dc_input_voltage_mppt_1_too_high", + 311: "polarity_of_dc_strings_reversed", + 313: "dc_input_voltage_mppt_2_too_high", + 314: "current_sensor_calibration_timeout", + 315: "ac_current_sensor_error", + 316: "interrupt_check_fail", + 325: "overtemperature_in_connection_area", + 326: "fan_1_error", + 327: "fan_2_error", + 401: "no_communication_with_power_stage_set", + 406: "ac_module_temperature_sensor_faulty_l1", + 407: "ac_module_temperature_sensor_faulty_l2", + 408: "dc_component_measured_in_grid_too_high", + 412: "fixed_voltage_mode_out_of_range", + 415: "safety_cut_out_triggered", + 416: "no_communication_between_power_stage_and_control_system", + 417: "hardware_id_problem", + 419: "unique_id_conflict", + 420: "no_communication_with_hybrid_manager", + 421: "hid_range_error", + 425: "no_communication_with_power_stage_set", + 426: "possible_hardware_fault", + 427: "possible_hardware_fault", + 428: "possible_hardware_fault", + 431: "software_problem", + 436: "functional_incompatibility_between_pc_boards", + 437: "power_stage_set_problem", + 438: "functional_incompatibility_between_pc_boards", + 443: "intermediate_circuit_voltage_too_low_or_asymmetric", + 445: "compatibility_error_invalid_power_stage_configuration", + 447: "insulation_fault", + 448: "neutral_conductor_not_connected", + 450: "guard_cannot_be_found", + 451: "memory_error_detected", + 452: "communication", + 502: "insulation_error_on_solar_panels", + 509: "no_energy_fed_into_grid_past_24_hours", + 515: "no_communication_with_filter", + 516: "no_communication_with_storage_unit", + 517: "power_derating_due_to_high_temperature", + 518: "internal_dsp_malfunction", + 519: "no_communication_with_storage_unit", + 520: "no_energy_fed_by_mppt1_past_24_hours", + 522: "dc_low_string_1", + 523: "dc_low_string_2", + 558: "functional_incompatibility_between_pc_boards", + 559: "functional_incompatibility_between_pc_boards", + 560: "derating_caused_by_over_frequency", + 564: "functional_incompatibility_between_pc_boards", + 566: "arc_detector_switched_off", + 567: "grid_voltage_dependent_power_reduction_active", + 601: "can_bus_full", + 603: "ac_module_temperature_sensor_faulty_l3", + 604: "dc_module_temperature_sensor_faulty", + 607: "rcmu_error", + 608: "functional_incompatibility_between_pc_boards", + 701: "internal_processor_status", + 702: "internal_processor_status", + 703: "internal_processor_status", + 704: "internal_processor_status", + 705: "internal_processor_status", + 706: "internal_processor_status", + 707: "internal_processor_status", + 708: "internal_processor_status", + 709: "internal_processor_status", + 710: "internal_processor_status", + 711: "internal_processor_status", + 712: "internal_processor_status", + 713: "internal_processor_status", + 714: "internal_processor_status", + 715: "internal_processor_status", + 716: 
"internal_processor_status", + 721: "eeprom_reinitialised", + 722: "internal_processor_status", + 723: "internal_processor_status", + 724: "internal_processor_status", + 725: "internal_processor_status", + 726: "internal_processor_status", + 727: "internal_processor_status", + 728: "internal_processor_status", + 729: "internal_processor_status", + 730: "internal_processor_status", + 731: "initialisation_error_usb_flash_drive_not_supported", + 732: "initialisation_error_usb_stick_over_current", + 733: "no_usb_flash_drive_connected", + 734: "update_file_not_recognised_or_missing", + 735: "update_file_does_not_match_device", + 736: "write_or_read_error_occurred", + 737: "file_could_not_be_opened", + 738: "log_file_cannot_be_saved", + 740: "initialisation_error_file_system_error_on_usb", + 741: "error_during_logging_data_recording", + 743: "error_during_update_process", + 745: "update_file_corrupt", + 746: "error_during_update_process", + 751: "time_lost", + 752: "real_time_clock_communication_error", + 753: "real_time_clock_in_emergency_mode", + 754: "internal_processor_status", + 755: "internal_processor_status", + 757: "real_time_clock_hardware_error", + 758: "real_time_clock_in_emergency_mode", + 760: "internal_hardware_error", + 761: "internal_processor_status", + 762: "internal_processor_status", + 763: "internal_processor_status", + 764: "internal_processor_status", + 765: "internal_processor_status", + 766: "emergency_power_derating_activated", + 767: "internal_processor_status", + 768: "different_power_limitation_in_hardware_modules", + 772: "storage_unit_not_available", + 773: "software_update_invalid_country_setup", + 775: "pmc_power_stage_set_not_available", + 776: "invalid_device_type", + 781: "internal_processor_status", + 782: "internal_processor_status", + 783: "internal_processor_status", + 784: "internal_processor_status", + 785: "internal_processor_status", + 786: "internal_processor_status", + 787: "internal_processor_status", + 788: "internal_processor_status", + 789: "internal_processor_status", + 790: "internal_processor_status", + 791: "internal_processor_status", + 792: "internal_processor_status", + 793: "internal_processor_status", + 794: "internal_processor_status", + 1001: "insulation_measurement_triggered", + 1024: "inverter_settings_changed_restart_required", + 1030: "wired_shut_down_triggered", + 1036: "grid_frequency_exceeded_limit_reconnecting", + 1112: "mains_voltage_dependent_power_reduction", + 1175: "too_little_dc_power_for_feed_in_operation", + 1196: "inverter_required_setup_values_not_received", + 65000: "dc_connection_inverter_battery_interrupted", +} + + class MeterLocationCodeOption(StrEnum): """Meter location codes for Fronius meters.""" diff --git a/homeassistant/components/fronius/sensor.py b/homeassistant/components/fronius/sensor.py index 95c5df269e4..03f666ffafd 100644 --- a/homeassistant/components/fronius/sensor.py +++ b/homeassistant/components/fronius/sensor.py @@ -33,6 +33,7 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import ( DOMAIN, + INVERTER_ERROR_CODES, SOLAR_NET_DISCOVERY_NEW, InverterStatusCodeOption, MeterLocationCodeOption, @@ -205,6 +206,15 @@ INVERTER_ENTITY_DESCRIPTIONS: list[FroniusSensorEntityDescription] = [ FroniusSensorEntityDescription( key="error_code", entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + FroniusSensorEntityDescription( + key="error_message", + response_key="error_code", + entity_category=EntityCategory.DIAGNOSTIC, + 
device_class=SensorDeviceClass.ENUM, + options=list(dict.fromkeys(INVERTER_ERROR_CODES.values())), + value_fn=INVERTER_ERROR_CODES.get, # type: ignore[arg-type] ), FroniusSensorEntityDescription( key="status_code", diff --git a/homeassistant/components/fronius/strings.json b/homeassistant/components/fronius/strings.json index e2740c76696..b77f6fec83c 100644 --- a/homeassistant/components/fronius/strings.json +++ b/homeassistant/components/fronius/strings.json @@ -73,6 +73,107 @@ "error_code": { "name": "Error code" }, + "error_message": { + "name": "Error message", + "state": { + "no_error": "No error", + "ac_voltage_too_high": "AC voltage too high", + "ac_voltage_too_low": "AC voltage too low", + "ac_frequency_too_high": "AC frequency too high", + "ac_frequency_too_low": "AC frequency too low", + "ac_grid_outside_permissible_limits": "AC grid outside the permissible limits", + "stand_alone_operation_detected": "Stand alone operation detected", + "rcmu_error": "RCMU error", + "arc_detection_triggered": "Arc detection triggered", + "overcurrent_ac": "Overcurrent (AC)", + "overcurrent_dc": "Overcurrent (DC)", + "dc_module_over_temperature": "DC module over temperature", + "ac_module_over_temperature": "AC module over temperature", + "no_power_fed_in_despite_closed_relay": "No power being fed in, despite closed relay", + "pv_output_too_low_for_feeding_energy_into_the_grid": "PV output too low for feeding energy into the grid", + "low_pv_voltage_dc_input_voltage_too_low": "Low PV voltage - DC input voltage too low for feeding energy into the grid", + "intermediate_circuit_voltage_too_high": "Intermediate circuit voltage too high", + "dc_input_voltage_mppt_1_too_high": "DC input voltage MPPT 1 too high", + "polarity_of_dc_strings_reversed": "Polarity of DC strings reversed", + "dc_input_voltage_mppt_2_too_high": "DC input voltage MPPT 2 too high", + "current_sensor_calibration_timeout": "Current sensor calibration timeout", + "ac_current_sensor_error": "AC current sensor error", + "interrupt_check_fail": "Interrupt Check fail", + "overtemperature_in_connection_area": "Overtemperature in the connection area", + "fan_1_error": "Fan 1 error", + "fan_2_error": "Fan 2 error", + "no_communication_with_power_stage_set": "No communication with the power stage set possible", + "ac_module_temperature_sensor_faulty_l1": "AC module temperature sensor faulty (L1)", + "ac_module_temperature_sensor_faulty_l2": "AC module temperature sensor faulty (L2)", + "dc_component_measured_in_grid_too_high": "DC component measured in the grid too high", + "fixed_voltage_mode_out_of_range": "Fixed voltage mode has been selected instead of MPP voltage mode and the fixed voltage has been set to too low or too high a value", + "safety_cut_out_triggered": "Safety cut out via option card or RECERBO has triggered", + "no_communication_between_power_stage_and_control_system": "No communication possible between power stage set and control system", + "hardware_id_problem": "Hardware ID problem", + "unique_id_conflict": "Unique ID conflict", + "no_communication_with_hybrid_manager": "No communication possible with the Hybrid manager", + "hid_range_error": "HID range error", + "possible_hardware_fault": "Possible hardware fault", + "software_problem": "Software problem", + "functional_incompatibility_between_pc_boards": "Functional incompatibility (one or more PC boards in the inverter are not compatible with each other, e.g. 
after a PC board has been replaced)", + "power_stage_set_problem": "Power stage set problem", + "intermediate_circuit_voltage_too_low_or_asymmetric": "Intermediate circuit voltage too low or asymmetric", + "compatibility_error_invalid_power_stage_configuration": "Compatibility error (e.g. due to replacement of a PC board) - invalid power stage set configuration", + "insulation_fault": "Insulation fault", + "neutral_conductor_not_connected": "Neutral conductor not connected", + "guard_cannot_be_found": "Guard cannot be found", + "memory_error_detected": "Memory error detected", + "communication": "Communication error", + "insulation_error_on_solar_panels": "Insulation error on the solar panels", + "no_energy_fed_into_grid_past_24_hours": "No energy fed into the grid in the past 24 hours", + "no_communication_with_filter": "No communication with filter possible", + "no_communication_with_storage_unit": "No communication possible with the storage unit", + "power_derating_due_to_high_temperature": "Power derating caused by too high a temperature", + "internal_dsp_malfunction": "Internal DSP malfunction", + "no_energy_fed_by_mppt1_past_24_hours": "No energy fed into the grid by MPPT1 in the past 24 hours", + "dc_low_string_1": "DC low string 1", + "dc_low_string_2": "DC low string 2", + "derating_caused_by_over_frequency": "Derating caused by over-frequency", + "arc_detector_switched_off": "Arc detector switched off (e.g. during external arc monitoring)", + "grid_voltage_dependent_power_reduction_active": "Grid Voltage Dependent Power Reduction is active", + "can_bus_full": "CAN bus is full", + "ac_module_temperature_sensor_faulty_l3": "AC module temperature sensor faulty (L3)", + "dc_module_temperature_sensor_faulty": "DC module temperature sensor faulty", + "internal_processor_status": "Warning about the internal processor status. See status code for more information", + "eeprom_reinitialised": "EEPROM has been re-initialised", + "initialisation_error_usb_flash_drive_not_supported": "Initialisation error – USB flash drive is not supported", + "initialisation_error_usb_stick_over_current": "Initialisation error – Over current on USB stick", + "no_usb_flash_drive_connected": "No USB flash drive connected", + "update_file_not_recognised_or_missing": "Update file not recognised or not present", + "update_file_does_not_match_device": "Update file does not match the device, update file too old", + "write_or_read_error_occurred": "Write or read error occurred", + "file_could_not_be_opened": "File could not be opened", + "log_file_cannot_be_saved": "Log file cannot be saved (e.g. 
USB flash drive is write protected or full)", + "initialisation_error_file_system_error_on_usb": "Initialisation error in file system on USB flash drive", + "error_during_logging_data_recording": "Error during recording of logging data", + "error_during_update_process": "Error occurred during update process", + "update_file_corrupt": "Update file corrupt", + "time_lost": "Time lost", + "real_time_clock_communication_error": "Real Time Clock module communication error", + "real_time_clock_in_emergency_mode": "Internal error: Real Time Clock module is in emergency mode", + "real_time_clock_hardware_error": "Hardware error in the Real Time Clock module", + "internal_hardware_error": "Internal hardware error", + "emergency_power_derating_activated": "Emergency power derating activated", + "different_power_limitation_in_hardware_modules": "Different power limitation in the hardware modules", + "storage_unit_not_available": "Storage unit not available", + "software_update_invalid_country_setup": "Software update group 0 (invalid country setup)", + "pmc_power_stage_set_not_available": "PMC power stage set not available", + "invalid_device_type": "Invalid device type", + "insulation_measurement_triggered": "Insulation measurement triggered", + "inverter_settings_changed_restart_required": "Inverter settings have been changed, inverter restart required", + "wired_shut_down_triggered": "Wired shut down triggered", + "grid_frequency_exceeded_limit_reconnecting": "The grid frequency has exceeded a limit value when reconnecting", + "mains_voltage_dependent_power_reduction": "Mains voltage-dependent power reduction", + "too_little_dc_power_for_feed_in_operation": "Too little DC power for feed-in operation", + "inverter_required_setup_values_not_received": "Inverter required setup values could not be received", + "dc_connection_inverter_battery_interrupted": "DC connection between inverter and battery interrupted" + } + }, "status_code": { "name": "Status code" }, diff --git a/tests/components/fronius/snapshots/test_sensor.ambr b/tests/components/fronius/snapshots/test_sensor.ambr index 8f8c9d919fc..81770893273 100644 --- a/tests/components/fronius/snapshots/test_sensor.ambr +++ b/tests/components/fronius/snapshots/test_sensor.ambr @@ -402,6 +402,250 @@ 'state': '0', }) # --- +# name: test_gen24[sensor.inverter_name_error_message-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_error', + 'ac_voltage_too_high', + 'ac_voltage_too_low', + 'ac_frequency_too_high', + 'ac_frequency_too_low', + 'ac_grid_outside_permissible_limits', + 'stand_alone_operation_detected', + 'rcmu_error', + 'arc_detection_triggered', + 'overcurrent_ac', + 'overcurrent_dc', + 'dc_module_over_temperature', + 'ac_module_over_temperature', + 'no_power_fed_in_despite_closed_relay', + 'pv_output_too_low_for_feeding_energy_into_the_grid', + 'low_pv_voltage_dc_input_voltage_too_low', + 'intermediate_circuit_voltage_too_high', + 'dc_input_voltage_mppt_1_too_high', + 'polarity_of_dc_strings_reversed', + 'dc_input_voltage_mppt_2_too_high', + 'current_sensor_calibration_timeout', + 'ac_current_sensor_error', + 'interrupt_check_fail', + 'overtemperature_in_connection_area', + 'fan_1_error', + 'fan_2_error', + 'no_communication_with_power_stage_set', + 'ac_module_temperature_sensor_faulty_l1', + 'ac_module_temperature_sensor_faulty_l2', + 'dc_component_measured_in_grid_too_high', + 'fixed_voltage_mode_out_of_range', + 'safety_cut_out_triggered', + 
'no_communication_between_power_stage_and_control_system', + 'hardware_id_problem', + 'unique_id_conflict', + 'no_communication_with_hybrid_manager', + 'hid_range_error', + 'possible_hardware_fault', + 'software_problem', + 'functional_incompatibility_between_pc_boards', + 'power_stage_set_problem', + 'intermediate_circuit_voltage_too_low_or_asymmetric', + 'compatibility_error_invalid_power_stage_configuration', + 'insulation_fault', + 'neutral_conductor_not_connected', + 'guard_cannot_be_found', + 'memory_error_detected', + 'communication', + 'insulation_error_on_solar_panels', + 'no_energy_fed_into_grid_past_24_hours', + 'no_communication_with_filter', + 'no_communication_with_storage_unit', + 'power_derating_due_to_high_temperature', + 'internal_dsp_malfunction', + 'no_energy_fed_by_mppt1_past_24_hours', + 'dc_low_string_1', + 'dc_low_string_2', + 'derating_caused_by_over_frequency', + 'arc_detector_switched_off', + 'grid_voltage_dependent_power_reduction_active', + 'can_bus_full', + 'ac_module_temperature_sensor_faulty_l3', + 'dc_module_temperature_sensor_faulty', + 'internal_processor_status', + 'eeprom_reinitialised', + 'initialisation_error_usb_flash_drive_not_supported', + 'initialisation_error_usb_stick_over_current', + 'no_usb_flash_drive_connected', + 'update_file_not_recognised_or_missing', + 'update_file_does_not_match_device', + 'write_or_read_error_occurred', + 'file_could_not_be_opened', + 'log_file_cannot_be_saved', + 'initialisation_error_file_system_error_on_usb', + 'error_during_logging_data_recording', + 'error_during_update_process', + 'update_file_corrupt', + 'time_lost', + 'real_time_clock_communication_error', + 'real_time_clock_in_emergency_mode', + 'real_time_clock_hardware_error', + 'internal_hardware_error', + 'emergency_power_derating_activated', + 'different_power_limitation_in_hardware_modules', + 'storage_unit_not_available', + 'software_update_invalid_country_setup', + 'pmc_power_stage_set_not_available', + 'invalid_device_type', + 'insulation_measurement_triggered', + 'inverter_settings_changed_restart_required', + 'wired_shut_down_triggered', + 'grid_frequency_exceeded_limit_reconnecting', + 'mains_voltage_dependent_power_reduction', + 'too_little_dc_power_for_feed_in_operation', + 'inverter_required_setup_values_not_received', + 'dc_connection_inverter_battery_interrupted', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.inverter_name_error_message', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Error message', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'error_message', + 'unique_id': '12345678-error_message', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.inverter_name_error_message-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Inverter name Error message', + 'options': list([ + 'no_error', + 'ac_voltage_too_high', + 'ac_voltage_too_low', + 'ac_frequency_too_high', + 'ac_frequency_too_low', + 'ac_grid_outside_permissible_limits', + 'stand_alone_operation_detected', + 'rcmu_error', + 'arc_detection_triggered', + 'overcurrent_ac', + 'overcurrent_dc', + 'dc_module_over_temperature', + 'ac_module_over_temperature', + 
'no_power_fed_in_despite_closed_relay', + 'pv_output_too_low_for_feeding_energy_into_the_grid', + 'low_pv_voltage_dc_input_voltage_too_low', + 'intermediate_circuit_voltage_too_high', + 'dc_input_voltage_mppt_1_too_high', + 'polarity_of_dc_strings_reversed', + 'dc_input_voltage_mppt_2_too_high', + 'current_sensor_calibration_timeout', + 'ac_current_sensor_error', + 'interrupt_check_fail', + 'overtemperature_in_connection_area', + 'fan_1_error', + 'fan_2_error', + 'no_communication_with_power_stage_set', + 'ac_module_temperature_sensor_faulty_l1', + 'ac_module_temperature_sensor_faulty_l2', + 'dc_component_measured_in_grid_too_high', + 'fixed_voltage_mode_out_of_range', + 'safety_cut_out_triggered', + 'no_communication_between_power_stage_and_control_system', + 'hardware_id_problem', + 'unique_id_conflict', + 'no_communication_with_hybrid_manager', + 'hid_range_error', + 'possible_hardware_fault', + 'software_problem', + 'functional_incompatibility_between_pc_boards', + 'power_stage_set_problem', + 'intermediate_circuit_voltage_too_low_or_asymmetric', + 'compatibility_error_invalid_power_stage_configuration', + 'insulation_fault', + 'neutral_conductor_not_connected', + 'guard_cannot_be_found', + 'memory_error_detected', + 'communication', + 'insulation_error_on_solar_panels', + 'no_energy_fed_into_grid_past_24_hours', + 'no_communication_with_filter', + 'no_communication_with_storage_unit', + 'power_derating_due_to_high_temperature', + 'internal_dsp_malfunction', + 'no_energy_fed_by_mppt1_past_24_hours', + 'dc_low_string_1', + 'dc_low_string_2', + 'derating_caused_by_over_frequency', + 'arc_detector_switched_off', + 'grid_voltage_dependent_power_reduction_active', + 'can_bus_full', + 'ac_module_temperature_sensor_faulty_l3', + 'dc_module_temperature_sensor_faulty', + 'internal_processor_status', + 'eeprom_reinitialised', + 'initialisation_error_usb_flash_drive_not_supported', + 'initialisation_error_usb_stick_over_current', + 'no_usb_flash_drive_connected', + 'update_file_not_recognised_or_missing', + 'update_file_does_not_match_device', + 'write_or_read_error_occurred', + 'file_could_not_be_opened', + 'log_file_cannot_be_saved', + 'initialisation_error_file_system_error_on_usb', + 'error_during_logging_data_recording', + 'error_during_update_process', + 'update_file_corrupt', + 'time_lost', + 'real_time_clock_communication_error', + 'real_time_clock_in_emergency_mode', + 'real_time_clock_hardware_error', + 'internal_hardware_error', + 'emergency_power_derating_activated', + 'different_power_limitation_in_hardware_modules', + 'storage_unit_not_available', + 'software_update_invalid_country_setup', + 'pmc_power_stage_set_not_available', + 'invalid_device_type', + 'insulation_measurement_triggered', + 'inverter_settings_changed_restart_required', + 'wired_shut_down_triggered', + 'grid_frequency_exceeded_limit_reconnecting', + 'mains_voltage_dependent_power_reduction', + 'too_little_dc_power_for_feed_in_operation', + 'inverter_required_setup_values_not_received', + 'dc_connection_inverter_battery_interrupted', + ]), + }), + 'context': , + 'entity_id': 'sensor.inverter_name_error_message', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'no_error', + }) +# --- # name: test_gen24[sensor.inverter_name_frequency-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -3653,6 +3897,250 @@ 'state': '0', }) # --- +# name: test_gen24_storage[sensor.gen24_storage_error_message-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': 
dict({ + 'options': list([ + 'no_error', + 'ac_voltage_too_high', + 'ac_voltage_too_low', + 'ac_frequency_too_high', + 'ac_frequency_too_low', + 'ac_grid_outside_permissible_limits', + 'stand_alone_operation_detected', + 'rcmu_error', + 'arc_detection_triggered', + 'overcurrent_ac', + 'overcurrent_dc', + 'dc_module_over_temperature', + 'ac_module_over_temperature', + 'no_power_fed_in_despite_closed_relay', + 'pv_output_too_low_for_feeding_energy_into_the_grid', + 'low_pv_voltage_dc_input_voltage_too_low', + 'intermediate_circuit_voltage_too_high', + 'dc_input_voltage_mppt_1_too_high', + 'polarity_of_dc_strings_reversed', + 'dc_input_voltage_mppt_2_too_high', + 'current_sensor_calibration_timeout', + 'ac_current_sensor_error', + 'interrupt_check_fail', + 'overtemperature_in_connection_area', + 'fan_1_error', + 'fan_2_error', + 'no_communication_with_power_stage_set', + 'ac_module_temperature_sensor_faulty_l1', + 'ac_module_temperature_sensor_faulty_l2', + 'dc_component_measured_in_grid_too_high', + 'fixed_voltage_mode_out_of_range', + 'safety_cut_out_triggered', + 'no_communication_between_power_stage_and_control_system', + 'hardware_id_problem', + 'unique_id_conflict', + 'no_communication_with_hybrid_manager', + 'hid_range_error', + 'possible_hardware_fault', + 'software_problem', + 'functional_incompatibility_between_pc_boards', + 'power_stage_set_problem', + 'intermediate_circuit_voltage_too_low_or_asymmetric', + 'compatibility_error_invalid_power_stage_configuration', + 'insulation_fault', + 'neutral_conductor_not_connected', + 'guard_cannot_be_found', + 'memory_error_detected', + 'communication', + 'insulation_error_on_solar_panels', + 'no_energy_fed_into_grid_past_24_hours', + 'no_communication_with_filter', + 'no_communication_with_storage_unit', + 'power_derating_due_to_high_temperature', + 'internal_dsp_malfunction', + 'no_energy_fed_by_mppt1_past_24_hours', + 'dc_low_string_1', + 'dc_low_string_2', + 'derating_caused_by_over_frequency', + 'arc_detector_switched_off', + 'grid_voltage_dependent_power_reduction_active', + 'can_bus_full', + 'ac_module_temperature_sensor_faulty_l3', + 'dc_module_temperature_sensor_faulty', + 'internal_processor_status', + 'eeprom_reinitialised', + 'initialisation_error_usb_flash_drive_not_supported', + 'initialisation_error_usb_stick_over_current', + 'no_usb_flash_drive_connected', + 'update_file_not_recognised_or_missing', + 'update_file_does_not_match_device', + 'write_or_read_error_occurred', + 'file_could_not_be_opened', + 'log_file_cannot_be_saved', + 'initialisation_error_file_system_error_on_usb', + 'error_during_logging_data_recording', + 'error_during_update_process', + 'update_file_corrupt', + 'time_lost', + 'real_time_clock_communication_error', + 'real_time_clock_in_emergency_mode', + 'real_time_clock_hardware_error', + 'internal_hardware_error', + 'emergency_power_derating_activated', + 'different_power_limitation_in_hardware_modules', + 'storage_unit_not_available', + 'software_update_invalid_country_setup', + 'pmc_power_stage_set_not_available', + 'invalid_device_type', + 'insulation_measurement_triggered', + 'inverter_settings_changed_restart_required', + 'wired_shut_down_triggered', + 'grid_frequency_exceeded_limit_reconnecting', + 'mains_voltage_dependent_power_reduction', + 'too_little_dc_power_for_feed_in_operation', + 'inverter_required_setup_values_not_received', + 'dc_connection_inverter_battery_interrupted', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': , + 'entity_id': 'sensor.gen24_storage_error_message', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Error message', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'error_message', + 'unique_id': '12345678-error_message', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_error_message-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gen24 Storage Error message', + 'options': list([ + 'no_error', + 'ac_voltage_too_high', + 'ac_voltage_too_low', + 'ac_frequency_too_high', + 'ac_frequency_too_low', + 'ac_grid_outside_permissible_limits', + 'stand_alone_operation_detected', + 'rcmu_error', + 'arc_detection_triggered', + 'overcurrent_ac', + 'overcurrent_dc', + 'dc_module_over_temperature', + 'ac_module_over_temperature', + 'no_power_fed_in_despite_closed_relay', + 'pv_output_too_low_for_feeding_energy_into_the_grid', + 'low_pv_voltage_dc_input_voltage_too_low', + 'intermediate_circuit_voltage_too_high', + 'dc_input_voltage_mppt_1_too_high', + 'polarity_of_dc_strings_reversed', + 'dc_input_voltage_mppt_2_too_high', + 'current_sensor_calibration_timeout', + 'ac_current_sensor_error', + 'interrupt_check_fail', + 'overtemperature_in_connection_area', + 'fan_1_error', + 'fan_2_error', + 'no_communication_with_power_stage_set', + 'ac_module_temperature_sensor_faulty_l1', + 'ac_module_temperature_sensor_faulty_l2', + 'dc_component_measured_in_grid_too_high', + 'fixed_voltage_mode_out_of_range', + 'safety_cut_out_triggered', + 'no_communication_between_power_stage_and_control_system', + 'hardware_id_problem', + 'unique_id_conflict', + 'no_communication_with_hybrid_manager', + 'hid_range_error', + 'possible_hardware_fault', + 'software_problem', + 'functional_incompatibility_between_pc_boards', + 'power_stage_set_problem', + 'intermediate_circuit_voltage_too_low_or_asymmetric', + 'compatibility_error_invalid_power_stage_configuration', + 'insulation_fault', + 'neutral_conductor_not_connected', + 'guard_cannot_be_found', + 'memory_error_detected', + 'communication', + 'insulation_error_on_solar_panels', + 'no_energy_fed_into_grid_past_24_hours', + 'no_communication_with_filter', + 'no_communication_with_storage_unit', + 'power_derating_due_to_high_temperature', + 'internal_dsp_malfunction', + 'no_energy_fed_by_mppt1_past_24_hours', + 'dc_low_string_1', + 'dc_low_string_2', + 'derating_caused_by_over_frequency', + 'arc_detector_switched_off', + 'grid_voltage_dependent_power_reduction_active', + 'can_bus_full', + 'ac_module_temperature_sensor_faulty_l3', + 'dc_module_temperature_sensor_faulty', + 'internal_processor_status', + 'eeprom_reinitialised', + 'initialisation_error_usb_flash_drive_not_supported', + 'initialisation_error_usb_stick_over_current', + 'no_usb_flash_drive_connected', + 'update_file_not_recognised_or_missing', + 'update_file_does_not_match_device', + 'write_or_read_error_occurred', + 'file_could_not_be_opened', + 'log_file_cannot_be_saved', + 'initialisation_error_file_system_error_on_usb', + 'error_during_logging_data_recording', + 'error_during_update_process', + 'update_file_corrupt', + 'time_lost', + 'real_time_clock_communication_error', + 'real_time_clock_in_emergency_mode', + 'real_time_clock_hardware_error', + 'internal_hardware_error', + 
'emergency_power_derating_activated', + 'different_power_limitation_in_hardware_modules', + 'storage_unit_not_available', + 'software_update_invalid_country_setup', + 'pmc_power_stage_set_not_available', + 'invalid_device_type', + 'insulation_measurement_triggered', + 'inverter_settings_changed_restart_required', + 'wired_shut_down_triggered', + 'grid_frequency_exceeded_limit_reconnecting', + 'mains_voltage_dependent_power_reduction', + 'too_little_dc_power_for_feed_in_operation', + 'inverter_required_setup_values_not_received', + 'dc_connection_inverter_battery_interrupted', + ]), + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_error_message', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'no_error', + }) +# --- # name: test_gen24_storage[sensor.gen24_storage_frequency-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -7022,6 +7510,250 @@ 'state': '0', }) # --- +# name: test_primo_s0[sensor.primo_3_0_1_error_message-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_error', + 'ac_voltage_too_high', + 'ac_voltage_too_low', + 'ac_frequency_too_high', + 'ac_frequency_too_low', + 'ac_grid_outside_permissible_limits', + 'stand_alone_operation_detected', + 'rcmu_error', + 'arc_detection_triggered', + 'overcurrent_ac', + 'overcurrent_dc', + 'dc_module_over_temperature', + 'ac_module_over_temperature', + 'no_power_fed_in_despite_closed_relay', + 'pv_output_too_low_for_feeding_energy_into_the_grid', + 'low_pv_voltage_dc_input_voltage_too_low', + 'intermediate_circuit_voltage_too_high', + 'dc_input_voltage_mppt_1_too_high', + 'polarity_of_dc_strings_reversed', + 'dc_input_voltage_mppt_2_too_high', + 'current_sensor_calibration_timeout', + 'ac_current_sensor_error', + 'interrupt_check_fail', + 'overtemperature_in_connection_area', + 'fan_1_error', + 'fan_2_error', + 'no_communication_with_power_stage_set', + 'ac_module_temperature_sensor_faulty_l1', + 'ac_module_temperature_sensor_faulty_l2', + 'dc_component_measured_in_grid_too_high', + 'fixed_voltage_mode_out_of_range', + 'safety_cut_out_triggered', + 'no_communication_between_power_stage_and_control_system', + 'hardware_id_problem', + 'unique_id_conflict', + 'no_communication_with_hybrid_manager', + 'hid_range_error', + 'possible_hardware_fault', + 'software_problem', + 'functional_incompatibility_between_pc_boards', + 'power_stage_set_problem', + 'intermediate_circuit_voltage_too_low_or_asymmetric', + 'compatibility_error_invalid_power_stage_configuration', + 'insulation_fault', + 'neutral_conductor_not_connected', + 'guard_cannot_be_found', + 'memory_error_detected', + 'communication', + 'insulation_error_on_solar_panels', + 'no_energy_fed_into_grid_past_24_hours', + 'no_communication_with_filter', + 'no_communication_with_storage_unit', + 'power_derating_due_to_high_temperature', + 'internal_dsp_malfunction', + 'no_energy_fed_by_mppt1_past_24_hours', + 'dc_low_string_1', + 'dc_low_string_2', + 'derating_caused_by_over_frequency', + 'arc_detector_switched_off', + 'grid_voltage_dependent_power_reduction_active', + 'can_bus_full', + 'ac_module_temperature_sensor_faulty_l3', + 'dc_module_temperature_sensor_faulty', + 'internal_processor_status', + 'eeprom_reinitialised', + 'initialisation_error_usb_flash_drive_not_supported', + 'initialisation_error_usb_stick_over_current', + 'no_usb_flash_drive_connected', + 'update_file_not_recognised_or_missing', + 'update_file_does_not_match_device', + 'write_or_read_error_occurred', + 
'file_could_not_be_opened', + 'log_file_cannot_be_saved', + 'initialisation_error_file_system_error_on_usb', + 'error_during_logging_data_recording', + 'error_during_update_process', + 'update_file_corrupt', + 'time_lost', + 'real_time_clock_communication_error', + 'real_time_clock_in_emergency_mode', + 'real_time_clock_hardware_error', + 'internal_hardware_error', + 'emergency_power_derating_activated', + 'different_power_limitation_in_hardware_modules', + 'storage_unit_not_available', + 'software_update_invalid_country_setup', + 'pmc_power_stage_set_not_available', + 'invalid_device_type', + 'insulation_measurement_triggered', + 'inverter_settings_changed_restart_required', + 'wired_shut_down_triggered', + 'grid_frequency_exceeded_limit_reconnecting', + 'mains_voltage_dependent_power_reduction', + 'too_little_dc_power_for_feed_in_operation', + 'inverter_required_setup_values_not_received', + 'dc_connection_inverter_battery_interrupted', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_3_0_1_error_message', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Error message', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'error_message', + 'unique_id': '234567-error_message', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_error_message-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Primo 3.0-1 Error message', + 'options': list([ + 'no_error', + 'ac_voltage_too_high', + 'ac_voltage_too_low', + 'ac_frequency_too_high', + 'ac_frequency_too_low', + 'ac_grid_outside_permissible_limits', + 'stand_alone_operation_detected', + 'rcmu_error', + 'arc_detection_triggered', + 'overcurrent_ac', + 'overcurrent_dc', + 'dc_module_over_temperature', + 'ac_module_over_temperature', + 'no_power_fed_in_despite_closed_relay', + 'pv_output_too_low_for_feeding_energy_into_the_grid', + 'low_pv_voltage_dc_input_voltage_too_low', + 'intermediate_circuit_voltage_too_high', + 'dc_input_voltage_mppt_1_too_high', + 'polarity_of_dc_strings_reversed', + 'dc_input_voltage_mppt_2_too_high', + 'current_sensor_calibration_timeout', + 'ac_current_sensor_error', + 'interrupt_check_fail', + 'overtemperature_in_connection_area', + 'fan_1_error', + 'fan_2_error', + 'no_communication_with_power_stage_set', + 'ac_module_temperature_sensor_faulty_l1', + 'ac_module_temperature_sensor_faulty_l2', + 'dc_component_measured_in_grid_too_high', + 'fixed_voltage_mode_out_of_range', + 'safety_cut_out_triggered', + 'no_communication_between_power_stage_and_control_system', + 'hardware_id_problem', + 'unique_id_conflict', + 'no_communication_with_hybrid_manager', + 'hid_range_error', + 'possible_hardware_fault', + 'software_problem', + 'functional_incompatibility_between_pc_boards', + 'power_stage_set_problem', + 'intermediate_circuit_voltage_too_low_or_asymmetric', + 'compatibility_error_invalid_power_stage_configuration', + 'insulation_fault', + 'neutral_conductor_not_connected', + 'guard_cannot_be_found', + 'memory_error_detected', + 'communication', + 'insulation_error_on_solar_panels', + 'no_energy_fed_into_grid_past_24_hours', + 'no_communication_with_filter', + 'no_communication_with_storage_unit', + 
'power_derating_due_to_high_temperature', + 'internal_dsp_malfunction', + 'no_energy_fed_by_mppt1_past_24_hours', + 'dc_low_string_1', + 'dc_low_string_2', + 'derating_caused_by_over_frequency', + 'arc_detector_switched_off', + 'grid_voltage_dependent_power_reduction_active', + 'can_bus_full', + 'ac_module_temperature_sensor_faulty_l3', + 'dc_module_temperature_sensor_faulty', + 'internal_processor_status', + 'eeprom_reinitialised', + 'initialisation_error_usb_flash_drive_not_supported', + 'initialisation_error_usb_stick_over_current', + 'no_usb_flash_drive_connected', + 'update_file_not_recognised_or_missing', + 'update_file_does_not_match_device', + 'write_or_read_error_occurred', + 'file_could_not_be_opened', + 'log_file_cannot_be_saved', + 'initialisation_error_file_system_error_on_usb', + 'error_during_logging_data_recording', + 'error_during_update_process', + 'update_file_corrupt', + 'time_lost', + 'real_time_clock_communication_error', + 'real_time_clock_in_emergency_mode', + 'real_time_clock_hardware_error', + 'internal_hardware_error', + 'emergency_power_derating_activated', + 'different_power_limitation_in_hardware_modules', + 'storage_unit_not_available', + 'software_update_invalid_country_setup', + 'pmc_power_stage_set_not_available', + 'invalid_device_type', + 'insulation_measurement_triggered', + 'inverter_settings_changed_restart_required', + 'wired_shut_down_triggered', + 'grid_frequency_exceeded_limit_reconnecting', + 'mains_voltage_dependent_power_reduction', + 'too_little_dc_power_for_feed_in_operation', + 'inverter_required_setup_values_not_received', + 'dc_connection_inverter_battery_interrupted', + ]), + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_error_message', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'no_error', + }) +# --- # name: test_primo_s0[sensor.primo_3_0_1_frequency-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -7733,6 +8465,250 @@ 'state': '0', }) # --- +# name: test_primo_s0[sensor.primo_5_0_1_error_message-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_error', + 'ac_voltage_too_high', + 'ac_voltage_too_low', + 'ac_frequency_too_high', + 'ac_frequency_too_low', + 'ac_grid_outside_permissible_limits', + 'stand_alone_operation_detected', + 'rcmu_error', + 'arc_detection_triggered', + 'overcurrent_ac', + 'overcurrent_dc', + 'dc_module_over_temperature', + 'ac_module_over_temperature', + 'no_power_fed_in_despite_closed_relay', + 'pv_output_too_low_for_feeding_energy_into_the_grid', + 'low_pv_voltage_dc_input_voltage_too_low', + 'intermediate_circuit_voltage_too_high', + 'dc_input_voltage_mppt_1_too_high', + 'polarity_of_dc_strings_reversed', + 'dc_input_voltage_mppt_2_too_high', + 'current_sensor_calibration_timeout', + 'ac_current_sensor_error', + 'interrupt_check_fail', + 'overtemperature_in_connection_area', + 'fan_1_error', + 'fan_2_error', + 'no_communication_with_power_stage_set', + 'ac_module_temperature_sensor_faulty_l1', + 'ac_module_temperature_sensor_faulty_l2', + 'dc_component_measured_in_grid_too_high', + 'fixed_voltage_mode_out_of_range', + 'safety_cut_out_triggered', + 'no_communication_between_power_stage_and_control_system', + 'hardware_id_problem', + 'unique_id_conflict', + 'no_communication_with_hybrid_manager', + 'hid_range_error', + 'possible_hardware_fault', + 'software_problem', + 'functional_incompatibility_between_pc_boards', + 'power_stage_set_problem', + 
'intermediate_circuit_voltage_too_low_or_asymmetric', + 'compatibility_error_invalid_power_stage_configuration', + 'insulation_fault', + 'neutral_conductor_not_connected', + 'guard_cannot_be_found', + 'memory_error_detected', + 'communication', + 'insulation_error_on_solar_panels', + 'no_energy_fed_into_grid_past_24_hours', + 'no_communication_with_filter', + 'no_communication_with_storage_unit', + 'power_derating_due_to_high_temperature', + 'internal_dsp_malfunction', + 'no_energy_fed_by_mppt1_past_24_hours', + 'dc_low_string_1', + 'dc_low_string_2', + 'derating_caused_by_over_frequency', + 'arc_detector_switched_off', + 'grid_voltage_dependent_power_reduction_active', + 'can_bus_full', + 'ac_module_temperature_sensor_faulty_l3', + 'dc_module_temperature_sensor_faulty', + 'internal_processor_status', + 'eeprom_reinitialised', + 'initialisation_error_usb_flash_drive_not_supported', + 'initialisation_error_usb_stick_over_current', + 'no_usb_flash_drive_connected', + 'update_file_not_recognised_or_missing', + 'update_file_does_not_match_device', + 'write_or_read_error_occurred', + 'file_could_not_be_opened', + 'log_file_cannot_be_saved', + 'initialisation_error_file_system_error_on_usb', + 'error_during_logging_data_recording', + 'error_during_update_process', + 'update_file_corrupt', + 'time_lost', + 'real_time_clock_communication_error', + 'real_time_clock_in_emergency_mode', + 'real_time_clock_hardware_error', + 'internal_hardware_error', + 'emergency_power_derating_activated', + 'different_power_limitation_in_hardware_modules', + 'storage_unit_not_available', + 'software_update_invalid_country_setup', + 'pmc_power_stage_set_not_available', + 'invalid_device_type', + 'insulation_measurement_triggered', + 'inverter_settings_changed_restart_required', + 'wired_shut_down_triggered', + 'grid_frequency_exceeded_limit_reconnecting', + 'mains_voltage_dependent_power_reduction', + 'too_little_dc_power_for_feed_in_operation', + 'inverter_required_setup_values_not_received', + 'dc_connection_inverter_battery_interrupted', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_5_0_1_error_message', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Error message', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'error_message', + 'unique_id': '123456-error_message', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_error_message-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Primo 5.0-1 Error message', + 'options': list([ + 'no_error', + 'ac_voltage_too_high', + 'ac_voltage_too_low', + 'ac_frequency_too_high', + 'ac_frequency_too_low', + 'ac_grid_outside_permissible_limits', + 'stand_alone_operation_detected', + 'rcmu_error', + 'arc_detection_triggered', + 'overcurrent_ac', + 'overcurrent_dc', + 'dc_module_over_temperature', + 'ac_module_over_temperature', + 'no_power_fed_in_despite_closed_relay', + 'pv_output_too_low_for_feeding_energy_into_the_grid', + 'low_pv_voltage_dc_input_voltage_too_low', + 'intermediate_circuit_voltage_too_high', + 'dc_input_voltage_mppt_1_too_high', + 'polarity_of_dc_strings_reversed', + 'dc_input_voltage_mppt_2_too_high', + 
'current_sensor_calibration_timeout', + 'ac_current_sensor_error', + 'interrupt_check_fail', + 'overtemperature_in_connection_area', + 'fan_1_error', + 'fan_2_error', + 'no_communication_with_power_stage_set', + 'ac_module_temperature_sensor_faulty_l1', + 'ac_module_temperature_sensor_faulty_l2', + 'dc_component_measured_in_grid_too_high', + 'fixed_voltage_mode_out_of_range', + 'safety_cut_out_triggered', + 'no_communication_between_power_stage_and_control_system', + 'hardware_id_problem', + 'unique_id_conflict', + 'no_communication_with_hybrid_manager', + 'hid_range_error', + 'possible_hardware_fault', + 'software_problem', + 'functional_incompatibility_between_pc_boards', + 'power_stage_set_problem', + 'intermediate_circuit_voltage_too_low_or_asymmetric', + 'compatibility_error_invalid_power_stage_configuration', + 'insulation_fault', + 'neutral_conductor_not_connected', + 'guard_cannot_be_found', + 'memory_error_detected', + 'communication', + 'insulation_error_on_solar_panels', + 'no_energy_fed_into_grid_past_24_hours', + 'no_communication_with_filter', + 'no_communication_with_storage_unit', + 'power_derating_due_to_high_temperature', + 'internal_dsp_malfunction', + 'no_energy_fed_by_mppt1_past_24_hours', + 'dc_low_string_1', + 'dc_low_string_2', + 'derating_caused_by_over_frequency', + 'arc_detector_switched_off', + 'grid_voltage_dependent_power_reduction_active', + 'can_bus_full', + 'ac_module_temperature_sensor_faulty_l3', + 'dc_module_temperature_sensor_faulty', + 'internal_processor_status', + 'eeprom_reinitialised', + 'initialisation_error_usb_flash_drive_not_supported', + 'initialisation_error_usb_stick_over_current', + 'no_usb_flash_drive_connected', + 'update_file_not_recognised_or_missing', + 'update_file_does_not_match_device', + 'write_or_read_error_occurred', + 'file_could_not_be_opened', + 'log_file_cannot_be_saved', + 'initialisation_error_file_system_error_on_usb', + 'error_during_logging_data_recording', + 'error_during_update_process', + 'update_file_corrupt', + 'time_lost', + 'real_time_clock_communication_error', + 'real_time_clock_in_emergency_mode', + 'real_time_clock_hardware_error', + 'internal_hardware_error', + 'emergency_power_derating_activated', + 'different_power_limitation_in_hardware_modules', + 'storage_unit_not_available', + 'software_update_invalid_country_setup', + 'pmc_power_stage_set_not_available', + 'invalid_device_type', + 'insulation_measurement_triggered', + 'inverter_settings_changed_restart_required', + 'wired_shut_down_triggered', + 'grid_frequency_exceeded_limit_reconnecting', + 'mains_voltage_dependent_power_reduction', + 'too_little_dc_power_for_feed_in_operation', + 'inverter_required_setup_values_not_received', + 'dc_connection_inverter_battery_interrupted', + ]), + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_error_message', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'no_error', + }) +# --- # name: test_primo_s0[sensor.primo_5_0_1_frequency-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/fronius/test_sensor.py b/tests/components/fronius/test_sensor.py index b5d051d56ca..63f36705c8f 100644 --- a/tests/components/fronius/test_sensor.py +++ b/tests/components/fronius/test_sensor.py @@ -36,7 +36,7 @@ async def test_symo_inverter( mock_responses(aioclient_mock, night=True) await setup_fronius_integration(hass) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 58 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 59 
assert_state("sensor.symo_20_dc_current", 0) assert_state("sensor.symo_20_energy_day", 10828) assert_state("sensor.symo_20_total_energy", 44186900) @@ -49,7 +49,7 @@ async def test_symo_inverter( freezer.tick(FroniusInverterUpdateCoordinator.default_interval) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 64 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 65 # 4 additional AC entities assert_state("sensor.symo_20_dc_current", 2.19) assert_state("sensor.symo_20_energy_day", 1113) @@ -108,7 +108,7 @@ async def test_symo_meter( mock_responses(aioclient_mock) await setup_fronius_integration(hass) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 64 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 65 # states are rounded to 4 decimals assert_state("sensor.smart_meter_63a_current_phase_1", 7.755) assert_state("sensor.smart_meter_63a_current_phase_2", 6.68) @@ -205,7 +205,7 @@ async def test_symo_power_flow( mock_responses(aioclient_mock, night=True) await setup_fronius_integration(hass) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 58 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 59 # states are rounded to 4 decimals assert_state("sensor.solarnet_energy_day", 10828) assert_state("sensor.solarnet_total_energy", 44186900) @@ -223,7 +223,7 @@ async def test_symo_power_flow( async_fire_time_changed(hass) await hass.async_block_till_done() # 54 because power_flow `rel_SelfConsumption` and `P_PV` is not `null` anymore - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 60 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 61 assert_state("sensor.solarnet_energy_day", 1101.7001) assert_state("sensor.solarnet_total_energy", 44188000) assert_state("sensor.solarnet_energy_year", 25508788) @@ -242,7 +242,7 @@ async def test_symo_power_flow( freezer.tick(FroniusPowerFlowUpdateCoordinator.default_interval) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 60 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 61 assert_state("sensor.solarnet_energy_day", 10828) assert_state("sensor.solarnet_total_energy", 44186900) assert_state("sensor.solarnet_energy_year", 25507686) @@ -271,7 +271,7 @@ async def test_gen24( mock_responses(aioclient_mock, fixture_set="gen24") config_entry = await setup_fronius_integration(hass, is_logger=False) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 58 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 59 await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) assert_state("sensor.inverter_name_total_energy", 1530193.42) @@ -313,7 +313,7 @@ async def test_gen24_storage( hass, is_logger=False, unique_id="12345678" ) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 72 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 73 await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Devices @@ -367,7 +367,7 @@ async def test_primo_s0( mock_responses(aioclient_mock, fixture_set="primo_s0", inverter_ids=[1, 2]) config_entry = await setup_fronius_integration(hass, is_logger=True) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 47 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 49 await snapshot_platform(hass, 
entity_registry, snapshot, config_entry.entry_id) # Devices From 8991cd4f4622fcc10c80d98316d4c2121de14094 Mon Sep 17 00:00:00 2001 From: mrtlhfr <10065880+mrtlhfr@users.noreply.github.com> Date: Mon, 23 Dec 2024 03:23:04 -0500 Subject: [PATCH 1053/1198] Adding initial support for Tuya Electric Fireplaces (#133503) --- homeassistant/components/tuya/climate.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/homeassistant/components/tuya/climate.py b/homeassistant/components/tuya/climate.py index 62aa29494e9..1780256a740 100644 --- a/homeassistant/components/tuya/climate.py +++ b/homeassistant/components/tuya/climate.py @@ -77,6 +77,9 @@ CLIMATE_DESCRIPTIONS: dict[str, TuyaClimateEntityDescription] = { key="wkf", switch_only_hvac_mode=HVACMode.HEAT, ), + # Electric Fireplace + # https://developer.tuya.com/en/docs/iot/f?id=Kacpeobojffop + "dbl": TuyaClimateEntityDescription(key="dbl", switch_only_hvac_mode=HVACMode.HEAT), } From b1fe247eed570f9d32ade7eab35a2c6dcb87341d Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Mon, 23 Dec 2024 09:23:13 +0100 Subject: [PATCH 1054/1198] Upgrade QS from silver to gold for slide_local (#133863) Upgrade QS to gold --- homeassistant/components/slide_local/manifest.json | 2 +- homeassistant/components/slide_local/quality_scale.yaml | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/slide_local/manifest.json b/homeassistant/components/slide_local/manifest.json index 69d5c93b0af..7e524c54a25 100644 --- a/homeassistant/components/slide_local/manifest.json +++ b/homeassistant/components/slide_local/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/slide_local", "integration_type": "device", "iot_class": "local_polling", - "quality_scale": "silver", + "quality_scale": "gold", "requirements": ["goslide-api==0.7.0"], "zeroconf": [ { diff --git a/homeassistant/components/slide_local/quality_scale.yaml b/homeassistant/components/slide_local/quality_scale.yaml index 54dfd87d98c..0bb30ee8269 100644 --- a/homeassistant/components/slide_local/quality_scale.yaml +++ b/homeassistant/components/slide_local/quality_scale.yaml @@ -56,7 +56,9 @@ rules: comment: | Slide_local represents a single physical device, no dynamic changes of devices possible (besides removal of instance itself). discovery-update-info: done - repair-issues: todo + repair-issues: + status: exempt + comment: No issues/repairs. 
docs-use-cases: done docs-supported-devices: done docs-supported-functions: done From a6f631729962ef9d7a4bb0f82f30ac399d9ccfae Mon Sep 17 00:00:00 2001 From: Matrix Date: Mon, 23 Dec 2024 16:24:02 +0800 Subject: [PATCH 1055/1198] Add Leak detect entity for YoLink water meter controller (#131682) --- homeassistant/components/yolink/binary_sensor.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/homeassistant/components/yolink/binary_sensor.py b/homeassistant/components/yolink/binary_sensor.py index 07a1fb07cc0..fa4c2202b03 100644 --- a/homeassistant/components/yolink/binary_sensor.py +++ b/homeassistant/components/yolink/binary_sensor.py @@ -12,6 +12,7 @@ from yolink.const import ( ATTR_DEVICE_LEAK_SENSOR, ATTR_DEVICE_MOTION_SENSOR, ATTR_DEVICE_VIBRATION_SENSOR, + ATTR_DEVICE_WATER_METER_CONTROLLER, ) from yolink.device import YoLinkDevice @@ -44,6 +45,7 @@ SENSOR_DEVICE_TYPE = [ ATTR_DEVICE_LEAK_SENSOR, ATTR_DEVICE_VIBRATION_SENSOR, ATTR_DEVICE_CO_SMOKE_SENSOR, + ATTR_DEVICE_WATER_METER_CONTROLLER, ] @@ -84,6 +86,15 @@ SENSOR_TYPES: tuple[YoLinkBinarySensorEntityDescription, ...] = ( value=lambda state: state.get("smokeAlarm"), exists_fn=lambda device: device.device_type == ATTR_DEVICE_CO_SMOKE_SENSOR, ), + YoLinkBinarySensorEntityDescription( + key="pipe_leak_detected", + state_key="alarm", + device_class=BinarySensorDeviceClass.MOISTURE, + value=lambda state: state.get("leak") if state is not None else None, + exists_fn=lambda device: ( + device.device_type == ATTR_DEVICE_WATER_METER_CONTROLLER + ), + ), ) From 59d8c79371b4b7029e7faf64d53934c32e53e099 Mon Sep 17 00:00:00 2001 From: Duco Sebel <74970928+DCSBL@users.noreply.github.com> Date: Mon, 23 Dec 2024 10:27:53 +0100 Subject: [PATCH 1056/1198] Use user defined charge limit for charge limit range in Peblar (#133868) --- homeassistant/components/peblar/number.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/peblar/number.py b/homeassistant/components/peblar/number.py index d2983438a91..1a7cec43295 100644 --- a/homeassistant/components/peblar/number.py +++ b/homeassistant/components/peblar/number.py @@ -46,7 +46,7 @@ DESCRIPTIONS = [ entity_category=EntityCategory.CONFIG, native_step=1, native_min_value=6, - native_max_value_fn=lambda x: x.system_information.hardware_max_current, + native_max_value_fn=lambda x: x.user_configuration_coordinator.data.user_defined_charge_limit_current, native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, set_value_fn=lambda x, v: x.ev_interface(charge_current_limit=int(v) * 1000), value_fn=lambda x: round(x.ev.charge_current_limit / 1000), From 83f5ca5a303178b5ba2f08b48195c5fcd56f2c2f Mon Sep 17 00:00:00 2001 From: Marcel van der Veldt Date: Mon, 23 Dec 2024 11:10:10 +0100 Subject: [PATCH 1057/1198] Add actions with response values to Music Assistant (#133521) Co-authored-by: Franck Nijhof Co-authored-by: OzGav Co-authored-by: Joost Lekkerkerker --- .../components/music_assistant/__init__.py | 12 + .../components/music_assistant/actions.py | 212 ++++++++++++++++++ .../components/music_assistant/const.py | 50 +++++ .../components/music_assistant/icons.json | 5 +- .../music_assistant/media_player.py | 73 ++++-- .../components/music_assistant/schemas.py | 182 +++++++++++++++ .../components/music_assistant/services.yaml | 143 ++++++++++++ .../components/music_assistant/strings.json | 111 +++++++++ tests/components/music_assistant/common.py | 3 +- .../snapshots/test_actions.ambr | 202 +++++++++++++++++ .../snapshots/test_media_player.ambr | 85 
+++++++ .../music_assistant/test_actions.py | 68 ++++++ .../music_assistant/test_media_player.py | 24 ++ 13 files changed, 1155 insertions(+), 15 deletions(-) create mode 100644 homeassistant/components/music_assistant/actions.py create mode 100644 homeassistant/components/music_assistant/schemas.py create mode 100644 tests/components/music_assistant/snapshots/test_actions.ambr create mode 100644 tests/components/music_assistant/test_actions.py diff --git a/homeassistant/components/music_assistant/__init__.py b/homeassistant/components/music_assistant/__init__.py index 22de510ebe3..052f4f556c1 100644 --- a/homeassistant/components/music_assistant/__init__.py +++ b/homeassistant/components/music_assistant/__init__.py @@ -17,22 +17,28 @@ from homeassistant.core import Event, HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import device_registry as dr from homeassistant.helpers.aiohttp_client import async_get_clientsession +import homeassistant.helpers.config_validation as cv from homeassistant.helpers.issue_registry import ( IssueSeverity, async_create_issue, async_delete_issue, ) +from .actions import register_actions from .const import DOMAIN, LOGGER if TYPE_CHECKING: from music_assistant_models.event import MassEvent + from homeassistant.helpers.typing import ConfigType + PLATFORMS = [Platform.MEDIA_PLAYER] CONNECT_TIMEOUT = 10 LISTEN_READY_TIMEOUT = 30 +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) + type MusicAssistantConfigEntry = ConfigEntry[MusicAssistantEntryData] @@ -44,6 +50,12 @@ class MusicAssistantEntryData: listen_task: asyncio.Task +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the Music Assistant component.""" + register_actions(hass) + return True + + async def async_setup_entry( hass: HomeAssistant, entry: MusicAssistantConfigEntry ) -> bool: diff --git a/homeassistant/components/music_assistant/actions.py b/homeassistant/components/music_assistant/actions.py new file mode 100644 index 00000000000..f3297bf0a6f --- /dev/null +++ b/homeassistant/components/music_assistant/actions.py @@ -0,0 +1,212 @@ +"""Custom actions (previously known as services) for the Music Assistant integration.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from music_assistant_models.enums import MediaType +import voluptuous as vol + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import ( + HomeAssistant, + ServiceCall, + ServiceResponse, + SupportsResponse, + callback, +) +from homeassistant.exceptions import ServiceValidationError +import homeassistant.helpers.config_validation as cv + +from .const import ( + ATTR_ALBUM_ARTISTS_ONLY, + ATTR_ALBUM_TYPE, + ATTR_ALBUMS, + ATTR_ARTISTS, + ATTR_CONFIG_ENTRY_ID, + ATTR_FAVORITE, + ATTR_ITEMS, + ATTR_LIBRARY_ONLY, + ATTR_LIMIT, + ATTR_MEDIA_TYPE, + ATTR_OFFSET, + ATTR_ORDER_BY, + ATTR_PLAYLISTS, + ATTR_RADIO, + ATTR_SEARCH, + ATTR_SEARCH_ALBUM, + ATTR_SEARCH_ARTIST, + ATTR_SEARCH_NAME, + ATTR_TRACKS, + DOMAIN, +) +from .schemas import ( + LIBRARY_RESULTS_SCHEMA, + SEARCH_RESULT_SCHEMA, + media_item_dict_from_mass_item, +) + +if TYPE_CHECKING: + from music_assistant_client import MusicAssistantClient + + from . 
import MusicAssistantConfigEntry + +SERVICE_SEARCH = "search" +SERVICE_GET_LIBRARY = "get_library" +DEFAULT_OFFSET = 0 +DEFAULT_LIMIT = 25 +DEFAULT_SORT_ORDER = "name" + + +@callback +def get_music_assistant_client( + hass: HomeAssistant, config_entry_id: str +) -> MusicAssistantClient: + """Get the Music Assistant client for the given config entry.""" + entry: MusicAssistantConfigEntry | None + if not (entry := hass.config_entries.async_get_entry(config_entry_id)): + raise ServiceValidationError("Entry not found") + if entry.state is not ConfigEntryState.LOADED: + raise ServiceValidationError("Entry not loaded") + return entry.runtime_data.mass + + +@callback +def register_actions(hass: HomeAssistant) -> None: + """Register custom actions.""" + hass.services.async_register( + DOMAIN, + SERVICE_SEARCH, + handle_search, + schema=vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY_ID): str, + vol.Required(ATTR_SEARCH_NAME): cv.string, + vol.Optional(ATTR_MEDIA_TYPE): vol.All( + cv.ensure_list, [vol.Coerce(MediaType)] + ), + vol.Optional(ATTR_SEARCH_ARTIST): cv.string, + vol.Optional(ATTR_SEARCH_ALBUM): cv.string, + vol.Optional(ATTR_LIMIT, default=5): vol.Coerce(int), + vol.Optional(ATTR_LIBRARY_ONLY, default=False): cv.boolean, + } + ), + supports_response=SupportsResponse.ONLY, + ) + hass.services.async_register( + DOMAIN, + SERVICE_GET_LIBRARY, + handle_get_library, + schema=vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY_ID): str, + vol.Required(ATTR_MEDIA_TYPE): vol.Coerce(MediaType), + vol.Optional(ATTR_FAVORITE): cv.boolean, + vol.Optional(ATTR_SEARCH): cv.string, + vol.Optional(ATTR_LIMIT): cv.positive_int, + vol.Optional(ATTR_OFFSET): int, + vol.Optional(ATTR_ORDER_BY): cv.string, + vol.Optional(ATTR_ALBUM_TYPE): list[MediaType], + vol.Optional(ATTR_ALBUM_ARTISTS_ONLY): cv.boolean, + } + ), + supports_response=SupportsResponse.ONLY, + ) + + +async def handle_search(call: ServiceCall) -> ServiceResponse: + """Handle queue_command action.""" + mass = get_music_assistant_client(call.hass, call.data[ATTR_CONFIG_ENTRY_ID]) + search_name = call.data[ATTR_SEARCH_NAME] + search_artist = call.data.get(ATTR_SEARCH_ARTIST) + search_album = call.data.get(ATTR_SEARCH_ALBUM) + if search_album and search_artist: + search_name = f"{search_artist} - {search_album} - {search_name}" + elif search_album: + search_name = f"{search_album} - {search_name}" + elif search_artist: + search_name = f"{search_artist} - {search_name}" + search_results = await mass.music.search( + search_query=search_name, + media_types=call.data.get(ATTR_MEDIA_TYPE, MediaType.ALL), + limit=call.data[ATTR_LIMIT], + library_only=call.data[ATTR_LIBRARY_ONLY], + ) + response: ServiceResponse = SEARCH_RESULT_SCHEMA( + { + ATTR_ARTISTS: [ + media_item_dict_from_mass_item(mass, item) + for item in search_results.artists + ], + ATTR_ALBUMS: [ + media_item_dict_from_mass_item(mass, item) + for item in search_results.albums + ], + ATTR_TRACKS: [ + media_item_dict_from_mass_item(mass, item) + for item in search_results.tracks + ], + ATTR_PLAYLISTS: [ + media_item_dict_from_mass_item(mass, item) + for item in search_results.playlists + ], + ATTR_RADIO: [ + media_item_dict_from_mass_item(mass, item) + for item in search_results.radio + ], + } + ) + return response + + +async def handle_get_library(call: ServiceCall) -> ServiceResponse: + """Handle get_library action.""" + mass = get_music_assistant_client(call.hass, call.data[ATTR_CONFIG_ENTRY_ID]) + media_type = call.data[ATTR_MEDIA_TYPE] + limit = call.data.get(ATTR_LIMIT, DEFAULT_LIMIT) + 
offset = call.data.get(ATTR_OFFSET, DEFAULT_OFFSET) + order_by = call.data.get(ATTR_ORDER_BY, DEFAULT_SORT_ORDER) + base_params = { + "favorite": call.data.get(ATTR_FAVORITE), + "search": call.data.get(ATTR_SEARCH), + "limit": limit, + "offset": offset, + "order_by": order_by, + } + if media_type == MediaType.ALBUM: + library_result = await mass.music.get_library_albums( + **base_params, + album_types=call.data.get(ATTR_ALBUM_TYPE), + ) + elif media_type == MediaType.ARTIST: + library_result = await mass.music.get_library_artists( + **base_params, + album_artists_only=call.data.get(ATTR_ALBUM_ARTISTS_ONLY), + ) + elif media_type == MediaType.TRACK: + library_result = await mass.music.get_library_tracks( + **base_params, + ) + elif media_type == MediaType.RADIO: + library_result = await mass.music.get_library_radios( + **base_params, + ) + elif media_type == MediaType.PLAYLIST: + library_result = await mass.music.get_library_playlists( + **base_params, + ) + else: + raise ServiceValidationError(f"Unsupported media type {media_type}") + + response: ServiceResponse = LIBRARY_RESULTS_SCHEMA( + { + ATTR_ITEMS: [ + media_item_dict_from_mass_item(mass, item) for item in library_result + ], + ATTR_LIMIT: limit, + ATTR_OFFSET: offset, + ATTR_ORDER_BY: order_by, + ATTR_MEDIA_TYPE: media_type, + } + ) + return response diff --git a/homeassistant/components/music_assistant/const.py b/homeassistant/components/music_assistant/const.py index 6512f58b96c..1980c495278 100644 --- a/homeassistant/components/music_assistant/const.py +++ b/homeassistant/components/music_assistant/const.py @@ -14,5 +14,55 @@ ATTR_GROUP_PARENTS = "group_parents" ATTR_MASS_PLAYER_TYPE = "mass_player_type" ATTR_ACTIVE_QUEUE = "active_queue" ATTR_STREAM_TITLE = "stream_title" +ATTR_MEDIA_TYPE = "media_type" +ATTR_SEARCH_NAME = "name" +ATTR_SEARCH_ARTIST = "artist" +ATTR_SEARCH_ALBUM = "album" +ATTR_LIMIT = "limit" +ATTR_LIBRARY_ONLY = "library_only" +ATTR_FAVORITE = "favorite" +ATTR_SEARCH = "search" +ATTR_OFFSET = "offset" +ATTR_ORDER_BY = "order_by" +ATTR_ALBUM_TYPE = "album_type" +ATTR_ALBUM_ARTISTS_ONLY = "album_artists_only" +ATTR_CONFIG_ENTRY_ID = "config_entry_id" +ATTR_URI = "uri" +ATTR_IMAGE = "image" +ATTR_VERSION = "version" +ATTR_ARTISTS = "artists" +ATTR_ALBUMS = "albums" +ATTR_TRACKS = "tracks" +ATTR_PLAYLISTS = "playlists" +ATTR_RADIO = "radio" +ATTR_ITEMS = "items" +ATTR_RADIO_MODE = "radio_mode" +ATTR_MEDIA_ID = "media_id" +ATTR_ARTIST = "artist" +ATTR_ALBUM = "album" +ATTR_URL = "url" +ATTR_USE_PRE_ANNOUNCE = "use_pre_announce" +ATTR_ANNOUNCE_VOLUME = "announce_volume" +ATTR_SOURCE_PLAYER = "source_player" +ATTR_AUTO_PLAY = "auto_play" +ATTR_QUEUE_ID = "queue_id" +ATTR_ACTIVE = "active" +ATTR_SHUFFLE_ENABLED = "shuffle_enabled" +ATTR_REPEAT_MODE = "repeat_mode" +ATTR_CURRENT_INDEX = "current_index" +ATTR_ELAPSED_TIME = "elapsed_time" +ATTR_CURRENT_ITEM = "current_item" +ATTR_NEXT_ITEM = "next_item" +ATTR_QUEUE_ITEM_ID = "queue_item_id" +ATTR_DURATION = "duration" +ATTR_MEDIA_ITEM = "media_item" +ATTR_STREAM_DETAILS = "stream_details" +ATTR_CONTENT_TYPE = "content_type" +ATTR_SAMPLE_RATE = "sample_rate" +ATTR_BIT_DEPTH = "bit_depth" +ATTR_STREAM_TITLE = "stream_title" +ATTR_PROVIDER = "provider" +ATTR_ITEM_ID = "item_id" + LOGGER = logging.getLogger(__package__) diff --git a/homeassistant/components/music_assistant/icons.json b/homeassistant/components/music_assistant/icons.json index 7533dbb6dad..0fa64b8d273 100644 --- a/homeassistant/components/music_assistant/icons.json +++ 
b/homeassistant/components/music_assistant/icons.json @@ -2,6 +2,9 @@ "services": { "play_media": { "service": "mdi:play" }, "play_announcement": { "service": "mdi:bullhorn" }, - "transfer_queue": { "service": "mdi:transfer" } + "transfer_queue": { "service": "mdi:transfer" }, + "search": { "service": "mdi:magnify" }, + "get_queue": { "service": "mdi:playlist-music" }, + "get_library": { "service": "mdi:music-box-multiple" } } } diff --git a/homeassistant/components/music_assistant/media_player.py b/homeassistant/components/music_assistant/media_player.py index 7004f09aad5..9aa7498a2ee 100644 --- a/homeassistant/components/music_assistant/media_player.py +++ b/homeassistant/components/music_assistant/media_player.py @@ -36,8 +36,8 @@ from homeassistant.components.media_player import ( RepeatMode, async_process_play_media_url, ) -from homeassistant.const import STATE_OFF -from homeassistant.core import HomeAssistant +from homeassistant.const import ATTR_NAME, STATE_OFF +from homeassistant.core import HomeAssistant, ServiceResponse, SupportsResponse from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er import homeassistant.helpers.config_validation as cv @@ -48,9 +48,33 @@ from homeassistant.helpers.entity_platform import ( from homeassistant.util.dt import utc_from_timestamp from . import MusicAssistantConfigEntry -from .const import ATTR_ACTIVE_QUEUE, ATTR_MASS_PLAYER_TYPE, DOMAIN +from .const import ( + ATTR_ACTIVE, + ATTR_ACTIVE_QUEUE, + ATTR_ALBUM, + ATTR_ANNOUNCE_VOLUME, + ATTR_ARTIST, + ATTR_AUTO_PLAY, + ATTR_CURRENT_INDEX, + ATTR_CURRENT_ITEM, + ATTR_ELAPSED_TIME, + ATTR_ITEMS, + ATTR_MASS_PLAYER_TYPE, + ATTR_MEDIA_ID, + ATTR_MEDIA_TYPE, + ATTR_NEXT_ITEM, + ATTR_QUEUE_ID, + ATTR_RADIO_MODE, + ATTR_REPEAT_MODE, + ATTR_SHUFFLE_ENABLED, + ATTR_SOURCE_PLAYER, + ATTR_URL, + ATTR_USE_PRE_ANNOUNCE, + DOMAIN, +) from .entity import MusicAssistantEntity from .media_browser import async_browse_media +from .schemas import QUEUE_DETAILS_SCHEMA, queue_item_dict_from_mass_item if TYPE_CHECKING: from music_assistant_client import MusicAssistantClient @@ -89,16 +113,7 @@ QUEUE_OPTION_MAP = { SERVICE_PLAY_MEDIA_ADVANCED = "play_media" SERVICE_PLAY_ANNOUNCEMENT = "play_announcement" SERVICE_TRANSFER_QUEUE = "transfer_queue" -ATTR_RADIO_MODE = "radio_mode" -ATTR_MEDIA_ID = "media_id" -ATTR_MEDIA_TYPE = "media_type" -ATTR_ARTIST = "artist" -ATTR_ALBUM = "album" -ATTR_URL = "url" -ATTR_USE_PRE_ANNOUNCE = "use_pre_announce" -ATTR_ANNOUNCE_VOLUME = "announce_volume" -ATTR_SOURCE_PLAYER = "source_player" -ATTR_AUTO_PLAY = "auto_play" +SERVICE_GET_QUEUE = "get_queue" def catch_musicassistant_error[_R, **P]( @@ -179,6 +194,12 @@ async def async_setup_entry( }, "_async_handle_transfer_queue", ) + platform.async_register_entity_service( + SERVICE_GET_QUEUE, + schema=None, + func="_async_handle_get_queue", + supports_response=SupportsResponse.ONLY, + ) class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity): @@ -513,6 +534,32 @@ class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity): source_queue_id, target_queue_id, auto_play ) + @catch_musicassistant_error + async def _async_handle_get_queue(self) -> ServiceResponse: + """Handle get_queue action.""" + if not self.active_queue: + raise HomeAssistantError("No active queue found") + active_queue = self.active_queue + response: ServiceResponse = QUEUE_DETAILS_SCHEMA( + { + ATTR_QUEUE_ID: active_queue.queue_id, + ATTR_ACTIVE: active_queue.active, + ATTR_NAME: 
active_queue.display_name, + ATTR_ITEMS: active_queue.items, + ATTR_SHUFFLE_ENABLED: active_queue.shuffle_enabled, + ATTR_REPEAT_MODE: active_queue.repeat_mode.value, + ATTR_CURRENT_INDEX: active_queue.current_index, + ATTR_ELAPSED_TIME: active_queue.corrected_elapsed_time, + ATTR_CURRENT_ITEM: queue_item_dict_from_mass_item( + self.mass, active_queue.current_item + ), + ATTR_NEXT_ITEM: queue_item_dict_from_mass_item( + self.mass, active_queue.next_item + ), + } + ) + return response + async def async_browse_media( self, media_content_type: MediaType | str | None = None, diff --git a/homeassistant/components/music_assistant/schemas.py b/homeassistant/components/music_assistant/schemas.py new file mode 100644 index 00000000000..9caae2ee0b4 --- /dev/null +++ b/homeassistant/components/music_assistant/schemas.py @@ -0,0 +1,182 @@ +"""Voluptuous schemas for Music Assistant integration service responses.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from music_assistant_models.enums import MediaType +import voluptuous as vol + +from homeassistant.const import ATTR_NAME +import homeassistant.helpers.config_validation as cv + +from .const import ( + ATTR_ACTIVE, + ATTR_ALBUM, + ATTR_ALBUMS, + ATTR_ARTISTS, + ATTR_BIT_DEPTH, + ATTR_CONTENT_TYPE, + ATTR_CURRENT_INDEX, + ATTR_CURRENT_ITEM, + ATTR_DURATION, + ATTR_ELAPSED_TIME, + ATTR_IMAGE, + ATTR_ITEM_ID, + ATTR_ITEMS, + ATTR_LIMIT, + ATTR_MEDIA_ITEM, + ATTR_MEDIA_TYPE, + ATTR_NEXT_ITEM, + ATTR_OFFSET, + ATTR_ORDER_BY, + ATTR_PLAYLISTS, + ATTR_PROVIDER, + ATTR_QUEUE_ID, + ATTR_QUEUE_ITEM_ID, + ATTR_RADIO, + ATTR_REPEAT_MODE, + ATTR_SAMPLE_RATE, + ATTR_SHUFFLE_ENABLED, + ATTR_STREAM_DETAILS, + ATTR_STREAM_TITLE, + ATTR_TRACKS, + ATTR_URI, + ATTR_VERSION, +) + +if TYPE_CHECKING: + from music_assistant_client import MusicAssistantClient + from music_assistant_models.media_items import ItemMapping, MediaItemType + from music_assistant_models.queue_item import QueueItem + +MEDIA_ITEM_SCHEMA = vol.Schema( + { + vol.Required(ATTR_MEDIA_TYPE): vol.Coerce(MediaType), + vol.Required(ATTR_URI): cv.string, + vol.Required(ATTR_NAME): cv.string, + vol.Required(ATTR_VERSION): cv.string, + vol.Optional(ATTR_IMAGE, default=None): vol.Any(None, cv.string), + vol.Optional(ATTR_ARTISTS): [vol.Self], + vol.Optional(ATTR_ALBUM): vol.Self, + } +) + + +def media_item_dict_from_mass_item( + mass: MusicAssistantClient, + item: MediaItemType | ItemMapping | None, +) -> dict[str, Any] | None: + """Parse a Music Assistant MediaItem.""" + if not item: + return None + base = { + ATTR_MEDIA_TYPE: item.media_type, + ATTR_URI: item.uri, + ATTR_NAME: item.name, + ATTR_VERSION: item.version, + ATTR_IMAGE: mass.get_media_item_image_url(item), + } + if artists := getattr(item, "artists", None): + base[ATTR_ARTISTS] = [media_item_dict_from_mass_item(mass, x) for x in artists] + if album := getattr(item, "album", None): + base[ATTR_ALBUM] = media_item_dict_from_mass_item(mass, album) + return base + + +SEARCH_RESULT_SCHEMA = vol.Schema( + { + vol.Required(ATTR_ARTISTS): vol.All( + cv.ensure_list, [vol.Schema(MEDIA_ITEM_SCHEMA)] + ), + vol.Required(ATTR_ALBUMS): vol.All( + cv.ensure_list, [vol.Schema(MEDIA_ITEM_SCHEMA)] + ), + vol.Required(ATTR_TRACKS): vol.All( + cv.ensure_list, [vol.Schema(MEDIA_ITEM_SCHEMA)] + ), + vol.Required(ATTR_PLAYLISTS): vol.All( + cv.ensure_list, [vol.Schema(MEDIA_ITEM_SCHEMA)] + ), + vol.Required(ATTR_RADIO): vol.All( + cv.ensure_list, [vol.Schema(MEDIA_ITEM_SCHEMA)] + ), + }, +) + +LIBRARY_RESULTS_SCHEMA = vol.Schema( + 
{ + vol.Required(ATTR_ITEMS): vol.All( + cv.ensure_list, [vol.Schema(MEDIA_ITEM_SCHEMA)] + ), + vol.Required(ATTR_LIMIT): int, + vol.Required(ATTR_OFFSET): int, + vol.Required(ATTR_ORDER_BY): str, + vol.Required(ATTR_MEDIA_TYPE): vol.Coerce(MediaType), + } +) + +AUDIO_FORMAT_SCHEMA = vol.Schema( + { + vol.Required(ATTR_CONTENT_TYPE): str, + vol.Required(ATTR_SAMPLE_RATE): int, + vol.Required(ATTR_BIT_DEPTH): int, + vol.Required(ATTR_PROVIDER): str, + vol.Required(ATTR_ITEM_ID): str, + } +) + +QUEUE_ITEM_SCHEMA = vol.Schema( + { + vol.Required(ATTR_QUEUE_ITEM_ID): cv.string, + vol.Required(ATTR_NAME): cv.string, + vol.Optional(ATTR_DURATION, default=None): vol.Any(None, int), + vol.Optional(ATTR_MEDIA_ITEM, default=None): vol.Any( + None, vol.Schema(MEDIA_ITEM_SCHEMA) + ), + vol.Optional(ATTR_STREAM_DETAILS): vol.Schema(AUDIO_FORMAT_SCHEMA), + vol.Optional(ATTR_STREAM_TITLE, default=None): vol.Any(None, cv.string), + } +) + + +def queue_item_dict_from_mass_item( + mass: MusicAssistantClient, + item: QueueItem | None, +) -> dict[str, Any] | None: + """Parse a Music Assistant QueueItem.""" + if not item: + return None + base = { + ATTR_QUEUE_ITEM_ID: item.queue_item_id, + ATTR_NAME: item.name, + ATTR_DURATION: item.duration, + ATTR_MEDIA_ITEM: media_item_dict_from_mass_item(mass, item.media_item), + } + if streamdetails := item.streamdetails: + base[ATTR_STREAM_TITLE] = streamdetails.stream_title + base[ATTR_STREAM_DETAILS] = { + ATTR_CONTENT_TYPE: streamdetails.audio_format.content_type.value, + ATTR_SAMPLE_RATE: streamdetails.audio_format.sample_rate, + ATTR_BIT_DEPTH: streamdetails.audio_format.bit_depth, + ATTR_PROVIDER: streamdetails.provider, + ATTR_ITEM_ID: streamdetails.item_id, + } + + return base + + +QUEUE_DETAILS_SCHEMA = vol.Schema( + { + vol.Required(ATTR_QUEUE_ID): str, + vol.Required(ATTR_ACTIVE): bool, + vol.Required(ATTR_NAME): str, + vol.Required(ATTR_ITEMS): int, + vol.Required(ATTR_SHUFFLE_ENABLED): bool, + vol.Required(ATTR_REPEAT_MODE): str, + vol.Required(ATTR_CURRENT_INDEX): vol.Any(None, int), + vol.Required(ATTR_ELAPSED_TIME): vol.Coerce(int), + vol.Required(ATTR_CURRENT_ITEM): vol.Any(None, QUEUE_ITEM_SCHEMA), + vol.Required(ATTR_NEXT_ITEM): vol.Any(None, QUEUE_ITEM_SCHEMA), + } +) diff --git a/homeassistant/components/music_assistant/services.yaml b/homeassistant/components/music_assistant/services.yaml index 00f895c4ef6..73e8e2d7521 100644 --- a/homeassistant/components/music_assistant/services.yaml +++ b/homeassistant/components/music_assistant/services.yaml @@ -88,3 +88,146 @@ transfer_queue: example: "true" selector: boolean: + +get_queue: + target: + entity: + domain: media_player + integration: music_assistant + supported_features: + - media_player.MediaPlayerEntityFeature.PLAY_MEDIA + +search: + fields: + config_entry_id: + required: true + selector: + config_entry: + integration: music_assistant + name: + required: true + example: "We Are The Champions" + selector: + text: + media_type: + example: "playlist" + selector: + select: + multiple: true + translation_key: media_type + options: + - artist + - album + - playlist + - track + - radio + artist: + example: "Queen" + selector: + text: + album: + example: "News of the world" + selector: + text: + limit: + advanced: true + example: 25 + default: 5 + selector: + number: + min: 1 + max: 100 + step: 1 + library_only: + example: "true" + default: false + selector: + boolean: + +get_library: + fields: + config_entry_id: + required: true + selector: + config_entry: + integration: music_assistant + media_type: + 
required: true + example: "playlist" + selector: + select: + translation_key: media_type + options: + - artist + - album + - playlist + - track + - radio + favorite: + example: "true" + default: false + selector: + boolean: + search: + example: "We Are The Champions" + selector: + text: + limit: + advanced: true + example: 25 + default: 25 + selector: + number: + min: 1 + max: 500 + step: 1 + offset: + advanced: true + example: 25 + default: 0 + selector: + number: + min: 1 + max: 1000000 + step: 1 + order_by: + example: "random" + selector: + select: + translation_key: order_by + options: + - name + - name_desc + - sort_name + - sort_name_desc + - timestamp_added + - timestamp_added_desc + - last_played + - last_played_desc + - play_count + - play_count_desc + - year + - year_desc + - position + - position_desc + - artist_name + - artist_name_desc + - random + - random_play_count + album_type: + example: "single" + selector: + select: + multiple: true + translation_key: album_type + options: + - album + - single + - compilation + - ep + - unknown + album_artists_only: + example: "true" + default: false + selector: + boolean: diff --git a/homeassistant/components/music_assistant/strings.json b/homeassistant/components/music_assistant/strings.json index cce7f9607c2..af366c94310 100644 --- a/homeassistant/components/music_assistant/strings.json +++ b/homeassistant/components/music_assistant/strings.json @@ -99,6 +99,86 @@ "description": "Start playing the queue on the target player. Omit to use the default behavior." } } + }, + "get_queue": { + "name": "Get playerQueue details (advanced)", + "description": "Get the details of the currently active queue of a Music Assistant player." + }, + "search": { + "name": "Search Music Assistant", + "description": "Perform a global search on the Music Assistant library and all providers.", + "fields": { + "config_entry_id": { + "name": "Music Assistant instance", + "description": "Select the Music Assistant instance to perform the search on." + }, + "name": { + "name": "Search name", + "description": "The name/title to search for." + }, + "media_type": { + "name": "Media type(s)", + "description": "The type of the content to search. Such as artist, album, track, radio, or playlist. All types if omitted." + }, + "artist": { + "name": "Artist name", + "description": "When specifying a track or album name in the name field, you can optionally restrict results by this artist name." + }, + "album": { + "name": "Album name", + "description": "When specifying a track name in the name field, you can optionally restrict results by this album name." + }, + "limit": { + "name": "Limit", + "description": "Maximum number of items to return (per media type)." + }, + "library_only": { + "name": "Only library items", + "description": "Only include results that are in the library." + } + } + }, + "get_library": { + "name": "Get Library items", + "description": "Get items from a Music Assistant library.", + "fields": { + "config_entry_id": { + "name": "[%key:component::music_assistant::services::search::fields::config_entry_id::name%]", + "description": "[%key:component::music_assistant::services::search::fields::config_entry_id::description%]" + }, + "media_type": { + "name": "Media type", + "description": "The media type for which to request details for." + }, + "favorite": { + "name": "Favorites only", + "description": "Filter items so only favorites items are returned." 
+ }, + "search": { + "name": "Search", + "description": "Optional search string to search through this library." + }, + "limit": { + "name": "Limit", + "description": "Maximum number of items to return." + }, + "offset": { + "name": "Offset", + "description": "Offset to start the list from." + }, + "order_by": { + "name": "Order By", + "description": "Sort the list by this field." + }, + "album_type": { + "name": "Album type filter (albums library only)", + "description": "Filter albums by type." + }, + "album_artists_only": { + "name": "Enable album artists filter (only for artist library)", + "description": "Only return Album Artists when listing the Artists library items." + } + } } }, "selector": { @@ -119,6 +199,37 @@ "playlist": "Playlist", "radio": "Radio" } + }, + "order_by": { + "options": { + "name": "Name", + "name_desc": "Name (desc)", + "sort_name": "Sort name", + "sort_name_desc": "Sort name (desc)", + "timestamp_added": "Added", + "timestamp_added_desc": "Added (desc)", + "last_played": "Last played", + "last_played_desc": "Last played (desc)", + "play_count": "Play count", + "play_count_desc": "Play count (desc)", + "year": "Year", + "year_desc": "Year (desc)", + "position": "Position", + "position_desc": "Position (desc)", + "artist_name": "Artist name", + "artist_name_desc": "Artist name (desc)", + "random": "Random", + "random_play_count": "Random + least played" + } + }, + "album_type": { + "options": { + "album": "Album", + "single": "Single", + "ep": "EP", + "compilation": "Compilation", + "unknown": "Unknown" + } } } } diff --git a/tests/components/music_assistant/common.py b/tests/components/music_assistant/common.py index c8293b5622f..7c0f9df751a 100644 --- a/tests/components/music_assistant/common.py +++ b/tests/components/music_assistant/common.py @@ -30,7 +30,7 @@ def load_and_parse_fixture(fixture: str) -> dict[str, Any]: async def setup_integration_from_fixtures( hass: HomeAssistant, music_assistant_client: MagicMock, -) -> None: +) -> MockConfigEntry: """Set up MusicAssistant integration with fixture data.""" players = create_players_from_fixture() music_assistant_client.players._players = {x.player_id: x for x in players} @@ -65,6 +65,7 @@ async def setup_integration_from_fixtures( config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() + return config_entry def create_players_from_fixture() -> list[Player]: diff --git a/tests/components/music_assistant/snapshots/test_actions.ambr b/tests/components/music_assistant/snapshots/test_actions.ambr new file mode 100644 index 00000000000..6c30ffc512c --- /dev/null +++ b/tests/components/music_assistant/snapshots/test_actions.ambr @@ -0,0 +1,202 @@ +# serializer version: 1 +# name: test_get_library_action + dict({ + 'items': list([ + dict({ + 'album': dict({ + 'image': None, + 'media_type': , + 'name': 'Traveller', + 'uri': 'library://album/463', + 'version': '', + }), + 'artists': list([ + dict({ + 'image': None, + 'media_type': , + 'name': 'Chris Stapleton', + 'uri': 'library://artist/433', + 'version': '', + }), + ]), + 'image': None, + 'media_type': , + 'name': 'Tennessee Whiskey', + 'uri': 'library://track/456', + 'version': '', + }), + dict({ + 'album': dict({ + 'image': None, + 'media_type': , + 'name': 'Thelma + Louise', + 'uri': 'library://album/471', + 'version': '', + }), + 'artists': list([ + dict({ + 'image': None, + 'media_type': , + 'name': 'Bastille', + 'uri': 'library://artist/81', + 'version': '', + }), + ]), + 'image': 
None, + 'media_type': , + 'name': 'Thelma + Louise', + 'uri': 'library://track/467', + 'version': '', + }), + dict({ + 'album': dict({ + 'image': None, + 'media_type': , + 'name': 'HIStory - PAST, PRESENT AND FUTURE - BOOK I', + 'uri': 'library://album/486', + 'version': '', + }), + 'artists': list([ + dict({ + 'image': None, + 'media_type': , + 'name': 'Michael Jackson', + 'uri': 'library://artist/30', + 'version': '', + }), + ]), + 'image': None, + 'media_type': , + 'name': "They Don't Care About Us", + 'uri': 'library://track/485', + 'version': '', + }), + dict({ + 'album': dict({ + 'image': None, + 'media_type': , + 'name': 'Better Dayz', + 'uri': 'library://album/487', + 'version': '', + }), + 'artists': list([ + dict({ + 'image': None, + 'media_type': , + 'name': '2Pac', + 'uri': 'library://artist/159', + 'version': '', + }), + dict({ + 'image': None, + 'media_type': , + 'name': 'The Outlawz', + 'uri': 'library://artist/451', + 'version': '', + }), + ]), + 'image': None, + 'media_type': , + 'name': "They Don't Give A F**** About Us", + 'uri': 'library://track/486', + 'version': '', + }), + dict({ + 'album': dict({ + 'image': None, + 'media_type': , + 'name': 'Things We Lost In The Fire', + 'uri': 'library://album/488', + 'version': '', + }), + 'artists': list([ + dict({ + 'image': None, + 'media_type': , + 'name': 'Bastille', + 'uri': 'library://artist/81', + 'version': '', + }), + ]), + 'image': None, + 'media_type': , + 'name': 'Things We Lost In The Fire', + 'uri': 'library://track/487', + 'version': 'TORN Remix', + }), + dict({ + 'album': dict({ + 'image': None, + 'media_type': , + 'name': 'Doom Days', + 'uri': 'library://album/489', + 'version': '', + }), + 'artists': list([ + dict({ + 'image': None, + 'media_type': , + 'name': 'Bastille', + 'uri': 'library://artist/81', + 'version': '', + }), + ]), + 'image': None, + 'media_type': , + 'name': 'Those Nights', + 'uri': 'library://track/488', + 'version': '', + }), + ]), + 'limit': 25, + 'media_type': , + 'offset': 0, + 'order_by': 'name', + }) +# --- +# name: test_search_action + dict({ + 'albums': list([ + dict({ + 'artists': list([ + dict({ + 'image': None, + 'media_type': , + 'name': 'A Space Love Adventure', + 'uri': 'library://artist/289', + 'version': '', + }), + ]), + 'image': None, + 'media_type': , + 'name': 'Synth Punk EP', + 'uri': 'library://album/396', + 'version': '', + }), + dict({ + 'artists': list([ + dict({ + 'image': None, + 'media_type': , + 'name': 'Various Artists', + 'uri': 'library://artist/96', + 'version': '', + }), + ]), + 'image': None, + 'media_type': , + 'name': 'Synthwave (The 80S Revival)', + 'uri': 'library://album/95', + 'version': 'The 80S Revival', + }), + ]), + 'artists': list([ + ]), + 'playlists': list([ + ]), + 'radio': list([ + ]), + 'tracks': list([ + ]), + }) +# --- diff --git a/tests/components/music_assistant/snapshots/test_media_player.ambr b/tests/components/music_assistant/snapshots/test_media_player.ambr index e3d7a4a0cbc..6c5389dbd6a 100644 --- a/tests/components/music_assistant/snapshots/test_media_player.ambr +++ b/tests/components/music_assistant/snapshots/test_media_player.ambr @@ -188,3 +188,88 @@ 'state': 'off', }) # --- +# name: test_media_player_get_queue_action + dict({ + 'media_player.test_group_player_1': dict({ + 'active': True, + 'current_index': 26, + 'current_item': dict({ + 'duration': 536, + 'media_item': dict({ + 'album': dict({ + 'image': None, + 'media_type': , + 'name': 'Use Your Illusion I', + 'uri': 'spotify://album/0CxPbTRARqKUYighiEY9Sz', + 'version': '', 
+ }), + 'artists': list([ + dict({ + 'image': None, + 'media_type': , + 'name': "Guns N' Roses", + 'uri': 'spotify://artist/3qm84nBOXUEQ2vnTfUTTFC', + 'version': '', + }), + ]), + 'image': None, + 'media_type': , + 'name': 'November Rain', + 'uri': 'spotify://track/3YRCqOhFifThpSRFJ1VWFM', + 'version': '', + }), + 'name': "Guns N' Roses - November Rain", + 'queue_item_id': '5d95dc5be77e4f7eb4939f62cfef527b', + 'stream_details': dict({ + 'bit_depth': 16, + 'content_type': 'ogg', + 'item_id': '3YRCqOhFifThpSRFJ1VWFM', + 'provider': 'spotify', + 'sample_rate': 44100, + }), + 'stream_title': None, + }), + 'items': 1094, + 'name': 'Test Group Player 1', + 'next_item': dict({ + 'duration': 207, + 'media_item': dict({ + 'album': dict({ + 'image': None, + 'media_type': , + 'name': 'La Folie', + 'uri': 'qobuz://album/0724353468859', + 'version': '', + }), + 'artists': list([ + dict({ + 'image': None, + 'media_type': , + 'name': 'The Stranglers', + 'uri': 'qobuz://artist/26779', + 'version': '', + }), + ]), + 'image': None, + 'media_type': , + 'name': 'Golden Brown', + 'uri': 'qobuz://track/1004735', + 'version': '', + }), + 'name': 'The Stranglers - Golden Brown', + 'queue_item_id': '990ae8f29cdf4fb588d679b115621f55', + 'stream_details': dict({ + 'bit_depth': 16, + 'content_type': 'flac', + 'item_id': '1004735', + 'provider': 'qobuz', + 'sample_rate': 44100, + }), + 'stream_title': None, + }), + 'queue_id': 'test_group_player_1', + 'repeat_mode': 'all', + 'shuffle_enabled': True, + }), + }) +# --- diff --git a/tests/components/music_assistant/test_actions.py b/tests/components/music_assistant/test_actions.py new file mode 100644 index 00000000000..4d3917091c1 --- /dev/null +++ b/tests/components/music_assistant/test_actions.py @@ -0,0 +1,68 @@ +"""Test Music Assistant actions.""" + +from unittest.mock import AsyncMock, MagicMock + +from music_assistant_models.media_items import SearchResults +from syrupy import SnapshotAssertion + +from homeassistant.components.music_assistant.actions import ( + SERVICE_GET_LIBRARY, + SERVICE_SEARCH, +) +from homeassistant.components.music_assistant.const import ( + ATTR_CONFIG_ENTRY_ID, + ATTR_FAVORITE, + ATTR_MEDIA_TYPE, + ATTR_SEARCH_NAME, + DOMAIN as MASS_DOMAIN, +) +from homeassistant.core import HomeAssistant + +from .common import create_library_albums_from_fixture, setup_integration_from_fixtures + + +async def test_search_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, + snapshot: SnapshotAssertion, +) -> None: + """Test music assistant search action.""" + entry = await setup_integration_from_fixtures(hass, music_assistant_client) + + music_assistant_client.music.search = AsyncMock( + return_value=SearchResults( + albums=create_library_albums_from_fixture(), + ) + ) + response = await hass.services.async_call( + MASS_DOMAIN, + SERVICE_SEARCH, + { + ATTR_CONFIG_ENTRY_ID: entry.entry_id, + ATTR_SEARCH_NAME: "test", + }, + blocking=True, + return_response=True, + ) + assert response == snapshot + + +async def test_get_library_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, + snapshot: SnapshotAssertion, +) -> None: + """Test music assistant get_library action.""" + entry = await setup_integration_from_fixtures(hass, music_assistant_client) + response = await hass.services.async_call( + MASS_DOMAIN, + SERVICE_GET_LIBRARY, + { + ATTR_CONFIG_ENTRY_ID: entry.entry_id, + ATTR_FAVORITE: False, + ATTR_MEDIA_TYPE: "track", + }, + blocking=True, + return_response=True, + ) + assert response == snapshot diff --git 
a/tests/components/music_assistant/test_media_player.py b/tests/components/music_assistant/test_media_player.py index 13716b6a479..25dfcd22c72 100644 --- a/tests/components/music_assistant/test_media_player.py +++ b/tests/components/music_assistant/test_media_player.py @@ -6,6 +6,7 @@ from music_assistant_models.enums import MediaType, QueueOption from music_assistant_models.media_items import Track import pytest from syrupy import SnapshotAssertion +from syrupy.filters import paths from homeassistant.components.media_player import ( ATTR_GROUP_MEMBERS, @@ -32,6 +33,7 @@ from homeassistant.components.music_assistant.media_player import ( ATTR_SOURCE_PLAYER, ATTR_URL, ATTR_USE_PRE_ANNOUNCE, + SERVICE_GET_QUEUE, SERVICE_PLAY_ANNOUNCEMENT, SERVICE_PLAY_MEDIA_ADVANCED, SERVICE_TRANSFER_QUEUE, @@ -583,3 +585,25 @@ async def test_media_player_transfer_queue_action( auto_play=None, require_schema=25, ) + + +async def test_media_player_get_queue_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, + snapshot: SnapshotAssertion, +) -> None: + """Test media_player get_queue action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_group_player_1" + response = await hass.services.async_call( + MASS_DOMAIN, + SERVICE_GET_QUEUE, + { + ATTR_ENTITY_ID: entity_id, + }, + blocking=True, + return_response=True, + ) + # no call is made, this info comes from the cached queue data + assert music_assistant_client.send_command.call_count == 0 + assert response == snapshot(exclude=paths(f"{entity_id}.elapsed_time")) From ed7da35de4de633c5cdf1578e143dbaf9c06b492 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Mon, 23 Dec 2024 11:11:25 +0100 Subject: [PATCH 1058/1198] Add coordinator error handling for Peblar Rocksolid EV Chargers (#133809) --- .../components/peblar/coordinator.py | 84 +++++++++---- tests/components/peblar/test_coordinator.py | 119 ++++++++++++++++++ 2 files changed, 182 insertions(+), 21 deletions(-) create mode 100644 tests/components/peblar/test_coordinator.py diff --git a/homeassistant/components/peblar/coordinator.py b/homeassistant/components/peblar/coordinator.py index 4afc544cc1d..398788f1f9f 100644 --- a/homeassistant/components/peblar/coordinator.py +++ b/homeassistant/components/peblar/coordinator.py @@ -2,12 +2,16 @@ from __future__ import annotations +from collections.abc import Callable, Coroutine from dataclasses import dataclass from datetime import timedelta +from typing import Any, Concatenate from peblar import ( Peblar, PeblarApi, + PeblarAuthenticationError, + PeblarConnectionError, PeblarError, PeblarEVInterface, PeblarMeter, @@ -16,12 +20,13 @@ from peblar import ( PeblarVersions, ) -from homeassistant.config_entries import ConfigEntry +from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from tests.components.peblar.conftest import PeblarSystemInformation -from .const import LOGGER +from .const import DOMAIN, LOGGER @dataclass(kw_only=True) @@ -59,6 +64,49 @@ class PeblarData: system: PeblarSystem +def _coordinator_exception_handler[ + _DataUpdateCoordinatorT: PeblarDataUpdateCoordinator + | PeblarVersionDataUpdateCoordinator + | PeblarUserConfigurationDataUpdateCoordinator, + **_P, +]( + func: Callable[Concatenate[_DataUpdateCoordinatorT, _P], Coroutine[Any, Any, Any]], +) -> 
Callable[Concatenate[_DataUpdateCoordinatorT, _P], Coroutine[Any, Any, Any]]: + """Handle exceptions within the update handler of a coordinator.""" + + async def handler( + self: _DataUpdateCoordinatorT, *args: _P.args, **kwargs: _P.kwargs + ) -> Any: + try: + return await func(self, *args, **kwargs) + except PeblarAuthenticationError as error: + if self.config_entry and self.config_entry.state is ConfigEntryState.LOADED: + # This is not the first refresh, so let's reload + # the config entry to ensure we trigger a re-authentication + # flow (or recover in case of API token changes). + self.hass.config_entries.async_schedule_reload( + self.config_entry.entry_id + ) + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="authentication_error", + ) from error + except PeblarConnectionError as error: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="communication_error", + translation_placeholders={"error": str(error)}, + ) from error + except PeblarError as error: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="unknown_error", + translation_placeholders={"error": str(error)}, + ) from error + + return handler + + class PeblarVersionDataUpdateCoordinator( DataUpdateCoordinator[PeblarVersionInformation] ): @@ -77,15 +125,13 @@ class PeblarVersionDataUpdateCoordinator( update_interval=timedelta(hours=2), ) + @_coordinator_exception_handler async def _async_update_data(self) -> PeblarVersionInformation: """Fetch data from the Peblar device.""" - try: - return PeblarVersionInformation( - current=await self.peblar.current_versions(), - available=await self.peblar.available_versions(), - ) - except PeblarError as err: - raise UpdateFailed(err) from err + return PeblarVersionInformation( + current=await self.peblar.current_versions(), + available=await self.peblar.available_versions(), + ) class PeblarDataUpdateCoordinator(DataUpdateCoordinator[PeblarData]): @@ -104,16 +150,14 @@ class PeblarDataUpdateCoordinator(DataUpdateCoordinator[PeblarData]): update_interval=timedelta(seconds=10), ) + @_coordinator_exception_handler async def _async_update_data(self) -> PeblarData: """Fetch data from the Peblar device.""" - try: - return PeblarData( - ev=await self.api.ev_interface(), - meter=await self.api.meter(), - system=await self.api.system(), - ) - except PeblarError as err: - raise UpdateFailed(err) from err + return PeblarData( + ev=await self.api.ev_interface(), + meter=await self.api.meter(), + system=await self.api.system(), + ) class PeblarUserConfigurationDataUpdateCoordinator( @@ -134,9 +178,7 @@ class PeblarUserConfigurationDataUpdateCoordinator( update_interval=timedelta(minutes=5), ) + @_coordinator_exception_handler async def _async_update_data(self) -> PeblarUserConfiguration: """Fetch data from the Peblar device.""" - try: - return await self.peblar.user_configuration() - except PeblarError as err: - raise UpdateFailed(err) from err + return await self.peblar.user_configuration() diff --git a/tests/components/peblar/test_coordinator.py b/tests/components/peblar/test_coordinator.py new file mode 100644 index 00000000000..f438d807920 --- /dev/null +++ b/tests/components/peblar/test_coordinator.py @@ -0,0 +1,119 @@ +"""Tests for the Peblar coordinators.""" + +from datetime import timedelta +from unittest.mock import MagicMock + +from freezegun.api import FrozenDateTimeFactory +from peblar import PeblarAuthenticationError, PeblarConnectionError, PeblarError +import pytest + +from homeassistant.components.peblar.const import 
DOMAIN +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState +from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry, async_fire_time_changed + +pytestmark = [ + pytest.mark.parametrize("init_integration", [Platform.SENSOR], indirect=True), + pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration"), +] + + +@pytest.mark.parametrize( + ("error", "log_message"), + [ + ( + PeblarConnectionError("Could not connect"), + ( + "An error occurred while communicating with the Peblar device: " + "Could not connect" + ), + ), + ( + PeblarError("Unknown error"), + ( + "An unknown error occurred while communicating " + "with the Peblar device: Unknown error" + ), + ), + ], +) +async def test_coordinator_error_handler( + hass: HomeAssistant, + mock_peblar: MagicMock, + freezer: FrozenDateTimeFactory, + caplog: pytest.LogCaptureFixture, + error: Exception, + log_message: str, +) -> None: + """Test the coordinators.""" + entity_id = "sensor.peblar_ev_charger_power" + + # Ensure we are set up and the coordinator is working. + # Confirming this through a sensor entity, that is available. + assert (state := hass.states.get(entity_id)) + assert state.state != STATE_UNAVAILABLE + + # Mock an error in the coordinator. + mock_peblar.rest_api.return_value.meter.side_effect = error + freezer.tick(timedelta(seconds=15)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Ensure the sensor entity is now unavailable. + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE + + # Ensure the error is logged + assert log_message in caplog.text + + # Recover + mock_peblar.rest_api.return_value.meter.side_effect = None + freezer.tick(timedelta(seconds=15)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Ensure the sensor entity is now available. + assert (state := hass.states.get("sensor.peblar_ev_charger_power")) + assert state.state != STATE_UNAVAILABLE + + +async def test_coordinator_error_handler_authentication_error( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_peblar: MagicMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the coordinator error handler with an authentication error.""" + + # Ensure the sensor entity is now available. + assert (state := hass.states.get("sensor.peblar_ev_charger_power")) + assert state.state != STATE_UNAVAILABLE + + # Mock an authentication in the coordinator + mock_peblar.rest_api.return_value.meter.side_effect = PeblarAuthenticationError( + "Authentication error" + ) + mock_peblar.login.side_effect = PeblarAuthenticationError("Authentication error") + freezer.tick(timedelta(seconds=15)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Ensure the sensor entity is now unavailable. 
+ assert (state := hass.states.get("sensor.peblar_ev_charger_power")) + assert state.state == STATE_UNAVAILABLE + + # Ensure we have triggered a reauthentication flow + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow["step_id"] == "reauth_confirm" + assert flow["handler"] == DOMAIN + + assert "context" in flow + assert flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == mock_config_entry.entry_id From b2170ad73276e4844bcfa83f97d207e5c196a105 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Mon, 23 Dec 2024 11:23:26 +0100 Subject: [PATCH 1059/1198] Mark Peblar Rocksolid EV Chargers Platinum (#133823) --- homeassistant/components/peblar/manifest.json | 2 +- .../components/peblar/quality_scale.yaml | 18 +++++++++--------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/peblar/manifest.json b/homeassistant/components/peblar/manifest.json index 0e3a66dd256..ab5572e66d0 100644 --- a/homeassistant/components/peblar/manifest.json +++ b/homeassistant/components/peblar/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/peblar", "integration_type": "device", "iot_class": "local_polling", - "quality_scale": "bronze", + "quality_scale": "platinum", "requirements": ["peblar==0.3.0"], "zeroconf": [{ "type": "_http._tcp.local.", "name": "pblr-*" }] } diff --git a/homeassistant/components/peblar/quality_scale.yaml b/homeassistant/components/peblar/quality_scale.yaml index 9de0031373f..91f9bb7af55 100644 --- a/homeassistant/components/peblar/quality_scale.yaml +++ b/homeassistant/components/peblar/quality_scale.yaml @@ -28,7 +28,7 @@ rules: unique-config-entry: done # Silver - action-exceptions: todo + action-exceptions: done config-entry-unloading: done docs-configuration-parameters: status: exempt @@ -40,19 +40,19 @@ rules: log-when-unavailable: done parallel-updates: done reauthentication-flow: done - test-coverage: todo + test-coverage: done # Gold devices: done diagnostics: done discovery-update-info: done discovery: done - docs-data-update: todo - docs-examples: todo - docs-known-limitations: todo - docs-supported-devices: todo - docs-supported-functions: todo - docs-troubleshooting: todo - docs-use-cases: todo + docs-data-update: done + docs-examples: done + docs-known-limitations: done + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: done + docs-use-cases: done dynamic-devices: status: exempt comment: | From e3cf5c47b220476301463b77f9a86540b9c25efd Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 23 Dec 2024 11:28:20 +0100 Subject: [PATCH 1060/1198] Add compatibility code for deprecated WaterHeaterEntityEntityDescription (#133351) --- homeassistant/components/water_heater/__init__.py | 8 ++++++++ tests/components/water_heater/test_init.py | 14 ++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/homeassistant/components/water_heater/__init__.py b/homeassistant/components/water_heater/__init__.py index cac0a365f74..60be340a253 100644 --- a/homeassistant/components/water_heater/__init__.py +++ b/homeassistant/components/water_heater/__init__.py @@ -25,6 +25,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import config_validation as cv 
+from homeassistant.helpers.deprecation import deprecated_class from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.temperature import display_temp as show_temp @@ -133,6 +134,13 @@ class WaterHeaterEntityDescription(EntityDescription, frozen_or_thawed=True): """A class that describes water heater entities.""" +@deprecated_class("WaterHeaterEntityDescription", breaks_in_ha_version="2026.1") +class WaterHeaterEntityEntityDescription( + WaterHeaterEntityDescription, frozen_or_thawed=True +): + """A (deprecated) class that describes water heater entities.""" + + CACHED_PROPERTIES_WITH_ATTR_ = { "temperature_unit", "current_operation", diff --git a/tests/components/water_heater/test_init.py b/tests/components/water_heater/test_init.py index 78efd94ef8e..09a0a711582 100644 --- a/tests/components/water_heater/test_init.py +++ b/tests/components/water_heater/test_init.py @@ -13,6 +13,8 @@ from homeassistant.components.water_heater import ( SERVICE_SET_OPERATION_MODE, SET_TEMPERATURE_SCHEMA, WaterHeaterEntity, + WaterHeaterEntityDescription, + WaterHeaterEntityEntityDescription, WaterHeaterEntityFeature, ) from homeassistant.config_entries import ConfigEntry @@ -204,3 +206,15 @@ async def test_operation_mode_validation( ) await hass.async_block_till_done() water_heater_entity.set_operation_mode.assert_has_calls([mock.call("eco")]) + + +@pytest.mark.parametrize( + ("class_name", "expected_log"), + [(WaterHeaterEntityDescription, False), (WaterHeaterEntityEntityDescription, True)], +) +async def test_deprecated_entity_description( + caplog: pytest.LogCaptureFixture, class_name: type, expected_log: bool +) -> None: + """Test deprecated WaterHeaterEntityEntityDescription logs warning.""" + class_name(key="test") + assert ("is a deprecated class" in caplog.text) is expected_log From 939365887f7e845082793f79d865973f9b161b54 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Mon, 23 Dec 2024 11:35:37 +0100 Subject: [PATCH 1061/1198] Add coordinator to Twinkly (#133793) --- homeassistant/components/twinkly/__init__.py | 29 +-- .../components/twinkly/coordinator.py | 102 ++++++++ .../components/twinkly/diagnostics.py | 4 +- homeassistant/components/twinkly/light.py | 232 +++++------------- tests/components/twinkly/conftest.py | 8 +- .../twinkly/fixtures/get_saved_movies.json | 16 +- .../twinkly/snapshots/test_diagnostics.ambr | 4 +- .../twinkly/snapshots/test_light.ambr | 6 +- tests/components/twinkly/test_light.py | 28 +++ 9 files changed, 222 insertions(+), 207 deletions(-) create mode 100644 homeassistant/components/twinkly/coordinator.py diff --git a/homeassistant/components/twinkly/__init__.py b/homeassistant/components/twinkly/__init__.py index cd76a79e1d7..aaad731d264 100644 --- a/homeassistant/components/twinkly/__init__.py +++ b/homeassistant/components/twinkly/__init__.py @@ -1,8 +1,6 @@ """The twinkly component.""" -from dataclasses import dataclass import logging -from typing import Any from aiohttp import ClientError from ttls.client import Twinkly @@ -10,27 +8,18 @@ from ttls.client import Twinkly from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import 
ATTR_VERSION, DOMAIN +from .const import DOMAIN +from .coordinator import TwinklyCoordinator PLATFORMS = [Platform.LIGHT] _LOGGER = logging.getLogger(__name__) -@dataclass -class TwinklyData: - """Data for Twinkly integration.""" - - client: Twinkly - device_info: dict[str, Any] - sw_version: str | None - - -type TwinklyConfigEntry = ConfigEntry[TwinklyData] +type TwinklyConfigEntry = ConfigEntry[TwinklyCoordinator] async def async_setup_entry(hass: HomeAssistant, entry: TwinklyConfigEntry) -> bool: @@ -41,15 +30,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: TwinklyConfigEntry) -> b client = Twinkly(host, async_get_clientsession(hass)) - try: - device_info = await client.get_details() - software_version = await client.get_firmware_version() - except (TimeoutError, ClientError) as exception: - raise ConfigEntryNotReady from exception + coordinator = TwinklyCoordinator(hass, client) - entry.runtime_data = TwinklyData( - client, device_info, software_version.get(ATTR_VERSION) - ) + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/twinkly/coordinator.py b/homeassistant/components/twinkly/coordinator.py new file mode 100644 index 00000000000..8a5e3e087ae --- /dev/null +++ b/homeassistant/components/twinkly/coordinator.py @@ -0,0 +1,102 @@ +"""Coordinator for Twinkly.""" + +from dataclasses import dataclass +from datetime import timedelta +import logging +from typing import Any + +from aiohttp import ClientError +from awesomeversion import AwesomeVersion +from ttls.client import Twinkly, TwinklyError + +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DEV_NAME, DOMAIN, MIN_EFFECT_VERSION + +_LOGGER = logging.getLogger(__name__) + + +@dataclass +class TwinklyData: + """Class for Twinkly data.""" + + device_info: dict[str, Any] + brightness: int + is_on: bool + movies: dict[int, str] + current_movie: int | None + + +class TwinklyCoordinator(DataUpdateCoordinator[TwinklyData]): + """Class to manage fetching Twinkly data from API.""" + + software_version: str + supports_effects: bool + device_name: str + + def __init__(self, hass: HomeAssistant, client: Twinkly) -> None: + """Initialize global Twinkly data updater.""" + super().__init__( + hass, + _LOGGER, + name=DOMAIN, + update_interval=timedelta(seconds=30), + ) + self.client = client + + async def _async_setup(self) -> None: + """Set up the Twinkly data.""" + try: + software_version = await self.client.get_firmware_version() + self.device_name = (await self.client.get_details())[DEV_NAME] + except (TimeoutError, ClientError) as exception: + raise UpdateFailed from exception + self.software_version = software_version["version"] + self.supports_effects = AwesomeVersion(self.software_version) >= AwesomeVersion( + MIN_EFFECT_VERSION + ) + + async def _async_update_data(self) -> TwinklyData: + """Fetch data from Twinkly.""" + movies: list[dict[str, Any]] = [] + current_movie: dict[str, Any] = {} + try: + device_info = await self.client.get_details() + brightness = await self.client.get_brightness() + is_on = await self.client.is_on() + if self.supports_effects: + movies = (await self.client.get_saved_movies())["movies"] + except (TimeoutError, ClientError) as exception: + raise UpdateFailed from exception + if 
self.supports_effects: + try: + current_movie = await self.client.get_current_movie() + except (TwinklyError, TimeoutError, ClientError) as exception: + _LOGGER.debug("Error fetching current movie: %s", exception) + brightness = ( + int(brightness["value"]) if brightness["mode"] == "enabled" else 100 + ) + brightness = int(round(brightness * 2.55)) if is_on else 0 + if self.device_name != device_info[DEV_NAME]: + self._async_update_device_info(device_info[DEV_NAME]) + return TwinklyData( + device_info, + brightness, + is_on, + {movie["id"]: movie["name"] for movie in movies}, + current_movie.get("id"), + ) + + def _async_update_device_info(self, name: str) -> None: + """Update the device info.""" + device_registry = dr.async_get(self.hass) + device = device_registry.async_get_device( + identifiers={(DOMAIN, self.data.device_info["mac"])}, + ) + if device: + device_registry.async_update_device( + device.id, + name=name, + ) diff --git a/homeassistant/components/twinkly/diagnostics.py b/homeassistant/components/twinkly/diagnostics.py index 9ddc65cf255..d732ce14929 100644 --- a/homeassistant/components/twinkly/diagnostics.py +++ b/homeassistant/components/twinkly/diagnostics.py @@ -34,8 +34,8 @@ async def async_get_config_entry_diagnostics( return async_redact_data( { "entry": entry.as_dict(), - "device_info": entry.runtime_data.device_info, - ATTR_SW_VERSION: entry.runtime_data.sw_version, + "device_info": entry.runtime_data.data.device_info, + ATTR_SW_VERSION: entry.runtime_data.software_version, "attributes": attributes, }, TO_REDACT, diff --git a/homeassistant/components/twinkly/light.py b/homeassistant/components/twinkly/light.py index 7de07db3b30..1dfd6c1df30 100644 --- a/homeassistant/components/twinkly/light.py +++ b/homeassistant/components/twinkly/light.py @@ -5,9 +5,6 @@ from __future__ import annotations import logging from typing import Any -from aiohttp import ClientError -from awesomeversion import AwesomeVersion - from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_EFFECT, @@ -17,13 +14,12 @@ from homeassistant.components.light import ( LightEntity, LightEntityFeature, ) -from homeassistant.const import CONF_HOST, CONF_ID, CONF_MODEL, CONF_NAME from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . import TwinklyConfigEntry +from . 
import TwinklyConfigEntry, TwinklyCoordinator from .const import ( DEV_LED_PROFILE, DEV_MODEL, @@ -31,7 +27,6 @@ from .const import ( DEV_PROFILE_RGB, DEV_PROFILE_RGBW, DOMAIN, - MIN_EFFECT_VERSION, ) _LOGGER = logging.getLogger(__name__) @@ -43,26 +38,23 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Setups an entity from a config entry (UI config flow).""" - entity = TwinklyLight(config_entry) + entity = TwinklyLight(config_entry.runtime_data) async_add_entities([entity], update_before_add=True) -class TwinklyLight(LightEntity): +class TwinklyLight(CoordinatorEntity[TwinklyCoordinator], LightEntity): """Implementation of the light for the Twinkly service.""" _attr_has_entity_name = True _attr_name = None _attr_translation_key = "light" - def __init__( - self, - entry: TwinklyConfigEntry, - ) -> None: + def __init__(self, coordinator: TwinklyCoordinator) -> None: """Initialize a TwinklyLight entity.""" - device_info = entry.runtime_data.device_info - self._attr_unique_id: str = device_info["mac"] - self._conf = entry + super().__init__(coordinator) + device_info = coordinator.data.device_info + self._attr_unique_id = mac = device_info["mac"] if device_info.get(DEV_LED_PROFILE) == DEV_PROFILE_RGBW: self._attr_supported_color_modes = {ColorMode.RGBW} @@ -75,66 +67,35 @@ class TwinklyLight(LightEntity): else: self._attr_supported_color_modes = {ColorMode.BRIGHTNESS} self._attr_color_mode = ColorMode.BRIGHTNESS - - # Those are saved in the config entry in order to have meaningful values even - # if the device is currently offline. - # They are expected to be updated using the device_info. - self._name = entry.data[CONF_NAME] or "Twinkly light" - self._model = entry.data[CONF_MODEL] - self._mac = device_info["mac"] - - self._client = entry.runtime_data.client - - # Set default state before any update - self._attr_is_on = False - self._attr_available = False - self._current_movie: dict[Any, Any] = {} - self._movies: list[Any] = [] - self._software_version = entry.runtime_data.sw_version - # We guess that most devices are "new" and support effects - self._attr_supported_features = LightEntityFeature.EFFECT - - @property - def device_info(self) -> DeviceInfo | None: - """Get device specific attributes.""" - return DeviceInfo( - identifiers={(DOMAIN, self._mac)}, - connections={(CONNECTION_NETWORK_MAC, self._mac)}, + self.client = coordinator.client + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, mac)}, + connections={(CONNECTION_NETWORK_MAC, mac)}, manufacturer="LEDWORKS", - model=self._model, - name=self._name, - sw_version=self._software_version, + model=device_info[DEV_MODEL], + name=device_info[DEV_NAME], + sw_version=coordinator.software_version, ) + if coordinator.supports_effects: + self._attr_supported_features = LightEntityFeature.EFFECT + self._update_attr() @property def effect(self) -> str | None: """Return the current effect.""" - if "name" in self._current_movie: - return f"{self._current_movie['id']} {self._current_movie['name']}" + if (current_movie_id := self.coordinator.data.current_movie) is not None: + return ( + f"{current_movie_id} {self.coordinator.data.movies[current_movie_id]}" + ) return None @property def effect_list(self) -> list[str]: """Return the list of saved effects.""" - return [f"{movie['id']} {movie['name']}" for movie in self._movies] - - async def async_added_to_hass(self) -> None: - """Device is added to hass.""" - if self._software_version: - if AwesomeVersion(self._software_version) < AwesomeVersion( - 
MIN_EFFECT_VERSION - ): - self._attr_supported_features = ( - self.supported_features & ~LightEntityFeature.EFFECT - ) - device_registry = dr.async_get(self.hass) - device_entry = device_registry.async_get_device( - {(DOMAIN, self._attr_unique_id)}, set() - ) - if device_entry: - device_registry.async_update_device( - device_entry.id, sw_version=self._software_version - ) + return [ + f"{identifier} {name}" + for identifier, name in self.coordinator.data.movies.items() + ] async def async_turn_on(self, **kwargs: Any) -> None: """Turn device on.""" @@ -144,29 +105,29 @@ class TwinklyLight(LightEntity): # If brightness is 0, the twinkly will only "disable" the brightness, # which means that it will be 100%. if brightness == 0: - await self._client.turn_off() + await self.client.turn_off() return - await self._client.set_brightness(brightness) + await self.client.set_brightness(brightness) if ( ATTR_RGBW_COLOR in kwargs and kwargs[ATTR_RGBW_COLOR] != self._attr_rgbw_color ): - await self._client.interview() + await self.client.interview() if LightEntityFeature.EFFECT & self.supported_features: # Static color only supports rgb - await self._client.set_static_colour( + await self.client.set_static_colour( ( kwargs[ATTR_RGBW_COLOR][0], kwargs[ATTR_RGBW_COLOR][1], kwargs[ATTR_RGBW_COLOR][2], ) ) - await self._client.set_mode("color") - self._client.default_mode = "color" + await self.client.set_mode("color") + self.client.default_mode = "color" else: - await self._client.set_cycle_colours( + await self.client.set_cycle_colours( ( kwargs[ATTR_RGBW_COLOR][3], kwargs[ATTR_RGBW_COLOR][0], @@ -174,20 +135,20 @@ class TwinklyLight(LightEntity): kwargs[ATTR_RGBW_COLOR][2], ) ) - await self._client.set_mode("movie") - self._client.default_mode = "movie" + await self.client.set_mode("movie") + self.client.default_mode = "movie" self._attr_rgbw_color = kwargs[ATTR_RGBW_COLOR] if ATTR_RGB_COLOR in kwargs and kwargs[ATTR_RGB_COLOR] != self._attr_rgb_color: - await self._client.interview() + await self.client.interview() if LightEntityFeature.EFFECT & self.supported_features: - await self._client.set_static_colour(kwargs[ATTR_RGB_COLOR]) - await self._client.set_mode("color") - self._client.default_mode = "color" + await self.client.set_static_colour(kwargs[ATTR_RGB_COLOR]) + await self.client.set_mode("color") + self.client.default_mode = "color" else: - await self._client.set_cycle_colours(kwargs[ATTR_RGB_COLOR]) - await self._client.set_mode("movie") - self._client.default_mode = "movie" + await self.client.set_cycle_colours(kwargs[ATTR_RGB_COLOR]) + await self.client.set_mode("movie") + self.client.default_mode = "movie" self._attr_rgb_color = kwargs[ATTR_RGB_COLOR] @@ -196,100 +157,29 @@ class TwinklyLight(LightEntity): and LightEntityFeature.EFFECT & self.supported_features ): movie_id = kwargs[ATTR_EFFECT].split(" ")[0] - if "id" not in self._current_movie or int(movie_id) != int( - self._current_movie["id"] + if ( + self.coordinator.data.current_movie is None + or int(movie_id) != self.coordinator.data.current_movie ): - await self._client.interview() - await self._client.set_current_movie(int(movie_id)) - await self._client.set_mode("movie") - self._client.default_mode = "movie" + await self.client.interview() + await self.client.set_current_movie(int(movie_id)) + await self.client.set_mode("movie") + self.client.default_mode = "movie" if not self._attr_is_on: - await self._client.turn_on() + await self.client.turn_on() + await self.coordinator.async_refresh() async def async_turn_off(self, **kwargs: 
Any) -> None: """Turn device off.""" - await self._client.turn_off() + await self.client.turn_off() + await self.coordinator.async_refresh() - async def async_update(self) -> None: - """Asynchronously updates the device properties.""" - _LOGGER.debug("Updating '%s'", self._client.host) + def _update_attr(self) -> None: + """Update the entity attributes.""" + self._attr_is_on = self.coordinator.data.is_on + self._attr_brightness = self.coordinator.data.brightness - try: - self._attr_is_on = await self._client.is_on() - - brightness = await self._client.get_brightness() - brightness_value = ( - int(brightness["value"]) if brightness["mode"] == "enabled" else 100 - ) - - self._attr_brightness = ( - int(round(brightness_value * 2.55)) if self._attr_is_on else 0 - ) - - device_info = await self._client.get_details() - - if ( - DEV_NAME in device_info - and DEV_MODEL in device_info - and ( - device_info[DEV_NAME] != self._name - or device_info[DEV_MODEL] != self._model - ) - ): - self._name = device_info[DEV_NAME] - self._model = device_info[DEV_MODEL] - - # If the name has changed, persist it in conf entry, - # so we will be able to restore this new name if hass - # is started while the LED string is offline. - self.hass.config_entries.async_update_entry( - self._conf, - data={ - CONF_HOST: self._client.host, # this cannot change - CONF_ID: self._attr_unique_id, # this cannot change - CONF_NAME: self._name, - CONF_MODEL: self._model, - }, - ) - - device_registry = dr.async_get(self.hass) - device_entry = device_registry.async_get_device( - {(DOMAIN, self._attr_unique_id)} - ) - if device_entry: - device_registry.async_update_device( - device_entry.id, name=self._name, model=self._model - ) - - if LightEntityFeature.EFFECT & self.supported_features: - await self.async_update_movies() - await self.async_update_current_movie() - - if not self._attr_available: - _LOGGER.warning("Twinkly '%s' is now available", self._client.host) - - # We don't use the echo API to track the availability since - # we already have to pull the device to get its state. 
- self._attr_available = True - except (TimeoutError, ClientError): - # We log this as "info" as it's pretty common that the Christmas - # light are not reachable in July - if self._attr_available: - _LOGGER.warning( - "Twinkly '%s' is not reachable (client error)", self._client.host - ) - self._attr_available = False - - async def async_update_movies(self) -> None: - """Update the list of movies (effects).""" - movies = await self._client.get_saved_movies() - _LOGGER.debug("Movies: %s", movies) - if movies and "movies" in movies: - self._movies = movies["movies"] - - async def async_update_current_movie(self) -> None: - """Update the current active movie.""" - current_movie = await self._client.get_current_movie() - _LOGGER.debug("Current movie: %s", current_movie) - if current_movie and "id" in current_movie: - self._current_movie = current_movie + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self._update_attr() + super()._handle_coordinator_update() diff --git a/tests/components/twinkly/conftest.py b/tests/components/twinkly/conftest.py index 6b32c786c99..c66be97a257 100644 --- a/tests/components/twinkly/conftest.py +++ b/tests/components/twinkly/conftest.py @@ -10,11 +10,7 @@ from homeassistant.const import CONF_HOST, CONF_ID, CONF_MODEL, CONF_NAME from .const import TEST_MAC, TEST_MODEL, TEST_NAME -from tests.common import ( - MockConfigEntry, - load_json_array_fixture, - load_json_object_fixture, -) +from tests.common import MockConfigEntry, load_json_object_fixture @pytest.fixture @@ -55,7 +51,7 @@ def mock_twinkly_client() -> Generator[AsyncMock]: client.get_firmware_version.return_value = load_json_object_fixture( "get_firmware_version.json", DOMAIN ) - client.get_saved_movies.return_value = load_json_array_fixture( + client.get_saved_movies.return_value = load_json_object_fixture( "get_saved_movies.json", DOMAIN ) client.get_current_movie.return_value = load_json_object_fixture( diff --git a/tests/components/twinkly/fixtures/get_saved_movies.json b/tests/components/twinkly/fixtures/get_saved_movies.json index 0ee21f3254d..0fa7696d3df 100644 --- a/tests/components/twinkly/fixtures/get_saved_movies.json +++ b/tests/components/twinkly/fixtures/get_saved_movies.json @@ -1,4 +1,12 @@ -[ - { "id": 1, "name": "Rainbow" }, - { "id": 2, "name": "Flare" } -] +{ + "movies": [ + { + "id": 1, + "name": "Rainbow" + }, + { + "id": 2, + "name": "Flare" + } + ] +} diff --git a/tests/components/twinkly/snapshots/test_diagnostics.ambr b/tests/components/twinkly/snapshots/test_diagnostics.ambr index e9c89754ab7..814dc7dfc1f 100644 --- a/tests/components/twinkly/snapshots/test_diagnostics.ambr +++ b/tests/components/twinkly/snapshots/test_diagnostics.ambr @@ -4,8 +4,10 @@ 'attributes': dict({ 'brightness': 26, 'color_mode': 'rgb', - 'effect': None, + 'effect': '1 Rainbow', 'effect_list': list([ + '1 Rainbow', + '2 Flare', ]), 'friendly_name': 'Tree 1', 'hs_color': list([ diff --git a/tests/components/twinkly/snapshots/test_light.ambr b/tests/components/twinkly/snapshots/test_light.ambr index ac4e275a0a1..a97c3f941ff 100644 --- a/tests/components/twinkly/snapshots/test_light.ambr +++ b/tests/components/twinkly/snapshots/test_light.ambr @@ -6,6 +6,8 @@ 'area_id': None, 'capabilities': dict({ 'effect_list': list([ + '1 Rainbow', + '2 Flare', ]), 'supported_color_modes': list([ , @@ -43,8 +45,10 @@ 'attributes': ReadOnlyDict({ 'brightness': 26, 'color_mode': , - 'effect': None, + 'effect': '1 Rainbow', 'effect_list': list([ + '1 Rainbow', + '2 Flare', 
]), 'friendly_name': 'Tree 1', 'hs_color': tuple( diff --git a/tests/components/twinkly/test_light.py b/tests/components/twinkly/test_light.py index c008ab51ef7..acf30764bab 100644 --- a/tests/components/twinkly/test_light.py +++ b/tests/components/twinkly/test_light.py @@ -9,6 +9,7 @@ from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion +from ttls.client import TwinklyError from homeassistant.components.light import ( ATTR_BRIGHTNESS, @@ -25,6 +26,7 @@ from homeassistant.const import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, + STATE_UNAVAILABLE, Platform, ) from homeassistant.core import HomeAssistant @@ -278,6 +280,28 @@ async def test_turn_off( mock_twinkly_client.turn_off.assert_called_once_with() +async def test_no_current_movie( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_twinkly_client: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test handling of missing current movie data.""" + + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("light.tree_1").attributes[ATTR_EFFECT] == "1 Rainbow" + + mock_twinkly_client.get_current_movie.side_effect = TwinklyError + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("light.tree_1").state != STATE_UNAVAILABLE + assert hass.states.get("light.tree_1").attributes[ATTR_EFFECT] is None + + async def test_update_name( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -294,6 +318,10 @@ async def test_update_name( await setup_integration(hass, mock_config_entry) + dev_entry = device_registry.async_get_device({(DOMAIN, TEST_MAC)}) + + assert dev_entry.name == "Tree 1" + mock_twinkly_client.get_details.return_value["device_name"] = "new_device_name" freezer.tick(timedelta(seconds=30)) From cf3d4eb26af9429216e972d37d829845fca6a6ee Mon Sep 17 00:00:00 2001 From: Omni Flux Date: Mon, 23 Dec 2024 05:35:59 -0500 Subject: [PATCH 1062/1198] Respect ESPHome ClimateTrait supports_current_temperature (#132149) --- homeassistant/components/esphome/climate.py | 2 ++ tests/components/esphome/test_climate.py | 33 +++++++++++++++++++++ 2 files changed, 35 insertions(+) diff --git a/homeassistant/components/esphome/climate.py b/homeassistant/components/esphome/climate.py index 8089fc4712a..478ce9bae2c 100644 --- a/homeassistant/components/esphome/climate.py +++ b/homeassistant/components/esphome/climate.py @@ -230,6 +230,8 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti @esphome_float_state_property def current_temperature(self) -> float | None: """Return the current temperature.""" + if not self._static_info.supports_current_temperature: + return None return self._state.current_temperature @property diff --git a/tests/components/esphome/test_climate.py b/tests/components/esphome/test_climate.py index 189b86fc5fd..2a5013444dd 100644 --- a/tests/components/esphome/test_climate.py +++ b/tests/components/esphome/test_climate.py @@ -484,3 +484,36 @@ async def test_climate_entity_attributes( assert state is not None assert state.state == HVACMode.COOL assert state.attributes == snapshot(name="climate-entity-attributes") + + +async def test_climate_entity_attribute_current_temperature_unsupported( + hass: HomeAssistant, + mock_client: APIClient, + mock_generic_device_entry, +) -> None: + """Test a climate entity with current temperature unsupported.""" + entity_info = [ + ClimateInfo( + 
object_id="myclimate", + key=1, + name="my climate", + unique_id="my_climate", + supports_current_temperature=False, + ) + ] + states = [ + ClimateState( + key=1, + current_temperature=30, + ) + ] + user_service = [] + await mock_generic_device_entry( + mock_client=mock_client, + entity_info=entity_info, + user_service=user_service, + states=states, + ) + state = hass.states.get("climate.test_myclimate") + assert state is not None + assert state.attributes[ATTR_CURRENT_TEMPERATURE] is None From c5fe25a001a4ac768ac7a2f8e430ed3e635d8d45 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Mon, 23 Dec 2024 12:05:29 +0100 Subject: [PATCH 1063/1198] Bump deebot-client to 10.0.1 (#133634) --- homeassistant/components/ecovacs/manifest.json | 2 +- homeassistant/components/ecovacs/number.py | 2 +- homeassistant/components/ecovacs/select.py | 4 ++-- homeassistant/components/ecovacs/switch.py | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 6 files changed, 7 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index 271f9ee8dcd..3a2d4e7704b 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.10", "deebot-client==9.4.0"] + "requirements": ["py-sucks==0.9.10", "deebot-client==10.0.1"] } diff --git a/homeassistant/components/ecovacs/number.py b/homeassistant/components/ecovacs/number.py index 2b9bdc1a425..adf282560a9 100644 --- a/homeassistant/components/ecovacs/number.py +++ b/homeassistant/components/ecovacs/number.py @@ -95,7 +95,7 @@ async def async_setup_entry( class EcovacsNumberEntity( - EcovacsDescriptionEntity[CapabilitySet[EventT, int]], + EcovacsDescriptionEntity[CapabilitySet[EventT, [int]]], NumberEntity, ): """Ecovacs number entity.""" diff --git a/homeassistant/components/ecovacs/select.py b/homeassistant/components/ecovacs/select.py index c8b01a0f83a..3c3852f05ec 100644 --- a/homeassistant/components/ecovacs/select.py +++ b/homeassistant/components/ecovacs/select.py @@ -66,7 +66,7 @@ async def async_setup_entry( class EcovacsSelectEntity( - EcovacsDescriptionEntity[CapabilitySetTypes[EventT, str]], + EcovacsDescriptionEntity[CapabilitySetTypes[EventT, [str], str]], SelectEntity, ): """Ecovacs select entity.""" @@ -77,7 +77,7 @@ class EcovacsSelectEntity( def __init__( self, device: Device, - capability: CapabilitySetTypes[EventT, str], + capability: CapabilitySetTypes[EventT, [str], str], entity_description: EcovacsSelectEntityDescription, **kwargs: Any, ) -> None: diff --git a/homeassistant/components/ecovacs/switch.py b/homeassistant/components/ecovacs/switch.py index 872981b5c28..288d092d391 100644 --- a/homeassistant/components/ecovacs/switch.py +++ b/homeassistant/components/ecovacs/switch.py @@ -131,7 +131,7 @@ class EcovacsSwitchEntity( await super().async_added_to_hass() async def on_event(event: EnableEvent) -> None: - self._attr_is_on = event.enable + self._attr_is_on = event.enabled self.async_write_ha_state() self._subscribe(self._capability.event, on_event) diff --git a/requirements_all.txt b/requirements_all.txt index 661571b2cb9..965b5673961 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -738,7 +738,7 @@ debugpy==1.8.11 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==9.4.0 
+deebot-client==10.0.1 # homeassistant.components.ihc # homeassistant.components.namecheapdns diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 9ff8ca7c990..b59be622158 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -628,7 +628,7 @@ dbus-fast==2.24.3 debugpy==1.8.11 # homeassistant.components.ecovacs -deebot-client==9.4.0 +deebot-client==10.0.1 # homeassistant.components.ihc # homeassistant.components.namecheapdns From 386a7223931e5090c31a69c628163a8d9abf937d Mon Sep 17 00:00:00 2001 From: Simon <80467011+sorgfresser@users.noreply.github.com> Date: Mon, 23 Dec 2024 11:05:31 +0000 Subject: [PATCH 1064/1198] ElevenLabs invalid api key config flow testing (#133822) --- .../components/elevenlabs/quality_scale.yaml | 6 +----- tests/components/elevenlabs/conftest.py | 15 +++++++++----- .../components/elevenlabs/test_config_flow.py | 20 ++++++++++++++++++- 3 files changed, 30 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/elevenlabs/quality_scale.yaml b/homeassistant/components/elevenlabs/quality_scale.yaml index 49f0d7518f5..ecd2092492c 100644 --- a/homeassistant/components/elevenlabs/quality_scale.yaml +++ b/homeassistant/components/elevenlabs/quality_scale.yaml @@ -7,11 +7,7 @@ rules: appropriate-polling: done brands: done common-modules: done - config-flow-test-coverage: - status: todo - comment: > - We should have every test end in either ABORT or CREATE_ENTRY. - test_invalid_api_key should assert the kind of error that is raised. + config-flow-test-coverage: done config-flow: done dependency-transparency: done docs-actions: done diff --git a/tests/components/elevenlabs/conftest.py b/tests/components/elevenlabs/conftest.py index c9ed49ba13c..d410f8bccdd 100644 --- a/tests/components/elevenlabs/conftest.py +++ b/tests/components/elevenlabs/conftest.py @@ -24,14 +24,19 @@ def mock_setup_entry() -> Generator[AsyncMock]: yield mock_setup_entry -@pytest.fixture -def mock_async_client() -> Generator[AsyncMock]: - """Override async ElevenLabs client.""" +def _client_mock(): client_mock = AsyncMock() client_mock.voices.get_all.return_value = GetVoicesResponse(voices=MOCK_VOICES) client_mock.models.get_all.return_value = MOCK_MODELS + return client_mock + + +@pytest.fixture +def mock_async_client() -> Generator[AsyncMock]: + """Override async ElevenLabs client.""" with patch( - "elevenlabs.AsyncElevenLabs", return_value=client_mock + "homeassistant.components.elevenlabs.config_flow.AsyncElevenLabs", + return_value=_client_mock(), ) as mock_async_client: yield mock_async_client @@ -41,7 +46,7 @@ def mock_async_client_fail() -> Generator[AsyncMock]: """Override async ElevenLabs client.""" with patch( "homeassistant.components.elevenlabs.config_flow.AsyncElevenLabs", - return_value=AsyncMock(), + return_value=_client_mock(), ) as mock_async_client: mock_async_client.side_effect = ApiError yield mock_async_client diff --git a/tests/components/elevenlabs/test_config_flow.py b/tests/components/elevenlabs/test_config_flow.py index 971fa75939a..95e7ab5214e 100644 --- a/tests/components/elevenlabs/test_config_flow.py +++ b/tests/components/elevenlabs/test_config_flow.py @@ -73,10 +73,28 @@ async def test_invalid_api_key( }, ) assert result["type"] is FlowResultType.FORM - assert result["errors"] + assert result["errors"] == {"base": "invalid_api_key"} mock_setup_entry.assert_not_called() + # Reset the side effect + mock_async_client_fail.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + 
{ + CONF_API_KEY: "api_key", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "ElevenLabs" + assert result["data"] == { + "api_key": "api_key", + } + assert result["options"] == {CONF_MODEL: DEFAULT_MODEL, CONF_VOICE: "voice1"} + + mock_setup_entry.assert_called_once() + async def test_options_flow_init( hass: HomeAssistant, From 7f6a77ad2f659fe463bcdb0dd6aea15d664f502a Mon Sep 17 00:00:00 2001 From: "Steven B." <51370195+sdb9696@users.noreply.github.com> Date: Mon, 23 Dec 2024 13:53:17 +0000 Subject: [PATCH 1065/1198] Fix tplink camera entity unique id (#133880) --- homeassistant/components/tplink/camera.py | 2 +- .../tplink/snapshots/test_camera.ambr | 2 +- tests/components/tplink/test_camera.py | 28 +++++++++++++++++++ 3 files changed, 30 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/tplink/camera.py b/homeassistant/components/tplink/camera.py index 5ed279909d6..8aa49cf3c93 100644 --- a/homeassistant/components/tplink/camera.py +++ b/homeassistant/components/tplink/camera.py @@ -108,7 +108,7 @@ class TPLinkCameraEntity(CoordinatedTPLinkEntity, Camera): def _get_unique_id(self) -> str: """Return unique ID for the entity.""" - return f"{legacy_device_id(self._device)}-{self.entity_description}" + return f"{legacy_device_id(self._device)}-{self.entity_description.key}" @callback def _async_update_attrs(self) -> None: diff --git a/tests/components/tplink/snapshots/test_camera.ambr b/tests/components/tplink/snapshots/test_camera.ambr index 4ce1813d704..4417395078a 100644 --- a/tests/components/tplink/snapshots/test_camera.ambr +++ b/tests/components/tplink/snapshots/test_camera.ambr @@ -28,7 +28,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'live_view', - 'unique_id': "123456789ABCDEFGH-TPLinkCameraEntityDescription(key='live_view', device_class=None, entity_category=None, entity_registry_enabled_default=True, entity_registry_visible_default=True, force_update=False, icon=None, has_entity_name=False, name=, translation_key='live_view', translation_placeholders=None, unit_of_measurement=None, deprecated_info=None)", + 'unique_id': '123456789ABCDEFGH-live_view', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/tplink/test_camera.py b/tests/components/tplink/test_camera.py index d8b0f82e32a..8ca56a84b6b 100644 --- a/tests/components/tplink/test_camera.py +++ b/tests/components/tplink/test_camera.py @@ -24,6 +24,7 @@ from homeassistant.core import HomeAssistant, HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er from . 
import ( + DEVICE_ID, IP_ADDRESS3, MAC_ADDRESS3, SMALLEST_VALID_JPEG_BYTES, @@ -68,6 +69,33 @@ async def test_states( ) +async def test_camera_unique_id( + hass: HomeAssistant, + mock_camera_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test camera unique id.""" + mock_device = _mocked_device( + modules=[Module.Camera], + alias="my_camera", + ip_address=IP_ADDRESS3, + mac=MAC_ADDRESS3, + device_id=DEVICE_ID, + ) + + await setup_platform_for_device( + hass, mock_camera_config_entry, Platform.CAMERA, mock_device + ) + + device_entries = dr.async_entries_for_config_entry( + device_registry, mock_camera_config_entry.entry_id + ) + assert device_entries + entity_id = "camera.my_camera_live_view" + entity_registry = er.async_get(hass) + assert entity_registry.async_get(entity_id).unique_id == f"{DEVICE_ID}-live_view" + + async def test_handle_mjpeg_stream( hass: HomeAssistant, mock_camera_config_entry: MockConfigEntry, From 70648da8fd8b9327e4f88d66e47a3d7609c0ca10 Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Mon, 23 Dec 2024 15:05:45 +0100 Subject: [PATCH 1066/1198] Improve firmware update required issue (#133878) --- homeassistant/components/reolink/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/reolink/strings.json b/homeassistant/components/reolink/strings.json index 53152131bdb..283c1d42e89 100644 --- a/homeassistant/components/reolink/strings.json +++ b/homeassistant/components/reolink/strings.json @@ -115,7 +115,7 @@ }, "firmware_update": { "title": "Reolink firmware update required", - "description": "\"{name}\" with model \"{model}\" and hardware version \"{hw_version}\" is running a old firmware version \"{current_firmware}\", while at least firmware version \"{required_firmware}\" is required for proper operation of the Reolink integration. The latest firmware can be downloaded from the [Reolink download center]({download_link})." + "description": "\"{name}\" with model \"{model}\" and hardware version \"{hw_version}\" is running a old firmware version \"{current_firmware}\", while at least firmware version \"{required_firmware}\" is required for proper operation of the Reolink integration. The firmware can be updated by pressing \"install\" in the more info dialog of the update entity of \"{name}\" from within Home Assistant. Alternatively, the latest firmware can be downloaded from the [Reolink download center]({download_link})." 
}, "hdr_switch_deprecated": { "title": "Reolink HDR switch deprecated", From 43a420cf0130b5126b11afe84365d04715085d32 Mon Sep 17 00:00:00 2001 From: "Glenn Vandeuren (aka Iondependent)" Date: Mon, 23 Dec 2024 15:47:09 +0100 Subject: [PATCH 1067/1198] Add cover to the niko_home_control integration (#133801) Co-authored-by: Joost Lekkerkerker --- .../components/niko_home_control/__init__.py | 2 +- .../components/niko_home_control/cover.py | 54 +++++++ .../components/niko_home_control/__init__.py | 15 ++ .../components/niko_home_control/conftest.py | 16 +- .../snapshots/test_cover.ambr | 48 ++++++ .../niko_home_control/test_cover.py | 138 ++++++++++++++++++ .../niko_home_control/test_light.py | 10 +- 7 files changed, 275 insertions(+), 8 deletions(-) create mode 100644 homeassistant/components/niko_home_control/cover.py create mode 100644 tests/components/niko_home_control/snapshots/test_cover.ambr create mode 100644 tests/components/niko_home_control/test_cover.py diff --git a/homeassistant/components/niko_home_control/__init__.py b/homeassistant/components/niko_home_control/__init__.py index 0bc1b117a70..ae4e8986816 100644 --- a/homeassistant/components/niko_home_control/__init__.py +++ b/homeassistant/components/niko_home_control/__init__.py @@ -13,7 +13,7 @@ from homeassistant.helpers import entity_registry as er from .const import _LOGGER -PLATFORMS: list[Platform] = [Platform.LIGHT] +PLATFORMS: list[Platform] = [Platform.COVER, Platform.LIGHT] type NikoHomeControlConfigEntry = ConfigEntry[NHCController] diff --git a/homeassistant/components/niko_home_control/cover.py b/homeassistant/components/niko_home_control/cover.py new file mode 100644 index 00000000000..51e2a8a702d --- /dev/null +++ b/homeassistant/components/niko_home_control/cover.py @@ -0,0 +1,54 @@ +"""Cover Platform for Niko Home Control.""" + +from __future__ import annotations + +from typing import Any + +from nhc.cover import NHCCover + +from homeassistant.components.cover import CoverEntity, CoverEntityFeature +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import NikoHomeControlConfigEntry +from .entity import NikoHomeControlEntity + + +async def async_setup_entry( + hass: HomeAssistant, + entry: NikoHomeControlConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Niko Home Control cover entry.""" + controller = entry.runtime_data + + async_add_entities( + NikoHomeControlCover(cover, controller, entry.entry_id) + for cover in controller.covers + ) + + +class NikoHomeControlCover(NikoHomeControlEntity, CoverEntity): + """Representation of a Niko Cover.""" + + _attr_name = None + _attr_supported_features: CoverEntityFeature = ( + CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE | CoverEntityFeature.STOP + ) + _action: NHCCover + + def open_cover(self, **kwargs: Any) -> None: + """Open the cover.""" + self._action.open() + + def close_cover(self, **kwargs: Any) -> None: + """Close the cover.""" + self._action.close() + + def stop_cover(self, **kwargs: Any) -> None: + """Stop the cover.""" + self._action.stop() + + def update_state(self): + """Update HA state.""" + self._attr_is_closed = self._action.state == 0 diff --git a/tests/components/niko_home_control/__init__.py b/tests/components/niko_home_control/__init__.py index f6e8187bf0f..0182a24ba7c 100644 --- a/tests/components/niko_home_control/__init__.py +++ b/tests/components/niko_home_control/__init__.py @@ -1,5 +1,10 @@ """Tests for the niko_home_control integration.""" +from collections.abc import Awaitable, Callable +from unittest.mock import AsyncMock + +import pytest + from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -11,3 +16,13 @@ async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() + + +def find_update_callback( + mock: AsyncMock, identifier: int +) -> Callable[[int], Awaitable[None]]: + """Find the update callback for a specific identifier.""" + for call in mock.register_callback.call_args_list: + if call[0][0] == identifier: + return call[0][1] + pytest.fail(f"Callback for identifier {identifier} not found") diff --git a/tests/components/niko_home_control/conftest.py b/tests/components/niko_home_control/conftest.py index b3dedd0c182..130baf72228 100644 --- a/tests/components/niko_home_control/conftest.py +++ b/tests/components/niko_home_control/conftest.py @@ -3,6 +3,7 @@ from collections.abc import Generator from unittest.mock import AsyncMock, patch +from nhc.cover import NHCCover from nhc.light import NHCLight import pytest @@ -48,9 +49,21 @@ def dimmable_light() -> NHCLight: return mock +@pytest.fixture +def cover() -> NHCCover: + """Return a cover mock.""" + mock = AsyncMock(spec=NHCCover) + mock.id = 3 + mock.type = 4 + mock.name = "cover" + mock.suggested_area = "room" + mock.state = 100 + return mock + + @pytest.fixture def mock_niko_home_control_connection( - light: NHCLight, dimmable_light: NHCLight + light: NHCLight, dimmable_light: NHCLight, cover: NHCCover ) -> Generator[AsyncMock]: """Mock a NHC client.""" with ( @@ -65,6 +78,7 @@ def mock_niko_home_control_connection( ): client = mock_client.return_value client.lights = [light, dimmable_light] + client.covers = [cover] yield client diff --git a/tests/components/niko_home_control/snapshots/test_cover.ambr b/tests/components/niko_home_control/snapshots/test_cover.ambr new file mode 100644 index 00000000000..6f99c1adb8c --- /dev/null +++ b/tests/components/niko_home_control/snapshots/test_cover.ambr @@ -0,0 +1,48 @@ +# 
serializer version: 1 +# name: test_cover[cover.cover-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.cover', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'niko_home_control', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01JFN93M7KRA38V5AMPCJ2JYYV-3', + 'unit_of_measurement': None, + }) +# --- +# name: test_cover[cover.cover-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'cover', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.cover', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- diff --git a/tests/components/niko_home_control/test_cover.py b/tests/components/niko_home_control/test_cover.py new file mode 100644 index 00000000000..5e9a17c3324 --- /dev/null +++ b/tests/components/niko_home_control/test_cover.py @@ -0,0 +1,138 @@ +"""Tests for the Niko Home Control cover platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.cover import DOMAIN as COVER_DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_CLOSE_COVER, + SERVICE_OPEN_COVER, + SERVICE_STOP_COVER, + STATE_CLOSED, + STATE_OPEN, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import find_update_callback, setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_cover( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_niko_home_control_connection: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch( + "homeassistant.components.niko_home_control.PLATFORMS", [Platform.COVER] + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.parametrize( + ("cover_id", "entity_id"), + [ + (0, "cover.cover"), + ], +) +async def test_open_cover( + hass: HomeAssistant, + mock_niko_home_control_connection: AsyncMock, + mock_config_entry: MockConfigEntry, + cover_id: int, + entity_id: int, +) -> None: + """Test opening the cover.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mock_niko_home_control_connection.covers[cover_id].open.assert_called_once_with() + + +@pytest.mark.parametrize( + ("cover_id", "entity_id"), + [ + (0, "cover.cover"), + ], +) +async def test_close_cover( + hass: HomeAssistant, + mock_niko_home_control_connection: AsyncMock, + mock_config_entry: MockConfigEntry, + cover_id: int, + entity_id: str, +) -> None: + """Test closing the cover.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mock_niko_home_control_connection.covers[cover_id].close.assert_called_once_with() + + +@pytest.mark.parametrize( + ("cover_id", "entity_id"), + [ + (0, "cover.cover"), + ], +) +async def test_stop_cover( + hass: HomeAssistant, + mock_niko_home_control_connection: AsyncMock, + mock_config_entry: MockConfigEntry, + cover_id: int, + entity_id: str, +) -> None: + """Test closing the cover.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_STOP_COVER, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mock_niko_home_control_connection.covers[cover_id].stop.assert_called_once_with() + + +async def test_updating( + hass: HomeAssistant, + mock_niko_home_control_connection: AsyncMock, + mock_config_entry: MockConfigEntry, + cover: AsyncMock, +) -> None: + """Test closing the cover.""" + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("cover.cover").state == STATE_OPEN + + cover.state = 0 + await find_update_callback(mock_niko_home_control_connection, 3)(0) + await hass.async_block_till_done() + + assert hass.states.get("cover.cover").state == STATE_CLOSED + + cover.state = 100 + await find_update_callback(mock_niko_home_control_connection, 3)(100) + await hass.async_block_till_done() + + assert hass.states.get("cover.cover").state == STATE_OPEN diff --git a/tests/components/niko_home_control/test_light.py b/tests/components/niko_home_control/test_light.py index 801bdf6a296..a61cc5204f6 100644 --- a/tests/components/niko_home_control/test_light.py +++ b/tests/components/niko_home_control/test_light.py @@ -18,7 +18,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from . import setup_integration +from . 
import find_update_callback, setup_integration from tests.common import MockConfigEntry, snapshot_platform @@ -113,7 +113,7 @@ async def test_updating( assert hass.states.get("light.light").state == STATE_ON light.state = 0 - await mock_niko_home_control_connection.register_callback.call_args_list[0][0][1](0) + await find_update_callback(mock_niko_home_control_connection, 1)(0) await hass.async_block_till_done() assert hass.states.get("light.light").state == STATE_OFF @@ -122,16 +122,14 @@ async def test_updating( assert hass.states.get("light.dimmable_light").attributes[ATTR_BRIGHTNESS] == 255 dimmable_light.state = 80 - await mock_niko_home_control_connection.register_callback.call_args_list[1][0][1]( - 80 - ) + await find_update_callback(mock_niko_home_control_connection, 2)(80) await hass.async_block_till_done() assert hass.states.get("light.dimmable_light").state == STATE_ON assert hass.states.get("light.dimmable_light").attributes[ATTR_BRIGHTNESS] == 204 dimmable_light.state = 0 - await mock_niko_home_control_connection.register_callback.call_args_list[1][0][1](0) + await find_update_callback(mock_niko_home_control_connection, 2)(0) await hass.async_block_till_done() assert hass.states.get("light.dimmable_light").state == STATE_OFF From 5ef12c39934c30f2b28c1ce0752d9dee911ea3f3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Fern=C3=A1ndez=20Rojas?= Date: Mon, 23 Dec 2024 15:51:21 +0100 Subject: [PATCH 1068/1198] Add AEMET Weather Radar images (#131386) Co-authored-by: Joost Lekkerkerker --- homeassistant/components/aemet/__init__.py | 4 +- homeassistant/components/aemet/config_flow.py | 3 +- homeassistant/components/aemet/const.py | 3 +- homeassistant/components/aemet/diagnostics.py | 3 +- homeassistant/components/aemet/image.py | 86 +++++++++++++++++++ homeassistant/components/aemet/strings.json | 8 ++ .../aemet/snapshots/test_diagnostics.ambr | 7 ++ tests/components/aemet/test_config_flow.py | 20 ++++- tests/components/aemet/test_image.py | 22 +++++ tests/components/aemet/util.py | 18 +++- 10 files changed, 164 insertions(+), 10 deletions(-) create mode 100644 homeassistant/components/aemet/image.py create mode 100644 tests/components/aemet/test_image.py diff --git a/homeassistant/components/aemet/__init__.py b/homeassistant/components/aemet/__init__.py index 79dc3cc55ce..4bd9dd03eea 100644 --- a/homeassistant/components/aemet/__init__.py +++ b/homeassistant/components/aemet/__init__.py @@ -13,7 +13,7 @@ from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import aiohttp_client from homeassistant.helpers.storage import STORAGE_DIR -from .const import CONF_STATION_UPDATES, DOMAIN, PLATFORMS +from .const import CONF_RADAR_UPDATES, CONF_STATION_UPDATES, DOMAIN, PLATFORMS from .coordinator import AemetConfigEntry, AemetData, WeatherUpdateCoordinator _LOGGER = logging.getLogger(__name__) @@ -26,6 +26,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> boo latitude = entry.data[CONF_LATITUDE] longitude = entry.data[CONF_LONGITUDE] update_features: int = UpdateFeature.FORECAST + if entry.options.get(CONF_RADAR_UPDATES, False): + update_features |= UpdateFeature.RADAR if entry.options.get(CONF_STATION_UPDATES, True): update_features |= UpdateFeature.STATION diff --git a/homeassistant/components/aemet/config_flow.py b/homeassistant/components/aemet/config_flow.py index e2b0b436c8c..80b5c07e6bd 100644 --- a/homeassistant/components/aemet/config_flow.py +++ b/homeassistant/components/aemet/config_flow.py @@ -17,10 +17,11 @@ 
from homeassistant.helpers.schema_config_entry_flow import ( SchemaOptionsFlowHandler, ) -from .const import CONF_STATION_UPDATES, DEFAULT_NAME, DOMAIN +from .const import CONF_RADAR_UPDATES, CONF_STATION_UPDATES, DEFAULT_NAME, DOMAIN OPTIONS_SCHEMA = vol.Schema( { + vol.Required(CONF_RADAR_UPDATES, default=False): bool, vol.Required(CONF_STATION_UPDATES, default=True): bool, } ) diff --git a/homeassistant/components/aemet/const.py b/homeassistant/components/aemet/const.py index 665075c4093..b79a94d209d 100644 --- a/homeassistant/components/aemet/const.py +++ b/homeassistant/components/aemet/const.py @@ -51,8 +51,9 @@ from homeassistant.components.weather import ( from homeassistant.const import Platform ATTRIBUTION = "Powered by AEMET OpenData" +CONF_RADAR_UPDATES = "radar_updates" CONF_STATION_UPDATES = "station_updates" -PLATFORMS = [Platform.SENSOR, Platform.WEATHER] +PLATFORMS = [Platform.IMAGE, Platform.SENSOR, Platform.WEATHER] DEFAULT_NAME = "AEMET" DOMAIN = "aemet" diff --git a/homeassistant/components/aemet/diagnostics.py b/homeassistant/components/aemet/diagnostics.py index bc366fc6d44..b072309d4b8 100644 --- a/homeassistant/components/aemet/diagnostics.py +++ b/homeassistant/components/aemet/diagnostics.py @@ -4,7 +4,7 @@ from __future__ import annotations from typing import Any -from aemet_opendata.const import AOD_COORDS +from aemet_opendata.const import AOD_COORDS, AOD_IMG_BYTES from homeassistant.components.diagnostics import async_redact_data from homeassistant.const import ( @@ -26,6 +26,7 @@ TO_REDACT_CONFIG = [ TO_REDACT_COORD = [ AOD_COORDS, + AOD_IMG_BYTES, ] diff --git a/homeassistant/components/aemet/image.py b/homeassistant/components/aemet/image.py new file mode 100644 index 00000000000..ffc53022e4c --- /dev/null +++ b/homeassistant/components/aemet/image.py @@ -0,0 +1,86 @@ +"""Support for the AEMET OpenData images.""" + +from __future__ import annotations + +from typing import Final + +from aemet_opendata.const import AOD_DATETIME, AOD_IMG_BYTES, AOD_IMG_TYPE, AOD_RADAR +from aemet_opendata.helpers import dict_nested_value + +from homeassistant.components.image import Image, ImageEntity, ImageEntityDescription +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .coordinator import AemetConfigEntry, WeatherUpdateCoordinator +from .entity import AemetEntity + +AEMET_IMAGES: Final[tuple[ImageEntityDescription, ...]] = ( + ImageEntityDescription( + key=AOD_RADAR, + translation_key="weather_radar", + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: AemetConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up AEMET OpenData image entities based on a config entry.""" + domain_data = config_entry.runtime_data + name = domain_data.name + coordinator = domain_data.coordinator + + unique_id = config_entry.unique_id + assert unique_id is not None + + async_add_entities( + AemetImage( + hass, + name, + coordinator, + description, + unique_id, + ) + for description in AEMET_IMAGES + if dict_nested_value(coordinator.data["lib"], [description.key]) is not None + ) + + +class AemetImage(AemetEntity, ImageEntity): + """Implementation of an AEMET OpenData image.""" + + entity_description: ImageEntityDescription + + def __init__( + self, + hass: HomeAssistant, + name: str, + coordinator: WeatherUpdateCoordinator, + description: ImageEntityDescription, + unique_id: str, + ) -> None: + """Initialize the image.""" + super().__init__(coordinator, 
name, unique_id) + ImageEntity.__init__(self, hass) + self.entity_description = description + self._attr_unique_id = f"{unique_id}-{description.key}" + + self._async_update_attrs() + + @callback + def _handle_coordinator_update(self) -> None: + """Update attributes when the coordinator updates.""" + self._async_update_attrs() + super()._handle_coordinator_update() + + @callback + def _async_update_attrs(self) -> None: + """Update image attributes.""" + image_data = self.get_aemet_value([self.entity_description.key]) + self._cached_image = Image( + content_type=image_data.get(AOD_IMG_TYPE), + content=image_data.get(AOD_IMG_BYTES), + ) + self._attr_image_last_updated = image_data.get(AOD_DATETIME) diff --git a/homeassistant/components/aemet/strings.json b/homeassistant/components/aemet/strings.json index 75c810978ad..d65c546b050 100644 --- a/homeassistant/components/aemet/strings.json +++ b/homeassistant/components/aemet/strings.json @@ -18,10 +18,18 @@ } } }, + "entity": { + "image": { + "weather_radar": { + "name": "Weather radar" + } + } + }, "options": { "step": { "init": { "data": { + "radar_updates": "Gather data from AEMET weather radar", "station_updates": "Gather data from AEMET weather stations" } } diff --git a/tests/components/aemet/snapshots/test_diagnostics.ambr b/tests/components/aemet/snapshots/test_diagnostics.ambr index 54546507dfa..0e40cce1b86 100644 --- a/tests/components/aemet/snapshots/test_diagnostics.ambr +++ b/tests/components/aemet/snapshots/test_diagnostics.ambr @@ -17,6 +17,7 @@ 'entry_id': '7442b231f139e813fc1939281123f220', 'minor_version': 1, 'options': dict({ + 'radar_updates': True, }), 'pref_disable_new_entities': False, 'pref_disable_polling': False, @@ -33,6 +34,12 @@ ]), }), 'lib': dict({ + 'radar': dict({ + 'datetime': '2021-01-09T11:34:06.448809+00:00', + 'id': 'national', + 'image-bytes': '**REDACTED**', + 'image-type': 'image/gif', + }), 'station': dict({ 'altitude': 667.0, 'coordinates': '**REDACTED**', diff --git a/tests/components/aemet/test_config_flow.py b/tests/components/aemet/test_config_flow.py index 0f3491b1c43..3dd8303c8cb 100644 --- a/tests/components/aemet/test_config_flow.py +++ b/tests/components/aemet/test_config_flow.py @@ -6,7 +6,11 @@ from aemet_opendata.exceptions import AuthError from freezegun.api import FrozenDateTimeFactory import pytest -from homeassistant.components.aemet.const import CONF_STATION_UPDATES, DOMAIN +from homeassistant.components.aemet.const import ( + CONF_RADAR_UPDATES, + CONF_STATION_UPDATES, + DOMAIN, +) from homeassistant.config_entries import SOURCE_USER, ConfigEntryState from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME from homeassistant.core import HomeAssistant @@ -61,13 +65,20 @@ async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: @pytest.mark.parametrize( - ("user_input", "expected"), [({}, True), ({CONF_STATION_UPDATES: False}, False)] + ("user_input", "expected"), + [ + ({}, {CONF_RADAR_UPDATES: False, CONF_STATION_UPDATES: True}), + ( + {CONF_RADAR_UPDATES: False, CONF_STATION_UPDATES: False}, + {CONF_RADAR_UPDATES: False, CONF_STATION_UPDATES: False}, + ), + ], ) async def test_form_options( hass: HomeAssistant, freezer: FrozenDateTimeFactory, user_input: dict[str, bool], - expected: bool, + expected: dict[str, bool], ) -> None: """Test the form options.""" @@ -98,7 +109,8 @@ async def test_form_options( assert result["type"] is FlowResultType.CREATE_ENTRY assert entry.options == { - CONF_STATION_UPDATES: expected, + 
CONF_RADAR_UPDATES: expected[CONF_RADAR_UPDATES], + CONF_STATION_UPDATES: expected[CONF_STATION_UPDATES], } await hass.async_block_till_done() diff --git a/tests/components/aemet/test_image.py b/tests/components/aemet/test_image.py new file mode 100644 index 00000000000..4321daac883 --- /dev/null +++ b/tests/components/aemet/test_image.py @@ -0,0 +1,22 @@ +"""The image tests for the AEMET OpenData platform.""" + +from freezegun.api import FrozenDateTimeFactory + +from homeassistant.core import HomeAssistant + +from .util import async_init_integration + + +async def test_aemet_create_images( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, +) -> None: + """Test creation of AEMET images.""" + + await hass.config.async_set_time_zone("UTC") + freezer.move_to("2021-01-09 12:00:00+00:00") + await async_init_integration(hass) + + state = hass.states.get("image.aemet_weather_radar") + assert state is not None + assert state.state == "2021-01-09T11:34:06.448809+00:00" diff --git a/tests/components/aemet/util.py b/tests/components/aemet/util.py index 162ee657513..0361ca9e6d8 100644 --- a/tests/components/aemet/util.py +++ b/tests/components/aemet/util.py @@ -3,9 +3,9 @@ from typing import Any from unittest.mock import patch -from aemet_opendata.const import ATTR_DATA +from aemet_opendata.const import ATTR_BYTES, ATTR_DATA, ATTR_TIMESTAMP, ATTR_TYPE -from homeassistant.components.aemet.const import DOMAIN +from homeassistant.components.aemet.const import CONF_RADAR_UPDATES, DOMAIN from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME from homeassistant.core import HomeAssistant @@ -19,6 +19,14 @@ FORECAST_HOURLY_DATA_MOCK = { ATTR_DATA: load_json_value_fixture("aemet/town-28065-forecast-hourly-data.json"), } +RADAR_DATA_MOCK = { + ATTR_DATA: { + ATTR_TYPE: "image/gif", + ATTR_BYTES: bytes([0]), + }, + ATTR_TIMESTAMP: "2021-01-09T11:34:06.448809+00:00", +} + STATION_DATA_MOCK = { ATTR_DATA: load_json_value_fixture("aemet/station-3195-data.json"), } @@ -53,6 +61,9 @@ def mock_api_call(cmd: str, fetch_data: bool = False) -> dict[str, Any]: return FORECAST_DAILY_DATA_MOCK if cmd == "prediccion/especifica/municipio/horaria/28065": return FORECAST_HOURLY_DATA_MOCK + if cmd == "red/radar/nacional": + return RADAR_DATA_MOCK + return {} @@ -69,6 +80,9 @@ async def async_init_integration(hass: HomeAssistant): }, entry_id="7442b231f139e813fc1939281123f220", unique_id="40.30403754--3.72935236", + options={ + CONF_RADAR_UPDATES: True, + }, ) config_entry.add_to_hass(hass) From 8e86c3c77599595c2e11a9434a57c7331c6015f3 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Mon, 23 Dec 2024 15:54:40 +0100 Subject: [PATCH 1069/1198] Add Ecovacs station entities (#133876) Co-authored-by: Joost Lekkerkerker --- homeassistant/components/ecovacs/button.py | 50 +- homeassistant/components/ecovacs/const.py | 4 + homeassistant/components/ecovacs/icons.json | 12 + homeassistant/components/ecovacs/sensor.py | 12 +- homeassistant/components/ecovacs/strings.json | 16 + homeassistant/components/ecovacs/util.py | 11 + .../fixtures/devices/qhe2o2/device.json | 29 + .../ecovacs/snapshots/test_button.ambr | 322 ++++++++ .../ecovacs/snapshots/test_sensor.ambr | 775 ++++++++++++++++++ tests/components/ecovacs/test_button.py | 39 +- tests/components/ecovacs/test_sensor.py | 27 +- 11 files changed, 1290 insertions(+), 7 deletions(-) create mode 100644 tests/components/ecovacs/fixtures/devices/qhe2o2/device.json diff --git a/homeassistant/components/ecovacs/button.py 
b/homeassistant/components/ecovacs/button.py index 5d76b38bed8..2759ca972df 100644 --- a/homeassistant/components/ecovacs/button.py +++ b/homeassistant/components/ecovacs/button.py @@ -2,7 +2,12 @@ from dataclasses import dataclass -from deebot_client.capabilities import CapabilityExecute, CapabilityLifeSpan +from deebot_client.capabilities import ( + CapabilityExecute, + CapabilityExecuteTypes, + CapabilityLifeSpan, +) +from deebot_client.commands import StationAction from deebot_client.events import LifeSpan from homeassistant.components.button import ButtonEntity, ButtonEntityDescription @@ -11,7 +16,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import EcovacsConfigEntry -from .const import SUPPORTED_LIFESPANS +from .const import SUPPORTED_LIFESPANS, SUPPORTED_STATION_ACTIONS from .entity import ( EcovacsCapabilityEntityDescription, EcovacsDescriptionEntity, @@ -35,6 +40,13 @@ class EcovacsLifespanButtonEntityDescription(ButtonEntityDescription): component: LifeSpan +@dataclass(kw_only=True, frozen=True) +class EcovacsStationActionButtonEntityDescription(ButtonEntityDescription): + """Ecovacs station action button entity description.""" + + action: StationAction + + ENTITY_DESCRIPTIONS: tuple[EcovacsButtonEntityDescription, ...] = ( EcovacsButtonEntityDescription( capability_fn=lambda caps: caps.map.relocation if caps.map else None, @@ -44,6 +56,16 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsButtonEntityDescription, ...] = ( ), ) +STATION_ENTITY_DESCRIPTIONS = tuple( + EcovacsStationActionButtonEntityDescription( + action=action, + key=f"station_action_{action.name.lower()}", + translation_key=f"station_action_{action.name.lower()}", + ) + for action in SUPPORTED_STATION_ACTIONS +) + + LIFESPAN_ENTITY_DESCRIPTIONS = tuple( EcovacsLifespanButtonEntityDescription( component=component, @@ -74,6 +96,15 @@ async def async_setup_entry( for description in LIFESPAN_ENTITY_DESCRIPTIONS if description.component in device.capabilities.life_span.types ) + entities.extend( + EcovacsStationActionButtonEntity( + device, device.capabilities.station.action, description + ) + for device in controller.devices + if device.capabilities.station + for description in STATION_ENTITY_DESCRIPTIONS + if description.action in device.capabilities.station.action.types + ) async_add_entities(entities) @@ -103,3 +134,18 @@ class EcovacsResetLifespanButtonEntity( await self._device.execute_command( self._capability.reset(self.entity_description.component) ) + + +class EcovacsStationActionButtonEntity( + EcovacsDescriptionEntity[CapabilityExecuteTypes[StationAction]], + ButtonEntity, +): + """Ecovacs station action button entity.""" + + entity_description: EcovacsStationActionButtonEntityDescription + + async def async_press(self) -> None: + """Press the button.""" + await self._device.execute_command( + self._capability.execute(self.entity_description.action) + ) diff --git a/homeassistant/components/ecovacs/const.py b/homeassistant/components/ecovacs/const.py index ac7a268f1bd..0bfe9cfd544 100644 --- a/homeassistant/components/ecovacs/const.py +++ b/homeassistant/components/ecovacs/const.py @@ -2,6 +2,7 @@ from enum import StrEnum +from deebot_client.commands import StationAction from deebot_client.events import LifeSpan DOMAIN = "ecovacs" @@ -19,8 +20,11 @@ SUPPORTED_LIFESPANS = ( LifeSpan.SIDE_BRUSH, LifeSpan.UNIT_CARE, LifeSpan.ROUND_MOP, + LifeSpan.STATION_FILTER, ) +SUPPORTED_STATION_ACTIONS = (StationAction.EMPTY_DUSTBIN,) + 
LEGACY_SUPPORTED_LIFESPANS = ( "main_brush", "side_brush", diff --git a/homeassistant/components/ecovacs/icons.json b/homeassistant/components/ecovacs/icons.json index 6097f43a4e4..b0e2a0595bf 100644 --- a/homeassistant/components/ecovacs/icons.json +++ b/homeassistant/components/ecovacs/icons.json @@ -27,11 +27,17 @@ "reset_lifespan_side_brush": { "default": "mdi:broom" }, + "reset_lifespan_station_filter": { + "default": "mdi:air-filter" + }, "reset_lifespan_unit_care": { "default": "mdi:robot-vacuum" }, "reset_lifespan_round_mop": { "default": "mdi:broom" + }, + "station_action_empty_dustbin": { + "default": "mdi:delete-restore" } }, "event": { @@ -72,6 +78,9 @@ "lifespan_side_brush": { "default": "mdi:broom" }, + "lifespan_station_filter": { + "default": "mdi:air-filter" + }, "lifespan_unit_care": { "default": "mdi:robot-vacuum" }, @@ -87,6 +96,9 @@ "network_ssid": { "default": "mdi:wifi" }, + "station_state": { + "default": "mdi:home" + }, "stats_area": { "default": "mdi:floor-plan" }, diff --git a/homeassistant/components/ecovacs/sensor.py b/homeassistant/components/ecovacs/sensor.py index 7c190d27775..0e906c6cb16 100644 --- a/homeassistant/components/ecovacs/sensor.py +++ b/homeassistant/components/ecovacs/sensor.py @@ -16,6 +16,7 @@ from deebot_client.events import ( NetworkInfoEvent, StatsEvent, TotalStatsEvent, + station, ) from sucks import VacBot @@ -46,7 +47,7 @@ from .entity import ( EcovacsLegacyEntity, EventT, ) -from .util import get_supported_entitites +from .util import get_name_key, get_options, get_supported_entitites @dataclass(kw_only=True, frozen=True) @@ -136,6 +137,15 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSensorEntityDescription, ...] = ( entity_registry_enabled_default=False, entity_category=EntityCategory.DIAGNOSTIC, ), + # Station + EcovacsSensorEntityDescription[station.StationEvent]( + capability_fn=lambda caps: caps.station.state if caps.station else None, + value_fn=lambda e: get_name_key(e.state), + key="station_state", + translation_key="station_state", + device_class=SensorDeviceClass.ENUM, + options=get_options(station.State), + ), ) diff --git a/homeassistant/components/ecovacs/strings.json b/homeassistant/components/ecovacs/strings.json index c9de461ad5b..723bdef17f8 100644 --- a/homeassistant/components/ecovacs/strings.json +++ b/homeassistant/components/ecovacs/strings.json @@ -46,6 +46,9 @@ "relocate": { "name": "Relocate" }, + "reset_lifespan_base_station_filter": { + "name": "Reset station filter lifespan" + }, "reset_lifespan_blade": { "name": "Reset blade lifespan" }, @@ -66,6 +69,9 @@ }, "reset_lifespan_side_brush": { "name": "Reset side brush lifespan" + }, + "station_action_empty_dustbin": { + "name": "Empty dustbin" } }, "event": { @@ -107,6 +113,9 @@ } } }, + "lifespan_base_station_filter": { + "name": "Station filter lifespan" + }, "lifespan_blade": { "name": "Blade lifespan" }, @@ -140,6 +149,13 @@ "network_ssid": { "name": "Wi-Fi SSID" }, + "station_state": { + "name": "Station state", + "state": { + "idle": "[%key:common::state::idle%]", + "emptying_dustbin": "Emptying dustbin" + } + }, "stats_area": { "name": "Area cleaned" }, diff --git a/homeassistant/components/ecovacs/util.py b/homeassistant/components/ecovacs/util.py index a4894de8968..0cfbf1e8f91 100644 --- a/homeassistant/components/ecovacs/util.py +++ b/homeassistant/components/ecovacs/util.py @@ -7,6 +7,8 @@ import random import string from typing import TYPE_CHECKING +from deebot_client.events.station import State + from homeassistant.core import HomeAssistant, callback from 
homeassistant.util import slugify @@ -47,4 +49,13 @@ def get_supported_entitites( @callback def get_name_key(enum: Enum) -> str: """Return the lower case name of the enum.""" + if enum is State.EMPTYING: + # Will be fixed in the next major release of deebot-client + return "emptying_dustbin" return enum.name.lower() + + +@callback +def get_options(enum: type[Enum]) -> list[str]: + """Return the options for the enum.""" + return [get_name_key(option) for option in enum] diff --git a/tests/components/ecovacs/fixtures/devices/qhe2o2/device.json b/tests/components/ecovacs/fixtures/devices/qhe2o2/device.json new file mode 100644 index 00000000000..0fbaaf896ee --- /dev/null +++ b/tests/components/ecovacs/fixtures/devices/qhe2o2/device.json @@ -0,0 +1,29 @@ +{ + "did": "8516fbb1-17f1-4194-0000001", + "name": "E1234567890000000003", + "class": "qhe2o2", + "resource": "NHl5", + "company": "eco-ng", + "bindTs": 1734792100110, + "service": { + "jmq": "jmq-ngiot-eu.dc.ww.ecouser.net", + "mqs": "api-ngiot.dc-eu.ww.ecouser.net" + }, + "deviceName": "DEEBOT N20 PRO PLUS", + "icon": "https: //portal-ww.ecouser.net/api/pim/file/get/0000001", + "ota": true, + "UILogicId": "y2_ww_h_y2h5", + "materialNo": "110-2406-0001", + "pid": "0000001", + "product_category": "DEEBOT", + "model": "Y2_AES_BLACK_INT", + "updateInfo": { + "needUpdate": false, + "changeLog": "" + }, + "nick": "Dusty", + "homeId": "1234567890abcdef12345678", + "homeSort": 1, + "status": 1, + "otaUpgrade": {} +} diff --git a/tests/components/ecovacs/snapshots/test_button.ambr b/tests/components/ecovacs/snapshots/test_button.ambr index efae8896962..f21d019a7b1 100644 --- a/tests/components/ecovacs/snapshots/test_button.ambr +++ b/tests/components/ecovacs/snapshots/test_button.ambr @@ -91,6 +91,328 @@ 'state': '2024-01-01T00:00:00+00:00', }) # --- +# name: test_buttons[qhe2o2][button.dusty_empty_dustbin:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.dusty_empty_dustbin', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Empty dustbin', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'station_action_empty_dustbin', + 'unique_id': '8516fbb1-17f1-4194-0000001_station_action_empty_dustbin', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[qhe2o2][button.dusty_empty_dustbin:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dusty Empty dustbin', + }), + 'context': , + 'entity_id': 'button.dusty_empty_dustbin', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-01T00:00:00+00:00', + }) +# --- +# name: test_buttons[qhe2o2][button.dusty_relocate:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.dusty_relocate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 
'Relocate', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relocate', + 'unique_id': '8516fbb1-17f1-4194-0000001_relocate', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[qhe2o2][button.dusty_relocate:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dusty Relocate', + }), + 'context': , + 'entity_id': 'button.dusty_relocate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-01T00:00:00+00:00', + }) +# --- +# name: test_buttons[qhe2o2][button.dusty_reset_filter_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.dusty_reset_filter_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset filter lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_lifespan_filter', + 'unique_id': '8516fbb1-17f1-4194-0000001_reset_lifespan_filter', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[qhe2o2][button.dusty_reset_filter_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dusty Reset filter lifespan', + }), + 'context': , + 'entity_id': 'button.dusty_reset_filter_lifespan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-01T00:00:00+00:00', + }) +# --- +# name: test_buttons[qhe2o2][button.dusty_reset_main_brush_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.dusty_reset_main_brush_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset main brush lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_lifespan_brush', + 'unique_id': '8516fbb1-17f1-4194-0000001_reset_lifespan_brush', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[qhe2o2][button.dusty_reset_main_brush_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dusty Reset main brush lifespan', + }), + 'context': , + 'entity_id': 'button.dusty_reset_main_brush_lifespan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-01T00:00:00+00:00', + }) +# --- +# name: test_buttons[qhe2o2][button.dusty_reset_round_mop_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.dusty_reset_round_mop_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 
'original_name': 'Reset round mop lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_lifespan_round_mop', + 'unique_id': '8516fbb1-17f1-4194-0000001_reset_lifespan_round_mop', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[qhe2o2][button.dusty_reset_round_mop_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dusty Reset round mop lifespan', + }), + 'context': , + 'entity_id': 'button.dusty_reset_round_mop_lifespan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-01T00:00:00+00:00', + }) +# --- +# name: test_buttons[qhe2o2][button.dusty_reset_side_brush_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.dusty_reset_side_brush_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset side brush lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_lifespan_side_brush', + 'unique_id': '8516fbb1-17f1-4194-0000001_reset_lifespan_side_brush', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[qhe2o2][button.dusty_reset_side_brush_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dusty Reset side brush lifespan', + }), + 'context': , + 'entity_id': 'button.dusty_reset_side_brush_lifespan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-01T00:00:00+00:00', + }) +# --- +# name: test_buttons[qhe2o2][button.dusty_reset_unit_care_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.dusty_reset_unit_care_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset unit care lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_lifespan_unit_care', + 'unique_id': '8516fbb1-17f1-4194-0000001_reset_lifespan_unit_care', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[qhe2o2][button.dusty_reset_unit_care_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dusty Reset unit care lifespan', + }), + 'context': , + 'entity_id': 'button.dusty_reset_unit_care_lifespan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-01T00:00:00+00:00', + }) +# --- # name: test_buttons[yna5x1][button.ozmo_950_relocate:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/ecovacs/snapshots/test_sensor.ambr b/tests/components/ecovacs/snapshots/test_sensor.ambr index 9c76c00b5b7..755fcda9e7d 100644 --- a/tests/components/ecovacs/snapshots/test_sensor.ambr +++ b/tests/components/ecovacs/snapshots/test_sensor.ambr @@ -725,6 +725,781 @@ 'state': 
'Testnetwork', }) # --- +# name: test_sensors[qhe2o2][sensor.dusty_area_cleaned:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.dusty_area_cleaned', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Area cleaned', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'stats_area', + 'unique_id': '8516fbb1-17f1-4194-0000001_stats_area', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_area_cleaned:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dusty Area cleaned', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.dusty_area_cleaned', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_battery:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dusty_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '8516fbb1-17f1-4194-0000001_battery_level', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_battery:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Dusty Battery', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.dusty_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_cleaning_duration:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.dusty_cleaning_duration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cleaning duration', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'stats_time', + 'unique_id': '8516fbb1-17f1-4194-0000001_stats_time', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_cleaning_duration:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Dusty Cleaning duration', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.dusty_cleaning_duration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.0', + }) +# --- +# name: 
test_sensors[qhe2o2][sensor.dusty_error:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dusty_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Error', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'error', + 'unique_id': '8516fbb1-17f1-4194-0000001_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_error:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'description': 'NoError: Robot is operational', + 'friendly_name': 'Dusty Error', + }), + 'context': , + 'entity_id': 'sensor.dusty_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_filter_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dusty_filter_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Filter lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifespan_filter', + 'unique_id': '8516fbb1-17f1-4194-0000001_lifespan_filter', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_filter_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dusty Filter lifespan', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.dusty_filter_lifespan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '56', + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_ip_address:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dusty_ip_address', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IP address', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'network_ip', + 'unique_id': '8516fbb1-17f1-4194-0000001_network_ip', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_ip_address:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dusty IP address', + }), + 'context': , + 'entity_id': 'sensor.dusty_ip_address', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '192.168.0.10', + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_main_brush_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dusty_main_brush_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Main brush lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifespan_brush', + 'unique_id': '8516fbb1-17f1-4194-0000001_lifespan_brush', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_main_brush_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dusty Main brush lifespan', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.dusty_main_brush_lifespan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '80', + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_round_mop_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dusty_round_mop_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Round mop lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifespan_round_mop', + 'unique_id': '8516fbb1-17f1-4194-0000001_lifespan_round_mop', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_round_mop_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dusty Round mop lifespan', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.dusty_round_mop_lifespan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_side_brush_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dusty_side_brush_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Side brush lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifespan_side_brush', + 'unique_id': '8516fbb1-17f1-4194-0000001_lifespan_side_brush', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_side_brush_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dusty Side brush lifespan', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.dusty_side_brush_lifespan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40', + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_station_state:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': dict({ + 'options': list([ + 'idle', + 'emptying_dustbin', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.dusty_station_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Station state', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'station_state', + 'unique_id': '8516fbb1-17f1-4194-0000001_station_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_station_state:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Dusty Station state', + 'options': list([ + 'idle', + 'emptying_dustbin', + ]), + }), + 'context': , + 'entity_id': 'sensor.dusty_station_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'emptying_dustbin', + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_total_area_cleaned:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.dusty_total_area_cleaned', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Total area cleaned', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_stats_area', + 'unique_id': '8516fbb1-17f1-4194-0000001_total_stats_area', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_total_area_cleaned:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dusty Total area cleaned', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.dusty_total_area_cleaned', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60', + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_total_cleaning_duration:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.dusty_total_cleaning_duration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total cleaning duration', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_stats_time', + 'unique_id': '8516fbb1-17f1-4194-0000001_total_stats_time', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_total_cleaning_duration:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Dusty Total cleaning duration', + 'state_class': , + 'unit_of_measurement': , + }), + 
'context': , + 'entity_id': 'sensor.dusty_total_cleaning_duration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.000', + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_total_cleanings:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.dusty_total_cleanings', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Total cleanings', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_stats_cleanings', + 'unique_id': '8516fbb1-17f1-4194-0000001_total_stats_cleanings', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_total_cleanings:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dusty Total cleanings', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.dusty_total_cleanings', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '123', + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_unit_care_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dusty_unit_care_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Unit care lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifespan_unit_care', + 'unique_id': '8516fbb1-17f1-4194-0000001_lifespan_unit_care', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_unit_care_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dusty Unit care lifespan', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.dusty_unit_care_lifespan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_wi_fi_rssi:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dusty_wi_fi_rssi', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Wi-Fi RSSI', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'network_rssi', + 'unique_id': '8516fbb1-17f1-4194-0000001_network_rssi', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_wi_fi_rssi:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dusty Wi-Fi RSSI', + }), + 'context': , + 'entity_id': 
'sensor.dusty_wi_fi_rssi', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-62', + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_wi_fi_ssid:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dusty_wi_fi_ssid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Wi-Fi SSID', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'network_ssid', + 'unique_id': '8516fbb1-17f1-4194-0000001_network_ssid', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[qhe2o2][sensor.dusty_wi_fi_ssid:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dusty Wi-Fi SSID', + }), + 'context': , + 'entity_id': 'sensor.dusty_wi_fi_ssid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Testnetwork', + }) +# --- # name: test_sensors[yna5x1][sensor.ozmo_950_area_cleaned:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/ecovacs/test_button.py b/tests/components/ecovacs/test_button.py index 4b3068f6cda..65e0b19ea02 100644 --- a/tests/components/ecovacs/test_button.py +++ b/tests/components/ecovacs/test_button.py @@ -1,7 +1,12 @@ """Tests for Ecovacs sensors.""" from deebot_client.command import Command -from deebot_client.commands.json import ResetLifeSpan, SetRelocationState +from deebot_client.commands import StationAction +from deebot_client.commands.json import ( + ResetLifeSpan, + SetRelocationState, + station_action, +) from deebot_client.events import LifeSpan import pytest from syrupy import SnapshotAssertion @@ -60,8 +65,38 @@ def platforms() -> Platform | list[Platform]: ), ], ), + ( + "qhe2o2", + [ + ("button.dusty_relocate", SetRelocationState()), + ( + "button.dusty_reset_main_brush_lifespan", + ResetLifeSpan(LifeSpan.BRUSH), + ), + ( + "button.dusty_reset_filter_lifespan", + ResetLifeSpan(LifeSpan.FILTER), + ), + ( + "button.dusty_reset_side_brush_lifespan", + ResetLifeSpan(LifeSpan.SIDE_BRUSH), + ), + ( + "button.dusty_reset_unit_care_lifespan", + ResetLifeSpan(LifeSpan.UNIT_CARE), + ), + ( + "button.dusty_reset_round_mop_lifespan", + ResetLifeSpan(LifeSpan.ROUND_MOP), + ), + ( + "button.dusty_empty_dustbin", + station_action.StationAction(StationAction.EMPTY_DUSTBIN), + ), + ], + ), ], - ids=["yna5x1", "5xu9h3"], + ids=["yna5x1", "5xu9h3", "qhe2o2"], ) async def test_buttons( hass: HomeAssistant, diff --git a/tests/components/ecovacs/test_sensor.py b/tests/components/ecovacs/test_sensor.py index 53c57999776..5bcd8385320 100644 --- a/tests/components/ecovacs/test_sensor.py +++ b/tests/components/ecovacs/test_sensor.py @@ -11,6 +11,7 @@ from deebot_client.events import ( NetworkInfoEvent, StatsEvent, TotalStatsEvent, + station, ) import pytest from syrupy import SnapshotAssertion @@ -45,6 +46,7 @@ async def notify_events(hass: HomeAssistant, event_bus: EventBus): event_bus.notify(LifeSpanEvent(LifeSpan.FILTER, 56, 40 * 60)) event_bus.notify(LifeSpanEvent(LifeSpan.SIDE_BRUSH, 40, 20 * 60)) event_bus.notify(ErrorEvent(0, "NoError: Robot is operational")) + event_bus.notify(station.StationEvent(station.State.EMPTYING)) await 
block_till_done(hass, event_bus) @@ -87,8 +89,29 @@ async def notify_events(hass: HomeAssistant, event_bus: EventBus): "sensor.goat_g1_error", ], ), + ( + "qhe2o2", + [ + "sensor.dusty_area_cleaned", + "sensor.dusty_cleaning_duration", + "sensor.dusty_total_area_cleaned", + "sensor.dusty_total_cleaning_duration", + "sensor.dusty_total_cleanings", + "sensor.dusty_battery", + "sensor.dusty_ip_address", + "sensor.dusty_wi_fi_rssi", + "sensor.dusty_wi_fi_ssid", + "sensor.dusty_station_state", + "sensor.dusty_main_brush_lifespan", + "sensor.dusty_filter_lifespan", + "sensor.dusty_side_brush_lifespan", + "sensor.dusty_unit_care_lifespan", + "sensor.dusty_round_mop_lifespan", + "sensor.dusty_error", + ], + ), ], - ids=["yna5x1", "5xu9h3"], + ids=["yna5x1", "5xu9h3", "qhe2o2"], ) async def test_sensors( hass: HomeAssistant, @@ -99,7 +122,7 @@ async def test_sensors( entity_ids: list[str], ) -> None: """Test that sensor entity snapshots match.""" - assert entity_ids == hass.states.async_entity_ids() + assert hass.states.async_entity_ids() == entity_ids for entity_id in entity_ids: assert (state := hass.states.get(entity_id)), f"State of {entity_id} is missing" assert state.state == STATE_UNKNOWN From 45ae2f473646590a5e6f8c06865135cf8979bfb1 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Mon, 23 Dec 2024 15:54:57 +0100 Subject: [PATCH 1070/1198] Set Fronius integration quality scale to gold (#133884) --- homeassistant/components/fronius/manifest.json | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/fronius/manifest.json b/homeassistant/components/fronius/manifest.json index 227234f9937..94d0f90b0bd 100644 --- a/homeassistant/components/fronius/manifest.json +++ b/homeassistant/components/fronius/manifest.json @@ -11,5 +11,6 @@ "documentation": "https://www.home-assistant.io/integrations/fronius", "iot_class": "local_polling", "loggers": ["pyfronius"], + "quality_scale": "gold", "requirements": ["PyFronius==0.7.3"] } From 5487e8673ce904cd4ed9a6a2fe35443153ed517f Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Mon, 23 Dec 2024 16:03:56 +0100 Subject: [PATCH 1071/1198] Update frontend to 20241223.1 (#133886) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 1f9988dff38..2d3604330f6 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20241127.8"] + "requirements": ["home-assistant-frontend==20241223.1"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 6863da50af3..f46248d2e1c 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -34,7 +34,7 @@ habluetooth==3.6.0 hass-nabucasa==0.87.0 hassil==2.0.5 home-assistant-bluetooth==1.13.0 -home-assistant-frontend==20241127.8 +home-assistant-frontend==20241223.1 home-assistant-intents==2024.12.20 httpx==0.27.2 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 965b5673961..a087e3ff509 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1134,7 +1134,7 @@ hole==0.8.0 
holidays==0.63 # homeassistant.components.frontend -home-assistant-frontend==20241127.8 +home-assistant-frontend==20241223.1 # homeassistant.components.conversation home-assistant-intents==2024.12.20 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b59be622158..de9d048d72c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -963,7 +963,7 @@ hole==0.8.0 holidays==0.63 # homeassistant.components.frontend -home-assistant-frontend==20241127.8 +home-assistant-frontend==20241223.1 # homeassistant.components.conversation home-assistant-intents==2024.12.20 From 0cbc77ad3f3dc21ab7545098a1f2923f3d1ce3f5 Mon Sep 17 00:00:00 2001 From: "Steven B." <51370195+sdb9696@users.noreply.github.com> Date: Mon, 23 Dec 2024 15:36:57 +0000 Subject: [PATCH 1072/1198] Make tplink entities unavailable if camera is off (#133877) --- homeassistant/components/tplink/binary_sensor.py | 3 ++- homeassistant/components/tplink/button.py | 7 ++++++- homeassistant/components/tplink/camera.py | 4 +++- homeassistant/components/tplink/climate.py | 5 +++-- homeassistant/components/tplink/entity.py | 13 +++++++++---- homeassistant/components/tplink/fan.py | 3 ++- homeassistant/components/tplink/light.py | 7 +++++-- homeassistant/components/tplink/number.py | 3 ++- homeassistant/components/tplink/select.py | 3 ++- homeassistant/components/tplink/sensor.py | 3 ++- homeassistant/components/tplink/siren.py | 3 ++- homeassistant/components/tplink/switch.py | 3 ++- 12 files changed, 40 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/tplink/binary_sensor.py b/homeassistant/components/tplink/binary_sensor.py index e14ecf01749..f3a7e7a7ce7 100644 --- a/homeassistant/components/tplink/binary_sensor.py +++ b/homeassistant/components/tplink/binary_sensor.py @@ -96,6 +96,7 @@ class TPLinkBinarySensorEntity(CoordinatedTPLinkFeatureEntity, BinarySensorEntit entity_description: TPLinkBinarySensorEntityDescription @callback - def _async_update_attrs(self) -> None: + def _async_update_attrs(self) -> bool: """Update the entity's attributes.""" self._attr_is_on = cast(bool | None, self._feature.value) + return True diff --git a/homeassistant/components/tplink/button.py b/homeassistant/components/tplink/button.py index 6e0d34864d9..753efcf89f4 100644 --- a/homeassistant/components/tplink/button.py +++ b/homeassistant/components/tplink/button.py @@ -52,15 +52,19 @@ BUTTON_DESCRIPTIONS: Final = [ ), TPLinkButtonEntityDescription( key="pan_left", + available_fn=lambda dev: dev.is_on, ), TPLinkButtonEntityDescription( key="pan_right", + available_fn=lambda dev: dev.is_on, ), TPLinkButtonEntityDescription( key="tilt_up", + available_fn=lambda dev: dev.is_on, ), TPLinkButtonEntityDescription( key="tilt_down", + available_fn=lambda dev: dev.is_on, ), ] @@ -100,5 +104,6 @@ class TPLinkButtonEntity(CoordinatedTPLinkFeatureEntity, ButtonEntity): """Execute action.""" await self._feature.set_value(True) - def _async_update_attrs(self) -> None: + def _async_update_attrs(self) -> bool: """No need to update anything.""" + return self.entity_description.available_fn(self._device) diff --git a/homeassistant/components/tplink/camera.py b/homeassistant/components/tplink/camera.py index 8aa49cf3c93..413bb40b422 100644 --- a/homeassistant/components/tplink/camera.py +++ b/homeassistant/components/tplink/camera.py @@ -40,6 +40,7 @@ CAMERA_DESCRIPTIONS: tuple[TPLinkCameraEntityDescription, ...] 
= ( TPLinkCameraEntityDescription( key="live_view", translation_key="live_view", + available_fn=lambda dev: dev.is_on, ), ) @@ -111,9 +112,10 @@ class TPLinkCameraEntity(CoordinatedTPLinkEntity, Camera): return f"{legacy_device_id(self._device)}-{self.entity_description.key}" @callback - def _async_update_attrs(self) -> None: + def _async_update_attrs(self) -> bool: """Update the entity's attributes.""" self._attr_is_on = self._camera_module.is_on + return self.entity_description.available_fn(self._device) async def stream_source(self) -> str | None: """Return the source of the stream.""" diff --git a/homeassistant/components/tplink/climate.py b/homeassistant/components/tplink/climate.py index 75a6599959d..f53a0d093ac 100644 --- a/homeassistant/components/tplink/climate.py +++ b/homeassistant/components/tplink/climate.py @@ -113,7 +113,7 @@ class TPLinkClimateEntity(CoordinatedTPLinkEntity, ClimateEntity): await self._state_feature.set_value(False) @callback - def _async_update_attrs(self) -> None: + def _async_update_attrs(self) -> bool: """Update the entity's attributes.""" self._attr_current_temperature = cast(float | None, self._temp_feature.value) self._attr_target_temperature = cast(float | None, self._target_feature.value) @@ -131,11 +131,12 @@ class TPLinkClimateEntity(CoordinatedTPLinkEntity, ClimateEntity): self._mode_feature.value, ) self._attr_hvac_action = HVACAction.OFF - return + return True self._attr_hvac_action = STATE_TO_ACTION[ cast(ThermostatState, self._mode_feature.value) ] + return True def _get_unique_id(self) -> str: """Return unique id.""" diff --git a/homeassistant/components/tplink/entity.py b/homeassistant/components/tplink/entity.py index d7b02b80177..935857e5db1 100644 --- a/homeassistant/components/tplink/entity.py +++ b/homeassistant/components/tplink/entity.py @@ -89,6 +89,7 @@ class TPLinkFeatureEntityDescription(EntityDescription): """Base class for a TPLink feature based entity description.""" deprecated_info: DeprecatedInfo | None = None + available_fn: Callable[[Device], bool] = lambda _: True @dataclass(frozen=True, kw_only=True) @@ -96,6 +97,7 @@ class TPLinkModuleEntityDescription(EntityDescription): """Base class for a TPLink module based entity description.""" deprecated_info: DeprecatedInfo | None = None + available_fn: Callable[[Device], bool] = lambda _: True def async_refresh_after[_T: CoordinatedTPLinkEntity, **_P]( @@ -207,15 +209,18 @@ class CoordinatedTPLinkEntity(CoordinatorEntity[TPLinkDataUpdateCoordinator], AB @abstractmethod @callback - def _async_update_attrs(self) -> None: - """Platforms implement this to update the entity internals.""" + def _async_update_attrs(self) -> bool: + """Platforms implement this to update the entity internals. + + The return value is used to the set the entity available attribute. 
+ """ raise NotImplementedError @callback def _async_call_update_attrs(self) -> None: """Call update_attrs and make entity unavailable on errors.""" try: - self._async_update_attrs() + available = self._async_update_attrs() except Exception as ex: # noqa: BLE001 if self._attr_available: _LOGGER.warning( @@ -226,7 +231,7 @@ class CoordinatedTPLinkEntity(CoordinatorEntity[TPLinkDataUpdateCoordinator], AB ) self._attr_available = False else: - self._attr_available = True + self._attr_available = available @callback def _handle_coordinator_update(self) -> None: diff --git a/homeassistant/components/tplink/fan.py b/homeassistant/components/tplink/fan.py index 64ad01eb671..a1e62e4ed69 100644 --- a/homeassistant/components/tplink/fan.py +++ b/homeassistant/components/tplink/fan.py @@ -106,7 +106,7 @@ class TPLinkFanEntity(CoordinatedTPLinkEntity, FanEntity): await self.fan_module.set_fan_speed_level(value_in_range) @callback - def _async_update_attrs(self) -> None: + def _async_update_attrs(self) -> bool: """Update the entity's attributes.""" fan_speed = self.fan_module.fan_speed_level self._attr_is_on = fan_speed != 0 @@ -114,3 +114,4 @@ class TPLinkFanEntity(CoordinatedTPLinkEntity, FanEntity): self._attr_percentage = ranged_value_to_percentage(SPEED_RANGE, fan_speed) else: self._attr_percentage = None + return True diff --git a/homeassistant/components/tplink/light.py b/homeassistant/components/tplink/light.py index f3207d754f3..91e2a784af2 100644 --- a/homeassistant/components/tplink/light.py +++ b/homeassistant/components/tplink/light.py @@ -335,7 +335,7 @@ class TPLinkLightEntity(CoordinatedTPLinkEntity, LightEntity): return ColorMode.HS @callback - def _async_update_attrs(self) -> None: + def _async_update_attrs(self) -> bool: """Update the entity's attributes.""" light_module = self._light_module self._attr_is_on = light_module.state.light_on is True @@ -349,6 +349,8 @@ class TPLinkLightEntity(CoordinatedTPLinkEntity, LightEntity): hue, saturation, _ = light_module.hsv self._attr_hs_color = hue, saturation + return True + class TPLinkLightEffectEntity(TPLinkLightEntity): """Representation of a TPLink Smart Light Strip.""" @@ -368,7 +370,7 @@ class TPLinkLightEffectEntity(TPLinkLightEntity): _attr_supported_features = LightEntityFeature.TRANSITION | LightEntityFeature.EFFECT @callback - def _async_update_attrs(self) -> None: + def _async_update_attrs(self) -> bool: """Update the entity's attributes.""" super()._async_update_attrs() effect_module = self._effect_module @@ -381,6 +383,7 @@ class TPLinkLightEffectEntity(TPLinkLightEntity): self._attr_effect_list = effect_list else: self._attr_effect_list = None + return True @async_refresh_after async def async_turn_on(self, **kwargs: Any) -> None: diff --git a/homeassistant/components/tplink/number.py b/homeassistant/components/tplink/number.py index 489805029ea..3f7fa9c3e0f 100644 --- a/homeassistant/components/tplink/number.py +++ b/homeassistant/components/tplink/number.py @@ -114,6 +114,7 @@ class TPLinkNumberEntity(CoordinatedTPLinkFeatureEntity, NumberEntity): await self._feature.set_value(int(value)) @callback - def _async_update_attrs(self) -> None: + def _async_update_attrs(self) -> bool: """Update the entity's attributes.""" self._attr_native_value = cast(float | None, self._feature.value) + return True diff --git a/homeassistant/components/tplink/select.py b/homeassistant/components/tplink/select.py index 3755a1d0be2..5dd8e54fca8 100644 --- a/homeassistant/components/tplink/select.py +++ b/homeassistant/components/tplink/select.py 
@@ -91,6 +91,7 @@ class TPLinkSelectEntity(CoordinatedTPLinkFeatureEntity, SelectEntity): await self._feature.set_value(option) @callback - def _async_update_attrs(self) -> None: + def _async_update_attrs(self) -> bool: """Update the entity's attributes.""" self._attr_current_option = cast(str | None, self._feature.value) + return True diff --git a/homeassistant/components/tplink/sensor.py b/homeassistant/components/tplink/sensor.py index 8b7351f8d7d..da4bf72122d 100644 --- a/homeassistant/components/tplink/sensor.py +++ b/homeassistant/components/tplink/sensor.py @@ -153,7 +153,7 @@ class TPLinkSensorEntity(CoordinatedTPLinkFeatureEntity, SensorEntity): entity_description: TPLinkSensorEntityDescription @callback - def _async_update_attrs(self) -> None: + def _async_update_attrs(self) -> bool: """Update the entity's attributes.""" value = self._feature.value if value is not None and self._feature.precision_hint is not None: @@ -171,3 +171,4 @@ class TPLinkSensorEntity(CoordinatedTPLinkFeatureEntity, SensorEntity): # Map to homeassistant units and fallback to upstream one if none found if (unit := self._feature.unit) is not None: self._attr_native_unit_of_measurement = UNIT_MAPPING.get(unit, unit) + return True diff --git a/homeassistant/components/tplink/siren.py b/homeassistant/components/tplink/siren.py index c4ece56f0f6..141ea696358 100644 --- a/homeassistant/components/tplink/siren.py +++ b/homeassistant/components/tplink/siren.py @@ -56,6 +56,7 @@ class TPLinkSirenEntity(CoordinatedTPLinkEntity, SirenEntity): await self._alarm_module.stop() @callback - def _async_update_attrs(self) -> None: + def _async_update_attrs(self) -> bool: """Update the entity's attributes.""" self._attr_is_on = self._alarm_module.active + return True diff --git a/homeassistant/components/tplink/switch.py b/homeassistant/components/tplink/switch.py index 28dedc7e7a1..7a879fb3c70 100644 --- a/homeassistant/components/tplink/switch.py +++ b/homeassistant/components/tplink/switch.py @@ -109,6 +109,7 @@ class TPLinkSwitch(CoordinatedTPLinkFeatureEntity, SwitchEntity): await self._feature.set_value(False) @callback - def _async_update_attrs(self) -> None: + def _async_update_attrs(self) -> bool: """Update the entity's attributes.""" self._attr_is_on = cast(bool | None, self._feature.value) + return True From abe00884eabadee06480a0e8c5fc5a136005d76a Mon Sep 17 00:00:00 2001 From: "Steven B." 
<51370195+sdb9696@users.noreply.github.com> Date: Mon, 23 Dec 2024 15:37:42 +0000 Subject: [PATCH 1073/1198] Use SD stream for tplink mpeg stream (#133879) --- homeassistant/components/tplink/camera.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/tplink/camera.py b/homeassistant/components/tplink/camera.py index 413bb40b422..4a6859a8414 100644 --- a/homeassistant/components/tplink/camera.py +++ b/homeassistant/components/tplink/camera.py @@ -7,7 +7,7 @@ import time from aiohttp import web from haffmpeg.camera import CameraMjpeg -from kasa import Credentials, Device, Module +from kasa import Credentials, Device, Module, StreamResolution from kasa.smartcam.modules import Camera as CameraModule from homeassistant.components import ffmpeg, stream @@ -96,7 +96,9 @@ class TPLinkCameraEntity(CoordinatedTPLinkEntity, Camera): """Initialize a TPlink camera.""" self.entity_description = description self._camera_module = camera_module - self._video_url = camera_module.stream_rtsp_url(camera_credentials) + self._video_url = camera_module.stream_rtsp_url( + camera_credentials, stream_resolution=StreamResolution.SD + ) self._image: bytes | None = None super().__init__(device, coordinator, parent=parent) Camera.__init__(self) @@ -119,7 +121,9 @@ class TPLinkCameraEntity(CoordinatedTPLinkEntity, Camera): async def stream_source(self) -> str | None: """Return the source of the stream.""" - return self._video_url + return self._camera_module.stream_rtsp_url( + self._camera_credentials, stream_resolution=StreamResolution.HD + ) async def _async_check_stream_auth(self, video_url: str) -> None: """Check for an auth error and start reauth flow.""" @@ -150,7 +154,7 @@ class TPLinkCameraEntity(CoordinatedTPLinkEntity, Camera): return self._image # Don't try to capture a new image if a stream is running - if (self.stream and self.stream.available) or self._http_mpeg_stream_running: + if self._http_mpeg_stream_running: return self._image if self._can_stream and (video_url := self._video_url): From 6cbc803b2835d769b7da6d7ce598855b5f35fdfe Mon Sep 17 00:00:00 2001 From: Duco Sebel <74970928+DCSBL@users.noreply.github.com> Date: Mon, 23 Dec 2024 16:38:34 +0100 Subject: [PATCH 1074/1198] Streamline Peblar translations (#133883) --- homeassistant/components/peblar/strings.json | 18 +++++++++--------- tests/components/peblar/test_binary_sensor.py | 2 +- tests/components/peblar/test_button.py | 8 ++++---- tests/components/peblar/test_coordinator.py | 4 ++-- tests/components/peblar/test_number.py | 8 ++++---- tests/components/peblar/test_select.py | 8 ++++---- tests/components/peblar/test_sensor.py | 2 +- tests/components/peblar/test_switch.py | 8 ++++---- tests/components/peblar/test_update.py | 2 +- 9 files changed, 30 insertions(+), 30 deletions(-) diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index a6fa3acf457..f6a228ca236 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -20,7 +20,7 @@ "data_description": { "password": "[%key:component::peblar::config::step::user::data_description::password%]" }, - "description": "Reauthenticate with your Peblar RV charger.\n\nTo do so, you will need to enter your new password you use to log into Peblar's device web interface." + "description": "Reauthenticate with your Peblar EV charger.\n\nTo do so, you will need to enter your new password you use to log into Peblar EV charger' web interface." 
}, "reconfigure": { "data": { @@ -31,7 +31,7 @@ "host": "[%key:component::peblar::config::step::user::data_description::host%]", "password": "[%key:component::peblar::config::step::user::data_description::password%]" }, - "description": "Reconfigure your Peblar EV charger.\n\nThis allows you to change the IP address of your Peblar charger and the password you use to log into the Peblar device' web interface." + "description": "Reconfigure your Peblar EV charger.\n\nThis allows you to change the IP address of your Peblar EV charger and the password you use to log into its web interface." }, "user": { "data": { @@ -39,10 +39,10 @@ "password": "[%key:common::config_flow::data::password%]" }, "data_description": { - "host": "The hostname or IP address of your Peblar charger on your home network.", - "password": "The same password as you use to log in to the Peblar device' local web interface." + "host": "The hostname or IP address of your Peblar EV charger on your home network.", + "password": "The same password as you use to log in to the Peblar EV charger' local web interface." }, - "description": "Set up your Peblar EV charger to integrate with Home Assistant.\n\nTo do so, you will need to get the IP address of your Peblar charger and the password you use to log into the Peblar device' web interface.\n\nHome Assistant will automatically configure your Peblar charger for use with Home Assistant." + "description": "Set up your Peblar EV charger to integrate with Home Assistant.\n\nTo do so, you will need to get the IP address of your Peblar EV charger and the password you use to log into its web interface.\n\nHome Assistant will automatically configure your Peblar EV charger for use with Home Assistant." }, "zeroconf_confirm": { "data": { @@ -51,7 +51,7 @@ "data_description": { "password": "[%key:component::peblar::config::step::user::data_description::password%]" }, - "description": "Set up your Peblar EV charger to integrate with Home Assistant.\n\nTo do so, you will need the password you use to log into the Peblar device' web interface.\n\nHome Assistant will automatically configure your Peblar charger for use with Home Assistant." + "description": "Set up your Peblar EV charger to integrate with Home Assistant.\n\nTo do so, you will need the password you use to log into the Peblar EV charger' web interface.\n\nHome Assistant will automatically configure your Peblar EV charger for use with Home Assistant." } } }, @@ -164,13 +164,13 @@ }, "exceptions": { "authentication_error": { - "message": "An authentication failure occurred while communicating with the Peblar device." + "message": "An authentication failure occurred while communicating with the Peblar EV charger." 
}, "communication_error": { - "message": "An error occurred while communicating with the Peblar device: {error}" + "message": "An error occurred while communicating with the Peblar EV charger: {error}" }, "unknown_error": { - "message": "An unknown error occurred while communicating with the Peblar device: {error}" + "message": "An unknown error occurred while communicating with the Peblar EV charger: {error}" } } } diff --git a/tests/components/peblar/test_binary_sensor.py b/tests/components/peblar/test_binary_sensor.py index 670b5b67145..affcde483ea 100644 --- a/tests/components/peblar/test_binary_sensor.py +++ b/tests/components/peblar/test_binary_sensor.py @@ -23,7 +23,7 @@ async def test_entities( """Test the binary sensors entities.""" await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - # Ensure all entities are correctly assigned to the Peblar device + # Ensure all entities are correctly assigned to the Peblar EV charger device_entry = device_registry.async_get_device( identifiers={(DOMAIN, "23-45-A4O-MOF")} ) diff --git a/tests/components/peblar/test_button.py b/tests/components/peblar/test_button.py index e9ab377db67..a47f190a941 100644 --- a/tests/components/peblar/test_button.py +++ b/tests/components/peblar/test_button.py @@ -34,7 +34,7 @@ async def test_entities( """Test the button entities.""" await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - # Ensure all entities are correctly assigned to the Peblar device + # Ensure all entities are correctly assigned to the Peblar EV charger device_entry = device_registry.async_get_device( identifiers={(DOMAIN, "23-45-A4O-MOF")} ) @@ -81,7 +81,7 @@ async def test_buttons( HomeAssistantError, match=( r"An error occurred while communicating " - r"with the Peblar device: Could not connect" + r"with the Peblar EV charger: Could not connect" ), ) as excinfo: await hass.services.async_call( @@ -101,7 +101,7 @@ async def test_buttons( HomeAssistantError, match=( r"An unknown error occurred while communicating " - r"with the Peblar device: Unknown error" + r"with the Peblar EV charger: Unknown error" ), ) as excinfo: await hass.services.async_call( @@ -122,7 +122,7 @@ async def test_buttons( HomeAssistantError, match=( r"An authentication failure occurred while communicating " - r"with the Peblar device" + r"with the Peblar EV charger" ), ) as excinfo: await hass.services.async_call( diff --git a/tests/components/peblar/test_coordinator.py b/tests/components/peblar/test_coordinator.py index f438d807920..7f073af9554 100644 --- a/tests/components/peblar/test_coordinator.py +++ b/tests/components/peblar/test_coordinator.py @@ -26,7 +26,7 @@ pytestmark = [ ( PeblarConnectionError("Could not connect"), ( - "An error occurred while communicating with the Peblar device: " + "An error occurred while communicating with the Peblar EV charger: " "Could not connect" ), ), @@ -34,7 +34,7 @@ pytestmark = [ PeblarError("Unknown error"), ( "An unknown error occurred while communicating " - "with the Peblar device: Unknown error" + "with the Peblar EV charger: Unknown error" ), ), ], diff --git a/tests/components/peblar/test_number.py b/tests/components/peblar/test_number.py index 2a8fca46e91..57469fecbc6 100644 --- a/tests/components/peblar/test_number.py +++ b/tests/components/peblar/test_number.py @@ -36,7 +36,7 @@ async def test_entities( """Test the number entities.""" await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - # Ensure all entities are 
correctly assigned to the Peblar device + # Ensure all entities are correctly assigned to the Peblar EV charger device_entry = device_registry.async_get_device( identifiers={(DOMAIN, "23-45-A4O-MOF")} ) @@ -80,7 +80,7 @@ async def test_number_set_value( PeblarConnectionError("Could not connect"), ( r"An error occurred while communicating " - r"with the Peblar device: Could not connect" + r"with the Peblar EV charger: Could not connect" ), "communication_error", {"error": "Could not connect"}, @@ -89,7 +89,7 @@ async def test_number_set_value( PeblarError("Unknown error"), ( r"An unknown error occurred while communicating " - r"with the Peblar device: Unknown error" + r"with the Peblar EV charger: Unknown error" ), "unknown_error", {"error": "Unknown error"}, @@ -143,7 +143,7 @@ async def test_number_set_value_authentication_error( HomeAssistantError, match=( r"An authentication failure occurred while communicating " - r"with the Peblar device" + r"with the Peblar EV charger" ), ) as excinfo: await hass.services.async_call( diff --git a/tests/components/peblar/test_select.py b/tests/components/peblar/test_select.py index 5e4ab4609d4..be7e182dc39 100644 --- a/tests/components/peblar/test_select.py +++ b/tests/components/peblar/test_select.py @@ -41,7 +41,7 @@ async def test_entities( """Test the select entities.""" await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - # Ensure all entities are correctly assigned to the Peblar device + # Ensure all entities are correctly assigned to the Peblar EV charger device_entry = device_registry.async_get_device( identifiers={(DOMAIN, "23-45-A4O-MOF")} ) @@ -85,7 +85,7 @@ async def test_select_option( PeblarConnectionError("Could not connect"), ( r"An error occurred while communicating " - r"with the Peblar device: Could not connect" + r"with the Peblar EV charger: Could not connect" ), "communication_error", {"error": "Could not connect"}, @@ -94,7 +94,7 @@ async def test_select_option( PeblarError("Unknown error"), ( r"An unknown error occurred while communicating " - r"with the Peblar device: Unknown error" + r"with the Peblar EV charger: Unknown error" ), "unknown_error", {"error": "Unknown error"}, @@ -150,7 +150,7 @@ async def test_select_option_authentication_error( HomeAssistantError, match=( r"An authentication failure occurred while communicating " - r"with the Peblar device" + r"with the Peblar EV charger" ), ) as excinfo: await hass.services.async_call( diff --git a/tests/components/peblar/test_sensor.py b/tests/components/peblar/test_sensor.py index bad81486838..d689e66e944 100644 --- a/tests/components/peblar/test_sensor.py +++ b/tests/components/peblar/test_sensor.py @@ -24,7 +24,7 @@ async def test_entities( """Test the sensor entities.""" await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - # Ensure all entities are correctly assigned to the Peblar device + # Ensure all entities are correctly assigned to the Peblar EV charger device_entry = device_registry.async_get_device( identifiers={(DOMAIN, "23-45-A4O-MOF")} ) diff --git a/tests/components/peblar/test_switch.py b/tests/components/peblar/test_switch.py index 6436ac78109..75deeb2d5d3 100644 --- a/tests/components/peblar/test_switch.py +++ b/tests/components/peblar/test_switch.py @@ -36,7 +36,7 @@ async def test_entities( """Test the switch entities.""" await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - # Ensure all entities are correctly assigned to the Peblar device + # Ensure all 
entities are correctly assigned to the Peblar EV charger device_entry = device_registry.async_get_device( identifiers={(DOMAIN, "23-45-A4O-MOF")} ) @@ -88,7 +88,7 @@ async def test_switch( PeblarConnectionError("Could not connect"), ( r"An error occurred while communicating " - r"with the Peblar device: Could not connect" + r"with the Peblar EV charger: Could not connect" ), "communication_error", {"error": "Could not connect"}, @@ -97,7 +97,7 @@ async def test_switch( PeblarError("Unknown error"), ( r"An unknown error occurred while communicating " - r"with the Peblar device: Unknown error" + r"with the Peblar EV charger: Unknown error" ), "unknown_error", {"error": "Unknown error"}, @@ -152,7 +152,7 @@ async def test_switch_authentication_error( HomeAssistantError, match=( r"An authentication failure occurred while communicating " - r"with the Peblar device" + r"with the Peblar EV charger" ), ) as excinfo: await hass.services.async_call( diff --git a/tests/components/peblar/test_update.py b/tests/components/peblar/test_update.py index 7a772fbe96c..54eb77abc24 100644 --- a/tests/components/peblar/test_update.py +++ b/tests/components/peblar/test_update.py @@ -23,7 +23,7 @@ async def test_entities( """Test the update entities.""" await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - # Ensure all entities are correctly assigned to the Peblar device + # Ensure all entities are correctly assigned to the Peblar EV charger device_entry = device_registry.async_get_device( identifiers={(DOMAIN, "23-45-A4O-MOF")} ) From bbb5f9e7173debcd3b412cef41e09ab8df56369b Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Mon, 23 Dec 2024 16:40:38 +0100 Subject: [PATCH 1075/1198] Preload supported color properties in fritzbox lights (#133798) --- .../components/fritzbox/coordinator.py | 17 ++++++- homeassistant/components/fritzbox/light.py | 48 ++++++++----------- 2 files changed, 35 insertions(+), 30 deletions(-) diff --git a/homeassistant/components/fritzbox/coordinator.py b/homeassistant/components/fritzbox/coordinator.py index 52fa3ba1a12..a6a30ffdc6a 100644 --- a/homeassistant/components/fritzbox/coordinator.py +++ b/homeassistant/components/fritzbox/coordinator.py @@ -27,6 +27,7 @@ class FritzboxCoordinatorData: devices: dict[str, FritzhomeDevice] templates: dict[str, FritzhomeTemplate] + supported_color_properties: dict[str, tuple[dict, list]] class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorData]): @@ -49,7 +50,7 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat self.new_devices: set[str] = set() self.new_templates: set[str] = set() - self.data = FritzboxCoordinatorData({}, {}) + self.data = FritzboxCoordinatorData({}, {}, {}) async def async_setup(self) -> None: """Set up the coordinator.""" @@ -120,6 +121,7 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat devices = self.fritz.get_devices() device_data = {} + supported_color_properties = self.data.supported_color_properties for device in devices: # assume device as unavailable, see #55799 if ( @@ -136,6 +138,13 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat device_data[device.ain] = device + # pre-load supported colors and color temps for new devices + if device.has_color and device.ain not in supported_color_properties: + supported_color_properties[device.ain] = ( + device.get_colors(), + device.get_color_temps(), + ) + template_data = {} if 
self.has_templates: templates = self.fritz.get_templates() @@ -145,7 +154,11 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat self.new_devices = device_data.keys() - self.data.devices.keys() self.new_templates = template_data.keys() - self.data.templates.keys() - return FritzboxCoordinatorData(devices=device_data, templates=template_data) + return FritzboxCoordinatorData( + devices=device_data, + templates=template_data, + supported_color_properties=supported_color_properties, + ) async def _async_update_data(self) -> FritzboxCoordinatorData: """Fetch all device data.""" diff --git a/homeassistant/components/fritzbox/light.py b/homeassistant/components/fritzbox/light.py index d347f6898c0..36cb7dc8cff 100644 --- a/homeassistant/components/fritzbox/light.py +++ b/homeassistant/components/fritzbox/light.py @@ -57,7 +57,6 @@ class FritzboxLight(FritzBoxDeviceEntity, LightEntity): ) -> None: """Initialize the FritzboxLight entity.""" super().__init__(coordinator, ain, None) - self._supported_hs: dict[int, list[int]] = {} self._attr_supported_color_modes = {ColorMode.ONOFF} if self.data.has_color: @@ -65,6 +64,26 @@ class FritzboxLight(FritzBoxDeviceEntity, LightEntity): elif self.data.has_level: self._attr_supported_color_modes = {ColorMode.BRIGHTNESS} + (supported_colors, supported_color_temps) = ( + coordinator.data.supported_color_properties.get(self.data.ain, ({}, [])) + ) + + # Fritz!DECT 500 only supports 12 values for hue, with 3 saturations each. + # Map supported colors to dict {hue: [sat1, sat2, sat3]} for easier lookup + self._supported_hs: dict[int, list[int]] = {} + for values in supported_colors.values(): + hue = int(values[0][0]) + self._supported_hs[hue] = [ + int(values[0][1]), + int(values[1][1]), + int(values[2][1]), + ] + + if supported_color_temps: + # only available for color bulbs + self._attr_max_color_temp_kelvin = int(max(supported_color_temps)) + self._attr_min_color_temp_kelvin = int(min(supported_color_temps)) + @property def is_on(self) -> bool: """If the light is currently on or off.""" @@ -148,30 +167,3 @@ class FritzboxLight(FritzBoxDeviceEntity, LightEntity): """Turn the light off.""" await self.hass.async_add_executor_job(self.data.set_state_off) await self.coordinator.async_refresh() - - async def async_added_to_hass(self) -> None: - """Get light attributes from device after entity is added to hass.""" - await super().async_added_to_hass() - - def _get_color_data() -> tuple[dict, list]: - return (self.data.get_colors(), self.data.get_color_temps()) - - ( - supported_colors, - supported_color_temps, - ) = await self.hass.async_add_executor_job(_get_color_data) - - if supported_color_temps: - # only available for color bulbs - self._attr_max_color_temp_kelvin = int(max(supported_color_temps)) - self._attr_min_color_temp_kelvin = int(min(supported_color_temps)) - - # Fritz!DECT 500 only supports 12 values for hue, with 3 saturations each. 
- # Map supported colors to dict {hue: [sat1, sat2, sat3]} for easier lookup - for values in supported_colors.values(): - hue = int(values[0][0]) - self._supported_hs[hue] = [ - int(values[0][1]), - int(values[1][1]), - int(values[2][1]), - ] From c2f6e5036e1fbf8eba666378fd8142db45bf11ac Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Mon, 23 Dec 2024 15:56:12 +0000 Subject: [PATCH 1076/1198] Bump version to 2025.1.0b0 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index eed8d73a4ee..6cdb7f5fb07 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -25,7 +25,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2025 MINOR_VERSION: Final = 1 -PATCH_VERSION: Final = "0.dev0" +PATCH_VERSION: Final = "0b0" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 369f6f40921..8c66e5a3bdd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2025.1.0.dev0" +version = "2025.1.0b0" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" From 7ce563b0b4b964cb2259fb72d5ee33d4f3ef3903 Mon Sep 17 00:00:00 2001 From: Thomas55555 <59625598+Thomas55555@users.noreply.github.com> Date: Mon, 23 Dec 2024 22:49:59 +0100 Subject: [PATCH 1077/1198] Catch ClientConnectorError and TimeOutError in APSystems (#132027) --- homeassistant/components/apsystems/number.py | 10 +++++++++- tests/components/apsystems/test_number.py | 13 ++++++++++++- 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/apsystems/number.py b/homeassistant/components/apsystems/number.py index 6463d10f3e8..b5ed60a7754 100644 --- a/homeassistant/components/apsystems/number.py +++ b/homeassistant/components/apsystems/number.py @@ -2,6 +2,8 @@ from __future__ import annotations +from aiohttp import ClientConnectorError + from homeassistant.components.number import NumberDeviceClass, NumberEntity, NumberMode from homeassistant.const import UnitOfPower from homeassistant.core import HomeAssistant @@ -45,7 +47,13 @@ class ApSystemsMaxOutputNumber(ApSystemsEntity, NumberEntity): async def async_update(self) -> None: """Set the state with the value fetched from the inverter.""" - self._attr_native_value = await self._api.get_max_power() + try: + status = await self._api.get_max_power() + except (TimeoutError, ClientConnectorError): + self._attr_available = False + else: + self._attr_available = True + self._attr_native_value = status async def async_set_native_value(self, value: float) -> None: """Set the desired output power.""" diff --git a/tests/components/apsystems/test_number.py b/tests/components/apsystems/test_number.py index 5868bd3da34..912759b4a17 100644 --- a/tests/components/apsystems/test_number.py +++ b/tests/components/apsystems/test_number.py @@ -12,7 +12,7 @@ from homeassistant.components.number import ( DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE, ) -from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -46,6 
+46,17 @@ async def test_number( await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == "50" + mock_apsystems.get_max_power.side_effect = TimeoutError() + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + service_data={ATTR_VALUE: 50.1}, + target={ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.state == STATE_UNAVAILABLE @pytest.mark.usefixtures("mock_apsystems") From bb371c87d55383583238a0c0b9cde1178470cc77 Mon Sep 17 00:00:00 2001 From: karwosts <32912880+karwosts@users.noreply.github.com> Date: Mon, 23 Dec 2024 13:47:26 -0800 Subject: [PATCH 1078/1198] Fix a history stats bug when window and tracked state change simultaneously (#133770) --- .../components/history_stats/data.py | 14 ++- tests/components/history_stats/test_sensor.py | 99 +++++++++++++++++++ 2 files changed, 110 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/history_stats/data.py b/homeassistant/components/history_stats/data.py index f9b79d74cb4..83528b73f6f 100644 --- a/homeassistant/components/history_stats/data.py +++ b/homeassistant/components/history_stats/data.py @@ -118,9 +118,7 @@ class HistoryStats: <= current_period_end_timestamp ): self._history_current_period.append( - HistoryState( - new_state.state, new_state.last_changed.timestamp() - ) + HistoryState(new_state.state, new_state.last_changed_timestamp) ) new_data = True if not new_data and current_period_end_timestamp < now_timestamp: @@ -131,6 +129,16 @@ class HistoryStats: await self._async_history_from_db( current_period_start_timestamp, current_period_end_timestamp ) + if event and (new_state := event.data["new_state"]) is not None: + if ( + current_period_start_timestamp + <= floored_timestamp(new_state.last_changed) + <= current_period_end_timestamp + ): + self._history_current_period.append( + HistoryState(new_state.state, new_state.last_changed_timestamp) + ) + self._previous_run_before_start = False seconds_matched, match_count = self._async_compute_seconds_and_changes( diff --git a/tests/components/history_stats/test_sensor.py b/tests/components/history_stats/test_sensor.py index d60203676e6..3039612d1a0 100644 --- a/tests/components/history_stats/test_sensor.py +++ b/tests/components/history_stats/test_sensor.py @@ -1465,6 +1465,105 @@ async def test_measure_cet(recorder_mock: Recorder, hass: HomeAssistant) -> None assert hass.states.get("sensor.sensor4").state == "50.0" +async def test_state_change_during_window_rollover( + recorder_mock: Recorder, + hass: HomeAssistant, +) -> None: + """Test when the tracked sensor and the start/end window change during the same update.""" + await hass.config.async_set_time_zone("UTC") + utcnow = dt_util.utcnow() + start_time = utcnow.replace(hour=23, minute=0, second=0, microsecond=0) + + def _fake_states(*args, **kwargs): + return { + "binary_sensor.state": [ + ha.State( + "binary_sensor.state", + "on", + last_changed=start_time - timedelta(hours=11), + last_updated=start_time - timedelta(hours=11), + ), + ] + } + + # The test begins at 23:00, and queries from the database that the sensor has been on since 12:00. 
+ with ( + patch( + "homeassistant.components.recorder.history.state_changes_during_period", + _fake_states, + ), + freeze_time(start_time), + ): + await async_setup_component( + hass, + "sensor", + { + "sensor": [ + { + "platform": "history_stats", + "entity_id": "binary_sensor.state", + "name": "sensor1", + "state": "on", + "start": "{{ today_at() }}", + "end": "{{ now() }}", + "type": "time", + } + ] + }, + ) + await hass.async_block_till_done() + + await async_update_entity(hass, "sensor.sensor1") + await hass.async_block_till_done() + + assert hass.states.get("sensor.sensor1").state == "11.0" + + # Advance 59 minutes, to record the last minute update just before midnight, just like a real system would do. + t2 = start_time + timedelta(minutes=59, microseconds=300) + with freeze_time(t2): + async_fire_time_changed(hass, t2) + await hass.async_block_till_done() + + assert hass.states.get("sensor.sensor1").state == "11.98" + + # One minute has passed and the time has now rolled over into a new day, resetting the recorder window. The sensor will then query the database for updates, + # and will see that the sensor is ON starting from midnight. + t3 = t2 + timedelta(minutes=1) + + def _fake_states_t3(*args, **kwargs): + return { + "binary_sensor.state": [ + ha.State( + "binary_sensor.state", + "on", + last_changed=t3.replace(hour=0, minute=0, second=0, microsecond=0), + last_updated=t3.replace(hour=0, minute=0, second=0, microsecond=0), + ), + ] + } + + with ( + patch( + "homeassistant.components.recorder.history.state_changes_during_period", + _fake_states_t3, + ), + freeze_time(t3), + ): + # The sensor turns off around this time, before the sensor does its normal polled update. + hass.states.async_set("binary_sensor.state", "off") + await hass.async_block_till_done(wait_background_tasks=True) + + assert hass.states.get("sensor.sensor1").state == "0.0" + + # More time passes, and the history stats does a polled update again. It should be 0 since the sensor has been off since midnight. 
+ t4 = t3 + timedelta(minutes=10) + with freeze_time(t4): + async_fire_time_changed(hass, t4) + await hass.async_block_till_done() + + assert hass.states.get("sensor.sensor1").state == "0.0" + + @pytest.mark.parametrize("time_zone", ["Europe/Berlin", "America/Chicago", "US/Hawaii"]) async def test_end_time_with_microseconds_zeroed( time_zone: str, From 80955ba82188e07f1b28b37dc51a22aca9f0b634 Mon Sep 17 00:00:00 2001 From: Jordi Date: Tue, 24 Dec 2024 08:01:50 +0100 Subject: [PATCH 1079/1198] Add Harvey virtual integration (#133874) Add harvey virtual integration --- homeassistant/components/harvey/__init__.py | 1 + homeassistant/components/harvey/manifest.json | 6 ++++++ homeassistant/generated/integrations.json | 5 +++++ 3 files changed, 12 insertions(+) create mode 100644 homeassistant/components/harvey/__init__.py create mode 100644 homeassistant/components/harvey/manifest.json diff --git a/homeassistant/components/harvey/__init__.py b/homeassistant/components/harvey/__init__.py new file mode 100644 index 00000000000..e40d1799a64 --- /dev/null +++ b/homeassistant/components/harvey/__init__.py @@ -0,0 +1 @@ +"""Virtual integration: Harvey.""" diff --git a/homeassistant/components/harvey/manifest.json b/homeassistant/components/harvey/manifest.json new file mode 100644 index 00000000000..3cb2a1b9aff --- /dev/null +++ b/homeassistant/components/harvey/manifest.json @@ -0,0 +1,6 @@ +{ + "domain": "harvey", + "name": "Harvey", + "integration_type": "virtual", + "supported_by": "aquacell" +} diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index ad4af2f024c..005fb7f694f 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -2475,6 +2475,11 @@ "config_flow": false, "iot_class": "local_polling" }, + "harvey": { + "name": "Harvey", + "integration_type": "virtual", + "supported_by": "aquacell" + }, "hassio": { "name": "Home Assistant Supervisor", "integration_type": "hub", From efabb82cb6c5907903b6fbb73e6158548d01b5d1 Mon Sep 17 00:00:00 2001 From: Martin Mrazik Date: Mon, 23 Dec 2024 22:26:38 +0100 Subject: [PATCH 1080/1198] Map RGB+CCT to RGB for WLED (#133900) --- homeassistant/components/wled/const.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/wled/const.py b/homeassistant/components/wled/const.py index 69ff6ccb1fa..8d09867a46e 100644 --- a/homeassistant/components/wled/const.py +++ b/homeassistant/components/wled/const.py @@ -53,7 +53,9 @@ LIGHT_CAPABILITIES_COLOR_MODE_MAPPING: dict[LightCapability, list[ColorMode]] = ColorMode.COLOR_TEMP, ], LightCapability.RGB_COLOR | LightCapability.COLOR_TEMPERATURE: [ - ColorMode.RGBWW, + # Technically this is RGBWW but wled does not support RGBWW colors (with warm and cold white separately) + # but rather RGB + CCT which does not have a direct mapping in HA + ColorMode.RGB, ], LightCapability.WHITE_CHANNEL | LightCapability.COLOR_TEMPERATURE: [ ColorMode.COLOR_TEMP, From 2b8240746a760b210ca73b1346bc7b9146c118c5 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 23 Dec 2024 12:38:59 -1000 Subject: [PATCH 1081/1198] Sort integration platforms preload list (#133905) * Sort integration platforms preload list https://github.com/home-assistant/core/pull/133856#discussion_r1895385026 * sort * Sort them all --------- Co-authored-by: Franck Nijhof --- homeassistant/loader.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/loader.py b/homeassistant/loader.py index 78c89b94765..93dc7677bba 100644 --- a/homeassistant/loader.py +++ b/homeassistant/loader.py @@ -65,20 +65,20 @@ _LOGGER = logging.getLogger(__name__) # This list can be extended by calling async_register_preload_platform # BASE_PRELOAD_PLATFORMS = [ + "backup", "config", "config_flow", "diagnostics", "energy", "group", - "logbook", "hardware", "intent", + "logbook", "media_source", "recorder", "repairs", "system_health", "trigger", - "backup", ] From bed186cce4cf5c66151cf7e855789a984525ec5d Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 23 Dec 2024 11:19:28 -1000 Subject: [PATCH 1082/1198] Ensure cloud and recorder backup platforms do not have to wait for the import executor (#133907) * Ensure cloud and recorder backup platforms do not have to wait for the import executor partially fixes #133904 * backup.backup as well --- homeassistant/components/backup/__init__.py | 4 ++++ homeassistant/components/cloud/__init__.py | 9 ++++++++- homeassistant/components/recorder/__init__.py | 9 ++++++++- 3 files changed, 20 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/backup/__init__.py b/homeassistant/components/backup/__init__.py index f1a6f3be196..ab324a44e3b 100644 --- a/homeassistant/components/backup/__init__.py +++ b/homeassistant/components/backup/__init__.py @@ -5,6 +5,10 @@ from homeassistant.helpers import config_validation as cv from homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.typing import ConfigType +# Pre-import backup to avoid it being imported +# later when the import executor is busy and delaying +# startup +from . import backup # noqa: F401 from .agent import ( BackupAgent, BackupAgentError, diff --git a/homeassistant/components/cloud/__init__.py b/homeassistant/components/cloud/__init__.py index 80c02571d24..80b00237fd3 100644 --- a/homeassistant/components/cloud/__init__.py +++ b/homeassistant/components/cloud/__init__.py @@ -36,7 +36,14 @@ from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass from homeassistant.util.signal_type import SignalType -from . import account_link, http_api +# Pre-import backup to avoid it being imported +# later when the import executor is busy and delaying +# startup +from . import ( + account_link, + backup, # noqa: F401 + http_api, +) from .client import CloudClient from .const import ( CONF_ACCOUNT_LINK_SERVER, diff --git a/homeassistant/components/recorder/__init__.py b/homeassistant/components/recorder/__init__.py index 8564827d839..a40760c67f4 100644 --- a/homeassistant/components/recorder/__init__.py +++ b/homeassistant/components/recorder/__init__.py @@ -28,7 +28,14 @@ from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass from homeassistant.util.event_type import EventType -from . import entity_registry, websocket_api +# Pre-import backup to avoid it being imported +# later when the import executor is busy and delaying +# startup +from . 
import ( + backup, # noqa: F401 + entity_registry, + websocket_api, +) from .const import ( # noqa: F401 CONF_DB_INTEGRITY_CHECK, DOMAIN, From d3666ecf8a894fb90e8a450970b58f08b6fd776a Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 23 Dec 2024 09:20:44 -1000 Subject: [PATCH 1083/1198] Fix duplicate call to async_register_preload_platform (#133909) --- homeassistant/helpers/integration_platform.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/homeassistant/helpers/integration_platform.py b/homeassistant/helpers/integration_platform.py index a3eb19657e8..4ded7444989 100644 --- a/homeassistant/helpers/integration_platform.py +++ b/homeassistant/helpers/integration_platform.py @@ -175,6 +175,9 @@ async def async_process_integration_platforms( else: integration_platforms = hass.data[DATA_INTEGRATION_PLATFORMS] + # Tell the loader that it should try to pre-load the integration + # for any future components that are loaded so we can reduce the + # amount of import executor usage. async_register_preload_platform(hass, platform_name) top_level_components = hass.config.top_level_components.copy() process_job = HassJob( @@ -187,10 +190,6 @@ async def async_process_integration_platforms( integration_platform = IntegrationPlatform( platform_name, process_job, top_level_components ) - # Tell the loader that it should try to pre-load the integration - # for any future components that are loaded so we can reduce the - # amount of import executor usage. - async_register_preload_platform(hass, platform_name) integration_platforms.append(integration_platform) if not top_level_components: return From 657e5b73b6fdd1f4ec0607761747432817104717 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ab=C3=ADlio=20Costa?= Date: Mon, 23 Dec 2024 20:34:36 +0000 Subject: [PATCH 1084/1198] Add cronsim to default dependencies (#133913) --- homeassistant/package_constraints.txt | 1 + pyproject.toml | 1 + requirements.txt | 1 + 3 files changed, 3 insertions(+) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index f46248d2e1c..b88fef0f64f 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -25,6 +25,7 @@ bluetooth-data-tools==1.20.0 cached-ipaddress==0.8.0 certifi>=2021.5.30 ciso8601==2.3.2 +cronsim==2.6 cryptography==44.0.0 dbus-fast==2.24.3 fnv-hash-fast==1.0.2 diff --git a/pyproject.toml b/pyproject.toml index 8c66e5a3bdd..3e432b6e8ad 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,6 +42,7 @@ dependencies = [ "bcrypt==4.2.0", "certifi>=2021.5.30", "ciso8601==2.3.2", + "cronsim==2.6", "fnv-hash-fast==1.0.2", # hass-nabucasa is imported by helpers which don't depend on the cloud # integration diff --git a/requirements.txt b/requirements.txt index 82405dc44ef..3f1fd48ed57 100644 --- a/requirements.txt +++ b/requirements.txt @@ -18,6 +18,7 @@ awesomeversion==24.6.0 bcrypt==4.2.0 certifi>=2021.5.30 ciso8601==2.3.2 +cronsim==2.6 fnv-hash-fast==1.0.2 hass-nabucasa==0.87.0 httpx==0.27.2 From cf9686a802d97eb35a65f7720212237396738ab5 Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Tue, 24 Dec 2024 10:59:36 +1000 Subject: [PATCH 1085/1198] Slow down polling in Teslemetry (#133924) --- homeassistant/components/teslemetry/coordinator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/teslemetry/coordinator.py b/homeassistant/components/teslemetry/coordinator.py index e7232d0f87c..303a3250edf 100644 --- a/homeassistant/components/teslemetry/coordinator.py +++ 
b/homeassistant/components/teslemetry/coordinator.py @@ -18,7 +18,7 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda from .const import ENERGY_HISTORY_FIELDS, LOGGER from .helpers import flatten -VEHICLE_INTERVAL = timedelta(seconds=30) +VEHICLE_INTERVAL = timedelta(seconds=60) VEHICLE_WAIT = timedelta(minutes=15) ENERGY_LIVE_INTERVAL = timedelta(seconds=30) ENERGY_INFO_INTERVAL = timedelta(seconds=30) From 44150e9fd70beb4199c334da50f4abbfef729f8d Mon Sep 17 00:00:00 2001 From: Dave T <17680170+davet2001@users.noreply.github.com> Date: Tue, 24 Dec 2024 06:45:13 +0000 Subject: [PATCH 1086/1198] Fix missing % in string for generic camera (#133925) Fix missing % in generic camera string --- homeassistant/components/generic/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/generic/strings.json b/homeassistant/components/generic/strings.json index b3ecadacba5..45841e6255f 100644 --- a/homeassistant/components/generic/strings.json +++ b/homeassistant/components/generic/strings.json @@ -77,7 +77,7 @@ }, "error": { "unknown": "[%key:common::config_flow::error::unknown%]", - "unknown_with_details": "[%key:common::config_flow::error::unknown_with_details]", + "unknown_with_details": "[%key:component::generic::config::error::unknown_with_details%]", "already_exists": "[%key:component::generic::config::error::already_exists%]", "unable_still_load": "[%key:component::generic::config::error::unable_still_load%]", "unable_still_load_auth": "[%key:component::generic::config::error::unable_still_load_auth%]", From f23bc51b88579c7f225ee804eff9ca7116c0230a Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 24 Dec 2024 07:42:48 +0100 Subject: [PATCH 1087/1198] Fix Peblar import in data coordinator (#133926) --- homeassistant/components/peblar/coordinator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/peblar/coordinator.py b/homeassistant/components/peblar/coordinator.py index 398788f1f9f..058f2aefb3b 100644 --- a/homeassistant/components/peblar/coordinator.py +++ b/homeassistant/components/peblar/coordinator.py @@ -16,6 +16,7 @@ from peblar import ( PeblarEVInterface, PeblarMeter, PeblarSystem, + PeblarSystemInformation, PeblarUserConfiguration, PeblarVersions, ) @@ -24,7 +25,6 @@ from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from tests.components.peblar.conftest import PeblarSystemInformation from .const import DOMAIN, LOGGER From 4f1e9b2338c8cf5c31dca49254e465971d3d4839 Mon Sep 17 00:00:00 2001 From: G-Two <7310260+G-Two@users.noreply.github.com> Date: Tue, 24 Dec 2024 02:59:51 -0500 Subject: [PATCH 1088/1198] Stop using shared aiohttp client session for Subaru integration (#133931) --- homeassistant/components/subaru/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/subaru/__init__.py b/homeassistant/components/subaru/__init__.py index 3762b16e58b..4068507ed14 100644 --- a/homeassistant/components/subaru/__init__.py +++ b/homeassistant/components/subaru/__init__.py @@ -49,7 +49,7 @@ _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Subaru from a config entry.""" config = entry.data - websession = 
aiohttp_client.async_get_clientsession(hass) + websession = aiohttp_client.async_create_clientsession(hass) try: controller = SubaruAPI( websession, From ce830719000256e75c704a06957e71e48589f479 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 24 Dec 2024 08:24:58 +0000 Subject: [PATCH 1089/1198] Bump version to 2025.1.0b1 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index 6cdb7f5fb07..d8c94a55e37 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -25,7 +25,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2025 MINOR_VERSION: Final = 1 -PATCH_VERSION: Final = "0b0" +PATCH_VERSION: Final = "0b1" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 3e432b6e8ad..dbbe6dd7110 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2025.1.0b0" +version = "2025.1.0b1" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" From 502fbe65eefa50e86b2e1deea60354601b7449c2 Mon Sep 17 00:00:00 2001 From: Claudio Ruggeri - CR-Tech <41435902+crug80@users.noreply.github.com> Date: Tue, 24 Dec 2024 13:57:18 +0100 Subject: [PATCH 1090/1198] Fix reload modbus component issue (#133820) fix issue 116675 --- homeassistant/components/modbus/__init__.py | 39 ++++++++---- homeassistant/components/modbus/modbus.py | 3 - .../modbus/fixtures/configuration_2.yaml | 12 ++++ .../modbus/fixtures/configuration_empty.yaml | 0 tests/components/modbus/test_init.py | 60 ++++++++++++++----- 5 files changed, 85 insertions(+), 29 deletions(-) create mode 100644 tests/components/modbus/fixtures/configuration_2.yaml create mode 100644 tests/components/modbus/fixtures/configuration_empty.yaml diff --git a/homeassistant/components/modbus/__init__.py b/homeassistant/components/modbus/__init__.py index 48f8c726836..bbd2ba5c02d 100644 --- a/homeassistant/components/modbus/__init__.py +++ b/homeassistant/components/modbus/__init__.py @@ -46,9 +46,13 @@ from homeassistant.const import ( CONF_TYPE, CONF_UNIQUE_ID, CONF_UNIT_OF_MEASUREMENT, + SERVICE_RELOAD, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant, ServiceCall import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.entity_platform import async_get_platforms +from homeassistant.helpers.reload import async_integration_yaml_config +from homeassistant.helpers.service import async_register_admin_service from homeassistant.helpers.typing import ConfigType from .const import ( @@ -451,18 +455,29 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up Modbus component.""" if DOMAIN not in config: return True + + async def _reload_config(call: Event | ServiceCall) -> None: + """Reload Modbus.""" + if DOMAIN not in hass.data: + _LOGGER.error("Modbus cannot reload, because it was never loaded") + return + hubs = hass.data[DOMAIN] + for name in hubs: + await hubs[name].async_close() + reset_platforms = async_get_platforms(hass, DOMAIN) + for reset_platform in reset_platforms: + _LOGGER.debug("Reload modbus resetting platform: %s", reset_platform.domain) + await 
reset_platform.async_reset() + reload_config = await async_integration_yaml_config(hass, DOMAIN) + if not reload_config: + _LOGGER.debug("Modbus not present anymore") + return + _LOGGER.debug("Modbus reloading") + await async_modbus_setup(hass, reload_config) + + async_register_admin_service(hass, DOMAIN, SERVICE_RELOAD, _reload_config) + return await async_modbus_setup( hass, config, ) - - -async def async_reset_platform(hass: HomeAssistant, integration_name: str) -> None: - """Release modbus resources.""" - if DOMAIN not in hass.data: - _LOGGER.error("Modbus cannot reload, because it was never loaded") - return - _LOGGER.debug("Modbus reloading") - hubs = hass.data[DOMAIN] - for name in hubs: - await hubs[name].async_close() diff --git a/homeassistant/components/modbus/modbus.py b/homeassistant/components/modbus/modbus.py index efce44d7979..8c8a879ead6 100644 --- a/homeassistant/components/modbus/modbus.py +++ b/homeassistant/components/modbus/modbus.py @@ -34,7 +34,6 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.discovery import async_load_platform from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.event import async_call_later -from homeassistant.helpers.reload import async_setup_reload_service from homeassistant.helpers.typing import ConfigType from .const import ( @@ -125,8 +124,6 @@ async def async_modbus_setup( ) -> bool: """Set up Modbus component.""" - await async_setup_reload_service(hass, DOMAIN, [DOMAIN]) - if config[DOMAIN]: config[DOMAIN] = check_config(hass, config[DOMAIN]) if not config[DOMAIN]: diff --git a/tests/components/modbus/fixtures/configuration_2.yaml b/tests/components/modbus/fixtures/configuration_2.yaml new file mode 100644 index 00000000000..3f7b062c4cb --- /dev/null +++ b/tests/components/modbus/fixtures/configuration_2.yaml @@ -0,0 +1,12 @@ +modbus: + type: "tcp" + host: "testHost" + port: 5001 + name: "testModbus" + sensors: + - name: "dummy" + address: 117 + slave: 0 + - name: "dummy_2" + address: 118 + slave: 1 diff --git a/tests/components/modbus/fixtures/configuration_empty.yaml b/tests/components/modbus/fixtures/configuration_empty.yaml new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/components/modbus/test_init.py b/tests/components/modbus/test_init.py index 0cfa7ba8b24..5dd3f6e9033 100644 --- a/tests/components/modbus/test_init.py +++ b/tests/components/modbus/test_init.py @@ -25,7 +25,6 @@ import voluptuous as vol from homeassistant import config as hass_config from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN -from homeassistant.components.modbus import async_reset_platform from homeassistant.components.modbus.const import ( ATTR_ADDRESS, ATTR_HUB, @@ -1159,22 +1158,61 @@ async def test_integration_reload( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_modbus, - freezer: FrozenDateTimeFactory, ) -> None: """Run test for integration reload.""" caplog.set_level(logging.DEBUG) caplog.clear() - yaml_path = get_fixture_path("configuration.yaml", "modbus") + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=10)) + await hass.async_block_till_done() + + yaml_path = get_fixture_path("configuration.yaml", DOMAIN) with mock.patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path): - await hass.services.async_call(DOMAIN, SERVICE_RELOAD, blocking=True) + await hass.services.async_call( + DOMAIN, + SERVICE_RELOAD, + {}, + blocking=True, + ) await hass.async_block_till_done() - for _ in range(4): - 
freezer.tick(timedelta(seconds=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done() assert "Modbus reloading" in caplog.text + state_sensor_1 = hass.states.get("sensor.dummy") + state_sensor_2 = hass.states.get("sensor.dummy_2") + assert state_sensor_1 + assert not state_sensor_2 + + caplog.clear() + yaml_path = get_fixture_path("configuration_2.yaml", DOMAIN) + with mock.patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path): + await hass.services.async_call( + DOMAIN, + SERVICE_RELOAD, + {}, + blocking=True, + ) + await hass.async_block_till_done() + assert "Modbus reloading" in caplog.text + state_sensor_1 = hass.states.get("sensor.dummy") + state_sensor_2 = hass.states.get("sensor.dummy_2") + assert state_sensor_1 + assert state_sensor_2 + + caplog.clear() + yaml_path = get_fixture_path("configuration_empty.yaml", DOMAIN) + with mock.patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path): + await hass.services.async_call( + DOMAIN, + SERVICE_RELOAD, + {}, + blocking=True, + ) + await hass.async_block_till_done() + assert "Modbus not present anymore" in caplog.text + state_sensor_1 = hass.states.get("sensor.dummy") + state_sensor_2 = hass.states.get("sensor.dummy_2") + assert not state_sensor_1 + assert not state_sensor_2 @pytest.mark.parametrize("do_config", [{}]) @@ -1227,9 +1265,3 @@ async def test_no_entities(hass: HomeAssistant) -> None: ] } assert await async_setup_component(hass, DOMAIN, config) is False - - -async def test_reset_platform(hass: HomeAssistant) -> None: - """Run test for async_reset_platform.""" - await async_reset_platform(hass, "modbus") - assert DOMAIN not in hass.data From 5d7a22fa7655099e45af849605e117f7d5f2de4e Mon Sep 17 00:00:00 2001 From: Khole <29937485+KJonline@users.noreply.github.com> Date: Tue, 24 Dec 2024 09:42:35 +0000 Subject: [PATCH 1091/1198] Hive: Fix error when device goes offline (#133848) --- homeassistant/components/hive/binary_sensor.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/hive/binary_sensor.py b/homeassistant/components/hive/binary_sensor.py index d14d98bcf50..d2938896f92 100644 --- a/homeassistant/components/hive/binary_sensor.py +++ b/homeassistant/components/hive/binary_sensor.py @@ -113,12 +113,17 @@ class HiveBinarySensorEntity(HiveEntity, BinarySensorEntity): await self.hive.session.updateData(self.device) self.device = await self.hive.sensor.getSensor(self.device) self.attributes = self.device.get("attributes", {}) - self._attr_is_on = self.device["status"]["state"] + if self.device["hiveType"] != "Connectivity": - self._attr_available = self.device["deviceData"].get("online") + self._attr_available = ( + self.device["deviceData"].get("online") and "status" in self.device + ) else: self._attr_available = True + if self._attr_available: + self._attr_is_on = self.device["status"].get("state") + class HiveSensorEntity(HiveEntity, BinarySensorEntity): """Hive Sensor Entity.""" From 4ca17dbb9eafc9db71d9649028e89d08a4de38a5 Mon Sep 17 00:00:00 2001 From: Philipp Danner Date: Tue, 24 Dec 2024 14:00:34 +0100 Subject: [PATCH 1092/1198] fix "Slow" response leads to "Could not find a charging station" #124129 (#133889) fix #124129 --- homeassistant/components/keba/manifest.json | 2 +- requirements_all.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/keba/manifest.json b/homeassistant/components/keba/manifest.json index d86ce053187..6427a30f000 100644 --- a/homeassistant/components/keba/manifest.json +++ 
b/homeassistant/components/keba/manifest.json @@ -6,5 +6,5 @@ "iot_class": "local_polling", "loggers": ["keba_kecontact"], "quality_scale": "legacy", - "requirements": ["keba-kecontact==1.1.0"] + "requirements": ["keba-kecontact==1.3.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index a087e3ff509..42dd4546e8b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1248,7 +1248,7 @@ justnimbus==0.7.4 kaiterra-async-client==1.0.0 # homeassistant.components.keba -keba-kecontact==1.1.0 +keba-kecontact==1.3.0 # homeassistant.components.kegtron kegtron-ble==0.4.0 From 7b2fc282e57e2203748213ee8c1493d34f80b81f Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 24 Dec 2024 10:15:21 +0100 Subject: [PATCH 1093/1198] Update apprise to v1.9.1 (#133936) --- homeassistant/components/apprise/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/apprise/manifest.json b/homeassistant/components/apprise/manifest.json index 4f3c4d7ef4e..ebe27d42471 100644 --- a/homeassistant/components/apprise/manifest.json +++ b/homeassistant/components/apprise/manifest.json @@ -6,5 +6,5 @@ "iot_class": "cloud_push", "loggers": ["apprise"], "quality_scale": "legacy", - "requirements": ["apprise==1.9.0"] + "requirements": ["apprise==1.9.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 42dd4546e8b..c3988ae69f1 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -470,7 +470,7 @@ anthropic==0.31.2 apple_weatherkit==1.1.3 # homeassistant.components.apprise -apprise==1.9.0 +apprise==1.9.1 # homeassistant.components.aprs aprslib==0.7.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index de9d048d72c..4365e33b8fa 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -443,7 +443,7 @@ anthropic==0.31.2 apple_weatherkit==1.1.3 # homeassistant.components.apprise -apprise==1.9.0 +apprise==1.9.1 # homeassistant.components.aprs aprslib==0.7.2 From ef05133a663126b9b0a3dbaaef3fc48de657239b Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Tue, 24 Dec 2024 10:17:02 +0100 Subject: [PATCH 1094/1198] Use SignedSession in Xbox (#133938) --- homeassistant/components/xbox/__init__.py | 10 ++-------- homeassistant/components/xbox/api.py | 12 ++++-------- 2 files changed, 6 insertions(+), 16 deletions(-) diff --git a/homeassistant/components/xbox/__init__.py b/homeassistant/components/xbox/__init__.py index 6ab46cea069..5282a34903a 100644 --- a/homeassistant/components/xbox/__init__.py +++ b/homeassistant/components/xbox/__init__.py @@ -10,11 +10,7 @@ from xbox.webapi.api.provider.smartglass.models import SmartglassConsoleList from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import ( - aiohttp_client, - config_entry_oauth2_flow, - config_validation as cv, -) +from homeassistant.helpers import config_entry_oauth2_flow, config_validation as cv from . 
import api from .const import DOMAIN @@ -40,9 +36,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ) ) session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation) - auth = api.AsyncConfigEntryAuth( - aiohttp_client.async_get_clientsession(hass), session - ) + auth = api.AsyncConfigEntryAuth(session) client = XboxLiveClient(auth) consoles: SmartglassConsoleList = await client.smartglass.get_console_list() diff --git a/homeassistant/components/xbox/api.py b/homeassistant/components/xbox/api.py index a0c2d4cfb16..d4c47e4cc39 100644 --- a/homeassistant/components/xbox/api.py +++ b/homeassistant/components/xbox/api.py @@ -1,24 +1,20 @@ """API for xbox bound to Home Assistant OAuth.""" -from aiohttp import ClientSession from xbox.webapi.authentication.manager import AuthenticationManager from xbox.webapi.authentication.models import OAuth2TokenResponse +from xbox.webapi.common.signed_session import SignedSession -from homeassistant.helpers import config_entry_oauth2_flow +from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session from homeassistant.util.dt import utc_from_timestamp class AsyncConfigEntryAuth(AuthenticationManager): """Provide xbox authentication tied to an OAuth2 based config entry.""" - def __init__( - self, - websession: ClientSession, - oauth_session: config_entry_oauth2_flow.OAuth2Session, - ) -> None: + def __init__(self, oauth_session: OAuth2Session) -> None: """Initialize xbox auth.""" # Leaving out client credentials as they are handled by Home Assistant - super().__init__(websession, "", "", "") + super().__init__(SignedSession(), "", "", "") self._oauth_session = oauth_session self.oauth = self._get_oauth_token() From 6e7d09583147d23e8d81ad8718c41d52fa29da63 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Tue, 24 Dec 2024 13:44:09 +0100 Subject: [PATCH 1095/1198] Update Jinja2 to 3.1.5 (#133951) --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index b88fef0f64f..620eb4c00ed 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -39,7 +39,7 @@ home-assistant-frontend==20241223.1 home-assistant-intents==2024.12.20 httpx==0.27.2 ifaddr==0.2.0 -Jinja2==3.1.4 +Jinja2==3.1.5 lru-dict==1.3.0 mutagen==1.47.0 orjson==3.10.12 diff --git a/pyproject.toml b/pyproject.toml index dbbe6dd7110..3ada9fa51c7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,7 +52,7 @@ dependencies = [ "httpx==0.27.2", "home-assistant-bluetooth==1.13.0", "ifaddr==0.2.0", - "Jinja2==3.1.4", + "Jinja2==3.1.5", "lru-dict==1.3.0", "PyJWT==2.10.1", # PyJWT has loose dependency. We want the latest one. 
diff --git a/requirements.txt b/requirements.txt index 3f1fd48ed57..0d898edcd4b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -24,7 +24,7 @@ hass-nabucasa==0.87.0 httpx==0.27.2 home-assistant-bluetooth==1.13.0 ifaddr==0.2.0 -Jinja2==3.1.4 +Jinja2==3.1.5 lru-dict==1.3.0 PyJWT==2.10.1 cryptography==44.0.0 From 9242b67e0d6cc825cfba6bcbcd3c5e6ea07ac41e Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Tue, 24 Dec 2024 16:41:36 +0100 Subject: [PATCH 1096/1198] Update frontend to 20241224.0 (#133963) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 2d3604330f6..4a70889c1d2 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20241223.1"] + "requirements": ["home-assistant-frontend==20241224.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 620eb4c00ed..a66137ef8c3 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -35,7 +35,7 @@ habluetooth==3.6.0 hass-nabucasa==0.87.0 hassil==2.0.5 home-assistant-bluetooth==1.13.0 -home-assistant-frontend==20241223.1 +home-assistant-frontend==20241224.0 home-assistant-intents==2024.12.20 httpx==0.27.2 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index c3988ae69f1..fa2082b50e0 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1134,7 +1134,7 @@ hole==0.8.0 holidays==0.63 # homeassistant.components.frontend -home-assistant-frontend==20241223.1 +home-assistant-frontend==20241224.0 # homeassistant.components.conversation home-assistant-intents==2024.12.20 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4365e33b8fa..715cb26d398 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -963,7 +963,7 @@ hole==0.8.0 holidays==0.63 # homeassistant.components.frontend -home-assistant-frontend==20241223.1 +home-assistant-frontend==20241224.0 # homeassistant.components.conversation home-assistant-intents==2024.12.20 From d415b7bc8dc344902ab269ae24673f752ae6906d Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Tue, 24 Dec 2024 16:42:54 +0100 Subject: [PATCH 1097/1198] Bump version to 2025.1.0b2 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index d8c94a55e37..42407f46fb5 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -25,7 +25,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2025 MINOR_VERSION: Final = 1 -PATCH_VERSION: Final = "0b1" +PATCH_VERSION: Final = "0b2" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 3ada9fa51c7..95cc634a333 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2025.1.0b1" 
+version = "2025.1.0b2" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" From 03fb1362187e5b2ae73ae43f1f195da20b64be54 Mon Sep 17 00:00:00 2001 From: Cyrill Raccaud Date: Fri, 27 Dec 2024 00:24:47 +0100 Subject: [PATCH 1098/1198] Fix swiss public transport line field none (#133964) * fix #133116 The line can theoretically be none, when no line info is available (lets say walking sections first?) * fix line field * add unit test with missing line field --- .../components/swiss_public_transport/coordinator.py | 4 ++-- .../swiss_public_transport/fixtures/connections.json | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/swiss_public_transport/coordinator.py b/homeassistant/components/swiss_public_transport/coordinator.py index 59602e7b982..c4cf2390dd0 100644 --- a/homeassistant/components/swiss_public_transport/coordinator.py +++ b/homeassistant/components/swiss_public_transport/coordinator.py @@ -113,7 +113,7 @@ class SwissPublicTransportDataUpdateCoordinator( destination=self._opendata.to_name, remaining_time=str(self.remaining_time(connections[i]["departure"])), delay=connections[i]["delay"], - line=connections[i]["line"], + line=connections[i].get("line"), ) for i in range(limit) if len(connections) > i and connections[i] is not None @@ -134,7 +134,7 @@ class SwissPublicTransportDataUpdateCoordinator( "train_number": connection["train_number"], "transfers": connection["transfers"], "delay": connection["delay"], - "line": connection["line"], + "line": connection.get("line"), } for connection in await self.fetch_connections(limit) ] diff --git a/tests/components/swiss_public_transport/fixtures/connections.json b/tests/components/swiss_public_transport/fixtures/connections.json index 7e61206c366..1e8e5022bdf 100644 --- a/tests/components/swiss_public_transport/fixtures/connections.json +++ b/tests/components/swiss_public_transport/fixtures/connections.json @@ -23,8 +23,7 @@ "platform": 2, "transfers": 0, "duration": "10", - "delay": 0, - "line": "T10" + "delay": 0 }, { "departure": "2024-01-06T18:06:00+0100", From f0e8360401e145a1448fece86c421738a39f21fd Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 26 Dec 2024 07:48:55 -1000 Subject: [PATCH 1099/1198] Ensure all states have been migrated to use timestamps (#134007) --- .../components/recorder/db_schema.py | 2 +- .../components/recorder/migration.py | 17 ++- .../recorder/test_migration_from_schema_32.py | 140 ++++++++++++++++++ 3 files changed, 156 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/recorder/db_schema.py b/homeassistant/components/recorder/db_schema.py index fa4162f4183..2afbed9cb75 100644 --- a/homeassistant/components/recorder/db_schema.py +++ b/homeassistant/components/recorder/db_schema.py @@ -77,7 +77,7 @@ class LegacyBase(DeclarativeBase): """Base class for tables, used for schema migration.""" -SCHEMA_VERSION = 47 +SCHEMA_VERSION = 48 _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index d57db03f90e..8c9252ba28b 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -1976,6 +1976,17 @@ class _SchemaVersion47Migrator(_SchemaVersionMigrator, target_version=47): ) +class _SchemaVersion48Migrator(_SchemaVersionMigrator, target_version=48): + def _apply_update(self) -> None: + """Version specific update method.""" + # https://github.com/home-assistant/core/issues/134002 + # If the system has unmigrated states rows, we need to + # ensure they are migrated now so the new optimized + # queries can be used. For most systems, this should + # be very fast and nothing will be migrated. + _migrate_columns_to_timestamp(self.instance, self.session_maker, self.engine) + + def _migrate_statistics_columns_to_timestamp_removing_duplicates( hass: HomeAssistant, instance: Recorder, @@ -2109,7 +2120,8 @@ def _migrate_columns_to_timestamp( connection.execute( text( 'UPDATE events set time_fired_ts=strftime("%s",time_fired) + ' - "cast(substr(time_fired,-7) AS FLOAT);" + "cast(substr(time_fired,-7) AS FLOAT) " + "WHERE time_fired_ts is NULL;" ) ) connection.execute( @@ -2117,7 +2129,8 @@ def _migrate_columns_to_timestamp( 'UPDATE states set last_updated_ts=strftime("%s",last_updated) + ' "cast(substr(last_updated,-7) AS FLOAT), " 'last_changed_ts=strftime("%s",last_changed) + ' - "cast(substr(last_changed,-7) AS FLOAT);" + "cast(substr(last_changed,-7) AS FLOAT) " + " WHERE last_updated_ts is NULL;" ) ) elif engine.dialect.name == SupportedDialect.MYSQL: diff --git a/tests/components/recorder/test_migration_from_schema_32.py b/tests/components/recorder/test_migration_from_schema_32.py index 3cc654c0fa1..0624955b0e9 100644 --- a/tests/components/recorder/test_migration_from_schema_32.py +++ b/tests/components/recorder/test_migration_from_schema_32.py @@ -2142,3 +2142,143 @@ async def test_stats_migrate_times( ) await hass.async_stop() + + +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage +async def test_cleanup_unmigrated_state_timestamps( + async_test_recorder: RecorderInstanceGenerator, +) -> None: + """Ensure schema 48 migration cleans up any unmigrated state timestamps.""" + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] + + test_uuid = uuid.uuid4() + uuid_hex = test_uuid.hex + + def _object_as_dict(obj): + return {c.key: getattr(obj, c.key) for c in inspect(obj).mapper.column_attrs} + + def _insert_states(): + with session_scope(hass=hass) as session: + state1 = old_db_schema.States( + 
entity_id="state.test_state1", + last_updated=datetime.datetime( + 2016, 10, 28, 20, 13, 52, 452529, tzinfo=datetime.UTC + ), + last_updated_ts=None, + last_changed=datetime.datetime( + 2016, 10, 28, 20, 13, 52, 452529, tzinfo=datetime.UTC + ), + last_changed_ts=None, + context_id=uuid_hex, + context_id_bin=None, + context_user_id=None, + context_user_id_bin=None, + context_parent_id=None, + context_parent_id_bin=None, + ) + state2 = old_db_schema.States( + entity_id="state.test_state2", + last_updated=datetime.datetime( + 2016, 10, 28, 20, 13, 52, 552529, tzinfo=datetime.UTC + ), + last_updated_ts=None, + last_changed=datetime.datetime( + 2016, 10, 28, 20, 13, 52, 452529, tzinfo=datetime.UTC + ), + last_changed_ts=None, + context_id=None, + context_id_bin=None, + context_user_id=None, + context_user_id_bin=None, + context_parent_id=None, + context_parent_id_bin=None, + ) + session.add_all((state1, state2)) + # There is a default of now() for last_updated_ts so make sure it's not set + session.query(old_db_schema.States).update( + {old_db_schema.States.last_updated_ts: None} + ) + state3 = old_db_schema.States( + entity_id="state.already_migrated", + last_updated=None, + last_updated_ts=1477685632.452529, + last_changed=None, + last_changed_ts=1477685632.452529, + context_id=uuid_hex, + context_id_bin=None, + context_user_id=None, + context_user_id_bin=None, + context_parent_id=None, + context_parent_id_bin=None, + ) + session.add_all((state3,)) + + with session_scope(hass=hass, read_only=True) as session: + states = session.query(old_db_schema.States).all() + assert len(states) == 3 + + # Create database with old schema + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await instance.async_add_executor_job(_insert_states) + + await async_wait_recording_done(hass) + now = dt_util.utcnow() + await _async_wait_migration_done(hass) + await async_wait_recording_done(hass) + + await hass.async_stop() + await hass.async_block_till_done() + + def _fetch_migrated_states(): + with session_scope(hass=hass) as session: + states = session.query(States).all() + assert len(states) == 3 + return {state.state_id: _object_as_dict(state) for state in states} + + # Run again with new schema, let migration run + async with async_test_home_assistant() as hass: + with ( + freeze_time(now), + instrument_migration(hass) as instrumented_migration, + ): + async with async_test_recorder( + hass, wait_recorder=False, wait_recorder_setup=False + ) as instance: + # Check the context ID migrator is considered non-live + assert recorder.util.async_migration_is_live(hass) is False + instrumented_migration.migration_stall.set() + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + states_by_metadata_id = await instance.async_add_executor_job( + _fetch_migrated_states + ) + + await hass.async_stop() + await hass.async_block_till_done() + + assert len(states_by_metadata_id) == 3 + for state in states_by_metadata_id.values(): + assert state["last_updated_ts"] is not None + + by_entity_id = { + state["entity_id"]: state for state in states_by_metadata_id.values() + } + assert by_entity_id["state.test_state1"]["last_updated_ts"] == 
1477685632.452529 + assert by_entity_id["state.test_state2"]["last_updated_ts"] == 1477685632.552529 + assert ( + by_entity_id["state.already_migrated"]["last_updated_ts"] == 1477685632.452529 + ) From ef2af44795088fb6cb2d3719116feb4808c027da Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Thu, 26 Dec 2024 01:25:13 +0100 Subject: [PATCH 1100/1198] Bump pylamarzocco to 1.4.3 (#134008) --- homeassistant/components/lamarzocco/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lamarzocco/manifest.json b/homeassistant/components/lamarzocco/manifest.json index 309b858c77c..71d2278b51b 100644 --- a/homeassistant/components/lamarzocco/manifest.json +++ b/homeassistant/components/lamarzocco/manifest.json @@ -37,5 +37,5 @@ "iot_class": "cloud_polling", "loggers": ["pylamarzocco"], "quality_scale": "platinum", - "requirements": ["pylamarzocco==1.4.2"] + "requirements": ["pylamarzocco==1.4.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index fa2082b50e0..2988073f2a3 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2043,7 +2043,7 @@ pykwb==0.0.8 pylacrosse==0.4 # homeassistant.components.lamarzocco -pylamarzocco==1.4.2 +pylamarzocco==1.4.3 # homeassistant.components.lastfm pylast==5.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 715cb26d398..c13cad719ca 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1657,7 +1657,7 @@ pykrakenapi==0.1.8 pykulersky==0.5.2 # homeassistant.components.lamarzocco -pylamarzocco==1.4.2 +pylamarzocco==1.4.3 # homeassistant.components.lastfm pylast==5.1.0 From 1957ab1ccfa1c4f0f64a71b50ade6a0219c1e330 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Thu, 26 Dec 2024 00:53:20 -0800 Subject: [PATCH 1101/1198] Improve Google Tasks error messages (#134023) --- homeassistant/components/google_tasks/api.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/google_tasks/api.py b/homeassistant/components/google_tasks/api.py index 2a294b84654..475f98443a6 100644 --- a/homeassistant/components/google_tasks/api.py +++ b/homeassistant/components/google_tasks/api.py @@ -115,7 +115,7 @@ class AsyncConfigEntryAuth: def response_handler(_, response, exception: HttpError) -> None: if exception is not None: raise GoogleTasksApiError( - f"Google Tasks API responded with error ({exception.status_code})" + f"Google Tasks API responded with error ({exception.reason or exception.status_code})" ) from exception if response: data = json.loads(response) @@ -152,7 +152,7 @@ class AsyncConfigEntryAuth: result = await self._hass.async_add_executor_job(request.execute) except HttpError as err: raise GoogleTasksApiError( - f"Google Tasks API responded with error ({err.status_code})" + f"Google Tasks API responded with: {err.reason or err.status_code})" ) from err if result: _raise_if_error(result) From c11bdcc9498e851e58e1107aa7fec13e0d6d3001 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sat, 28 Dec 2024 21:38:04 +0100 Subject: [PATCH 1102/1198] Fix Nord Pool empty response (#134033) * Fix Nord Pool empty response * Mods * reset validate prices --- .../components/nordpool/coordinator.py | 17 ++- .../components/nordpool/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/nordpool/conftest.py | 8 - tests/components/nordpool/test_coordinator.py | 9 +- tests/components/nordpool/test_sensor.py | 139 
+++++++++++++++++- 7 files changed, 155 insertions(+), 24 deletions(-) diff --git a/homeassistant/components/nordpool/coordinator.py b/homeassistant/components/nordpool/coordinator.py index 0c9a7e9f337..a6cfd40c323 100644 --- a/homeassistant/components/nordpool/coordinator.py +++ b/homeassistant/components/nordpool/coordinator.py @@ -2,7 +2,6 @@ from __future__ import annotations -import asyncio from collections.abc import Callable from datetime import datetime, timedelta from typing import TYPE_CHECKING @@ -73,7 +72,7 @@ class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodsData]): self.hass, self.fetch_data, self.get_next_interval(dt_util.utcnow()) ) data = await self.api_call() - if data: + if data and data.entries: self.async_set_updated_data(data) async def api_call(self, retry: int = 3) -> DeliveryPeriodsData | None: @@ -90,18 +89,20 @@ class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodsData]): self.config_entry.data[CONF_AREAS], ) except ( - NordPoolEmptyResponseError, NordPoolResponseError, NordPoolError, ) as error: LOGGER.debug("Connection error: %s", error) - if retry > 0: - next_run = (4 - retry) * 15 - LOGGER.debug("Wait %d seconds for next try", next_run) - await asyncio.sleep(next_run) - return await self.api_call(retry - 1) self.async_set_update_error(error) + if data: + current_day = dt_util.utcnow().strftime("%Y-%m-%d") + for entry in data.entries: + if entry.requested_date == current_day: + LOGGER.debug("Data for current day found") + return data + + self.async_set_update_error(NordPoolEmptyResponseError("No current day data")) return data def merge_price_entries(self) -> list[DeliveryPeriodEntry]: diff --git a/homeassistant/components/nordpool/manifest.json b/homeassistant/components/nordpool/manifest.json index 215494e10a0..b096d2bd506 100644 --- a/homeassistant/components/nordpool/manifest.json +++ b/homeassistant/components/nordpool/manifest.json @@ -8,6 +8,6 @@ "iot_class": "cloud_polling", "loggers": ["pynordpool"], "quality_scale": "platinum", - "requirements": ["pynordpool==0.2.3"], + "requirements": ["pynordpool==0.2.4"], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index 2988073f2a3..0c48fe1ab2c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2118,7 +2118,7 @@ pynetio==0.1.9.1 pynobo==1.8.1 # homeassistant.components.nordpool -pynordpool==0.2.3 +pynordpool==0.2.4 # homeassistant.components.nuki pynuki==1.6.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c13cad719ca..e092afbe528 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1720,7 +1720,7 @@ pynetgear==0.10.10 pynobo==1.8.1 # homeassistant.components.nordpool -pynordpool==0.2.3 +pynordpool==0.2.4 # homeassistant.components.nuki pynuki==1.6.3 diff --git a/tests/components/nordpool/conftest.py b/tests/components/nordpool/conftest.py index 1c26c7f84eb..ca1e2a05a0b 100644 --- a/tests/components/nordpool/conftest.py +++ b/tests/components/nordpool/conftest.py @@ -5,7 +5,6 @@ from __future__ import annotations from collections.abc import AsyncGenerator import json from typing import Any -from unittest.mock import patch from pynordpool import API, NordPoolClient import pytest @@ -20,13 +19,6 @@ from tests.common import MockConfigEntry, load_fixture from tests.test_util.aiohttp import AiohttpClientMocker -@pytest.fixture(autouse=True) -async def no_sleep() -> AsyncGenerator[None]: - """No sleeping.""" - with 
patch("homeassistant.components.nordpool.coordinator.asyncio.sleep"): - yield - - @pytest.fixture async def load_int(hass: HomeAssistant, get_client: NordPoolClient) -> MockConfigEntry: """Set up the Nord Pool integration in Home Assistant.""" diff --git a/tests/components/nordpool/test_coordinator.py b/tests/components/nordpool/test_coordinator.py index 7647fe4bdfe..71c4644ea95 100644 --- a/tests/components/nordpool/test_coordinator.py +++ b/tests/components/nordpool/test_coordinator.py @@ -55,7 +55,7 @@ async def test_coordinator( freezer.tick(timedelta(hours=1)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - assert mock_data.call_count == 4 + assert mock_data.call_count == 1 state = hass.states.get("sensor.nord_pool_se3_current_price") assert state.state == STATE_UNAVAILABLE @@ -69,7 +69,7 @@ async def test_coordinator( freezer.tick(timedelta(hours=1)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - assert mock_data.call_count == 4 + assert mock_data.call_count == 1 state = hass.states.get("sensor.nord_pool_se3_current_price") assert state.state == STATE_UNAVAILABLE assert "Authentication error" in caplog.text @@ -84,7 +84,8 @@ async def test_coordinator( freezer.tick(timedelta(hours=1)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - assert mock_data.call_count == 4 + # Empty responses does not raise + assert mock_data.call_count == 3 state = hass.states.get("sensor.nord_pool_se3_current_price") assert state.state == STATE_UNAVAILABLE assert "Empty response" in caplog.text @@ -99,7 +100,7 @@ async def test_coordinator( freezer.tick(timedelta(hours=1)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - assert mock_data.call_count == 4 + assert mock_data.call_count == 1 state = hass.states.get("sensor.nord_pool_se3_current_price") assert state.state == STATE_UNAVAILABLE assert "Response error" in caplog.text diff --git a/tests/components/nordpool/test_sensor.py b/tests/components/nordpool/test_sensor.py index a1a27b5feec..60be1ee3258 100644 --- a/tests/components/nordpool/test_sensor.py +++ b/tests/components/nordpool/test_sensor.py @@ -2,14 +2,22 @@ from __future__ import annotations +from datetime import timedelta +from http import HTTPStatus +from typing import Any + +from freezegun.api import FrozenDateTimeFactory +from pynordpool import API import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.config_entries import ConfigEntry +from homeassistant.const import STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from tests.common import snapshot_platform +from tests.common import async_fire_time_changed, snapshot_platform +from tests.test_util.aiohttp import AiohttpClientMocker @pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") @@ -59,3 +67,132 @@ async def test_sensor_no_previous_price( assert current_price.state == "0.12666" # SE3 2024-11-05T23:00:00Z assert last_price.state == "0.28914" # SE3 2024-11-05T22:00:00Z assert next_price.state == "0.07406" # SE3 2024-11-06T00:00:00Z + + +@pytest.mark.freeze_time("2024-11-05T11:00:01+01:00") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_empty_response( + hass: HomeAssistant, + load_int: ConfigEntry, + load_json: list[dict[str, Any]], + aioclient_mock: AiohttpClientMocker, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the 
Nord Pool sensor with empty response.""" + + responses = list(load_json) + + current_price = hass.states.get("sensor.nord_pool_se3_current_price") + last_price = hass.states.get("sensor.nord_pool_se3_previous_price") + next_price = hass.states.get("sensor.nord_pool_se3_next_price") + assert current_price is not None + assert last_price is not None + assert next_price is not None + assert current_price.state == "0.92737" + assert last_price.state == "1.03132" + assert next_price.state == "0.92505" + + aioclient_mock.clear_requests() + aioclient_mock.request( + "GET", + url=API + "/DayAheadPrices", + params={ + "date": "2024-11-04", + "market": "DayAhead", + "deliveryArea": "SE3,SE4", + "currency": "SEK", + }, + json=responses[1], + ) + aioclient_mock.request( + "GET", + url=API + "/DayAheadPrices", + params={ + "date": "2024-11-05", + "market": "DayAhead", + "deliveryArea": "SE3,SE4", + "currency": "SEK", + }, + json=responses[0], + ) + # Future date without data should return 204 + aioclient_mock.request( + "GET", + url=API + "/DayAheadPrices", + params={ + "date": "2024-11-06", + "market": "DayAhead", + "deliveryArea": "SE3,SE4", + "currency": "SEK", + }, + status=HTTPStatus.NO_CONTENT, + ) + + freezer.tick(timedelta(hours=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + # All prices should be known as tomorrow is not loaded by sensors + + current_price = hass.states.get("sensor.nord_pool_se3_current_price") + last_price = hass.states.get("sensor.nord_pool_se3_previous_price") + next_price = hass.states.get("sensor.nord_pool_se3_next_price") + assert current_price is not None + assert last_price is not None + assert next_price is not None + assert current_price.state == "0.92505" + assert last_price.state == "0.92737" + assert next_price.state == "0.94949" + + aioclient_mock.clear_requests() + aioclient_mock.request( + "GET", + url=API + "/DayAheadPrices", + params={ + "date": "2024-11-04", + "market": "DayAhead", + "deliveryArea": "SE3,SE4", + "currency": "SEK", + }, + json=responses[1], + ) + aioclient_mock.request( + "GET", + url=API + "/DayAheadPrices", + params={ + "date": "2024-11-05", + "market": "DayAhead", + "deliveryArea": "SE3,SE4", + "currency": "SEK", + }, + json=responses[0], + ) + # Future date without data should return 204 + aioclient_mock.request( + "GET", + url=API + "/DayAheadPrices", + params={ + "date": "2024-11-06", + "market": "DayAhead", + "deliveryArea": "SE3,SE4", + "currency": "SEK", + }, + status=HTTPStatus.NO_CONTENT, + ) + + freezer.move_to("2024-11-05T22:00:01+00:00") + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + # Current and last price should be known, next price should be unknown + # as api responds with empty data (204) + + current_price = hass.states.get("sensor.nord_pool_se3_current_price") + last_price = hass.states.get("sensor.nord_pool_se3_previous_price") + next_price = hass.states.get("sensor.nord_pool_se3_next_price") + assert current_price is not None + assert last_price is not None + assert next_price is not None + assert current_price.state == "0.28914" + assert last_price.state == "0.5223" + assert next_price.state == STATE_UNKNOWN From 15b80c59fcfb859c482e171ce0455b18b5dfc6b3 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Fri, 27 Dec 2024 21:33:37 +0100 Subject: [PATCH 1103/1198] Cleanup devices in Nord Pool from reconfiguration (#134043) * Cleanup devices in Nord Pool from reconfiguration * Mods * Mod --- 
homeassistant/components/nordpool/__init__.py | 40 ++- .../nordpool/fixtures/delivery_period_nl.json | 229 ++++++++++++++++++ tests/components/nordpool/test_init.py | 107 +++++++- 3 files changed, 366 insertions(+), 10 deletions(-) create mode 100644 tests/components/nordpool/fixtures/delivery_period_nl.json diff --git a/homeassistant/components/nordpool/__init__.py b/homeassistant/components/nordpool/__init__.py index 83f8edc8a8d..77f4b263b54 100644 --- a/homeassistant/components/nordpool/__init__.py +++ b/homeassistant/components/nordpool/__init__.py @@ -5,11 +5,11 @@ from __future__ import annotations from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv +from homeassistant.helpers import config_validation as cv, device_registry as dr from homeassistant.helpers.typing import ConfigType from homeassistant.util import dt as dt_util -from .const import DOMAIN, PLATFORMS +from .const import CONF_AREAS, DOMAIN, LOGGER, PLATFORMS from .coordinator import NordPoolDataUpdateCoordinator from .services import async_setup_services @@ -25,10 +25,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -async def async_setup_entry(hass: HomeAssistant, entry: NordPoolConfigEntry) -> bool: +async def async_setup_entry( + hass: HomeAssistant, config_entry: NordPoolConfigEntry +) -> bool: """Set up Nord Pool from a config entry.""" - coordinator = NordPoolDataUpdateCoordinator(hass, entry) + await cleanup_device(hass, config_entry) + + coordinator = NordPoolDataUpdateCoordinator(hass, config_entry) await coordinator.fetch_data(dt_util.utcnow()) if not coordinator.last_update_success: raise ConfigEntryNotReady( @@ -36,13 +40,33 @@ async def async_setup_entry(hass: HomeAssistant, entry: NordPoolConfigEntry) -> translation_key="initial_update_failed", translation_placeholders={"error": str(coordinator.last_exception)}, ) - entry.runtime_data = coordinator + config_entry.runtime_data = coordinator - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: NordPoolConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, config_entry: NordPoolConfigEntry +) -> bool: """Unload Nord Pool config entry.""" - return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS) + + +async def cleanup_device( + hass: HomeAssistant, config_entry: NordPoolConfigEntry +) -> None: + """Cleanup device and entities.""" + device_reg = dr.async_get(hass) + + entries = dr.async_entries_for_config_entry(device_reg, config_entry.entry_id) + for area in config_entry.data[CONF_AREAS]: + for entry in entries: + if entry.identifiers == {(DOMAIN, area)}: + continue + + LOGGER.debug("Removing device %s", entry.name) + device_reg.async_update_device( + entry.id, remove_config_entry_id=config_entry.entry_id + ) diff --git a/tests/components/nordpool/fixtures/delivery_period_nl.json b/tests/components/nordpool/fixtures/delivery_period_nl.json new file mode 100644 index 00000000000..cd326e05d01 --- /dev/null +++ b/tests/components/nordpool/fixtures/delivery_period_nl.json @@ -0,0 +1,229 @@ +{ + "deliveryDateCET": "2024-11-05", + "version": 2, + "updatedAt": 
"2024-11-04T11:58:10.7711584Z", + "deliveryAreas": ["NL"], + "market": "DayAhead", + "multiAreaEntries": [ + { + "deliveryStart": "2024-11-04T23:00:00Z", + "deliveryEnd": "2024-11-05T00:00:00Z", + "entryPerArea": { + "NL": 83.63 + } + }, + { + "deliveryStart": "2024-11-05T00:00:00Z", + "deliveryEnd": "2024-11-05T01:00:00Z", + "entryPerArea": { + "NL": 94.0 + } + }, + { + "deliveryStart": "2024-11-05T01:00:00Z", + "deliveryEnd": "2024-11-05T02:00:00Z", + "entryPerArea": { + "NL": 90.68 + } + }, + { + "deliveryStart": "2024-11-05T02:00:00Z", + "deliveryEnd": "2024-11-05T03:00:00Z", + "entryPerArea": { + "NL": 91.3 + } + }, + { + "deliveryStart": "2024-11-05T03:00:00Z", + "deliveryEnd": "2024-11-05T04:00:00Z", + "entryPerArea": { + "NL": 94.0 + } + }, + { + "deliveryStart": "2024-11-05T04:00:00Z", + "deliveryEnd": "2024-11-05T05:00:00Z", + "entryPerArea": { + "NL": 96.09 + } + }, + { + "deliveryStart": "2024-11-05T05:00:00Z", + "deliveryEnd": "2024-11-05T06:00:00Z", + "entryPerArea": { + "NL": 106.0 + } + }, + { + "deliveryStart": "2024-11-05T06:00:00Z", + "deliveryEnd": "2024-11-05T07:00:00Z", + "entryPerArea": { + "NL": 135.99 + } + }, + { + "deliveryStart": "2024-11-05T07:00:00Z", + "deliveryEnd": "2024-11-05T08:00:00Z", + "entryPerArea": { + "NL": 136.21 + } + }, + { + "deliveryStart": "2024-11-05T08:00:00Z", + "deliveryEnd": "2024-11-05T09:00:00Z", + "entryPerArea": { + "NL": 118.23 + } + }, + { + "deliveryStart": "2024-11-05T09:00:00Z", + "deliveryEnd": "2024-11-05T10:00:00Z", + "entryPerArea": { + "NL": 105.87 + } + }, + { + "deliveryStart": "2024-11-05T10:00:00Z", + "deliveryEnd": "2024-11-05T11:00:00Z", + "entryPerArea": { + "NL": 95.28 + } + }, + { + "deliveryStart": "2024-11-05T11:00:00Z", + "deliveryEnd": "2024-11-05T12:00:00Z", + "entryPerArea": { + "NL": 94.92 + } + }, + { + "deliveryStart": "2024-11-05T12:00:00Z", + "deliveryEnd": "2024-11-05T13:00:00Z", + "entryPerArea": { + "NL": 99.25 + } + }, + { + "deliveryStart": "2024-11-05T13:00:00Z", + "deliveryEnd": "2024-11-05T14:00:00Z", + "entryPerArea": { + "NL": 107.98 + } + }, + { + "deliveryStart": "2024-11-05T14:00:00Z", + "deliveryEnd": "2024-11-05T15:00:00Z", + "entryPerArea": { + "NL": 149.86 + } + }, + { + "deliveryStart": "2024-11-05T15:00:00Z", + "deliveryEnd": "2024-11-05T16:00:00Z", + "entryPerArea": { + "NL": 303.24 + } + }, + { + "deliveryStart": "2024-11-05T16:00:00Z", + "deliveryEnd": "2024-11-05T17:00:00Z", + "entryPerArea": { + "NL": 472.99 + } + }, + { + "deliveryStart": "2024-11-05T17:00:00Z", + "deliveryEnd": "2024-11-05T18:00:00Z", + "entryPerArea": { + "NL": 431.02 + } + }, + { + "deliveryStart": "2024-11-05T18:00:00Z", + "deliveryEnd": "2024-11-05T19:00:00Z", + "entryPerArea": { + "NL": 320.33 + } + }, + { + "deliveryStart": "2024-11-05T19:00:00Z", + "deliveryEnd": "2024-11-05T20:00:00Z", + "entryPerArea": { + "NL": 169.7 + } + }, + { + "deliveryStart": "2024-11-05T20:00:00Z", + "deliveryEnd": "2024-11-05T21:00:00Z", + "entryPerArea": { + "NL": 129.9 + } + }, + { + "deliveryStart": "2024-11-05T21:00:00Z", + "deliveryEnd": "2024-11-05T22:00:00Z", + "entryPerArea": { + "NL": 117.77 + } + }, + { + "deliveryStart": "2024-11-05T22:00:00Z", + "deliveryEnd": "2024-11-05T23:00:00Z", + "entryPerArea": { + "NL": 110.03 + } + } + ], + "blockPriceAggregates": [ + { + "blockName": "Off-peak 1", + "deliveryStart": "2024-11-04T23:00:00Z", + "deliveryEnd": "2024-11-05T07:00:00Z", + "averagePricePerArea": { + "NL": { + "average": 98.96, + "min": 83.63, + "max": 135.99 + } + } + }, + { + "blockName": "Peak", + 
"deliveryStart": "2024-11-05T07:00:00Z", + "deliveryEnd": "2024-11-05T19:00:00Z", + "averagePricePerArea": { + "NL": { + "average": 202.93, + "min": 94.92, + "max": 472.99 + } + } + }, + { + "blockName": "Off-peak 2", + "deliveryStart": "2024-11-05T19:00:00Z", + "deliveryEnd": "2024-11-05T23:00:00Z", + "averagePricePerArea": { + "NL": { + "average": 131.85, + "min": 110.03, + "max": 169.7 + } + } + } + ], + "currency": "EUR", + "exchangeRate": 1, + "areaStates": [ + { + "state": "Final", + "areas": ["NL"] + } + ], + "areaAverages": [ + { + "areaCode": "NL", + "price": 156.43 + } + ] +} diff --git a/tests/components/nordpool/test_init.py b/tests/components/nordpool/test_init.py index 3b1fc1fd8ec..c9b6167ff3c 100644 --- a/tests/components/nordpool/test_init.py +++ b/tests/components/nordpool/test_init.py @@ -2,9 +2,11 @@ from __future__ import annotations +import json from unittest.mock import patch from pynordpool import ( + API, NordPoolClient, NordPoolConnectionError, NordPoolEmptyResponseError, @@ -13,13 +15,17 @@ from pynordpool import ( ) import pytest -from homeassistant.components.nordpool.const import DOMAIN +from homeassistant.components.nordpool.const import CONF_AREAS, DOMAIN from homeassistant.config_entries import SOURCE_USER, ConfigEntryState +from homeassistant.const import CONF_CURRENCY from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import device_registry as dr, entity_registry as er from . import ENTRY_CONFIG -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, load_fixture +from tests.test_util.aiohttp import AiohttpClientMocker @pytest.mark.freeze_time("2024-11-05T10:00:00+00:00") @@ -71,3 +77,100 @@ async def test_initial_startup_fails( await hass.async_block_till_done(wait_background_tasks=True) assert entry.state is ConfigEntryState.SETUP_RETRY + + +@pytest.mark.freeze_time("2024-11-05T10:00:00+00:00") +async def test_reconfigure_cleans_up_device( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + get_client: NordPoolClient, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test clean up devices due to reconfiguration.""" + nl_json_file = load_fixture("delivery_period_nl.json", DOMAIN) + load_nl_json = json.loads(nl_json_file) + + entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + data=ENTRY_CONFIG, + ) + entry.add_to_hass(hass) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done(wait_background_tasks=True) + + assert entry.state is ConfigEntryState.LOADED + + assert device_registry.async_get_device(identifiers={(DOMAIN, "SE3")}) + assert device_registry.async_get_device(identifiers={(DOMAIN, "SE4")}) + assert entity_registry.async_get("sensor.nord_pool_se3_current_price") + assert entity_registry.async_get("sensor.nord_pool_se4_current_price") + assert hass.states.get("sensor.nord_pool_se3_current_price") + assert hass.states.get("sensor.nord_pool_se4_current_price") + + aioclient_mock.clear_requests() + aioclient_mock.request( + "GET", + url=API + "/DayAheadPrices", + params={ + "date": "2024-11-04", + "market": "DayAhead", + "deliveryArea": "NL", + "currency": "EUR", + }, + json=load_nl_json, + ) + aioclient_mock.request( + "GET", + url=API + "/DayAheadPrices", + params={ + "date": "2024-11-05", + "market": "DayAhead", + "deliveryArea": "NL", + "currency": "EUR", + }, + json=load_nl_json, + ) + aioclient_mock.request( + "GET", + url=API + 
"/DayAheadPrices", + params={ + "date": "2024-11-06", + "market": "DayAhead", + "deliveryArea": "NL", + "currency": "EUR", + }, + json=load_nl_json, + ) + + result = await entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_AREAS: ["NL"], + CONF_CURRENCY: "EUR", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert entry.data == { + "areas": [ + "NL", + ], + "currency": "EUR", + } + await hass.async_block_till_done(wait_background_tasks=True) + + assert device_registry.async_get_device(identifiers={(DOMAIN, "NL")}) + assert entity_registry.async_get("sensor.nord_pool_nl_current_price") + assert hass.states.get("sensor.nord_pool_nl_current_price") + + assert not device_registry.async_get_device(identifiers={(DOMAIN, "SE3")}) + assert not entity_registry.async_get("sensor.nord_pool_se3_current_price") + assert not hass.states.get("sensor.nord_pool_se3_current_price") + assert not device_registry.async_get_device(identifiers={(DOMAIN, "SE4")}) + assert not entity_registry.async_get("sensor.nord_pool_se4_current_price") + assert not hass.states.get("sensor.nord_pool_se4_current_price") From b84ae2abc377e7336b273022ba14bc40ea4bac3c Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Thu, 26 Dec 2024 23:03:50 -0500 Subject: [PATCH 1104/1198] Bump aiorussound to 4.1.1 (#134058) * Bump aiorussound to 4.1.1 * Trigger Build * Trigger Build --- homeassistant/components/russound_rio/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/russound_rio/manifest.json b/homeassistant/components/russound_rio/manifest.json index ab77ca3ab6a..f1d3671970d 100644 --- a/homeassistant/components/russound_rio/manifest.json +++ b/homeassistant/components/russound_rio/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_push", "loggers": ["aiorussound"], "quality_scale": "silver", - "requirements": ["aiorussound==4.1.0"] + "requirements": ["aiorussound==4.1.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 0c48fe1ab2c..abc3f2777b1 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -356,7 +356,7 @@ aioridwell==2024.01.0 aioruckus==0.42 # homeassistant.components.russound_rio -aiorussound==4.1.0 +aiorussound==4.1.1 # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index e092afbe528..304416e4dd6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -338,7 +338,7 @@ aioridwell==2024.01.0 aioruckus==0.42 # homeassistant.components.russound_rio -aiorussound==4.1.0 +aiorussound==4.1.1 # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 From 1a909d3a8a636a75483c3d90ec3a3aace116a7c0 Mon Sep 17 00:00:00 2001 From: Aaron Bach Date: Sun, 29 Dec 2024 09:23:44 -0700 Subject: [PATCH 1105/1198] Change SimpliSafe websocket reconnection log to `DEBUG`-level (#134063) * Change SimpliSafe websocket reconnection log to `DEBUG`-level * revert --- homeassistant/components/simplisafe/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/simplisafe/__init__.py b/homeassistant/components/simplisafe/__init__.py index b72519f9734..2f19c5117a4 100644 --- a/homeassistant/components/simplisafe/__init__.py +++ b/homeassistant/components/simplisafe/__init__.py @@ 
-485,7 +485,7 @@ class SimpliSafe: except Exception as err: # noqa: BLE001 LOGGER.error("Unknown exception while connecting to websocket: %s", err) - LOGGER.warning("Reconnecting to websocket") + LOGGER.debug("Reconnecting to websocket") await self._async_cancel_websocket_loop() self._websocket_reconnect_task = self._hass.async_create_task( self._async_start_websocket_loop() From f6a9cd38c05718d31e22b0199b65a3fd26f03bd1 Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Fri, 27 Dec 2024 05:01:10 -0500 Subject: [PATCH 1106/1198] Remove timeout from Russound RIO initialization (#134070) --- .../components/russound_rio/__init__.py | 6 ++---- .../components/russound_rio/config_flow.py | 17 +++++++---------- homeassistant/components/russound_rio/const.py | 3 --- 3 files changed, 9 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/russound_rio/__init__.py b/homeassistant/components/russound_rio/__init__.py index b068fbd1892..fedf5d8c686 100644 --- a/homeassistant/components/russound_rio/__init__.py +++ b/homeassistant/components/russound_rio/__init__.py @@ -1,6 +1,5 @@ """The russound_rio component.""" -import asyncio import logging from aiorussound import RussoundClient, RussoundTcpConnectionHandler @@ -11,7 +10,7 @@ from homeassistant.const import CONF_HOST, CONF_PORT, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from .const import CONNECT_TIMEOUT, DOMAIN, RUSSOUND_RIO_EXCEPTIONS +from .const import DOMAIN, RUSSOUND_RIO_EXCEPTIONS PLATFORMS = [Platform.MEDIA_PLAYER] @@ -40,8 +39,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: RussoundConfigEntry) -> await client.register_state_update_callbacks(_connection_update_callback) try: - async with asyncio.timeout(CONNECT_TIMEOUT): - await client.connect() + await client.connect() except RUSSOUND_RIO_EXCEPTIONS as err: raise ConfigEntryNotReady( translation_domain=DOMAIN, diff --git a/homeassistant/components/russound_rio/config_flow.py b/homeassistant/components/russound_rio/config_flow.py index e5efd309a23..f7f2e5b1d00 100644 --- a/homeassistant/components/russound_rio/config_flow.py +++ b/homeassistant/components/russound_rio/config_flow.py @@ -2,7 +2,6 @@ from __future__ import annotations -import asyncio import logging from typing import Any @@ -17,7 +16,7 @@ from homeassistant.config_entries import ( from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.helpers import config_validation as cv -from .const import CONNECT_TIMEOUT, DOMAIN, RUSSOUND_RIO_EXCEPTIONS +from .const import DOMAIN, RUSSOUND_RIO_EXCEPTIONS DATA_SCHEMA = vol.Schema( { @@ -45,10 +44,9 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): client = RussoundClient(RussoundTcpConnectionHandler(host, port)) try: - async with asyncio.timeout(CONNECT_TIMEOUT): - await client.connect() - controller = client.controllers[1] - await client.disconnect() + await client.connect() + controller = client.controllers[1] + await client.disconnect() except RUSSOUND_RIO_EXCEPTIONS: _LOGGER.exception("Could not connect to Russound RIO") errors["base"] = "cannot_connect" @@ -90,10 +88,9 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): # Connection logic is repeated here since this method will be removed in future releases client = RussoundClient(RussoundTcpConnectionHandler(host, port)) try: - async with asyncio.timeout(CONNECT_TIMEOUT): - await client.connect() - controller = client.controllers[1] - await client.disconnect() + await 
client.connect() + controller = client.controllers[1] + await client.disconnect() except RUSSOUND_RIO_EXCEPTIONS: _LOGGER.exception("Could not connect to Russound RIO") return self.async_abort( diff --git a/homeassistant/components/russound_rio/const.py b/homeassistant/components/russound_rio/const.py index af52e89d399..a142ba8641d 100644 --- a/homeassistant/components/russound_rio/const.py +++ b/homeassistant/components/russound_rio/const.py @@ -16,9 +16,6 @@ RUSSOUND_RIO_EXCEPTIONS = ( asyncio.CancelledError, ) - -CONNECT_TIMEOUT = 15 - MP_FEATURES_BY_FLAG = { FeatureFlag.COMMANDS_ZONE_MUTE_OFF_ON: MediaPlayerEntityFeature.VOLUME_MUTE } From bd786b53eecd7a3c90b3c3d443951d5ca1e00228 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Fri, 27 Dec 2024 12:59:52 +0100 Subject: [PATCH 1107/1198] Fix KNX config flow translations and add data descriptions (#134078) * Fix KNX config flow translations and add data descriptions * Update strings.json * typo --- homeassistant/components/knx/strings.json | 84 +++++++++++++++-------- 1 file changed, 56 insertions(+), 28 deletions(-) diff --git a/homeassistant/components/knx/strings.json b/homeassistant/components/knx/strings.json index 6c717c932b8..80ff1105e15 100644 --- a/homeassistant/components/knx/strings.json +++ b/homeassistant/components/knx/strings.json @@ -3,23 +3,30 @@ "step": { "connection_type": { "title": "KNX connection", - "description": "Please enter the connection type we should use for your KNX connection. \n AUTOMATIC - The integration takes care of the connectivity to your KNX Bus by performing a gateway scan. \n TUNNELING - The integration will connect to your KNX bus via tunneling. \n ROUTING - The integration will connect to your KNX bus via routing.", + "description": "'Automatic' performs a gateway scan on start, to find a KNX IP interface. It will connect via a tunnel. (Not available if a gateway scan was not successful.) \n\n 'Tunneling' will connect to a specific KNX IP interface over a tunnel. \n\n 'Routing' will use Multicast to communicate with KNX IP routers.", "data": { "connection_type": "KNX Connection Type" + }, + "data_description": { + "connection_type": "Please select the connection type you want to use for your KNX connection." } }, "tunnel": { "title": "Tunnel", - "description": "Please select a gateway from the list.", "data": { - "gateway": "KNX Tunnel Connection" + "gateway": "Please select a gateway from the list." + }, + "data_description": { + "gateway": "Select a KNX tunneling interface you want use for the connection." } }, "tcp_tunnel_endpoint": { - "title": "[%key:component::knx::config::step::knxkeys_tunnel_select::title%]", - "description": "[%key:component::knx::config::step::knxkeys_tunnel_select::description%]", + "title": "Tunnel endpoint", "data": { - "tunnel_endpoint_ia": "[%key:component::knx::config::step::knxkeys_tunnel_select::data::user_id%]" + "tunnel_endpoint_ia": "Select the tunnel endpoint used for the connection." + }, + "data_description": { + "tunnel_endpoint_ia": "'Automatic' selects a free tunnel endpoint for you when connecting. If you're unsure, this is the best option." 
} }, "manual_tunnel": { @@ -27,23 +34,24 @@ "description": "Please enter the connection information of your tunneling device.", "data": { "tunneling_type": "KNX Tunneling Type", - "port": "[%key:common::config_flow::data::port%]", "host": "[%key:common::config_flow::data::host%]", + "port": "[%key:common::config_flow::data::port%]", "route_back": "Route back / NAT mode", "local_ip": "Local IP interface" }, "data_description": { - "port": "Port of the KNX/IP tunneling device.", + "tunneling_type": "Select the tunneling type of your KNX/IP tunneling device. Older interfaces may only support `UDP`.", "host": "IP address or hostname of the KNX/IP tunneling device.", + "port": "Port used by the KNX/IP tunneling device.", "route_back": "Enable if your KNXnet/IP tunneling server is behind NAT. Only applies for UDP connections.", "local_ip": "Local IP or interface name used for the connection from Home Assistant. Leave blank to use auto-discovery." } }, "secure_key_source_menu_tunnel": { "title": "KNX IP-Secure", - "description": "Select how you want to configure KNX/IP Secure.", + "description": "How do you want to configure KNX/IP Secure?", "menu_options": { - "secure_knxkeys": "Use a `.knxkeys` file containing IP secure keys", + "secure_knxkeys": "Use a `.knxkeys` file providing IP secure keys", "secure_tunnel_manual": "Configure IP secure credentials manually" } }, @@ -57,20 +65,23 @@ }, "secure_knxkeys": { "title": "Import KNX Keyring", - "description": "Please select a `.knxkeys` file to import.", + "description": "The Keyring is used to encrypt and decrypt KNX IP Secure communication.", "data": { "knxkeys_file": "Keyring file", - "knxkeys_password": "The password to decrypt the `.knxkeys` file" + "knxkeys_password": "Keyring password" }, "data_description": { - "knxkeys_password": "This was set when exporting the file from ETS." + "knxkeys_file": "Select a `.knxkeys` file. This can be exported from ETS.", + "knxkeys_password": "The password to open the `.knxkeys` file was set when exporting." } }, "knxkeys_tunnel_select": { - "title": "Tunnel endpoint", - "description": "Select the tunnel endpoint used for the connection.", + "title": "[%key:component::knx::config::step::tcp_tunnel_endpoint::title%]", "data": { - "user_id": "'Automatic' selects a free tunnel endpoint for you when connecting. If you're unsure, this is the best option." + "tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data::tunnel_endpoint_ia%]" + }, + "data_description": { + "tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data_description::tunnel_endpoint_ia%]" } }, "secure_tunnel_manual": { @@ -82,7 +93,7 @@ "device_authentication": "Device authentication password" }, "data_description": { - "user_id": "This is often tunnel number +1. So 'Tunnel 2' would have User-ID '3'.", + "user_id": "This usually is tunnel number +1. So first tunnel in the list presented in ETS would have User-ID `2`.", "user_password": "Password for the specific tunnel connection set in the 'Properties' panel of the tunnel in ETS.", "device_authentication": "This is set in the 'IP' panel of the interface in ETS." } @@ -95,8 +106,8 @@ "sync_latency_tolerance": "Network latency tolerance" }, "data_description": { - "backbone_key": "Can be seen in the 'Security' report of an ETS project. Eg. '00112233445566778899AABBCCDDEEFF'", - "sync_latency_tolerance": "Default is 1000." + "backbone_key": "Can be seen in the 'Security' report of your ETS project. Eg. 
`00112233445566778899AABBCCDDEEFF`", + "sync_latency_tolerance": "Should be equal to the backbone configuration of your ETS project. Default is `1000`" } }, "routing": { @@ -104,13 +115,16 @@ "description": "Please configure the routing options.", "data": { "individual_address": "Individual address", - "routing_secure": "Use KNX IP Secure", + "routing_secure": "KNX IP Secure Routing", "multicast_group": "Multicast group", "multicast_port": "Multicast port", "local_ip": "[%key:component::knx::config::step::manual_tunnel::data::local_ip%]" }, "data_description": { "individual_address": "KNX address to be used by Home Assistant, e.g. `0.0.4`", + "routing_secure": "Select if your installation uses encrypted communication according to the KNX IP Secure standard. This setting requires compatible devices and configuration. You'll be prompted for credentials in the next step.", + "multicast_group": "Multicast group used by your installation. Default is `224.0.23.12`", + "multicast_port": "Multicast port used by your installation. Default is `3671`", "local_ip": "[%key:component::knx::config::step::manual_tunnel::data_description::local_ip%]" } } @@ -148,7 +162,7 @@ }, "data_description": { "state_updater": "Set default for reading states from the KNX Bus. When disabled, Home Assistant will not actively retrieve entity states from the KNX Bus. Can be overridden by `sync_state` entity options.", - "rate_limit": "Maximum outgoing telegrams per second.\n`0` to disable limit. Recommended: 0 or 20 to 40", + "rate_limit": "Maximum outgoing telegrams per second.\n`0` to disable limit. Recommended: `0` or between `20` and `40`", "telegram_log_size": "Telegrams to keep in memory for KNX panel group monitor. Maximum: {telegram_log_size_max}" } }, @@ -157,20 +171,27 @@ "description": "[%key:component::knx::config::step::connection_type::description%]", "data": { "connection_type": "[%key:component::knx::config::step::connection_type::data::connection_type%]" + }, + "data_description": { + "connection_type": "[%key:component::knx::config::step::connection_type::data_description::connection_type%]" } }, "tunnel": { "title": "[%key:component::knx::config::step::tunnel::title%]", - "description": "[%key:component::knx::config::step::tunnel::description%]", "data": { "gateway": "[%key:component::knx::config::step::tunnel::data::gateway%]" + }, + "data_description": { + "gateway": "[%key:component::knx::config::step::tunnel::data_description::gateway%]" } }, "tcp_tunnel_endpoint": { - "title": "[%key:component::knx::config::step::knxkeys_tunnel_select::title%]", - "description": "[%key:component::knx::config::step::knxkeys_tunnel_select::description%]", + "title": "[%key:component::knx::config::step::tcp_tunnel_endpoint::title%]", "data": { - "tunnel_endpoint_ia": "[%key:component::knx::config::step::knxkeys_tunnel_select::data::user_id%]" + "tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data::tunnel_endpoint_ia%]" + }, + "data_description": { + "tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data_description::tunnel_endpoint_ia%]" } }, "manual_tunnel": { @@ -184,6 +205,7 @@ "local_ip": "[%key:component::knx::config::step::manual_tunnel::data::local_ip%]" }, "data_description": { + "tunneling_type": "[%key:component::knx::config::step::manual_tunnel::data_description::tunneling_type%]", "port": "[%key:component::knx::config::step::manual_tunnel::data_description::port%]", "host": 
"[%key:component::knx::config::step::manual_tunnel::data_description::host%]", "route_back": "[%key:component::knx::config::step::manual_tunnel::data_description::route_back%]", @@ -214,14 +236,17 @@ "knxkeys_password": "[%key:component::knx::config::step::secure_knxkeys::data::knxkeys_password%]" }, "data_description": { + "knxkeys_file": "[%key:component::knx::config::step::secure_knxkeys::data_description::knxkeys_file%]", "knxkeys_password": "[%key:component::knx::config::step::secure_knxkeys::data_description::knxkeys_password%]" } }, "knxkeys_tunnel_select": { - "title": "[%key:component::knx::config::step::knxkeys_tunnel_select::title%]", - "description": "[%key:component::knx::config::step::knxkeys_tunnel_select::description%]", + "title": "[%key:component::knx::config::step::tcp_tunnel_endpoint::title%]", "data": { - "user_id": "[%key:component::knx::config::step::knxkeys_tunnel_select::data::user_id%]" + "tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data::tunnel_endpoint_ia%]" + }, + "data_description": { + "tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data_description::tunnel_endpoint_ia%]" } }, "secure_tunnel_manual": { @@ -262,6 +287,9 @@ }, "data_description": { "individual_address": "[%key:component::knx::config::step::routing::data_description::individual_address%]", + "routing_secure": "[%key:component::knx::config::step::routing::data_description::routing_secure%]", + "multicast_group": "[%key:component::knx::config::step::routing::data_description::multicast_group%]", + "multicast_port": "[%key:component::knx::config::step::routing::data_description::multicast_port%]", "local_ip": "[%key:component::knx::config::step::manual_tunnel::data_description::local_ip%]" } } From 7032361bf5da98348974e2f716cc8419378e791d Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 27 Dec 2024 17:52:33 +0100 Subject: [PATCH 1108/1198] Make google tasks recoverable (#134092) --- homeassistant/components/google_tasks/api.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/google_tasks/api.py b/homeassistant/components/google_tasks/api.py index 475f98443a6..f51c5103b87 100644 --- a/homeassistant/components/google_tasks/api.py +++ b/homeassistant/components/google_tasks/api.py @@ -9,6 +9,7 @@ from google.oauth2.credentials import Credentials from googleapiclient.discovery import Resource, build from googleapiclient.errors import HttpError from googleapiclient.http import BatchHttpRequest, HttpRequest +from httplib2 import ServerNotFoundError from homeassistant.const import CONF_ACCESS_TOKEN from homeassistant.core import HomeAssistant @@ -150,7 +151,7 @@ class AsyncConfigEntryAuth: async def _execute(self, request: HttpRequest | BatchHttpRequest) -> Any: try: result = await self._hass.async_add_executor_job(request.execute) - except HttpError as err: + except (HttpError, ServerNotFoundError) as err: raise GoogleTasksApiError( f"Google Tasks API responded with: {err.reason or err.status_code})" ) from err From 3120a90f2690fd11b0bbf86318878f0ecd10c7e8 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sun, 29 Dec 2024 14:26:59 +0100 Subject: [PATCH 1109/1198] Make elevenlabs recoverable (#134094) * Make elevenlabs recoverable * Add tests for entry setup * Use the same fixtures for setup and config flow * Update tests/components/elevenlabs/test_setup.py Co-authored-by: Simon <80467011+sorgfresser@users.noreply.github.com> --------- Co-authored-by: Simon Sorg Co-authored-by: 
G Johansson Co-authored-by: Simon <80467011+sorgfresser@users.noreply.github.com> --- .../components/elevenlabs/__init__.py | 9 ++- tests/components/elevenlabs/conftest.py | 55 +++++++++++++++---- .../components/elevenlabs/test_config_flow.py | 11 +++- tests/components/elevenlabs/test_setup.py | 36 ++++++++++++ 4 files changed, 97 insertions(+), 14 deletions(-) create mode 100644 tests/components/elevenlabs/test_setup.py diff --git a/homeassistant/components/elevenlabs/__init__.py b/homeassistant/components/elevenlabs/__init__.py index e8a378d56c6..e5807fec67c 100644 --- a/homeassistant/components/elevenlabs/__init__.py +++ b/homeassistant/components/elevenlabs/__init__.py @@ -6,11 +6,16 @@ from dataclasses import dataclass from elevenlabs import AsyncElevenLabs, Model from elevenlabs.core import ApiError +from httpx import ConnectError from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryError +from homeassistant.exceptions import ( + ConfigEntryAuthFailed, + ConfigEntryError, + ConfigEntryNotReady, +) from homeassistant.helpers.httpx_client import get_async_client from .const import CONF_MODEL @@ -48,6 +53,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ElevenLabsConfigEntry) - model_id = entry.options[CONF_MODEL] try: model = await get_model_by_id(client, model_id) + except ConnectError as err: + raise ConfigEntryNotReady("Failed to connect") from err except ApiError as err: raise ConfigEntryAuthFailed("Auth failed") from err diff --git a/tests/components/elevenlabs/conftest.py b/tests/components/elevenlabs/conftest.py index d410f8bccdd..1c261e2947a 100644 --- a/tests/components/elevenlabs/conftest.py +++ b/tests/components/elevenlabs/conftest.py @@ -5,6 +5,7 @@ from unittest.mock import AsyncMock, patch from elevenlabs.core import ApiError from elevenlabs.types import GetVoicesResponse +from httpx import ConnectError import pytest from homeassistant.components.elevenlabs.const import CONF_MODEL, CONF_VOICE @@ -34,21 +35,55 @@ def _client_mock(): @pytest.fixture def mock_async_client() -> Generator[AsyncMock]: """Override async ElevenLabs client.""" - with patch( - "homeassistant.components.elevenlabs.config_flow.AsyncElevenLabs", - return_value=_client_mock(), - ) as mock_async_client: + with ( + patch( + "homeassistant.components.elevenlabs.AsyncElevenLabs", + return_value=_client_mock(), + ) as mock_async_client, + patch( + "homeassistant.components.elevenlabs.config_flow.AsyncElevenLabs", + new=mock_async_client, + ), + ): yield mock_async_client @pytest.fixture -def mock_async_client_fail() -> Generator[AsyncMock]: +def mock_async_client_api_error() -> Generator[AsyncMock]: + """Override async ElevenLabs client with ApiError side effect.""" + client_mock = _client_mock() + client_mock.models.get_all.side_effect = ApiError + client_mock.voices.get_all.side_effect = ApiError + + with ( + patch( + "homeassistant.components.elevenlabs.AsyncElevenLabs", + return_value=client_mock, + ) as mock_async_client, + patch( + "homeassistant.components.elevenlabs.config_flow.AsyncElevenLabs", + new=mock_async_client, + ), + ): + yield mock_async_client + + +@pytest.fixture +def mock_async_client_connect_error() -> Generator[AsyncMock]: """Override async ElevenLabs client.""" - with patch( - "homeassistant.components.elevenlabs.config_flow.AsyncElevenLabs", - return_value=_client_mock(), - ) as mock_async_client: - 
mock_async_client.side_effect = ApiError + client_mock = _client_mock() + client_mock.models.get_all.side_effect = ConnectError("Unknown") + client_mock.voices.get_all.side_effect = ConnectError("Unknown") + with ( + patch( + "homeassistant.components.elevenlabs.AsyncElevenLabs", + return_value=client_mock, + ) as mock_async_client, + patch( + "homeassistant.components.elevenlabs.config_flow.AsyncElevenLabs", + new=mock_async_client, + ), + ): yield mock_async_client diff --git a/tests/components/elevenlabs/test_config_flow.py b/tests/components/elevenlabs/test_config_flow.py index 95e7ab5214e..7eeb0a6eb46 100644 --- a/tests/components/elevenlabs/test_config_flow.py +++ b/tests/components/elevenlabs/test_config_flow.py @@ -2,6 +2,8 @@ from unittest.mock import AsyncMock +import pytest + from homeassistant.components.elevenlabs.const import ( CONF_CONFIGURE_VOICE, CONF_MODEL, @@ -56,7 +58,10 @@ async def test_user_step( async def test_invalid_api_key( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_async_client_fail: AsyncMock + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_async_client_api_error: AsyncMock, + request: pytest.FixtureRequest, ) -> None: """Test user step with invalid api key.""" @@ -77,8 +82,8 @@ async def test_invalid_api_key( mock_setup_entry.assert_not_called() - # Reset the side effect - mock_async_client_fail.side_effect = None + # Use a working client + request.getfixturevalue("mock_async_client") result = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git a/tests/components/elevenlabs/test_setup.py b/tests/components/elevenlabs/test_setup.py new file mode 100644 index 00000000000..18b90ca3561 --- /dev/null +++ b/tests/components/elevenlabs/test_setup.py @@ -0,0 +1,36 @@ +"""Tests for the ElevenLabs TTS entity.""" + +from __future__ import annotations + +from unittest.mock import MagicMock + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_setup( + hass: HomeAssistant, + mock_async_client: MagicMock, + mock_entry: MockConfigEntry, +) -> None: + """Test entry setup without any exceptions.""" + mock_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_entry.entry_id) + assert mock_entry.state == ConfigEntryState.LOADED + # Unload + await hass.config_entries.async_unload(mock_entry.entry_id) + assert mock_entry.state == ConfigEntryState.NOT_LOADED + + +async def test_setup_connect_error( + hass: HomeAssistant, + mock_async_client_connect_error: MagicMock, + mock_entry: MockConfigEntry, +) -> None: + """Test entry setup with a connection error.""" + mock_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_entry.entry_id) + # Ensure is not ready + assert mock_entry.state == ConfigEntryState.SETUP_RETRY From 1874eec8b34a269df2eb690fd25e1262ef6b36e5 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 27 Dec 2024 20:21:12 +0100 Subject: [PATCH 1110/1198] Bump python-homeassistant-analytics to 0.8.1 (#134101) --- homeassistant/components/analytics_insights/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/analytics_insights/manifest.json b/homeassistant/components/analytics_insights/manifest.json index 841cf1caf42..bf99d89e073 100644 --- a/homeassistant/components/analytics_insights/manifest.json +++ 
b/homeassistant/components/analytics_insights/manifest.json @@ -7,6 +7,6 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["python_homeassistant_analytics"], - "requirements": ["python-homeassistant-analytics==0.8.0"], + "requirements": ["python-homeassistant-analytics==0.8.1"], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index abc3f2777b1..5bbad424f1d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2360,7 +2360,7 @@ python-gc100==1.0.3a0 python-gitlab==1.6.0 # homeassistant.components.analytics_insights -python-homeassistant-analytics==0.8.0 +python-homeassistant-analytics==0.8.1 # homeassistant.components.homewizard python-homewizard-energy==v7.0.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 304416e4dd6..0714206ed5a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1902,7 +1902,7 @@ python-fullykiosk==0.0.14 # python-gammu==3.2.4 # homeassistant.components.analytics_insights -python-homeassistant-analytics==0.8.0 +python-homeassistant-analytics==0.8.1 # homeassistant.components.homewizard python-homewizard-energy==v7.0.0 From 951baa3972f0956b5bfcc2029fbaa7f06d1a5ded Mon Sep 17 00:00:00 2001 From: Aaron Bach Date: Fri, 27 Dec 2024 12:04:35 -0700 Subject: [PATCH 1111/1198] Bump `pytile` to 2024.12.0 (#134103) --- homeassistant/components/tile/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/tile/manifest.json b/homeassistant/components/tile/manifest.json index 8dceddcb77f..f8acbc0bf1a 100644 --- a/homeassistant/components/tile/manifest.json +++ b/homeassistant/components/tile/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "cloud_polling", "loggers": ["pytile"], - "requirements": ["pytile==2023.12.0"] + "requirements": ["pytile==2024.12.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 5bbad424f1d..efd969cb543 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2442,7 +2442,7 @@ python-vlc==3.0.18122 pythonegardia==1.0.52 # homeassistant.components.tile -pytile==2023.12.0 +pytile==2024.12.0 # homeassistant.components.tomorrowio pytomorrowio==0.3.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 0714206ed5a..96a25319338 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1966,7 +1966,7 @@ python-technove==1.3.1 python-telegram-bot[socks]==21.5 # homeassistant.components.tile -pytile==2023.12.0 +pytile==2024.12.0 # homeassistant.components.tomorrowio pytomorrowio==0.3.6 From bd243f68a48d3275dc75f8e92c18fea856ac7274 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sat, 28 Dec 2024 13:13:07 +0100 Subject: [PATCH 1112/1198] Bump yt-dlp to 2024.12.23 (#134131) --- homeassistant/components/media_extractor/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/media_extractor/manifest.json b/homeassistant/components/media_extractor/manifest.json index 21c07607573..144904fe58c 100644 --- a/homeassistant/components/media_extractor/manifest.json +++ b/homeassistant/components/media_extractor/manifest.json @@ -8,6 +8,6 @@ "iot_class": "calculated", "loggers": ["yt_dlp"], "quality_scale": "internal", - "requirements": ["yt-dlp[default]==2024.12.13"], + "requirements": ["yt-dlp[default]==2024.12.23"], "single_config_entry": true 
} diff --git a/requirements_all.txt b/requirements_all.txt index efd969cb543..2b32db06322 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3082,7 +3082,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2024.12.13 +yt-dlp[default]==2024.12.23 # homeassistant.components.zabbix zabbix-utils==2.0.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 96a25319338..34c535a7832 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2477,7 +2477,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2024.12.13 +yt-dlp[default]==2024.12.23 # homeassistant.components.zamg zamg==0.3.6 From ef873663465f7ac127c5d30e69b1d9588dc7ef0a Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sat, 28 Dec 2024 15:36:23 +0100 Subject: [PATCH 1113/1198] Add missing device classes in scrape (#134141) --- homeassistant/components/scrape/strings.json | 2 ++ 1 file changed, 2 insertions(+) diff --git a/homeassistant/components/scrape/strings.json b/homeassistant/components/scrape/strings.json index 42cf3001b75..27115836157 100644 --- a/homeassistant/components/scrape/strings.json +++ b/homeassistant/components/scrape/strings.json @@ -141,8 +141,10 @@ "options": { "apparent_power": "[%key:component::sensor::entity_component::apparent_power::name%]", "aqi": "[%key:component::sensor::entity_component::aqi::name%]", + "area": "[%key:component::sensor::entity_component::area::name%]", "atmospheric_pressure": "[%key:component::sensor::entity_component::atmospheric_pressure::name%]", "battery": "[%key:component::sensor::entity_component::battery::name%]", + "blood_glucose_concentration": "[%key:component::sensor::entity_component::blood_glucose_concentration::name%]", "carbon_dioxide": "[%key:component::sensor::entity_component::carbon_dioxide::name%]", "carbon_monoxide": "[%key:component::sensor::entity_component::carbon_monoxide::name%]", "conductivity": "[%key:component::sensor::entity_component::conductivity::name%]", From 291dd6dc66886628a4e058469cdf6b7198b431f0 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Sun, 29 Dec 2024 16:39:37 +0100 Subject: [PATCH 1114/1198] Update knx-frontend to 2024.12.26.233449 (#134184) --- homeassistant/components/knx/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/knx/manifest.json b/homeassistant/components/knx/manifest.json index 55c19443aa0..8d18f11c798 100644 --- a/homeassistant/components/knx/manifest.json +++ b/homeassistant/components/knx/manifest.json @@ -12,7 +12,7 @@ "requirements": [ "xknx==3.4.0", "xknxproject==3.8.1", - "knx-frontend==2024.11.16.205004" + "knx-frontend==2024.12.26.233449" ], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index 2b32db06322..f52514eac04 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1260,7 +1260,7 @@ kiwiki-client==0.1.1 knocki==0.4.2 # homeassistant.components.knx -knx-frontend==2024.11.16.205004 +knx-frontend==2024.12.26.233449 # homeassistant.components.konnected konnected==1.2.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 34c535a7832..41bf2f7835d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1062,7 +1062,7 @@ kegtron-ble==0.4.0 knocki==0.4.2 # homeassistant.components.knx -knx-frontend==2024.11.16.205004 +knx-frontend==2024.12.26.233449 # 
homeassistant.components.konnected konnected==1.2.0 From 394b2be40a685f384541addb79e16a3a6c4a0cff Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Sun, 29 Dec 2024 14:07:45 +0100 Subject: [PATCH 1115/1198] Make PEGELONLINE recoverable (#134199) --- .../components/pegel_online/__init__.py | 7 +++++- tests/components/pegel_online/test_init.py | 22 +++++++++++++++++++ 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/pegel_online/__init__.py b/homeassistant/components/pegel_online/__init__.py index 2c465342493..30e5f4d2a38 100644 --- a/homeassistant/components/pegel_online/__init__.py +++ b/homeassistant/components/pegel_online/__init__.py @@ -5,10 +5,12 @@ from __future__ import annotations import logging from aiopegelonline import PegelOnline +from aiopegelonline.const import CONNECT_ERRORS from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import CONF_STATION @@ -28,7 +30,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: PegelOnlineConfigEntry) _LOGGER.debug("Setting up station with uuid %s", station_uuid) api = PegelOnline(async_get_clientsession(hass)) - station = await api.async_get_station_details(station_uuid) + try: + station = await api.async_get_station_details(station_uuid) + except CONNECT_ERRORS as err: + raise ConfigEntryNotReady("Failed to connect") from err coordinator = PegelOnlineDataUpdateCoordinator(hass, entry.title, api, station) diff --git a/tests/components/pegel_online/test_init.py b/tests/components/pegel_online/test_init.py index c1b8f1861c4..ac153193983 100644 --- a/tests/components/pegel_online/test_init.py +++ b/tests/components/pegel_online/test_init.py @@ -10,6 +10,7 @@ from homeassistant.components.pegel_online.const import ( DOMAIN, MIN_TIME_BETWEEN_UPDATES, ) +from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.util import utcnow @@ -24,6 +25,27 @@ from .const import ( from tests.common import MockConfigEntry, async_fire_time_changed +async def test_setup_error( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Tests error during config entry setup.""" + entry = MockConfigEntry( + domain=DOMAIN, + data=MOCK_CONFIG_ENTRY_DATA_DRESDEN, + unique_id=MOCK_CONFIG_ENTRY_DATA_DRESDEN[CONF_STATION], + ) + entry.add_to_hass(hass) + with patch("homeassistant.components.pegel_online.PegelOnline") as pegelonline: + pegelonline.return_value = PegelOnlineMock( + station_details=MOCK_STATION_DETAILS_DRESDEN, + station_measurements=MOCK_STATION_MEASUREMENT_DRESDEN, + ) + pegelonline().override_side_effect(ClientError("Boom")) + await hass.config_entries.async_setup(entry.entry_id) + + assert entry.state is ConfigEntryState.SETUP_RETRY + + async def test_update_error( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: From a38839b420779054cdd2831e0d053d2d35ce4ae1 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Sun, 29 Dec 2024 15:08:15 +0100 Subject: [PATCH 1116/1198] Make feedreader recoverable (#134202) raise ConfigEntryNotReady on connection errors during setup --- .../components/feedreader/coordinator.py | 7 ++++++- tests/components/feedreader/test_init.py | 18 
++++++++++++++++++ 2 files changed, 24 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/feedreader/coordinator.py b/homeassistant/components/feedreader/coordinator.py index f45b303946a..fc338d63268 100644 --- a/homeassistant/components/feedreader/coordinator.py +++ b/homeassistant/components/feedreader/coordinator.py @@ -14,6 +14,7 @@ import feedparser from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.storage import Store from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from homeassistant.util import dt as dt_util @@ -101,7 +102,11 @@ class FeedReaderCoordinator( async def async_setup(self) -> None: """Set up the feed manager.""" - feed = await self._async_fetch_feed() + try: + feed = await self._async_fetch_feed() + except UpdateFailed as err: + raise ConfigEntryNotReady from err + self.logger.debug("Feed data fetched from %s : %s", self.url, feed["feed"]) if feed_author := feed["feed"].get("author"): self.feed_author = html.unescape(feed_author) diff --git a/tests/components/feedreader/test_init.py b/tests/components/feedreader/test_init.py index bc7a66dc86e..9a2575bf591 100644 --- a/tests/components/feedreader/test_init.py +++ b/tests/components/feedreader/test_init.py @@ -11,6 +11,7 @@ from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.feedreader.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import Event, HomeAssistant from homeassistant.helpers import device_registry as dr import homeassistant.util.dt as dt_util @@ -52,6 +53,23 @@ async def test_setup( assert not events +async def test_setup_error( + hass: HomeAssistant, + feed_one_event, +) -> None: + """Test setup error.""" + entry = create_mock_entry(VALID_CONFIG_DEFAULT) + entry.add_to_hass(hass) + with patch( + "homeassistant.components.feedreader.coordinator.feedparser.http.get" + ) as feedreader: + feedreader.side_effect = urllib.error.URLError("Test") + feedreader.return_value = feed_one_event + await hass.config_entries.async_setup(entry.entry_id) + + assert entry.state is ConfigEntryState.SETUP_RETRY + + async def test_storage_data_writing( hass: HomeAssistant, events: list[Event], From 0470bff9a221e8d8c876bcc4e59017f3c148168f Mon Sep 17 00:00:00 2001 From: Lucas Gasenzer Date: Sun, 29 Dec 2024 18:03:41 +0100 Subject: [PATCH 1117/1198] Fix Wake on LAN Port input as Box instead of Slider (#134216) --- homeassistant/components/wake_on_lan/services.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/wake_on_lan/services.yaml b/homeassistant/components/wake_on_lan/services.yaml index 48d3df5c4f9..e7c048daf64 100644 --- a/homeassistant/components/wake_on_lan/services.yaml +++ b/homeassistant/components/wake_on_lan/services.yaml @@ -15,3 +15,4 @@ send_magic_packet: number: min: 1 max: 65535 + mode: "box" From 52e47f55c87ce5dc36a24f36141d05b95ab0fd0b Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Sun, 29 Dec 2024 11:56:27 -0600 Subject: [PATCH 1118/1198] Bump VoIP utils to 0.2.2 (#134219) --- homeassistant/components/voip/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/voip/manifest.json b/homeassistant/components/voip/manifest.json index 7dd2e797058..ed7f11f8fbc 100644 --- 
a/homeassistant/components/voip/manifest.json +++ b/homeassistant/components/voip/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/voip", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["voip-utils==0.2.1"] + "requirements": ["voip-utils==0.2.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index f52514eac04..5055b1842f2 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2960,7 +2960,7 @@ venstarcolortouch==0.19 vilfo-api-client==0.5.0 # homeassistant.components.voip -voip-utils==0.2.1 +voip-utils==0.2.2 # homeassistant.components.volkszaehler volkszaehler==0.4.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 41bf2f7835d..5f5a4008f31 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2376,7 +2376,7 @@ venstarcolortouch==0.19 vilfo-api-client==0.5.0 # homeassistant.components.voip -voip-utils==0.2.1 +voip-utils==0.2.2 # homeassistant.components.volvooncall volvooncall==0.10.3 From 352d5d14a33ca3c57ab0ac532c64c2f3e6dfdc75 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Sun, 29 Dec 2024 13:35:46 -0500 Subject: [PATCH 1119/1198] Bump frontend to 20241229.0 (#134225) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 4a70889c1d2..ce40ce35a65 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20241224.0"] + "requirements": ["home-assistant-frontend==20241229.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index a66137ef8c3..1d4e86e9671 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -35,7 +35,7 @@ habluetooth==3.6.0 hass-nabucasa==0.87.0 hassil==2.0.5 home-assistant-bluetooth==1.13.0 -home-assistant-frontend==20241224.0 +home-assistant-frontend==20241229.0 home-assistant-intents==2024.12.20 httpx==0.27.2 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 5055b1842f2..20dd8e7709b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1134,7 +1134,7 @@ hole==0.8.0 holidays==0.63 # homeassistant.components.frontend -home-assistant-frontend==20241224.0 +home-assistant-frontend==20241229.0 # homeassistant.components.conversation home-assistant-intents==2024.12.20 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5f5a4008f31..7bb70d209ff 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -963,7 +963,7 @@ hole==0.8.0 holidays==0.63 # homeassistant.components.frontend -home-assistant-frontend==20241224.0 +home-assistant-frontend==20241229.0 # homeassistant.components.conversation home-assistant-intents==2024.12.20 From b05b9b9a33746601331591abab9efdb37045b5d9 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Sun, 29 Dec 2024 18:37:17 +0000 Subject: [PATCH 1120/1198] Bump version to 2025.1.0b3 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py 
b/homeassistant/const.py index 42407f46fb5..91b31959854 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -25,7 +25,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2025 MINOR_VERSION: Final = 1 -PATCH_VERSION: Final = "0b2" +PATCH_VERSION: Final = "0b3" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 95cc634a333..7fdc8b16719 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2025.1.0b2" +version = "2025.1.0b3" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" From cf9ccc6fb424f9a59a5680606a94eee252935a57 Mon Sep 17 00:00:00 2001 From: Paul Daumlechner Date: Sun, 29 Dec 2024 21:00:26 +0100 Subject: [PATCH 1121/1198] Bump pyvlx to 0.2.26 (#115483) --- homeassistant/components/velux/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/velux/manifest.json b/homeassistant/components/velux/manifest.json index c3576aca925..053b7fcc594 100644 --- a/homeassistant/components/velux/manifest.json +++ b/homeassistant/components/velux/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/velux", "iot_class": "local_polling", "loggers": ["pyvlx"], - "requirements": ["pyvlx==0.2.21"] + "requirements": ["pyvlx==0.2.26"] } diff --git a/requirements_all.txt b/requirements_all.txt index 20dd8e7709b..b66662c2756 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2491,7 +2491,7 @@ pyvesync==2.1.12 pyvizio==0.1.61 # homeassistant.components.velux -pyvlx==0.2.21 +pyvlx==0.2.26 # homeassistant.components.volumio pyvolumio==0.1.5 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 7bb70d209ff..1f8fcd4476f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2006,7 +2006,7 @@ pyvesync==2.1.12 pyvizio==0.1.61 # homeassistant.components.velux -pyvlx==0.2.21 +pyvlx==0.2.26 # homeassistant.components.volumio pyvolumio==0.1.5 From 2f8a92c7253184486bfea6ba54b3bacb72a2f6ac Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Mon, 30 Dec 2024 13:47:16 +0100 Subject: [PATCH 1122/1198] Make triggers and condition for monetary sensor consistent (#131184) --- homeassistant/components/sensor/strings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/sensor/strings.json b/homeassistant/components/sensor/strings.json index 0bc370398b5..d44d621f82d 100644 --- a/homeassistant/components/sensor/strings.json +++ b/homeassistant/components/sensor/strings.json @@ -23,7 +23,7 @@ "is_illuminance": "Current {entity_name} illuminance", "is_irradiance": "Current {entity_name} irradiance", "is_moisture": "Current {entity_name} moisture", - "is_monetary": "Current {entity_name} money", + "is_monetary": "Current {entity_name} balance", "is_nitrogen_dioxide": "Current {entity_name} nitrogen dioxide concentration level", "is_nitrogen_monoxide": "Current {entity_name} nitrogen monoxide concentration level", "is_nitrous_oxide": "Current {entity_name} nitrous oxide concentration level", @@ -75,7 +75,7 @@ "illuminance": "{entity_name} illuminance changes", "irradiance": 
"{entity_name} irradiance changes", "moisture": "{entity_name} moisture changes", - "monetary": "{entity_name} money changes", + "monetary": "{entity_name} balance changes", "nitrogen_dioxide": "{entity_name} nitrogen dioxide concentration changes", "nitrogen_monoxide": "{entity_name} nitrogen monoxide concentration changes", "nitrous_oxide": "{entity_name} nitrous oxide concentration changes", From 57561665453b429c93ec5ce5f1a59043cd7dce31 Mon Sep 17 00:00:00 2001 From: Adam Goode Date: Mon, 30 Dec 2024 06:05:33 -0500 Subject: [PATCH 1123/1198] Quickly process unavailable metrics in Prometheus (#133219) --- .../components/prometheus/__init__.py | 547 ++++++++++-------- 1 file changed, 293 insertions(+), 254 deletions(-) diff --git a/homeassistant/components/prometheus/__init__.py b/homeassistant/components/prometheus/__init__.py index c243bf90dc0..ab012847bba 100644 --- a/homeassistant/components/prometheus/__init__.py +++ b/homeassistant/components/prometheus/__init__.py @@ -2,8 +2,9 @@ from __future__ import annotations +from collections import defaultdict from collections.abc import Callable -from contextlib import suppress +from dataclasses import astuple, dataclass import logging import string from typing import Any, cast @@ -158,6 +159,22 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: return True +@dataclass(frozen=True, slots=True) +class MetricNameWithLabelValues: + """Class to represent a metric with its label values. + + The prometheus client library doesn't easily allow us to get back the + information we put into it. Specifically, it is very expensive to query + which label values have been set for metrics. + + This class is used to hold a bit of data we need to efficiently remove + labelsets from metrics. + """ + + metric_name: str + label_values: tuple[str, ...] 
+ + class PrometheusMetrics: """Model all of the metrics which should be exposed to Prometheus.""" @@ -191,6 +208,9 @@ class PrometheusMetrics: else: self.metrics_prefix = "" self._metrics: dict[str, MetricWrapperBase] = {} + self._metrics_by_entity_id: dict[str, set[MetricNameWithLabelValues]] = ( + defaultdict(set) + ) self._climate_units = climate_units def handle_state_changed_event(self, event: Event[EventStateChangedData]) -> None: @@ -202,10 +222,12 @@ class PrometheusMetrics: _LOGGER.debug("Filtered out entity %s", state.entity_id) return - if (old_state := event.data.get("old_state")) is not None and ( - old_friendly_name := old_state.attributes.get(ATTR_FRIENDLY_NAME) + if ( + old_state := event.data.get("old_state") + ) is not None and old_state.attributes.get( + ATTR_FRIENDLY_NAME ) != state.attributes.get(ATTR_FRIENDLY_NAME): - self._remove_labelsets(old_state.entity_id, old_friendly_name) + self._remove_labelsets(old_state.entity_id) self.handle_state(state) @@ -215,30 +237,32 @@ class PrometheusMetrics: _LOGGER.debug("Handling state update for %s", entity_id) labels = self._labels(state) - state_change = self._metric( - "state_change", prometheus_client.Counter, "The number of state changes" - ) - state_change.labels(**labels).inc() - entity_available = self._metric( + self._metric( + "state_change", + prometheus_client.Counter, + "The number of state changes", + labels, + ).inc() + + self._metric( "entity_available", prometheus_client.Gauge, "Entity is available (not in the unavailable or unknown state)", - ) - entity_available.labels(**labels).set(float(state.state not in IGNORED_STATES)) + labels, + ).set(float(state.state not in IGNORED_STATES)) - last_updated_time_seconds = self._metric( + self._metric( "last_updated_time_seconds", prometheus_client.Gauge, "The last_updated timestamp", - ) - last_updated_time_seconds.labels(**labels).set(state.last_updated.timestamp()) + labels, + ).set(state.last_updated.timestamp()) if state.state in IGNORED_STATES: self._remove_labelsets( entity_id, - None, - {state_change, entity_available, last_updated_time_seconds}, + {"state_change", "entity_available", "last_updated_time_seconds"}, ) else: domain, _ = hacore.split_entity_id(entity_id) @@ -274,67 +298,68 @@ class PrometheusMetrics: def _remove_labelsets( self, entity_id: str, - friendly_name: str | None = None, - ignored_metrics: set[MetricWrapperBase] | None = None, + ignored_metric_names: set[str] | None = None, ) -> None: """Remove labelsets matching the given entity id from all non-ignored metrics.""" - if ignored_metrics is None: - ignored_metrics = set() - for metric in list(self._metrics.values()): - if metric in ignored_metrics: + if ignored_metric_names is None: + ignored_metric_names = set() + metric_set = self._metrics_by_entity_id[entity_id] + removed_metrics = set() + for metric in metric_set: + metric_name, label_values = astuple(metric) + if metric_name in ignored_metric_names: continue - for sample in cast(list[prometheus_client.Metric], metric.collect())[ - 0 - ].samples: - if sample.labels["entity"] == entity_id and ( - not friendly_name or sample.labels["friendly_name"] == friendly_name - ): - _LOGGER.debug( - "Removing labelset from %s for entity_id: %s", - sample.name, - entity_id, - ) - with suppress(KeyError): - metric.remove(*sample.labels.values()) + + _LOGGER.debug( + "Removing labelset %s from %s for entity_id: %s", + label_values, + metric_name, + entity_id, + ) + removed_metrics.add(metric) + self._metrics[metric_name].remove(*label_values) + 
metric_set -= removed_metrics + if not metric_set: + del self._metrics_by_entity_id[entity_id] def _handle_attributes(self, state: State) -> None: for key, value in state.attributes.items(): - metric = self._metric( + try: + value = float(value) + except (ValueError, TypeError): + continue + + self._metric( f"{state.domain}_attr_{key.lower()}", prometheus_client.Gauge, f"{key} attribute of {state.domain} entity", - ) - - try: - value = float(value) - metric.labels(**self._labels(state)).set(value) - except (ValueError, TypeError): - pass + self._labels(state), + ).set(value) def _metric[_MetricBaseT: MetricWrapperBase]( self, - metric: str, + metric_name: str, factory: type[_MetricBaseT], documentation: str, - extra_labels: list[str] | None = None, + labels: dict[str, str], ) -> _MetricBaseT: - labels = ["entity", "friendly_name", "domain"] - if extra_labels is not None: - labels.extend(extra_labels) - try: - return cast(_MetricBaseT, self._metrics[metric]) + metric = cast(_MetricBaseT, self._metrics[metric_name]) except KeyError: full_metric_name = self._sanitize_metric_name( - f"{self.metrics_prefix}{metric}" + f"{self.metrics_prefix}{metric_name}" ) - self._metrics[metric] = factory( + self._metrics[metric_name] = factory( full_metric_name, documentation, - labels, + labels.keys(), registry=prometheus_client.REGISTRY, ) - return cast(_MetricBaseT, self._metrics[metric]) + metric = cast(_MetricBaseT, self._metrics[metric_name]) + self._metrics_by_entity_id[labels["entity"]].add( + MetricNameWithLabelValues(metric_name, tuple(labels.values())) + ) + return metric.labels(**labels) @staticmethod def _sanitize_metric_name(metric: str) -> str: @@ -356,67 +381,90 @@ class PrometheusMetrics: return value @staticmethod - def _labels(state: State) -> dict[str, Any]: - return { + def _labels( + state: State, + extra_labels: dict[str, str] | None = None, + ) -> dict[str, Any]: + if extra_labels is None: + extra_labels = {} + labels = { "entity": state.entity_id, "domain": state.domain, "friendly_name": state.attributes.get(ATTR_FRIENDLY_NAME), } + if not labels.keys().isdisjoint(extra_labels.keys()): + conflicting_keys = labels.keys() & extra_labels.keys() + raise ValueError( + f"extra_labels contains conflicting keys: {conflicting_keys}" + ) + return labels | extra_labels def _battery(self, state: State) -> None: - if (battery_level := state.attributes.get(ATTR_BATTERY_LEVEL)) is not None: - metric = self._metric( - "battery_level_percent", - prometheus_client.Gauge, - "Battery level as a percentage of its capacity", - ) - try: - value = float(battery_level) - metric.labels(**self._labels(state)).set(value) - except ValueError: - pass + if (battery_level := state.attributes.get(ATTR_BATTERY_LEVEL)) is None: + return + + try: + value = float(battery_level) + except ValueError: + return + + self._metric( + "battery_level_percent", + prometheus_client.Gauge, + "Battery level as a percentage of its capacity", + self._labels(state), + ).set(value) def _handle_binary_sensor(self, state: State) -> None: - metric = self._metric( + if (value := self.state_as_number(state)) is None: + return + + self._metric( "binary_sensor_state", prometheus_client.Gauge, "State of the binary sensor (0/1)", - ) - if (value := self.state_as_number(state)) is not None: - metric.labels(**self._labels(state)).set(value) + self._labels(state), + ).set(value) def _handle_input_boolean(self, state: State) -> None: - metric = self._metric( + if (value := self.state_as_number(state)) is None: + return + + self._metric( 
"input_boolean_state", prometheus_client.Gauge, "State of the input boolean (0/1)", - ) - if (value := self.state_as_number(state)) is not None: - metric.labels(**self._labels(state)).set(value) + self._labels(state), + ).set(value) def _numeric_handler(self, state: State, domain: str, title: str) -> None: + if (value := self.state_as_number(state)) is None: + return + if unit := self._unit_string(state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)): metric = self._metric( f"{domain}_state_{unit}", prometheus_client.Gauge, f"State of the {title} measured in {unit}", + self._labels(state), ) else: metric = self._metric( f"{domain}_state", prometheus_client.Gauge, f"State of the {title}", + self._labels(state), ) - if (value := self.state_as_number(state)) is not None: - if ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == UnitOfTemperature.FAHRENHEIT - ): - value = TemperatureConverter.convert( - value, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS - ) - metric.labels(**self._labels(state)).set(value) + if ( + state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) + == UnitOfTemperature.FAHRENHEIT + ): + value = TemperatureConverter.convert( + value, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS + ) + + metric.set(value) def _handle_input_number(self, state: State) -> None: self._numeric_handler(state, "input_number", "input number") @@ -425,88 +473,99 @@ class PrometheusMetrics: self._numeric_handler(state, "number", "number") def _handle_device_tracker(self, state: State) -> None: - metric = self._metric( + if (value := self.state_as_number(state)) is None: + return + + self._metric( "device_tracker_state", prometheus_client.Gauge, "State of the device tracker (0/1)", - ) - if (value := self.state_as_number(state)) is not None: - metric.labels(**self._labels(state)).set(value) + self._labels(state), + ).set(value) def _handle_person(self, state: State) -> None: - metric = self._metric( - "person_state", prometheus_client.Gauge, "State of the person (0/1)" - ) - if (value := self.state_as_number(state)) is not None: - metric.labels(**self._labels(state)).set(value) + if (value := self.state_as_number(state)) is None: + return + + self._metric( + "person_state", + prometheus_client.Gauge, + "State of the person (0/1)", + self._labels(state), + ).set(value) def _handle_cover(self, state: State) -> None: - metric = self._metric( - "cover_state", - prometheus_client.Gauge, - "State of the cover (0/1)", - ["state"], - ) - cover_states = [STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING] for cover_state in cover_states: - metric.labels(**dict(self._labels(state), state=cover_state)).set( - float(cover_state == state.state) + metric = self._metric( + "cover_state", + prometheus_client.Gauge, + "State of the cover (0/1)", + self._labels(state, {"state": cover_state}), ) + metric.set(float(cover_state == state.state)) position = state.attributes.get(ATTR_CURRENT_POSITION) if position is not None: - position_metric = self._metric( + self._metric( "cover_position", prometheus_client.Gauge, "Position of the cover (0-100)", - ) - position_metric.labels(**self._labels(state)).set(float(position)) + self._labels(state), + ).set(float(position)) tilt_position = state.attributes.get(ATTR_CURRENT_TILT_POSITION) if tilt_position is not None: - tilt_position_metric = self._metric( + self._metric( "cover_tilt_position", prometheus_client.Gauge, "Tilt Position of the cover (0-100)", - ) - tilt_position_metric.labels(**self._labels(state)).set(float(tilt_position)) + self._labels(state), + 
).set(float(tilt_position)) def _handle_light(self, state: State) -> None: - metric = self._metric( + if (value := self.state_as_number(state)) is None: + return + + brightness = state.attributes.get(ATTR_BRIGHTNESS) + if state.state == STATE_ON and brightness is not None: + value = float(brightness) / 255.0 + value = value * 100 + + self._metric( "light_brightness_percent", prometheus_client.Gauge, "Light brightness percentage (0..100)", - ) - - if (value := self.state_as_number(state)) is not None: - brightness = state.attributes.get(ATTR_BRIGHTNESS) - if state.state == STATE_ON and brightness is not None: - value = float(brightness) / 255.0 - value = value * 100 - metric.labels(**self._labels(state)).set(value) + self._labels(state), + ).set(value) def _handle_lock(self, state: State) -> None: - metric = self._metric( - "lock_state", prometheus_client.Gauge, "State of the lock (0/1)" - ) - if (value := self.state_as_number(state)) is not None: - metric.labels(**self._labels(state)).set(value) + if (value := self.state_as_number(state)) is None: + return + + self._metric( + "lock_state", + prometheus_client.Gauge, + "State of the lock (0/1)", + self._labels(state), + ).set(value) def _handle_climate_temp( self, state: State, attr: str, metric_name: str, metric_description: str ) -> None: - if (temp := state.attributes.get(attr)) is not None: - if self._climate_units == UnitOfTemperature.FAHRENHEIT: - temp = TemperatureConverter.convert( - temp, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS - ) - metric = self._metric( - metric_name, - prometheus_client.Gauge, - metric_description, + if (temp := state.attributes.get(attr)) is None: + return + + if self._climate_units == UnitOfTemperature.FAHRENHEIT: + temp = TemperatureConverter.convert( + temp, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS ) - metric.labels(**self._labels(state)).set(temp) + self._metric( + metric_name, + prometheus_client.Gauge, + metric_description, + self._labels(state), + ).set(temp) def _handle_climate(self, state: State) -> None: self._handle_climate_temp( @@ -535,90 +594,75 @@ class PrometheusMetrics: ) if current_action := state.attributes.get(ATTR_HVAC_ACTION): - metric = self._metric( - "climate_action", - prometheus_client.Gauge, - "HVAC action", - ["action"], - ) for action in HVACAction: - metric.labels(**dict(self._labels(state), action=action.value)).set( - float(action == current_action) - ) + self._metric( + "climate_action", + prometheus_client.Gauge, + "HVAC action", + self._labels(state, {"action": action.value}), + ).set(float(action == current_action)) current_mode = state.state available_modes = state.attributes.get(ATTR_HVAC_MODES) if current_mode and available_modes: - metric = self._metric( - "climate_mode", - prometheus_client.Gauge, - "HVAC mode", - ["mode"], - ) for mode in available_modes: - metric.labels(**dict(self._labels(state), mode=mode)).set( - float(mode == current_mode) - ) + self._metric( + "climate_mode", + prometheus_client.Gauge, + "HVAC mode", + self._labels(state, {"mode": mode}), + ).set(float(mode == current_mode)) preset_mode = state.attributes.get(ATTR_PRESET_MODE) available_preset_modes = state.attributes.get(ATTR_PRESET_MODES) if preset_mode and available_preset_modes: - preset_metric = self._metric( - "climate_preset_mode", - prometheus_client.Gauge, - "Preset mode enum", - ["mode"], - ) for mode in available_preset_modes: - preset_metric.labels(**dict(self._labels(state), mode=mode)).set( - float(mode == preset_mode) - ) + self._metric( + 
"climate_preset_mode", + prometheus_client.Gauge, + "Preset mode enum", + self._labels(state, {"mode": mode}), + ).set(float(mode == preset_mode)) fan_mode = state.attributes.get(ATTR_FAN_MODE) available_fan_modes = state.attributes.get(ATTR_FAN_MODES) if fan_mode and available_fan_modes: - fan_mode_metric = self._metric( - "climate_fan_mode", - prometheus_client.Gauge, - "Fan mode enum", - ["mode"], - ) for mode in available_fan_modes: - fan_mode_metric.labels(**dict(self._labels(state), mode=mode)).set( - float(mode == fan_mode) - ) + self._metric( + "climate_fan_mode", + prometheus_client.Gauge, + "Fan mode enum", + self._labels(state, {"mode": mode}), + ).set(float(mode == fan_mode)) def _handle_humidifier(self, state: State) -> None: humidifier_target_humidity_percent = state.attributes.get(ATTR_HUMIDITY) if humidifier_target_humidity_percent: - metric = self._metric( + self._metric( "humidifier_target_humidity_percent", prometheus_client.Gauge, "Target Relative Humidity", - ) - metric.labels(**self._labels(state)).set(humidifier_target_humidity_percent) + self._labels(state), + ).set(humidifier_target_humidity_percent) - metric = self._metric( - "humidifier_state", - prometheus_client.Gauge, - "State of the humidifier (0/1)", - ) if (value := self.state_as_number(state)) is not None: - metric.labels(**self._labels(state)).set(value) + self._metric( + "humidifier_state", + prometheus_client.Gauge, + "State of the humidifier (0/1)", + self._labels(state), + ).set(value) current_mode = state.attributes.get(ATTR_MODE) available_modes = state.attributes.get(ATTR_AVAILABLE_MODES) if current_mode and available_modes: - metric = self._metric( - "humidifier_mode", - prometheus_client.Gauge, - "Humidifier Mode", - ["mode"], - ) for mode in available_modes: - metric.labels(**dict(self._labels(state), mode=mode)).set( - float(mode == current_mode) - ) + self._metric( + "humidifier_mode", + prometheus_client.Gauge, + "Humidifier Mode", + self._labels(state, {"mode": mode}), + ).set(float(mode == current_mode)) def _handle_sensor(self, state: State) -> None: unit = self._unit_string(state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)) @@ -628,22 +672,24 @@ class PrometheusMetrics: if metric is not None: break - if metric is not None: + if metric is not None and (value := self.state_as_number(state)) is not None: documentation = "State of the sensor" if unit: documentation = f"Sensor data measured in {unit}" - _metric = self._metric(metric, prometheus_client.Gauge, documentation) - - if (value := self.state_as_number(state)) is not None: - if ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == UnitOfTemperature.FAHRENHEIT - ): - value = TemperatureConverter.convert( - value, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS - ) - _metric.labels(**self._labels(state)).set(value) + if ( + state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) + == UnitOfTemperature.FAHRENHEIT + ): + value = TemperatureConverter.convert( + value, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS + ) + self._metric( + metric, + prometheus_client.Gauge, + documentation, + self._labels(state), + ).set(value) self._battery(state) @@ -702,114 +748,107 @@ class PrometheusMetrics: return units.get(unit, default) def _handle_switch(self, state: State) -> None: - metric = self._metric( - "switch_state", prometheus_client.Gauge, "State of the switch (0/1)" - ) - if (value := self.state_as_number(state)) is not None: - metric.labels(**self._labels(state)).set(value) + self._metric( + "switch_state", + prometheus_client.Gauge, 
+ "State of the switch (0/1)", + self._labels(state), + ).set(value) self._handle_attributes(state) def _handle_fan(self, state: State) -> None: - metric = self._metric( - "fan_state", prometheus_client.Gauge, "State of the fan (0/1)" - ) - if (value := self.state_as_number(state)) is not None: - metric.labels(**self._labels(state)).set(value) + self._metric( + "fan_state", + prometheus_client.Gauge, + "State of the fan (0/1)", + self._labels(state), + ).set(value) fan_speed_percent = state.attributes.get(ATTR_PERCENTAGE) if fan_speed_percent is not None: - fan_speed_metric = self._metric( + self._metric( "fan_speed_percent", prometheus_client.Gauge, "Fan speed percent (0-100)", - ) - fan_speed_metric.labels(**self._labels(state)).set(float(fan_speed_percent)) + self._labels(state), + ).set(float(fan_speed_percent)) fan_is_oscillating = state.attributes.get(ATTR_OSCILLATING) if fan_is_oscillating is not None: - fan_oscillating_metric = self._metric( + self._metric( "fan_is_oscillating", prometheus_client.Gauge, "Whether the fan is oscillating (0/1)", - ) - fan_oscillating_metric.labels(**self._labels(state)).set( - float(fan_is_oscillating) - ) + self._labels(state), + ).set(float(fan_is_oscillating)) fan_preset_mode = state.attributes.get(ATTR_PRESET_MODE) available_modes = state.attributes.get(ATTR_PRESET_MODES) if fan_preset_mode and available_modes: - fan_preset_metric = self._metric( - "fan_preset_mode", - prometheus_client.Gauge, - "Fan preset mode enum", - ["mode"], - ) for mode in available_modes: - fan_preset_metric.labels(**dict(self._labels(state), mode=mode)).set( - float(mode == fan_preset_mode) - ) + self._metric( + "fan_preset_mode", + prometheus_client.Gauge, + "Fan preset mode enum", + self._labels(state, {"mode": mode}), + ).set(float(mode == fan_preset_mode)) fan_direction = state.attributes.get(ATTR_DIRECTION) - if fan_direction is not None: - fan_direction_metric = self._metric( + if fan_direction in {DIRECTION_FORWARD, DIRECTION_REVERSE}: + self._metric( "fan_direction_reversed", prometheus_client.Gauge, "Fan direction reversed (bool)", - ) - if fan_direction == DIRECTION_FORWARD: - fan_direction_metric.labels(**self._labels(state)).set(0) - elif fan_direction == DIRECTION_REVERSE: - fan_direction_metric.labels(**self._labels(state)).set(1) + self._labels(state), + ).set(float(fan_direction == DIRECTION_REVERSE)) def _handle_zwave(self, state: State) -> None: self._battery(state) def _handle_automation(self, state: State) -> None: - metric = self._metric( + self._metric( "automation_triggered_count", prometheus_client.Counter, "Count of times an automation has been triggered", - ) - - metric.labels(**self._labels(state)).inc() + self._labels(state), + ).inc() def _handle_counter(self, state: State) -> None: - metric = self._metric( + if (value := self.state_as_number(state)) is None: + return + + self._metric( "counter_value", prometheus_client.Gauge, "Value of counter entities", - ) - if (value := self.state_as_number(state)) is not None: - metric.labels(**self._labels(state)).set(value) + self._labels(state), + ).set(value) def _handle_update(self, state: State) -> None: - metric = self._metric( + if (value := self.state_as_number(state)) is None: + return + + self._metric( "update_state", prometheus_client.Gauge, "Update state, indicating if an update is available (0/1)", - ) - if (value := self.state_as_number(state)) is not None: - metric.labels(**self._labels(state)).set(value) + self._labels(state), + ).set(value) def _handle_alarm_control_panel(self, state: 
State) -> None: current_state = state.state if current_state: - metric = self._metric( - "alarm_control_panel_state", - prometheus_client.Gauge, - "State of the alarm control panel (0/1)", - ["state"], - ) - for alarm_state in AlarmControlPanelState: - metric.labels(**dict(self._labels(state), state=alarm_state.value)).set( - float(alarm_state.value == current_state) - ) + self._metric( + "alarm_control_panel_state", + prometheus_client.Gauge, + "State of the alarm control panel (0/1)", + self._labels(state, {"state": alarm_state.value}), + ).set(float(alarm_state.value == current_state)) class PrometheusView(HomeAssistantView): From e22685640c5e9da86d7e198ea52bebfb02f1e731 Mon Sep 17 00:00:00 2001 From: Alberto Geniola Date: Mon, 30 Dec 2024 13:46:53 +0100 Subject: [PATCH 1124/1198] Bump elmax-api (#133845) --- homeassistant/components/elmax/config_flow.py | 4 +++- homeassistant/components/elmax/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 6 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/elmax/config_flow.py b/homeassistant/components/elmax/config_flow.py index 3bb01efd3d5..09e0bc0d260 100644 --- a/homeassistant/components/elmax/config_flow.py +++ b/homeassistant/components/elmax/config_flow.py @@ -151,7 +151,9 @@ class ElmaxConfigFlow(ConfigFlow, domain=DOMAIN): port=self._panel_direct_port, ) ) - ssl_context = build_direct_ssl_context(cadata=self._panel_direct_ssl_cert) + ssl_context = await self.hass.async_add_executor_job( + build_direct_ssl_context, self._panel_direct_ssl_cert + ) # Attempt the connection to make sure the pin works. Also, take the chance to retrieve the panel ID via APIs. client_api_url = get_direct_api_url( diff --git a/homeassistant/components/elmax/manifest.json b/homeassistant/components/elmax/manifest.json index dfa20326d0c..f4b184c0475 100644 --- a/homeassistant/components/elmax/manifest.json +++ b/homeassistant/components/elmax/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/elmax", "iot_class": "cloud_polling", "loggers": ["elmax_api"], - "requirements": ["elmax-api==0.0.6.3"], + "requirements": ["elmax-api==0.0.6.4rc0"], "zeroconf": [ { "type": "_elmax-ssl._tcp.local." 
diff --git a/requirements_all.txt b/requirements_all.txt index b66662c2756..1c215b31351 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -827,7 +827,7 @@ eliqonline==1.2.2 elkm1-lib==2.2.10 # homeassistant.components.elmax -elmax-api==0.0.6.3 +elmax-api==0.0.6.4rc0 # homeassistant.components.elvia elvia==0.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 1f8fcd4476f..007aacda6bf 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -702,7 +702,7 @@ elgato==5.1.2 elkm1-lib==2.2.10 # homeassistant.components.elmax -elmax-api==0.0.6.3 +elmax-api==0.0.6.4rc0 # homeassistant.components.elvia elvia==0.1.0 From 45fd7fb6d5d1372f6d080aff57cce1ab95240324 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Mon, 30 Dec 2024 13:38:48 +0100 Subject: [PATCH 1125/1198] Fix duplicate sensor disk entities in Systemmonitor (#134139) --- .../components/systemmonitor/sensor.py | 19 +++++---- tests/components/systemmonitor/test_sensor.py | 42 ++++++++++++++++++- 2 files changed, 51 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/systemmonitor/sensor.py b/homeassistant/components/systemmonitor/sensor.py index ef1153f09e8..048d7cefd6c 100644 --- a/homeassistant/components/systemmonitor/sensor.py +++ b/homeassistant/components/systemmonitor/sensor.py @@ -429,16 +429,17 @@ async def async_setup_entry( is_enabled = check_legacy_resource( f"{_type}_{argument}", legacy_resources ) - loaded_resources.add(slugify(f"{_type}_{argument}")) - entities.append( - SystemMonitorSensor( - coordinator, - sensor_description, - entry.entry_id, - argument, - is_enabled, + if (_add := slugify(f"{_type}_{argument}")) not in loaded_resources: + loaded_resources.add(_add) + entities.append( + SystemMonitorSensor( + coordinator, + sensor_description, + entry.entry_id, + argument, + is_enabled, + ) ) - ) continue if _type.startswith("ipv"): diff --git a/tests/components/systemmonitor/test_sensor.py b/tests/components/systemmonitor/test_sensor.py index 6d22c5354a4..a5f5e7623e9 100644 --- a/tests/components/systemmonitor/test_sensor.py +++ b/tests/components/systemmonitor/test_sensor.py @@ -5,7 +5,7 @@ import socket from unittest.mock import Mock, patch from freezegun.api import FrozenDateTimeFactory -from psutil._common import sdiskusage, shwtemp, snetio, snicaddr +from psutil._common import sdiskpart, sdiskusage, shwtemp, snetio, snicaddr import pytest from syrupy.assertion import SnapshotAssertion @@ -504,3 +504,43 @@ async def test_remove_obsolete_entities( entity_registry.async_get("sensor.systemmonitor_network_out_veth54321") is not None ) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_no_duplicate_disk_entities( + hass: HomeAssistant, + mock_psutil: Mock, + mock_os: Mock, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the sensor.""" + mock_psutil.disk_usage.return_value = sdiskusage( + 500 * 1024**3, 300 * 1024**3, 200 * 1024**3, 60.0 + ) + mock_psutil.disk_partitions.return_value = [ + sdiskpart("test", "/", "ext4", ""), + sdiskpart("test2", "/media/share", "ext4", ""), + sdiskpart("test3", "/incorrect", "", ""), + sdiskpart("test4", "/media/frigate", "ext4", ""), + sdiskpart("test4", "/media/FRIGATE", "ext4", ""), + sdiskpart("hosts", "/etc/hosts", "bind", ""), + sdiskpart("proc", "/proc/run", "proc", ""), + ] + + mock_config_entry = MockConfigEntry( + title="System Monitor", + domain=DOMAIN, + data={}, + options={ + "binary_sensor": {"process": ["python3", "pip"]}, + }, + ) + 
mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + disk_sensor = hass.states.get("sensor.system_monitor_disk_usage_media_frigate") + assert disk_sensor is not None + assert disk_sensor.state == "60.0" + + assert "Platform systemmonitor does not generate unique IDs." not in caplog.text From 0873d27d7b3b88970896a7672a95e4f506cac46e Mon Sep 17 00:00:00 2001 From: Artur Pragacz <49985303+arturpragacz@users.noreply.github.com> Date: Sat, 28 Dec 2024 21:34:01 +0100 Subject: [PATCH 1126/1198] Fix Onkyo volume rounding (#134157) --- homeassistant/components/onkyo/media_player.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/onkyo/media_player.py b/homeassistant/components/onkyo/media_player.py index 76194672bb7..97a82fc8a1a 100644 --- a/homeassistant/components/onkyo/media_player.py +++ b/homeassistant/components/onkyo/media_player.py @@ -427,7 +427,7 @@ class OnkyoMediaPlayer(MediaPlayerEntity): """ # HA_VOL * (MAX VOL / 100) * VOL_RESOLUTION self._update_receiver( - "volume", int(volume * (self._max_volume / 100) * self._volume_resolution) + "volume", round(volume * (self._max_volume / 100) * self._volume_resolution) ) async def async_volume_up(self) -> None: From ea51ecd384cf7b1a811b795a10239eed541b7085 Mon Sep 17 00:00:00 2001 From: tronikos Date: Sun, 29 Dec 2024 11:44:33 -0800 Subject: [PATCH 1127/1198] Bump opower to 0.8.7 (#134228) * Bump opower to 0.8.7 * update deps --- homeassistant/components/opower/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/opower/manifest.json b/homeassistant/components/opower/manifest.json index 593e4cf34b8..bd68cc84d13 100644 --- a/homeassistant/components/opower/manifest.json +++ b/homeassistant/components/opower/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/opower", "iot_class": "cloud_polling", "loggers": ["opower"], - "requirements": ["opower==0.8.6"] + "requirements": ["opower==0.8.7"] } diff --git a/requirements_all.txt b/requirements_all.txt index 1c215b31351..175a3913be9 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1570,7 +1570,7 @@ openwrt-luci-rpc==1.1.17 openwrt-ubus-rpc==0.0.2 # homeassistant.components.opower -opower==0.8.6 +opower==0.8.7 # homeassistant.components.oralb oralb-ble==0.17.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 007aacda6bf..1e0ebfcf904 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1306,7 +1306,7 @@ openhomedevice==2.2.0 openwebifpy==4.3.0 # homeassistant.components.opower -opower==0.8.6 +opower==0.8.7 # homeassistant.components.oralb oralb-ble==0.17.6 From c402eaec3f9819ac4320e56d4da80ca8cb8e1288 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Sun, 29 Dec 2024 21:36:49 +0100 Subject: [PATCH 1128/1198] Bump aiopegelonline to 0.1.1 (#134230) bump aiopegelonline to 0.1.1 --- homeassistant/components/pegel_online/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/pegel_online/manifest.json b/homeassistant/components/pegel_online/manifest.json index 443e8c58467..0a0f31532b1 100644 --- a/homeassistant/components/pegel_online/manifest.json +++ b/homeassistant/components/pegel_online/manifest.json @@ 
-7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["aiopegelonline"], - "requirements": ["aiopegelonline==0.1.0"] + "requirements": ["aiopegelonline==0.1.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 175a3913be9..e3e54acf7b3 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -321,7 +321,7 @@ aioopenexchangerates==0.6.8 aiooui==0.1.7 # homeassistant.components.pegel_online -aiopegelonline==0.1.0 +aiopegelonline==0.1.1 # homeassistant.components.acmeda aiopulse==0.4.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 1e0ebfcf904..67ae0ebbbf3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -303,7 +303,7 @@ aioopenexchangerates==0.6.8 aiooui==0.1.7 # homeassistant.components.pegel_online -aiopegelonline==0.1.0 +aiopegelonline==0.1.1 # homeassistant.components.acmeda aiopulse==0.4.6 From a627fa70a7c78e585a37bbe74549fd71fdc1b040 Mon Sep 17 00:00:00 2001 From: tronikos Date: Mon, 30 Dec 2024 00:13:51 -0800 Subject: [PATCH 1129/1198] Avoid KeyError for ignored entries in async_step_zeroconf of Android TV Remote (#134250) --- homeassistant/components/androidtv_remote/config_flow.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/androidtv_remote/config_flow.py b/homeassistant/components/androidtv_remote/config_flow.py index 3500e4ff47b..4df25247881 100644 --- a/homeassistant/components/androidtv_remote/config_flow.py +++ b/homeassistant/components/androidtv_remote/config_flow.py @@ -156,7 +156,12 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN): # and one of them, which could end up being in discovery_info.host, is from a # different device. If any of the discovery_info.ip_addresses matches the # existing host, don't update the host. 
- if existing_config_entry and len(discovery_info.ip_addresses) > 1: + if ( + existing_config_entry + # Ignored entries don't have host + and CONF_HOST in existing_config_entry.data + and len(discovery_info.ip_addresses) > 1 + ): existing_host = existing_config_entry.data[CONF_HOST] if existing_host != self.host: if existing_host in [ From 7456ce1c0170c41217bccd53bd06794edc554971 Mon Sep 17 00:00:00 2001 From: tronikos Date: Mon, 30 Dec 2024 00:20:35 -0800 Subject: [PATCH 1130/1198] Fix 400 This voice does not support speaking rate or pitch parameters at this time for Google Cloud Journey voices (#134255) --- .../components/google_cloud/const.py | 4 ++++ .../components/google_cloud/helpers.py | 9 +++++--- homeassistant/components/google_cloud/tts.py | 21 ++++++++++++++++--- 3 files changed, 28 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/google_cloud/const.py b/homeassistant/components/google_cloud/const.py index f416d36483a..16b1463f0f3 100644 --- a/homeassistant/components/google_cloud/const.py +++ b/homeassistant/components/google_cloud/const.py @@ -20,6 +20,10 @@ CONF_GAIN = "gain" CONF_PROFILES = "profiles" CONF_TEXT_TYPE = "text_type" +DEFAULT_SPEED = 1.0 +DEFAULT_PITCH = 0 +DEFAULT_GAIN = 0 + # STT constants CONF_STT_MODEL = "stt_model" diff --git a/homeassistant/components/google_cloud/helpers.py b/homeassistant/components/google_cloud/helpers.py index f6e89fae7fa..f1adc42b4cd 100644 --- a/homeassistant/components/google_cloud/helpers.py +++ b/homeassistant/components/google_cloud/helpers.py @@ -31,7 +31,10 @@ from .const import ( CONF_SPEED, CONF_TEXT_TYPE, CONF_VOICE, + DEFAULT_GAIN, DEFAULT_LANG, + DEFAULT_PITCH, + DEFAULT_SPEED, ) DEFAULT_VOICE = "" @@ -104,15 +107,15 @@ def tts_options_schema( ), vol.Optional( CONF_SPEED, - default=defaults.get(CONF_SPEED, 1.0), + default=defaults.get(CONF_SPEED, DEFAULT_SPEED), ): NumberSelector(NumberSelectorConfig(min=0.25, max=4.0, step=0.01)), vol.Optional( CONF_PITCH, - default=defaults.get(CONF_PITCH, 0), + default=defaults.get(CONF_PITCH, DEFAULT_PITCH), ): NumberSelector(NumberSelectorConfig(min=-20.0, max=20.0, step=0.1)), vol.Optional( CONF_GAIN, - default=defaults.get(CONF_GAIN, 0), + default=defaults.get(CONF_GAIN, DEFAULT_GAIN), ): NumberSelector(NumberSelectorConfig(min=-96.0, max=16.0, step=0.1)), vol.Optional( CONF_PROFILES, diff --git a/homeassistant/components/google_cloud/tts.py b/homeassistant/components/google_cloud/tts.py index c3a8254ad90..7f22dda4faf 100644 --- a/homeassistant/components/google_cloud/tts.py +++ b/homeassistant/components/google_cloud/tts.py @@ -35,7 +35,10 @@ from .const import ( CONF_SPEED, CONF_TEXT_TYPE, CONF_VOICE, + DEFAULT_GAIN, DEFAULT_LANG, + DEFAULT_PITCH, + DEFAULT_SPEED, DOMAIN, ) from .helpers import async_tts_voices, tts_options_schema, tts_platform_schema @@ -191,11 +194,23 @@ class BaseGoogleCloudProvider: ssml_gender=gender, name=voice, ), + # Avoid: "This voice does not support speaking rate or pitch parameters at this time." 
+ # by not specifying the fields unless they differ from the defaults audio_config=texttospeech.AudioConfig( audio_encoding=encoding, - speaking_rate=options[CONF_SPEED], - pitch=options[CONF_PITCH], - volume_gain_db=options[CONF_GAIN], + speaking_rate=( + options[CONF_SPEED] + if options[CONF_SPEED] != DEFAULT_SPEED + else None + ), + pitch=( + options[CONF_PITCH] + if options[CONF_PITCH] != DEFAULT_PITCH + else None + ), + volume_gain_db=( + options[CONF_GAIN] if options[CONF_GAIN] != DEFAULT_GAIN else None + ), effects_profile_id=options[CONF_PROFILES], ), ) From 077c9e62b47ac8082810ee64e6a0f7b69bf30b37 Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Mon, 30 Dec 2024 12:27:32 +0100 Subject: [PATCH 1131/1198] Bump pylamarzocco to 1.4.5 (#134259) * Bump pylamarzocco to 1.4.4 * Bump pylamarzocco to 1.4.5 --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/lamarzocco/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lamarzocco/manifest.json b/homeassistant/components/lamarzocco/manifest.json index 71d2278b51b..6b586a5cfb8 100644 --- a/homeassistant/components/lamarzocco/manifest.json +++ b/homeassistant/components/lamarzocco/manifest.json @@ -37,5 +37,5 @@ "iot_class": "cloud_polling", "loggers": ["pylamarzocco"], "quality_scale": "platinum", - "requirements": ["pylamarzocco==1.4.3"] + "requirements": ["pylamarzocco==1.4.5"] } diff --git a/requirements_all.txt b/requirements_all.txt index e3e54acf7b3..89fb362204e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2043,7 +2043,7 @@ pykwb==0.0.8 pylacrosse==0.4 # homeassistant.components.lamarzocco -pylamarzocco==1.4.3 +pylamarzocco==1.4.5 # homeassistant.components.lastfm pylast==5.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 67ae0ebbbf3..94b8cc6871d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1657,7 +1657,7 @@ pykrakenapi==0.1.8 pykulersky==0.5.2 # homeassistant.components.lamarzocco -pylamarzocco==1.4.3 +pylamarzocco==1.4.5 # homeassistant.components.lastfm pylast==5.1.0 From d9057fc43ec6f11c532c1a6fc3bc4489904334f1 Mon Sep 17 00:00:00 2001 From: Arne Keller Date: Mon, 30 Dec 2024 14:42:46 +0100 Subject: [PATCH 1132/1198] ollama: update to 0.4.5 (#134265) --- homeassistant/components/ollama/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/ollama/test_conversation.py | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/ollama/manifest.json b/homeassistant/components/ollama/manifest.json index dca4c2dd6be..dbecbf87e4e 100644 --- a/homeassistant/components/ollama/manifest.json +++ b/homeassistant/components/ollama/manifest.json @@ -8,5 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/ollama", "integration_type": "service", "iot_class": "local_polling", - "requirements": ["ollama==0.3.3"] + "requirements": ["ollama==0.4.5"] } diff --git a/requirements_all.txt b/requirements_all.txt index 89fb362204e..3ff99ee2955 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1528,7 +1528,7 @@ oemthermostat==1.1.1 ohme==1.2.0 # homeassistant.components.ollama -ollama==0.3.3 +ollama==0.4.5 # homeassistant.components.omnilogic omnilogic==0.4.5 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 94b8cc6871d..6c763fca83d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1276,7 
+1276,7 @@ odp-amsterdam==6.0.2 ohme==1.2.0 # homeassistant.components.ollama -ollama==0.3.3 +ollama==0.4.5 # homeassistant.components.omnilogic omnilogic==0.4.5 diff --git a/tests/components/ollama/test_conversation.py b/tests/components/ollama/test_conversation.py index 66dc8a0c603..3202b42d9b3 100644 --- a/tests/components/ollama/test_conversation.py +++ b/tests/components/ollama/test_conversation.py @@ -51,8 +51,8 @@ async def test_chat( assert args["model"] == "test model" assert args["messages"] == [ - Message({"role": "system", "content": prompt}), - Message({"role": "user", "content": "test message"}), + Message(role="system", content=prompt), + Message(role="user", content="test message"), ] assert ( From 0c732510049e7e89da6c9962322d3477bfb5975e Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Mon, 30 Dec 2024 16:22:30 +0100 Subject: [PATCH 1133/1198] Remove excessive period at end of action name (#134272) --- homeassistant/components/zwave_js/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/zwave_js/strings.json b/homeassistant/components/zwave_js/strings.json index 0c3ca6313d4..fc63b7e9119 100644 --- a/homeassistant/components/zwave_js/strings.json +++ b/homeassistant/components/zwave_js/strings.json @@ -290,7 +290,7 @@ "name": "[%key:component::zwave_js::services::set_config_parameter::fields::value::name%]" } }, - "name": "Bulk set partial configuration parameters (advanced)." + "name": "Bulk set partial configuration parameters (advanced)" }, "clear_lock_usercode": { "description": "Clears a user code from a lock.", From 623e1b08b8beadf601df35dc2699f44b4906d749 Mon Sep 17 00:00:00 2001 From: Andrew Jackson Date: Mon, 30 Dec 2024 16:47:58 +0000 Subject: [PATCH 1134/1198] Bump aiomealie to 0.9.5 (#134274) --- homeassistant/components/mealie/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/mealie/manifest.json b/homeassistant/components/mealie/manifest.json index c555fcbc3d6..6e55abcdcad 100644 --- a/homeassistant/components/mealie/manifest.json +++ b/homeassistant/components/mealie/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/mealie", "integration_type": "service", "iot_class": "local_polling", - "requirements": ["aiomealie==0.9.4"] + "requirements": ["aiomealie==0.9.5"] } diff --git a/requirements_all.txt b/requirements_all.txt index 3ff99ee2955..49c801260fb 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -294,7 +294,7 @@ aiolookin==1.0.0 aiolyric==2.0.1 # homeassistant.components.mealie -aiomealie==0.9.4 +aiomealie==0.9.5 # homeassistant.components.modern_forms aiomodernforms==0.1.8 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 6c763fca83d..42b3d42c9d7 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -276,7 +276,7 @@ aiolookin==1.0.0 aiolyric==2.0.1 # homeassistant.components.mealie -aiomealie==0.9.4 +aiomealie==0.9.5 # homeassistant.components.modern_forms aiomodernforms==0.1.8 From 82f0e8cc19e749fec98edc239e5c8a9ec3023fd2 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Mon, 30 Dec 2024 20:04:50 +0100 Subject: [PATCH 1135/1198] Update frontend to 20241230.0 (#134284) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff 
--git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index ce40ce35a65..01fe363d69e 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20241229.0"] + "requirements": ["home-assistant-frontend==20241230.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 1d4e86e9671..d1ccc31a0ed 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -35,7 +35,7 @@ habluetooth==3.6.0 hass-nabucasa==0.87.0 hassil==2.0.5 home-assistant-bluetooth==1.13.0 -home-assistant-frontend==20241229.0 +home-assistant-frontend==20241230.0 home-assistant-intents==2024.12.20 httpx==0.27.2 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 49c801260fb..dee52f46c3b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1134,7 +1134,7 @@ hole==0.8.0 holidays==0.63 # homeassistant.components.frontend -home-assistant-frontend==20241229.0 +home-assistant-frontend==20241230.0 # homeassistant.components.conversation home-assistant-intents==2024.12.20 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 42b3d42c9d7..02cf1e06481 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -963,7 +963,7 @@ hole==0.8.0 holidays==0.63 # homeassistant.components.frontend -home-assistant-frontend==20241229.0 +home-assistant-frontend==20241230.0 # homeassistant.components.conversation home-assistant-intents==2024.12.20 From c10175e25c90ba62bc81a750b44c699d7771914b Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Mon, 30 Dec 2024 20:06:44 +0100 Subject: [PATCH 1136/1198] Bump version to 2025.1.0b4 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index 91b31959854..e45608ce9bb 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -25,7 +25,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2025 MINOR_VERSION: Final = 1 -PATCH_VERSION: Final = "0b3" +PATCH_VERSION: Final = "0b4" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 7fdc8b16719..6219a7cee8d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2025.1.0b3" +version = "2025.1.0b4" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" From fbd6cf72441988740d102060f68561bb4d709300 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Tue, 31 Dec 2024 23:06:42 +0100 Subject: [PATCH 1137/1198] Improve Mealie set mealplan service (#130606) * Improve Mealie set mealplan service * Fix * Fix --- homeassistant/components/mealie/services.py | 2 +- homeassistant/components/mealie/strings.json | 4 +-- .../mealie/snapshots/test_services.ambr | 26 +++++++++++++++++++ tests/components/mealie/test_services.py | 6 +++++ 4 files changed, 35 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/mealie/services.py b/homeassistant/components/mealie/services.py index f195be37b11..ca8c28f9d13 100644 --- a/homeassistant/components/mealie/services.py +++ b/homeassistant/components/mealie/services.py @@ -92,7 +92,7 @@ SERVICE_SET_MEALPLAN_SCHEMA = vol.Any( [x.lower() for x in MealplanEntryType] ), vol.Required(ATTR_NOTE_TITLE): str, - vol.Required(ATTR_NOTE_TEXT): str, + vol.Optional(ATTR_NOTE_TEXT): str, } ), ) diff --git a/homeassistant/components/mealie/strings.json b/homeassistant/components/mealie/strings.json index e80db7ab3b0..fa63252e837 100644 --- a/homeassistant/components/mealie/strings.json +++ b/homeassistant/components/mealie/strings.json @@ -229,8 +229,8 @@ "description": "The type of dish to set the recipe to." }, "recipe_id": { - "name": "[%key:component::mealie::services::get_recipe::fields::recipe_id::name%]", - "description": "[%key:component::mealie::services::get_recipe::fields::recipe_id::description%]" + "name": "Recipe ID", + "description": "The recipe ID or the slug of the recipe to get." }, "note_title": { "name": "Meal note title", diff --git a/tests/components/mealie/snapshots/test_services.ambr b/tests/components/mealie/snapshots/test_services.ambr index 4f9ee6a5c09..56626c7b5c4 100644 --- a/tests/components/mealie/snapshots/test_services.ambr +++ b/tests/components/mealie/snapshots/test_services.ambr @@ -758,6 +758,32 @@ }), }) # --- +# name: test_service_set_mealplan[payload2-kwargs2] + dict({ + 'mealplan': dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, + 'mealplan_date': datetime.date(2024, 1, 22), + 'mealplan_id': 230, + 'recipe': dict({ + 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. 
Megalekker bordje comfortfood als je \\'t ons vraagt.", + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, + 'image': 'AiIo', + 'name': 'Zoete aardappel curry traybake', + 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', + 'recipe_id': 'c5f00a93-71a2-4e48-900f-d9ad0bb9de93', + 'recipe_yield': '2 servings', + 'slug': 'zoete-aardappel-curry-traybake', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + }) +# --- # name: test_service_set_random_mealplan dict({ 'mealplan': dict({ diff --git a/tests/components/mealie/test_services.py b/tests/components/mealie/test_services.py index 1c8c6f19de7..63668379490 100644 --- a/tests/components/mealie/test_services.py +++ b/tests/components/mealie/test_services.py @@ -250,6 +250,12 @@ async def test_service_set_random_mealplan( }, {"recipe_id": None, "note_title": "Note Title", "note_text": "Note Text"}, ), + ( + { + ATTR_NOTE_TITLE: "Note Title", + }, + {"recipe_id": None, "note_title": "Note Title", "note_text": None}, + ), ], ) async def test_service_set_mealplan( From 54fa30c2b8a5ca06fa8ac7b66b58ef4980fc76ad Mon Sep 17 00:00:00 2001 From: Brynley McDonald Date: Wed, 1 Jan 2025 02:28:24 +1300 Subject: [PATCH 1138/1198] Update Flick Electric API (#133475) --- .../components/flick_electric/__init__.py | 62 +- .../components/flick_electric/config_flow.py | 157 ++++- .../components/flick_electric/const.py | 2 + .../components/flick_electric/coordinator.py | 47 ++ .../components/flick_electric/manifest.json | 2 +- .../components/flick_electric/sensor.py | 64 +- .../components/flick_electric/strings.json | 11 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/flick_electric/__init__.py | 50 ++ .../flick_electric/test_config_flow.py | 594 +++++++++++++++++- tests/components/flick_electric/test_init.py | 135 ++++ 12 files changed, 1046 insertions(+), 82 deletions(-) create mode 100644 homeassistant/components/flick_electric/coordinator.py create mode 100644 tests/components/flick_electric/test_init.py diff --git a/homeassistant/components/flick_electric/__init__.py b/homeassistant/components/flick_electric/__init__.py index a963d199c5a..190947e4c6f 100644 --- a/homeassistant/components/flick_electric/__init__.py +++ b/homeassistant/components/flick_electric/__init__.py @@ -20,7 +20,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers import aiohttp_client -from .const import CONF_TOKEN_EXPIRY, DOMAIN +from .const import CONF_ACCOUNT_ID, CONF_SUPPLY_NODE_REF, CONF_TOKEN_EXPIRY +from .coordinator import FlickConfigEntry, FlickElectricDataCoordinator _LOGGER = logging.getLogger(__name__) @@ -29,24 +30,67 @@ CONF_ID_TOKEN = "id_token" PLATFORMS = [Platform.SENSOR] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: FlickConfigEntry) -> bool: """Set up Flick Electric from a config entry.""" auth = HassFlickAuth(hass, entry) - hass.data.setdefault(DOMAIN, {}) - hass.data[DOMAIN][entry.entry_id] = FlickAPI(auth) + coordinator = FlickElectricDataCoordinator( + hass, FlickAPI(auth), entry.data[CONF_SUPPLY_NODE_REF] + ) + + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: 
ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: FlickConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: + """Migrate old entry.""" + _LOGGER.debug( + "Migrating configuration from version %s.%s", + config_entry.version, + config_entry.minor_version, + ) + + if config_entry.version > 2: + return False + + if config_entry.version == 1: + api = FlickAPI(HassFlickAuth(hass, config_entry)) + + accounts = await api.getCustomerAccounts() + active_accounts = [ + account for account in accounts if account["status"] == "active" + ] + + # A single active account can be auto-migrated + if (len(active_accounts)) == 1: + account = active_accounts[0] + + new_data = {**config_entry.data} + new_data[CONF_ACCOUNT_ID] = account["id"] + new_data[CONF_SUPPLY_NODE_REF] = account["main_consumer"]["supply_node_ref"] + hass.config_entries.async_update_entry( + config_entry, + title=account["address"], + unique_id=account["id"], + data=new_data, + version=2, + ) + return True + + config_entry.async_start_reauth(hass, data={**config_entry.data}) + return False + + return True class HassFlickAuth(AbstractFlickAuth): diff --git a/homeassistant/components/flick_electric/config_flow.py b/homeassistant/components/flick_electric/config_flow.py index 8a2455b9d14..b6b7327fcb0 100644 --- a/homeassistant/components/flick_electric/config_flow.py +++ b/homeassistant/components/flick_electric/config_flow.py @@ -1,14 +1,18 @@ """Config Flow for Flick Electric integration.""" import asyncio +from collections.abc import Mapping import logging from typing import Any -from pyflick.authentication import AuthException, SimpleFlickAuth +from aiohttp import ClientResponseError +from pyflick import FlickAPI +from pyflick.authentication import AbstractFlickAuth, SimpleFlickAuth from pyflick.const import DEFAULT_CLIENT_ID, DEFAULT_CLIENT_SECRET +from pyflick.types import APIException, AuthException, CustomerAccount import voluptuous as vol -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import ( CONF_CLIENT_ID, CONF_CLIENT_SECRET, @@ -17,12 +21,18 @@ from homeassistant.const import ( ) from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import aiohttp_client +from homeassistant.helpers.selector import ( + SelectOptionDict, + SelectSelector, + SelectSelectorConfig, + SelectSelectorMode, +) -from .const import DOMAIN +from .const import CONF_ACCOUNT_ID, CONF_SUPPLY_NODE_REF, DOMAIN _LOGGER = logging.getLogger(__name__) -DATA_SCHEMA = vol.Schema( +LOGIN_SCHEMA = vol.Schema( { vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str, @@ -35,10 +45,13 @@ DATA_SCHEMA = vol.Schema( class FlickConfigFlow(ConfigFlow, domain=DOMAIN): """Flick config flow.""" - VERSION = 1 + VERSION = 2 + auth: AbstractFlickAuth + accounts: list[CustomerAccount] + data: dict[str, Any] - async def _validate_input(self, user_input): - auth = SimpleFlickAuth( + async def _validate_auth(self, user_input: Mapping[str, Any]) -> bool: + self.auth = SimpleFlickAuth( username=user_input[CONF_USERNAME], password=user_input[CONF_PASSWORD], 
websession=aiohttp_client.async_get_clientsession(self.hass), @@ -48,22 +61,83 @@ class FlickConfigFlow(ConfigFlow, domain=DOMAIN): try: async with asyncio.timeout(60): - token = await auth.async_get_access_token() - except TimeoutError as err: + token = await self.auth.async_get_access_token() + except (TimeoutError, ClientResponseError) as err: raise CannotConnect from err except AuthException as err: raise InvalidAuth from err return token is not None + async def async_step_select_account( + self, user_input: Mapping[str, Any] | None = None + ) -> ConfigFlowResult: + """Ask user to select account.""" + + errors = {} + if user_input is not None and CONF_ACCOUNT_ID in user_input: + self.data[CONF_ACCOUNT_ID] = user_input[CONF_ACCOUNT_ID] + self.data[CONF_SUPPLY_NODE_REF] = self._get_supply_node_ref( + user_input[CONF_ACCOUNT_ID] + ) + try: + # Ensure supply node is active + await FlickAPI(self.auth).getPricing(self.data[CONF_SUPPLY_NODE_REF]) + except (APIException, ClientResponseError): + errors["base"] = "cannot_connect" + except AuthException: + # We should never get here as we have a valid token + return self.async_abort(reason="no_permissions") + else: + # Supply node is active + return await self._async_create_entry() + + try: + self.accounts = await FlickAPI(self.auth).getCustomerAccounts() + except (APIException, ClientResponseError): + errors["base"] = "cannot_connect" + + active_accounts = [a for a in self.accounts if a["status"] == "active"] + + if len(active_accounts) == 0: + return self.async_abort(reason="no_accounts") + + if len(active_accounts) == 1: + self.data[CONF_ACCOUNT_ID] = active_accounts[0]["id"] + self.data[CONF_SUPPLY_NODE_REF] = self._get_supply_node_ref( + active_accounts[0]["id"] + ) + + return await self._async_create_entry() + + return self.async_show_form( + step_id="select_account", + data_schema=vol.Schema( + { + vol.Required(CONF_ACCOUNT_ID): SelectSelector( + SelectSelectorConfig( + options=[ + SelectOptionDict( + value=account["id"], label=account["address"] + ) + for account in active_accounts + ], + mode=SelectSelectorMode.LIST, + ) + ) + } + ), + errors=errors, + ) + async def async_step_user( - self, user_input: dict[str, Any] | None = None + self, user_input: Mapping[str, Any] | None = None ) -> ConfigFlowResult: """Handle gathering login info.""" errors = {} if user_input is not None: try: - await self._validate_input(user_input) + await self._validate_auth(user_input) except CannotConnect: errors["base"] = "cannot_connect" except InvalidAuth: @@ -72,20 +146,61 @@ class FlickConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - await self.async_set_unique_id( - f"flick_electric_{user_input[CONF_USERNAME]}" - ) - self._abort_if_unique_id_configured() - - return self.async_create_entry( - title=f"Flick Electric: {user_input[CONF_USERNAME]}", - data=user_input, - ) + self.data = dict(user_input) + return await self.async_step_select_account(user_input) return self.async_show_form( - step_id="user", data_schema=DATA_SCHEMA, errors=errors + step_id="user", data_schema=LOGIN_SCHEMA, errors=errors ) + async def async_step_reauth( + self, user_input: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle re-authentication.""" + + self.data = {**user_input} + + return await self.async_step_user(user_input) + + async def _async_create_entry(self) -> ConfigFlowResult: + """Create an entry for the flow.""" + + await self.async_set_unique_id(self.data[CONF_ACCOUNT_ID]) + + account = 
self._get_account(self.data[CONF_ACCOUNT_ID]) + + if self.source == SOURCE_REAUTH: + # Migration completed + if self._get_reauth_entry().version == 1: + self.hass.config_entries.async_update_entry( + self._get_reauth_entry(), + unique_id=self.unique_id, + data=self.data, + version=self.VERSION, + ) + + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + unique_id=self.unique_id, + title=account["address"], + data=self.data, + ) + + self._abort_if_unique_id_configured() + + return self.async_create_entry( + title=account["address"], + data=self.data, + ) + + def _get_account(self, account_id: str) -> CustomerAccount: + """Get the account for the account ID.""" + return next(a for a in self.accounts if a["id"] == account_id) + + def _get_supply_node_ref(self, account_id: str) -> str: + """Get the supply node ref for the account.""" + return self._get_account(account_id)["main_consumer"][CONF_SUPPLY_NODE_REF] + class CannotConnect(HomeAssistantError): """Error to indicate we cannot connect.""" diff --git a/homeassistant/components/flick_electric/const.py b/homeassistant/components/flick_electric/const.py index de1942096b5..0f94aa909b7 100644 --- a/homeassistant/components/flick_electric/const.py +++ b/homeassistant/components/flick_electric/const.py @@ -3,6 +3,8 @@ DOMAIN = "flick_electric" CONF_TOKEN_EXPIRY = "expires" +CONF_ACCOUNT_ID = "account_id" +CONF_SUPPLY_NODE_REF = "supply_node_ref" ATTR_START_AT = "start_at" ATTR_END_AT = "end_at" diff --git a/homeassistant/components/flick_electric/coordinator.py b/homeassistant/components/flick_electric/coordinator.py new file mode 100644 index 00000000000..474efc5297d --- /dev/null +++ b/homeassistant/components/flick_electric/coordinator.py @@ -0,0 +1,47 @@ +"""Data Coordinator for Flick Electric.""" + +import asyncio +from datetime import timedelta +import logging + +import aiohttp +from pyflick import FlickAPI, FlickPrice +from pyflick.types import APIException, AuthException + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +_LOGGER = logging.getLogger(__name__) + +SCAN_INTERVAL = timedelta(minutes=5) + +type FlickConfigEntry = ConfigEntry[FlickElectricDataCoordinator] + + +class FlickElectricDataCoordinator(DataUpdateCoordinator[FlickPrice]): + """Coordinator for flick power price.""" + + def __init__( + self, hass: HomeAssistant, api: FlickAPI, supply_node_ref: str + ) -> None: + """Initialize FlickElectricDataCoordinator.""" + super().__init__( + hass, + _LOGGER, + name="Flick Electric", + update_interval=SCAN_INTERVAL, + ) + self.supply_node_ref = supply_node_ref + self._api = api + + async def _async_update_data(self) -> FlickPrice: + """Fetch pricing data from Flick Electric.""" + try: + async with asyncio.timeout(60): + return await self._api.getPricing(self.supply_node_ref) + except AuthException as err: + raise ConfigEntryAuthFailed from err + except (APIException, aiohttp.ClientResponseError) as err: + raise UpdateFailed from err diff --git a/homeassistant/components/flick_electric/manifest.json b/homeassistant/components/flick_electric/manifest.json index 0b1f2677d6a..3aee25995a9 100644 --- a/homeassistant/components/flick_electric/manifest.json +++ b/homeassistant/components/flick_electric/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["pyflick"], - 
"requirements": ["PyFlick==0.0.2"] + "requirements": ["PyFlick==1.1.2"] } diff --git a/homeassistant/components/flick_electric/sensor.py b/homeassistant/components/flick_electric/sensor.py index 347109c66c0..147d00c943d 100644 --- a/homeassistant/components/flick_electric/sensor.py +++ b/homeassistant/components/flick_electric/sensor.py @@ -1,74 +1,72 @@ """Support for Flick Electric Pricing data.""" -import asyncio from datetime import timedelta +from decimal import Decimal import logging from typing import Any -from pyflick import FlickAPI, FlickPrice - from homeassistant.components.sensor import SensorEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CURRENCY_CENT, UnitOfEnergy from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util.dt import utcnow +from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import ATTR_COMPONENTS, ATTR_END_AT, ATTR_START_AT, DOMAIN +from .const import ATTR_COMPONENTS, ATTR_END_AT, ATTR_START_AT +from .coordinator import FlickConfigEntry, FlickElectricDataCoordinator _LOGGER = logging.getLogger(__name__) - SCAN_INTERVAL = timedelta(minutes=5) async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: FlickConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Flick Sensor Setup.""" - api: FlickAPI = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data - async_add_entities([FlickPricingSensor(api)], True) + async_add_entities([FlickPricingSensor(coordinator)]) -class FlickPricingSensor(SensorEntity): +class FlickPricingSensor(CoordinatorEntity[FlickElectricDataCoordinator], SensorEntity): """Entity object for Flick Electric sensor.""" _attr_attribution = "Data provided by Flick Electric" _attr_native_unit_of_measurement = f"{CURRENCY_CENT}/{UnitOfEnergy.KILO_WATT_HOUR}" _attr_has_entity_name = True _attr_translation_key = "power_price" - _attributes: dict[str, Any] = {} - def __init__(self, api: FlickAPI) -> None: + def __init__(self, coordinator: FlickElectricDataCoordinator) -> None: """Entity object for Flick Electric sensor.""" - self._api: FlickAPI = api - self._price: FlickPrice = None + super().__init__(coordinator) + + self._attr_unique_id = f"{coordinator.supply_node_ref}_pricing" @property - def native_value(self): + def native_value(self) -> Decimal: """Return the state of the sensor.""" - return self._price.price + # The API should return a unit price with quantity of 1.0 when no start/end time is provided + if self.coordinator.data.quantity != 1: + _LOGGER.warning( + "Unexpected quantity for unit price: %s", self.coordinator.data + ) + return self.coordinator.data.cost @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any] | None: """Return the state attributes.""" - return self._attributes + components: dict[str, Decimal] = {} - async def async_update(self) -> None: - """Get the Flick Pricing data from the web service.""" - if self._price and self._price.end_at >= utcnow(): - return # Power price data is still valid - - async with asyncio.timeout(60): - self._price = await self._api.getPricing() - - _LOGGER.debug("Pricing data: %s", self._price) - - self._attributes[ATTR_START_AT] = self._price.start_at - self._attributes[ATTR_END_AT] = self._price.end_at - for component in self._price.components: + for component in 
self.coordinator.data.components: if component.charge_setter not in ATTR_COMPONENTS: _LOGGER.warning("Found unknown component: %s", component.charge_setter) continue - self._attributes[component.charge_setter] = float(component.value) + components[component.charge_setter] = component.value + + return { + ATTR_START_AT: self.coordinator.data.start_at, + ATTR_END_AT: self.coordinator.data.end_at, + **components, + } diff --git a/homeassistant/components/flick_electric/strings.json b/homeassistant/components/flick_electric/strings.json index 8b55bef939e..4b1fd300e2b 100644 --- a/homeassistant/components/flick_electric/strings.json +++ b/homeassistant/components/flick_electric/strings.json @@ -9,6 +9,12 @@ "client_id": "Client ID (optional)", "client_secret": "Client Secret (optional)" } + }, + "select_account": { + "title": "Select account", + "data": { + "account_id": "Account" + } } }, "error": { @@ -17,7 +23,10 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "no_permissions": "Cannot get pricing for this account. Please check user permissions.", + "no_accounts": "No services are active on this Flick account" } }, "entity": { diff --git a/requirements_all.txt b/requirements_all.txt index dee52f46c3b..438690ac560 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -48,7 +48,7 @@ ProgettiHWSW==0.1.3 PyChromecast==14.0.5 # homeassistant.components.flick_electric -PyFlick==0.0.2 +PyFlick==1.1.2 # homeassistant.components.flume PyFlume==0.6.5 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 02cf1e06481..ebf6ac82782 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -45,7 +45,7 @@ ProgettiHWSW==0.1.3 PyChromecast==14.0.5 # homeassistant.components.flick_electric -PyFlick==0.0.2 +PyFlick==1.1.2 # homeassistant.components.flume PyFlume==0.6.5 diff --git a/tests/components/flick_electric/__init__.py b/tests/components/flick_electric/__init__.py index 7ba25e6c180..36936cad047 100644 --- a/tests/components/flick_electric/__init__.py +++ b/tests/components/flick_electric/__init__.py @@ -1 +1,51 @@ """Tests for the Flick Electric integration.""" + +from pyflick.types import FlickPrice + +from homeassistant.components.flick_electric.const import ( + CONF_ACCOUNT_ID, + CONF_SUPPLY_NODE_REF, +) +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME + +CONF = { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_ACCOUNT_ID: "1234", + CONF_SUPPLY_NODE_REF: "123", +} + + +def _mock_flick_price(): + return FlickPrice( + { + "cost": "0.25", + "quantity": "1.0", + "status": "final", + "start_at": "2024-01-01T00:00:00Z", + "end_at": "2024-01-01T00:00:00Z", + "type": "flat", + "components": [ + { + "charge_method": "kwh", + "charge_setter": "network", + "value": "1.00", + "single_unit_price": "1.00", + "quantity": "1.0", + "unit_code": "NZD", + "charge_per": "kwh", + "flow_direction": "import", + }, + { + "charge_method": "kwh", + "charge_setter": "nonsupported", + "value": "1.00", + "single_unit_price": "1.00", + "quantity": "1.0", + "unit_code": "NZD", + "charge_per": "kwh", + "flow_direction": "import", + }, + ], + } + ) diff --git a/tests/components/flick_electric/test_config_flow.py b/tests/components/flick_electric/test_config_flow.py 
index 85a6495d3c5..7ac605f1c8c 100644 --- a/tests/components/flick_electric/test_config_flow.py +++ b/tests/components/flick_electric/test_config_flow.py @@ -3,29 +3,37 @@ from unittest.mock import patch from pyflick.authentication import AuthException +from pyflick.types import APIException from homeassistant import config_entries -from homeassistant.components.flick_electric.const import DOMAIN +from homeassistant.components.flick_electric.const import ( + CONF_ACCOUNT_ID, + CONF_SUPPLY_NODE_REF, + DOMAIN, +) from homeassistant.config_entries import ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from tests.common import MockConfigEntry +from . import CONF, _mock_flick_price -CONF = {CONF_USERNAME: "test-username", CONF_PASSWORD: "test-password"} +from tests.common import MockConfigEntry async def _flow_submit(hass: HomeAssistant) -> ConfigFlowResult: return await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, - data=CONF, + data={ + CONF_USERNAME: CONF[CONF_USERNAME], + CONF_PASSWORD: CONF[CONF_PASSWORD], + }, ) async def test_form(hass: HomeAssistant) -> None: - """Test we get the form.""" + """Test we get the form with only one, with no account picker.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -38,6 +46,21 @@ async def test_form(hass: HomeAssistant) -> None: "homeassistant.components.flick_electric.config_flow.SimpleFlickAuth.async_get_access_token", return_value="123456789abcdef", ), + patch( + "homeassistant.components.flick_electric.config_flow.FlickAPI.getCustomerAccounts", + return_value=[ + { + "id": "1234", + "status": "active", + "address": "123 Fake St", + "main_consumer": {"supply_node_ref": "123"}, + } + ], + ), + patch( + "homeassistant.components.flick_electric.config_flow.FlickAPI.getPricing", + return_value=_mock_flick_price(), + ), patch( "homeassistant.components.flick_electric.async_setup_entry", return_value=True, @@ -45,29 +68,293 @@ async def test_form(hass: HomeAssistant) -> None: ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - CONF, + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, ) await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Flick Electric: test-username" + assert result2["title"] == "123 Fake St" assert result2["data"] == CONF + assert result2["result"].unique_id == "1234" assert len(mock_setup_entry.mock_calls) == 1 -async def test_form_duplicate_login(hass: HomeAssistant) -> None: - """Test uniqueness of username.""" +async def test_form_multi_account(hass: HomeAssistant) -> None: + """Test the form when multiple accounts are available.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + with ( + patch( + "homeassistant.components.flick_electric.config_flow.SimpleFlickAuth.async_get_access_token", + return_value="123456789abcdef", + ), + patch( + "homeassistant.components.flick_electric.config_flow.FlickAPI.getCustomerAccounts", + return_value=[ + { + "id": "1234", + "status": "active", + "address": "123 Fake St", + "main_consumer": {"supply_node_ref": "123"}, + }, + { + "id": "5678", + "status": "active", + "address": "456 Fake 
St", + "main_consumer": {"supply_node_ref": "456"}, + }, + ], + ), + patch( + "homeassistant.components.flick_electric.config_flow.FlickAPI.getPricing", + return_value=_mock_flick_price(), + ), + patch( + "homeassistant.components.flick_electric.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == "select_account" + assert len(mock_setup_entry.mock_calls) == 0 + + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + {"account_id": "5678"}, + ) + + await hass.async_block_till_done() + + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == "456 Fake St" + assert result3["data"] == { + **CONF, + CONF_SUPPLY_NODE_REF: "456", + CONF_ACCOUNT_ID: "5678", + } + assert result3["result"].unique_id == "5678" + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_reauth_token(hass: HomeAssistant) -> None: + """Test reauth flow when username/password is wrong.""" entry = MockConfigEntry( domain=DOMAIN, - data=CONF, - title="Flick Electric: test-username", - unique_id="flick_electric_test-username", + data={**CONF}, + title="123 Fake St", + unique_id="1234", + version=2, ) entry.add_to_hass(hass) - with patch( - "homeassistant.components.flick_electric.config_flow.SimpleFlickAuth.async_get_access_token", - return_value="123456789abcdef", + with ( + patch( + "homeassistant.components.flick_electric.config_flow.SimpleFlickAuth.async_get_access_token", + side_effect=AuthException, + ), + ): + result = await entry.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "invalid_auth"} + assert result["step_id"] == "user" + + with ( + patch( + "homeassistant.components.flick_electric.config_flow.SimpleFlickAuth.async_get_access_token", + return_value="123456789abcdef", + ), + patch( + "homeassistant.components.flick_electric.config_flow.FlickAPI.getCustomerAccounts", + return_value=[ + { + "id": "1234", + "status": "active", + "address": "123 Fake St", + "main_consumer": {"supply_node_ref": "123"}, + }, + ], + ), + patch( + "homeassistant.components.flick_electric.config_flow.FlickAPI.getPricing", + return_value=_mock_flick_price(), + ), + patch( + "homeassistant.config_entries.ConfigEntries.async_update_entry", + return_value=True, + ) as mock_update_entry, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reauth_successful" + assert len(mock_update_entry.mock_calls) > 0 + + +async def test_form_reauth_migrate(hass: HomeAssistant) -> None: + """Test reauth flow for v1 with single account.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + title="123 Fake St", + unique_id="test-username", + version=1, + ) + entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.flick_electric.config_flow.SimpleFlickAuth.async_get_access_token", + return_value="123456789abcdef", + ), + patch( + "homeassistant.components.flick_electric.config_flow.FlickAPI.getCustomerAccounts", + return_value=[ + { + "id": "1234", + 
"status": "active", + "address": "123 Fake St", + "main_consumer": {"supply_node_ref": "123"}, + }, + ], + ), + patch( + "homeassistant.components.flick_electric.config_flow.FlickAPI.getPricing", + return_value=_mock_flick_price(), + ), + ): + result = await entry.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert entry.version == 2 + assert entry.unique_id == "1234" + assert entry.data == CONF + + +async def test_form_reauth_migrate_multi_account(hass: HomeAssistant) -> None: + """Test the form when multiple accounts are available.""" + + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + title="123 Fake St", + unique_id="test-username", + version=1, + ) + entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.flick_electric.config_flow.SimpleFlickAuth.async_get_access_token", + return_value="123456789abcdef", + ), + patch( + "homeassistant.components.flick_electric.config_flow.FlickAPI.getCustomerAccounts", + return_value=[ + { + "id": "1234", + "status": "active", + "address": "123 Fake St", + "main_consumer": {"supply_node_ref": "123"}, + }, + { + "id": "5678", + "status": "active", + "address": "456 Fake St", + "main_consumer": {"supply_node_ref": "456"}, + }, + ], + ), + patch( + "homeassistant.components.flick_electric.config_flow.FlickAPI.getPricing", + return_value=_mock_flick_price(), + ), + ): + result = await entry.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "select_account" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"account_id": "5678"}, + ) + + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reauth_successful" + + assert entry.version == 2 + assert entry.unique_id == "5678" + assert entry.data == { + **CONF, + CONF_ACCOUNT_ID: "5678", + CONF_SUPPLY_NODE_REF: "456", + } + + +async def test_form_duplicate_account(hass: HomeAssistant) -> None: + """Test uniqueness for account_id.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={**CONF, CONF_ACCOUNT_ID: "1234", CONF_SUPPLY_NODE_REF: "123"}, + title="123 Fake St", + unique_id="1234", + version=2, + ) + entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.flick_electric.config_flow.SimpleFlickAuth.async_get_access_token", + return_value="123456789abcdef", + ), + patch( + "homeassistant.components.flick_electric.config_flow.FlickAPI.getCustomerAccounts", + return_value=[ + { + "id": "1234", + "status": "active", + "address": "123 Fake St", + "main_consumer": {"supply_node_ref": "123"}, + } + ], + ), + patch( + "homeassistant.components.flick_electric.config_flow.FlickAPI.getPricing", + return_value=_mock_flick_price(), + ), ): result = await _flow_submit(hass) @@ -109,3 +396,280 @@ async def test_form_generic_exception(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": "unknown"} + + +async def test_form_select_account_cannot_connect(hass: HomeAssistant) -> None: + """Test we handle connection errors for select account.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + with ( + patch( + 
"homeassistant.components.flick_electric.config_flow.SimpleFlickAuth.async_get_access_token", + return_value="123456789abcdef", + ), + patch( + "homeassistant.components.flick_electric.config_flow.FlickAPI.getCustomerAccounts", + return_value=[ + { + "id": "1234", + "status": "active", + "address": "123 Fake St", + "main_consumer": {"supply_node_ref": "123"}, + }, + { + "id": "5678", + "status": "active", + "address": "456 Fake St", + "main_consumer": {"supply_node_ref": "456"}, + }, + ], + ), + patch( + "homeassistant.components.flick_electric.config_flow.FlickAPI.getPricing", + side_effect=APIException, + ), + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == "select_account" + + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + {"account_id": "5678"}, + ) + + assert result3["type"] is FlowResultType.FORM + assert result3["step_id"] == "select_account" + assert result3["errors"] == {"base": "cannot_connect"} + + +async def test_form_select_account_invalid_auth(hass: HomeAssistant) -> None: + """Test we handle auth errors for select account.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + with ( + patch( + "homeassistant.components.flick_electric.config_flow.SimpleFlickAuth.async_get_access_token", + return_value="123456789abcdef", + ), + patch( + "homeassistant.components.flick_electric.config_flow.FlickAPI.getCustomerAccounts", + return_value=[ + { + "id": "1234", + "status": "active", + "address": "123 Fake St", + "main_consumer": {"supply_node_ref": "123"}, + }, + { + "id": "5678", + "status": "active", + "address": "456 Fake St", + "main_consumer": {"supply_node_ref": "456"}, + }, + ], + ), + patch( + "homeassistant.components.flick_electric.config_flow.FlickAPI.getPricing", + side_effect=AuthException, + ), + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == "select_account" + + with ( + patch( + "homeassistant.components.flick_electric.config_flow.SimpleFlickAuth.async_get_access_token", + side_effect=AuthException, + ), + patch( + "homeassistant.components.flick_electric.config_flow.FlickAPI.getCustomerAccounts", + side_effect=AuthException, + ), + ): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + {"account_id": "5678"}, + ) + + assert result3["type"] is FlowResultType.ABORT + assert result3["reason"] == "no_permissions" + + +async def test_form_select_account_failed_to_connect(hass: HomeAssistant) -> None: + """Test we handle connection errors for select account.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + with ( + patch( + "homeassistant.components.flick_electric.config_flow.SimpleFlickAuth.async_get_access_token", + return_value="123456789abcdef", + ), + patch( + "homeassistant.components.flick_electric.config_flow.FlickAPI.getCustomerAccounts", + 
return_value=[ + { + "id": "1234", + "status": "active", + "address": "123 Fake St", + "main_consumer": {"supply_node_ref": "123"}, + }, + { + "id": "5678", + "status": "active", + "address": "456 Fake St", + "main_consumer": {"supply_node_ref": "456"}, + }, + ], + ), + patch( + "homeassistant.components.flick_electric.config_flow.FlickAPI.getPricing", + side_effect=AuthException, + ), + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == "select_account" + + with ( + patch( + "homeassistant.components.flick_electric.config_flow.SimpleFlickAuth.async_get_access_token", + return_value="123456789abcdef", + ), + patch( + "homeassistant.components.flick_electric.config_flow.FlickAPI.getCustomerAccounts", + side_effect=APIException, + ), + patch( + "homeassistant.components.flick_electric.config_flow.FlickAPI.getPricing", + side_effect=APIException, + ), + ): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + {"account_id": "5678"}, + ) + + assert result3["type"] is FlowResultType.FORM + assert result3["errors"] == {"base": "cannot_connect"} + + with ( + patch( + "homeassistant.components.flick_electric.config_flow.SimpleFlickAuth.async_get_access_token", + return_value="123456789abcdef", + ), + patch( + "homeassistant.components.flick_electric.config_flow.FlickAPI.getCustomerAccounts", + return_value=[ + { + "id": "1234", + "status": "active", + "address": "123 Fake St", + "main_consumer": {"supply_node_ref": "123"}, + }, + { + "id": "5678", + "status": "active", + "address": "456 Fake St", + "main_consumer": {"supply_node_ref": "456"}, + }, + ], + ), + patch( + "homeassistant.components.flick_electric.config_flow.FlickAPI.getPricing", + return_value=_mock_flick_price(), + ), + patch( + "homeassistant.components.flick_electric.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result4 = await hass.config_entries.flow.async_configure( + result3["flow_id"], + {"account_id": "5678"}, + ) + + assert result4["type"] is FlowResultType.CREATE_ENTRY + assert result4["title"] == "456 Fake St" + assert result4["data"] == { + **CONF, + CONF_SUPPLY_NODE_REF: "456", + CONF_ACCOUNT_ID: "5678", + } + assert result4["result"].unique_id == "5678" + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_form_select_account_no_accounts(hass: HomeAssistant) -> None: + """Test we handle connection errors for select account.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + with ( + patch( + "homeassistant.components.flick_electric.config_flow.SimpleFlickAuth.async_get_access_token", + return_value="123456789abcdef", + ), + patch( + "homeassistant.components.flick_electric.config_flow.FlickAPI.getCustomerAccounts", + return_value=[ + { + "id": "1234", + "status": "closed", + "address": "123 Fake St", + "main_consumer": {"supply_node_ref": "123"}, + }, + ], + ), + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "no_accounts" diff --git 
a/tests/components/flick_electric/test_init.py b/tests/components/flick_electric/test_init.py new file mode 100644 index 00000000000..e022b6e03bc --- /dev/null +++ b/tests/components/flick_electric/test_init.py @@ -0,0 +1,135 @@ +"""Test the Flick Electric config flow.""" + +from unittest.mock import patch + +from pyflick.authentication import AuthException + +from homeassistant.components.flick_electric.const import CONF_ACCOUNT_ID, DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant + +from . import CONF, _mock_flick_price + +from tests.common import MockConfigEntry + + +async def test_init_auth_failure_triggers_auth(hass: HomeAssistant) -> None: + """Test reauth flow is triggered when username/password is wrong.""" + with ( + patch( + "homeassistant.components.flick_electric.HassFlickAuth.async_get_access_token", + side_effect=AuthException, + ), + ): + entry = MockConfigEntry( + domain=DOMAIN, + data={**CONF}, + title="123 Fake St", + unique_id="1234", + version=2, + ) + entry.add_to_hass(hass) + + # Ensure setup fails + assert not await hass.config_entries.async_setup(entry.entry_id) + assert entry.state is ConfigEntryState.SETUP_ERROR + + # Ensure reauth flow is triggered + await hass.async_block_till_done() + assert len(hass.config_entries.flow.async_progress()) == 1 + + +async def test_init_migration_single_account(hass: HomeAssistant) -> None: + """Test migration with single account.""" + with ( + patch( + "homeassistant.components.flick_electric.HassFlickAuth.async_get_access_token", + return_value="123456789abcdef", + ), + patch( + "homeassistant.components.flick_electric.FlickAPI.getCustomerAccounts", + return_value=[ + { + "id": "1234", + "status": "active", + "address": "123 Fake St", + "main_consumer": {"supply_node_ref": "123"}, + } + ], + ), + patch( + "homeassistant.components.flick_electric.FlickAPI.getPricing", + return_value=_mock_flick_price(), + ), + ): + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_USERNAME: CONF[CONF_USERNAME], + CONF_PASSWORD: CONF[CONF_PASSWORD], + }, + title=CONF_USERNAME, + unique_id=CONF_USERNAME, + version=1, + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert len(hass.config_entries.flow.async_progress()) == 0 + assert entry.state is ConfigEntryState.LOADED + assert entry.version == 2 + assert entry.unique_id == CONF[CONF_ACCOUNT_ID] + assert entry.data == CONF + + +async def test_init_migration_multi_account_reauth(hass: HomeAssistant) -> None: + """Test migration triggers reauth with multiple accounts.""" + with ( + patch( + "homeassistant.components.flick_electric.HassFlickAuth.async_get_access_token", + return_value="123456789abcdef", + ), + patch( + "homeassistant.components.flick_electric.FlickAPI.getCustomerAccounts", + return_value=[ + { + "id": "1234", + "status": "active", + "address": "123 Fake St", + "main_consumer": {"supply_node_ref": "123"}, + }, + { + "id": "5678", + "status": "active", + "address": "456 Fake St", + "main_consumer": {"supply_node_ref": "456"}, + }, + ], + ), + patch( + "homeassistant.components.flick_electric.FlickAPI.getPricing", + return_value=_mock_flick_price(), + ), + ): + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_USERNAME: CONF[CONF_USERNAME], + CONF_PASSWORD: CONF[CONF_PASSWORD], + }, + title=CONF_USERNAME, + unique_id=CONF_USERNAME, + version=1, + ) + 
entry.add_to_hass(hass) + + # ensure setup fails + assert not await hass.config_entries.async_setup(entry.entry_id) + assert entry.state is ConfigEntryState.MIGRATION_ERROR + await hass.async_block_till_done() + + # Ensure reauth flow is triggered + await hass.async_block_till_done() + assert len(hass.config_entries.flow.async_progress()) == 1 From e303a9a2b58a038e69212805ac9413e493f33711 Mon Sep 17 00:00:00 2001 From: Dave T <17680170+davet2001@users.noreply.github.com> Date: Mon, 30 Dec 2024 23:46:42 +0000 Subject: [PATCH 1139/1198] Add stream preview to options flow in generic camera (#133927) * Add stream preview to options flow * Increase test coverage * Code review: use correct flow handler type in cast * Restore test coverage to 100% * Remove error and test that can't be triggered yet --- .../components/generic/config_flow.py | 113 ++++++++++-------- homeassistant/components/generic/strings.json | 8 +- tests/components/generic/test_config_flow.py | 98 ++++++++++++--- 3 files changed, 148 insertions(+), 71 deletions(-) diff --git a/homeassistant/components/generic/config_flow.py b/homeassistant/components/generic/config_flow.py index 83894b489f0..4b0717815c5 100644 --- a/homeassistant/components/generic/config_flow.py +++ b/homeassistant/components/generic/config_flow.py @@ -349,7 +349,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle the start of the config flow.""" errors = {} - description_placeholders = {} hass = self.hass if user_input: # Secondary validation because serialised vol can't seem to handle this complexity: @@ -365,8 +364,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): ) except InvalidStreamException as err: errors[CONF_STREAM_SOURCE] = str(err) - if err.details: - errors["error_details"] = err.details self.preview_stream = None if not errors: user_input[CONF_CONTENT_TYPE] = still_format @@ -385,8 +382,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): # temporary preview for user to check the image self.preview_cam = user_input return await self.async_step_user_confirm() - if "error_details" in errors: - description_placeholders["error"] = errors.pop("error_details") elif self.user_input: user_input = self.user_input else: @@ -394,7 +389,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="user", data_schema=build_schema(user_input), - description_placeholders=description_placeholders, errors=errors, ) @@ -412,7 +406,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): title=self.title, data={}, options=self.user_input ) register_preview(self.hass) - preview_url = f"/api/generic/preview_flow_image/{self.flow_id}?t={datetime.now().isoformat()}" return self.async_show_form( step_id="user_confirm", data_schema=vol.Schema( @@ -420,7 +413,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): vol.Required(CONF_CONFIRMED_OK, default=False): bool, } ), - description_placeholders={"preview_url": preview_url}, errors=None, preview="generic_camera", ) @@ -437,6 +429,7 @@ class GenericOptionsFlowHandler(OptionsFlow): def __init__(self) -> None: """Initialize Generic IP Camera options flow.""" self.preview_cam: dict[str, Any] = {} + self.preview_stream: Stream | None = None self.user_input: dict[str, Any] = {} async def async_step_init( @@ -444,42 +437,45 @@ class GenericOptionsFlowHandler(OptionsFlow): ) -> ConfigFlowResult: """Manage Generic IP Camera options.""" errors: dict[str, str] = {} - description_placeholders = {} hass = self.hass - if 
user_input is not None: - errors, still_format = await async_test_still( - hass, self.config_entry.options | user_input - ) - try: - await async_test_and_preview_stream(hass, user_input) - except InvalidStreamException as err: - errors[CONF_STREAM_SOURCE] = str(err) - if err.details: - errors["error_details"] = err.details - # Stream preview during options flow not yet implemented - - still_url = user_input.get(CONF_STILL_IMAGE_URL) - if not errors: - if still_url is None: - # If user didn't specify a still image URL, - # The automatically generated still image that stream generates - # is always jpeg - still_format = "image/jpeg" - data = { - CONF_USE_WALLCLOCK_AS_TIMESTAMPS: self.config_entry.options.get( - CONF_USE_WALLCLOCK_AS_TIMESTAMPS, False - ), - **user_input, - CONF_CONTENT_TYPE: still_format - or self.config_entry.options.get(CONF_CONTENT_TYPE), - } - self.user_input = data - # temporary preview for user to check the image - self.preview_cam = data - return await self.async_step_confirm_still() - if "error_details" in errors: - description_placeholders["error"] = errors.pop("error_details") + if user_input: + # Secondary validation because serialised vol can't seem to handle this complexity: + if not user_input.get(CONF_STILL_IMAGE_URL) and not user_input.get( + CONF_STREAM_SOURCE + ): + errors["base"] = "no_still_image_or_stream_url" + else: + errors, still_format = await async_test_still(hass, user_input) + try: + self.preview_stream = await async_test_and_preview_stream( + hass, user_input + ) + except InvalidStreamException as err: + errors[CONF_STREAM_SOURCE] = str(err) + self.preview_stream = None + if not errors: + user_input[CONF_CONTENT_TYPE] = still_format + still_url = user_input.get(CONF_STILL_IMAGE_URL) + if still_url is None: + # If user didn't specify a still image URL, + # The automatically generated still image that stream generates + # is always jpeg + still_format = "image/jpeg" + data = { + CONF_USE_WALLCLOCK_AS_TIMESTAMPS: self.config_entry.options.get( + CONF_USE_WALLCLOCK_AS_TIMESTAMPS, False + ), + **user_input, + CONF_CONTENT_TYPE: still_format + or self.config_entry.options.get(CONF_CONTENT_TYPE), + } + self.user_input = data + # temporary preview for user to check the image + self.preview_cam = data + return await self.async_step_user_confirm() + elif self.user_input: + user_input = self.user_input return self.async_show_form( step_id="init", data_schema=build_schema( @@ -487,15 +483,17 @@ class GenericOptionsFlowHandler(OptionsFlow): True, self.show_advanced_options, ), - description_placeholders=description_placeholders, errors=errors, ) - async def async_step_confirm_still( + async def async_step_user_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle user clicking confirm after still preview.""" if user_input: + if ha_stream := self.preview_stream: + # Kill off the temp stream we created. 
+ await ha_stream.stop() if not user_input.get(CONF_CONFIRMED_OK): return await self.async_step_init() return self.async_create_entry( @@ -503,18 +501,22 @@ class GenericOptionsFlowHandler(OptionsFlow): data=self.user_input, ) register_preview(self.hass) - preview_url = f"/api/generic/preview_flow_image/{self.flow_id}?t={datetime.now().isoformat()}" return self.async_show_form( - step_id="confirm_still", + step_id="user_confirm", data_schema=vol.Schema( { vol.Required(CONF_CONFIRMED_OK, default=False): bool, } ), - description_placeholders={"preview_url": preview_url}, errors=None, + preview="generic_camera", ) + @staticmethod + async def async_setup_preview(hass: HomeAssistant) -> None: + """Set up preview WS API.""" + websocket_api.async_register_command(hass, ws_start_preview) + class CameraImagePreview(HomeAssistantView): """Camera view to temporarily serve an image.""" @@ -556,7 +558,7 @@ class CameraImagePreview(HomeAssistantView): { vol.Required("type"): "generic_camera/start_preview", vol.Required("flow_id"): str, - vol.Optional("flow_type"): vol.Any("config_flow"), + vol.Optional("flow_type"): vol.Any("config_flow", "options_flow"), vol.Optional("user_input"): dict, } ) @@ -570,10 +572,17 @@ async def ws_start_preview( _LOGGER.debug("Generating websocket handler for generic camera preview") flow_id = msg["flow_id"] - flow = cast( - GenericIPCamConfigFlow, - hass.config_entries.flow._progress.get(flow_id), # noqa: SLF001 - ) + flow: GenericIPCamConfigFlow | GenericOptionsFlowHandler + if msg.get("flow_type", "config_flow") == "config_flow": + flow = cast( + GenericIPCamConfigFlow, + hass.config_entries.flow._progress.get(flow_id), # noqa: SLF001 + ) + else: # (flow type == "options flow") + flow = cast( + GenericOptionsFlowHandler, + hass.config_entries.options._progress.get(flow_id), # noqa: SLF001 + ) user_input = flow.preview_cam # Create an EntityPlatform, needed for name translations diff --git a/homeassistant/components/generic/strings.json b/homeassistant/components/generic/strings.json index 45841e6255f..854ceb93b3e 100644 --- a/homeassistant/components/generic/strings.json +++ b/homeassistant/components/generic/strings.json @@ -67,11 +67,11 @@ "use_wallclock_as_timestamps": "This option may correct segmenting or crashing issues arising from buggy timestamp implementations on some cameras" } }, - "confirm_still": { - "title": "Preview", - "description": "![Camera Still Image Preview]({preview_url})", + "user_confirm": { + "title": "Confirmation", + "description": "Please wait for previews to load...", "data": { - "confirmed_ok": "This image looks good." + "confirmed_ok": "Everything looks good." } } }, diff --git a/tests/components/generic/test_config_flow.py b/tests/components/generic/test_config_flow.py index f121b210c0c..4892496c486 100644 --- a/tests/components/generic/test_config_flow.py +++ b/tests/components/generic/test_config_flow.py @@ -93,12 +93,6 @@ async def test_form( ) assert result1["type"] is FlowResultType.FORM assert result1["step_id"] == "user_confirm" - client = await hass_client() - preview_url = result1["description_placeholders"]["preview_url"] - # Check the preview image works. - resp = await client.get(preview_url) - assert resp.status == HTTPStatus.OK - assert await resp.read() == fakeimgbytes_png # HA should now be serving a WS connection for a preview stream. 
ws_client = await hass_ws_client() @@ -109,7 +103,14 @@ async def test_form( "flow_id": flow_id, }, ) - _ = await ws_client.receive_json() + json = await ws_client.receive_json() + + client = await hass_client() + still_preview_url = json["event"]["attributes"]["still_url"] + # Check the preview image works. + resp = await client.get(still_preview_url) + assert resp.status == HTTPStatus.OK + assert await resp.read() == fakeimgbytes_png result2 = await hass.config_entries.flow.async_configure( result1["flow_id"], @@ -129,7 +130,7 @@ async def test_form( } # Check that the preview image is disabled after. - resp = await client.get(preview_url) + resp = await client.get(still_preview_url) assert resp.status == HTTPStatus.NOT_FOUND assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -207,6 +208,7 @@ async def test_form_still_preview_cam_off( mock_create_stream: _patch[MagicMock], user_flow: ConfigFlowResult, hass_client: ClientSessionGenerator, + hass_ws_client: WebSocketGenerator, ) -> None: """Test camera errors are triggered during preview.""" with ( @@ -222,10 +224,23 @@ async def test_form_still_preview_cam_off( ) assert result1["type"] is FlowResultType.FORM assert result1["step_id"] == "user_confirm" - preview_url = result1["description_placeholders"]["preview_url"] + + # HA should now be serving a WS connection for a preview stream. + ws_client = await hass_ws_client() + flow_id = user_flow["flow_id"] + await ws_client.send_json_auto_id( + { + "type": "generic_camera/start_preview", + "flow_id": flow_id, + }, + ) + json = await ws_client.receive_json() + + client = await hass_client() + still_preview_url = json["event"]["attributes"]["still_url"] # Try to view the image, should be unavailable. client = await hass_client() - resp = await client.get(preview_url) + resp = await client.get(still_preview_url) assert resp.status == HTTPStatus.SERVICE_UNAVAILABLE @@ -706,7 +721,7 @@ async def test_form_no_route_to_host( async def test_form_stream_io_error( hass: HomeAssistant, user_flow: ConfigFlowResult ) -> None: - """Test we handle no io error when setting up stream.""" + """Test we handle an io error when setting up stream.""" with patch( "homeassistant.components.generic.config_flow.create_stream", side_effect=OSError(errno.EIO, "Input/output error"), @@ -799,7 +814,7 @@ async def test_options_template_error( user_input=data, ) assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "confirm_still" + assert result2["step_id"] == "user_confirm" result2a = await hass.config_entries.options.async_configure( result2["flow_id"], user_input={CONF_CONFIRMED_OK: True} @@ -894,7 +909,7 @@ async def test_options_only_stream( user_input=data, ) assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "confirm_still" + assert result2["step_id"] == "user_confirm" result3 = await hass.config_entries.options.async_configure( result2["flow_id"], user_input={CONF_CONFIRMED_OK: True} @@ -903,6 +918,35 @@ async def test_options_only_stream( assert result3["data"][CONF_CONTENT_TYPE] == "image/jpeg" +async def test_options_still_and_stream_not_provided( + hass: HomeAssistant, +) -> None: + """Test we show a suitable error if neither still or stream URL are provided.""" + data = TESTDATA.copy() + + mock_entry = MockConfigEntry( + title="Test Camera", + domain=DOMAIN, + data={}, + options=data, + ) + mock_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_entry.entry_id) + + result = await 
hass.config_entries.options.async_init(mock_entry.entry_id) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + data.pop(CONF_STILL_IMAGE_URL) + data.pop(CONF_STREAM_SOURCE) + result2 = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input=data, + ) + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "no_still_image_or_stream_url"} + + @respx.mock @pytest.mark.usefixtures("fakeimg_png") async def test_form_options_stream_worker_error( @@ -997,10 +1041,15 @@ async def test_migrate_existing_ids( @respx.mock @pytest.mark.usefixtures("fakeimg_png") async def test_use_wallclock_as_timestamps_option( - hass: HomeAssistant, mock_create_stream: _patch[MagicMock] + hass: HomeAssistant, + mock_create_stream: _patch[MagicMock], + hass_client: ClientSessionGenerator, + hass_ws_client: WebSocketGenerator, + fakeimgbytes_png: bytes, ) -> None: """Test the use_wallclock_as_timestamps option flow.""" + respx.get("http://127.0.0.1/testurl/1").respond(stream=fakeimgbytes_png) mock_entry = MockConfigEntry( title="Test Camera", domain=DOMAIN, @@ -1026,6 +1075,25 @@ async def test_use_wallclock_as_timestamps_option( user_input={CONF_USE_WALLCLOCK_AS_TIMESTAMPS: True, **TESTDATA}, ) assert result2["type"] is FlowResultType.FORM + + ws_client = await hass_ws_client() + flow_id = result2["flow_id"] + await ws_client.send_json_auto_id( + { + "type": "generic_camera/start_preview", + "flow_id": flow_id, + "flow_type": "options_flow", + }, + ) + json = await ws_client.receive_json() + + client = await hass_client() + still_preview_url = json["event"]["attributes"]["still_url"] + # Check the preview image works. + resp = await client.get(still_preview_url) + assert resp.status == HTTPStatus.OK + assert await resp.read() == fakeimgbytes_png + # Test what happens if user rejects the preview result3 = await hass.config_entries.options.async_configure( result2["flow_id"], user_input={CONF_CONFIRMED_OK: False} @@ -1041,7 +1109,7 @@ async def test_use_wallclock_as_timestamps_option( user_input={CONF_USE_WALLCLOCK_AS_TIMESTAMPS: True, **TESTDATA}, ) assert result4["type"] is FlowResultType.FORM - assert result4["step_id"] == "confirm_still" + assert result4["step_id"] == "user_confirm" result5 = await hass.config_entries.options.async_configure( result4["flow_id"], user_input={CONF_CONFIRMED_OK: True}, From 229c32b0daaee63a397a239062ba40fe99fc46e2 Mon Sep 17 00:00:00 2001 From: Simone Chemelli Date: Sun, 29 Dec 2024 11:30:52 -0500 Subject: [PATCH 1140/1198] Bump aiocomelit to 0.10.1 (#134214) --- homeassistant/components/comelit/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/comelit/manifest.json b/homeassistant/components/comelit/manifest.json index d7417ad4aad..238dede8546 100644 --- a/homeassistant/components/comelit/manifest.json +++ b/homeassistant/components/comelit/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["aiocomelit"], - "requirements": ["aiocomelit==0.9.1"] + "requirements": ["aiocomelit==0.10.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 438690ac560..6232a47865f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -213,7 +213,7 @@ aiobafi6==0.9.0 aiobotocore==2.13.1 # homeassistant.components.comelit -aiocomelit==0.9.1 +aiocomelit==0.10.1 # homeassistant.components.dhcp aiodhcpwatcher==1.0.2 diff --git 
a/requirements_test_all.txt b/requirements_test_all.txt index ebf6ac82782..72ddec608a4 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -201,7 +201,7 @@ aiobafi6==0.9.0 aiobotocore==2.13.1 # homeassistant.components.comelit -aiocomelit==0.9.1 +aiocomelit==0.10.1 # homeassistant.components.dhcp aiodhcpwatcher==1.0.2 From c908f823c51fcb48ccf65b5e11823ec525f6c755 Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Mon, 30 Dec 2024 16:21:18 +1000 Subject: [PATCH 1141/1198] Handle missing application credentials in Tesla Fleet (#134237) * Handle missing application credentials * Add tests * Test reauth starts * Only catch ValueError --- .../components/tesla_fleet/__init__.py | 10 +++++++++- tests/components/tesla_fleet/conftest.py | 12 ++++++++++-- tests/components/tesla_fleet/test_init.py | 18 ++++++++++++++++++ 3 files changed, 37 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/tesla_fleet/__init__.py b/homeassistant/components/tesla_fleet/__init__.py index bc837aa4cac..ff50a99748e 100644 --- a/homeassistant/components/tesla_fleet/__init__.py +++ b/homeassistant/components/tesla_fleet/__init__.py @@ -64,6 +64,15 @@ CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) -> bool: """Set up TeslaFleet config.""" + try: + implementation = await async_get_config_entry_implementation(hass, entry) + except ValueError as e: + # Remove invalid implementation from config entry then raise AuthFailed + hass.config_entries.async_update_entry( + entry, data={"auth_implementation": None} + ) + raise ConfigEntryAuthFailed from e + access_token = entry.data[CONF_TOKEN][CONF_ACCESS_TOKEN] session = async_get_clientsession(hass) @@ -71,7 +80,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - scopes: list[Scope] = [Scope(s) for s in token["scp"]] region: str = token["ou_code"].lower() - implementation = await async_get_config_entry_implementation(hass, entry) oauth_session = OAuth2Session(hass, entry, implementation) refresh_lock = asyncio.Lock() diff --git a/tests/components/tesla_fleet/conftest.py b/tests/components/tesla_fleet/conftest.py index 0dc5d87984f..2396e2a88f3 100644 --- a/tests/components/tesla_fleet/conftest.py +++ b/tests/components/tesla_fleet/conftest.py @@ -33,7 +33,9 @@ def mock_expires_at() -> int: return time.time() + 3600 -def create_config_entry(expires_at: int, scopes: list[Scope]) -> MockConfigEntry: +def create_config_entry( + expires_at: int, scopes: list[Scope], implementation: str = DOMAIN +) -> MockConfigEntry: """Create Tesla Fleet entry in Home Assistant.""" access_token = jwt.encode( { @@ -51,7 +53,7 @@ def create_config_entry(expires_at: int, scopes: list[Scope]) -> MockConfigEntry title=UID, unique_id=UID, data={ - "auth_implementation": DOMAIN, + "auth_implementation": implementation, "token": { "status": 0, "userid": UID, @@ -90,6 +92,12 @@ def readonly_config_entry(expires_at: int) -> MockConfigEntry: ) +@pytest.fixture +def bad_config_entry(expires_at: int) -> MockConfigEntry: + """Create Tesla Fleet entry in Home Assistant.""" + return create_config_entry(expires_at, SCOPES, "bad") + + @pytest.fixture(autouse=True) def mock_products() -> Generator[AsyncMock]: """Mock Tesla Fleet Api products method.""" diff --git a/tests/components/tesla_fleet/test_init.py b/tests/components/tesla_fleet/test_init.py index 7c17f986663..7e97096e4e8 100644 --- a/tests/components/tesla_fleet/test_init.py +++ 
b/tests/components/tesla_fleet/test_init.py @@ -30,6 +30,7 @@ from homeassistant.components.tesla_fleet.coordinator import ( from homeassistant.components.tesla_fleet.models import TeslaFleetData from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import device_registry as dr from . import setup_platform @@ -424,3 +425,20 @@ async def test_signing( ) as mock_get_private_key: await setup_platform(hass, normal_config_entry) mock_get_private_key.assert_called_once() + + +async def test_bad_implementation( + hass: HomeAssistant, + bad_config_entry: MockConfigEntry, +) -> None: + """Test handling of a bad authentication implementation.""" + + await setup_platform(hass, bad_config_entry) + assert bad_config_entry.state is ConfigEntryState.SETUP_ERROR + + # Ensure reauth flow starts + assert any(bad_config_entry.async_get_active_flows(hass, {"reauth"})) + result = await bad_config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert not result["errors"] From b89995a79fecb4dcc13d3db1cd5ff7584fd0b69e Mon Sep 17 00:00:00 2001 From: tronikos Date: Tue, 31 Dec 2024 12:52:29 -0800 Subject: [PATCH 1142/1198] Allow automations to pass any conversation_id for Google Generative AI (#134251) --- .../google_generative_ai_conversation/conversation.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/homeassistant/components/google_generative_ai_conversation/conversation.py b/homeassistant/components/google_generative_ai_conversation/conversation.py index 0d24ddbf39f..dad9c8a1920 100644 --- a/homeassistant/components/google_generative_ai_conversation/conversation.py +++ b/homeassistant/components/google_generative_ai_conversation/conversation.py @@ -204,9 +204,7 @@ class GoogleGenerativeAIConversationEntity( """Process a sentence.""" result = conversation.ConversationResult( response=intent.IntentResponse(language=user_input.language), - conversation_id=user_input.conversation_id - if user_input.conversation_id in self.history - else ulid.ulid_now(), + conversation_id=user_input.conversation_id or ulid.ulid_now(), ) assert result.conversation_id From a36fd0964453ebf24bd3d189663f24bda19e3d17 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Tue, 31 Dec 2024 15:01:06 +0100 Subject: [PATCH 1143/1198] Set backup manager state to completed when restore is finished (#134283) --- homeassistant/components/backup/manager.py | 3 +++ tests/components/hassio/test_backup.py | 11 ++++++++++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index 9b20c82d709..9515ab89cd2 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -829,6 +829,9 @@ class BackupManager: restore_folders=restore_folders, restore_homeassistant=restore_homeassistant, ) + self.async_on_backup_event( + RestoreBackupEvent(stage=None, state=RestoreBackupState.COMPLETED) + ) except Exception: self.async_on_backup_event( RestoreBackupEvent(stage=None, state=RestoreBackupState.FAILED) diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py index c39574fd941..3c9440c41ff 100644 --- a/tests/components/hassio/test_backup.py +++ b/tests/components/hassio/test_backup.py @@ -942,7 +942,9 @@ async def test_reader_writer_restore( await 
client.send_json_auto_id({"type": "backup/subscribe_events"}) response = await client.receive_json() - assert response["event"] == {"manager_state": "idle"} + assert response["event"] == { + "manager_state": "idle", + } response = await client.receive_json() assert response["success"] @@ -980,6 +982,13 @@ async def test_reader_writer_restore( response = await client.receive_json() assert response["success"] + response = await client.receive_json() + assert response["event"] == { + "manager_state": "restore_backup", + "stage": None, + "state": "completed", + } + response = await client.receive_json() assert response["event"] == {"manager_state": "idle"} From c2f06fbd4775568fc92fa79bcd120786fb27f238 Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Tue, 31 Dec 2024 10:31:40 +0100 Subject: [PATCH 1144/1198] Bump reolink-aio to 0.11.6 (#134286) --- homeassistant/components/reolink/camera.py | 2 +- homeassistant/components/reolink/manifest.json | 2 +- homeassistant/components/reolink/media_source.py | 2 ++ homeassistant/components/reolink/strings.json | 1 + requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/reolink/test_media_source.py | 8 +++++++- 7 files changed, 14 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/reolink/camera.py b/homeassistant/components/reolink/camera.py index d9b3cb67f70..a597be3ec7a 100644 --- a/homeassistant/components/reolink/camera.py +++ b/homeassistant/components/reolink/camera.py @@ -100,7 +100,7 @@ async def async_setup_entry( if not entity_description.supported(reolink_data.host.api, channel): continue stream_url = await reolink_data.host.api.get_stream_source( - channel, entity_description.stream + channel, entity_description.stream, False ) if stream_url is None and "snapshots" not in entity_description.stream: continue diff --git a/homeassistant/components/reolink/manifest.json b/homeassistant/components/reolink/manifest.json index e5e8afc1d63..7d01ca808e1 100644 --- a/homeassistant/components/reolink/manifest.json +++ b/homeassistant/components/reolink/manifest.json @@ -19,5 +19,5 @@ "iot_class": "local_push", "loggers": ["reolink_aio"], "quality_scale": "platinum", - "requirements": ["reolink-aio==0.11.5"] + "requirements": ["reolink-aio==0.11.6"] } diff --git a/homeassistant/components/reolink/media_source.py b/homeassistant/components/reolink/media_source.py index 0c23bed7e2f..538a06a08f8 100644 --- a/homeassistant/components/reolink/media_source.py +++ b/homeassistant/components/reolink/media_source.py @@ -81,6 +81,8 @@ class ReolinkVODMediaSource(MediaSource): def get_vod_type() -> VodRequestType: if filename.endswith(".mp4"): + if host.api.is_nvr: + return VodRequestType.DOWNLOAD return VodRequestType.PLAYBACK if host.api.is_nvr: return VodRequestType.FLV diff --git a/homeassistant/components/reolink/strings.json b/homeassistant/components/reolink/strings.json index 283c1d42e89..50163fa1aca 100644 --- a/homeassistant/components/reolink/strings.json +++ b/homeassistant/components/reolink/strings.json @@ -567,6 +567,7 @@ "stayoff": "Stay off", "auto": "[%key:component::reolink::entity::select::day_night_mode::state::auto%]", "alwaysonatnight": "Auto & always on at night", + "always": "Always on", "alwayson": "Always on" } }, diff --git a/requirements_all.txt b/requirements_all.txt index 6232a47865f..209c4740202 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2572,7 +2572,7 @@ renault-api==0.2.8 renson-endura-delta==1.7.2 # homeassistant.components.reolink -reolink-aio==0.11.5 
+reolink-aio==0.11.6

 # homeassistant.components.idteck_prox
 rfk101py==0.0.1

diff --git a/requirements_test_all.txt b/requirements_test_all.txt
index 72ddec608a4..b714bed884d 100644
--- a/requirements_test_all.txt
+++ b/requirements_test_all.txt
@@ -2072,7 +2072,7 @@ renault-api==0.2.8
 renson-endura-delta==1.7.2

 # homeassistant.components.reolink
-reolink-aio==0.11.5
+reolink-aio==0.11.6

 # homeassistant.components.rflink
 rflink==0.0.66

diff --git a/tests/components/reolink/test_media_source.py b/tests/components/reolink/test_media_source.py
index 32afd1f73ca..9c5be08e9b6 100644
--- a/tests/components/reolink/test_media_source.py
+++ b/tests/components/reolink/test_media_source.py
@@ -109,11 +109,17 @@ async def test_resolve(
     )
     assert play_media.mime_type == TEST_MIME_TYPE_MP4

+    reolink_connect.is_nvr = False
+
+    play_media = await async_resolve_media(
+        hass, f"{URI_SCHEME}{DOMAIN}/{file_id}", None
+    )
+    assert play_media.mime_type == TEST_MIME_TYPE_MP4
+
     file_id = (
         f"FILE|{config_entry.entry_id}|{TEST_CHANNEL}|{TEST_STREAM}|{TEST_FILE_NAME}"
     )
     reolink_connect.get_vod_source.return_value = (TEST_MIME_TYPE, TEST_URL)
-    reolink_connect.is_nvr = False

     play_media = await async_resolve_media(
         hass, f"{URI_SCHEME}{DOMAIN}/{file_id}", None

From 1064ef9dc61be0c4369a2872b1127833674554fa Mon Sep 17 00:00:00 2001
From: Niels Mündler
Date: Tue, 31 Dec 2024 23:03:35 +0100
Subject: [PATCH 1145/1198] Bump pysyncthru version to 0.8.0 (#134294)

---
 homeassistant/components/syncthru/manifest.json | 2 +-
 requirements_all.txt | 2 +-
 requirements_test_all.txt | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/homeassistant/components/syncthru/manifest.json b/homeassistant/components/syncthru/manifest.json
index a93e02a51c7..461ce9bfd3a 100644
--- a/homeassistant/components/syncthru/manifest.json
+++ b/homeassistant/components/syncthru/manifest.json
@@ -6,7 +6,7 @@
   "documentation": "https://www.home-assistant.io/integrations/syncthru",
   "iot_class": "local_polling",
   "loggers": ["pysyncthru"],
-  "requirements": ["PySyncThru==0.7.10", "url-normalize==1.4.3"],
+  "requirements": ["PySyncThru==0.8.0", "url-normalize==1.4.3"],
   "ssdp": [
     {
       "deviceType": "urn:schemas-upnp-org:device:Printer:1",
diff --git a/requirements_all.txt b/requirements_all.txt
index 209c4740202..65a6986b9ef 100644
--- a/requirements_all.txt
+++ b/requirements_all.txt
@@ -90,7 +90,7 @@ PySwitchbot==0.55.4
 PySwitchmate==0.5.1

 # homeassistant.components.syncthru
-PySyncThru==0.7.10
+PySyncThru==0.8.0

 # homeassistant.components.transport_nsw
 PyTransportNSW==0.1.1
diff --git a/requirements_test_all.txt b/requirements_test_all.txt
index b714bed884d..e09e2c51379 100644
--- a/requirements_test_all.txt
+++ b/requirements_test_all.txt
@@ -84,7 +84,7 @@ PyRMVtransport==0.3.3
 PySwitchbot==0.55.4

 # homeassistant.components.syncthru
-PySyncThru==0.7.10
+PySyncThru==0.8.0

 # homeassistant.components.transport_nsw
 PyTransportNSW==0.1.1

From a7995e00938c799df83bcdadf4a84df09dfd5be6 Mon Sep 17 00:00:00 2001
From: Simone Chemelli
Date: Tue, 31 Dec 2024 11:16:12 -0500
Subject: [PATCH 1146/1198] Bump aioshelly to 12.2.0 (#134352)

---
 homeassistant/components/shelly/manifest.json | 2 +-
 requirements_all.txt | 2 +-
 requirements_test_all.txt | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/homeassistant/components/shelly/manifest.json b/homeassistant/components/shelly/manifest.json
index 3489a2d06d9..29c8fd4c369 100644
--- a/homeassistant/components/shelly/manifest.json
+++ 
b/homeassistant/components/shelly/manifest.json @@ -8,7 +8,7 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["aioshelly"], - "requirements": ["aioshelly==12.1.0"], + "requirements": ["aioshelly==12.2.0"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/requirements_all.txt b/requirements_all.txt index 65a6986b9ef..bfd0d8320e8 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -365,7 +365,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==12.1.0 +aioshelly==12.2.0 # homeassistant.components.skybell aioskybell==22.7.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index e09e2c51379..0ea7592e3b1 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -347,7 +347,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==12.1.0 +aioshelly==12.2.0 # homeassistant.components.skybell aioskybell==22.7.0 From 952363eca30493121eb43daef71b70705175a50f Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Tue, 31 Dec 2024 14:52:15 -0600 Subject: [PATCH 1147/1198] Bump hassil to 2.1.0 (#134359) --- homeassistant/components/conversation/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- script/hassfest/docker/Dockerfile | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index a2ddd5f734c..4017ed82be1 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", "quality_scale": "internal", - "requirements": ["hassil==2.0.5", "home-assistant-intents==2024.12.20"] + "requirements": ["hassil==2.1.0", "home-assistant-intents==2024.12.20"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index d1ccc31a0ed..46cd4485188 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -33,7 +33,7 @@ go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 habluetooth==3.6.0 hass-nabucasa==0.87.0 -hassil==2.0.5 +hassil==2.1.0 home-assistant-bluetooth==1.13.0 home-assistant-frontend==20241230.0 home-assistant-intents==2024.12.20 diff --git a/requirements_all.txt b/requirements_all.txt index bfd0d8320e8..9c93955e03a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1100,7 +1100,7 @@ hass-nabucasa==0.87.0 hass-splunk==0.1.1 # homeassistant.components.conversation -hassil==2.0.5 +hassil==2.1.0 # homeassistant.components.jewish_calendar hdate==0.11.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 0ea7592e3b1..827eb5d3713 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -938,7 +938,7 @@ habluetooth==3.6.0 hass-nabucasa==0.87.0 # homeassistant.components.conversation -hassil==2.0.5 +hassil==2.1.0 # homeassistant.components.jewish_calendar hdate==0.11.1 diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index bd2c9d328ac..52948484ed8 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -23,7 +23,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.5.8,source=/uv,target=/bin/uv \ -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ -r /usr/src/homeassistant/requirements.txt \ stdlib-list==0.10.0 
pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.3 \ - PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.0.5 home-assistant-intents==2024.12.20 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 + PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.1.0 home-assistant-intents==2024.12.20 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 LABEL "name"="hassfest" LABEL "maintainer"="Home Assistant " From f709989717e4062ff19e9c313776fd715773fc37 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Tue, 31 Dec 2024 13:04:41 -0600 Subject: [PATCH 1148/1198] Revert speech seconds to 0.3 (#134360) --- homeassistant/components/assist_pipeline/vad.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/assist_pipeline/vad.py b/homeassistant/components/assist_pipeline/vad.py index c7fe1bc10c7..d4647fafe2a 100644 --- a/homeassistant/components/assist_pipeline/vad.py +++ b/homeassistant/components/assist_pipeline/vad.py @@ -75,7 +75,7 @@ class AudioBuffer: class VoiceCommandSegmenter: """Segments an audio stream into voice commands.""" - speech_seconds: float = 0.1 + speech_seconds: float = 0.3 """Seconds of speech before voice command has started.""" command_seconds: float = 1.0 From 0ae4a9a9111590ba187f46215ebbacc1410a77c0 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Tue, 31 Dec 2024 23:04:28 +0100 Subject: [PATCH 1149/1198] Update frontend to 20241231.0 (#134363) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 01fe363d69e..d1bb15b5d3b 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20241230.0"] + "requirements": ["home-assistant-frontend==20241231.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 46cd4485188..c97dbe11d29 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -35,7 +35,7 @@ habluetooth==3.6.0 hass-nabucasa==0.87.0 hassil==2.1.0 home-assistant-bluetooth==1.13.0 -home-assistant-frontend==20241230.0 +home-assistant-frontend==20241231.0 home-assistant-intents==2024.12.20 httpx==0.27.2 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 9c93955e03a..b8ec2a85be8 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1134,7 +1134,7 @@ hole==0.8.0 holidays==0.63 # homeassistant.components.frontend -home-assistant-frontend==20241230.0 +home-assistant-frontend==20241231.0 # homeassistant.components.conversation home-assistant-intents==2024.12.20 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 827eb5d3713..f9019326d89 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -963,7 +963,7 @@ hole==0.8.0 holidays==0.63 # homeassistant.components.frontend -home-assistant-frontend==20241230.0 +home-assistant-frontend==20241231.0 # homeassistant.components.conversation home-assistant-intents==2024.12.20 From ab6394b26ca82c524fa0eb1c75947e3d68b4c72c Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Tue, 31 Dec 2024 
22:49:29 +0100 Subject: [PATCH 1150/1198] Bump pylamarzocco to 1.4.6 (#134367) --- homeassistant/components/lamarzocco/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lamarzocco/manifest.json b/homeassistant/components/lamarzocco/manifest.json index 6b586a5cfb8..afd367b0f6e 100644 --- a/homeassistant/components/lamarzocco/manifest.json +++ b/homeassistant/components/lamarzocco/manifest.json @@ -37,5 +37,5 @@ "iot_class": "cloud_polling", "loggers": ["pylamarzocco"], "quality_scale": "platinum", - "requirements": ["pylamarzocco==1.4.5"] + "requirements": ["pylamarzocco==1.4.6"] } diff --git a/requirements_all.txt b/requirements_all.txt index b8ec2a85be8..e57074933c0 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2043,7 +2043,7 @@ pykwb==0.0.8 pylacrosse==0.4 # homeassistant.components.lamarzocco -pylamarzocco==1.4.5 +pylamarzocco==1.4.6 # homeassistant.components.lastfm pylast==5.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f9019326d89..223502ece25 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1657,7 +1657,7 @@ pykrakenapi==0.1.8 pykulersky==0.5.2 # homeassistant.components.lamarzocco -pylamarzocco==1.4.5 +pylamarzocco==1.4.6 # homeassistant.components.lastfm pylast==5.1.0 From 2e21ac700111fb44eb88081dbc5c7a61ea584787 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Tue, 31 Dec 2024 22:10:20 +0000 Subject: [PATCH 1151/1198] Bump version to 2025.1.0b5 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index e45608ce9bb..d44095629f0 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -25,7 +25,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2025 MINOR_VERSION: Final = 1 -PATCH_VERSION: Final = "0b4" +PATCH_VERSION: Final = "0b5" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 6219a7cee8d..a461427b070 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2025.1.0b4" +version = "2025.1.0b5" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" From bd5477729a6d067d447921ffc8383bf2ebd405a2 Mon Sep 17 00:00:00 2001 From: Craig Andrews Date: Thu, 2 Jan 2025 11:21:49 -0500 Subject: [PATCH 1152/1198] Improve is docker env checks (#132404) Co-authored-by: Franck Nijhof Co-authored-by: Sander Hoentjen Co-authored-by: Paulus Schoutsen Co-authored-by: Robert Resch --- homeassistant/bootstrap.py | 3 +- homeassistant/components/ffmpeg/__init__.py | 2 +- homeassistant/helpers/system_info.py | 8 +--- homeassistant/util/package.py | 11 +++++- homeassistant/util/system_info.py | 12 ++++++ tests/helpers/test_system_info.py | 12 +----- tests/util/test_package.py | 44 +++++++++++++++++++++ tests/util/test_system_info.py | 15 +++++++ 8 files changed, 85 insertions(+), 22 deletions(-) create mode 100644 homeassistant/util/system_info.py create mode 100644 tests/util/test_system_info.py diff --git a/homeassistant/bootstrap.py b/homeassistant/bootstrap.py index 78c7d91fae0..f1f1835863b 100644 --- a/homeassistant/bootstrap.py +++ b/homeassistant/bootstrap.py @@ -89,7 +89,7 @@ from .helpers import ( ) from .helpers.dispatcher import async_dispatcher_send_internal from .helpers.storage import get_internal_store_manager -from .helpers.system_info import async_get_system_info, is_official_image +from .helpers.system_info import async_get_system_info from .helpers.typing import ConfigType from .setup import ( # _setup_started is marked as protected to make it clear @@ -106,6 +106,7 @@ from .util.async_ import create_eager_task from .util.hass_dict import HassKey from .util.logging import async_activate_log_queue_handler from .util.package import async_get_user_site, is_docker_env, is_virtual_env +from .util.system_info import is_official_image with contextlib.suppress(ImportError): # Ensure anyio backend is imported to avoid it being imported in the event loop diff --git a/homeassistant/components/ffmpeg/__init__.py b/homeassistant/components/ffmpeg/__init__.py index 9a88317027e..99803e9636c 100644 --- a/homeassistant/components/ffmpeg/__init__.py +++ b/homeassistant/components/ffmpeg/__init__.py @@ -23,10 +23,10 @@ from homeassistant.helpers.dispatcher import ( async_dispatcher_send, ) from homeassistant.helpers.entity import Entity -from homeassistant.helpers.system_info import is_official_image from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass from homeassistant.util.signal_type import SignalType +from homeassistant.util.system_info import is_official_image DOMAIN = "ffmpeg" diff --git a/homeassistant/helpers/system_info.py b/homeassistant/helpers/system_info.py index 53866428332..df9679dcb08 100644 --- a/homeassistant/helpers/system_info.py +++ b/homeassistant/helpers/system_info.py @@ -5,7 +5,6 @@ from __future__ import annotations from functools import cache from getpass import getuser import logging -import os import platform from typing import TYPE_CHECKING, Any @@ -13,6 +12,7 @@ from homeassistant.const import __version__ as current_version from homeassistant.core import HomeAssistant from homeassistant.loader import bind_hass from homeassistant.util.package import is_docker_env, is_virtual_env +from homeassistant.util.system_info import is_official_image from .hassio import is_hassio from .importlib import async_import_module @@ -23,12 +23,6 @@ _LOGGER = logging.getLogger(__name__) _DATA_MAC_VER = "system_info_mac_ver" -@cache -def is_official_image() -> bool: - """Return True if Home Assistant is running in an official container.""" - return os.path.isfile("/OFFICIAL_IMAGE") - - 
@singleton(_DATA_MAC_VER) async def async_get_mac_ver(hass: HomeAssistant) -> str: """Return the macOS version.""" diff --git a/homeassistant/util/package.py b/homeassistant/util/package.py index da0666290a1..9720bbd4ca3 100644 --- a/homeassistant/util/package.py +++ b/homeassistant/util/package.py @@ -15,6 +15,8 @@ from urllib.parse import urlparse from packaging.requirements import InvalidRequirement, Requirement +from .system_info import is_official_image + _LOGGER = logging.getLogger(__name__) @@ -28,8 +30,13 @@ def is_virtual_env() -> bool: @cache def is_docker_env() -> bool: - """Return True if we run in a docker env.""" - return Path("/.dockerenv").exists() + """Return True if we run in a container env.""" + return ( + Path("/.dockerenv").exists() + or Path("/run/.containerenv").exists() + or "KUBERNETES_SERVICE_HOST" in os.environ + or is_official_image() + ) def get_installed_versions(specifiers: set[str]) -> set[str]: diff --git a/homeassistant/util/system_info.py b/homeassistant/util/system_info.py new file mode 100644 index 00000000000..80621bd16a5 --- /dev/null +++ b/homeassistant/util/system_info.py @@ -0,0 +1,12 @@ +"""Util to gather system info.""" + +from __future__ import annotations + +from functools import cache +import os + + +@cache +def is_official_image() -> bool: + """Return True if Home Assistant is running in an official container.""" + return os.path.isfile("/OFFICIAL_IMAGE") diff --git a/tests/helpers/test_system_info.py b/tests/helpers/test_system_info.py index 2c4b95302fc..ad140834199 100644 --- a/tests/helpers/test_system_info.py +++ b/tests/helpers/test_system_info.py @@ -9,17 +9,7 @@ import pytest from homeassistant.components import hassio from homeassistant.const import __version__ as current_version from homeassistant.core import HomeAssistant -from homeassistant.helpers.system_info import async_get_system_info, is_official_image - - -async def test_is_official_image() -> None: - """Test is_official_image.""" - is_official_image.cache_clear() - with patch("homeassistant.helpers.system_info.os.path.isfile", return_value=True): - assert is_official_image() is True - is_official_image.cache_clear() - with patch("homeassistant.helpers.system_info.os.path.isfile", return_value=False): - assert is_official_image() is False +from homeassistant.helpers.system_info import async_get_system_info async def test_get_system_info(hass: HomeAssistant) -> None: diff --git a/tests/util/test_package.py b/tests/util/test_package.py index b7497d620cd..e3635dd2bea 100644 --- a/tests/util/test_package.py +++ b/tests/util/test_package.py @@ -410,3 +410,47 @@ def test_check_package_previous_failed_install() -> None: with patch("homeassistant.util.package.version", return_value=None): assert not package.is_installed(installed_package) assert not package.is_installed(f"{installed_package}=={installed_version}") + + +@pytest.mark.parametrize("dockerenv", [True, False], ids=["dockerenv", "not_dockerenv"]) +@pytest.mark.parametrize( + "containerenv", [True, False], ids=["containerenv", "not_containerenv"] +) +@pytest.mark.parametrize( + "kubernetes_service_host", [True, False], ids=["kubernetes", "not_kubernetes"] +) +@pytest.mark.parametrize( + "is_official_image", [True, False], ids=["official_image", "not_official_image"] +) +async def test_is_docker_env( + dockerenv: bool, + containerenv: bool, + kubernetes_service_host: bool, + is_official_image: bool, +) -> None: + """Test is_docker_env.""" + + def new_path_mock(path: str): + mock = Mock() + if path == "/.dockerenv": + 
mock.exists.return_value = dockerenv + elif path == "/run/.containerenv": + mock.exists.return_value = containerenv + return mock + + env = {} + if kubernetes_service_host: + env["KUBERNETES_SERVICE_HOST"] = "True" + + package.is_docker_env.cache_clear() + with ( + patch("homeassistant.util.package.Path", side_effect=new_path_mock), + patch( + "homeassistant.util.package.is_official_image", + return_value=is_official_image, + ), + patch.dict(os.environ, env), + ): + assert package.is_docker_env() is any( + [dockerenv, containerenv, kubernetes_service_host, is_official_image] + ) diff --git a/tests/util/test_system_info.py b/tests/util/test_system_info.py new file mode 100644 index 00000000000..270e91d37db --- /dev/null +++ b/tests/util/test_system_info.py @@ -0,0 +1,15 @@ +"""Tests for the system info helper.""" + +from unittest.mock import patch + +from homeassistant.util.system_info import is_official_image + + +async def test_is_official_image() -> None: + """Test is_official_image.""" + is_official_image.cache_clear() + with patch("homeassistant.util.system_info.os.path.isfile", return_value=True): + assert is_official_image() is True + is_official_image.cache_clear() + with patch("homeassistant.util.system_info.os.path.isfile", return_value=False): + assert is_official_image() is False From 5895aa4cdea06336b96f093bbe626436a3fad93d Mon Sep 17 00:00:00 2001 From: Martin Hjelmare Date: Thu, 2 Jan 2025 15:45:46 +0100 Subject: [PATCH 1153/1198] Handle backup errors more consistently (#133522) * Add backup manager and read writer errors * Clean up not needed default argument * Clean up todo comment * Trap agent bugs during upload * Always release stream * Clean up leftover * Update test for backup with automatic settings * Fix use of vol.Any * Refactor test helper * Only update successful timestamp if completed event is sent * Always delete surplus copies * Fix after rebase * Fix after rebase * Revert "Fix use of vol.Any" This reverts commit 28fd7a544899bb6ed05f771e9e608bc5b41d2b5e. 
* Inherit BackupReaderWriterError in IncorrectPasswordError --------- Co-authored-by: Erik Montnemery --- homeassistant/components/backup/__init__.py | 2 + homeassistant/components/backup/config.py | 6 +- homeassistant/components/backup/manager.py | 217 +++++--- homeassistant/components/backup/models.py | 6 + homeassistant/components/hassio/backup.py | 62 ++- tests/components/backup/common.py | 12 + tests/components/backup/test_manager.py | 546 +++++++++++++++++--- tests/components/backup/test_websocket.py | 166 +++++- tests/components/cloud/test_backup.py | 40 +- tests/components/hassio/test_backup.py | 295 ++++++++++- 10 files changed, 1152 insertions(+), 200 deletions(-) diff --git a/homeassistant/components/backup/__init__.py b/homeassistant/components/backup/__init__.py index ab324a44e3b..7d9979ce9a2 100644 --- a/homeassistant/components/backup/__init__.py +++ b/homeassistant/components/backup/__init__.py @@ -21,6 +21,7 @@ from .manager import ( BackupManager, BackupPlatformProtocol, BackupReaderWriter, + BackupReaderWriterError, CoreBackupReaderWriter, CreateBackupEvent, ManagerBackup, @@ -39,6 +40,7 @@ __all__ = [ "BackupAgentPlatformProtocol", "BackupPlatformProtocol", "BackupReaderWriter", + "BackupReaderWriterError", "CreateBackupEvent", "Folder", "LocalBackupAgent", diff --git a/homeassistant/components/backup/config.py b/homeassistant/components/backup/config.py index cdecf55848f..d58c7365c8a 100644 --- a/homeassistant/components/backup/config.py +++ b/homeassistant/components/backup/config.py @@ -17,7 +17,7 @@ from homeassistant.helpers.typing import UNDEFINED, UndefinedType from homeassistant.util import dt as dt_util from .const import LOGGER -from .models import Folder +from .models import BackupManagerError, Folder if TYPE_CHECKING: from .manager import BackupManager, ManagerBackup @@ -318,9 +318,9 @@ class BackupSchedule: password=config_data.create_backup.password, with_automatic_settings=True, ) + except BackupManagerError as err: + LOGGER.error("Error creating backup: %s", err) except Exception: # noqa: BLE001 - # another more specific exception will be added - # and handled in the future LOGGER.exception("Unexpected error creating automatic backup") manager.remove_next_backup_event = async_track_point_in_time( diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index 9515ab89cd2..8421448f619 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -46,15 +46,11 @@ from .const import ( EXCLUDE_FROM_BACKUP, LOGGER, ) -from .models import AgentBackup, Folder +from .models import AgentBackup, BackupManagerError, Folder from .store import BackupStore from .util import make_backup_dir, read_backup, validate_password -class IncorrectPasswordError(HomeAssistantError): - """Raised when the password is incorrect.""" - - @dataclass(frozen=True, kw_only=True, slots=True) class NewBackup: """New backup class.""" @@ -245,6 +241,14 @@ class BackupReaderWriter(abc.ABC): """Restore a backup.""" +class BackupReaderWriterError(HomeAssistantError): + """Backup reader/writer error.""" + + +class IncorrectPasswordError(BackupReaderWriterError): + """Raised when the password is incorrect.""" + + class BackupManager: """Define the format that backup managers can have.""" @@ -373,7 +377,9 @@ class BackupManager: ) for result in pre_backup_results: if isinstance(result, Exception): - raise result + raise BackupManagerError( + f"Error during pre-backup: {result}" + ) from result async def 
async_post_backup_actions(self) -> None: """Perform post backup actions.""" @@ -386,7 +392,9 @@ class BackupManager: ) for result in post_backup_results: if isinstance(result, Exception): - raise result + raise BackupManagerError( + f"Error during post-backup: {result}" + ) from result async def load_platforms(self) -> None: """Load backup platforms.""" @@ -422,11 +430,21 @@ class BackupManager: return_exceptions=True, ) for idx, result in enumerate(sync_backup_results): - if isinstance(result, Exception): + if isinstance(result, BackupReaderWriterError): + # writer errors will affect all agents + # no point in continuing + raise BackupManagerError(str(result)) from result + if isinstance(result, BackupAgentError): agent_errors[agent_ids[idx]] = result - LOGGER.exception( - "Error during backup upload - %s", result, exc_info=result - ) + continue + if isinstance(result, Exception): + # trap bugs from agents + agent_errors[agent_ids[idx]] = result + LOGGER.error("Unexpected error: %s", result, exc_info=result) + continue + if isinstance(result, BaseException): + raise result + return agent_errors async def async_get_backups( @@ -449,7 +467,7 @@ class BackupManager: agent_errors[agent_ids[idx]] = result continue if isinstance(result, BaseException): - raise result + raise result # unexpected error for agent_backup in result: if (backup_id := agent_backup.backup_id) not in backups: if known_backup := self.known_backups.get(backup_id): @@ -499,7 +517,7 @@ class BackupManager: agent_errors[agent_ids[idx]] = result continue if isinstance(result, BaseException): - raise result + raise result # unexpected error if not result: continue if backup is None: @@ -563,7 +581,7 @@ class BackupManager: agent_errors[agent_ids[idx]] = result continue if isinstance(result, BaseException): - raise result + raise result # unexpected error if not agent_errors: self.known_backups.remove(backup_id) @@ -578,7 +596,7 @@ class BackupManager: ) -> None: """Receive and store a backup file from upload.""" if self.state is not BackupManagerState.IDLE: - raise HomeAssistantError(f"Backup manager busy: {self.state}") + raise BackupManagerError(f"Backup manager busy: {self.state}") self.async_on_backup_event( ReceiveBackupEvent(stage=None, state=ReceiveBackupState.IN_PROGRESS) ) @@ -652,6 +670,7 @@ class BackupManager: include_homeassistant=include_homeassistant, name=name, password=password, + raise_task_error=True, with_automatic_settings=with_automatic_settings, ) assert self._backup_finish_task @@ -669,11 +688,12 @@ class BackupManager: include_homeassistant: bool, name: str | None, password: str | None, + raise_task_error: bool = False, with_automatic_settings: bool = False, ) -> NewBackup: """Initiate generating a backup.""" if self.state is not BackupManagerState.IDLE: - raise HomeAssistantError(f"Backup manager busy: {self.state}") + raise BackupManagerError(f"Backup manager busy: {self.state}") if with_automatic_settings: self.config.data.last_attempted_automatic_backup = dt_util.now() @@ -692,6 +712,7 @@ class BackupManager: include_homeassistant=include_homeassistant, name=name, password=password, + raise_task_error=raise_task_error, with_automatic_settings=with_automatic_settings, ) except Exception: @@ -714,15 +735,18 @@ class BackupManager: include_homeassistant: bool, name: str | None, password: str | None, + raise_task_error: bool, with_automatic_settings: bool, ) -> NewBackup: """Initiate generating a backup.""" if not agent_ids: - raise HomeAssistantError("At least one agent must be selected") - if 
any(agent_id not in self.backup_agents for agent_id in agent_ids): - raise HomeAssistantError("Invalid agent selected") + raise BackupManagerError("At least one agent must be selected") + if invalid_agents := [ + agent_id for agent_id in agent_ids if agent_id not in self.backup_agents + ]: + raise BackupManagerError(f"Invalid agents selected: {invalid_agents}") if include_all_addons and include_addons: - raise HomeAssistantError( + raise BackupManagerError( "Cannot include all addons and specify specific addons" ) @@ -730,41 +754,64 @@ class BackupManager: name or f"{"Automatic" if with_automatic_settings else "Custom"} {HAVERSION}" ) - new_backup, self._backup_task = await self._reader_writer.async_create_backup( - agent_ids=agent_ids, - backup_name=backup_name, - extra_metadata={ - "instance_id": await instance_id.async_get(self.hass), - "with_automatic_settings": with_automatic_settings, - }, - include_addons=include_addons, - include_all_addons=include_all_addons, - include_database=include_database, - include_folders=include_folders, - include_homeassistant=include_homeassistant, - on_progress=self.async_on_backup_event, - password=password, - ) - self._backup_finish_task = self.hass.async_create_task( + + try: + ( + new_backup, + self._backup_task, + ) = await self._reader_writer.async_create_backup( + agent_ids=agent_ids, + backup_name=backup_name, + extra_metadata={ + "instance_id": await instance_id.async_get(self.hass), + "with_automatic_settings": with_automatic_settings, + }, + include_addons=include_addons, + include_all_addons=include_all_addons, + include_database=include_database, + include_folders=include_folders, + include_homeassistant=include_homeassistant, + on_progress=self.async_on_backup_event, + password=password, + ) + except BackupReaderWriterError as err: + raise BackupManagerError(str(err)) from err + + backup_finish_task = self._backup_finish_task = self.hass.async_create_task( self._async_finish_backup(agent_ids, with_automatic_settings), name="backup_manager_finish_backup", ) + if not raise_task_error: + + def log_finish_task_error(task: asyncio.Task[None]) -> None: + if task.done() and not task.cancelled() and (err := task.exception()): + if isinstance(err, BackupManagerError): + LOGGER.error("Error creating backup: %s", err) + else: + LOGGER.error("Unexpected error: %s", err, exc_info=err) + + backup_finish_task.add_done_callback(log_finish_task_error) + return new_backup async def _async_finish_backup( self, agent_ids: list[str], with_automatic_settings: bool ) -> None: + """Finish a backup.""" if TYPE_CHECKING: assert self._backup_task is not None try: written_backup = await self._backup_task - except Exception as err: # noqa: BLE001 - LOGGER.debug("Generating backup failed", exc_info=err) + except Exception as err: self.async_on_backup_event( CreateBackupEvent(stage=None, state=CreateBackupState.FAILED) ) if with_automatic_settings: self._update_issue_backup_failed() + + if isinstance(err, BackupReaderWriterError): + raise BackupManagerError(str(err)) from err + raise # unexpected error else: LOGGER.debug( "Generated new backup with backup_id %s, uploading to agents %s", @@ -777,25 +824,47 @@ class BackupManager: state=CreateBackupState.IN_PROGRESS, ) ) - agent_errors = await self._async_upload_backup( - backup=written_backup.backup, - agent_ids=agent_ids, - open_stream=written_backup.open_stream, - ) - await written_backup.release_stream() - if with_automatic_settings: - # create backup was successful, update last_completed_automatic_backup - 
self.config.data.last_completed_automatic_backup = dt_util.now() - self.store.save() - self._update_issue_after_agent_upload(agent_errors) - self.known_backups.add(written_backup.backup, agent_errors) + try: + agent_errors = await self._async_upload_backup( + backup=written_backup.backup, + agent_ids=agent_ids, + open_stream=written_backup.open_stream, + ) + except BaseException: + self.async_on_backup_event( + CreateBackupEvent(stage=None, state=CreateBackupState.FAILED) + ) + raise # manager or unexpected error + finally: + try: + await written_backup.release_stream() + except Exception: + self.async_on_backup_event( + CreateBackupEvent(stage=None, state=CreateBackupState.FAILED) + ) + raise + self.known_backups.add(written_backup.backup, agent_errors) + if agent_errors: + self.async_on_backup_event( + CreateBackupEvent(stage=None, state=CreateBackupState.FAILED) + ) + else: + if with_automatic_settings: + # create backup was successful, update last_completed_automatic_backup + self.config.data.last_completed_automatic_backup = dt_util.now() + self.store.save() + + self.async_on_backup_event( + CreateBackupEvent(stage=None, state=CreateBackupState.COMPLETED) + ) + + if with_automatic_settings: + self._update_issue_after_agent_upload(agent_errors) # delete old backups more numerous than copies + # try this regardless of agent errors above await delete_backups_exceeding_configured_count(self) - self.async_on_backup_event( - CreateBackupEvent(stage=None, state=CreateBackupState.COMPLETED) - ) finally: self._backup_task = None self._backup_finish_task = None @@ -814,7 +883,7 @@ class BackupManager: ) -> None: """Initiate restoring a backup.""" if self.state is not BackupManagerState.IDLE: - raise HomeAssistantError(f"Backup manager busy: {self.state}") + raise BackupManagerError(f"Backup manager busy: {self.state}") self.async_on_backup_event( RestoreBackupEvent(stage=None, state=RestoreBackupState.IN_PROGRESS) @@ -854,7 +923,7 @@ class BackupManager: """Initiate restoring a backup.""" agent = self.backup_agents[agent_id] if not await agent.async_get_backup(backup_id): - raise HomeAssistantError( + raise BackupManagerError( f"Backup {backup_id} not found in agent {agent_id}" ) @@ -1027,11 +1096,11 @@ class CoreBackupReaderWriter(BackupReaderWriter): backup_id = _generate_backup_id(date_str, backup_name) if include_addons or include_all_addons or include_folders: - raise HomeAssistantError( + raise BackupReaderWriterError( "Addons and folders are not supported by core backup" ) if not include_homeassistant: - raise HomeAssistantError("Home Assistant must be included in backup") + raise BackupReaderWriterError("Home Assistant must be included in backup") backup_task = self._hass.async_create_task( self._async_create_backup( @@ -1102,6 +1171,13 @@ class CoreBackupReaderWriter(BackupReaderWriter): password, local_agent_tar_file_path, ) + except (BackupManagerError, OSError, tarfile.TarError, ValueError) as err: + # BackupManagerError from async_pre_backup_actions + # OSError from file operations + # TarError from tarfile + # ValueError from json_bytes + raise BackupReaderWriterError(str(err)) from err + else: backup = AgentBackup( addons=[], backup_id=backup_id, @@ -1119,12 +1195,15 @@ class CoreBackupReaderWriter(BackupReaderWriter): async_add_executor_job = self._hass.async_add_executor_job async def send_backup() -> AsyncIterator[bytes]: - f = await async_add_executor_job(tar_file_path.open, "rb") try: - while chunk := await async_add_executor_job(f.read, 2**20): - yield chunk - finally: - 
await async_add_executor_job(f.close) + f = await async_add_executor_job(tar_file_path.open, "rb") + try: + while chunk := await async_add_executor_job(f.read, 2**20): + yield chunk + finally: + await async_add_executor_job(f.close) + except OSError as err: + raise BackupReaderWriterError(str(err)) from err async def open_backup() -> AsyncIterator[bytes]: return send_backup() @@ -1132,14 +1211,20 @@ class CoreBackupReaderWriter(BackupReaderWriter): async def remove_backup() -> None: if local_agent_tar_file_path: return - await async_add_executor_job(tar_file_path.unlink, True) + try: + await async_add_executor_job(tar_file_path.unlink, True) + except OSError as err: + raise BackupReaderWriterError(str(err)) from err return WrittenBackup( backup=backup, open_stream=open_backup, release_stream=remove_backup ) finally: # Inform integrations the backup is done - await manager.async_post_backup_actions() + try: + await manager.async_post_backup_actions() + except BackupManagerError as err: + raise BackupReaderWriterError(str(err)) from err def _mkdir_and_generate_backup_contents( self, @@ -1252,11 +1337,11 @@ class CoreBackupReaderWriter(BackupReaderWriter): """ if restore_addons or restore_folders: - raise HomeAssistantError( + raise BackupReaderWriterError( "Addons and folders are not supported in core restore" ) if not restore_homeassistant and not restore_database: - raise HomeAssistantError( + raise BackupReaderWriterError( "Home Assistant or database must be included in restore" ) diff --git a/homeassistant/components/backup/models.py b/homeassistant/components/backup/models.py index a937933f04c..81c00d699c6 100644 --- a/homeassistant/components/backup/models.py +++ b/homeassistant/components/backup/models.py @@ -6,6 +6,8 @@ from dataclasses import asdict, dataclass from enum import StrEnum from typing import Any, Self +from homeassistant.exceptions import HomeAssistantError + @dataclass(frozen=True, kw_only=True) class AddonInfo: @@ -67,3 +69,7 @@ class AgentBackup: protected=data["protected"], size=data["size"], ) + + +class BackupManagerError(HomeAssistantError): + """Backup manager error.""" diff --git a/homeassistant/components/hassio/backup.py b/homeassistant/components/hassio/backup.py index 1b7cf930588..9edffe985ae 100644 --- a/homeassistant/components/hassio/backup.py +++ b/homeassistant/components/hassio/backup.py @@ -10,6 +10,7 @@ from typing import Any, cast from aiohasupervisor.exceptions import ( SupervisorBadRequestError, + SupervisorError, SupervisorNotFoundError, ) from aiohasupervisor.models import ( @@ -23,6 +24,7 @@ from homeassistant.components.backup import ( AgentBackup, BackupAgent, BackupReaderWriter, + BackupReaderWriterError, CreateBackupEvent, Folder, NewBackup, @@ -233,20 +235,23 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): ] locations = [agent.location for agent in hassio_agents] - backup = await self._client.backups.partial_backup( - supervisor_backups.PartialBackupOptions( - addons=include_addons_set, - folders=include_folders_set, - homeassistant=include_homeassistant, - name=backup_name, - password=password, - compressed=True, - location=locations or LOCATION_CLOUD_BACKUP, - homeassistant_exclude_database=not include_database, - background=True, - extra=extra_metadata, + try: + backup = await self._client.backups.partial_backup( + supervisor_backups.PartialBackupOptions( + addons=include_addons_set, + folders=include_folders_set, + homeassistant=include_homeassistant, + name=backup_name, + password=password, + compressed=True, + 
location=locations or LOCATION_CLOUD_BACKUP, + homeassistant_exclude_database=not include_database, + background=True, + extra=extra_metadata, + ) ) - ) + except SupervisorError as err: + raise BackupReaderWriterError(f"Error creating backup: {err}") from err backup_task = self._hass.async_create_task( self._async_wait_for_backup( backup, remove_after_upload=not bool(locations) @@ -278,22 +283,35 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): finally: unsub() if not backup_id: - raise HomeAssistantError("Backup failed") + raise BackupReaderWriterError("Backup failed") async def open_backup() -> AsyncIterator[bytes]: - return await self._client.backups.download_backup(backup_id) + try: + return await self._client.backups.download_backup(backup_id) + except SupervisorError as err: + raise BackupReaderWriterError( + f"Error downloading backup: {err}" + ) from err async def remove_backup() -> None: if not remove_after_upload: return - await self._client.backups.remove_backup( - backup_id, - options=supervisor_backups.RemoveBackupOptions( - location={LOCATION_CLOUD_BACKUP} - ), - ) + try: + await self._client.backups.remove_backup( + backup_id, + options=supervisor_backups.RemoveBackupOptions( + location={LOCATION_CLOUD_BACKUP} + ), + ) + except SupervisorError as err: + raise BackupReaderWriterError(f"Error removing backup: {err}") from err - details = await self._client.backups.backup_info(backup_id) + try: + details = await self._client.backups.backup_info(backup_id) + except SupervisorError as err: + raise BackupReaderWriterError( + f"Error getting backup details: {err}" + ) from err return WrittenBackup( backup=_backup_details_to_agent_backup(details), diff --git a/tests/components/backup/common.py b/tests/components/backup/common.py index ffecd1c4186..4f456cc6d72 100644 --- a/tests/components/backup/common.py +++ b/tests/components/backup/common.py @@ -166,3 +166,15 @@ async def setup_backup_integration( agent._loaded_backups = True return result + + +async def setup_backup_platform( + hass: HomeAssistant, + *, + domain: str, + platform: Any, +) -> None: + """Set up a mock domain.""" + mock_platform(hass, f"{domain}.backup", platform) + assert await async_setup_component(hass, domain, {}) + await hass.async_block_till_done() diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index 9b652edb087..4b5f43edb82 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -4,6 +4,7 @@ from __future__ import annotations import asyncio from collections.abc import Generator +from dataclasses import replace from io import StringIO import json from pathlib import Path @@ -17,13 +18,15 @@ from homeassistant.components.backup import ( AgentBackup, BackupAgentPlatformProtocol, BackupManager, - BackupPlatformProtocol, + BackupReaderWriterError, Folder, LocalBackupAgent, backup as local_backup_platform, ) +from homeassistant.components.backup.agent import BackupAgentError from homeassistant.components.backup.const import DATA_MANAGER from homeassistant.components.backup.manager import ( + BackupManagerError, BackupManagerState, CoreBackupReaderWriter, CreateBackupEvent, @@ -42,9 +45,9 @@ from .common import ( TEST_BACKUP_ABC123, TEST_BACKUP_DEF456, BackupAgentTest, + setup_backup_platform, ) -from tests.common import MockPlatform, mock_platform from tests.typing import ClientSessionGenerator, WebSocketGenerator _EXPECTED_FILES = [ @@ -61,18 +64,6 @@ _EXPECTED_FILES_WITH_DATABASE = { } -async def 
_setup_backup_platform( - hass: HomeAssistant, - *, - domain: str = "some_domain", - platform: BackupPlatformProtocol | BackupAgentPlatformProtocol | None = None, -) -> None: - """Set up a mock domain.""" - mock_platform(hass, f"{domain}.backup", platform or MockPlatform()) - assert await async_setup_component(hass, domain, {}) - await hass.async_block_till_done() - - @pytest.fixture(autouse=True) def mock_delay_save() -> Generator[None]: """Mock the delay save constant.""" @@ -159,12 +150,15 @@ async def test_async_create_backup_when_backing_up(hass: HomeAssistant) -> None: ("parameters", "expected_error"), [ ({"agent_ids": []}, "At least one agent must be selected"), - ({"agent_ids": ["non_existing"]}, "Invalid agent selected"), + ({"agent_ids": ["non_existing"]}, "Invalid agents selected: ['non_existing']"), ( {"include_addons": ["ssl"], "include_all_addons": True}, "Cannot include all addons and specify specific addons", ), - ({"include_homeassistant": False}, "Home Assistant must be included in backup"), + ( + {"include_homeassistant": False}, + "Home Assistant must be included in backup", + ), ], ) async def test_create_backup_wrong_parameters( @@ -242,7 +236,7 @@ async def test_async_initiate_backup( core_get_backup_agents.return_value = [local_agent] await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() - await _setup_backup_platform( + await setup_backup_platform( hass, domain="test", platform=Mock( @@ -393,19 +387,96 @@ async def test_async_initiate_backup( @pytest.mark.usefixtures("mock_backup_generation") +@pytest.mark.parametrize("exception", [BackupAgentError("Boom!"), Exception("Boom!")]) async def test_async_initiate_backup_with_agent_error( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - mocked_json_bytes: Mock, - mocked_tarfile: Mock, generate_backup_id: MagicMock, path_glob: MagicMock, hass_storage: dict[str, Any], + exception: Exception, ) -> None: - """Test generate backup.""" + """Test agent upload error during backup generation.""" agent_ids = [LOCAL_AGENT_ID, "test.remote"] local_agent = local_backup_platform.CoreLocalBackupAgent(hass) - remote_agent = BackupAgentTest("remote", backups=[]) + backup_1 = replace(TEST_BACKUP_ABC123, backup_id="backup1") # matching instance id + backup_2 = replace(TEST_BACKUP_DEF456, backup_id="backup2") # other instance id + backup_3 = replace(TEST_BACKUP_ABC123, backup_id="backup3") # matching instance id + backups_info: list[dict[str, Any]] = [ + { + "addons": [ + { + "name": "Test", + "slug": "test", + "version": "1.0.0", + }, + ], + "agent_ids": [ + "test.remote", + ], + "backup_id": "backup1", + "database_included": True, + "date": "1970-01-01T00:00:00.000Z", + "failed_agent_ids": [], + "folders": [ + "media", + "share", + ], + "homeassistant_included": True, + "homeassistant_version": "2024.12.0", + "name": "Test", + "protected": False, + "size": 0, + "with_automatic_settings": True, + }, + { + "addons": [], + "agent_ids": [ + "test.remote", + ], + "backup_id": "backup2", + "database_included": False, + "date": "1980-01-01T00:00:00.000Z", + "failed_agent_ids": [], + "folders": [ + "media", + "share", + ], + "homeassistant_included": True, + "homeassistant_version": "2024.12.0", + "name": "Test 2", + "protected": False, + "size": 1, + "with_automatic_settings": None, + }, + { + "addons": [ + { + "name": "Test", + "slug": "test", + "version": "1.0.0", + }, + ], + "agent_ids": [ + "test.remote", + ], + "backup_id": "backup3", + "database_included": True, + "date": "1970-01-01T00:00:00.000Z", + 
"failed_agent_ids": [], + "folders": [ + "media", + "share", + ], + "homeassistant_included": True, + "homeassistant_version": "2024.12.0", + "name": "Test", + "protected": False, + "size": 0, + "with_automatic_settings": True, + }, + ] + remote_agent = BackupAgentTest("remote", backups=[backup_1, backup_2, backup_3]) with patch( "homeassistant.components.backup.backup.async_get_backup_agents" @@ -413,7 +484,7 @@ async def test_async_initiate_backup_with_agent_error( core_get_backup_agents.return_value = [local_agent] await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() - await _setup_backup_platform( + await setup_backup_platform( hass, domain="test", platform=Mock( @@ -431,12 +502,18 @@ async def test_async_initiate_backup_with_agent_error( assert result["success"] is True assert result["result"] == { - "backups": [], + "backups": backups_info, "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, } + await ws_client.send_json_auto_id( + {"type": "backup/config/update", "retention": {"copies": 1, "days": None}} + ) + result = await ws_client.receive_json() + assert result["success"] + await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) result = await ws_client.receive_json() @@ -445,11 +522,16 @@ async def test_async_initiate_backup_with_agent_error( result = await ws_client.receive_json() assert result["success"] is True + delete_backup = AsyncMock() + with ( patch("pathlib.Path.open", mock_open(read_data=b"test")), patch.object( - remote_agent, "async_upload_backup", side_effect=Exception("Test exception") + remote_agent, + "async_upload_backup", + side_effect=exception, ), + patch.object(remote_agent, "async_delete_backup", delete_backup), ): await ws_client.send_json_auto_id( {"type": "backup/generate", "agent_ids": agent_ids} @@ -486,13 +568,13 @@ async def test_async_initiate_backup_with_agent_error( assert result["event"] == { "manager_state": BackupManagerState.CREATE_BACKUP, "stage": None, - "state": CreateBackupState.COMPLETED, + "state": CreateBackupState.FAILED, } result = await ws_client.receive_json() assert result["event"] == {"manager_state": BackupManagerState.IDLE} - expected_backup_data = { + new_expected_backup_data = { "addons": [], "agent_ids": ["backup.local"], "backup_id": "abc123", @@ -508,20 +590,14 @@ async def test_async_initiate_backup_with_agent_error( "with_automatic_settings": False, } - await ws_client.send_json_auto_id( - {"type": "backup/details", "backup_id": backup_id} - ) - result = await ws_client.receive_json() - assert result["result"] == { - "agent_errors": {}, - "backup": expected_backup_data, - } - await ws_client.send_json_auto_id({"type": "backup/info"}) result = await ws_client.receive_json() + backups_response = result["result"].pop("backups") + + assert len(backups_response) == 4 + assert new_expected_backup_data in backups_response assert result["result"] == { "agent_errors": {}, - "backups": [expected_backup_data], "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, } @@ -534,6 +610,9 @@ async def test_async_initiate_backup_with_agent_error( } ] + # one of the two matching backups with the remote agent should have been deleted + assert delete_backup.call_count == 1 + @pytest.mark.usefixtures("mock_backup_generation") @pytest.mark.parametrize( @@ -702,7 +781,7 @@ async def test_create_backup_failure_raises_issue( await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() - await 
_setup_backup_platform( + await setup_backup_platform( hass, domain="test", platform=Mock( @@ -743,6 +822,337 @@ async def test_create_backup_failure_raises_issue( assert issue.translation_placeholders == issue_data["translation_placeholders"] +@pytest.mark.usefixtures("mock_backup_generation") +@pytest.mark.parametrize( + "exception", [BackupReaderWriterError("Boom!"), BaseException("Boom!")] +) +async def test_async_initiate_backup_non_agent_upload_error( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + generate_backup_id: MagicMock, + path_glob: MagicMock, + hass_storage: dict[str, Any], + exception: Exception, +) -> None: + """Test an unknown or writer upload error during backup generation.""" + hass_storage[DOMAIN] = { + "data": {}, + "key": DOMAIN, + "version": 1, + } + agent_ids = [LOCAL_AGENT_ID, "test.remote"] + local_agent = local_backup_platform.CoreLocalBackupAgent(hass) + remote_agent = BackupAgentTest("remote", backups=[]) + + with patch( + "homeassistant.components.backup.backup.async_get_backup_agents" + ) as core_get_backup_agents: + core_get_backup_agents.return_value = [local_agent] + await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + await setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) + + ws_client = await hass_ws_client(hass) + + path_glob.return_value = [] + + await ws_client.send_json_auto_id({"type": "backup/info"}) + result = await ws_client.receive_json() + + assert result["success"] is True + assert result["result"] == { + "backups": [], + "agent_errors": {}, + "last_attempted_automatic_backup": None, + "last_completed_automatic_backup": None, + } + + await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) + + result = await ws_client.receive_json() + assert result["event"] == {"manager_state": BackupManagerState.IDLE} + + result = await ws_client.receive_json() + assert result["success"] is True + + with ( + patch("pathlib.Path.open", mock_open(read_data=b"test")), + patch.object( + remote_agent, + "async_upload_backup", + side_effect=exception, + ), + ): + await ws_client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": agent_ids} + ) + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": None, + "state": CreateBackupState.IN_PROGRESS, + } + result = await ws_client.receive_json() + assert result["success"] is True + + backup_id = result["result"]["backup_job_id"] + assert backup_id == generate_backup_id.return_value + + await hass.async_block_till_done() + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": CreateBackupStage.HOME_ASSISTANT, + "state": CreateBackupState.IN_PROGRESS, + } + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": CreateBackupStage.UPLOAD_TO_AGENTS, + "state": CreateBackupState.IN_PROGRESS, + } + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": None, + "state": CreateBackupState.FAILED, + } + + result = await ws_client.receive_json() + assert result["event"] == {"manager_state": BackupManagerState.IDLE} + + assert not hass_storage[DOMAIN]["data"] + + 
+@pytest.mark.usefixtures("mock_backup_generation") +@pytest.mark.parametrize( + "exception", [BackupReaderWriterError("Boom!"), Exception("Boom!")] +) +async def test_async_initiate_backup_with_task_error( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + generate_backup_id: MagicMock, + path_glob: MagicMock, + create_backup: AsyncMock, + exception: Exception, +) -> None: + """Test backup task error during backup generation.""" + backup_task: asyncio.Future[Any] = asyncio.Future() + backup_task.set_exception(exception) + create_backup.return_value = (NewBackup(backup_job_id="abc123"), backup_task) + agent_ids = [LOCAL_AGENT_ID, "test.remote"] + local_agent = local_backup_platform.CoreLocalBackupAgent(hass) + remote_agent = BackupAgentTest("remote", backups=[]) + + with patch( + "homeassistant.components.backup.backup.async_get_backup_agents" + ) as core_get_backup_agents: + core_get_backup_agents.return_value = [local_agent] + await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + await setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) + + ws_client = await hass_ws_client(hass) + + path_glob.return_value = [] + + await ws_client.send_json_auto_id({"type": "backup/info"}) + result = await ws_client.receive_json() + + assert result["success"] is True + assert result["result"] == { + "backups": [], + "agent_errors": {}, + "last_attempted_automatic_backup": None, + "last_completed_automatic_backup": None, + } + + await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) + + result = await ws_client.receive_json() + assert result["event"] == {"manager_state": BackupManagerState.IDLE} + + result = await ws_client.receive_json() + assert result["success"] is True + + await ws_client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": agent_ids} + ) + await hass.async_block_till_done() + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": None, + "state": CreateBackupState.IN_PROGRESS, + } + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": None, + "state": CreateBackupState.FAILED, + } + + result = await ws_client.receive_json() + assert result["event"] == {"manager_state": BackupManagerState.IDLE} + + result = await ws_client.receive_json() + assert result["success"] is True + + backup_id = result["result"]["backup_job_id"] + assert backup_id == generate_backup_id.return_value + + +@pytest.mark.usefixtures("mock_backup_generation") +@pytest.mark.parametrize( + ( + "open_call_count", + "open_exception", + "read_call_count", + "read_exception", + "close_call_count", + "close_exception", + "unlink_call_count", + "unlink_exception", + ), + [ + (1, OSError("Boom!"), 0, None, 0, None, 1, None), + (1, None, 1, OSError("Boom!"), 1, None, 1, None), + (1, None, 1, None, 1, OSError("Boom!"), 1, None), + (1, None, 1, None, 1, None, 1, OSError("Boom!")), + ], +) +async def test_initiate_backup_file_error( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + generate_backup_id: MagicMock, + path_glob: MagicMock, + open_call_count: int, + open_exception: Exception | None, + read_call_count: int, + read_exception: Exception | None, + close_call_count: int, + close_exception: Exception | None, + unlink_call_count: int, + unlink_exception: 
Exception | None, +) -> None: + """Test file error during generate backup.""" + agent_ids = ["test.remote"] + local_agent = local_backup_platform.CoreLocalBackupAgent(hass) + remote_agent = BackupAgentTest("remote", backups=[]) + with patch( + "homeassistant.components.backup.backup.async_get_backup_agents" + ) as core_get_backup_agents: + core_get_backup_agents.return_value = [local_agent] + await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + await setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) + + ws_client = await hass_ws_client(hass) + + path_glob.return_value = [] + + await ws_client.send_json_auto_id({"type": "backup/info"}) + result = await ws_client.receive_json() + + assert result["success"] is True + assert result["result"] == { + "backups": [], + "agent_errors": {}, + "last_attempted_automatic_backup": None, + "last_completed_automatic_backup": None, + } + + await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) + + result = await ws_client.receive_json() + assert result["event"] == {"manager_state": BackupManagerState.IDLE} + + result = await ws_client.receive_json() + assert result["success"] is True + + open_mock = mock_open(read_data=b"test") + open_mock.side_effect = open_exception + open_mock.return_value.read.side_effect = read_exception + open_mock.return_value.close.side_effect = close_exception + + with ( + patch("pathlib.Path.open", open_mock), + patch("pathlib.Path.unlink", side_effect=unlink_exception) as unlink_mock, + ): + await ws_client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": agent_ids} + ) + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": None, + "state": CreateBackupState.IN_PROGRESS, + } + result = await ws_client.receive_json() + assert result["success"] is True + + backup_id = result["result"]["backup_job_id"] + assert backup_id == generate_backup_id.return_value + + await hass.async_block_till_done() + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": CreateBackupStage.HOME_ASSISTANT, + "state": CreateBackupState.IN_PROGRESS, + } + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": CreateBackupStage.UPLOAD_TO_AGENTS, + "state": CreateBackupState.IN_PROGRESS, + } + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": None, + "state": CreateBackupState.FAILED, + } + + result = await ws_client.receive_json() + assert result["event"] == {"manager_state": BackupManagerState.IDLE} + + assert open_mock.call_count == open_call_count + assert open_mock.return_value.read.call_count == read_call_count + assert open_mock.return_value.close.call_count == close_call_count + assert unlink_mock.call_count == unlink_call_count + + async def test_loading_platforms( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -754,8 +1164,9 @@ async def test_loading_platforms( get_agents_mock = AsyncMock(return_value=[]) - await _setup_backup_platform( + await setup_backup_platform( hass, + domain="test", platform=Mock( async_pre_backup=AsyncMock(), async_post_backup=AsyncMock(), @@ -776,7 +1187,7 @@ class 
LocalBackupAgentTest(BackupAgentTest, LocalBackupAgent): def get_backup_path(self, backup_id: str) -> Path: """Return the local path to a backup.""" - return "test.tar" + return Path("test.tar") @pytest.mark.parametrize( @@ -797,7 +1208,7 @@ async def test_loading_platform_with_listener( get_agents_mock = AsyncMock(return_value=[agent_class("remote1", backups=[])]) register_listener_mock = Mock() - await _setup_backup_platform( + await setup_backup_platform( hass, domain="test", platform=Mock( @@ -846,7 +1257,7 @@ async def test_not_loading_bad_platforms( platform_mock: Mock, ) -> None: """Test not loading bad backup platforms.""" - await _setup_backup_platform( + await setup_backup_platform( hass, domain="test", platform=platform_mock, @@ -857,16 +1268,14 @@ async def test_not_loading_bad_platforms( assert platform_mock.mock_calls == [] -async def test_exception_platform_pre( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: +async def test_exception_platform_pre(hass: HomeAssistant) -> None: """Test exception in pre step.""" async def _mock_step(hass: HomeAssistant) -> None: raise HomeAssistantError("Test exception") remote_agent = BackupAgentTest("remote", backups=[]) - await _setup_backup_platform( + await setup_backup_platform( hass, domain="test", platform=Mock( @@ -878,28 +1287,25 @@ async def test_exception_platform_pre( assert await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() - await hass.services.async_call( - DOMAIN, - "create", - blocking=True, - ) + with pytest.raises(BackupManagerError) as err: + await hass.services.async_call( + DOMAIN, + "create", + blocking=True, + ) - assert "Generating backup failed" in caplog.text - assert "Test exception" in caplog.text + assert str(err.value) == "Error during pre-backup: Test exception" @pytest.mark.usefixtures("mock_backup_generation") -async def test_exception_platform_post( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: +async def test_exception_platform_post(hass: HomeAssistant) -> None: """Test exception in post step.""" async def _mock_step(hass: HomeAssistant) -> None: raise HomeAssistantError("Test exception") remote_agent = BackupAgentTest("remote", backups=[]) - await _setup_backup_platform( + await setup_backup_platform( hass, domain="test", platform=Mock( @@ -911,14 +1317,14 @@ async def test_exception_platform_post( assert await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() - await hass.services.async_call( - DOMAIN, - "create", - blocking=True, - ) + with pytest.raises(BackupManagerError) as err: + await hass.services.async_call( + DOMAIN, + "create", + blocking=True, + ) - assert "Generating backup failed" in caplog.text - assert "Test exception" in caplog.text + assert str(err.value) == "Error during post-backup: Test exception" @pytest.mark.parametrize( @@ -974,7 +1380,7 @@ async def test_receive_backup( ) -> None: """Test receive backup and upload to the local and a remote agent.""" remote_agent = BackupAgentTest("remote", backups=[]) - await _setup_backup_platform( + await setup_backup_platform( hass, domain="test", platform=Mock( @@ -1098,8 +1504,8 @@ async def test_async_trigger_restore( manager = BackupManager(hass, CoreBackupReaderWriter(hass)) hass.data[DATA_MANAGER] = manager - await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform) - await _setup_backup_platform( + await setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform) + await setup_backup_platform( hass, 
domain="test", platform=Mock( @@ -1156,8 +1562,8 @@ async def test_async_trigger_restore_wrong_password(hass: HomeAssistant) -> None manager = BackupManager(hass, CoreBackupReaderWriter(hass)) hass.data[DATA_MANAGER] = manager - await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform) - await _setup_backup_platform( + await setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform) + await setup_backup_platform( hass, domain="test", platform=Mock( @@ -1228,7 +1634,7 @@ async def test_async_trigger_restore_wrong_parameters( """Test trigger restore.""" manager = BackupManager(hass, CoreBackupReaderWriter(hass)) - await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform) + await setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform) await manager.load_platforms() local_agent = manager.backup_agents[LOCAL_AGENT_ID] diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index b407241be54..a3b29a55ad8 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -2,13 +2,19 @@ from collections.abc import Generator from typing import Any -from unittest.mock import ANY, AsyncMock, MagicMock, call, patch +from unittest.mock import ANY, AsyncMock, MagicMock, Mock, call, patch from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion -from homeassistant.components.backup import AgentBackup, BackupAgentError +from homeassistant.components.backup import ( + AgentBackup, + BackupAgentError, + BackupAgentPlatformProtocol, + BackupReaderWriterError, + Folder, +) from homeassistant.components.backup.agent import BackupAgentUnreachableError from homeassistant.components.backup.const import DATA_MANAGER, DOMAIN from homeassistant.components.backup.manager import ( @@ -19,6 +25,7 @@ from homeassistant.components.backup.manager import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.setup import async_setup_component from .common import ( LOCAL_AGENT_ID, @@ -26,6 +33,7 @@ from .common import ( TEST_BACKUP_DEF456, BackupAgentTest, setup_backup_integration, + setup_backup_platform, ) from tests.common import async_fire_time_changed, async_mock_service @@ -472,27 +480,45 @@ async def test_generate_calls_create( ) -@pytest.mark.usefixtures("mock_backup_generation") @pytest.mark.parametrize( - ("create_backup_settings", "expected_call_params"), + ( + "create_backup_settings", + "expected_call_params", + "side_effect", + "last_completed_automatic_backup", + ), [ ( - {}, { - "agent_ids": [], + "agent_ids": ["test.remote"], + "include_addons": None, + "include_all_addons": False, + "include_database": True, + "include_folders": None, + "name": None, + "password": None, + }, + { + "agent_ids": ["test.remote"], + "backup_name": ANY, + "extra_metadata": { + "instance_id": ANY, + "with_automatic_settings": True, + }, "include_addons": None, "include_all_addons": False, "include_database": True, "include_folders": None, "include_homeassistant": True, - "name": None, + "on_progress": ANY, "password": None, - "with_automatic_settings": True, }, + None, + "2024-11-13T12:01:01+01:00", ), ( { - "agent_ids": ["test-agent"], + "agent_ids": ["test.remote"], "include_addons": ["test-addon"], "include_all_addons": False, "include_database": True, @@ -501,32 +527,78 @@ async def test_generate_calls_create( "password": "test-password", }, { - 
"agent_ids": ["test-agent"], + "agent_ids": ["test.remote"], + "backup_name": "test-name", + "extra_metadata": { + "instance_id": ANY, + "with_automatic_settings": True, + }, "include_addons": ["test-addon"], "include_all_addons": False, "include_database": True, - "include_folders": ["media"], + "include_folders": [Folder.MEDIA], "include_homeassistant": True, - "name": "test-name", + "on_progress": ANY, "password": "test-password", - "with_automatic_settings": True, }, + None, + "2024-11-13T12:01:01+01:00", + ), + ( + { + "agent_ids": ["test.remote"], + "include_addons": None, + "include_all_addons": False, + "include_database": True, + "include_folders": None, + "name": None, + "password": None, + }, + { + "agent_ids": ["test.remote"], + "backup_name": ANY, + "extra_metadata": { + "instance_id": ANY, + "with_automatic_settings": True, + }, + "include_addons": None, + "include_all_addons": False, + "include_database": True, + "include_folders": None, + "include_homeassistant": True, + "on_progress": ANY, + "password": None, + }, + BackupAgentError("Boom!"), + None, ), ], ) async def test_generate_with_default_settings_calls_create( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, + hass_storage: dict[str, Any], freezer: FrozenDateTimeFactory, - snapshot: SnapshotAssertion, + create_backup: AsyncMock, create_backup_settings: dict[str, Any], expected_call_params: dict[str, Any], + side_effect: Exception | None, + last_completed_automatic_backup: str, ) -> None: """Test backup/generate_with_automatic_settings calls async_initiate_backup.""" - await setup_backup_integration(hass, with_hassio=False) - client = await hass_ws_client(hass) - freezer.move_to("2024-11-13 12:01:00+01:00") + await hass.config.async_set_time_zone("Europe/Amsterdam") + freezer.move_to("2024-11-13T12:01:00+01:00") + remote_agent = BackupAgentTest("remote", backups=[]) + await setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) + await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() await client.send_json_auto_id( @@ -535,17 +607,47 @@ async def test_generate_with_default_settings_calls_create( result = await client.receive_json() assert result["success"] - with patch( - "homeassistant.components.backup.manager.BackupManager.async_initiate_backup", - return_value=NewBackup(backup_job_id="abc123"), - ) as generate_backup: + freezer.tick() + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert ( + hass_storage[DOMAIN]["data"]["config"]["create_backup"] + == create_backup_settings + ) + assert ( + hass_storage[DOMAIN]["data"]["config"]["last_attempted_automatic_backup"] + is None + ) + assert ( + hass_storage[DOMAIN]["data"]["config"]["last_completed_automatic_backup"] + is None + ) + + with patch.object(remote_agent, "async_upload_backup", side_effect=side_effect): await client.send_json_auto_id( {"type": "backup/generate_with_automatic_settings"} ) result = await client.receive_json() assert result["success"] assert result["result"] == {"backup_job_id": "abc123"} - generate_backup.assert_called_once_with(**expected_call_params) + + await hass.async_block_till_done() + + create_backup.assert_called_once_with(**expected_call_params) + + freezer.tick() + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert ( + hass_storage[DOMAIN]["data"]["config"]["last_attempted_automatic_backup"] + == "2024-11-13T12:01:01+01:00" 
+ ) + assert ( + hass_storage[DOMAIN]["data"]["config"]["last_completed_automatic_backup"] + == last_completed_automatic_backup + ) @pytest.mark.parametrize( @@ -1193,7 +1295,23 @@ async def test_config_update_errors( 1, 2, BACKUP_CALL, - [Exception("Boom"), None], + [BackupReaderWriterError("Boom"), None], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "daily", + }, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-13T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", # attempted to create backup but failed + "2024-11-11T04:45:00+01:00", + 1, + 2, + BACKUP_CALL, + [Exception("Boom"), None], # unknown error ), ], ) @@ -2272,7 +2390,7 @@ async def test_subscribe_event( hass_ws_client: WebSocketGenerator, snapshot: SnapshotAssertion, ) -> None: - """Test generating a backup.""" + """Test subscribe event.""" await setup_backup_integration(hass, with_hassio=False) manager = hass.data[DATA_MANAGER] diff --git a/tests/components/cloud/test_backup.py b/tests/components/cloud/test_backup.py index 86b25d61d88..5d9513a1d1b 100644 --- a/tests/components/cloud/test_backup.py +++ b/tests/components/cloud/test_backup.py @@ -35,7 +35,10 @@ async def setup_integration( cloud_logged_in: None, ) -> AsyncGenerator[None]: """Set up cloud integration.""" - with patch("homeassistant.components.backup.is_hassio", return_value=False): + with ( + patch("homeassistant.components.backup.is_hassio", return_value=False), + patch("homeassistant.components.backup.store.STORE_DELAY_SAVE", 0), + ): assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}}) assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) await hass.async_block_till_done() @@ -345,7 +348,7 @@ async def test_agents_upload( homeassistant_version="2024.12.0", name="Test", protected=True, - size=0.0, + size=0, ) aioclient_mock.put(mock_get_upload_details.return_value["url"]) @@ -382,7 +385,7 @@ async def test_agents_upload( async def test_agents_upload_fail_put( hass: HomeAssistant, hass_client: ClientSessionGenerator, - caplog: pytest.LogCaptureFixture, + hass_storage: dict[str, Any], aioclient_mock: AiohttpClientMocker, mock_get_upload_details: Mock, put_mock_kwargs: dict[str, Any], @@ -401,7 +404,7 @@ async def test_agents_upload_fail_put( homeassistant_version="2024.12.0", name="Test", protected=True, - size=0.0, + size=0, ) aioclient_mock.put(mock_get_upload_details.return_value["url"], **put_mock_kwargs) @@ -421,9 +424,14 @@ async def test_agents_upload_fail_put( "/api/backup/upload?agent_id=cloud.cloud", data={"file": StringIO("test")}, ) + await hass.async_block_till_done() assert resp.status == 201 - assert "Error during backup upload - Failed to upload backup" in caplog.text + store_backups = hass_storage[BACKUP_DOMAIN]["data"]["backups"] + assert len(store_backups) == 1 + stored_backup = store_backups[0] + assert stored_backup["backup_id"] == backup_id + assert stored_backup["failed_agent_ids"] == ["cloud.cloud"] @pytest.mark.parametrize("side_effect", [ClientError, CloudError]) @@ -431,9 +439,9 @@ async def test_agents_upload_fail_put( async def test_agents_upload_fail_cloud( hass: HomeAssistant, hass_client: ClientSessionGenerator, + hass_storage: dict[str, Any], mock_get_upload_details: Mock, side_effect: Exception, - caplog: pytest.LogCaptureFixture, ) -> None: """Test agent upload backup, when cloud user is logged in.""" client = await hass_client() @@ -450,7 +458,7 @@ async def test_agents_upload_fail_cloud( 
homeassistant_version="2024.12.0", name="Test", protected=True, - size=0.0, + size=0, ) with ( patch( @@ -468,15 +476,20 @@ async def test_agents_upload_fail_cloud( "/api/backup/upload?agent_id=cloud.cloud", data={"file": StringIO("test")}, ) + await hass.async_block_till_done() assert resp.status == 201 - assert "Error during backup upload - Failed to get upload details" in caplog.text + store_backups = hass_storage[BACKUP_DOMAIN]["data"]["backups"] + assert len(store_backups) == 1 + stored_backup = store_backups[0] + assert stored_backup["backup_id"] == backup_id + assert stored_backup["failed_agent_ids"] == ["cloud.cloud"] async def test_agents_upload_not_protected( hass: HomeAssistant, hass_client: ClientSessionGenerator, - caplog: pytest.LogCaptureFixture, + hass_storage: dict[str, Any], ) -> None: """Test agent upload backup, when cloud user is logged in.""" client = await hass_client() @@ -492,7 +505,7 @@ async def test_agents_upload_not_protected( homeassistant_version="2024.12.0", name="Test", protected=False, - size=0.0, + size=0, ) with ( patch("pathlib.Path.open"), @@ -505,9 +518,14 @@ async def test_agents_upload_not_protected( "/api/backup/upload?agent_id=cloud.cloud", data={"file": StringIO("test")}, ) + await hass.async_block_till_done() assert resp.status == 201 - assert "Error during backup upload - Cloud backups must be protected" in caplog.text + store_backups = hass_storage[BACKUP_DOMAIN]["data"]["backups"] + assert len(store_backups) == 1 + stored_backup = store_backups[0] + assert stored_backup["backup_id"] == backup_id + assert stored_backup["failed_agent_ids"] == ["cloud.cloud"] @pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py index 3c9440c41ff..620532d30cf 100644 --- a/tests/components/hassio/test_backup.py +++ b/tests/components/hassio/test_backup.py @@ -16,6 +16,7 @@ from unittest.mock import ANY, AsyncMock, Mock, patch from aiohasupervisor.exceptions import ( SupervisorBadRequestError, + SupervisorError, SupervisorNotFoundError, ) from aiohasupervisor.models import ( @@ -46,7 +47,7 @@ TEST_BACKUP = supervisor_backups.Backup( compressed=False, content=supervisor_backups.BackupContent( addons=["ssl"], - folders=["share"], + folders=[supervisor_backups.Folder.SHARE], homeassistant=True, ), date=datetime.fromisoformat("1970-01-01T00:00:00Z"), @@ -71,7 +72,7 @@ TEST_BACKUP_DETAILS = supervisor_backups.BackupComplete( compressed=TEST_BACKUP.compressed, date=TEST_BACKUP.date, extra=None, - folders=["share"], + folders=[supervisor_backups.Folder.SHARE], homeassistant_exclude_database=False, homeassistant="2024.12.0", location=TEST_BACKUP.location, @@ -197,7 +198,7 @@ async def hassio_enabled( @pytest.fixture async def setup_integration( hass: HomeAssistant, hassio_enabled: None, supervisor_client: AsyncMock -) -> AsyncGenerator[None]: +) -> None: """Set up Backup integration.""" assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}}) await hass.async_block_till_done() @@ -451,7 +452,7 @@ async def test_agent_upload( homeassistant_version="2024.12.0", name="Test", protected=False, - size=0.0, + size=0, ) supervisor_client.backups.reload.assert_not_called() @@ -732,6 +733,292 @@ async def test_reader_writer_create( supervisor_client.backups.download_backup.assert_not_called() supervisor_client.backups.remove_backup.assert_not_called() + response = await client.receive_json() + assert response["event"] == {"manager_state": "idle"} + + 
+@pytest.mark.usefixtures("hassio_client", "setup_integration") +@pytest.mark.parametrize( + ("side_effect", "error_code", "error_message"), + [ + ( + SupervisorError("Boom!"), + "home_assistant_error", + "Error creating backup: Boom!", + ), + (Exception("Boom!"), "unknown_error", "Unknown error"), + ], +) +async def test_reader_writer_create_partial_backup_error( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, + side_effect: Exception, + error_code: str, + error_message: str, +) -> None: + """Test client partial backup error when generating a backup.""" + client = await hass_ws_client(hass) + supervisor_client.backups.partial_backup.side_effect = side_effect + + await client.send_json_auto_id({"type": "backup/subscribe_events"}) + response = await client.receive_json() + assert response["event"] == {"manager_state": "idle"} + response = await client.receive_json() + assert response["success"] + + await client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": ["hassio.local"], "name": "Test"} + ) + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "in_progress", + } + + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "failed", + } + + response = await client.receive_json() + assert response["event"] == {"manager_state": "idle"} + + response = await client.receive_json() + assert not response["success"] + assert response["error"]["code"] == error_code + assert response["error"]["message"] == error_message + + assert supervisor_client.backups.partial_backup.call_count == 1 + + +@pytest.mark.usefixtures("hassio_client", "setup_integration") +async def test_reader_writer_create_missing_reference_error( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test missing reference error when generating a backup.""" + client = await hass_ws_client(hass) + supervisor_client.backups.partial_backup.return_value.job_id = "abc123" + + await client.send_json_auto_id({"type": "backup/subscribe_events"}) + response = await client.receive_json() + assert response["event"] == {"manager_state": "idle"} + response = await client.receive_json() + assert response["success"] + + await client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": ["hassio.local"], "name": "Test"} + ) + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "in_progress", + } + + response = await client.receive_json() + assert response["success"] + assert response["result"] == {"backup_job_id": "abc123"} + + assert supervisor_client.backups.partial_backup.call_count == 1 + + await client.send_json_auto_id( + { + "type": "supervisor/event", + "data": { + "event": "job", + "data": {"done": True, "uuid": "abc123"}, + }, + } + ) + response = await client.receive_json() + assert response["success"] + + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "failed", + } + + await hass.async_block_till_done() + + assert supervisor_client.backups.backup_info.call_count == 0 + assert supervisor_client.backups.download_backup.call_count == 0 + assert supervisor_client.backups.remove_backup.call_count == 0 + + response = await client.receive_json() + assert response["event"] == {"manager_state": 
"idle"} + + +@pytest.mark.usefixtures("hassio_client", "setup_integration") +@pytest.mark.parametrize("exception", [SupervisorError("Boom!"), Exception("Boom!")]) +@pytest.mark.parametrize( + ("method", "download_call_count", "remove_call_count"), + [("download_backup", 1, 1), ("remove_backup", 1, 1)], +) +async def test_reader_writer_create_download_remove_error( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, + exception: Exception, + method: str, + download_call_count: int, + remove_call_count: int, +) -> None: + """Test download and remove error when generating a backup.""" + client = await hass_ws_client(hass) + supervisor_client.backups.partial_backup.return_value.job_id = "abc123" + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + method_mock = getattr(supervisor_client.backups, method) + method_mock.side_effect = exception + + remote_agent = BackupAgentTest("remote") + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) + + await client.send_json_auto_id({"type": "backup/subscribe_events"}) + response = await client.receive_json() + assert response["event"] == {"manager_state": "idle"} + response = await client.receive_json() + assert response["success"] + + await client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": ["test.remote"], "name": "Test"} + ) + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "in_progress", + } + + response = await client.receive_json() + assert response["success"] + assert response["result"] == {"backup_job_id": "abc123"} + + assert supervisor_client.backups.partial_backup.call_count == 1 + + await client.send_json_auto_id( + { + "type": "supervisor/event", + "data": { + "event": "job", + "data": {"done": True, "uuid": "abc123", "reference": "test_slug"}, + }, + } + ) + response = await client.receive_json() + assert response["success"] + + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": "upload_to_agents", + "state": "in_progress", + } + + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "failed", + } + + await hass.async_block_till_done() + + assert supervisor_client.backups.backup_info.call_count == 1 + assert supervisor_client.backups.download_backup.call_count == download_call_count + assert supervisor_client.backups.remove_backup.call_count == remove_call_count + + response = await client.receive_json() + assert response["event"] == {"manager_state": "idle"} + + +@pytest.mark.usefixtures("hassio_client", "setup_integration") +@pytest.mark.parametrize("exception", [SupervisorError("Boom!"), Exception("Boom!")]) +async def test_reader_writer_create_info_error( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, + exception: Exception, +) -> None: + """Test backup info error when generating a backup.""" + client = await hass_ws_client(hass) + supervisor_client.backups.partial_backup.return_value.job_id = "abc123" + supervisor_client.backups.backup_info.side_effect = exception + + remote_agent = BackupAgentTest("remote") + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + 
async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) + + await client.send_json_auto_id({"type": "backup/subscribe_events"}) + response = await client.receive_json() + assert response["event"] == {"manager_state": "idle"} + response = await client.receive_json() + assert response["success"] + + await client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": ["test.remote"], "name": "Test"} + ) + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "in_progress", + } + + response = await client.receive_json() + assert response["success"] + assert response["result"] == {"backup_job_id": "abc123"} + + assert supervisor_client.backups.partial_backup.call_count == 1 + + await client.send_json_auto_id( + { + "type": "supervisor/event", + "data": { + "event": "job", + "data": {"done": True, "uuid": "abc123", "reference": "test_slug"}, + }, + } + ) + response = await client.receive_json() + assert response["success"] + + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "failed", + } + + await hass.async_block_till_done() + + assert supervisor_client.backups.backup_info.call_count == 1 + assert supervisor_client.backups.download_backup.call_count == 0 + assert supervisor_client.backups.remove_backup.call_count == 0 + + response = await client.receive_json() + assert response["event"] == {"manager_state": "idle"} + @pytest.mark.usefixtures("hassio_client", "setup_integration") async def test_reader_writer_create_remote_backup( From ce7a0650e4c33a31ab3d1512830ceae099e8c96e Mon Sep 17 00:00:00 2001 From: Matthew FitzGerald-Chamberlain Date: Thu, 2 Jan 2025 01:39:57 -0600 Subject: [PATCH 1154/1198] Improve support for Aprilaire S86WMUPR (#133974) --- homeassistant/components/aprilaire/coordinator.py | 15 ++++++++++----- homeassistant/components/aprilaire/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/aprilaire/test_config_flow.py | 1 - 5 files changed, 13 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/aprilaire/coordinator.py b/homeassistant/components/aprilaire/coordinator.py index 737fd768140..6b132cfcc95 100644 --- a/homeassistant/components/aprilaire/coordinator.py +++ b/homeassistant/components/aprilaire/coordinator.py @@ -120,6 +120,8 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol): """Wait for the client to be ready.""" if not self.data or Attribute.MAC_ADDRESS not in self.data: + await self.client.read_mac_address() + data = await self.client.wait_for_response( FunctionalDomain.IDENTIFICATION, 2, WAIT_TIMEOUT ) @@ -130,12 +132,9 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol): return False - if not self.data or Attribute.NAME not in self.data: - await self.client.wait_for_response( - FunctionalDomain.IDENTIFICATION, 4, WAIT_TIMEOUT - ) - if not self.data or Attribute.THERMOSTAT_MODES not in self.data: + await self.client.read_thermostat_iaq_available() + await self.client.wait_for_response( FunctionalDomain.CONTROL, 7, WAIT_TIMEOUT ) @@ -144,10 +143,16 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol): not self.data or Attribute.INDOOR_TEMPERATURE_CONTROLLING_SENSOR_STATUS not in self.data ): + await self.client.read_sensors() + await self.client.wait_for_response( FunctionalDomain.SENSORS, 2, WAIT_TIMEOUT ) + await 
self.client.read_thermostat_status() + + await self.client.read_iaq_status() + await ready_callback(True) return True diff --git a/homeassistant/components/aprilaire/manifest.json b/homeassistant/components/aprilaire/manifest.json index 179a101885b..577de8ae88d 100644 --- a/homeassistant/components/aprilaire/manifest.json +++ b/homeassistant/components/aprilaire/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["pyaprilaire"], - "requirements": ["pyaprilaire==0.7.4"] + "requirements": ["pyaprilaire==0.7.7"] } diff --git a/requirements_all.txt b/requirements_all.txt index e57074933c0..4427d01f93b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1779,7 +1779,7 @@ pyairnow==1.2.1 pyairvisual==2023.08.1 # homeassistant.components.aprilaire -pyaprilaire==0.7.4 +pyaprilaire==0.7.7 # homeassistant.components.asuswrt pyasuswrt==0.1.21 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 223502ece25..7130ac0e6f6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1459,7 +1459,7 @@ pyairnow==1.2.1 pyairvisual==2023.08.1 # homeassistant.components.aprilaire -pyaprilaire==0.7.4 +pyaprilaire==0.7.7 # homeassistant.components.asuswrt pyasuswrt==0.1.21 diff --git a/tests/components/aprilaire/test_config_flow.py b/tests/components/aprilaire/test_config_flow.py index e4b7c167256..0cda1ed40ad 100644 --- a/tests/components/aprilaire/test_config_flow.py +++ b/tests/components/aprilaire/test_config_flow.py @@ -95,7 +95,6 @@ async def test_config_flow_data(client: AprilaireClient, hass: HomeAssistant) -> ) client.start_listen.assert_called_once() - client.wait_for_response.assert_any_call(FunctionalDomain.IDENTIFICATION, 4, 30) client.wait_for_response.assert_any_call(FunctionalDomain.CONTROL, 7, 30) client.wait_for_response.assert_any_call(FunctionalDomain.SENSORS, 2, 30) client.stop_listen.assert_called_once() From 554cdd1784836d765c2a27999f8ab9e781cfa511 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20=C5=A0trauch?= Date: Wed, 1 Jan 2025 13:10:40 +0100 Subject: [PATCH 1155/1198] Add new ID LAP-V201S-AEUR for Vital200S AirPurifier in Vesync integration (#133999) --- homeassistant/components/vesync/const.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/vesync/const.py b/homeassistant/components/vesync/const.py index 48215819ce5..b1bad8cfa11 100644 --- a/homeassistant/components/vesync/const.py +++ b/homeassistant/components/vesync/const.py @@ -56,6 +56,7 @@ SKU_TO_BASE_DEVICE = { "LAP-V201S-WEU": "Vital200S", # Alt ID Model Vital200S "LAP-V201S-WUS": "Vital200S", # Alt ID Model Vital200S "LAP-V201-AUSR": "Vital200S", # Alt ID Model Vital200S + "LAP-V201S-AEUR": "Vital200S", # Alt ID Model Vital200S "LAP-V201S-AUSR": "Vital200S", # Alt ID Model Vital200S "Vital100S": "Vital100S", "LAP-V102S-WUS": "Vital100S", # Alt ID Model Vital100S From fea3dfda9439370671cb53329511d1ae58565967 Mon Sep 17 00:00:00 2001 From: cdnninja Date: Wed, 1 Jan 2025 05:03:39 -0700 Subject: [PATCH 1156/1198] Vesync unload error when not all platforms used (#134166) --- homeassistant/components/vesync/__init__.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/vesync/__init__.py b/homeassistant/components/vesync/__init__.py index b6f263f3037..0993743d461 100644 --- a/homeassistant/components/vesync/__init__.py +++ b/homeassistant/components/vesync/__init__.py @@ -135,7 +135,18 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: 
ConfigEntry) -> b async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + in_use_platforms = [] + if hass.data[DOMAIN][VS_SWITCHES]: + in_use_platforms.append(Platform.SWITCH) + if hass.data[DOMAIN][VS_FANS]: + in_use_platforms.append(Platform.FAN) + if hass.data[DOMAIN][VS_LIGHTS]: + in_use_platforms.append(Platform.LIGHT) + if hass.data[DOMAIN][VS_SENSORS]: + in_use_platforms.append(Platform.SENSOR) + unload_ok = await hass.config_entries.async_unload_platforms( + entry, in_use_platforms + ) if unload_ok: hass.data.pop(DOMAIN) From 3a8f71a64a747377e30fa215480d089ec8bd3c88 Mon Sep 17 00:00:00 2001 From: Stefan Agner Date: Thu, 2 Jan 2025 11:37:25 +0100 Subject: [PATCH 1157/1198] Improve Supervisor backup error handling (#134346) * Raise Home Assistant error in case backup restore fails This change raises a Home Assistant error in case the backup restore fails. The Supervisor is checking some common issues before starting the actual restore in background. This early checks raise an exception (represented by a HTTP 400 error). This change catches such errors and raises a Home Assistant error with the message from the Supervisor exception. * Add test coverage --- homeassistant/components/hassio/backup.py | 32 ++++++---- tests/components/hassio/test_backup.py | 71 +++++++++++++++++++++++ 2 files changed, 92 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/hassio/backup.py b/homeassistant/components/hassio/backup.py index 9edffe985ae..e915e56622b 100644 --- a/homeassistant/components/hassio/backup.py +++ b/homeassistant/components/hassio/backup.py @@ -30,6 +30,9 @@ from homeassistant.components.backup import ( NewBackup, WrittenBackup, ) + +# pylint: disable-next=hass-component-root-import +from homeassistant.components.backup.manager import IncorrectPasswordError from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -403,17 +406,24 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): agent = cast(SupervisorBackupAgent, manager.backup_agents[agent_id]) restore_location = agent.location - job = await self._client.backups.partial_restore( - backup_id, - supervisor_backups.PartialRestoreOptions( - addons=restore_addons_set, - folders=restore_folders_set, - homeassistant=restore_homeassistant, - password=password, - background=True, - location=restore_location, - ), - ) + try: + job = await self._client.backups.partial_restore( + backup_id, + supervisor_backups.PartialRestoreOptions( + addons=restore_addons_set, + folders=restore_folders_set, + homeassistant=restore_homeassistant, + password=password, + background=True, + location=restore_location, + ), + ) + except SupervisorBadRequestError as err: + # Supervisor currently does not transmit machine parsable error types + message = err.args[0] + if message.startswith("Invalid password for backup"): + raise IncorrectPasswordError(message) from err + raise HomeAssistantError(message) from err restore_complete = asyncio.Event() diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py index 620532d30cf..5657193fc49 100644 --- a/tests/components/hassio/test_backup.py +++ b/tests/components/hassio/test_backup.py @@ -1284,6 +1284,77 @@ async def test_reader_writer_restore( assert response["result"] is None +@pytest.mark.parametrize( 
+ ("supervisor_error_string", "expected_error_code"), + [ + ( + "Invalid password for backup", + "password_incorrect", + ), + ( + "Backup was made on supervisor version 2025.12.0, can't restore on 2024.12.0. Must update supervisor first.", + "home_assistant_error", + ), + ], +) +@pytest.mark.usefixtures("hassio_client", "setup_integration") +async def test_reader_writer_restore_error( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, + supervisor_error_string: str, + expected_error_code: str, +) -> None: + """Test restoring a backup.""" + client = await hass_ws_client(hass) + supervisor_client.backups.partial_restore.side_effect = SupervisorBadRequestError( + supervisor_error_string + ) + supervisor_client.backups.list.return_value = [TEST_BACKUP] + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + + await client.send_json_auto_id({"type": "backup/subscribe_events"}) + response = await client.receive_json() + assert response["event"] == {"manager_state": "idle"} + response = await client.receive_json() + assert response["success"] + + await client.send_json_auto_id( + {"type": "backup/restore", "agent_id": "hassio.local", "backup_id": "abc123"} + ) + response = await client.receive_json() + assert response["event"] == { + "manager_state": "restore_backup", + "stage": None, + "state": "in_progress", + } + + supervisor_client.backups.partial_restore.assert_called_once_with( + "abc123", + supervisor_backups.PartialRestoreOptions( + addons=None, + background=True, + folders=None, + homeassistant=True, + location=None, + password=None, + ), + ) + + response = await client.receive_json() + assert response["event"] == { + "manager_state": "restore_backup", + "stage": None, + "state": "failed", + } + + response = await client.receive_json() + assert response["event"] == {"manager_state": "idle"} + + response = await client.receive_json() + assert response["error"]["code"] == expected_error_code + + @pytest.mark.parametrize( ("parameters", "expected_error"), [ From 568b637dc598f952cd7c9fe8578d8105ea5f6d2d Mon Sep 17 00:00:00 2001 From: Kenny Root Date: Wed, 1 Jan 2025 02:42:16 -0800 Subject: [PATCH 1158/1198] Bump zabbix-utils to 2.0.2 (#134373) --- homeassistant/components/zabbix/manifest.json | 2 +- requirements_all.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/zabbix/manifest.json b/homeassistant/components/zabbix/manifest.json index 86389d2b839..6707cb7ddb3 100644 --- a/homeassistant/components/zabbix/manifest.json +++ b/homeassistant/components/zabbix/manifest.json @@ -6,5 +6,5 @@ "iot_class": "local_polling", "loggers": ["zabbix_utils"], "quality_scale": "legacy", - "requirements": ["zabbix-utils==2.0.1"] + "requirements": ["zabbix-utils==2.0.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 4427d01f93b..22c4a7a55e3 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3085,7 +3085,7 @@ youtubeaio==1.1.5 yt-dlp[default]==2024.12.23 # homeassistant.components.zabbix -zabbix-utils==2.0.1 +zabbix-utils==2.0.2 # homeassistant.components.zamg zamg==0.3.6 From f97439eaab818446330c50eb610f5ca27cae20d6 Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Wed, 1 Jan 2025 21:09:15 +1000 Subject: [PATCH 1159/1198] Check vehicle metadata (#134381) --- homeassistant/components/teslemetry/__init__.py | 7 ++++++- tests/components/teslemetry/const.py | 16 ++++++++++++++++ 2 files changed, 22 insertions(+), 1 deletion(-) diff --git 
a/homeassistant/components/teslemetry/__init__.py b/homeassistant/components/teslemetry/__init__.py index 0b61120877a..5779283b955 100644 --- a/homeassistant/components/teslemetry/__init__.py +++ b/homeassistant/components/teslemetry/__init__.py @@ -85,6 +85,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) - scopes = calls[0]["scopes"] region = calls[0]["region"] + vehicle_metadata = calls[0]["vehicles"] products = calls[1]["response"] device_registry = dr.async_get(hass) @@ -102,7 +103,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) - ) for product in products: - if "vin" in product and Scope.VEHICLE_DEVICE_DATA in scopes: + if ( + "vin" in product + and vehicle_metadata.get(product["vin"], {}).get("access") + and Scope.VEHICLE_DEVICE_DATA in scopes + ): # Remove the protobuff 'cached_data' that we do not use to save memory product.pop("cached_data", None) vin = product["vin"] diff --git a/tests/components/teslemetry/const.py b/tests/components/teslemetry/const.py index bf483d576cd..46efed2153d 100644 --- a/tests/components/teslemetry/const.py +++ b/tests/components/teslemetry/const.py @@ -46,9 +46,25 @@ METADATA = { "energy_device_data", "energy_cmds", ], + "vehicles": { + "LRW3F7EK4NC700000": { + "proxy": False, + "access": True, + "polling": True, + "firmware": "2024.44.25", + } + }, } METADATA_NOSCOPE = { "uid": "abc-123", "region": "NA", "scopes": ["openid", "offline_access", "vehicle_device_data"], + "vehicles": { + "LRW3F7EK4NC700000": { + "proxy": False, + "access": True, + "polling": True, + "firmware": "2024.44.25", + } + }, } From 4cb413521db312eae8c22f5584402ccac14dcfa8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Krzysztof=20D=C4=85browski?= Date: Thu, 2 Jan 2025 11:38:12 +0100 Subject: [PATCH 1160/1198] Add state attributes translations to GIOS (#134390) --- homeassistant/components/gios/strings.json | 72 ++++++++++++++++++++++ 1 file changed, 72 insertions(+) diff --git a/homeassistant/components/gios/strings.json b/homeassistant/components/gios/strings.json index ee0f50ef40c..fc82f1c843d 100644 --- a/homeassistant/components/gios/strings.json +++ b/homeassistant/components/gios/strings.json @@ -34,6 +34,18 @@ "moderate": "Moderate", "good": "Good", "very_good": "Very good" + }, + "state_attributes": { + "options": { + "state": { + "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]", + "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]", + "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]", + "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]", + "good": "[%key:component::gios::entity::sensor::aqi::state::good%]", + "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]" + } + } } }, "c6h6": { @@ -51,6 +63,18 @@ "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]", "good": "[%key:component::gios::entity::sensor::aqi::state::good%]", "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]" + }, + "state_attributes": { + "options": { + "state": { + "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]", + "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]", + "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]", + "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]", + "good": "[%key:component::gios::entity::sensor::aqi::state::good%]", + "very_good": 
"[%key:component::gios::entity::sensor::aqi::state::very_good%]" + } + } } }, "o3_index": { @@ -62,6 +86,18 @@ "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]", "good": "[%key:component::gios::entity::sensor::aqi::state::good%]", "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]" + }, + "state_attributes": { + "options": { + "state": { + "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]", + "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]", + "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]", + "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]", + "good": "[%key:component::gios::entity::sensor::aqi::state::good%]", + "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]" + } + } } }, "pm10_index": { @@ -73,6 +109,18 @@ "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]", "good": "[%key:component::gios::entity::sensor::aqi::state::good%]", "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]" + }, + "state_attributes": { + "options": { + "state": { + "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]", + "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]", + "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]", + "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]", + "good": "[%key:component::gios::entity::sensor::aqi::state::good%]", + "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]" + } + } } }, "pm25_index": { @@ -84,6 +132,18 @@ "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]", "good": "[%key:component::gios::entity::sensor::aqi::state::good%]", "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]" + }, + "state_attributes": { + "options": { + "state": { + "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]", + "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]", + "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]", + "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]", + "good": "[%key:component::gios::entity::sensor::aqi::state::good%]", + "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]" + } + } } }, "so2_index": { @@ -95,6 +155,18 @@ "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]", "good": "[%key:component::gios::entity::sensor::aqi::state::good%]", "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]" + }, + "state_attributes": { + "options": { + "state": { + "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]", + "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]", + "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]", + "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]", + "good": "[%key:component::gios::entity::sensor::aqi::state::good%]", + "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]" + } + } } } } From 0e79c17cb8d4438b0ba56c61ed5283d2b4cb4ae8 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Thu, 2 Jan 2025 08:51:49 +0100 Subject: [PATCH 1161/1198] Fix SQL sensor name (#134414) --- homeassistant/components/sql/sensor.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git 
a/homeassistant/components/sql/sensor.py b/homeassistant/components/sql/sensor.py index 1d033728c0d..312b0cd345e 100644 --- a/homeassistant/components/sql/sensor.py +++ b/homeassistant/components/sql/sensor.py @@ -331,9 +331,16 @@ class SQLSensor(ManualTriggerSensorEntity): entry_type=DeviceEntryType.SERVICE, identifiers={(DOMAIN, unique_id)}, manufacturer="SQL", - name=self.name, + name=self._rendered.get(CONF_NAME), ) + @property + def name(self) -> str | None: + """Name of the entity.""" + if self.has_entity_name: + return self._attr_name + return self._rendered.get(CONF_NAME) + async def async_added_to_hass(self) -> None: """Call when entity about to be added to hass.""" await super().async_added_to_hass() From c9ba267fecd56ac443574cf8192b2c6cfdb59938 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Wed, 1 Jan 2025 20:03:17 -0600 Subject: [PATCH 1162/1198] Bump intents to 2025.1.1 (#134424) --- homeassistant/components/conversation/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- script/hassfest/docker/Dockerfile | 2 +- tests/components/conversation/snapshots/test_http.ambr | 1 + 6 files changed, 6 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index 4017ed82be1..979ea7538c4 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", "quality_scale": "internal", - "requirements": ["hassil==2.1.0", "home-assistant-intents==2024.12.20"] + "requirements": ["hassil==2.1.0", "home-assistant-intents==2025.1.1"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index c97dbe11d29..8f51b47ba30 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -36,7 +36,7 @@ hass-nabucasa==0.87.0 hassil==2.1.0 home-assistant-bluetooth==1.13.0 home-assistant-frontend==20241231.0 -home-assistant-intents==2024.12.20 +home-assistant-intents==2025.1.1 httpx==0.27.2 ifaddr==0.2.0 Jinja2==3.1.5 diff --git a/requirements_all.txt b/requirements_all.txt index 22c4a7a55e3..fb137a1d1ad 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1137,7 +1137,7 @@ holidays==0.63 home-assistant-frontend==20241231.0 # homeassistant.components.conversation -home-assistant-intents==2024.12.20 +home-assistant-intents==2025.1.1 # homeassistant.components.home_connect homeconnect==0.8.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 7130ac0e6f6..dee17173304 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -966,7 +966,7 @@ holidays==0.63 home-assistant-frontend==20241231.0 # homeassistant.components.conversation -home-assistant-intents==2024.12.20 +home-assistant-intents==2025.1.1 # homeassistant.components.home_connect homeconnect==0.8.0 diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index 52948484ed8..962ab58d981 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -23,7 +23,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.5.8,source=/uv,target=/bin/uv \ -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ -r /usr/src/homeassistant/requirements.txt \ stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.3 \ - PyTurboJPEG==1.7.5 
go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.1.0 home-assistant-intents==2024.12.20 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 + PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.1.0 home-assistant-intents==2025.1.1 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 LABEL "name"="hassfest" LABEL "maintainer"="Home Assistant " diff --git a/tests/components/conversation/snapshots/test_http.ambr b/tests/components/conversation/snapshots/test_http.ambr index ce3247fbbad..0de575790db 100644 --- a/tests/components/conversation/snapshots/test_http.ambr +++ b/tests/components/conversation/snapshots/test_http.ambr @@ -39,6 +39,7 @@ 'mn', 'ms', 'nb', + 'ne', 'nl', 'pl', 'pt', From ca6bae6b158f0da7b68e71f1ed48f70ea235fcea Mon Sep 17 00:00:00 2001 From: TheJulianJES Date: Thu, 2 Jan 2025 08:43:38 +0100 Subject: [PATCH 1163/1198] Bump ZHA to 0.0.44 (#134427) --- homeassistant/components/zha/manifest.json | 2 +- homeassistant/components/zha/strings.json | 9 +++++++++ requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 12 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index e396c8776e7..45d8f6bb25f 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -21,7 +21,7 @@ "zha", "universal_silabs_flasher" ], - "requirements": ["universal-silabs-flasher==0.0.25", "zha==0.0.43"], + "requirements": ["universal-silabs-flasher==0.0.25", "zha==0.0.44"], "usb": [ { "vid": "10C4", diff --git a/homeassistant/components/zha/strings.json b/homeassistant/components/zha/strings.json index 8e4d3f78eb4..da76c62e82e 100644 --- a/homeassistant/components/zha/strings.json +++ b/homeassistant/components/zha/strings.json @@ -879,6 +879,12 @@ }, "regulator_set_point": { "name": "Regulator set point" + }, + "detection_delay": { + "name": "Detection delay" + }, + "fading_time": { + "name": "Fading time" } }, "select": { @@ -1237,6 +1243,9 @@ }, "local_temperature_floor": { "name": "Floor temperature" + }, + "self_test": { + "name": "Self test result" } }, "switch": { diff --git a/requirements_all.txt b/requirements_all.txt index fb137a1d1ad..9d8a43694d8 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3100,7 +3100,7 @@ zeroconf==0.136.2 zeversolar==0.3.2 # homeassistant.components.zha -zha==0.0.43 +zha==0.0.44 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.13 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index dee17173304..d5076f45aa0 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2489,7 +2489,7 @@ zeroconf==0.136.2 zeversolar==0.3.2 # homeassistant.components.zha -zha==0.0.43 +zha==0.0.44 # homeassistant.components.zwave_js zwave-js-server-python==0.60.0 From 8ace126d9f602ba59311153e25d60a919f8fafc3 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 2 Jan 2025 17:52:50 +0100 Subject: [PATCH 1164/1198] Improve hassio backup create and restore parameter checks (#134434) --- homeassistant/components/hassio/backup.py | 17 +++- tests/components/hassio/test_backup.py | 98 ++++++++++++++++++++--- 2 files changed, 103 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/hassio/backup.py b/homeassistant/components/hassio/backup.py index e915e56622b..0abb0e0d953 100644 --- a/homeassistant/components/hassio/backup.py +++ b/homeassistant/components/hassio/backup.py @@ -218,6 +218,10 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): 
password: str | None, ) -> tuple[NewBackup, asyncio.Task[WrittenBackup]]: """Create a backup.""" + if not include_homeassistant and include_database: + raise HomeAssistantError( + "Cannot create a backup with database but without Home Assistant" + ) manager = self._hass.data[DATA_MANAGER] include_addons_set: supervisor_backups.AddonSet | set[str] | None = None @@ -380,8 +384,16 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): restore_homeassistant: bool, ) -> None: """Restore a backup.""" - if restore_homeassistant and not restore_database: - raise HomeAssistantError("Cannot restore Home Assistant without database") + manager = self._hass.data[DATA_MANAGER] + # The backup manager has already checked that the backup exists so we don't need to + # check that here. + backup = await manager.backup_agents[agent_id].async_get_backup(backup_id) + if ( + backup + and restore_homeassistant + and restore_database != backup.database_included + ): + raise HomeAssistantError("Restore database must match backup") if not restore_homeassistant and restore_database: raise HomeAssistantError("Cannot restore database without Home Assistant") restore_addons_set = set(restore_addons) if restore_addons else None @@ -391,7 +403,6 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): else None ) - manager = self._hass.data[DATA_MANAGER] restore_location: str | None if manager.backup_agents[agent_id].domain != DOMAIN: # Download the backup to the supervisor. Supervisor will clean up the backup diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py index 5657193fc49..10a804d983f 100644 --- a/tests/components/hassio/test_backup.py +++ b/tests/components/hassio/test_backup.py @@ -176,6 +176,51 @@ TEST_BACKUP_DETAILS_3 = supervisor_backups.BackupComplete( ) +TEST_BACKUP_4 = supervisor_backups.Backup( + compressed=False, + content=supervisor_backups.BackupContent( + addons=["ssl"], + folders=["share"], + homeassistant=True, + ), + date=datetime.fromisoformat("1970-01-01T00:00:00Z"), + location=None, + locations={None}, + name="Test", + protected=False, + size=1.0, + size_bytes=1048576, + slug="abc123", + type=supervisor_backups.BackupType.PARTIAL, +) +TEST_BACKUP_DETAILS_4 = supervisor_backups.BackupComplete( + addons=[ + supervisor_backups.BackupAddon( + name="Terminal & SSH", + size=0.0, + slug="core_ssh", + version="9.14.0", + ) + ], + compressed=TEST_BACKUP.compressed, + date=TEST_BACKUP.date, + extra=None, + folders=["share"], + homeassistant_exclude_database=True, + homeassistant="2024.12.0", + location=TEST_BACKUP.location, + locations=TEST_BACKUP.locations, + name=TEST_BACKUP.name, + protected=TEST_BACKUP.protected, + repositories=[], + size=TEST_BACKUP.size, + size_bytes=TEST_BACKUP.size_bytes, + slug=TEST_BACKUP.slug, + supervisor_version="2024.11.2", + type=TEST_BACKUP.type, +) + + @pytest.fixture(autouse=True) def fixture_supervisor_environ() -> Generator[None]: """Mock os environ for supervisor.""" @@ -662,8 +707,17 @@ DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions( replace(DEFAULT_BACKUP_OPTIONS, folders={"media", "share"}), ), ( - {"include_folders": ["media"], "include_homeassistant": False}, - replace(DEFAULT_BACKUP_OPTIONS, folders={"media"}, homeassistant=False), + { + "include_folders": ["media"], + "include_database": False, + "include_homeassistant": False, + }, + replace( + DEFAULT_BACKUP_OPTIONS, + folders={"media"}, + homeassistant=False, + homeassistant_exclude_database=True, + ), ), ], ) @@ -1100,9 +1154,22 @@ async def 
test_reader_writer_create_remote_backup( @pytest.mark.usefixtures("hassio_client", "setup_integration") @pytest.mark.parametrize( - ("extra_generate_options"), + ("extra_generate_options", "expected_error"), [ - {"include_homeassistant": False}, + ( + {"include_homeassistant": False}, + { + "code": "home_assistant_error", + "message": "Cannot create a backup with database but without Home Assistant", + }, + ), + ( + {"include_homeassistant": False, "include_database": False}, + { + "code": "unknown_error", + "message": "Unknown error", + }, + ), ], ) async def test_reader_writer_create_wrong_parameters( @@ -1110,6 +1177,7 @@ async def test_reader_writer_create_wrong_parameters( hass_ws_client: WebSocketGenerator, supervisor_client: AsyncMock, extra_generate_options: dict[str, Any], + expected_error: dict[str, str], ) -> None: """Test generating a backup.""" client = await hass_ws_client(hass) @@ -1147,7 +1215,7 @@ async def test_reader_writer_create_wrong_parameters( response = await client.receive_json() assert not response["success"] - assert response["error"] == {"code": "unknown_error", "message": "Unknown error"} + assert response["error"] == expected_error supervisor_client.backups.partial_backup.assert_not_called() @@ -1356,16 +1424,26 @@ async def test_reader_writer_restore_error( @pytest.mark.parametrize( - ("parameters", "expected_error"), + ("backup", "backup_details", "parameters", "expected_error"), [ ( + TEST_BACKUP, + TEST_BACKUP_DETAILS, {"restore_database": False}, - "Cannot restore Home Assistant without database", + "Restore database must match backup", ), ( + TEST_BACKUP, + TEST_BACKUP_DETAILS, {"restore_homeassistant": False}, "Cannot restore database without Home Assistant", ), + ( + TEST_BACKUP_4, + TEST_BACKUP_DETAILS_4, + {"restore_homeassistant": True, "restore_database": True}, + "Restore database must match backup", + ), ], ) @pytest.mark.usefixtures("hassio_client", "setup_integration") @@ -1373,13 +1451,15 @@ async def test_reader_writer_restore_wrong_parameters( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, supervisor_client: AsyncMock, + backup: supervisor_backups.Backup, + backup_details: supervisor_backups.BackupComplete, parameters: dict[str, Any], expected_error: str, ) -> None: """Test trigger restore.""" client = await hass_ws_client(hass) - supervisor_client.backups.list.return_value = [TEST_BACKUP] - supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + supervisor_client.backups.list.return_value = [backup] + supervisor_client.backups.backup_info.return_value = backup_details default_parameters = { "type": "backup/restore", From e89a1da46283ca9fc7f09384a8f7fc00688d52e0 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 2 Jan 2025 12:40:10 +0100 Subject: [PATCH 1165/1198] Export IncorrectPasswordError from backup integration (#134436) --- homeassistant/components/backup/__init__.py | 2 ++ homeassistant/components/hassio/backup.py | 4 +--- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/backup/__init__.py b/homeassistant/components/backup/__init__.py index 7d9979ce9a2..00b226a9fee 100644 --- a/homeassistant/components/backup/__init__.py +++ b/homeassistant/components/backup/__init__.py @@ -24,6 +24,7 @@ from .manager import ( BackupReaderWriterError, CoreBackupReaderWriter, CreateBackupEvent, + IncorrectPasswordError, ManagerBackup, NewBackup, WrittenBackup, @@ -43,6 +44,7 @@ __all__ = [ "BackupReaderWriterError", "CreateBackupEvent", "Folder", + "IncorrectPasswordError", 
"LocalBackupAgent", "NewBackup", "WrittenBackup", diff --git a/homeassistant/components/hassio/backup.py b/homeassistant/components/hassio/backup.py index 0abb0e0d953..537588e856a 100644 --- a/homeassistant/components/hassio/backup.py +++ b/homeassistant/components/hassio/backup.py @@ -27,12 +27,10 @@ from homeassistant.components.backup import ( BackupReaderWriterError, CreateBackupEvent, Folder, + IncorrectPasswordError, NewBackup, WrittenBackup, ) - -# pylint: disable-next=hass-component-root-import -from homeassistant.components.backup.manager import IncorrectPasswordError from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.dispatcher import async_dispatcher_connect From faf9c2ee401cd15e4fabfd0e088356bbfff5de54 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 2 Jan 2025 13:29:46 +0100 Subject: [PATCH 1166/1198] Adjust language in backup integration (#134440) * Adjust language in backup integration * Update tests --- homeassistant/components/backup/manager.py | 2 +- homeassistant/components/backup/strings.json | 4 ++-- tests/components/backup/snapshots/test_websocket.ambr | 6 +++--- tests/components/backup/test_manager.py | 6 +++--- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index 8421448f619..33405d97883 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -752,7 +752,7 @@ class BackupManager: backup_name = ( name - or f"{"Automatic" if with_automatic_settings else "Custom"} {HAVERSION}" + or f"{"Automatic" if with_automatic_settings else "Custom"} backup {HAVERSION}" ) try: diff --git a/homeassistant/components/backup/strings.json b/homeassistant/components/backup/strings.json index d9de2bff861..43ae57cc781 100644 --- a/homeassistant/components/backup/strings.json +++ b/homeassistant/components/backup/strings.json @@ -5,8 +5,8 @@ "description": "The automatic backup could not be created. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured." }, "automatic_backup_failed_upload_agents": { - "title": "Automatic backup could not be uploaded to agents", - "description": "The automatic backup could not be uploaded to agents {failed_agents}. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured." + "title": "Automatic backup could not be uploaded to the configured locations", + "description": "The automatic backup could not be uploaded to the configured locations {failed_agents}. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured." 
} }, "services": { diff --git a/tests/components/backup/snapshots/test_websocket.ambr b/tests/components/backup/snapshots/test_websocket.ambr index 16640a95ddb..98b2f764d43 100644 --- a/tests/components/backup/snapshots/test_websocket.ambr +++ b/tests/components/backup/snapshots/test_websocket.ambr @@ -2574,7 +2574,7 @@ dict({ 'id': 2, 'result': dict({ - 'backup_job_id': 'fceef4e6', + 'backup_job_id': '64331d85', }), 'success': True, 'type': 'result', @@ -2645,7 +2645,7 @@ dict({ 'id': 2, 'result': dict({ - 'backup_job_id': 'fceef4e6', + 'backup_job_id': '64331d85', }), 'success': True, 'type': 'result', @@ -2716,7 +2716,7 @@ dict({ 'id': 2, 'result': dict({ - 'backup_job_id': 'fceef4e6', + 'backup_job_id': '64331d85', }), 'success': True, 'type': 'result', diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index 4b5f43edb82..0797eef2274 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -112,7 +112,7 @@ async def test_async_create_backup( assert create_backup.called assert create_backup.call_args == call( agent_ids=["backup.local"], - backup_name="Custom 2025.1.0", + backup_name="Custom backup 2025.1.0", extra_metadata={ "instance_id": hass.data["core.uuid"], "with_automatic_settings": False, @@ -248,7 +248,7 @@ async def test_async_initiate_backup( ws_client = await hass_ws_client(hass) include_database = params.get("include_database", True) - name = params.get("name", "Custom 2025.1.0") + name = params.get("name", "Custom backup 2025.1.0") password = params.get("password") path_glob.return_value = [] @@ -584,7 +584,7 @@ async def test_async_initiate_backup_with_agent_error( "folders": [], "homeassistant_included": True, "homeassistant_version": "2025.1.0", - "name": "Custom 2025.1.0", + "name": "Custom backup 2025.1.0", "protected": False, "size": 123, "with_automatic_settings": False, From 21aca3c14643c245bad817c5484f8c2f1b631f7f Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 2 Jan 2025 12:49:03 +0100 Subject: [PATCH 1167/1198] Initialize AppleTVConfigFlow.identifiers (#134443) --- homeassistant/components/apple_tv/config_flow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/apple_tv/config_flow.py b/homeassistant/components/apple_tv/config_flow.py index b0741cc9c61..5cb92ed892a 100644 --- a/homeassistant/components/apple_tv/config_flow.py +++ b/homeassistant/components/apple_tv/config_flow.py @@ -98,7 +98,6 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 scan_filter: str | None = None - all_identifiers: set[str] atv: BaseConfig | None = None atv_identifiers: list[str] | None = None _host: str # host in zeroconf discovery info, should not be accessed by other flows @@ -118,6 +117,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize a new AppleTVConfigFlow.""" self.credentials: dict[int, str | None] = {} # Protocol -> credentials + self.all_identifiers: set[str] = set() @property def device_identifier(self) -> str | None: From 0a13516ddd380862fc9912d5b492289f426ad43c Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Thu, 2 Jan 2025 15:33:22 +0100 Subject: [PATCH 1168/1198] Bump aioacaia to 0.1.12 (#134454) --- homeassistant/components/acaia/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/acaia/manifest.json b/homeassistant/components/acaia/manifest.json 
index 36551e9c695..fef8c1219a8 100644 --- a/homeassistant/components/acaia/manifest.json +++ b/homeassistant/components/acaia/manifest.json @@ -26,5 +26,5 @@ "iot_class": "local_push", "loggers": ["aioacaia"], "quality_scale": "platinum", - "requirements": ["aioacaia==0.1.11"] + "requirements": ["aioacaia==0.1.12"] } diff --git a/requirements_all.txt b/requirements_all.txt index 9d8a43694d8..b1f9b9555d6 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -173,7 +173,7 @@ aio-geojson-usgs-earthquakes==0.3 aio-georss-gdacs==0.10 # homeassistant.components.acaia -aioacaia==0.1.11 +aioacaia==0.1.12 # homeassistant.components.airq aioairq==0.4.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d5076f45aa0..80b3772500b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -161,7 +161,7 @@ aio-geojson-usgs-earthquakes==0.3 aio-georss-gdacs==0.10 # homeassistant.components.acaia -aioacaia==0.1.11 +aioacaia==0.1.12 # homeassistant.components.airq aioairq==0.4.3 From d75d970fc7bfda78e86d2d757ee62e6284ad0177 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Thu, 2 Jan 2025 17:17:57 +0100 Subject: [PATCH 1169/1198] Update frontend to 20250102.0 (#134462) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index d1bb15b5d3b..33d1be3aad7 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20241231.0"] + "requirements": ["home-assistant-frontend==20250102.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 8f51b47ba30..d8372ab6bc1 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -35,7 +35,7 @@ habluetooth==3.6.0 hass-nabucasa==0.87.0 hassil==2.1.0 home-assistant-bluetooth==1.13.0 -home-assistant-frontend==20241231.0 +home-assistant-frontend==20250102.0 home-assistant-intents==2025.1.1 httpx==0.27.2 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index b1f9b9555d6..864a980e54c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1134,7 +1134,7 @@ hole==0.8.0 holidays==0.63 # homeassistant.components.frontend -home-assistant-frontend==20241231.0 +home-assistant-frontend==20250102.0 # homeassistant.components.conversation home-assistant-intents==2025.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 80b3772500b..252db100182 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -963,7 +963,7 @@ hole==0.8.0 holidays==0.63 # homeassistant.components.frontend -home-assistant-frontend==20241231.0 +home-assistant-frontend==20250102.0 # homeassistant.components.conversation home-assistant-intents==2025.1.1 From 59f866bcf7a222684b6c1cb4720f58ed8690f773 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 2 Jan 2025 17:21:58 +0000 Subject: [PATCH 1170/1198] Bump version to 2025.1.0b6 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index 
d44095629f0..3bf985cfea3 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -25,7 +25,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2025 MINOR_VERSION: Final = 1 -PATCH_VERSION: Final = "0b5" +PATCH_VERSION: Final = "0b6" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index a461427b070..cc2991c3837 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2025.1.0b5" +version = "2025.1.0b6" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" From 67ec71031d4cf1b349e9bd46c6ca571603142296 Mon Sep 17 00:00:00 2001 From: Andrea Arcangeli Date: Thu, 2 Jan 2025 18:37:36 +0000 Subject: [PATCH 1171/1198] open_meteo: correct UTC timezone handling in hourly forecast (#129664) Co-authored-by: G Johansson --- .../components/open_meteo/weather.py | 13 +- .../open_meteo/snapshots/test_weather.ambr | 1070 +++++++++++++++++ tests/components/open_meteo/test_weather.py | 46 + 3 files changed, 1125 insertions(+), 4 deletions(-) create mode 100644 tests/components/open_meteo/snapshots/test_weather.ambr create mode 100644 tests/components/open_meteo/test_weather.py diff --git a/homeassistant/components/open_meteo/weather.py b/homeassistant/components/open_meteo/weather.py index a2be81f0928..1faa66c56de 100644 --- a/homeassistant/components/open_meteo/weather.py +++ b/homeassistant/components/open_meteo/weather.py @@ -2,6 +2,8 @@ from __future__ import annotations +from datetime import datetime, time + from open_meteo import Forecast as OpenMeteoForecast from homeassistant.components.weather import ( @@ -107,8 +109,9 @@ class OpenMeteoWeatherEntity( daily = self.coordinator.data.daily for index, date in enumerate(self.coordinator.data.daily.time): + _datetime = datetime.combine(date=date, time=time(0), tzinfo=dt_util.UTC) forecast = Forecast( - datetime=date.isoformat(), + datetime=_datetime.isoformat(), ) if daily.weathercode is not None: @@ -155,12 +158,14 @@ class OpenMeteoWeatherEntity( today = dt_util.utcnow() hourly = self.coordinator.data.hourly - for index, datetime in enumerate(self.coordinator.data.hourly.time): - if dt_util.as_utc(datetime) < today: + for index, _datetime in enumerate(self.coordinator.data.hourly.time): + if _datetime.tzinfo is None: + _datetime = _datetime.replace(tzinfo=dt_util.UTC) + if _datetime < today: continue forecast = Forecast( - datetime=datetime.isoformat(), + datetime=_datetime.isoformat(), ) if hourly.weather_code is not None: diff --git a/tests/components/open_meteo/snapshots/test_weather.ambr b/tests/components/open_meteo/snapshots/test_weather.ambr new file mode 100644 index 00000000000..dd5beb56d77 --- /dev/null +++ b/tests/components/open_meteo/snapshots/test_weather.ambr @@ -0,0 +1,1070 @@ +# serializer version: 1 +# name: test_forecast_service[forecast_daily] + dict({ + 'weather.home': dict({ + 'forecast': list([ + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-24T00:00:00+00:00', + 'precipitation': 0.19, + 'temperature': 7.6, + 'templow': 5.5, + 'wind_bearing': 251, + 'wind_speed': 10.9, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-25T00:00:00+00:00', + 'precipitation': 0.29, + 'temperature': 5.4, + 'templow': 0.2, + 'wind_bearing': 210, 
+ 'wind_speed': 12.9, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-26T00:00:00+00:00', + 'precipitation': 0.76, + 'temperature': 4.8, + 'templow': 1.8, + 'wind_bearing': 230, + 'wind_speed': 14.8, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-27T00:00:00+00:00', + 'precipitation': 0.12, + 'temperature': 4.5, + 'templow': -0.1, + 'wind_bearing': 143, + 'wind_speed': 10.7, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-28T00:00:00+00:00', + 'precipitation': 0.15, + 'temperature': 3.4, + 'templow': -0.2, + 'wind_bearing': 143, + 'wind_speed': 7.0, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-11-29T00:00:00+00:00', + 'precipitation': 0.64, + 'temperature': 2.2, + 'templow': -0.5, + 'wind_bearing': 248, + 'wind_speed': 13.0, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-30T00:00:00+00:00', + 'precipitation': 1.74, + 'temperature': 3.0, + 'templow': -0.3, + 'wind_bearing': 256, + 'wind_speed': 16.1, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[forecast_hourly] + dict({ + 'weather.home': dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-24T03:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 6.6, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-24T04:00:00+00:00', + 'precipitation': 0.03, + 'temperature': 6.5, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-24T05:00:00+00:00', + 'precipitation': 0.01, + 'temperature': 6.4, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-24T06:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 6.4, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-24T07:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 6.2, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-24T08:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 5.9, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-24T09:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 6.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-24T10:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 6.5, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-24T11:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 6.9, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-24T12:00:00+00:00', + 'precipitation': 0.01, + 'temperature': 7.3, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-24T13:00:00+00:00', + 'precipitation': 0.01, + 'temperature': 7.6, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-24T14:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 7.6, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-24T15:00:00+00:00', + 'precipitation': 0.06, + 'temperature': 7.5, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-24T16:00:00+00:00', + 'precipitation': 0.06, + 'temperature': 7.2, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-24T17:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 6.9, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-24T18:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 6.6, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-24T19:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 6.4, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-24T20:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 6.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-24T21:00:00+00:00', + 'precipitation': 0.0, + 
'temperature': 5.9, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-24T22:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 5.8, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-24T23:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 5.5, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-25T00:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 5.4, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-25T01:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 5.1, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-25T02:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 5.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-25T03:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 4.9, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-11-25T04:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 4.2, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-11-25T05:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 3.2, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-11-25T06:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 2.2, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-11-25T07:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 1.5, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-11-25T08:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 0.8, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-25T09:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 0.2, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-25T10:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 0.6, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-25T11:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 1.8, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-25T12:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 2.7, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-25T13:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 3.8, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-25T14:00:00+00:00', + 'precipitation': 0.03, + 'temperature': 4.5, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-25T15:00:00+00:00', + 'precipitation': 0.07, + 'temperature': 4.5, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-25T16:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 4.4, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-25T17:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 4.2, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-25T18:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 3.9, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-25T19:00:00+00:00', + 'precipitation': 0.09, + 'temperature': 3.9, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-25T20:00:00+00:00', + 'precipitation': 0.09, + 'temperature': 4.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-25T21:00:00+00:00', + 'precipitation': 0.01, + 'temperature': 3.9, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-25T22:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 3.9, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-25T23:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 3.8, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-26T00:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 3.7, + }), + dict({ + 'condition': 'cloudy', 
+ 'datetime': '2021-11-26T01:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 3.6, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-26T02:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 3.4, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-26T03:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 3.3, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-26T04:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 3.2, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-26T05:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 3.2, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-26T06:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 3.1, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-26T07:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 3.1, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-26T08:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 3.1, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-26T09:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 3.1, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-26T10:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 3.5, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-26T11:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 3.9, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-26T12:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 4.3, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-26T13:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 4.6, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-26T14:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 4.8, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-26T15:00:00+00:00', + 'precipitation': 0.01, + 'temperature': 4.6, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-26T16:00:00+00:00', + 'precipitation': 0.1, + 'temperature': 4.0, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-26T17:00:00+00:00', + 'precipitation': 0.3, + 'temperature': 3.5, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-26T18:00:00+00:00', + 'precipitation': 0.2, + 'temperature': 3.3, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-26T19:00:00+00:00', + 'precipitation': 0.15, + 'temperature': 3.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-26T20:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 2.7, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-26T21:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 2.3, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-26T22:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 2.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-26T23:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 1.8, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-27T00:00:00+00:00', + 'precipitation': 0.01, + 'temperature': 1.6, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-27T01:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 1.2, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-27T02:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 0.7, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-27T03:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 0.4, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-27T04:00:00+00:00', + 'precipitation': 0.0, + 
'temperature': 0.3, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-27T05:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 0.2, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-27T06:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 0.2, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-11-27T07:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 0.1, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-27T08:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 0.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-27T09:00:00+00:00', + 'precipitation': 0.0, + 'temperature': -0.1, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-27T10:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 0.6, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-11-27T11:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 1.7, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-11-27T12:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 2.9, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-11-27T13:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 4.0, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-11-27T14:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 4.5, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-11-27T15:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 4.2, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-27T16:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 3.5, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-27T17:00:00+00:00', + 'precipitation': 0.1, + 'temperature': 3.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-27T18:00:00+00:00', + 'precipitation': 0.01, + 'temperature': 2.8, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-27T19:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 2.8, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-27T20:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 2.8, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-27T21:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 2.6, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-27T22:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 2.3, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-27T23:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 2.0, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-11-28T00:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 1.6, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-11-28T01:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 0.9, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-28T02:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 0.6, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-28T03:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 0.4, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-28T04:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 0.1, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-28T05:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 0.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-28T06:00:00+00:00', + 'precipitation': 0.0, + 'temperature': -0.2, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-28T07:00:00+00:00', + 'precipitation': 0.0, + 'temperature': -0.2, + }), + dict({ + 'condition': 'cloudy', + 
'datetime': '2021-11-28T08:00:00+00:00', + 'precipitation': 0.0, + 'temperature': -0.1, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-28T09:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 0.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-28T10:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 0.5, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-28T11:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 1.2, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-28T12:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 2.2, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-28T13:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 3.1, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-28T14:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 3.4, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-28T15:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 3.4, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-28T16:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 3.2, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-28T17:00:00+00:00', + 'precipitation': 0.05, + 'temperature': 3.0, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-28T18:00:00+00:00', + 'precipitation': 0.05, + 'temperature': 2.7, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-28T19:00:00+00:00', + 'precipitation': 0.05, + 'temperature': 2.2, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-28T20:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 1.9, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-28T21:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 1.5, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-28T22:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 1.1, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-28T23:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 0.9, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-29T00:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 0.8, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-29T01:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 0.6, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-29T02:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 0.4, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-29T03:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 0.2, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-29T04:00:00+00:00', + 'precipitation': 0.0, + 'temperature': -0.1, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-29T05:00:00+00:00', + 'precipitation': 0.0, + 'temperature': -0.3, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-29T06:00:00+00:00', + 'precipitation': 0.0, + 'temperature': -0.4, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-29T07:00:00+00:00', + 'precipitation': 0.0, + 'temperature': -0.5, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-11-29T08:00:00+00:00', + 'precipitation': 0.01, + 'temperature': -0.5, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-11-29T09:00:00+00:00', + 'precipitation': 0.01, + 'temperature': -0.5, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-11-29T10:00:00+00:00', + 'precipitation': 0.01, + 'temperature': -0.3, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-29T11:00:00+00:00', + 'precipitation': 
0.03, + 'temperature': 0.3, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-29T12:00:00+00:00', + 'precipitation': 0.03, + 'temperature': 1.1, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-29T13:00:00+00:00', + 'precipitation': 0.03, + 'temperature': 2.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-29T14:00:00+00:00', + 'precipitation': 0.02, + 'temperature': 2.2, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-29T15:00:00+00:00', + 'precipitation': 0.02, + 'temperature': 2.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-29T16:00:00+00:00', + 'precipitation': 0.02, + 'temperature': 1.7, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-29T17:00:00+00:00', + 'precipitation': 0.13, + 'temperature': 1.4, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-29T18:00:00+00:00', + 'precipitation': 0.13, + 'temperature': 1.0, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-29T19:00:00+00:00', + 'precipitation': 0.13, + 'temperature': 0.7, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-29T20:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 2.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-29T21:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 1.9, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-29T22:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 1.8, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-29T23:00:00+00:00', + 'precipitation': 0.07, + 'temperature': 1.8, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-30T00:00:00+00:00', + 'precipitation': 0.07, + 'temperature': 1.7, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-30T01:00:00+00:00', + 'precipitation': 0.07, + 'temperature': 1.6, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-30T02:00:00+00:00', + 'precipitation': 0.16, + 'temperature': 1.5, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-30T03:00:00+00:00', + 'precipitation': 0.16, + 'temperature': 1.5, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-30T04:00:00+00:00', + 'precipitation': 0.16, + 'temperature': 1.3, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-30T05:00:00+00:00', + 'precipitation': 0.01, + 'temperature': 1.1, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-30T06:00:00+00:00', + 'precipitation': 0.01, + 'temperature': 0.8, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-30T07:00:00+00:00', + 'precipitation': 0.01, + 'temperature': 0.5, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-11-30T08:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 0.1, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-11-30T09:00:00+00:00', + 'precipitation': 0.0, + 'temperature': -0.2, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-11-30T10:00:00+00:00', + 'precipitation': 0.0, + 'temperature': -0.3, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-11-30T11:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 0.2, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-11-30T12:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 1.0, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-11-30T13:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 1.9, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-30T14:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 2.2, + }), + dict({ 
+ 'condition': 'cloudy', + 'datetime': '2021-11-30T15:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 2.4, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-11-30T16:00:00+00:00', + 'precipitation': 0.0, + 'temperature': 2.6, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-30T17:00:00+00:00', + 'precipitation': 0.03, + 'temperature': 2.6, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-30T18:00:00+00:00', + 'precipitation': 0.03, + 'temperature': 2.5, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-30T19:00:00+00:00', + 'precipitation': 0.03, + 'temperature': 2.4, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-30T20:00:00+00:00', + 'precipitation': 0.04, + 'temperature': 2.5, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-30T21:00:00+00:00', + 'precipitation': 0.04, + 'temperature': 2.8, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-30T22:00:00+00:00', + 'precipitation': 0.04, + 'temperature': 3.0, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-11-30T23:00:00+00:00', + 'precipitation': 0.88, + 'temperature': 3.0, + }), + ]), + }), + }) +# --- diff --git a/tests/components/open_meteo/test_weather.py b/tests/components/open_meteo/test_weather.py new file mode 100644 index 00000000000..b43385c924a --- /dev/null +++ b/tests/components/open_meteo/test_weather.py @@ -0,0 +1,46 @@ +"""Test for the open meteo weather entity.""" + +from unittest.mock import AsyncMock + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.weather import ( + DOMAIN as WEATHER_DOMAIN, + SERVICE_GET_FORECASTS, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +@pytest.mark.freeze_time("2021-11-24T03:00:00+00:00") +async def test_forecast_service( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_open_meteo: AsyncMock, + snapshot: SnapshotAssertion, +) -> None: + """Test forecast service.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + response = await hass.services.async_call( + WEATHER_DOMAIN, + SERVICE_GET_FORECASTS, + {ATTR_ENTITY_ID: "weather.home", "type": "daily"}, + blocking=True, + return_response=True, + ) + assert response == snapshot(name="forecast_daily") + + response = await hass.services.async_call( + WEATHER_DOMAIN, + SERVICE_GET_FORECASTS, + {ATTR_ENTITY_ID: "weather.home", "type": "hourly"}, + blocking=True, + return_response=True, + ) + assert response == snapshot(name="forecast_hourly") From 61ac8e7e8cc606061932aea2a12560deb66f17fb Mon Sep 17 00:00:00 2001 From: Duco Sebel <74970928+DCSBL@users.noreply.github.com> Date: Thu, 2 Jan 2025 19:34:51 +0100 Subject: [PATCH 1172/1198] Include host in Peblar EV-Charger discovery setup description (#133954) Co-authored-by: Franck Nijhof --- .../components/peblar/config_flow.py | 21 ++++++++++++++----- homeassistant/components/peblar/strings.json | 2 +- 2 files changed, 17 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/peblar/config_flow.py b/homeassistant/components/peblar/config_flow.py index 29bf456b7ea..24248355f72 100644 --- a/homeassistant/components/peblar/config_flow.py +++ b/homeassistant/components/peblar/config_flow.py @@ -27,7 +27,7 @@ class PeblarFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - _host: str + _discovery_info: zeroconf.ZeroconfServiceInfo 
async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -137,8 +137,15 @@ class PeblarFlowHandler(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(sn) self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.host}) - self._host = discovery_info.host - self.context.update({"configuration_url": f"http://{discovery_info.host}"}) + self._discovery_info = discovery_info + self.context.update( + { + "title_placeholders": { + "name": discovery_info.name.replace("._http._tcp.local.", "") + }, + "configuration_url": f"http://{discovery_info.host}", + }, + ) return await self.async_step_zeroconf_confirm() async def async_step_zeroconf_confirm( @@ -149,7 +156,7 @@ class PeblarFlowHandler(ConfigFlow, domain=DOMAIN): if user_input is not None: peblar = Peblar( - host=self._host, + host=self._discovery_info.host, session=async_create_clientsession( self.hass, cookie_jar=CookieJar(unsafe=True) ), @@ -165,7 +172,7 @@ class PeblarFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_create_entry( title="Peblar", data={ - CONF_HOST: self._host, + CONF_HOST: self._discovery_info.host, CONF_PASSWORD: user_input[CONF_PASSWORD], }, ) @@ -179,6 +186,10 @@ class PeblarFlowHandler(ConfigFlow, domain=DOMAIN): ), } ), + description_placeholders={ + "hostname": self._discovery_info.name.replace("._http._tcp.local.", ""), + "host": self._discovery_info.host, + }, errors=errors, ) diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index f6a228ca236..3fcd7a14664 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -51,7 +51,7 @@ "data_description": { "password": "[%key:component::peblar::config::step::user::data_description::password%]" }, - "description": "Set up your Peblar EV charger to integrate with Home Assistant.\n\nTo do so, you will need the password you use to log into the Peblar EV charger' web interface.\n\nHome Assistant will automatically configure your Peblar EV charger for use with Home Assistant." + "description": "Set up your Peblar EV charger {hostname}, on IP address {host}, to integrate with Home Assistant\n\nTo do so, you will need the password you use to log into the Peblar EV charger' web interface.\n\nHome Assistant will automatically configure your Peblar EV charger for use with Home Assistant." 
} } }, From 995e2229597158f81ddc54796d4d426657603876 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 2 Jan 2025 18:56:23 +0100 Subject: [PATCH 1173/1198] Don't start recorder if a database from the future is used (#134467) --- homeassistant/components/recorder/core.py | 10 ++++++ tests/components/recorder/test_init.py | 40 +++++++++++++++++++++++ 2 files changed, 50 insertions(+) diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py index 61c64be105c..e027922e8c4 100644 --- a/homeassistant/components/recorder/core.py +++ b/homeassistant/components/recorder/core.py @@ -719,6 +719,16 @@ class Recorder(threading.Thread): if schema_status is None: # Give up if we could not validate the schema return + if schema_status.current_version > SCHEMA_VERSION: + _LOGGER.error( + "The database schema version %s is newer than %s which is the maximum " + "database schema version supported by the installed version of " + "Home Assistant Core, either upgrade Home Assistant Core or restore " + "the database from a backup compatible with this version", + schema_status.current_version, + SCHEMA_VERSION, + ) + return self.schema_version = schema_status.current_version if not schema_status.migration_needed and not schema_status.schema_errors: diff --git a/tests/components/recorder/test_init.py b/tests/components/recorder/test_init.py index 7e5abf1b514..2e9e9a7c729 100644 --- a/tests/components/recorder/test_init.py +++ b/tests/components/recorder/test_init.py @@ -2615,6 +2615,46 @@ async def test_clean_shutdown_when_schema_migration_fails( assert instance.engine is None +async def test_setup_fails_after_downgrade( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test we fail to setup after a downgrade. + + Also test we shutdown cleanly. 
+ """ + with ( + patch.object( + migration, + "_get_current_schema_version", + side_effect=[None, SCHEMA_VERSION + 1], + ), + patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), + ): + if recorder.DOMAIN not in hass.data: + recorder_helper.async_initialize_recorder(hass) + assert not await async_setup_component( + hass, + recorder.DOMAIN, + { + recorder.DOMAIN: { + CONF_DB_URL: "sqlite://", + CONF_DB_RETRY_WAIT: 0, + CONF_DB_MAX_RETRIES: 1, + } + }, + ) + await hass.async_block_till_done() + + instance = recorder.get_instance(hass) + await hass.async_stop() + assert instance.engine is None + assert ( + f"The database schema version {SCHEMA_VERSION+1} is newer than {SCHEMA_VERSION}" + " which is the maximum database schema version supported by the installed " + "version of Home Assistant Core" + ) in caplog.text + + async def test_events_are_recorded_until_final_write( hass: HomeAssistant, async_setup_recorder_instance: RecorderInstanceGenerator, From 5ac4d5bef7e0bd42952e316aefebacc63ec6c67d Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Thu, 2 Jan 2025 18:54:27 +0100 Subject: [PATCH 1174/1198] Bump deebot-client to 10.1.0 (#134470) --- homeassistant/components/ecovacs/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index 3a2d4e7704b..67d18c4784c 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.10", "deebot-client==10.0.1"] + "requirements": ["py-sucks==0.9.10", "deebot-client==10.1.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 864a980e54c..ef819c7b25c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -738,7 +738,7 @@ debugpy==1.8.11 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==10.0.1 +deebot-client==10.1.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 252db100182..8cce8edcee9 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -628,7 +628,7 @@ dbus-fast==2.24.3 debugpy==1.8.11 # homeassistant.components.ecovacs -deebot-client==10.0.1 +deebot-client==10.1.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns From 9b906e94c7eb8973a2c13d471e37d241002f0bad Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Thu, 2 Jan 2025 21:17:29 +0100 Subject: [PATCH 1175/1198] Fix a few small typos in peblar (#134481) --- homeassistant/components/peblar/strings.json | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index 3fcd7a14664..fffa2b08d85 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -20,7 +20,7 @@ "data_description": { "password": "[%key:component::peblar::config::step::user::data_description::password%]" }, - "description": "Reauthenticate with your Peblar EV charger.\n\nTo do so, you will need to enter your new password you use to log into Peblar EV charger' web interface." 
+ "description": "Reauthenticate with your Peblar EV charger.\n\nTo do so, you will need to enter your new password you use to log in to the Peblar EV charger's web interface." }, "reconfigure": { "data": { @@ -31,7 +31,7 @@ "host": "[%key:component::peblar::config::step::user::data_description::host%]", "password": "[%key:component::peblar::config::step::user::data_description::password%]" }, - "description": "Reconfigure your Peblar EV charger.\n\nThis allows you to change the IP address of your Peblar EV charger and the password you use to log into its web interface." + "description": "Reconfigure your Peblar EV charger.\n\nThis allows you to change the IP address of your Peblar EV charger and the password you use to log in to its web interface." }, "user": { "data": { @@ -40,9 +40,9 @@ }, "data_description": { "host": "The hostname or IP address of your Peblar EV charger on your home network.", - "password": "The same password as you use to log in to the Peblar EV charger' local web interface." + "password": "The same password as you use to log in to the Peblar EV charger's local web interface." }, - "description": "Set up your Peblar EV charger to integrate with Home Assistant.\n\nTo do so, you will need to get the IP address of your Peblar EV charger and the password you use to log into its web interface.\n\nHome Assistant will automatically configure your Peblar EV charger for use with Home Assistant." + "description": "Set up your Peblar EV charger to integrate with Home Assistant.\n\nTo do so, you will need to get the IP address of your Peblar EV charger and the password you use to log in to its web interface.\n\nHome Assistant will automatically configure your Peblar EV charger for use with Home Assistant." }, "zeroconf_confirm": { "data": { @@ -51,7 +51,7 @@ "data_description": { "password": "[%key:component::peblar::config::step::user::data_description::password%]" }, - "description": "Set up your Peblar EV charger {hostname}, on IP address {host}, to integrate with Home Assistant\n\nTo do so, you will need the password you use to log into the Peblar EV charger' web interface.\n\nHome Assistant will automatically configure your Peblar EV charger for use with Home Assistant." + "description": "Set up your Peblar EV charger {hostname}, on IP address {host}, to integrate with Home Assistant\n\nTo do so, you will need the password you use to log in to the Peblar EV charger's web interface.\n\nHome Assistant will automatically configure your Peblar EV charger for use with Home Assistant." 
} } }, From 7fa1983da051ff4808e3ae1dfba2ab0912b17514 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 2 Jan 2025 21:41:54 +0100 Subject: [PATCH 1176/1198] Update peblar to 0.3.1 (#134486) --- homeassistant/components/peblar/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/peblar/manifest.json b/homeassistant/components/peblar/manifest.json index ab5572e66d0..76e228351e5 100644 --- a/homeassistant/components/peblar/manifest.json +++ b/homeassistant/components/peblar/manifest.json @@ -7,6 +7,6 @@ "integration_type": "device", "iot_class": "local_polling", "quality_scale": "platinum", - "requirements": ["peblar==0.3.0"], + "requirements": ["peblar==0.3.1"], "zeroconf": [{ "type": "_http._tcp.local.", "name": "pblr-*" }] } diff --git a/requirements_all.txt b/requirements_all.txt index ef819c7b25c..8a5b3d85546 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1603,7 +1603,7 @@ panasonic-viera==0.4.2 pdunehd==1.3.2 # homeassistant.components.peblar -peblar==0.3.0 +peblar==0.3.1 # homeassistant.components.peco peco==0.0.30 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8cce8edcee9..7c5f2e9306e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1330,7 +1330,7 @@ panasonic-viera==0.4.2 pdunehd==1.3.2 # homeassistant.components.peblar -peblar==0.3.0 +peblar==0.3.1 # homeassistant.components.peco peco==0.0.30 From 47190e4ac16af6ddb2e2c99e21917014bd1001e7 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 2 Jan 2025 22:23:54 +0000 Subject: [PATCH 1177/1198] Bump version to 2025.1.0b7 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index 3bf985cfea3..a09482f3bd2 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -25,7 +25,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2025 MINOR_VERSION: Final = 1 -PATCH_VERSION: Final = "0b6" +PATCH_VERSION: Final = "0b7" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index cc2991c3837..8f6b72462ef 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2025.1.0b6" +version = "2025.1.0b7" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" From f364e2914814a806a4af2165bba1973107899514 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 2 Jan 2025 23:45:00 +0100 Subject: [PATCH 1178/1198] Fix input_datetime.set_datetime not accepting 0 timestamp value (#134489) --- .../components/input_datetime/__init__.py | 2 +- tests/components/input_datetime/test_init.py | 28 +++++++++++++++++++ 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/input_datetime/__init__.py b/homeassistant/components/input_datetime/__init__.py index dcc2865acad..428ffccb7c1 100644 --- a/homeassistant/components/input_datetime/__init__.py +++ b/homeassistant/components/input_datetime/__init__.py @@ -385,7 +385,7 @@ class InputDatetime(collection.CollectionEntity, RestoreEntity): @callback def async_set_datetime(self, date=None, time=None, datetime=None, timestamp=None): """Set a new date / time.""" - if timestamp: + if timestamp is not None: datetime = dt_util.as_local(dt_util.utc_from_timestamp(timestamp)) if datetime: diff --git a/tests/components/input_datetime/test_init.py b/tests/components/input_datetime/test_init.py index 411f084d39a..7d491f0cdcd 100644 --- a/tests/components/input_datetime/test_init.py +++ b/tests/components/input_datetime/test_init.py @@ -217,6 +217,34 @@ async def test_set_datetime_3(hass: HomeAssistant) -> None: assert state.attributes["timestamp"] == dt_obj.timestamp() +async def test_set_datetime_4(hass: HomeAssistant) -> None: + """Test set_datetime method using timestamp 0.""" + await async_setup_component( + hass, DOMAIN, {DOMAIN: {"test_datetime": {"has_time": True, "has_date": True}}} + ) + + entity_id = "input_datetime.test_datetime" + + dt_obj = datetime.datetime( + 1969, 12, 31, 16, 00, 00, tzinfo=dt_util.get_time_zone(hass.config.time_zone) + ) + + await async_set_timestamp(hass, entity_id, 0) + + state = hass.states.get(entity_id) + assert state.state == dt_obj.strftime(FORMAT_DATETIME) + assert state.attributes["has_time"] + assert state.attributes["has_date"] + + assert state.attributes["year"] == 1969 + assert state.attributes["month"] == 12 + assert state.attributes["day"] == 31 + assert state.attributes["hour"] == 16 + assert state.attributes["minute"] == 00 + assert state.attributes["second"] == 0 + assert state.attributes["timestamp"] == 0 + + async def test_set_datetime_time(hass: HomeAssistant) -> None: """Test set_datetime method with only time.""" await async_setup_component( From 59a3fe857b34e7e373137fcc0cbc34c3cc262534 Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Thu, 2 Jan 2025 23:28:29 +0100 Subject: [PATCH 1179/1198] Bump aioacaia to 0.1.13 (#134496) --- homeassistant/components/acaia/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/acaia/manifest.json b/homeassistant/components/acaia/manifest.json index fef8c1219a8..681f3f08555 100644 --- a/homeassistant/components/acaia/manifest.json +++ b/homeassistant/components/acaia/manifest.json @@ -26,5 +26,5 @@ "iot_class": "local_push", "loggers": ["aioacaia"], "quality_scale": "platinum", - "requirements": ["aioacaia==0.1.12"] + "requirements": ["aioacaia==0.1.13"] } diff --git a/requirements_all.txt b/requirements_all.txt index 8a5b3d85546..166e5426553 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -173,7 +173,7 @@ aio-geojson-usgs-earthquakes==0.3 aio-georss-gdacs==0.10 # homeassistant.components.acaia -aioacaia==0.1.12 +aioacaia==0.1.13 # 
homeassistant.components.airq aioairq==0.4.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 7c5f2e9306e..2e3a5348473 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -161,7 +161,7 @@ aio-geojson-usgs-earthquakes==0.3 aio-georss-gdacs==0.10 # homeassistant.components.acaia -aioacaia==0.1.12 +aioacaia==0.1.13 # homeassistant.components.airq aioairq==0.4.3 From e1f647562312c0a53c36bf4f2f7d2887d87b0260 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Fri, 3 Jan 2025 00:21:19 -0500 Subject: [PATCH 1180/1198] Fix backup dir not existing (#134506) --- homeassistant/components/backup/manager.py | 1 + tests/components/backup/test_manager.py | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index 33405d97883..4d509003a21 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -1294,6 +1294,7 @@ class CoreBackupReaderWriter(BackupReaderWriter): if self._local_agent_id in agent_ids: local_agent = manager.local_backup_agents[self._local_agent_id] tar_file_path = local_agent.get_backup_path(backup.backup_id) + await async_add_executor_job(make_backup_dir, tar_file_path.parent) await async_add_executor_job(shutil.move, temp_file, tar_file_path) else: tar_file_path = temp_file diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index 0797eef2274..ad90e2e23bf 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -1397,6 +1397,9 @@ async def test_receive_backup( with ( patch("pathlib.Path.open", open_mock), + patch( + "homeassistant.components.backup.manager.make_backup_dir" + ) as make_backup_dir_mock, patch("shutil.move") as move_mock, patch( "homeassistant.components.backup.manager.read_backup", @@ -1412,6 +1415,7 @@ async def test_receive_backup( assert resp.status == 201 assert open_mock.call_count == open_call_count + assert make_backup_dir_mock.call_count == move_call_count + 1 assert move_mock.call_count == move_call_count for index, name in enumerate(move_path_names): assert move_mock.call_args_list[index].args[1].name == name From 1b67d51e24e619cf64f15342244c5d681e684147 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 3 Jan 2025 10:01:35 +0100 Subject: [PATCH 1181/1198] Add error prints for recorder fatal errors (#134517) --- homeassistant/components/recorder/core.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py index e027922e8c4..fee72ce273f 100644 --- a/homeassistant/components/recorder/core.py +++ b/homeassistant/components/recorder/core.py @@ -712,12 +712,14 @@ class Recorder(threading.Thread): setup_result = self._setup_recorder() if not setup_result: + _LOGGER.error("Recorder setup failed, recorder shutting down") # Give up if we could not connect return schema_status = migration.validate_db_schema(self.hass, self, self.get_session) if schema_status is None: # Give up if we could not validate the schema + _LOGGER.error("Failed to validate schema, recorder shutting down") return if schema_status.current_version > SCHEMA_VERSION: _LOGGER.error( From a830a1434238971f41f1140919bc08525720d012 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 3 Jan 2025 10:05:07 +0100 Subject: [PATCH 1182/1198] Improve recorder schema migration error test (#134518) --- tests/components/recorder/test_init.py | 30 
+++++++++++++++----------- 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/tests/components/recorder/test_init.py b/tests/components/recorder/test_init.py index 2e9e9a7c729..74d8861ae1e 100644 --- a/tests/components/recorder/test_init.py +++ b/tests/components/recorder/test_init.py @@ -9,7 +9,7 @@ import sqlite3 import sys import threading from typing import Any, cast -from unittest.mock import MagicMock, Mock, patch +from unittest.mock import Mock, patch from freezegun.api import FrozenDateTimeFactory import pytest @@ -2575,23 +2575,25 @@ async def test_clean_shutdown_when_recorder_thread_raises_during_validate_db_sch @pytest.mark.parametrize( ("func_to_patch", "expected_setup_result"), - [("migrate_schema_non_live", False), ("migrate_schema_live", False)], + [ + ("migrate_schema_non_live", False), + ("migrate_schema_live", True), + ], ) async def test_clean_shutdown_when_schema_migration_fails( - hass: HomeAssistant, func_to_patch: str, expected_setup_result: bool + hass: HomeAssistant, + func_to_patch: str, + expected_setup_result: bool, + caplog: pytest.LogCaptureFixture, ) -> None: """Test we still shutdown cleanly when schema migration fails.""" with ( - patch.object( - migration, - "validate_db_schema", - return_value=MagicMock(valid=False, current_version=1), - ), + patch.object(migration, "_get_current_schema_version", side_effect=[None, 1]), patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), patch.object( migration, func_to_patch, - side_effect=Exception, + side_effect=Exception("Boom!"), ), ): if recorder.DOMAIN not in hass.data: @@ -2610,9 +2612,13 @@ async def test_clean_shutdown_when_schema_migration_fails( assert setup_result == expected_setup_result await hass.async_block_till_done() - instance = recorder.get_instance(hass) - await hass.async_stop() - assert instance.engine is None + instance = recorder.get_instance(hass) + await hass.async_stop() + assert instance.engine is None + + assert "Error during schema migration" in caplog.text + # Check the injected exception was logged + assert "Boom!" 
in caplog.text async def test_setup_fails_after_downgrade( From f719a1453777396e72b5f837df5cb1817bbb1044 Mon Sep 17 00:00:00 2001 From: Robert Svensson Date: Fri, 3 Jan 2025 10:51:20 +0100 Subject: [PATCH 1183/1198] Handle deCONZ color temp 0 is never used when calculating kelvin CT (#134521) --- homeassistant/components/deconz/light.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/deconz/light.py b/homeassistant/components/deconz/light.py index b1df32efc31..d82c05f14eb 100644 --- a/homeassistant/components/deconz/light.py +++ b/homeassistant/components/deconz/light.py @@ -266,7 +266,7 @@ class DeconzBaseLight[_LightDeviceT: Group | Light]( @property def color_temp_kelvin(self) -> int | None: """Return the CT color value.""" - if self._device.color_temp is None: + if self._device.color_temp is None or self._device.color_temp == 0: return None return color_temperature_mired_to_kelvin(self._device.color_temp) From 316f93f2083ceee69f9f0965f899aa2df32cf8ec Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 3 Jan 2025 10:29:29 +0100 Subject: [PATCH 1184/1198] Fix activating backup retention config on startup (#134523) --- homeassistant/components/backup/config.py | 6 + tests/components/backup/test_websocket.py | 319 ++++++++++++++++------ 2 files changed, 239 insertions(+), 86 deletions(-) diff --git a/homeassistant/components/backup/config.py b/homeassistant/components/backup/config.py index d58c7365c8a..3c5d5d39f7e 100644 --- a/homeassistant/components/backup/config.py +++ b/homeassistant/components/backup/config.py @@ -124,6 +124,7 @@ class BackupConfig: def load(self, stored_config: StoredBackupConfig) -> None: """Load config.""" self.data = BackupConfigData.from_dict(stored_config) + self.data.retention.apply(self._manager) self.data.schedule.apply(self._manager) async def update( @@ -160,8 +161,13 @@ class RetentionConfig: def apply(self, manager: BackupManager) -> None: """Apply backup retention configuration.""" if self.days is not None: + LOGGER.debug( + "Scheduling next automatic delete of backups older than %s in 1 day", + self.days, + ) self._schedule_next(manager) else: + LOGGER.debug("Unscheduling next automatic delete") self._unschedule_next(manager) def to_dict(self) -> StoredRetentionConfig: diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index a3b29a55ad8..307a1d79e0c 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -1173,7 +1173,7 @@ async def test_config_update_errors( @pytest.mark.parametrize( ( - "command", + "commands", "last_completed_automatic_backup", "time_1", "time_2", @@ -1186,11 +1186,8 @@ async def test_config_update_errors( ), [ ( - { - "type": "backup/config/update", - "create_backup": {"agent_ids": ["test.test-agent"]}, - "schedule": "daily", - }, + # No config update + [], "2024-11-11T04:45:00+01:00", "2024-11-12T04:45:00+01:00", "2024-11-13T04:45:00+01:00", @@ -1202,11 +1199,32 @@ async def test_config_update_errors( None, ), ( - { - "type": "backup/config/update", - "create_backup": {"agent_ids": ["test.test-agent"]}, - "schedule": "mon", - }, + # Unchanged schedule + [ + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "daily", + } + ], + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-13T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 2, + BACKUP_CALL, + None, + ), + ( + [ + { + 
"type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "mon", + } + ], "2024-11-11T04:45:00+01:00", "2024-11-18T04:45:00+01:00", "2024-11-25T04:45:00+01:00", @@ -1218,11 +1236,13 @@ async def test_config_update_errors( None, ), ( - { - "type": "backup/config/update", - "create_backup": {"agent_ids": ["test.test-agent"]}, - "schedule": "never", - }, + [ + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "never", + } + ], "2024-11-11T04:45:00+01:00", "2034-11-11T12:00:00+01:00", # ten years later and still no backups "2034-11-11T13:00:00+01:00", @@ -1234,11 +1254,13 @@ async def test_config_update_errors( None, ), ( - { - "type": "backup/config/update", - "create_backup": {"agent_ids": ["test.test-agent"]}, - "schedule": "daily", - }, + [ + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "daily", + } + ], "2024-10-26T04:45:00+01:00", "2024-11-12T04:45:00+01:00", "2024-11-13T04:45:00+01:00", @@ -1250,11 +1272,13 @@ async def test_config_update_errors( None, ), ( - { - "type": "backup/config/update", - "create_backup": {"agent_ids": ["test.test-agent"]}, - "schedule": "mon", - }, + [ + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "mon", + } + ], "2024-10-26T04:45:00+01:00", "2024-11-12T04:45:00+01:00", "2024-11-13T04:45:00+01:00", @@ -1266,11 +1290,13 @@ async def test_config_update_errors( None, ), ( - { - "type": "backup/config/update", - "create_backup": {"agent_ids": ["test.test-agent"]}, - "schedule": "never", - }, + [ + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "never", + } + ], "2024-10-26T04:45:00+01:00", "2034-11-11T12:00:00+01:00", # ten years later and still no backups "2034-11-12T12:00:00+01:00", @@ -1282,11 +1308,13 @@ async def test_config_update_errors( None, ), ( - { - "type": "backup/config/update", - "create_backup": {"agent_ids": ["test.test-agent"]}, - "schedule": "daily", - }, + [ + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "daily", + } + ], "2024-11-11T04:45:00+01:00", "2024-11-12T04:45:00+01:00", "2024-11-13T04:45:00+01:00", @@ -1298,11 +1326,13 @@ async def test_config_update_errors( [BackupReaderWriterError("Boom"), None], ), ( - { - "type": "backup/config/update", - "create_backup": {"agent_ids": ["test.test-agent"]}, - "schedule": "daily", - }, + [ + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "daily", + } + ], "2024-11-11T04:45:00+01:00", "2024-11-12T04:45:00+01:00", "2024-11-13T04:45:00+01:00", @@ -1321,7 +1351,7 @@ async def test_config_schedule_logic( freezer: FrozenDateTimeFactory, hass_storage: dict[str, Any], create_backup: AsyncMock, - command: dict[str, Any], + commands: list[dict[str, Any]], last_completed_automatic_backup: str, time_1: str, time_2: str, @@ -1338,7 +1368,7 @@ async def test_config_schedule_logic( "backups": {}, "config": { "create_backup": { - "agent_ids": ["test-agent"], + "agent_ids": ["test.test-agent"], "include_addons": ["test-addon"], "include_all_addons": False, "include_database": True, @@ -1364,10 +1394,10 @@ async def test_config_schedule_logic( await setup_backup_integration(hass, remote_agents=["test-agent"]) await hass.async_block_till_done() - await client.send_json_auto_id(command) - result = await client.receive_json() - - 
assert result["success"] + for command in commands: + await client.send_json_auto_id(command) + result = await client.receive_json() + assert result["success"] freezer.move_to(time_1) async_fire_time_changed(hass) @@ -2097,7 +2127,8 @@ async def test_config_retention_copies_logic_manual_backup( @pytest.mark.parametrize( ( - "command", + "stored_retained_days", + "commands", "backups", "get_backups_agent_errors", "delete_backup_agent_errors", @@ -2109,13 +2140,77 @@ async def test_config_retention_copies_logic_manual_backup( "delete_args_list", ), [ + # No config update - cleanup backups older than 2 days ( + 2, + [], { - "type": "backup/config/update", - "create_backup": {"agent_ids": ["test-agent"]}, - "retention": {"copies": None, "days": 2}, - "schedule": "never", + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_automatic_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_automatic_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_automatic_settings=False, + spec=ManagerBackup, + ), }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-11T12:00:00+01:00", + "2024-11-12T12:00:00+01:00", + 1, + 1, + [call("backup-1")], + ), + # No config update - No cleanup + ( + None, + [], + { + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_automatic_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_automatic_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_automatic_settings=False, + spec=ManagerBackup, + ), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-11T12:00:00+01:00", + "2024-11-12T12:00:00+01:00", + 0, + 0, + [], + ), + # Unchanged config + ( + 2, + [ + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 2}, + "schedule": "never", + } + ], { "backup-1": MagicMock( date="2024-11-10T04:45:00+01:00", @@ -2143,12 +2238,51 @@ async def test_config_retention_copies_logic_manual_backup( [call("backup-1")], ), ( + None, + [ + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 2}, + "schedule": "never", + } + ], { - "type": "backup/config/update", - "create_backup": {"agent_ids": ["test-agent"]}, - "retention": {"copies": None, "days": 3}, - "schedule": "never", + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_automatic_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_automatic_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_automatic_settings=False, + spec=ManagerBackup, + ), }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-11T12:00:00+01:00", + "2024-11-12T12:00:00+01:00", + 1, + 1, + [call("backup-1")], + ), + ( + None, + [ + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 3}, + "schedule": "never", + } + ], { "backup-1": MagicMock( date="2024-11-10T04:45:00+01:00", @@ -2176,12 +2310,15 @@ async def test_config_retention_copies_logic_manual_backup( [], ), ( - { - "type": "backup/config/update", - "create_backup": {"agent_ids": ["test-agent"]}, - "retention": {"copies": None, "days": 2}, - "schedule": "never", - }, + None, + [ + { + "type": 
"backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 2}, + "schedule": "never", + } + ], { "backup-1": MagicMock( date="2024-11-09T04:45:00+01:00", @@ -2214,12 +2351,15 @@ async def test_config_retention_copies_logic_manual_backup( [call("backup-1"), call("backup-2")], ), ( - { - "type": "backup/config/update", - "create_backup": {"agent_ids": ["test-agent"]}, - "retention": {"copies": None, "days": 2}, - "schedule": "never", - }, + None, + [ + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 2}, + "schedule": "never", + } + ], { "backup-1": MagicMock( date="2024-11-10T04:45:00+01:00", @@ -2247,12 +2387,15 @@ async def test_config_retention_copies_logic_manual_backup( [call("backup-1")], ), ( - { - "type": "backup/config/update", - "create_backup": {"agent_ids": ["test-agent"]}, - "retention": {"copies": None, "days": 2}, - "schedule": "never", - }, + None, + [ + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 2}, + "schedule": "never", + } + ], { "backup-1": MagicMock( date="2024-11-10T04:45:00+01:00", @@ -2280,12 +2423,15 @@ async def test_config_retention_copies_logic_manual_backup( [call("backup-1")], ), ( - { - "type": "backup/config/update", - "create_backup": {"agent_ids": ["test-agent"]}, - "retention": {"copies": None, "days": 0}, - "schedule": "never", - }, + None, + [ + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 0}, + "schedule": "never", + } + ], { "backup-1": MagicMock( date="2024-11-09T04:45:00+01:00", @@ -2326,7 +2472,8 @@ async def test_config_retention_days_logic( hass_storage: dict[str, Any], delete_backup: AsyncMock, get_backups: AsyncMock, - command: dict[str, Any], + stored_retained_days: int | None, + commands: list[dict[str, Any]], backups: dict[str, Any], get_backups_agent_errors: dict[str, Exception], delete_backup_agent_errors: dict[str, Exception], @@ -2351,7 +2498,7 @@ async def test_config_retention_days_logic( "name": "test-name", "password": "test-password", }, - "retention": {"copies": None, "days": None}, + "retention": {"copies": None, "days": stored_retained_days}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": last_backup_time, "schedule": {"state": "never"}, @@ -2370,10 +2517,10 @@ async def test_config_retention_days_logic( await setup_backup_integration(hass) await hass.async_block_till_done() - await client.send_json_auto_id(command) - result = await client.receive_json() - - assert result["success"] + for command in commands: + await client.send_json_auto_id(command) + result = await client.receive_json() + assert result["success"] freezer.move_to(next_time) async_fire_time_changed(hass) From 96936f5f4a310119bdd265a5a1386133485c26a6 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 3 Jan 2025 10:37:39 +0100 Subject: [PATCH 1185/1198] Update peblar to v0.3.2 (#134524) --- homeassistant/components/peblar/manifest.json | 2 +- homeassistant/components/peblar/update.py | 6 ++++-- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 7 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/peblar/manifest.json b/homeassistant/components/peblar/manifest.json index 76e228351e5..2c3e73ba76e 100644 --- a/homeassistant/components/peblar/manifest.json +++ 
b/homeassistant/components/peblar/manifest.json @@ -7,6 +7,6 @@ "integration_type": "device", "iot_class": "local_polling", "quality_scale": "platinum", - "requirements": ["peblar==0.3.1"], + "requirements": ["peblar==0.3.2"], "zeroconf": [{ "type": "_http._tcp.local.", "name": "pblr-*" }] } diff --git a/homeassistant/components/peblar/update.py b/homeassistant/components/peblar/update.py index 67ce30a89a6..29dfbfdcd47 100644 --- a/homeassistant/components/peblar/update.py +++ b/homeassistant/components/peblar/update.py @@ -27,8 +27,9 @@ PARALLEL_UPDATES = 1 class PeblarUpdateEntityDescription(UpdateEntityDescription): """Describe an Peblar update entity.""" - installed_fn: Callable[[PeblarVersionInformation], str | None] available_fn: Callable[[PeblarVersionInformation], str | None] + has_fn: Callable[[PeblarVersionInformation], bool] = lambda _: True + installed_fn: Callable[[PeblarVersionInformation], str | None] DESCRIPTIONS: tuple[PeblarUpdateEntityDescription, ...] = ( @@ -41,8 +42,9 @@ DESCRIPTIONS: tuple[PeblarUpdateEntityDescription, ...] = ( PeblarUpdateEntityDescription( key="customization", translation_key="customization", - installed_fn=lambda x: x.current.customization, available_fn=lambda x: x.available.customization, + has_fn=lambda x: x.current.customization is not None, + installed_fn=lambda x: x.current.customization, ), ) diff --git a/requirements_all.txt b/requirements_all.txt index 166e5426553..0363d3d2650 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1603,7 +1603,7 @@ panasonic-viera==0.4.2 pdunehd==1.3.2 # homeassistant.components.peblar -peblar==0.3.1 +peblar==0.3.2 # homeassistant.components.peco peco==0.0.30 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 2e3a5348473..ec70c179e15 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1330,7 +1330,7 @@ panasonic-viera==0.4.2 pdunehd==1.3.2 # homeassistant.components.peblar -peblar==0.3.1 +peblar==0.3.2 # homeassistant.components.peco peco==0.0.30 From ea82c1b73e0b5f13a276def20a0e7d998d7477e0 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 3 Jan 2025 10:51:05 +0100 Subject: [PATCH 1186/1198] Only load Peblar customization update entity when present (#134526) --- homeassistant/components/peblar/update.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/peblar/update.py b/homeassistant/components/peblar/update.py index 29dfbfdcd47..77879030f6c 100644 --- a/homeassistant/components/peblar/update.py +++ b/homeassistant/components/peblar/update.py @@ -62,6 +62,7 @@ async def async_setup_entry( description=description, ) for description in DESCRIPTIONS + if description.has_fn(entry.runtime_data.version_coordinator.data) ) From 1af384bc0adce8597e466dda11c117e81913421c Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 3 Jan 2025 09:56:51 +0000 Subject: [PATCH 1187/1198] Bump version to 2025.1.0b8 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index a09482f3bd2..5898c682d89 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -25,7 +25,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2025 MINOR_VERSION: Final = 1 -PATCH_VERSION: Final = "0b7" +PATCH_VERSION: Final = "0b8" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) 
diff --git a/pyproject.toml b/pyproject.toml index 8f6b72462ef..1d6fbc8cefe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2025.1.0b7" +version = "2025.1.0b8" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" From c5746291cc23225825a58110c18570a69802d10a Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Fri, 3 Jan 2025 14:24:39 +0100 Subject: [PATCH 1188/1198] Add Reolink proxy for playback (#133916) --- homeassistant/components/reolink/__init__.py | 3 + .../components/reolink/manifest.json | 2 +- .../components/reolink/media_source.py | 31 +-- homeassistant/components/reolink/util.py | 13 + homeassistant/components/reolink/views.py | 147 +++++++++++ tests/components/reolink/test_views.py | 243 ++++++++++++++++++ 6 files changed, 418 insertions(+), 21 deletions(-) create mode 100644 homeassistant/components/reolink/views.py create mode 100644 tests/components/reolink/test_views.py diff --git a/homeassistant/components/reolink/__init__.py b/homeassistant/components/reolink/__init__.py index 29dfb4ee57b..dd791bbaf1a 100644 --- a/homeassistant/components/reolink/__init__.py +++ b/homeassistant/components/reolink/__init__.py @@ -27,6 +27,7 @@ from .exceptions import PasswordIncompatible, ReolinkException, UserNotAdmin from .host import ReolinkHost from .services import async_setup_services from .util import ReolinkConfigEntry, ReolinkData, get_device_uid_and_ch +from .views import PlaybackProxyView _LOGGER = logging.getLogger(__name__) @@ -189,6 +190,8 @@ async def async_setup_entry( migrate_entity_ids(hass, config_entry.entry_id, host) + hass.http.register_view(PlaybackProxyView(hass)) + await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) config_entry.async_on_unload( diff --git a/homeassistant/components/reolink/manifest.json b/homeassistant/components/reolink/manifest.json index 7d01ca808e1..bb6b668368b 100644 --- a/homeassistant/components/reolink/manifest.json +++ b/homeassistant/components/reolink/manifest.json @@ -3,7 +3,7 @@ "name": "Reolink", "codeowners": ["@starkillerOG"], "config_flow": true, - "dependencies": ["webhook"], + "dependencies": ["http", "webhook"], "dhcp": [ { "hostname": "reolink*" diff --git a/homeassistant/components/reolink/media_source.py b/homeassistant/components/reolink/media_source.py index 538a06a08f8..e912bfb5100 100644 --- a/homeassistant/components/reolink/media_source.py +++ b/homeassistant/components/reolink/media_source.py @@ -23,8 +23,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from .const import DOMAIN -from .host import ReolinkHost -from .util import ReolinkConfigEntry +from .util import get_host +from .views import async_generate_playback_proxy_url _LOGGER = logging.getLogger(__name__) @@ -47,15 +47,6 @@ def res_name(stream: str) -> str: return "Low res." 
-def get_host(hass: HomeAssistant, config_entry_id: str) -> ReolinkHost: - """Return the Reolink host from the config entry id.""" - config_entry: ReolinkConfigEntry | None = hass.config_entries.async_get_entry( - config_entry_id - ) - assert config_entry is not None - return config_entry.runtime_data.host - - class ReolinkVODMediaSource(MediaSource): """Provide Reolink camera VODs as media sources.""" @@ -90,22 +81,22 @@ class ReolinkVODMediaSource(MediaSource): vod_type = get_vod_type() + if vod_type in [VodRequestType.DOWNLOAD, VodRequestType.PLAYBACK]: + proxy_url = async_generate_playback_proxy_url( + config_entry_id, channel, filename, stream_res, vod_type.value + ) + return PlayMedia(proxy_url, "video/mp4") + mime_type, url = await host.api.get_vod_source( channel, filename, stream_res, vod_type ) if _LOGGER.isEnabledFor(logging.DEBUG): - url_log = url - if "&user=" in url_log: - url_log = f"{url_log.split('&user=')[0]}&user=xxxxx&password=xxxxx" - elif "&token=" in url_log: - url_log = f"{url_log.split('&token=')[0]}&token=xxxxx" _LOGGER.debug( - "Opening VOD stream from %s: %s", host.api.camera_name(channel), url_log + "Opening VOD stream from %s: %s", + host.api.camera_name(channel), + host.api.hide_password(url), ) - if mime_type == "video/mp4": - return PlayMedia(url, mime_type) - stream = create_stream(self.hass, url, {}, DynamicStreamSettings()) stream.add_provider("hls", timeout=3600) stream_url: str = stream.endpoint_url("hls") diff --git a/homeassistant/components/reolink/util.py b/homeassistant/components/reolink/util.py index 1a6eab3f61d..f52cb08286c 100644 --- a/homeassistant/components/reolink/util.py +++ b/homeassistant/components/reolink/util.py @@ -22,6 +22,7 @@ from reolink_aio.exceptions import ( ) from homeassistant import config_entries +from homeassistant.components.media_source import Unresolvable from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import device_registry as dr @@ -51,6 +52,18 @@ def is_connected(hass: HomeAssistant, config_entry: config_entries.ConfigEntry) ) +def get_host(hass: HomeAssistant, config_entry_id: str) -> ReolinkHost: + """Return the Reolink host from the config entry id.""" + config_entry: ReolinkConfigEntry | None = hass.config_entries.async_get_entry( + config_entry_id + ) + if config_entry is None: + raise Unresolvable( + f"Could not find Reolink config entry id '{config_entry_id}'." 
+ ) + return config_entry.runtime_data.host + + def get_device_uid_and_ch( device: dr.DeviceEntry, host: ReolinkHost ) -> tuple[list[str], int | None, bool]: diff --git a/homeassistant/components/reolink/views.py b/homeassistant/components/reolink/views.py new file mode 100644 index 00000000000..3b32ebaf74e --- /dev/null +++ b/homeassistant/components/reolink/views.py @@ -0,0 +1,147 @@ +"""Reolink Integration views.""" + +from __future__ import annotations + +from http import HTTPStatus +import logging +from urllib import parse + +from aiohttp import ClientError, ClientTimeout, web +from reolink_aio.enums import VodRequestType +from reolink_aio.exceptions import ReolinkError + +from homeassistant.components.http import HomeAssistantView +from homeassistant.components.media_source import Unresolvable +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.util.ssl import SSLCipherList + +from .util import get_host + +_LOGGER = logging.getLogger(__name__) + + +@callback +def async_generate_playback_proxy_url( + config_entry_id: str, channel: int, filename: str, stream_res: str, vod_type: str +) -> str: + """Generate proxy URL for event video.""" + + url_format = PlaybackProxyView.url + return url_format.format( + config_entry_id=config_entry_id, + channel=channel, + filename=parse.quote(filename, safe=""), + stream_res=stream_res, + vod_type=vod_type, + ) + + +class PlaybackProxyView(HomeAssistantView): + """View to proxy playback video from Reolink.""" + + requires_auth = True + url = "/api/reolink/video/{config_entry_id}/{channel}/{stream_res}/{vod_type}/{filename}" + name = "api:reolink_playback" + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize a proxy view.""" + self.hass = hass + self.session = async_get_clientsession( + hass, + verify_ssl=False, + ssl_cipher=SSLCipherList.INSECURE, + ) + + async def get( + self, + request: web.Request, + config_entry_id: str, + channel: str, + stream_res: str, + vod_type: str, + filename: str, + retry: int = 2, + ) -> web.StreamResponse: + """Get playback proxy video response.""" + retry = retry - 1 + + filename = parse.unquote(filename) + ch = int(channel) + try: + host = get_host(self.hass, config_entry_id) + except Unresolvable: + err_str = f"Reolink playback proxy could not find config entry id: {config_entry_id}" + _LOGGER.warning(err_str) + return web.Response(body=err_str, status=HTTPStatus.BAD_REQUEST) + + try: + mime_type, reolink_url = await host.api.get_vod_source( + ch, filename, stream_res, VodRequestType(vod_type) + ) + except ReolinkError as err: + _LOGGER.warning("Reolink playback proxy error: %s", str(err)) + return web.Response(body=str(err), status=HTTPStatus.BAD_REQUEST) + + if _LOGGER.isEnabledFor(logging.DEBUG): + _LOGGER.debug( + "Opening VOD stream from %s: %s", + host.api.camera_name(ch), + host.api.hide_password(reolink_url), + ) + + try: + reolink_response = await self.session.get( + reolink_url, + timeout=ClientTimeout( + connect=15, sock_connect=15, sock_read=5, total=None + ), + ) + except ClientError as err: + err_str = host.api.hide_password( + f"Reolink playback error while getting mp4: {err!s}" + ) + if retry <= 0: + _LOGGER.warning(err_str) + return web.Response(body=err_str, status=HTTPStatus.BAD_REQUEST) + _LOGGER.debug("%s, renewing token", err_str) + await host.api.expire_session(unsubscribe=False) + return await self.get( + request, config_entry_id, channel, stream_res, vod_type, filename, retry + ) + 
+ # Reolink typo "apolication/octet-stream" instead of "application/octet-stream" + if reolink_response.content_type not in [ + "video/mp4", + "application/octet-stream", + "apolication/octet-stream", + ]: + err_str = f"Reolink playback expected video/mp4 but got {reolink_response.content_type}" + _LOGGER.error(err_str) + return web.Response(body=err_str, status=HTTPStatus.BAD_REQUEST) + + response = web.StreamResponse( + status=200, + reason="OK", + headers={ + "Content-Type": "video/mp4", + }, + ) + + if reolink_response.content_length is not None: + response.content_length = reolink_response.content_length + + await response.prepare(request) + + try: + async for chunk in reolink_response.content.iter_chunked(65536): + await response.write(chunk) + except TimeoutError: + _LOGGER.debug( + "Timeout while reading Reolink playback from %s, writing EOF", + host.api.nvr_name, + ) + + reolink_response.release() + await response.write_eof() + return response diff --git a/tests/components/reolink/test_views.py b/tests/components/reolink/test_views.py new file mode 100644 index 00000000000..1eb184950bc --- /dev/null +++ b/tests/components/reolink/test_views.py @@ -0,0 +1,243 @@ +"""Tests for the Reolink views platform.""" + +from http import HTTPStatus +import logging +from typing import Any, cast +from unittest.mock import AsyncMock, MagicMock, Mock, patch + +from aiohttp import ClientConnectionError, ClientResponse +import pytest +from reolink_aio.enums import VodRequestType +from reolink_aio.exceptions import ReolinkError + +from homeassistant.components.reolink.views import async_generate_playback_proxy_url +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.typing import ClientSessionGenerator + +TEST_YEAR = 2023 +TEST_MONTH = 11 +TEST_DAY = 14 +TEST_DAY2 = 15 +TEST_HOUR = 13 +TEST_MINUTE = 12 +TEST_FILE_NAME_MP4 = f"{TEST_YEAR}{TEST_MONTH}{TEST_DAY}{TEST_HOUR}{TEST_MINUTE}00.mp4" +TEST_STREAM = "sub" +TEST_CHANNEL = "0" +TEST_VOD_TYPE = VodRequestType.PLAYBACK.value +TEST_MIME_TYPE_MP4 = "video/mp4" +TEST_URL = "http://test_url&token=test" +TEST_ERROR = "TestError" + + +def get_mock_session( + response: list[Any] | None = None, + content_length: int = 8, + content_type: str = TEST_MIME_TYPE_MP4, +) -> Mock: + """Get a mock session to mock the camera response.""" + if response is None: + response = [b"test", b"test", StopAsyncIteration()] + + content = Mock() + content.__anext__ = AsyncMock(side_effect=response) + content.__aiter__ = Mock(return_value=content) + + mock_response = Mock() + mock_response.content_length = content_length + mock_response.content_type = content_type + mock_response.content.iter_chunked = Mock(return_value=content) + + mock_session = Mock() + mock_session.get = AsyncMock(return_value=mock_response) + return mock_session + + +async def test_playback_proxy( + hass: HomeAssistant, + reolink_connect: MagicMock, + config_entry: MockConfigEntry, + hass_client: ClientSessionGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test successful playback proxy URL.""" + reolink_connect.get_vod_source.return_value = (TEST_MIME_TYPE_MP4, TEST_URL) + + mock_session = get_mock_session() + + with patch( + "homeassistant.components.reolink.views.async_get_clientsession", + return_value=mock_session, + ): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + caplog.set_level(logging.DEBUG) + + proxy_url = async_generate_playback_proxy_url( + config_entry.entry_id, + 
TEST_CHANNEL, + TEST_FILE_NAME_MP4, + TEST_STREAM, + TEST_VOD_TYPE, + ) + + http_client = await hass_client() + response = cast(ClientResponse, await http_client.get(proxy_url)) + + assert await response.content.read() == b"testtest" + assert response.status == 200 + + +async def test_proxy_get_source_error( + hass: HomeAssistant, + reolink_connect: MagicMock, + config_entry: MockConfigEntry, + hass_client: ClientSessionGenerator, +) -> None: + """Test error while getting source for playback proxy URL.""" + reolink_connect.get_vod_source.side_effect = ReolinkError(TEST_ERROR) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + proxy_url = async_generate_playback_proxy_url( + config_entry.entry_id, + TEST_CHANNEL, + TEST_FILE_NAME_MP4, + TEST_STREAM, + TEST_VOD_TYPE, + ) + + http_client = await hass_client() + response = await http_client.get(proxy_url) + + assert await response.content.read() == bytes(TEST_ERROR, "utf-8") + assert response.status == HTTPStatus.BAD_REQUEST + reolink_connect.get_vod_source.side_effect = None + + +async def test_proxy_invalid_config_entry_id( + hass: HomeAssistant, + reolink_connect: MagicMock, + config_entry: MockConfigEntry, + hass_client: ClientSessionGenerator, +) -> None: + """Test config entry id not found for playback proxy URL.""" + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + proxy_url = async_generate_playback_proxy_url( + "wrong_config_id", + TEST_CHANNEL, + TEST_FILE_NAME_MP4, + TEST_STREAM, + TEST_VOD_TYPE, + ) + + http_client = await hass_client() + response = await http_client.get(proxy_url) + + assert await response.content.read() == bytes( + "Reolink playback proxy could not find config entry id: wrong_config_id", + "utf-8", + ) + assert response.status == HTTPStatus.BAD_REQUEST + + +async def test_playback_proxy_timeout( + hass: HomeAssistant, + reolink_connect: MagicMock, + config_entry: MockConfigEntry, + hass_client: ClientSessionGenerator, +) -> None: + """Test playback proxy URL with a timeout in the second chunk.""" + reolink_connect.get_vod_source.return_value = (TEST_MIME_TYPE_MP4, TEST_URL) + + mock_session = get_mock_session([b"test", TimeoutError()], 4) + + with patch( + "homeassistant.components.reolink.views.async_get_clientsession", + return_value=mock_session, + ): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + proxy_url = async_generate_playback_proxy_url( + config_entry.entry_id, + TEST_CHANNEL, + TEST_FILE_NAME_MP4, + TEST_STREAM, + TEST_VOD_TYPE, + ) + + http_client = await hass_client() + response = cast(ClientResponse, await http_client.get(proxy_url)) + + assert await response.content.read() == b"test" + assert response.status == 200 + + +async def test_playback_wrong_content( + hass: HomeAssistant, + reolink_connect: MagicMock, + config_entry: MockConfigEntry, + hass_client: ClientSessionGenerator, +) -> None: + """Test playback proxy URL with a wrong content type in the response.""" + reolink_connect.get_vod_source.return_value = (TEST_MIME_TYPE_MP4, TEST_URL) + + mock_session = get_mock_session(content_type="video/x-flv") + + with patch( + "homeassistant.components.reolink.views.async_get_clientsession", + return_value=mock_session, + ): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + proxy_url = async_generate_playback_proxy_url( + config_entry.entry_id, + 
TEST_CHANNEL, + TEST_FILE_NAME_MP4, + TEST_STREAM, + TEST_VOD_TYPE, + ) + + http_client = await hass_client() + response = cast(ClientResponse, await http_client.get(proxy_url)) + + assert response.status == HTTPStatus.BAD_REQUEST + + +async def test_playback_connect_error( + hass: HomeAssistant, + reolink_connect: MagicMock, + config_entry: MockConfigEntry, + hass_client: ClientSessionGenerator, +) -> None: + """Test playback proxy URL with a connection error.""" + reolink_connect.get_vod_source.return_value = (TEST_MIME_TYPE_MP4, TEST_URL) + + mock_session = Mock() + mock_session.get = AsyncMock(side_effect=ClientConnectionError(TEST_ERROR)) + + with patch( + "homeassistant.components.reolink.views.async_get_clientsession", + return_value=mock_session, + ): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + proxy_url = async_generate_playback_proxy_url( + config_entry.entry_id, + TEST_CHANNEL, + TEST_FILE_NAME_MP4, + TEST_STREAM, + TEST_VOD_TYPE, + ) + + http_client = await hass_client() + response = cast(ClientResponse, await http_client.get(proxy_url)) + + assert response.status == HTTPStatus.BAD_REQUEST From 7ea7178aa91478d4e9437f8ea72ecb0c159a7ca2 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 3 Jan 2025 14:16:05 +0100 Subject: [PATCH 1189/1198] Simplify error handling when creating backup (#134528) --- homeassistant/components/backup/manager.py | 36 ++++++++-------------- 1 file changed, 12 insertions(+), 24 deletions(-) diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index 4d509003a21..2fbd5014847 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -800,12 +800,10 @@ class BackupManager: """Finish a backup.""" if TYPE_CHECKING: assert self._backup_task is not None + backup_success = False try: written_backup = await self._backup_task except Exception as err: - self.async_on_backup_event( - CreateBackupEvent(stage=None, state=CreateBackupState.FAILED) - ) if with_automatic_settings: self._update_issue_backup_failed() @@ -831,33 +829,15 @@ class BackupManager: agent_ids=agent_ids, open_stream=written_backup.open_stream, ) - except BaseException: - self.async_on_backup_event( - CreateBackupEvent(stage=None, state=CreateBackupState.FAILED) - ) - raise # manager or unexpected error finally: - try: - await written_backup.release_stream() - except Exception: - self.async_on_backup_event( - CreateBackupEvent(stage=None, state=CreateBackupState.FAILED) - ) - raise + await written_backup.release_stream() self.known_backups.add(written_backup.backup, agent_errors) - if agent_errors: - self.async_on_backup_event( - CreateBackupEvent(stage=None, state=CreateBackupState.FAILED) - ) - else: + if not agent_errors: if with_automatic_settings: # create backup was successful, update last_completed_automatic_backup self.config.data.last_completed_automatic_backup = dt_util.now() self.store.save() - - self.async_on_backup_event( - CreateBackupEvent(stage=None, state=CreateBackupState.COMPLETED) - ) + backup_success = True if with_automatic_settings: self._update_issue_after_agent_upload(agent_errors) @@ -868,6 +848,14 @@ class BackupManager: finally: self._backup_task = None self._backup_finish_task = None + self.async_on_backup_event( + CreateBackupEvent( + stage=None, + state=CreateBackupState.COMPLETED + if backup_success + else CreateBackupState.FAILED, + ) + ) self.async_on_backup_event(IdleEvent()) async def 
async_restore_backup( From 9b8ed9643fd48da830c5336da9a27a99510aa1ba Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 3 Jan 2025 13:35:56 +0100 Subject: [PATCH 1190/1198] Add backup as after_dependency of frontend (#134534) --- homeassistant/components/frontend/manifest.json | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 33d1be3aad7..4b18330010a 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -1,6 +1,7 @@ { "domain": "frontend", "name": "Home Assistant Frontend", + "after_dependencies": ["backup"], "codeowners": ["@home-assistant/frontend"], "dependencies": [ "api", From c9f1fee6bb8a6f10d511c0a053a4db46a4be432a Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 3 Jan 2025 16:31:31 +0100 Subject: [PATCH 1191/1198] Set Ituran to silver (#134538) --- homeassistant/components/ituran/manifest.json | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/ituran/manifest.json b/homeassistant/components/ituran/manifest.json index 93860427a77..0cf20d3c6b2 100644 --- a/homeassistant/components/ituran/manifest.json +++ b/homeassistant/components/ituran/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/ituran", "integration_type": "hub", "iot_class": "cloud_polling", + "quality_scale": "silver", "requirements": ["pyituran==0.1.4"] } From 9c98125d20b316c8c3c5a6d4ecc666db0e872829 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 3 Jan 2025 14:44:24 +0100 Subject: [PATCH 1192/1198] Avoid early COMPLETED event when restoring backup (#134546) --- homeassistant/components/backup/manager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index 2fbd5014847..1910f8a55fb 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -1375,7 +1375,7 @@ class CoreBackupReaderWriter(BackupReaderWriter): ) await self._hass.async_add_executor_job(_write_restore_file) - await self._hass.services.async_call("homeassistant", "restart", {}) + await self._hass.services.async_call("homeassistant", "restart", blocking=True) def _generate_backup_id(date: str, name: str) -> str: From 962b880146ba72fbc88b329eb536efc5c629ab6d Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 3 Jan 2025 16:30:14 +0100 Subject: [PATCH 1193/1198] Log cloud backup agent file list (#134556) --- homeassistant/components/cloud/backup.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/homeassistant/components/cloud/backup.py b/homeassistant/components/cloud/backup.py index d21e28be50a..57145e52c44 100644 --- a/homeassistant/components/cloud/backup.py +++ b/homeassistant/components/cloud/backup.py @@ -5,6 +5,7 @@ from __future__ import annotations import base64 from collections.abc import AsyncIterator, Callable, Coroutine, Mapping import hashlib +import logging from typing import Any, Self from aiohttp import ClientError, ClientTimeout, StreamReader @@ -23,6 +24,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from .client import CloudClient from .const import DATA_CLOUD, DOMAIN, EVENT_CLOUD_EVENT +_LOGGER = logging.getLogger(__name__) _STORAGE_BACKUP = "backup" @@ -208,6 +210,7 @@ class CloudBackupAgent(BackupAgent): """List backups.""" try: backups = await async_files_list(self._cloud, 
storage_type=_STORAGE_BACKUP) + _LOGGER.debug("Cloud backups: %s", backups) except (ClientError, CloudError) as err: raise BackupAgentError("Failed to list backups") from err From b416ae1387cbfb7ae190c870ce31816263714cf8 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Fri, 3 Jan 2025 16:36:40 +0100 Subject: [PATCH 1194/1198] Update frontend to 20250103.0 (#134561) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 4b18330010a..2094f817dcd 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -21,5 +21,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20250102.0"] + "requirements": ["home-assistant-frontend==20250103.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index d8372ab6bc1..b07909e08eb 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -35,7 +35,7 @@ habluetooth==3.6.0 hass-nabucasa==0.87.0 hassil==2.1.0 home-assistant-bluetooth==1.13.0 -home-assistant-frontend==20250102.0 +home-assistant-frontend==20250103.0 home-assistant-intents==2025.1.1 httpx==0.27.2 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 0363d3d2650..68996b86ccb 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1134,7 +1134,7 @@ hole==0.8.0 holidays==0.63 # homeassistant.components.frontend -home-assistant-frontend==20250102.0 +home-assistant-frontend==20250103.0 # homeassistant.components.conversation home-assistant-intents==2025.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index ec70c179e15..273373c223e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -963,7 +963,7 @@ hole==0.8.0 holidays==0.63 # homeassistant.components.frontend -home-assistant-frontend==20250102.0 +home-assistant-frontend==20250103.0 # homeassistant.components.conversation home-assistant-intents==2025.1.1 From 46b283069906077470088a94e3595d85404fbbae Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 3 Jan 2025 15:41:14 +0000 Subject: [PATCH 1195/1198] Bump version to 2025.1.0b9 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index 5898c682d89..e8824f9dade 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -25,7 +25,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2025 MINOR_VERSION: Final = 1 -PATCH_VERSION: Final = "0b8" +PATCH_VERSION: Final = "0b9" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 1d6fbc8cefe..31e63101198 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2025.1.0b8" +version = "2025.1.0b9" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" From 03fd6a901b623d5beccccec2b4a9f531fe4275ac Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Fri, 3 Jan 2025 18:24:46 +0100 Subject: [PATCH 1196/1198] Cherry pick single file from #134020 to fix generic component tests (#134569) --- tests/components/generic/test_config_flow.py | 29 +++----------------- 1 file changed, 4 insertions(+), 25 deletions(-) diff --git a/tests/components/generic/test_config_flow.py b/tests/components/generic/test_config_flow.py index 4892496c486..9eee49619b5 100644 --- a/tests/components/generic/test_config_flow.py +++ b/tests/components/generic/test_config_flow.py @@ -30,7 +30,6 @@ from homeassistant.components.stream import ( CONF_RTSP_TRANSPORT, CONF_USE_WALLCLOCK_AS_TIMESTAMPS, ) -from homeassistant.components.stream.worker import StreamWorkerError from homeassistant.config_entries import ConfigEntryState, ConfigFlowResult from homeassistant.const import ( CONF_AUTHENTICATION, @@ -661,25 +660,6 @@ async def test_form_stream_other_error(hass: HomeAssistant, user_flow) -> None: await hass.async_block_till_done() -@respx.mock -@pytest.mark.usefixtures("fakeimg_png") -async def test_form_stream_worker_error( - hass: HomeAssistant, user_flow: ConfigFlowResult -) -> None: - """Test we handle a StreamWorkerError and pass the message through.""" - with patch( - "homeassistant.components.generic.config_flow.create_stream", - side_effect=StreamWorkerError("Some message"), - ): - result2 = await hass.config_entries.flow.async_configure( - user_flow["flow_id"], - TESTDATA, - ) - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"stream_source": "unknown_with_details"} - assert result2["description_placeholders"] == {"error": "Some message"} - - @respx.mock async def test_form_stream_permission_error( hass: HomeAssistant, fakeimgbytes_png: bytes, user_flow: ConfigFlowResult @@ -949,23 +929,22 @@ async def test_options_still_and_stream_not_provided( @respx.mock @pytest.mark.usefixtures("fakeimg_png") -async def test_form_options_stream_worker_error( +async def test_form_options_permission_error( hass: HomeAssistant, config_entry: MockConfigEntry ) -> None: - """Test we handle a StreamWorkerError and pass the message through.""" + """Test we handle a PermissionError and pass the message through.""" result = await hass.config_entries.options.async_init(config_entry.entry_id) with patch( "homeassistant.components.generic.config_flow.create_stream", - side_effect=StreamWorkerError("Some message"), + side_effect=PermissionError("Some message"), ): result2 = await hass.config_entries.options.async_configure( result["flow_id"], TESTDATA, ) assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"stream_source": "unknown_with_details"} - assert result2["description_placeholders"] == {"error": "Some message"} + assert result2["errors"] == {"stream_source": "stream_not_permitted"} @pytest.mark.usefixtures("fakeimg_png") From 7e1e63374fc0289e9b6659cdc77ee58ed2fa3166 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ab=C3=ADlio=20Costa?= Date: Fri, 3 Jan 2025 16:45:27 +0000 Subject: [PATCH 1197/1198] Bump whirlpool-sixth-sense to 0.18.11 (#134562) --- homeassistant/components/whirlpool/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/whirlpool/manifest.json b/homeassistant/components/whirlpool/manifest.json index 5618a3f61cb..b463a1a76f8 100644 --- a/homeassistant/components/whirlpool/manifest.json +++ 
b/homeassistant/components/whirlpool/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "cloud_push", "loggers": ["whirlpool"], - "requirements": ["whirlpool-sixth-sense==0.18.8"] + "requirements": ["whirlpool-sixth-sense==0.18.11"] } diff --git a/requirements_all.txt b/requirements_all.txt index 68996b86ccb..36025003d9d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3012,7 +3012,7 @@ webmin-xmlrpc==0.0.2 weheat==2024.12.22 # homeassistant.components.whirlpool -whirlpool-sixth-sense==0.18.8 +whirlpool-sixth-sense==0.18.11 # homeassistant.components.whois whois==0.9.27 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 273373c223e..03e594dcf53 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2416,7 +2416,7 @@ webmin-xmlrpc==0.0.2 weheat==2024.12.22 # homeassistant.components.whirlpool -whirlpool-sixth-sense==0.18.8 +whirlpool-sixth-sense==0.18.11 # homeassistant.components.whois whois==0.9.27 From ac4bd32137050c8f073838bdfb1916a40bd96131 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 3 Jan 2025 17:31:21 +0000 Subject: [PATCH 1198/1198] Bump version to 2025.1.0 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index e8824f9dade..5a088d36449 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -25,7 +25,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2025 MINOR_VERSION: Final = 1 -PATCH_VERSION: Final = "0b9" +PATCH_VERSION: Final = "0" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 31e63101198..c87e499155c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2025.1.0b9" +version = "2025.1.0" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst"
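
The input_datetime change in #134489 above hinges on Python truthiness: a timestamp of 0 (the Unix epoch) is falsy, so a guard written as "if timestamp:" silently discards it, while "if timestamp is not None:" accepts it. The following is a minimal, self-contained sketch of that difference; the function names are hypothetical stand-ins, not the Home Assistant implementation, and nothing beyond the standard library is assumed.

    from datetime import datetime, timezone

    def set_datetime_buggy(timestamp=None):
        # Truthiness guard: timestamp == 0 is falsy, so the epoch is ignored.
        if timestamp:
            return datetime.fromtimestamp(timestamp, tz=timezone.utc)
        return None

    def set_datetime_fixed(timestamp=None):
        # Explicit None check: 0 (1970-01-01T00:00:00+00:00) is accepted.
        if timestamp is not None:
            return datetime.fromtimestamp(timestamp, tz=timezone.utc)
        return None

    assert set_datetime_buggy(0) is None            # epoch silently dropped
    assert set_datetime_fixed(0) is not None        # epoch handled correctly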