diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml
index aa4bfc60c11..cdffcbe4d5b 100644
--- a/.github/workflows/builder.yml
+++ b/.github/workflows/builder.yml
@@ -324,7 +324,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Install Cosign
- uses: sigstore/cosign-installer@v3.7.0
+ uses: sigstore/cosign-installer@v3.8.0
with:
cosign-release: "v2.2.3"
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 863c861db75..2a9f1571830 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -975,6 +975,7 @@ jobs:
${cov_params[@]} \
-o console_output_style=count \
-p no:sugar \
+ --exclude-warning-annotations \
$(sed -n "${{ matrix.group }},1p" pytest_buckets.txt) \
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
- name: Upload pytest output
@@ -1098,6 +1099,7 @@ jobs:
-o console_output_style=count \
--durations=10 \
-p no:sugar \
+ --exclude-warning-annotations \
--dburl=mysql://root:password@127.0.0.1/homeassistant-test \
tests/components/history \
tests/components/logbook \
@@ -1228,6 +1230,7 @@ jobs:
--durations=0 \
--durations-min=10 \
-p no:sugar \
+ --exclude-warning-annotations \
--dburl=postgresql://postgres:password@127.0.0.1/homeassistant-test \
tests/components/history \
tests/components/logbook \
@@ -1374,6 +1377,7 @@ jobs:
--durations=0 \
--durations-min=1 \
-p no:sugar \
+ --exclude-warning-annotations \
tests/components/${{ matrix.group }} \
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
- name: Upload pytest output
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 805e3ac4dbd..a059710d3d7 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -8,7 +8,7 @@ repos:
- id: ruff-format
files: ^((homeassistant|pylint|script|tests)/.+)?[^/]+\.(py|pyi)$
- repo: https://github.com/codespell-project/codespell
- rev: v2.3.0
+ rev: v2.4.1
hooks:
- id: codespell
args:
diff --git a/CODEOWNERS b/CODEOWNERS
index 635f53d346f..e510eec6dfa 100644
--- a/CODEOWNERS
+++ b/CODEOWNERS
@@ -731,6 +731,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/intent/ @home-assistant/core @synesthesiam
/tests/components/intent/ @home-assistant/core @synesthesiam
/homeassistant/components/intesishome/ @jnimmo
+/homeassistant/components/iometer/ @MaestroOnICe
+/tests/components/iometer/ @MaestroOnICe
/homeassistant/components/ios/ @robbiet480
/tests/components/ios/ @robbiet480
/homeassistant/components/iotawatt/ @gtdiehl @jyavenard
@@ -765,8 +767,8 @@ build.json @home-assistant/supervisor
/tests/components/ituran/ @shmuelzon
/homeassistant/components/izone/ @Swamp-Ig
/tests/components/izone/ @Swamp-Ig
-/homeassistant/components/jellyfin/ @j-stienstra @ctalkington
-/tests/components/jellyfin/ @j-stienstra @ctalkington
+/homeassistant/components/jellyfin/ @RunC0deRun @ctalkington
+/tests/components/jellyfin/ @RunC0deRun @ctalkington
/homeassistant/components/jewish_calendar/ @tsvi
/tests/components/jewish_calendar/ @tsvi
/homeassistant/components/juicenet/ @jesserockz
diff --git a/Dockerfile b/Dockerfile
index 171d08731a9..19b2c97b181 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -13,7 +13,7 @@ ENV \
ARG QEMU_CPU
# Install uv
-RUN pip3 install uv==0.5.21
+RUN pip3 install uv==0.5.27
WORKDIR /usr/src
diff --git a/homeassistant/components/airgradient/__init__.py b/homeassistant/components/airgradient/__init__.py
index 3b27d6cda5e..8f7fd86847d 100644
--- a/homeassistant/components/airgradient/__init__.py
+++ b/homeassistant/components/airgradient/__init__.py
@@ -4,12 +4,11 @@ from __future__ import annotations
from airgradient import AirGradientClient
-from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
-from .coordinator import AirGradientCoordinator
+from .coordinator import AirGradientConfigEntry, AirGradientCoordinator
PLATFORMS: list[Platform] = [
Platform.BUTTON,
@@ -21,9 +20,6 @@ PLATFORMS: list[Platform] = [
]
-type AirGradientConfigEntry = ConfigEntry[AirGradientCoordinator]
-
-
async def async_setup_entry(hass: HomeAssistant, entry: AirGradientConfigEntry) -> bool:
"""Set up Airgradient from a config entry."""
@@ -31,7 +27,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirGradientConfigEntry)
entry.data[CONF_HOST], session=async_get_clientsession(hass)
)
- coordinator = AirGradientCoordinator(hass, client)
+ coordinator = AirGradientCoordinator(hass, entry, client)
await coordinator.async_config_entry_first_refresh()
diff --git a/homeassistant/components/airgradient/coordinator.py b/homeassistant/components/airgradient/coordinator.py
index d2fc2a9de1b..7484c7e85a9 100644
--- a/homeassistant/components/airgradient/coordinator.py
+++ b/homeassistant/components/airgradient/coordinator.py
@@ -4,18 +4,17 @@ from __future__ import annotations
from dataclasses import dataclass
from datetime import timedelta
-from typing import TYPE_CHECKING
from airgradient import AirGradientClient, AirGradientError, Config, Measures
+from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN, LOGGER
-if TYPE_CHECKING:
- from . import AirGradientConfigEntry
+type AirGradientConfigEntry = ConfigEntry[AirGradientCoordinator]
@dataclass
@@ -32,11 +31,17 @@ class AirGradientCoordinator(DataUpdateCoordinator[AirGradientData]):
config_entry: AirGradientConfigEntry
_current_version: str
- def __init__(self, hass: HomeAssistant, client: AirGradientClient) -> None:
+ def __init__(
+ self,
+ hass: HomeAssistant,
+ config_entry: AirGradientConfigEntry,
+ client: AirGradientClient,
+ ) -> None:
"""Initialize coordinator."""
super().__init__(
hass,
logger=LOGGER,
+ config_entry=config_entry,
name=f"AirGradient {client.host}",
update_interval=timedelta(minutes=1),
)
diff --git a/homeassistant/components/airq/manifest.json b/homeassistant/components/airq/manifest.json
index 1ae7da14875..d4a6e9c295f 100644
--- a/homeassistant/components/airq/manifest.json
+++ b/homeassistant/components/airq/manifest.json
@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["aioairq"],
- "requirements": ["aioairq==0.4.3"]
+ "requirements": ["aioairq==0.4.4"]
}
diff --git a/homeassistant/components/analytics_insights/manifest.json b/homeassistant/components/analytics_insights/manifest.json
index bf99d89e073..ab3c2e2fe24 100644
--- a/homeassistant/components/analytics_insights/manifest.json
+++ b/homeassistant/components/analytics_insights/manifest.json
@@ -7,6 +7,6 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["python_homeassistant_analytics"],
- "requirements": ["python-homeassistant-analytics==0.8.1"],
+ "requirements": ["python-homeassistant-analytics==0.9.0"],
"single_config_entry": true
}
diff --git a/homeassistant/components/anova/strings.json b/homeassistant/components/anova/strings.json
index bfe3a61282e..e9905e4cce5 100644
--- a/homeassistant/components/anova/strings.json
+++ b/homeassistant/components/anova/strings.json
@@ -39,7 +39,7 @@
"idle": "[%key:common::state::idle%]",
"cook": "Cooking",
"low_water": "Low water",
- "ota": "Ota",
+ "ota": "OTA update",
"provisioning": "Provisioning",
"high_temp": "High temperature",
"device_failure": "Device failure"
diff --git a/homeassistant/components/anthropic/conversation.py b/homeassistant/components/anthropic/conversation.py
index e45e849adf6..259d1295809 100644
--- a/homeassistant/components/anthropic/conversation.py
+++ b/homeassistant/components/anthropic/conversation.py
@@ -272,6 +272,7 @@ class AnthropicConversationEntity(
continue
tool_input = llm.ToolInput(
+ id=tool_call.id,
tool_name=tool_call.name,
tool_args=cast(dict[str, Any], tool_call.input),
)
diff --git a/homeassistant/components/apple_tv/config_flow.py b/homeassistant/components/apple_tv/config_flow.py
index 5c317755d05..76c4681a30d 100644
--- a/homeassistant/components/apple_tv/config_flow.py
+++ b/homeassistant/components/apple_tv/config_flow.py
@@ -134,7 +134,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
unique_id for said entry. When a new (zeroconf) service or device is
discovered, the identifier is first used to look up if it belongs to an
existing config entry. If that's the case, the unique_id from that entry is
- re-used, otherwise the newly discovered identifier is used instead.
+ reused, otherwise the newly discovered identifier is used instead.
"""
assert self.atv
all_identifiers = set(self.atv.all_identifiers)
diff --git a/homeassistant/components/aranet/manifest.json b/homeassistant/components/aranet/manifest.json
index ac45e352bb6..3131b00cda6 100644
--- a/homeassistant/components/aranet/manifest.json
+++ b/homeassistant/components/aranet/manifest.json
@@ -19,5 +19,5 @@
"documentation": "https://www.home-assistant.io/integrations/aranet",
"integration_type": "device",
"iot_class": "local_push",
- "requirements": ["aranet4==2.5.0"]
+ "requirements": ["aranet4==2.5.1"]
}
diff --git a/homeassistant/components/assist_pipeline/__init__.py b/homeassistant/components/assist_pipeline/__init__.py
index cc7ecc1c426..9a32821e3a0 100644
--- a/homeassistant/components/assist_pipeline/__init__.py
+++ b/homeassistant/components/assist_pipeline/__init__.py
@@ -9,6 +9,7 @@ import voluptuous as vol
from homeassistant.components import stt
from homeassistant.core import Context, HomeAssistant
+from homeassistant.helpers import chat_session
from homeassistant.helpers.typing import ConfigType
from .const import (
@@ -114,24 +115,25 @@ async def async_pipeline_from_audio_stream(
Raises PipelineNotFound if no pipeline is found.
"""
- pipeline_input = PipelineInput(
- conversation_id=conversation_id,
- device_id=device_id,
- stt_metadata=stt_metadata,
- stt_stream=stt_stream,
- wake_word_phrase=wake_word_phrase,
- conversation_extra_system_prompt=conversation_extra_system_prompt,
- run=PipelineRun(
- hass,
- context=context,
- pipeline=async_get_pipeline(hass, pipeline_id=pipeline_id),
- start_stage=start_stage,
- end_stage=end_stage,
- event_callback=event_callback,
- tts_audio_output=tts_audio_output,
- wake_word_settings=wake_word_settings,
- audio_settings=audio_settings or AudioSettings(),
- ),
- )
- await pipeline_input.validate()
- await pipeline_input.execute()
+ with chat_session.async_get_chat_session(hass, conversation_id) as session:
+ pipeline_input = PipelineInput(
+ conversation_id=session.conversation_id,
+ device_id=device_id,
+ stt_metadata=stt_metadata,
+ stt_stream=stt_stream,
+ wake_word_phrase=wake_word_phrase,
+ conversation_extra_system_prompt=conversation_extra_system_prompt,
+ run=PipelineRun(
+ hass,
+ context=context,
+ pipeline=async_get_pipeline(hass, pipeline_id=pipeline_id),
+ start_stage=start_stage,
+ end_stage=end_stage,
+ event_callback=event_callback,
+ tts_audio_output=tts_audio_output,
+ wake_word_settings=wake_word_settings,
+ audio_settings=audio_settings or AudioSettings(),
+ ),
+ )
+ await pipeline_input.validate()
+ await pipeline_input.execute()
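
A usage sketch of the `chat_session` pattern introduced above; `resolve_conversation_id` is a hypothetical caller, and the create/reset behaviour is taken from the comments elsewhere in this diff:

```python
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import chat_session


@callback
def resolve_conversation_id(hass: HomeAssistant, conversation_id: str | None) -> str:
    """Illustrative only: resolve (or create) the conversation ID via chat_session."""
    with chat_session.async_get_chat_session(hass, conversation_id) as session:
        # Per the comments in this diff: passing None creates a new session,
        # and an expired/unknown ID is replaced with a fresh one.
        return session.conversation_id
```
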
diff --git a/homeassistant/components/assist_pipeline/pipeline.py b/homeassistant/components/assist_pipeline/pipeline.py
index cfc7261410a..94e2b04d7ae 100644
--- a/homeassistant/components/assist_pipeline/pipeline.py
+++ b/homeassistant/components/assist_pipeline/pipeline.py
@@ -624,7 +624,7 @@ class PipelineRun:
return
pipeline_data.pipeline_debug[self.pipeline.id][self.id].events.append(event)
- def start(self, device_id: str | None) -> None:
+ def start(self, conversation_id: str, device_id: str | None) -> None:
"""Emit run start event."""
self._device_id = device_id
self._start_debug_recording_thread()
@@ -632,6 +632,7 @@ class PipelineRun:
data = {
"pipeline": self.pipeline.id,
"language": self.language,
+ "conversation_id": conversation_id,
}
if self.runner_data is not None:
data["runner_data"] = self.runner_data
@@ -1015,7 +1016,7 @@ class PipelineRun:
async def recognize_intent(
self,
intent_input: str,
- conversation_id: str | None,
+ conversation_id: str,
device_id: str | None,
conversation_extra_system_prompt: str | None,
) -> str:
@@ -1063,11 +1064,11 @@ class PipelineRun:
agent_id=self.intent_agent,
extra_system_prompt=conversation_extra_system_prompt,
)
- processed_locally = self.intent_agent == conversation.HOME_ASSISTANT_AGENT
- agent_id = user_input.agent_id
+ agent_id = self.intent_agent
+ processed_locally = agent_id == conversation.HOME_ASSISTANT_AGENT
intent_response: intent.IntentResponse | None = None
- if user_input.agent_id != conversation.HOME_ASSISTANT_AGENT:
+ if not processed_locally:
# Sentence triggers override conversation agent
if (
trigger_response_text
@@ -1105,9 +1106,8 @@ class PipelineRun:
speech: str = intent_response.speech.get("plain", {}).get(
"speech", ""
)
- chat_log.async_add_message(
- conversation.Content(
- role="assistant",
+ chat_log.async_add_assistant_content_without_tools(
+ conversation.AssistantContent(
agent_id=agent_id,
content=speech,
)
@@ -1409,12 +1409,15 @@ def _pipeline_debug_recording_thread_proc(
wav_writer.close()
-@dataclass
+@dataclass(kw_only=True)
class PipelineInput:
"""Input to a pipeline run."""
run: PipelineRun
+ conversation_id: str
+ """Identifier for the conversation."""
+
stt_metadata: stt.SpeechMetadata | None = None
"""Metadata of stt input audio. Required when start_stage = stt."""
@@ -1430,9 +1433,6 @@ class PipelineInput:
tts_input: str | None = None
"""Input for text-to-speech. Required when start_stage = tts."""
- conversation_id: str | None = None
- """Identifier for the conversation."""
-
conversation_extra_system_prompt: str | None = None
"""Extra prompt information for the conversation agent."""
@@ -1441,7 +1441,7 @@ class PipelineInput:
async def execute(self) -> None:
"""Run pipeline."""
- self.run.start(device_id=self.device_id)
+ self.run.start(conversation_id=self.conversation_id, device_id=self.device_id)
current_stage: PipelineStage | None = self.run.start_stage
stt_audio_buffer: list[EnhancedAudioChunk] = []
stt_processed_stream: AsyncIterable[EnhancedAudioChunk] | None = None
diff --git a/homeassistant/components/assist_pipeline/websocket_api.py b/homeassistant/components/assist_pipeline/websocket_api.py
index 69f917fcf83..d2d54a1b7c3 100644
--- a/homeassistant/components/assist_pipeline/websocket_api.py
+++ b/homeassistant/components/assist_pipeline/websocket_api.py
@@ -14,7 +14,11 @@ import voluptuous as vol
from homeassistant.components import conversation, stt, tts, websocket_api
from homeassistant.const import ATTR_DEVICE_ID, ATTR_SECONDS, MATCH_ALL
from homeassistant.core import HomeAssistant, callback
-from homeassistant.helpers import config_validation as cv, entity_registry as er
+from homeassistant.helpers import (
+ chat_session,
+ config_validation as cv,
+ entity_registry as er,
+)
from homeassistant.util import language as language_util
from .const import (
@@ -145,7 +149,6 @@ async def websocket_run(
# Arguments to PipelineInput
input_args: dict[str, Any] = {
- "conversation_id": msg.get("conversation_id"),
"device_id": msg.get("device_id"),
}
@@ -233,38 +236,42 @@ async def websocket_run(
audio_settings=audio_settings or AudioSettings(),
)
- pipeline_input = PipelineInput(**input_args)
+ with chat_session.async_get_chat_session(
+ hass, msg.get("conversation_id")
+ ) as session:
+ input_args["conversation_id"] = session.conversation_id
+ pipeline_input = PipelineInput(**input_args)
- try:
- await pipeline_input.validate()
- except PipelineError as error:
- # Report more specific error when possible
- connection.send_error(msg["id"], error.code, error.message)
- return
+ try:
+ await pipeline_input.validate()
+ except PipelineError as error:
+ # Report more specific error when possible
+ connection.send_error(msg["id"], error.code, error.message)
+ return
- # Confirm subscription
- connection.send_result(msg["id"])
+ # Confirm subscription
+ connection.send_result(msg["id"])
- run_task = hass.async_create_task(pipeline_input.execute())
+ run_task = hass.async_create_task(pipeline_input.execute())
- # Cancel pipeline if user unsubscribes
- connection.subscriptions[msg["id"]] = run_task.cancel
+ # Cancel pipeline if user unsubscribes
+ connection.subscriptions[msg["id"]] = run_task.cancel
- try:
- # Task contains a timeout
- async with asyncio.timeout(timeout):
- await run_task
- except TimeoutError:
- pipeline_input.run.process_event(
- PipelineEvent(
- PipelineEventType.ERROR,
- {"code": "timeout", "message": "Timeout running pipeline"},
+ try:
+ # Task contains a timeout
+ async with asyncio.timeout(timeout):
+ await run_task
+ except TimeoutError:
+ pipeline_input.run.process_event(
+ PipelineEvent(
+ PipelineEventType.ERROR,
+ {"code": "timeout", "message": "Timeout running pipeline"},
+ )
)
- )
- finally:
- if unregister_handler is not None:
- # Unregister binary handler
- unregister_handler()
+ finally:
+ if unregister_handler is not None:
+ # Unregister binary handler
+ unregister_handler()
@callback
diff --git a/homeassistant/components/assist_satellite/entity.py b/homeassistant/components/assist_satellite/entity.py
index 0229e0358b1..e43abb4539c 100644
--- a/homeassistant/components/assist_satellite/entity.py
+++ b/homeassistant/components/assist_satellite/entity.py
@@ -8,7 +8,7 @@ from dataclasses import dataclass
from enum import StrEnum
import logging
import time
-from typing import Any, Final, Literal, final
+from typing import Any, Literal, final
from homeassistant.components import conversation, media_source, stt, tts
from homeassistant.components.assist_pipeline import (
@@ -28,14 +28,12 @@ from homeassistant.components.tts import (
)
from homeassistant.core import Context, callback
from homeassistant.exceptions import HomeAssistantError
-from homeassistant.helpers import entity
+from homeassistant.helpers import chat_session, entity
from homeassistant.helpers.entity import EntityDescription
from .const import AssistSatelliteEntityFeature
from .errors import AssistSatelliteError, SatelliteBusyError
-_CONVERSATION_TIMEOUT_SEC: Final = 5 * 60 # 5 minutes
-
_LOGGER = logging.getLogger(__name__)
@@ -114,7 +112,6 @@ class AssistSatelliteEntity(entity.Entity):
_attr_vad_sensitivity_entity_id: str | None = None
_conversation_id: str | None = None
- _conversation_id_time: float | None = None
_run_has_tts: bool = False
_is_announcing = False
@@ -260,8 +257,27 @@ class AssistSatelliteEntity(entity.Entity):
else:
self._extra_system_prompt = start_message or None
+ with (
+ # Not passing in a conversation ID will force a new one to be created
+ chat_session.async_get_chat_session(self.hass) as session,
+ conversation.async_get_chat_log(self.hass, session) as chat_log,
+ ):
+ self._conversation_id = session.conversation_id
+
+ if start_message:
+ chat_log.async_add_assistant_content_without_tools(
+ conversation.AssistantContent(
+ agent_id=self.entity_id, content=start_message
+ )
+ )
+
try:
await self.async_start_conversation(announcement)
+ except Exception:
+ # Clear prompt on error
+ self._conversation_id = None
+ self._extra_system_prompt = None
+ raise
finally:
self._is_announcing = False
@@ -325,51 +341,52 @@ class AssistSatelliteEntity(entity.Entity):
assert self._context is not None
- # Reset conversation id if necessary
- if self._conversation_id_time and (
- (time.monotonic() - self._conversation_id_time) > _CONVERSATION_TIMEOUT_SEC
- ):
- self._conversation_id = None
- self._conversation_id_time = None
-
# Set entity state based on pipeline events
self._run_has_tts = False
assert self.platform.config_entry is not None
- self._pipeline_task = self.platform.config_entry.async_create_background_task(
- self.hass,
- async_pipeline_from_audio_stream(
- self.hass,
- context=self._context,
- event_callback=self._internal_on_pipeline_event,
- stt_metadata=stt.SpeechMetadata(
- language="", # set in async_pipeline_from_audio_stream
- format=stt.AudioFormats.WAV,
- codec=stt.AudioCodecs.PCM,
- bit_rate=stt.AudioBitRates.BITRATE_16,
- sample_rate=stt.AudioSampleRates.SAMPLERATE_16000,
- channel=stt.AudioChannels.CHANNEL_MONO,
- ),
- stt_stream=audio_stream,
- pipeline_id=self._resolve_pipeline(),
- conversation_id=self._conversation_id,
- device_id=device_id,
- tts_audio_output=self.tts_options,
- wake_word_phrase=wake_word_phrase,
- audio_settings=AudioSettings(
- silence_seconds=self._resolve_vad_sensitivity()
- ),
- start_stage=start_stage,
- end_stage=end_stage,
- conversation_extra_system_prompt=extra_system_prompt,
- ),
- f"{self.entity_id}_pipeline",
- )
- try:
- await self._pipeline_task
- finally:
- self._pipeline_task = None
+ with chat_session.async_get_chat_session(
+ self.hass, self._conversation_id
+ ) as session:
+ # Store the conversation ID. If it is no longer valid, get_chat_session will reset it
+ self._conversation_id = session.conversation_id
+ self._pipeline_task = (
+ self.platform.config_entry.async_create_background_task(
+ self.hass,
+ async_pipeline_from_audio_stream(
+ self.hass,
+ context=self._context,
+ event_callback=self._internal_on_pipeline_event,
+ stt_metadata=stt.SpeechMetadata(
+ language="", # set in async_pipeline_from_audio_stream
+ format=stt.AudioFormats.WAV,
+ codec=stt.AudioCodecs.PCM,
+ bit_rate=stt.AudioBitRates.BITRATE_16,
+ sample_rate=stt.AudioSampleRates.SAMPLERATE_16000,
+ channel=stt.AudioChannels.CHANNEL_MONO,
+ ),
+ stt_stream=audio_stream,
+ pipeline_id=self._resolve_pipeline(),
+ conversation_id=session.conversation_id,
+ device_id=device_id,
+ tts_audio_output=self.tts_options,
+ wake_word_phrase=wake_word_phrase,
+ audio_settings=AudioSettings(
+ silence_seconds=self._resolve_vad_sensitivity()
+ ),
+ start_stage=start_stage,
+ end_stage=end_stage,
+ conversation_extra_system_prompt=extra_system_prompt,
+ ),
+ f"{self.entity_id}_pipeline",
+ )
+ )
+
+ try:
+ await self._pipeline_task
+ finally:
+ self._pipeline_task = None
async def _cancel_running_pipeline(self) -> None:
"""Cancel the current pipeline if it's running."""
@@ -393,11 +410,6 @@ class AssistSatelliteEntity(entity.Entity):
self._set_state(AssistSatelliteState.LISTENING)
elif event.type is PipelineEventType.INTENT_START:
self._set_state(AssistSatelliteState.PROCESSING)
- elif event.type is PipelineEventType.INTENT_END:
- assert event.data is not None
- # Update timeout
- self._conversation_id_time = time.monotonic()
- self._conversation_id = event.data["intent_output"]["conversation_id"]
elif event.type is PipelineEventType.TTS_START:
# Wait until tts_response_finished is called to return to waiting state
self._run_has_tts = True
diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json
index 652f1a7b966..5e16a22af76 100644
--- a/homeassistant/components/august/manifest.json
+++ b/homeassistant/components/august/manifest.json
@@ -28,5 +28,5 @@
"documentation": "https://www.home-assistant.io/integrations/august",
"iot_class": "cloud_push",
"loggers": ["pubnub", "yalexs"],
- "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.6"]
+ "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.7"]
}
diff --git a/homeassistant/components/backup/__init__.py b/homeassistant/components/backup/__init__.py
index 86e5b95d196..71a4f5ea41a 100644
--- a/homeassistant/components/backup/__init__.py
+++ b/homeassistant/components/backup/__init__.py
@@ -26,15 +26,18 @@ from .manager import (
BackupReaderWriterError,
CoreBackupReaderWriter,
CreateBackupEvent,
+ CreateBackupStage,
+ CreateBackupState,
IdleEvent,
IncorrectPasswordError,
ManagerBackup,
NewBackup,
RestoreBackupEvent,
+ RestoreBackupStage,
RestoreBackupState,
WrittenBackup,
)
-from .models import AddonInfo, AgentBackup, Folder
+from .models import AddonInfo, AgentBackup, BackupNotFound, Folder
from .util import suggested_filename, suggested_filename_from_name_date
from .websocket import async_register_websocket_handlers
@@ -45,10 +48,13 @@ __all__ = [
"BackupAgentError",
"BackupAgentPlatformProtocol",
"BackupManagerError",
+ "BackupNotFound",
"BackupPlatformProtocol",
"BackupReaderWriter",
"BackupReaderWriterError",
"CreateBackupEvent",
+ "CreateBackupStage",
+ "CreateBackupState",
"Folder",
"IdleEvent",
"IncorrectPasswordError",
@@ -56,6 +62,7 @@ __all__ = [
"ManagerBackup",
"NewBackup",
"RestoreBackupEvent",
+ "RestoreBackupStage",
"RestoreBackupState",
"WrittenBackup",
"async_get_manager",
diff --git a/homeassistant/components/backup/agent.py b/homeassistant/components/backup/agent.py
index 297ccd6f685..9530f386c7b 100644
--- a/homeassistant/components/backup/agent.py
+++ b/homeassistant/components/backup/agent.py
@@ -11,13 +11,7 @@ from propcache.api import cached_property
from homeassistant.core import HomeAssistant, callback
-from .models import AgentBackup, BackupError
-
-
-class BackupAgentError(BackupError):
- """Base class for backup agent errors."""
-
- error_code = "backup_agent_error"
+from .models import AgentBackup, BackupAgentError
class BackupAgentUnreachableError(BackupAgentError):
@@ -27,12 +21,6 @@ class BackupAgentUnreachableError(BackupAgentError):
_message = "The backup agent is unreachable."
-class BackupNotFound(BackupAgentError):
- """Raised when a backup is not found."""
-
- error_code = "backup_not_found"
-
-
class BackupAgent(abc.ABC):
"""Backup agent interface."""
diff --git a/homeassistant/components/backup/backup.py b/homeassistant/components/backup/backup.py
index b6282186c06..c3a46a6ab1f 100644
--- a/homeassistant/components/backup/backup.py
+++ b/homeassistant/components/backup/backup.py
@@ -11,9 +11,9 @@ from typing import Any
from homeassistant.core import HomeAssistant
from homeassistant.helpers.hassio import is_hassio
-from .agent import BackupAgent, BackupNotFound, LocalBackupAgent
+from .agent import BackupAgent, LocalBackupAgent
from .const import DOMAIN, LOGGER
-from .models import AgentBackup
+from .models import AgentBackup, BackupNotFound
from .util import read_backup, suggested_filename
diff --git a/homeassistant/components/backup/http.py b/homeassistant/components/backup/http.py
index 6b06db4601d..58f44d4a449 100644
--- a/homeassistant/components/backup/http.py
+++ b/homeassistant/components/backup/http.py
@@ -21,6 +21,7 @@ from . import util
from .agent import BackupAgent
from .const import DATA_MANAGER
from .manager import BackupManager
+from .models import BackupNotFound
@callback
@@ -69,13 +70,16 @@ class DownloadBackupView(HomeAssistantView):
CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar"
}
- if not password or not backup.protected:
- return await self._send_backup_no_password(
- request, headers, backup_id, agent_id, agent, manager
+ try:
+ if not password or not backup.protected:
+ return await self._send_backup_no_password(
+ request, headers, backup_id, agent_id, agent, manager
+ )
+ return await self._send_backup_with_password(
+ hass, request, headers, backup_id, agent_id, password, agent, manager
)
- return await self._send_backup_with_password(
- hass, request, headers, backup_id, agent_id, password, agent, manager
- )
+ except BackupNotFound:
+ return Response(status=HTTPStatus.NOT_FOUND)
async def _send_backup_no_password(
self,
diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py
index 42b5f522ecd..25393a872cc 100644
--- a/homeassistant/components/backup/manager.py
+++ b/homeassistant/components/backup/manager.py
@@ -9,6 +9,7 @@ from dataclasses import dataclass, replace
from enum import StrEnum
import hashlib
import io
+from itertools import chain
import json
from pathlib import Path, PurePath
import shutil
@@ -50,7 +51,14 @@ from .const import (
EXCLUDE_FROM_BACKUP,
LOGGER,
)
-from .models import AgentBackup, BackupError, BackupManagerError, BaseBackup, Folder
+from .models import (
+ AgentBackup,
+ BackupError,
+ BackupManagerError,
+ BackupReaderWriterError,
+ BaseBackup,
+ Folder,
+)
from .store import BackupStore
from .util import (
AsyncIteratorReader,
@@ -274,12 +282,6 @@ class BackupReaderWriter(abc.ABC):
"""Get restore events after core restart."""
-class BackupReaderWriterError(BackupError):
- """Backup reader/writer error."""
-
- error_code = "backup_reader_writer_error"
-
-
class IncorrectPasswordError(BackupReaderWriterError):
"""Raised when the password is incorrect."""
@@ -826,7 +828,7 @@ class BackupManager:
password=None,
)
await written_backup.release_stream()
- self.known_backups.add(written_backup.backup, agent_errors)
+ self.known_backups.add(written_backup.backup, agent_errors, [])
return written_backup.backup.backup_id
async def async_create_backup(
@@ -950,12 +952,23 @@ class BackupManager:
with_automatic_settings: bool,
) -> NewBackup:
"""Initiate generating a backup."""
- if not agent_ids:
- raise BackupManagerError("At least one agent must be selected")
- if invalid_agents := [
+ unavailable_agents = [
agent_id for agent_id in agent_ids if agent_id not in self.backup_agents
- ]:
- raise BackupManagerError(f"Invalid agents selected: {invalid_agents}")
+ ]
+ if not (
+ available_agents := [
+ agent_id for agent_id in agent_ids if agent_id in self.backup_agents
+ ]
+ ):
+ raise BackupManagerError(
+ f"At least one available backup agent must be selected, got {agent_ids}"
+ )
+ if unavailable_agents:
+ LOGGER.warning(
+ "Backup agents %s are not available, will backupp to %s",
+ unavailable_agents,
+ available_agents,
+ )
if include_all_addons and include_addons:
raise BackupManagerError(
"Cannot include all addons and specify specific addons"
@@ -972,7 +985,7 @@ class BackupManager:
new_backup,
self._backup_task,
) = await self._reader_writer.async_create_backup(
- agent_ids=agent_ids,
+ agent_ids=available_agents,
backup_name=backup_name,
extra_metadata=extra_metadata
| {
@@ -991,7 +1004,9 @@ class BackupManager:
raise BackupManagerError(str(err)) from err
backup_finish_task = self._backup_finish_task = self.hass.async_create_task(
- self._async_finish_backup(agent_ids, with_automatic_settings, password),
+ self._async_finish_backup(
+ available_agents, unavailable_agents, with_automatic_settings, password
+ ),
name="backup_manager_finish_backup",
)
if not raise_task_error:
@@ -1008,7 +1023,11 @@ class BackupManager:
return new_backup
async def _async_finish_backup(
- self, agent_ids: list[str], with_automatic_settings: bool, password: str | None
+ self,
+ available_agents: list[str],
+ unavailable_agents: list[str],
+ with_automatic_settings: bool,
+ password: str | None,
) -> None:
"""Finish a backup."""
if TYPE_CHECKING:
@@ -1027,7 +1046,7 @@ class BackupManager:
LOGGER.debug(
"Generated new backup with backup_id %s, uploading to agents %s",
written_backup.backup.backup_id,
- agent_ids,
+ available_agents,
)
self.async_on_backup_event(
CreateBackupEvent(
@@ -1040,13 +1059,15 @@ class BackupManager:
try:
agent_errors = await self._async_upload_backup(
backup=written_backup.backup,
- agent_ids=agent_ids,
+ agent_ids=available_agents,
open_stream=written_backup.open_stream,
password=password,
)
finally:
await written_backup.release_stream()
- self.known_backups.add(written_backup.backup, agent_errors)
+ self.known_backups.add(
+ written_backup.backup, agent_errors, unavailable_agents
+ )
if not agent_errors:
if with_automatic_settings:
# create backup was successful, update last_completed_automatic_backup
@@ -1055,7 +1076,7 @@ class BackupManager:
backup_success = True
if with_automatic_settings:
- self._update_issue_after_agent_upload(agent_errors)
+ self._update_issue_after_agent_upload(agent_errors, unavailable_agents)
# delete old backups more numerous than copies
# try this regardless of agent errors above
await delete_backups_exceeding_configured_count(self)
@@ -1215,10 +1236,10 @@ class BackupManager:
)
def _update_issue_after_agent_upload(
- self, agent_errors: dict[str, Exception]
+ self, agent_errors: dict[str, Exception], unavailable_agents: list[str]
) -> None:
"""Update issue registry after a backup is uploaded to agents."""
- if not agent_errors:
+ if not agent_errors and not unavailable_agents:
ir.async_delete_issue(self.hass, DOMAIN, "automatic_backup_failed")
return
ir.async_create_issue(
@@ -1232,7 +1253,13 @@ class BackupManager:
translation_key="automatic_backup_failed_upload_agents",
translation_placeholders={
"failed_agents": ", ".join(
- self.backup_agents[agent_id].name for agent_id in agent_errors
+ chain(
+ (
+ self.backup_agents[agent_id].name
+ for agent_id in agent_errors
+ ),
+ unavailable_agents,
+ )
)
},
)
@@ -1301,11 +1328,12 @@ class KnownBackups:
self,
backup: AgentBackup,
agent_errors: dict[str, Exception],
+ unavailable_agents: list[str],
) -> None:
"""Add a backup."""
self._backups[backup.backup_id] = KnownBackup(
backup_id=backup.backup_id,
- failed_agent_ids=list(agent_errors),
+ failed_agent_ids=list(chain(agent_errors, unavailable_agents)),
)
self._manager.store.save()
@@ -1411,7 +1439,11 @@ class CoreBackupReaderWriter(BackupReaderWriter):
manager = self._hass.data[DATA_MANAGER]
agent_config = manager.config.data.agents.get(self._local_agent_id)
- if agent_config and not agent_config.protected:
+ if (
+ self._local_agent_id in agent_ids
+ and agent_config
+ and not agent_config.protected
+ ):
password = None
backup = AgentBackup(
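
A standalone sketch of the agent partitioning added to the backup-initiation path above, in plain Python with a hypothetical helper name. Unavailable agents are warned about and then recorded as failed (see the `known_backups.add(..., unavailable_agents)` change), so the automatic-backup issue covers them as well:

```python
import logging

LOGGER = logging.getLogger(__name__)


def partition_backup_agents(
    agent_ids: list[str], known_agents: set[str]
) -> tuple[list[str], list[str]]:
    """Illustrative helper mirroring the manager logic above."""
    unavailable_agents = [a for a in agent_ids if a not in known_agents]
    available_agents = [a for a in agent_ids if a in known_agents]
    if not available_agents:
        raise ValueError(
            f"At least one available backup agent must be selected, got {agent_ids}"
        )
    if unavailable_agents:
        LOGGER.warning(
            "Backup agents %s are not available, will back up to %s",
            unavailable_agents,
            available_agents,
        )
    return available_agents, unavailable_agents


# e.g. (["backup.local"], ["cloud.missing"]) when only the local agent exists
print(partition_backup_agents(["backup.local", "cloud.missing"], {"backup.local"}))
```
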
diff --git a/homeassistant/components/backup/models.py b/homeassistant/components/backup/models.py
index 1543d577964..95c5ef9809d 100644
--- a/homeassistant/components/backup/models.py
+++ b/homeassistant/components/backup/models.py
@@ -41,12 +41,6 @@ class BaseBackup:
homeassistant_version: str | None # None if homeassistant_included is False
name: str
- def as_frontend_json(self) -> dict:
- """Return a dict representation of this backup for sending to frontend."""
- return {
- key: val for key, val in asdict(self).items() if key != "extra_metadata"
- }
-
@dataclass(frozen=True, kw_only=True)
class AgentBackup(BaseBackup):
@@ -83,7 +77,25 @@ class BackupError(HomeAssistantError):
error_code = "unknown"
+class BackupAgentError(BackupError):
+ """Base class for backup agent errors."""
+
+ error_code = "backup_agent_error"
+
+
class BackupManagerError(BackupError):
"""Backup manager error."""
error_code = "backup_manager_error"
+
+
+class BackupReaderWriterError(BackupError):
+ """Backup reader/writer error."""
+
+ error_code = "backup_reader_writer_error"
+
+
+class BackupNotFound(BackupAgentError, BackupManagerError):
+ """Raised when a backup is not found."""
+
+ error_code = "backup_not_found"
diff --git a/homeassistant/components/backup/util.py b/homeassistant/components/backup/util.py
index fbb13b4721a..9d8f6e815dc 100644
--- a/homeassistant/components/backup/util.py
+++ b/homeassistant/components/backup/util.py
@@ -4,6 +4,7 @@ from __future__ import annotations
import asyncio
from collections.abc import AsyncIterator, Callable, Coroutine
+from concurrent.futures import CancelledError, Future
import copy
from dataclasses import dataclass, replace
from io import BytesIO
@@ -12,6 +13,7 @@ import os
from pathlib import Path, PurePath
from queue import SimpleQueue
import tarfile
+import threading
from typing import IO, Any, Self, cast
import aiohttp
@@ -22,7 +24,6 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util import dt as dt_util
from homeassistant.util.json import JsonObjectType, json_loads_object
-from homeassistant.util.thread import ThreadWithException
from .const import BUF_SIZE, LOGGER
from .models import AddonInfo, AgentBackup, Folder
@@ -121,7 +122,7 @@ def read_backup(backup_path: Path) -> AgentBackup:
def suggested_filename_from_name_date(name: str, date_str: str) -> str:
"""Suggest a filename for the backup."""
date = dt_util.parse_datetime(date_str, raise_on_error=True)
- return "_".join(f"{name} - {date.strftime('%Y-%m-%d %H.%M %S%f')}.tar".split())
+ return "_".join(f"{name} {date.strftime('%Y-%m-%d %H.%M %S%f')}.tar".split())
def suggested_filename(backup: AgentBackup) -> str:
@@ -167,23 +168,38 @@ class AsyncIteratorReader:
def __init__(self, hass: HomeAssistant, stream: AsyncIterator[bytes]) -> None:
"""Initialize the wrapper."""
+ self._aborted = False
self._hass = hass
self._stream = stream
self._buffer: bytes | None = None
+ self._next_future: Future[bytes | None] | None = None
self._pos: int = 0
async def _next(self) -> bytes | None:
"""Get the next chunk from the iterator."""
return await anext(self._stream, None)
+ def abort(self) -> None:
+ """Abort the reader."""
+ self._aborted = True
+ if self._next_future is not None:
+ self._next_future.cancel()
+
def read(self, n: int = -1, /) -> bytes:
"""Read data from the iterator."""
result = bytearray()
while n < 0 or len(result) < n:
if not self._buffer:
- self._buffer = asyncio.run_coroutine_threadsafe(
+ self._next_future = asyncio.run_coroutine_threadsafe(
self._next(), self._hass.loop
- ).result()
+ )
+ if self._aborted:
+ self._next_future.cancel()
+ raise AbortCipher
+ try:
+ self._buffer = self._next_future.result()
+ except CancelledError as err:
+ raise AbortCipher from err
self._pos = 0
if not self._buffer:
# The stream is exhausted
@@ -205,9 +221,11 @@ class AsyncIteratorWriter:
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize the wrapper."""
+ self._aborted = False
self._hass = hass
self._pos: int = 0
self._queue: asyncio.Queue[bytes | None] = asyncio.Queue(maxsize=1)
+ self._write_future: Future[bytes | None] | None = None
def __aiter__(self) -> Self:
"""Return the iterator."""
@@ -219,13 +237,28 @@ class AsyncIteratorWriter:
return data
raise StopAsyncIteration
+ def abort(self) -> None:
+ """Abort the writer."""
+ self._aborted = True
+ if self._write_future is not None:
+ self._write_future.cancel()
+
def tell(self) -> int:
"""Return the current position in the iterator."""
return self._pos
def write(self, s: bytes, /) -> int:
"""Write data to the iterator."""
- asyncio.run_coroutine_threadsafe(self._queue.put(s), self._hass.loop).result()
+ self._write_future = asyncio.run_coroutine_threadsafe(
+ self._queue.put(s), self._hass.loop
+ )
+ if self._aborted:
+ self._write_future.cancel()
+ raise AbortCipher
+ try:
+ self._write_future.result()
+ except CancelledError as err:
+ raise AbortCipher from err
self._pos += len(s)
return len(s)
@@ -415,7 +448,9 @@ def _encrypt_backup(
class _CipherWorkerStatus:
done: asyncio.Event
error: Exception | None = None
- thread: ThreadWithException
+ reader: AsyncIteratorReader
+ thread: threading.Thread
+ writer: AsyncIteratorWriter
class _CipherBackupStreamer:
@@ -468,11 +503,13 @@ class _CipherBackupStreamer:
stream = await self._open_stream()
reader = AsyncIteratorReader(self._hass, stream)
writer = AsyncIteratorWriter(self._hass)
- worker = ThreadWithException(
+ worker = threading.Thread(
target=self._cipher_func,
args=[reader, writer, self._password, on_done, self.size(), self._nonces],
)
- worker_status = _CipherWorkerStatus(done=asyncio.Event(), thread=worker)
+ worker_status = _CipherWorkerStatus(
+ done=asyncio.Event(), reader=reader, thread=worker, writer=writer
+ )
self._workers.append(worker_status)
worker.start()
return writer
@@ -480,9 +517,8 @@ class _CipherBackupStreamer:
async def wait(self) -> None:
"""Wait for the worker threads to finish."""
for worker in self._workers:
- if not worker.thread.is_alive():
- continue
- worker.thread.raise_exc(AbortCipher)
+ worker.reader.abort()
+ worker.writer.abort()
await asyncio.gather(*(worker.done.wait() for worker in self._workers))
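
The replacement for `ThreadWithException` hinges on cancelling the worker's pending cross-thread future instead of injecting an exception into the thread. A self-contained sketch of that pattern (toy `QueueBridge`/`Aborted` names, not from the diff):

```python
import asyncio
from concurrent.futures import CancelledError, Future
import threading


class Aborted(Exception):
    """Raised inside the worker thread when the bridge is aborted."""


class QueueBridge:
    """Toy version of the abort pattern above: the worker thread blocks on a
    Future scheduled onto the event loop, and abort() cancels that Future so
    the thread unwinds cooperatively (no injected exceptions needed)."""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self.queue: asyncio.Queue[int | None] = asyncio.Queue(maxsize=1)
        self._pending: Future[None] | None = None
        self._aborted = False

    def abort(self) -> None:
        """Cancel whatever cross-thread call the worker is blocked on."""
        self._aborted = True
        if self._pending is not None:
            self._pending.cancel()

    def put_from_thread(self, item: int) -> None:
        """Called from the worker thread; mirrors AsyncIteratorWriter.write."""
        self._pending = asyncio.run_coroutine_threadsafe(
            self.queue.put(item), self._loop
        )
        if self._aborted:
            self._pending.cancel()
            raise Aborted
        try:
            self._pending.result()
        except CancelledError as err:
            raise Aborted from err


async def main() -> None:
    bridge = QueueBridge(asyncio.get_running_loop())
    worker = threading.Thread(target=bridge.put_from_thread, args=(1,))
    worker.start()
    print(await bridge.queue.get())  # -> 1; calling bridge.abort() instead
    worker.join()                    #    would unblock the worker with Aborted


asyncio.run(main())
```
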
diff --git a/homeassistant/components/backup/websocket.py b/homeassistant/components/backup/websocket.py
index 93dd81c3c14..b6d092e1913 100644
--- a/homeassistant/components/backup/websocket.py
+++ b/homeassistant/components/backup/websocket.py
@@ -15,7 +15,7 @@ from .manager import (
IncorrectPasswordError,
ManagerStateEvent,
)
-from .models import Folder
+from .models import BackupNotFound, Folder
@callback
@@ -57,7 +57,7 @@ async def handle_info(
"agent_errors": {
agent_id: str(err) for agent_id, err in agent_errors.items()
},
- "backups": [backup.as_frontend_json() for backup in backups.values()],
+ "backups": list(backups.values()),
"last_attempted_automatic_backup": manager.config.data.last_attempted_automatic_backup,
"last_completed_automatic_backup": manager.config.data.last_completed_automatic_backup,
"last_non_idle_event": manager.last_non_idle_event,
@@ -91,7 +91,7 @@ async def handle_details(
"agent_errors": {
agent_id: str(err) for agent_id, err in agent_errors.items()
},
- "backup": backup.as_frontend_json() if backup else None,
+ "backup": backup,
},
)
@@ -151,6 +151,8 @@ async def handle_restore(
restore_folders=msg.get("restore_folders"),
restore_homeassistant=msg["restore_homeassistant"],
)
+ except BackupNotFound:
+ connection.send_error(msg["id"], "backup_not_found", "Backup not found")
except IncorrectPasswordError:
connection.send_error(msg["id"], "password_incorrect", "Incorrect password")
else:
@@ -179,6 +181,8 @@ async def handle_can_decrypt_on_download(
agent_id=msg["agent_id"],
password=msg.get("password"),
)
+ except BackupNotFound:
+ connection.send_error(msg["id"], "backup_not_found", "Backup not found")
except IncorrectPasswordError:
connection.send_error(msg["id"], "password_incorrect", "Incorrect password")
except DecryptOnDowloadNotSupported:
diff --git a/homeassistant/components/bang_olufsen/event.py b/homeassistant/components/bang_olufsen/event.py
index 80ad4060c5e..99e5c8bb6fd 100644
--- a/homeassistant/components/bang_olufsen/event.py
+++ b/homeassistant/components/bang_olufsen/event.py
@@ -19,6 +19,8 @@ from .const import (
)
from .entity import BangOlufsenEntity
+PARALLEL_UPDATES = 0
+
async def async_setup_entry(
hass: HomeAssistant,
diff --git a/homeassistant/components/bluesound/__init__.py b/homeassistant/components/bluesound/__init__.py
index 6cf1957f799..37e83ce2c47 100644
--- a/homeassistant/components/bluesound/__init__.py
+++ b/homeassistant/components/bluesound/__init__.py
@@ -1,8 +1,6 @@
"""The bluesound component."""
-from dataclasses import dataclass
-
-from pyblu import Player, SyncStatus
+from pyblu import Player
from pyblu.errors import PlayerUnreachableError
from homeassistant.config_entries import ConfigEntry
@@ -14,7 +12,11 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import ConfigType
from .const import DOMAIN
-from .coordinator import BluesoundCoordinator
+from .coordinator import (
+ BluesoundConfigEntry,
+ BluesoundCoordinator,
+ BluesoundRuntimeData,
+)
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
@@ -23,18 +25,6 @@ PLATFORMS = [
]
-@dataclass
-class BluesoundRuntimeData:
- """Bluesound data class."""
-
- player: Player
- sync_status: SyncStatus
- coordinator: BluesoundCoordinator
-
-
-type BluesoundConfigEntry = ConfigEntry[BluesoundRuntimeData]
-
-
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Bluesound."""
return True
@@ -53,7 +43,7 @@ async def async_setup_entry(
except PlayerUnreachableError as ex:
raise ConfigEntryNotReady(f"Error connecting to {host}:{port}") from ex
- coordinator = BluesoundCoordinator(hass, player, sync_status)
+ coordinator = BluesoundCoordinator(hass, config_entry, player, sync_status)
await coordinator.async_config_entry_first_refresh()
config_entry.runtime_data = BluesoundRuntimeData(player, sync_status, coordinator)
diff --git a/homeassistant/components/bluesound/coordinator.py b/homeassistant/components/bluesound/coordinator.py
index e62f3ef96cf..ceaf0b392eb 100644
--- a/homeassistant/components/bluesound/coordinator.py
+++ b/homeassistant/components/bluesound/coordinator.py
@@ -12,6 +12,7 @@ import logging
from pyblu import Input, Player, Preset, Status, SyncStatus
from pyblu.errors import PlayerUnreachableError
+from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
@@ -21,6 +22,15 @@ NODE_OFFLINE_CHECK_TIMEOUT = timedelta(minutes=3)
PRESET_AND_INPUTS_INTERVAL = timedelta(minutes=15)
+@dataclass
+class BluesoundRuntimeData:
+ """Bluesound data class."""
+
+ player: Player
+ sync_status: SyncStatus
+ coordinator: BluesoundCoordinator
+
+
@dataclass
class BluesoundData:
"""Define a class to hold Bluesound data."""
@@ -31,6 +41,9 @@ class BluesoundData:
inputs: list[Input]
+type BluesoundConfigEntry = ConfigEntry[BluesoundRuntimeData]
+
+
def cancel_task(task: asyncio.Task) -> Callable[[], Coroutine[None, None, None]]:
"""Cancel a task."""
@@ -45,8 +58,14 @@ def cancel_task(task: asyncio.Task) -> Callable[[], Coroutine[None, None, None]]
class BluesoundCoordinator(DataUpdateCoordinator[BluesoundData]):
"""Define an object to hold Bluesound data."""
+ config_entry: BluesoundConfigEntry
+
def __init__(
- self, hass: HomeAssistant, player: Player, sync_status: SyncStatus
+ self,
+ hass: HomeAssistant,
+ config_entry: BluesoundConfigEntry,
+ player: Player,
+ sync_status: SyncStatus,
) -> None:
"""Initialize."""
self.player = player
@@ -55,12 +74,11 @@ class BluesoundCoordinator(DataUpdateCoordinator[BluesoundData]):
super().__init__(
hass,
logger=_LOGGER,
+ config_entry=config_entry,
name=sync_status.name,
)
async def _async_setup(self) -> None:
- assert self.config_entry is not None
-
preset = await self.player.presets()
inputs = await self.player.inputs()
status = await self.player.status()
diff --git a/homeassistant/components/bluesound/strings.json b/homeassistant/components/bluesound/strings.json
index c85014fedc3..b50c01a11bf 100644
--- a/homeassistant/components/bluesound/strings.json
+++ b/homeassistant/components/bluesound/strings.json
@@ -28,7 +28,7 @@
"services": {
"join": {
"name": "Join",
- "description": "Group player together.",
+ "description": "Groups players together under a single master speaker.",
"fields": {
"master": {
"name": "Master",
@@ -36,23 +36,23 @@
},
"entity_id": {
"name": "Entity",
- "description": "Name of entity that will coordinate the grouping. Platform dependent."
+ "description": "Name of entity that will group to master speaker. Platform dependent."
}
}
},
"unjoin": {
"name": "Unjoin",
- "description": "Unjoin the player from a group.",
+ "description": "Separates a player from a group.",
"fields": {
"entity_id": {
"name": "Entity",
- "description": "Name of entity that will be unjoined from their group. Platform dependent."
+ "description": "Name of entity that will be separated from their group. Platform dependent."
}
}
},
"set_sleep_timer": {
"name": "Set sleep timer",
- "description": "Set a Bluesound timer. It will increase timer in steps: 15, 30, 45, 60, 90, 0.",
+ "description": "Sets a Bluesound timer that will turn off the speaker. It will increase in steps: 15, 30, 45, 60, 90, 0.",
"fields": {
"entity_id": {
"name": "Entity",
@@ -62,7 +62,7 @@
},
"clear_sleep_timer": {
"name": "Clear sleep timer",
- "description": "Clear a Bluesound timer.",
+ "description": "Clears a Bluesound timer.",
"fields": {
"entity_id": {
"name": "Entity",
diff --git a/homeassistant/components/bluetooth/__init__.py b/homeassistant/components/bluetooth/__init__.py
index c423e9e747b..c46ef22803e 100644
--- a/homeassistant/components/bluetooth/__init__.py
+++ b/homeassistant/components/bluetooth/__init__.py
@@ -5,7 +5,7 @@ from __future__ import annotations
import datetime
import logging
import platform
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Any
from bleak_retry_connector import BleakSlotManager
from bluetooth_adapters import (
@@ -302,7 +302,6 @@ async def async_update_device(
entry: ConfigEntry,
adapter: str,
details: AdapterDetails,
- via_device_domain: str | None = None,
via_device_id: str | None = None,
) -> None:
"""Update device registry entry.
@@ -322,10 +321,11 @@ async def async_update_device(
sw_version=details.get(ADAPTER_SW_VERSION),
hw_version=details.get(ADAPTER_HW_VERSION),
)
- if via_device_id:
- device_registry.async_update_device(
- device_entry.id, via_device_id=via_device_id
- )
+ if via_device_id and (via_device_entry := device_registry.async_get(via_device_id)):
+ kwargs: dict[str, Any] = {"via_device_id": via_device_id}
+ if not device_entry.area_id and via_device_entry.area_id:
+ kwargs["area_id"] = via_device_entry.area_id
+ device_registry.async_update_device(device_entry.id, **kwargs)
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
@@ -360,7 +360,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry,
source_entry.title,
details,
- source_domain,
entry.data.get(CONF_SOURCE_DEVICE_ID),
)
return True
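
A plain-Python sketch of the new area-inheritance rule above: the adapter only picks up the via-device's area when it has none of its own (hypothetical helper name):

```python
from typing import Any


def via_device_update_kwargs(
    adapter_area_id: str | None, via_area_id: str | None, via_device_id: str
) -> dict[str, Any]:
    """Illustrative: inherit the via-device's area only when the adapter has none."""
    kwargs: dict[str, Any] = {"via_device_id": via_device_id}
    if not adapter_area_id and via_area_id:
        kwargs["area_id"] = via_area_id
    return kwargs


assert via_device_update_kwargs(None, "kitchen", "dev1") == {
    "via_device_id": "dev1",
    "area_id": "kitchen",
}
assert via_device_update_kwargs("office", "kitchen", "dev1") == {"via_device_id": "dev1"}
```
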
diff --git a/homeassistant/components/bluetooth/config_flow.py b/homeassistant/components/bluetooth/config_flow.py
index 5d03a9c9d0f..e76277306f5 100644
--- a/homeassistant/components/bluetooth/config_flow.py
+++ b/homeassistant/components/bluetooth/config_flow.py
@@ -140,7 +140,7 @@ class BluetoothConfigFlow(ConfigFlow, domain=DOMAIN):
title=adapter_title(adapter, details), data={}
)
- configured_addresses = self._async_current_ids()
+ configured_addresses = self._async_current_ids(include_ignore=False)
bluetooth_adapters = get_adapters()
await bluetooth_adapters.refresh()
self._adapters = bluetooth_adapters.adapters
@@ -155,12 +155,8 @@ class BluetoothConfigFlow(ConfigFlow, domain=DOMAIN):
and not (system == "Linux" and details[ADAPTER_ADDRESS] == DEFAULT_ADDRESS)
]
if not unconfigured_adapters:
- ignored_adapters = len(
- self._async_current_entries(include_ignore=True)
- ) - len(self._async_current_entries(include_ignore=False))
return self.async_abort(
reason="no_adapters",
- description_placeholders={"ignored_adapters": str(ignored_adapters)},
)
if len(unconfigured_adapters) == 1:
self._adapter = list(self._adapters)[0]
diff --git a/homeassistant/components/bluetooth/manifest.json b/homeassistant/components/bluetooth/manifest.json
index cd2530e1717..5d2b8ab6285 100644
--- a/homeassistant/components/bluetooth/manifest.json
+++ b/homeassistant/components/bluetooth/manifest.json
@@ -16,11 +16,11 @@
"quality_scale": "internal",
"requirements": [
"bleak==0.22.3",
- "bleak-retry-connector==3.8.0",
- "bluetooth-adapters==0.21.1",
+ "bleak-retry-connector==3.8.1",
+ "bluetooth-adapters==0.21.4",
"bluetooth-auto-recovery==1.4.2",
- "bluetooth-data-tools==1.23.3",
- "dbus-fast==2.31.0",
- "habluetooth==3.21.0"
+ "bluetooth-data-tools==1.23.4",
+ "dbus-fast==2.33.0",
+ "habluetooth==3.21.1"
]
}
diff --git a/homeassistant/components/bluetooth/strings.json b/homeassistant/components/bluetooth/strings.json
index 5f9a380d631..866b76c0985 100644
--- a/homeassistant/components/bluetooth/strings.json
+++ b/homeassistant/components/bluetooth/strings.json
@@ -23,7 +23,7 @@
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
- "no_adapters": "No unconfigured Bluetooth adapters found. There are {ignored_adapters} ignored adapters."
+ "no_adapters": "No unconfigured Bluetooth adapters found."
}
},
"options": {
diff --git a/homeassistant/components/bthome/manifest.json b/homeassistant/components/bthome/manifest.json
index c8577113804..4130606ff5c 100644
--- a/homeassistant/components/bthome/manifest.json
+++ b/homeassistant/components/bthome/manifest.json
@@ -20,5 +20,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/bthome",
"iot_class": "local_push",
- "requirements": ["bthome-ble==3.12.3"]
+ "requirements": ["bthome-ble==3.12.4"]
}
diff --git a/homeassistant/components/bthome/sensor.py b/homeassistant/components/bthome/sensor.py
index e46cbbea700..23a058b0b0c 100644
--- a/homeassistant/components/bthome/sensor.py
+++ b/homeassistant/components/bthome/sensor.py
@@ -67,6 +67,11 @@ SENSOR_DESCRIPTIONS = {
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
),
+ # Channel (-)
+ (BTHomeExtendedSensorDeviceClass.CHANNEL, None): SensorEntityDescription(
+ key=str(BTHomeExtendedSensorDeviceClass.CHANNEL),
+ state_class=SensorStateClass.MEASUREMENT,
+ ),
# Conductivity (µS/cm)
(
BTHomeSensorDeviceClass.CONDUCTIVITY,
diff --git a/homeassistant/components/cloud/http_api.py b/homeassistant/components/cloud/http_api.py
index 473f553593a..b1a845ef8b0 100644
--- a/homeassistant/components/cloud/http_api.py
+++ b/homeassistant/components/cloud/http_api.py
@@ -29,6 +29,7 @@ from homeassistant.components.google_assistant import helpers as google_helpers
from homeassistant.components.homeassistant import exposed_entities
from homeassistant.components.http import KEY_HASS, HomeAssistantView, require_admin
from homeassistant.components.http.data_validator import RequestDataValidator
+from homeassistant.components.system_health import get_info as get_system_health_info
from homeassistant.const import CLOUD_NEVER_EXPOSED_ENTITIES
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
@@ -107,6 +108,7 @@ def async_setup(hass: HomeAssistant) -> None:
hass.http.register_view(CloudRegisterView)
hass.http.register_view(CloudResendConfirmView)
hass.http.register_view(CloudForgotPasswordView)
+ hass.http.register_view(DownloadSupportPackageView)
_CLOUD_ERRORS.update(
{
@@ -389,6 +391,59 @@ class CloudForgotPasswordView(HomeAssistantView):
return self.json_message("ok")
+class DownloadSupportPackageView(HomeAssistantView):
+ """Download support package view."""
+
+ url = "/api/cloud/support_package"
+ name = "api:cloud:support_package"
+
+ def _generate_markdown(
+ self, hass_info: dict[str, Any], domains_info: dict[str, dict[str, str]]
+ ) -> str:
+ def get_domain_table_markdown(domain_info: dict[str, Any]) -> str:
+ if len(domain_info) == 0:
+ return "No information available\n"
+
+ markdown = ""
+ first = True
+ for key, value in domain_info.items():
+ markdown += f"{key} | {value}\n"
+ if first:
+ markdown += "--- | ---\n"
+ first = False
+ return markdown + "\n"
+
+ markdown = "## System Information\n\n"
+ markdown += get_domain_table_markdown(hass_info)
+
+ for domain, domain_info in domains_info.items():
+ domain_info_md = get_domain_table_markdown(domain_info)
+ markdown += (
+                f"<details><summary>{domain}</summary>\n\n"
+                f"{domain_info_md}"
+                "</details>\n\n"
+ )
+
+ return markdown
+
+ async def get(self, request: web.Request) -> web.Response:
+ """Download support package file."""
+
+ hass = request.app[KEY_HASS]
+ domain_health = await get_system_health_info(hass)
+
+ hass_info = domain_health.pop("homeassistant", {})
+ markdown = self._generate_markdown(hass_info, domain_health)
+
+ return web.Response(
+ body=markdown,
+ content_type="text/markdown",
+ headers={
+ "Content-Disposition": 'attachment; filename="support_package.md"'
+ },
+ )
+
+
@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "cloud/remove_data"})
@websocket_api.async_response
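
The table helper builds a two-column Markdown table whose first key/value row doubles as the header, and each domain section is wrapped in a collapsible `<details>` block. A standalone copy of the helper for illustration:

```python
def get_domain_table_markdown(domain_info: dict[str, str]) -> str:
    """Standalone copy of the table helper above, for illustration."""
    if len(domain_info) == 0:
        return "No information available\n"

    markdown = ""
    first = True
    for key, value in domain_info.items():
        markdown += f"{key} | {value}\n"
        if first:
            # The separator row goes after the first key/value pair, so that
            # pair renders as the table header.
            markdown += "--- | ---\n"
            first = False
    return markdown + "\n"


print(get_domain_table_markdown({"version": "2025.2.0", "arch": "aarch64"}))
# version | 2025.2.0
# --- | ---
# arch | aarch64
```
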
diff --git a/homeassistant/components/cloud/manifest.json b/homeassistant/components/cloud/manifest.json
index 0f415b1738a..8e8ff4335db 100644
--- a/homeassistant/components/cloud/manifest.json
+++ b/homeassistant/components/cloud/manifest.json
@@ -13,6 +13,6 @@
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["hass_nabucasa"],
- "requirements": ["hass-nabucasa==0.88.1"],
+ "requirements": ["hass-nabucasa==0.89.0"],
"single_config_entry": true
}
diff --git a/homeassistant/components/cloud/tts.py b/homeassistant/components/cloud/tts.py
index 645ff4f9e75..63f36554c65 100644
--- a/homeassistant/components/cloud/tts.py
+++ b/homeassistant/components/cloud/tts.py
@@ -38,6 +38,156 @@ ATTR_GENDER = "gender"
DEPRECATED_VOICES = {"XiaoxuanNeural": "XiaozhenNeural"}
SUPPORT_LANGUAGES = list(TTS_VOICES)
+DEFAULT_VOICES = {
+ "af-ZA": "AdriNeural",
+ "am-ET": "MekdesNeural",
+ "ar-AE": "FatimaNeural",
+ "ar-BH": "LailaNeural",
+ "ar-DZ": "AminaNeural",
+ "ar-EG": "SalmaNeural",
+ "ar-IQ": "RanaNeural",
+ "ar-JO": "SanaNeural",
+ "ar-KW": "NouraNeural",
+ "ar-LB": "LaylaNeural",
+ "ar-LY": "ImanNeural",
+ "ar-MA": "MounaNeural",
+ "ar-OM": "AbdullahNeural",
+ "ar-QA": "AmalNeural",
+ "ar-SA": "ZariyahNeural",
+ "ar-SY": "AmanyNeural",
+ "ar-TN": "ReemNeural",
+ "ar-YE": "MaryamNeural",
+ "az-AZ": "BabekNeural",
+ "bg-BG": "KalinaNeural",
+ "bn-BD": "NabanitaNeural",
+ "bn-IN": "TanishaaNeural",
+ "bs-BA": "GoranNeural",
+ "ca-ES": "JoanaNeural",
+ "cs-CZ": "VlastaNeural",
+ "cy-GB": "NiaNeural",
+ "da-DK": "ChristelNeural",
+ "de-AT": "IngridNeural",
+ "de-CH": "LeniNeural",
+ "de-DE": "KatjaNeural",
+ "el-GR": "AthinaNeural",
+ "en-AU": "NatashaNeural",
+ "en-CA": "ClaraNeural",
+ "en-GB": "LibbyNeural",
+ "en-HK": "YanNeural",
+ "en-IE": "EmilyNeural",
+ "en-IN": "NeerjaNeural",
+ "en-KE": "AsiliaNeural",
+ "en-NG": "EzinneNeural",
+ "en-NZ": "MollyNeural",
+ "en-PH": "RosaNeural",
+ "en-SG": "LunaNeural",
+ "en-TZ": "ImaniNeural",
+ "en-US": "JennyNeural",
+ "en-ZA": "LeahNeural",
+ "es-AR": "ElenaNeural",
+ "es-BO": "SofiaNeural",
+ "es-CL": "CatalinaNeural",
+ "es-CO": "SalomeNeural",
+ "es-CR": "MariaNeural",
+ "es-CU": "BelkysNeural",
+ "es-DO": "RamonaNeural",
+ "es-EC": "AndreaNeural",
+ "es-ES": "ElviraNeural",
+ "es-GQ": "TeresaNeural",
+ "es-GT": "MartaNeural",
+ "es-HN": "KarlaNeural",
+ "es-MX": "DaliaNeural",
+ "es-NI": "YolandaNeural",
+ "es-PA": "MargaritaNeural",
+ "es-PE": "CamilaNeural",
+ "es-PR": "KarinaNeural",
+ "es-PY": "TaniaNeural",
+ "es-SV": "LorenaNeural",
+ "es-US": "PalomaNeural",
+ "es-UY": "ValentinaNeural",
+ "es-VE": "PaolaNeural",
+ "et-EE": "AnuNeural",
+ "eu-ES": "AinhoaNeural",
+ "fa-IR": "DilaraNeural",
+ "fi-FI": "SelmaNeural",
+ "fil-PH": "BlessicaNeural",
+ "fr-BE": "CharlineNeural",
+ "fr-CA": "SylvieNeural",
+ "fr-CH": "ArianeNeural",
+ "fr-FR": "DeniseNeural",
+ "ga-IE": "OrlaNeural",
+ "gl-ES": "SabelaNeural",
+ "gu-IN": "DhwaniNeural",
+ "he-IL": "HilaNeural",
+ "hi-IN": "SwaraNeural",
+ "hr-HR": "GabrijelaNeural",
+ "hu-HU": "NoemiNeural",
+ "hy-AM": "AnahitNeural",
+ "id-ID": "GadisNeural",
+ "is-IS": "GudrunNeural",
+ "it-IT": "ElsaNeural",
+ "ja-JP": "NanamiNeural",
+ "jv-ID": "SitiNeural",
+ "ka-GE": "EkaNeural",
+ "kk-KZ": "AigulNeural",
+ "km-KH": "SreymomNeural",
+ "kn-IN": "SapnaNeural",
+ "ko-KR": "SunHiNeural",
+ "lo-LA": "KeomanyNeural",
+ "lt-LT": "OnaNeural",
+ "lv-LV": "EveritaNeural",
+ "mk-MK": "MarijaNeural",
+ "ml-IN": "SobhanaNeural",
+ "mn-MN": "BataaNeural",
+ "mr-IN": "AarohiNeural",
+ "ms-MY": "YasminNeural",
+ "mt-MT": "GraceNeural",
+ "my-MM": "NilarNeural",
+ "nb-NO": "IselinNeural",
+ "ne-NP": "HemkalaNeural",
+ "nl-BE": "DenaNeural",
+ "nl-NL": "ColetteNeural",
+ "pl-PL": "AgnieszkaNeural",
+ "ps-AF": "LatifaNeural",
+ "pt-BR": "FranciscaNeural",
+ "pt-PT": "RaquelNeural",
+ "ro-RO": "AlinaNeural",
+ "ru-RU": "SvetlanaNeural",
+ "si-LK": "ThiliniNeural",
+ "sk-SK": "ViktoriaNeural",
+ "sl-SI": "PetraNeural",
+ "so-SO": "UbaxNeural",
+ "sq-AL": "AnilaNeural",
+ "sr-RS": "SophieNeural",
+ "su-ID": "TutiNeural",
+ "sv-SE": "SofieNeural",
+ "sw-KE": "ZuriNeural",
+ "sw-TZ": "RehemaNeural",
+ "ta-IN": "PallaviNeural",
+ "ta-LK": "SaranyaNeural",
+ "ta-MY": "KaniNeural",
+ "ta-SG": "VenbaNeural",
+ "te-IN": "ShrutiNeural",
+ "th-TH": "AcharaNeural",
+ "tr-TR": "EmelNeural",
+ "uk-UA": "PolinaNeural",
+ "ur-IN": "GulNeural",
+ "ur-PK": "UzmaNeural",
+ "uz-UZ": "MadinaNeural",
+ "vi-VN": "HoaiMyNeural",
+ "wuu-CN": "XiaotongNeural",
+ "yue-CN": "XiaoMinNeural",
+ "zh-CN": "XiaoxiaoNeural",
+ "zh-CN-henan": "YundengNeural",
+ "zh-CN-liaoning": "XiaobeiNeural",
+ "zh-CN-shaanxi": "XiaoniNeural",
+ "zh-CN-shandong": "YunxiangNeural",
+ "zh-CN-sichuan": "YunxiNeural",
+ "zh-HK": "HiuMaanNeural",
+ "zh-TW": "HsiaoChenNeural",
+ "zu-ZA": "ThandoNeural",
+}
+
_LOGGER = logging.getLogger(__name__)
@@ -186,12 +336,13 @@ class CloudTTSEntity(TextToSpeechEntity):
"""Load TTS from Home Assistant Cloud."""
gender: Gender | str | None = options.get(ATTR_GENDER)
gender = handle_deprecated_gender(self.hass, gender)
- original_voice: str | None = options.get(ATTR_VOICE)
- if original_voice is None and language == self._language:
- original_voice = self._voice
+ original_voice: str = options.get(
+ ATTR_VOICE,
+ self._voice if language == self._language else DEFAULT_VOICES[language],
+ )
voice = handle_deprecated_voice(self.hass, original_voice)
if voice not in TTS_VOICES[language]:
- default_voice = TTS_VOICES[language][0]
+ default_voice = DEFAULT_VOICES[language]
_LOGGER.debug(
"Unsupported voice %s detected, falling back to default %s for %s",
voice,
@@ -266,12 +417,13 @@ class CloudProvider(Provider):
assert self.hass is not None
gender: Gender | str | None = options.get(ATTR_GENDER)
gender = handle_deprecated_gender(self.hass, gender)
- original_voice: str | None = options.get(ATTR_VOICE)
- if original_voice is None and language == self._language:
- original_voice = self._voice
+ original_voice: str = options.get(
+ ATTR_VOICE,
+ self._voice if language == self._language else DEFAULT_VOICES[language],
+ )
voice = handle_deprecated_voice(self.hass, original_voice)
if voice not in TTS_VOICES[language]:
- default_voice = TTS_VOICES[language][0]
+ default_voice = DEFAULT_VOICES[language]
_LOGGER.debug(
"Unsupported voice %s detected, falling back to default %s for %s",
voice,
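
The TTS change above alters the fallback order: an explicit voice option wins, the entity's configured voice is reused only when the requested language matches its own, and unsupported voices now fall back to the per-language DEFAULT_VOICES entry rather than the first entry of the supported list. A standalone sketch of that resolution logic; the voice tables here are truncated stand-ins for TTS_VOICES / DEFAULT_VOICES.

```python
TTS_VOICES = {
    "de-DE": ["KatjaNeural", "ConradNeural"],
    "en-US": ["JennyNeural", "GuyNeural"],
}
DEFAULT_VOICES = {"de-DE": "KatjaNeural", "en-US": "JennyNeural"}


def resolve_voice(
    options: dict[str, str],
    language: str,
    entity_language: str,
    entity_voice: str,
) -> str:
    """Pick the requested voice, falling back to the per-language default."""
    # 1. An explicit voice option wins.
    # 2. Otherwise reuse the configured voice only for the entity's own language.
    # 3. Otherwise take the language's default voice.
    voice = options.get(
        "voice",
        entity_voice if language == entity_language else DEFAULT_VOICES[language],
    )
    # Unsupported voices fall back to the explicit per-language default.
    if voice not in TTS_VOICES[language]:
        voice = DEFAULT_VOICES[language]
    return voice


assert resolve_voice({}, "en-US", "de-DE", "KatjaNeural") == "JennyNeural"
assert resolve_voice({"voice": "GuyNeural"}, "en-US", "de-DE", "KatjaNeural") == "GuyNeural"
assert resolve_voice({"voice": "Bogus"}, "de-DE", "de-DE", "ConradNeural") == "KatjaNeural"
```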
diff --git a/homeassistant/components/config/config_entries.py b/homeassistant/components/config/config_entries.py
index 4a070a87734..52e3346002e 100644
--- a/homeassistant/components/config/config_entries.py
+++ b/homeassistant/components/config/config_entries.py
@@ -302,7 +302,8 @@ def config_entries_progress(
[
flw
for flw in hass.config_entries.flow.async_progress()
- if flw["context"]["source"] != config_entries.SOURCE_USER
+ if flw["context"]["source"]
+ not in (config_entries.SOURCE_RECONFIGURE, config_entries.SOURCE_USER)
],
)
diff --git a/homeassistant/components/conversation/__init__.py b/homeassistant/components/conversation/__init__.py
index 13152beff51..69e738205c5 100644
--- a/homeassistant/components/conversation/__init__.py
+++ b/homeassistant/components/conversation/__init__.py
@@ -30,6 +30,16 @@ from .agent_manager import (
async_get_agent,
get_agent_manager,
)
+from .chat_log import (
+ AssistantContent,
+ ChatLog,
+ Content,
+ ConverseError,
+ SystemContent,
+ ToolResultContent,
+ UserContent,
+ async_get_chat_log,
+)
from .const import (
ATTR_AGENT_ID,
ATTR_CONVERSATION_ID,
@@ -48,13 +58,13 @@ from .default_agent import DefaultAgent, async_setup_default_agent
from .entity import ConversationEntity
from .http import async_setup as async_setup_conversation_http
from .models import AbstractConversationAgent, ConversationInput, ConversationResult
-from .session import ChatLog, Content, ConverseError, NativeContent, async_get_chat_log
from .trace import ConversationTraceEventType, async_conversation_trace_append
__all__ = [
"DOMAIN",
"HOME_ASSISTANT_AGENT",
"OLD_HOME_ASSISTANT_AGENT",
+ "AssistantContent",
"ChatLog",
"Content",
"ConversationEntity",
@@ -63,7 +73,9 @@ __all__ = [
"ConversationResult",
"ConversationTraceEventType",
"ConverseError",
- "NativeContent",
+ "SystemContent",
+ "ToolResultContent",
+ "UserContent",
"async_conversation_trace_append",
"async_converse",
"async_get_agent_info",
diff --git a/homeassistant/components/conversation/session.py b/homeassistant/components/conversation/chat_log.py
similarity index 67%
rename from homeassistant/components/conversation/session.py
rename to homeassistant/components/conversation/chat_log.py
index c32d61333a0..ad7a9d0ce9e 100644
--- a/homeassistant/components/conversation/session.py
+++ b/homeassistant/components/conversation/chat_log.py
@@ -2,19 +2,16 @@
from __future__ import annotations
-from collections.abc import Generator
+from collections.abc import AsyncGenerator, Generator
from contextlib import contextmanager
from dataclasses import dataclass, field, replace
-from datetime import datetime
import logging
-from typing import Literal
import voluptuous as vol
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError, TemplateError
from homeassistant.helpers import chat_session, intent, llm, template
-from homeassistant.util import dt as dt_util
from homeassistant.util.hass_dict import HassKey
from homeassistant.util.json import JsonObjectType
@@ -31,7 +28,7 @@ LOGGER = logging.getLogger(__name__)
def async_get_chat_log(
hass: HomeAssistant,
session: chat_session.ChatSession,
- user_input: ConversationInput,
+ user_input: ConversationInput | None = None,
) -> Generator[ChatLog]:
"""Return chat log for a specific chat session."""
all_history = hass.data.get(DATA_CHAT_HISTORY)
@@ -42,9 +39,24 @@ def async_get_chat_log(
history = all_history.get(session.conversation_id)
if history:
- history = replace(history, messages=history.messages.copy())
+ history = replace(history, content=history.content.copy())
else:
- history = ChatLog(hass, session.conversation_id, user_input.agent_id)
+ history = ChatLog(hass, session.conversation_id)
+
+ if user_input is not None:
+ history.async_add_user_content(UserContent(content=user_input.text))
+
+ last_message = history.content[-1]
+
+ yield history
+
+ if history.content[-1] is last_message:
+ LOGGER.debug(
+ "History opened but no assistant message was added, ignoring update"
+ )
+ return
+
+ if session.conversation_id not in all_history:
@callback
def do_cleanup() -> None:
@@ -53,22 +65,6 @@ def async_get_chat_log(
session.async_on_cleanup(do_cleanup)
- message: Content = Content(
- role="user",
- agent_id=user_input.agent_id,
- content=user_input.text,
- )
- history.async_add_message(message)
-
- yield history
-
- if history.messages[-1] is message:
- LOGGER.debug(
- "History opened but no assistant message was added, ignoring update"
- )
- return
-
- history.last_updated = dt_util.utcnow()
all_history[session.conversation_id] = history
@@ -94,63 +90,103 @@ class ConverseError(HomeAssistantError):
)
-@dataclass
-class Content:
+@dataclass(frozen=True)
+class SystemContent:
"""Base class for chat messages."""
- role: Literal["system", "assistant", "user"]
- agent_id: str | None
+ role: str = field(init=False, default="system")
content: str
@dataclass(frozen=True)
-class NativeContent[_NativeT]:
- """Native content."""
+class UserContent:
+    """User content."""
- role: str = field(init=False, default="native")
+ role: str = field(init=False, default="user")
+ content: str
+
+
+@dataclass(frozen=True)
+class AssistantContent:
+ """Assistant content."""
+
+ role: str = field(init=False, default="assistant")
agent_id: str
- content: _NativeT
+ content: str
+ tool_calls: list[llm.ToolInput] | None = None
+
+
+@dataclass(frozen=True)
+class ToolResultContent:
+ """Tool result content."""
+
+ role: str = field(init=False, default="tool_result")
+ agent_id: str
+ tool_call_id: str
+ tool_name: str
+ tool_result: JsonObjectType
+
+
+Content = SystemContent | UserContent | AssistantContent | ToolResultContent
@dataclass
-class ChatLog[_NativeT]:
+class ChatLog:
"""Class holding the chat history of a specific conversation."""
hass: HomeAssistant
conversation_id: str
- agent_id: str | None
- user_name: str | None = None
- messages: list[Content | NativeContent[_NativeT]] = field(
- default_factory=lambda: [Content(role="system", agent_id=None, content="")]
- )
+ content: list[Content] = field(default_factory=lambda: [SystemContent(content="")])
extra_system_prompt: str | None = None
llm_api: llm.APIInstance | None = None
- last_updated: datetime = field(default_factory=dt_util.utcnow)
@callback
- def async_add_message(self, message: Content | NativeContent[_NativeT]) -> None:
- """Process intent."""
- if message.role == "system":
- raise ValueError("Cannot add system messages to history")
- if message.role != "native" and self.messages[-1].role == message.role:
- raise ValueError("Cannot add two assistant or user messages in a row")
-
- self.messages.append(message)
+ def async_add_user_content(self, content: UserContent) -> None:
+ """Add user content to the log."""
+ self.content.append(content)
@callback
- def async_get_messages(
- self, agent_id: str | None = None
- ) -> list[Content | NativeContent[_NativeT]]:
- """Get messages for a specific agent ID.
+ def async_add_assistant_content_without_tools(
+ self, content: AssistantContent
+ ) -> None:
+ """Add assistant content to the log."""
+ if content.tool_calls is not None:
+ raise ValueError("Tool calls not allowed")
+ self.content.append(content)
- This will filter out any native message tied to other agent IDs.
- It can still include assistant/user messages generated by other agents.
- """
- return [
- message
- for message in self.messages
- if message.role != "native" or message.agent_id == agent_id
- ]
+ async def async_add_assistant_content(
+ self, content: AssistantContent
+ ) -> AsyncGenerator[ToolResultContent]:
+ """Add assistant content."""
+ self.content.append(content)
+
+ if content.tool_calls is None:
+ return
+
+ if self.llm_api is None:
+ raise ValueError("No LLM API configured")
+
+ for tool_input in content.tool_calls:
+ LOGGER.debug(
+ "Tool call: %s(%s)", tool_input.tool_name, tool_input.tool_args
+ )
+
+ try:
+ tool_result = await self.llm_api.async_call_tool(tool_input)
+ except (HomeAssistantError, vol.Invalid) as e:
+ tool_result = {"error": type(e).__name__}
+ if str(e):
+ tool_result["error_text"] = str(e)
+ LOGGER.debug("Tool response: %s", tool_result)
+
+ response_content = ToolResultContent(
+ agent_id=content.agent_id,
+ tool_call_id=tool_input.id,
+ tool_name=tool_input.tool_name,
+ tool_result=tool_result,
+ )
+ self.content.append(response_content)
+ yield response_content
async def async_update_llm_data(
self,
@@ -250,36 +286,16 @@ class ChatLog[_NativeT]:
prompt = "\n".join(prompt_parts)
self.llm_api = llm_api
- self.user_name = user_name
self.extra_system_prompt = extra_system_prompt
- self.messages[0] = Content(
- role="system",
- agent_id=user_input.agent_id,
- content=prompt,
- )
+ self.content[0] = SystemContent(content=prompt)
- LOGGER.debug("Prompt: %s", self.messages)
+ LOGGER.debug("Prompt: %s", self.content)
LOGGER.debug("Tools: %s", self.llm_api.tools if self.llm_api else None)
trace.async_conversation_trace_append(
trace.ConversationTraceEventType.AGENT_DETAIL,
{
- "messages": self.messages,
+ "messages": self.content,
"tools": self.llm_api.tools if self.llm_api else None,
},
)
-
- async def async_call_tool(self, tool_input: llm.ToolInput) -> JsonObjectType:
- """Invoke LLM tool for the configured LLM API."""
- if not self.llm_api:
- raise ValueError("No LLM API configured")
- LOGGER.debug("Tool call: %s(%s)", tool_input.tool_name, tool_input.tool_args)
-
- try:
- tool_response = await self.llm_api.async_call_tool(tool_input)
- except (HomeAssistantError, vol.Invalid) as e:
- tool_response = {"error": type(e).__name__}
- if str(e):
- tool_response["error_text"] = str(e)
- LOGGER.debug("Tool response: %s", tool_response)
- return tool_response
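
The chat_log refactor replaces the separate async_call_tool helper with an async generator: appending assistant content that carries tool calls yields one ToolResultContent per call while the log records both sides of the exchange. A minimal standalone sketch of that pattern follows; the classes are simplified stand-ins, not the Home Assistant ones, and the "tool" just echoes its arguments.

```python
import asyncio
from collections.abc import AsyncGenerator
from dataclasses import dataclass, field


@dataclass(frozen=True)
class ToolInput:
    id: str
    tool_name: str
    tool_args: dict


@dataclass(frozen=True)
class AssistantContent:
    agent_id: str
    content: str
    tool_calls: list[ToolInput] | None = None


@dataclass(frozen=True)
class ToolResultContent:
    agent_id: str
    tool_call_id: str
    tool_name: str
    tool_result: dict


@dataclass
class MiniChatLog:
    content: list = field(default_factory=list)

    async def async_add_assistant_content(
        self, content: AssistantContent
    ) -> AsyncGenerator[ToolResultContent, None]:
        """Record assistant content, run its tool calls, yield each result."""
        self.content.append(content)
        for tool_input in content.tool_calls or []:
            # A real implementation would dispatch to the configured LLM API.
            result = ToolResultContent(
                agent_id=content.agent_id,
                tool_call_id=tool_input.id,
                tool_name=tool_input.tool_name,
                tool_result={"echo": tool_input.tool_args},
            )
            self.content.append(result)
            yield result


async def main() -> None:
    log = MiniChatLog()
    call = ToolInput(id="1", tool_name="turn_on", tool_args={"entity": "light.desk"})
    async for result in log.async_add_assistant_content(
        AssistantContent(agent_id="demo", content="Turning on the light", tool_calls=[call])
    ):
        print(result.tool_name, result.tool_result)


asyncio.run(main())
```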
diff --git a/homeassistant/components/conversation/default_agent.py b/homeassistant/components/conversation/default_agent.py
index c4a8f7ea7eb..bd7450e5a0f 100644
--- a/homeassistant/components/conversation/default_agent.py
+++ b/homeassistant/components/conversation/default_agent.py
@@ -55,6 +55,7 @@ from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.event import async_track_state_added_domain
from homeassistant.util.json import JsonObjectType, json_loads_object
+from .chat_log import AssistantContent, async_get_chat_log
from .const import (
DATA_DEFAULT_ENTITY,
DEFAULT_EXPOSED_ATTRIBUTES,
@@ -63,7 +64,6 @@ from .const import (
)
from .entity import ConversationEntity
from .models import ConversationInput, ConversationResult
-from .session import Content, async_get_chat_log
from .trace import ConversationTraceEventType, async_conversation_trace_append
_LOGGER = logging.getLogger(__name__)
@@ -379,10 +379,9 @@ class DefaultAgent(ConversationEntity):
)
speech: str = response.speech.get("plain", {}).get("speech", "")
- chat_log.async_add_message(
- Content(
- role="assistant",
- agent_id=user_input.agent_id,
+ chat_log.async_add_assistant_content_without_tools(
+ AssistantContent(
+ agent_id=user_input.agent_id, # type: ignore[arg-type]
content=speech,
)
)
diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json
index 0485cb75fcb..2d4a8053d75 100644
--- a/homeassistant/components/conversation/manifest.json
+++ b/homeassistant/components/conversation/manifest.json
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "system",
"quality_scale": "internal",
- "requirements": ["hassil==2.2.0", "home-assistant-intents==2025.1.28"]
+ "requirements": ["hassil==2.2.3", "home-assistant-intents==2025.2.5"]
}
diff --git a/homeassistant/components/dhcp/manifest.json b/homeassistant/components/dhcp/manifest.json
index 0eb7e4a64fc..45af4f1b5dd 100644
--- a/homeassistant/components/dhcp/manifest.json
+++ b/homeassistant/components/dhcp/manifest.json
@@ -14,8 +14,8 @@
],
"quality_scale": "internal",
"requirements": [
- "aiodhcpwatcher==1.0.3",
- "aiodiscover==2.1.0",
+ "aiodhcpwatcher==1.1.0",
+ "aiodiscover==2.2.2",
"cached-ipaddress==0.8.0"
]
}
diff --git a/homeassistant/components/dwd_weather_warnings/sensor.py b/homeassistant/components/dwd_weather_warnings/sensor.py
index c6aa5727b74..0aaf1f2a801 100644
--- a/homeassistant/components/dwd_weather_warnings/sensor.py
+++ b/homeassistant/components/dwd_weather_warnings/sensor.py
@@ -3,7 +3,7 @@
Data is fetched from DWD:
https://rcccm.dwd.de/DE/wetter/warnungen_aktuell/objekt_einbindung/objekteinbindung.html
-Warnungen vor extremem Unwetter (Stufe 4) # codespell:ignore vor
+Warnungen vor extremem Unwetter (Stufe 4) # codespell:ignore vor,extremem
Unwetterwarnungen (Stufe 3)
Warnungen vor markantem Wetter (Stufe 2) # codespell:ignore vor
Wetterwarnungen (Stufe 1)
diff --git a/homeassistant/components/econet/manifest.json b/homeassistant/components/econet/manifest.json
index 6586af92d1f..bda52ee3d07 100644
--- a/homeassistant/components/econet/manifest.json
+++ b/homeassistant/components/econet/manifest.json
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/econet",
"iot_class": "cloud_push",
"loggers": ["paho_mqtt", "pyeconet"],
- "requirements": ["pyeconet==0.1.23"]
+ "requirements": ["pyeconet==0.1.26"]
}
diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json
index 7b05162867b..33a251c22dc 100644
--- a/homeassistant/components/ecovacs/manifest.json
+++ b/homeassistant/components/ecovacs/manifest.json
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
- "requirements": ["py-sucks==0.9.10", "deebot-client==12.0.0b0"]
+ "requirements": ["py-sucks==0.9.10", "deebot-client==12.0.0"]
}
diff --git a/homeassistant/components/electric_kiwi/__init__.py b/homeassistant/components/electric_kiwi/__init__.py
index de8d87553a3..825dbc54013 100644
--- a/homeassistant/components/electric_kiwi/__init__.py
+++ b/homeassistant/components/electric_kiwi/__init__.py
@@ -4,12 +4,16 @@ from __future__ import annotations
import aiohttp
from electrickiwi_api import ElectricKiwiApi
-from electrickiwi_api.exceptions import ApiException
+from electrickiwi_api.exceptions import ApiException, AuthException
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
-from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow
+from homeassistant.helpers import (
+ aiohttp_client,
+ config_entry_oauth2_flow,
+ entity_registry as er,
+)
from . import api
from .coordinator import (
@@ -44,7 +48,9 @@ async def async_setup_entry(
raise ConfigEntryNotReady from err
ek_api = ElectricKiwiApi(
- api.AsyncConfigEntryAuth(aiohttp_client.async_get_clientsession(hass), session)
+ api.ConfigEntryElectricKiwiAuth(
+ aiohttp_client.async_get_clientsession(hass), session
+ )
)
hop_coordinator = ElectricKiwiHOPDataCoordinator(hass, entry, ek_api)
account_coordinator = ElectricKiwiAccountDataCoordinator(hass, entry, ek_api)
@@ -53,6 +59,8 @@ async def async_setup_entry(
await ek_api.set_active_session()
await hop_coordinator.async_config_entry_first_refresh()
await account_coordinator.async_config_entry_first_refresh()
+ except AuthException as err:
+ raise ConfigEntryAuthFailed from err
except ApiException as err:
raise ConfigEntryNotReady from err
@@ -70,3 +78,53 @@ async def async_unload_entry(
) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
+
+
+async def async_migrate_entry(
+ hass: HomeAssistant, config_entry: ElectricKiwiConfigEntry
+) -> bool:
+ """Migrate old entry."""
+ if config_entry.version == 1 and config_entry.minor_version == 1:
+ implementation = (
+ await config_entry_oauth2_flow.async_get_config_entry_implementation(
+ hass, config_entry
+ )
+ )
+
+ session = config_entry_oauth2_flow.OAuth2Session(
+ hass, config_entry, implementation
+ )
+
+ ek_api = ElectricKiwiApi(
+ api.ConfigEntryElectricKiwiAuth(
+ aiohttp_client.async_get_clientsession(hass), session
+ )
+ )
+ try:
+ await ek_api.set_active_session()
+ connection_details = await ek_api.get_connection_details()
+ except AuthException:
+ config_entry.async_start_reauth(hass)
+ return False
+ except ApiException:
+ return False
+ unique_id = str(ek_api.customer_number)
+ identifier = ek_api.electricity.identifier
+ hass.config_entries.async_update_entry(
+ config_entry, unique_id=unique_id, minor_version=2
+ )
+ entity_registry = er.async_get(hass)
+ entity_entries = er.async_entries_for_config_entry(
+ entity_registry, config_entry_id=config_entry.entry_id
+ )
+
+ for entity in entity_entries:
+ assert entity.config_entry_id
+ entity_registry.async_update_entity(
+ entity.entity_id,
+ new_unique_id=entity.unique_id.replace(
+ f"{unique_id}_{connection_details.id}", f"{unique_id}_{identifier}"
+ ),
+ )
+
+ return True
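
The migration above rewrites entity unique IDs from the old connection-id suffix to the new electricity identifier. A standalone sketch of that rewrite, with the entity registry reduced to a plain dict and invented IDs:

```python
def migrate_unique_ids(
    unique_ids: dict[str, str], customer: str, old_connection_id: str, identifier: str
) -> dict[str, str]:
    """Return entity_id -> unique_id with the old prefix replaced."""
    return {
        entity_id: unique_id.replace(
            f"{customer}_{old_connection_id}", f"{customer}_{identifier}"
        )
        for entity_id, unique_id in unique_ids.items()
    }


before = {
    "select.hop": "123456_98765_hop_selector",
    "sensor.balance": "123456_98765_total_running_balance",
}
after = migrate_unique_ids(before, "123456", "98765", "00112233")
assert after["select.hop"] == "123456_00112233_hop_selector"
```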
diff --git a/homeassistant/components/electric_kiwi/api.py b/homeassistant/components/electric_kiwi/api.py
index dead8a6a3c0..9f7ff333378 100644
--- a/homeassistant/components/electric_kiwi/api.py
+++ b/homeassistant/components/electric_kiwi/api.py
@@ -2,17 +2,16 @@
from __future__ import annotations
-from typing import cast
-
from aiohttp import ClientSession
from electrickiwi_api import AbstractAuth
-from homeassistant.helpers import config_entry_oauth2_flow
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow
from .const import API_BASE_URL
-class AsyncConfigEntryAuth(AbstractAuth):
+class ConfigEntryElectricKiwiAuth(AbstractAuth):
"""Provide Electric Kiwi authentication tied to an OAuth2 based config entry."""
def __init__(
@@ -29,4 +28,21 @@ class AsyncConfigEntryAuth(AbstractAuth):
"""Return a valid access token."""
await self._oauth_session.async_ensure_token_valid()
- return cast(str, self._oauth_session.token["access_token"])
+ return str(self._oauth_session.token["access_token"])
+
+
+class ConfigFlowElectricKiwiAuth(AbstractAuth):
+ """Provide Electric Kiwi authentication tied to an OAuth2 based config flow."""
+
+ def __init__(
+ self,
+ hass: HomeAssistant,
+ token: str,
+ ) -> None:
+        """Initialize ConfigFlowElectricKiwiAuth."""
+ super().__init__(aiohttp_client.async_get_clientsession(hass), API_BASE_URL)
+ self._token = token
+
+ async def async_get_access_token(self) -> str:
+ """Return the token for the Electric Kiwi API."""
+ return self._token
diff --git a/homeassistant/components/electric_kiwi/config_flow.py b/homeassistant/components/electric_kiwi/config_flow.py
index b74ab4268e2..b83fd89c4c6 100644
--- a/homeassistant/components/electric_kiwi/config_flow.py
+++ b/homeassistant/components/electric_kiwi/config_flow.py
@@ -6,9 +6,14 @@ from collections.abc import Mapping
import logging
from typing import Any
-from homeassistant.config_entries import ConfigFlowResult
+from electrickiwi_api import ElectricKiwiApi
+from electrickiwi_api.exceptions import ApiException
+
+from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
+from homeassistant.const import CONF_NAME
from homeassistant.helpers import config_entry_oauth2_flow
+from . import api
from .const import DOMAIN, SCOPE_VALUES
@@ -17,6 +22,8 @@ class ElectricKiwiOauth2FlowHandler(
):
"""Config flow to handle Electric Kiwi OAuth2 authentication."""
+ VERSION = 1
+ MINOR_VERSION = 2
DOMAIN = DOMAIN
@property
@@ -40,12 +47,30 @@ class ElectricKiwiOauth2FlowHandler(
) -> ConfigFlowResult:
"""Dialog that informs the user that reauth is required."""
if user_input is None:
- return self.async_show_form(step_id="reauth_confirm")
+ return self.async_show_form(
+ step_id="reauth_confirm",
+ description_placeholders={CONF_NAME: self._get_reauth_entry().title},
+ )
return await self.async_step_user()
async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult:
"""Create an entry for Electric Kiwi."""
- existing_entry = await self.async_set_unique_id(DOMAIN)
- if existing_entry:
- return self.async_update_reload_and_abort(existing_entry, data=data)
- return await super().async_oauth_create_entry(data)
+ ek_api = ElectricKiwiApi(
+ api.ConfigFlowElectricKiwiAuth(self.hass, data["token"]["access_token"])
+ )
+
+ try:
+ session = await ek_api.get_active_session()
+ except ApiException:
+ return self.async_abort(reason="connection_error")
+
+ unique_id = str(session.data.customer_number)
+ await self.async_set_unique_id(unique_id)
+ if self.source == SOURCE_REAUTH:
+ self._abort_if_unique_id_mismatch(reason="wrong_account")
+ return self.async_update_reload_and_abort(
+ self._get_reauth_entry(), data=data
+ )
+
+ self._abort_if_unique_id_configured()
+ return self.async_create_entry(title=unique_id, data=data)
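
A standalone sketch of the account check the new async_oauth_create_entry performs: the customer number becomes the unique ID, reauth must land on the same account, and a second entry for an already-configured account is rejected. Entries are modeled as a plain set here and the numbers are invented.

```python
def finish_oauth(
    existing_unique_ids: set[str],
    customer_number: str,
    is_reauth: bool,
    reauth_unique_id: str | None = None,
) -> str:
    """Decide the flow outcome from the fetched customer number."""
    unique_id = str(customer_number)
    if is_reauth:
        if unique_id != reauth_unique_id:
            return "abort: wrong_account"
        return "update existing entry"
    if unique_id in existing_unique_ids:
        return "abort: already_configured"
    return f"create entry {unique_id}"


assert finish_oauth({"123456"}, "123456", False) == "abort: already_configured"
assert finish_oauth({"123456"}, "123456", True, "123456") == "update existing entry"
assert finish_oauth({"123456"}, "999999", True, "123456") == "abort: wrong_account"
```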
diff --git a/homeassistant/components/electric_kiwi/const.py b/homeassistant/components/electric_kiwi/const.py
index 907b6247172..c51422a7c72 100644
--- a/homeassistant/components/electric_kiwi/const.py
+++ b/homeassistant/components/electric_kiwi/const.py
@@ -8,4 +8,4 @@ OAUTH2_AUTHORIZE = "https://welcome.electrickiwi.co.nz/oauth/authorize"
OAUTH2_TOKEN = "https://welcome.electrickiwi.co.nz/oauth/token"
API_BASE_URL = "https://api.electrickiwi.co.nz"
-SCOPE_VALUES = "read_connection_detail read_billing_frequency read_account_running_balance read_consumption_summary read_consumption_averages read_hop_intervals_config read_hop_connection save_hop_connection read_session"
+SCOPE_VALUES = "read_customer_details read_connection_detail read_connection read_billing_address get_bill_address read_billing_frequency read_billing_details read_billing_bills read_billing_bill read_billing_bill_id read_billing_bill_file read_account_running_balance read_customer_account_summary read_consumption_summary download_consumption_file read_consumption_averages get_consumption_averages read_hop_intervals_config read_hop_intervals read_hop_connection read_hop_specific_connection save_hop_connection save_hop_specific_connection read_outage_contact get_outage_contact_info_for_icp read_session read_session_data_login"
diff --git a/homeassistant/components/electric_kiwi/coordinator.py b/homeassistant/components/electric_kiwi/coordinator.py
index 2065da5d668..635b55b2bc0 100644
--- a/homeassistant/components/electric_kiwi/coordinator.py
+++ b/homeassistant/components/electric_kiwi/coordinator.py
@@ -10,7 +10,7 @@ import logging
from electrickiwi_api import ElectricKiwiApi
from electrickiwi_api.exceptions import ApiException, AuthException
-from electrickiwi_api.model import AccountBalance, Hop, HopIntervals
+from electrickiwi_api.model import AccountSummary, Hop, HopIntervals
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
@@ -34,7 +34,7 @@ class ElectricKiwiRuntimeData:
type ElectricKiwiConfigEntry = ConfigEntry[ElectricKiwiRuntimeData]
-class ElectricKiwiAccountDataCoordinator(DataUpdateCoordinator[AccountBalance]):
+class ElectricKiwiAccountDataCoordinator(DataUpdateCoordinator[AccountSummary]):
"""ElectricKiwi Account Data object."""
def __init__(
@@ -51,13 +51,13 @@ class ElectricKiwiAccountDataCoordinator(DataUpdateCoordinator[AccountBalance]):
name="Electric Kiwi Account Data",
update_interval=ACCOUNT_SCAN_INTERVAL,
)
- self._ek_api = ek_api
+ self.ek_api = ek_api
- async def _async_update_data(self) -> AccountBalance:
+ async def _async_update_data(self) -> AccountSummary:
"""Fetch data from Account balance API endpoint."""
try:
async with asyncio.timeout(60):
- return await self._ek_api.get_account_balance()
+ return await self.ek_api.get_account_summary()
except AuthException as auth_err:
raise ConfigEntryAuthFailed from auth_err
except ApiException as api_err:
@@ -85,7 +85,7 @@ class ElectricKiwiHOPDataCoordinator(DataUpdateCoordinator[Hop]):
# Polling interval. Will only be polled if there are subscribers.
update_interval=HOP_SCAN_INTERVAL,
)
- self._ek_api = ek_api
+ self.ek_api = ek_api
self.hop_intervals: HopIntervals | None = None
def get_hop_options(self) -> dict[str, int]:
@@ -100,7 +100,7 @@ class ElectricKiwiHOPDataCoordinator(DataUpdateCoordinator[Hop]):
async def async_update_hop(self, hop_interval: int) -> Hop:
"""Update selected hop and data."""
try:
- self.async_set_updated_data(await self._ek_api.post_hop(hop_interval))
+ self.async_set_updated_data(await self.ek_api.post_hop(hop_interval))
except AuthException as auth_err:
raise ConfigEntryAuthFailed from auth_err
except ApiException as api_err:
@@ -118,7 +118,7 @@ class ElectricKiwiHOPDataCoordinator(DataUpdateCoordinator[Hop]):
try:
async with asyncio.timeout(60):
if self.hop_intervals is None:
- hop_intervals: HopIntervals = await self._ek_api.get_hop_intervals()
+ hop_intervals: HopIntervals = await self.ek_api.get_hop_intervals()
hop_intervals.intervals = OrderedDict(
filter(
lambda pair: pair[1].active == 1,
@@ -127,7 +127,7 @@ class ElectricKiwiHOPDataCoordinator(DataUpdateCoordinator[Hop]):
)
self.hop_intervals = hop_intervals
- return await self._ek_api.get_hop()
+ return await self.ek_api.get_hop()
except AuthException as auth_err:
raise ConfigEntryAuthFailed from auth_err
except ApiException as api_err:
diff --git a/homeassistant/components/electric_kiwi/manifest.json b/homeassistant/components/electric_kiwi/manifest.json
index 8ddb4c1af7c..9afe487d368 100644
--- a/homeassistant/components/electric_kiwi/manifest.json
+++ b/homeassistant/components/electric_kiwi/manifest.json
@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/electric_kiwi",
"integration_type": "hub",
"iot_class": "cloud_polling",
- "requirements": ["electrickiwi-api==0.8.5"]
+ "requirements": ["electrickiwi-api==0.9.12"]
}
diff --git a/homeassistant/components/electric_kiwi/select.py b/homeassistant/components/electric_kiwi/select.py
index fa111381612..30e02b5c5b9 100644
--- a/homeassistant/components/electric_kiwi/select.py
+++ b/homeassistant/components/electric_kiwi/select.py
@@ -53,8 +53,8 @@ class ElectricKiwiSelectHOPEntity(
"""Initialise the HOP selection entity."""
super().__init__(coordinator)
self._attr_unique_id = (
- f"{coordinator._ek_api.customer_number}" # noqa: SLF001
- f"_{coordinator._ek_api.connection_id}_{description.key}" # noqa: SLF001
+ f"{coordinator.ek_api.customer_number}"
+ f"_{coordinator.ek_api.electricity.identifier}_{description.key}"
)
self.entity_description = description
self.values_dict = coordinator.get_hop_options()
diff --git a/homeassistant/components/electric_kiwi/sensor.py b/homeassistant/components/electric_kiwi/sensor.py
index e070f9495c1..410d70808c3 100644
--- a/homeassistant/components/electric_kiwi/sensor.py
+++ b/homeassistant/components/electric_kiwi/sensor.py
@@ -6,7 +6,7 @@ from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime, timedelta
-from electrickiwi_api.model import AccountBalance, Hop
+from electrickiwi_api.model import AccountSummary, Hop
from homeassistant.components.sensor import (
SensorDeviceClass,
@@ -39,7 +39,15 @@ ATTR_HOP_PERCENTAGE = "hop_percentage"
class ElectricKiwiAccountSensorEntityDescription(SensorEntityDescription):
"""Describes Electric Kiwi sensor entity."""
- value_func: Callable[[AccountBalance], float | datetime]
+ value_func: Callable[[AccountSummary], float | datetime]
+
+
+def _get_hop_percentage(account_balance: AccountSummary) -> float:
+ """Return the hop percentage from account summary."""
+ if power := account_balance.services.get("power"):
+ if connection := power.connections[0]:
+ return float(connection.hop_percentage)
+ return 0.0
ACCOUNT_SENSOR_TYPES: tuple[ElectricKiwiAccountSensorEntityDescription, ...] = (
@@ -72,9 +80,7 @@ ACCOUNT_SENSOR_TYPES: tuple[ElectricKiwiAccountSensorEntityDescription, ...] = (
translation_key="hop_power_savings",
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
- value_func=lambda account_balance: float(
- account_balance.connections[0].hop_percentage
- ),
+ value_func=_get_hop_percentage,
),
)
@@ -165,8 +171,8 @@ class ElectricKiwiAccountEntity(
super().__init__(coordinator)
self._attr_unique_id = (
- f"{coordinator._ek_api.customer_number}" # noqa: SLF001
- f"_{coordinator._ek_api.connection_id}_{description.key}" # noqa: SLF001
+ f"{coordinator.ek_api.customer_number}"
+ f"_{coordinator.ek_api.electricity.identifier}_{description.key}"
)
self.entity_description = description
@@ -194,8 +200,8 @@ class ElectricKiwiHOPEntity(
super().__init__(coordinator)
self._attr_unique_id = (
- f"{coordinator._ek_api.customer_number}" # noqa: SLF001
- f"_{coordinator._ek_api.connection_id}_{description.key}" # noqa: SLF001
+ f"{coordinator.ek_api.customer_number}"
+ f"_{coordinator.ek_api.electricity.identifier}_{description.key}"
)
self.entity_description = description
diff --git a/homeassistant/components/electric_kiwi/strings.json b/homeassistant/components/electric_kiwi/strings.json
index 410d32909ba..5e0a2ef168d 100644
--- a/homeassistant/components/electric_kiwi/strings.json
+++ b/homeassistant/components/electric_kiwi/strings.json
@@ -21,7 +21,8 @@
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]",
"oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]",
- "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]"
+ "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]",
+ "connection_error": "[%key:common::config_flow::error::cannot_connect%]"
},
"create_entry": {
"default": "[%key:common::config_flow::create_entry::authenticated%]"
diff --git a/homeassistant/components/eq3btsmart/manifest.json b/homeassistant/components/eq3btsmart/manifest.json
index 3a55730c60f..0bc3ae55236 100644
--- a/homeassistant/components/eq3btsmart/manifest.json
+++ b/homeassistant/components/eq3btsmart/manifest.json
@@ -22,5 +22,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["eq3btsmart"],
- "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.6.0"]
+ "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.7.1"]
}
diff --git a/homeassistant/components/esphome/manifest.json b/homeassistant/components/esphome/manifest.json
index 9585be72c63..185f9ea5cf0 100644
--- a/homeassistant/components/esphome/manifest.json
+++ b/homeassistant/components/esphome/manifest.json
@@ -18,7 +18,7 @@
"requirements": [
"aioesphomeapi==29.0.0",
"esphome-dashboard-api==1.2.3",
- "bleak-esphome==2.6.0"
+ "bleak-esphome==2.7.1"
],
"zeroconf": ["_esphomelib._tcp.local."]
}
diff --git a/homeassistant/components/faa_delays/__init__.py b/homeassistant/components/faa_delays/__init__.py
index 750b1f4a833..e33ccc9fe48 100644
--- a/homeassistant/components/faa_delays/__init__.py
+++ b/homeassistant/components/faa_delays/__init__.py
@@ -1,33 +1,27 @@
"""The FAA Delays integration."""
-from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ID, Platform
from homeassistant.core import HomeAssistant
-from .const import DOMAIN
-from .coordinator import FAADataUpdateCoordinator
+from .coordinator import FAAConfigEntry, FAADataUpdateCoordinator
PLATFORMS = [Platform.BINARY_SENSOR]
-async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_setup_entry(hass: HomeAssistant, entry: FAAConfigEntry) -> bool:
"""Set up FAA Delays from a config entry."""
code = entry.data[CONF_ID]
- coordinator = FAADataUpdateCoordinator(hass, code)
+ coordinator = FAADataUpdateCoordinator(hass, entry, code)
await coordinator.async_config_entry_first_refresh()
- hass.data.setdefault(DOMAIN, {})
- hass.data[DOMAIN][entry.entry_id] = coordinator
+ entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
-async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_unload_entry(hass: HomeAssistant, entry: FAAConfigEntry) -> bool:
"""Unload a config entry."""
- unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
- if unload_ok:
- hass.data[DOMAIN].pop(entry.entry_id)
- return unload_ok
+ return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
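
FAA Delays (and fastdotcom below) move from hass.data[DOMAIN][entry.entry_id] to the typed entry.runtime_data pattern, so the coordinator travels with its config entry and the unload path no longer pops a dict. A generic sketch of the idea, assuming Python 3.12+ syntax; ConfigEntry here is a tiny stand-in, not the real class.

```python
from dataclasses import dataclass


@dataclass
class Coordinator:
    code: str
    data: dict | None = None


@dataclass
class ConfigEntry[RuntimeDataT]:  # PEP 695 generics, Python 3.12+
    entry_id: str
    runtime_data: RuntimeDataT | None = None


type FAAConfigEntry = ConfigEntry[Coordinator]


def setup_entry(entry: FAAConfigEntry, code: str) -> None:
    """Create the coordinator and attach it to the entry."""
    entry.runtime_data = Coordinator(code=code)


def setup_platform(entry: FAAConfigEntry) -> Coordinator:
    """Platforms read the coordinator straight from the entry."""
    assert entry.runtime_data is not None
    return entry.runtime_data


entry = ConfigEntry[Coordinator](entry_id="abc123")
setup_entry(entry, "SFO")
print(setup_platform(entry).code)
```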
diff --git a/homeassistant/components/faa_delays/binary_sensor.py b/homeassistant/components/faa_delays/binary_sensor.py
index 6a01bf6ebed..0fbc028f111 100644
--- a/homeassistant/components/faa_delays/binary_sensor.py
+++ b/homeassistant/components/faa_delays/binary_sensor.py
@@ -12,13 +12,12 @@ from homeassistant.components.binary_sensor import (
BinarySensorEntity,
BinarySensorEntityDescription,
)
-from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
-from . import FAADataUpdateCoordinator
+from . import FAAConfigEntry, FAADataUpdateCoordinator
from .const import DOMAIN
@@ -84,10 +83,10 @@ FAA_BINARY_SENSORS: tuple[FaaDelaysBinarySensorEntityDescription, ...] = (
async def async_setup_entry(
- hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
+ hass: HomeAssistant, entry: FAAConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Set up a FAA sensor based on a config entry."""
- coordinator = hass.data[DOMAIN][entry.entry_id]
+ coordinator = entry.runtime_data
entities = [
FAABinarySensor(coordinator, entry.entry_id, description)
diff --git a/homeassistant/components/faa_delays/coordinator.py b/homeassistant/components/faa_delays/coordinator.py
index 9de10b2ebbb..aefc8d72487 100644
--- a/homeassistant/components/faa_delays/coordinator.py
+++ b/homeassistant/components/faa_delays/coordinator.py
@@ -7,6 +7,7 @@ import logging
from aiohttp import ClientConnectionError
from faadelays import Airport
+from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import aiohttp_client
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -15,14 +16,20 @@ from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
+type FAAConfigEntry = ConfigEntry[FAADataUpdateCoordinator]
+
class FAADataUpdateCoordinator(DataUpdateCoordinator[Airport]):
"""Class to manage fetching FAA API data from a single endpoint."""
- def __init__(self, hass: HomeAssistant, code: str) -> None:
+ def __init__(self, hass: HomeAssistant, entry: FAAConfigEntry, code: str) -> None:
"""Initialize the coordinator."""
super().__init__(
- hass, _LOGGER, name=DOMAIN, update_interval=timedelta(minutes=1)
+ hass,
+ _LOGGER,
+ config_entry=entry,
+ name=DOMAIN,
+ update_interval=timedelta(minutes=1),
)
self.session = aiohttp_client.async_get_clientsession(hass)
self.data = Airport(code, self.session)
diff --git a/homeassistant/components/fastdotcom/__init__.py b/homeassistant/components/fastdotcom/__init__.py
index 967e7ef8e35..59cb3f984d2 100644
--- a/homeassistant/components/fastdotcom/__init__.py
+++ b/homeassistant/components/fastdotcom/__init__.py
@@ -4,20 +4,20 @@ from __future__ import annotations
import logging
-from homeassistant.config_entries import ConfigEntry, ConfigEntryState
+from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant
from homeassistant.helpers.start import async_at_started
-from .const import DOMAIN, PLATFORMS
-from .coordinator import FastdotcomDataUpdateCoordinator
+from .const import PLATFORMS
+from .coordinator import FastdotcomConfigEntry, FastdotcomDataUpdateCoordinator
_LOGGER = logging.getLogger(__name__)
-async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_setup_entry(hass: HomeAssistant, entry: FastdotcomConfigEntry) -> bool:
"""Set up Fast.com from a config entry."""
- coordinator = FastdotcomDataUpdateCoordinator(hass)
- hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
+ coordinator = FastdotcomDataUpdateCoordinator(hass, entry)
+ entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(
entry,
@@ -36,8 +36,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
return True
-async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_unload_entry(hass: HomeAssistant, entry: FastdotcomConfigEntry) -> bool:
"""Unload Fast.com config entry."""
- if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
- hass.data[DOMAIN].pop(entry.entry_id)
- return unload_ok
+ return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
diff --git a/homeassistant/components/fastdotcom/coordinator.py b/homeassistant/components/fastdotcom/coordinator.py
index 75ac55b8314..8365692804c 100644
--- a/homeassistant/components/fastdotcom/coordinator.py
+++ b/homeassistant/components/fastdotcom/coordinator.py
@@ -6,20 +6,24 @@ from datetime import timedelta
from fastdotcom import fast_com
+from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DEFAULT_INTERVAL, DOMAIN, LOGGER
+type FastdotcomConfigEntry = ConfigEntry[FastdotcomDataUpdateCoordinator]
+
class FastdotcomDataUpdateCoordinator(DataUpdateCoordinator[float]):
"""Class to manage fetching Fast.com data API."""
- def __init__(self, hass: HomeAssistant) -> None:
+ def __init__(self, hass: HomeAssistant, entry: FastdotcomConfigEntry) -> None:
"""Initialize the coordinator for Fast.com."""
super().__init__(
hass,
LOGGER,
+ config_entry=entry,
name=DOMAIN,
update_interval=timedelta(hours=DEFAULT_INTERVAL),
)
diff --git a/homeassistant/components/fastdotcom/diagnostics.py b/homeassistant/components/fastdotcom/diagnostics.py
index d7383ef0c6a..42f4e32f49e 100644
--- a/homeassistant/components/fastdotcom/diagnostics.py
+++ b/homeassistant/components/fastdotcom/diagnostics.py
@@ -4,21 +4,13 @@ from __future__ import annotations
from typing import Any
-from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
-from .const import DOMAIN
-from .coordinator import FastdotcomDataUpdateCoordinator
+from .coordinator import FastdotcomConfigEntry
async def async_get_config_entry_diagnostics(
- hass: HomeAssistant, config_entry: ConfigEntry
+ hass: HomeAssistant, config_entry: FastdotcomConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for the config entry."""
- coordinator: FastdotcomDataUpdateCoordinator = hass.data[DOMAIN][
- config_entry.entry_id
- ]
-
- return {
- "coordinator_data": coordinator.data,
- }
+ return {"coordinator_data": config_entry.runtime_data.data}
diff --git a/homeassistant/components/fastdotcom/sensor.py b/homeassistant/components/fastdotcom/sensor.py
index 721290e8c0d..b633cb25628 100644
--- a/homeassistant/components/fastdotcom/sensor.py
+++ b/homeassistant/components/fastdotcom/sensor.py
@@ -7,7 +7,6 @@ from homeassistant.components.sensor import (
SensorEntity,
SensorStateClass,
)
-from homeassistant.config_entries import ConfigEntry
from homeassistant.const import UnitOfDataRate
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
@@ -15,17 +14,16 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
-from .coordinator import FastdotcomDataUpdateCoordinator
+from .coordinator import FastdotcomConfigEntry, FastdotcomDataUpdateCoordinator
async def async_setup_entry(
hass: HomeAssistant,
- entry: ConfigEntry,
+ entry: FastdotcomConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the Fast.com sensor."""
- coordinator: FastdotcomDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
- async_add_entities([SpeedtestSensor(entry.entry_id, coordinator)])
+ async_add_entities([SpeedtestSensor(entry.entry_id, entry.runtime_data)])
class SpeedtestSensor(CoordinatorEntity[FastdotcomDataUpdateCoordinator], SensorEntity):
diff --git a/homeassistant/components/file_upload/__init__.py b/homeassistant/components/file_upload/__init__.py
index 97b3f83d5bc..6b0a1423e49 100644
--- a/homeassistant/components/file_upload/__init__.py
+++ b/homeassistant/components/file_upload/__init__.py
@@ -21,9 +21,11 @@ from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import raise_if_invalid_filename
+from homeassistant.util.hass_dict import HassKey
from homeassistant.util.ulid import ulid_hex
DOMAIN = "file_upload"
+_DATA: HassKey[FileUploadData] = HassKey(DOMAIN)
ONE_MEGABYTE = 1024 * 1024
MAX_SIZE = 100 * ONE_MEGABYTE
@@ -41,7 +43,7 @@ def process_uploaded_file(hass: HomeAssistant, file_id: str) -> Iterator[Path]:
if DOMAIN not in hass.data:
raise ValueError("File does not exist")
- file_upload_data: FileUploadData = hass.data[DOMAIN]
+ file_upload_data = hass.data[_DATA]
if not file_upload_data.has_file(file_id):
raise ValueError("File does not exist")
@@ -149,10 +151,10 @@ class FileUploadView(HomeAssistantView):
hass = request.app[KEY_HASS]
file_id = ulid_hex()
- if DOMAIN not in hass.data:
- hass.data[DOMAIN] = await FileUploadData.create(hass)
+ if _DATA not in hass.data:
+ hass.data[_DATA] = await FileUploadData.create(hass)
- file_upload_data: FileUploadData = hass.data[DOMAIN]
+ file_upload_data = hass.data[_DATA]
file_dir = file_upload_data.file_dir(file_id)
queue: SimpleQueue[tuple[bytes, asyncio.Future[None] | None] | None] = (
SimpleQueue()
@@ -206,7 +208,7 @@ class FileUploadView(HomeAssistantView):
raise web.HTTPNotFound
file_id = data["file_id"]
- file_upload_data: FileUploadData = hass.data[DOMAIN]
+ file_upload_data = hass.data[_DATA]
if file_upload_data.files.pop(file_id, None) is None:
raise web.HTTPNotFound
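
The file_upload change swaps the plain DOMAIN string for a HassKey so hass.data lookups carry their value type for the type checker. A simplified sketch of the idea behind such typed keys (Python 3.12+ generics; a stand-in, not the real HassKey implementation):

```python
from dataclasses import dataclass
from typing import cast


@dataclass(frozen=True)
class TypedKey[ValueT]:
    name: str


class DataStore:
    """A dict keyed by TypedKey; lookups are typed by the key."""

    def __init__(self) -> None:
        self._data: dict[str, object] = {}

    def __contains__(self, key: TypedKey) -> bool:
        return key.name in self._data

    def __setitem__[ValueT](self, key: TypedKey[ValueT], value: ValueT) -> None:
        self._data[key.name] = value

    def __getitem__[ValueT](self, key: TypedKey[ValueT]) -> ValueT:
        return cast(ValueT, self._data[key.name])


@dataclass
class FileUploadData:
    files: dict[str, str]


_DATA: TypedKey[FileUploadData] = TypedKey("file_upload")

store = DataStore()
if _DATA not in store:
    store[_DATA] = FileUploadData(files={})
print(store[_DATA].files)  # the checker knows this is FileUploadData
```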
diff --git a/homeassistant/components/fireservicerota/__init__.py b/homeassistant/components/fireservicerota/__init__.py
index aa303a08795..360a0f0b210 100644
--- a/homeassistant/components/fireservicerota/__init__.py
+++ b/homeassistant/components/fireservicerota/__init__.py
@@ -3,29 +3,16 @@
from __future__ import annotations
from datetime import timedelta
-import logging
-
-from pyfireservicerota import (
- ExpiredTokenError,
- FireServiceRota,
- FireServiceRotaIncidents,
- InvalidAuthError,
- InvalidTokenError,
-)
from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import CONF_TOKEN, CONF_URL, CONF_USERNAME, Platform
+from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import ConfigEntryAuthFailed
-from homeassistant.helpers.dispatcher import dispatcher_send
-from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
-from .const import DATA_CLIENT, DATA_COORDINATOR, DOMAIN, WSS_BWRURL
+from .const import DATA_CLIENT, DATA_COORDINATOR, DOMAIN
+from .coordinator import FireServiceRotaClient, FireServiceUpdateCoordinator
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
-_LOGGER = logging.getLogger(__name__)
-
PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.SWITCH]
@@ -40,17 +27,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
if client.token_refresh_failure:
return False
- async def async_update_data():
- return await client.async_update()
-
- coordinator = DataUpdateCoordinator(
- hass,
- _LOGGER,
- config_entry=entry,
- name="duty binary sensor",
- update_method=async_update_data,
- update_interval=MIN_TIME_BETWEEN_UPDATES,
- )
+ coordinator = FireServiceUpdateCoordinator(hass, client, entry)
await coordinator.async_config_entry_first_refresh()
@@ -74,165 +51,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
if unload_ok:
del hass.data[DOMAIN][entry.entry_id]
return unload_ok
-
-
-class FireServiceRotaOauth:
- """Handle authentication tokens."""
-
- def __init__(self, hass, entry, fsr):
- """Initialize the oauth object."""
- self._hass = hass
- self._entry = entry
-
- self._url = entry.data[CONF_URL]
- self._username = entry.data[CONF_USERNAME]
- self._fsr = fsr
-
- async def async_refresh_tokens(self) -> bool:
- """Refresh tokens and update config entry."""
- _LOGGER.debug("Refreshing authentication tokens after expiration")
-
- try:
- token_info = await self._hass.async_add_executor_job(
- self._fsr.refresh_tokens
- )
-
- except (InvalidAuthError, InvalidTokenError) as err:
- raise ConfigEntryAuthFailed(
- "Error refreshing tokens, triggered reauth workflow"
- ) from err
-
- _LOGGER.debug("Saving new tokens in config entry")
- self._hass.config_entries.async_update_entry(
- self._entry,
- data={
- "auth_implementation": DOMAIN,
- CONF_URL: self._url,
- CONF_USERNAME: self._username,
- CONF_TOKEN: token_info,
- },
- )
-
- return True
-
-
-class FireServiceRotaWebSocket:
- """Define a FireServiceRota websocket manager object."""
-
- def __init__(self, hass, entry):
- """Initialize the websocket object."""
- self._hass = hass
- self._entry = entry
-
- self._fsr_incidents = FireServiceRotaIncidents(on_incident=self._on_incident)
- self.incident_data = None
-
- def _construct_url(self) -> str:
- """Return URL with latest access token."""
- return WSS_BWRURL.format(
- self._entry.data[CONF_URL], self._entry.data[CONF_TOKEN]["access_token"]
- )
-
- def _on_incident(self, data) -> None:
- """Received new incident, update data."""
- _LOGGER.debug("Received new incident via websocket: %s", data)
- self.incident_data = data
- dispatcher_send(self._hass, f"{DOMAIN}_{self._entry.entry_id}_update")
-
- def start_listener(self) -> None:
- """Start the websocket listener."""
- _LOGGER.debug("Starting incidents listener")
- self._fsr_incidents.start(self._construct_url())
-
- def stop_listener(self) -> None:
- """Stop the websocket listener."""
- _LOGGER.debug("Stopping incidents listener")
- self._fsr_incidents.stop()
-
-
-class FireServiceRotaClient:
- """Getting the latest data from fireservicerota."""
-
- def __init__(self, hass, entry):
- """Initialize the data object."""
- self._hass = hass
- self._entry = entry
-
- self._url = entry.data[CONF_URL]
- self._tokens = entry.data[CONF_TOKEN]
-
- self.entry_id = entry.entry_id
- self.unique_id = entry.unique_id
-
- self.token_refresh_failure = False
- self.incident_id = None
- self.on_duty = False
-
- self.fsr = FireServiceRota(base_url=self._url, token_info=self._tokens)
-
- self.oauth = FireServiceRotaOauth(
- self._hass,
- self._entry,
- self.fsr,
- )
-
- self.websocket = FireServiceRotaWebSocket(self._hass, self._entry)
-
- async def setup(self) -> None:
- """Set up the data client."""
- await self._hass.async_add_executor_job(self.websocket.start_listener)
-
- async def update_call(self, func, *args):
- """Perform update call and return data."""
- if self.token_refresh_failure:
- return None
-
- try:
- return await self._hass.async_add_executor_job(func, *args)
- except (ExpiredTokenError, InvalidTokenError):
- await self._hass.async_add_executor_job(self.websocket.stop_listener)
- self.token_refresh_failure = True
-
- if await self.oauth.async_refresh_tokens():
- self.token_refresh_failure = False
- await self._hass.async_add_executor_job(self.websocket.start_listener)
-
- return await self._hass.async_add_executor_job(func, *args)
-
- async def async_update(self) -> dict | None:
- """Get the latest availability data."""
- data = await self.update_call(
- self.fsr.get_availability, str(self._hass.config.time_zone)
- )
-
- if not data:
- return None
-
- self.on_duty = bool(data.get("available"))
-
- _LOGGER.debug("Updated availability data: %s", data)
- return data
-
- async def async_response_update(self) -> dict | None:
- """Get the latest incident response data."""
-
- if not self.incident_id:
- return None
-
- _LOGGER.debug("Updating response data for incident id %s", self.incident_id)
-
- return await self.update_call(self.fsr.get_incident_response, self.incident_id)
-
- async def async_set_response(self, value) -> None:
- """Set incident response status."""
-
- if not self.incident_id:
- return
-
- _LOGGER.debug(
- "Setting incident response for incident id '%s' to state '%s'",
- self.incident_id,
- value,
- )
-
- await self.update_call(self.fsr.set_incident_response, self.incident_id, value)
diff --git a/homeassistant/components/fireservicerota/binary_sensor.py b/homeassistant/components/fireservicerota/binary_sensor.py
index a22991f2008..b6d3aa67a0a 100644
--- a/homeassistant/components/fireservicerota/binary_sensor.py
+++ b/homeassistant/components/fireservicerota/binary_sensor.py
@@ -8,13 +8,10 @@ from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
-from homeassistant.helpers.update_coordinator import (
- CoordinatorEntity,
- DataUpdateCoordinator,
-)
+from homeassistant.helpers.update_coordinator import CoordinatorEntity
-from . import FireServiceRotaClient
from .const import DATA_CLIENT, DATA_COORDINATOR, DOMAIN as FIRESERVICEROTA_DOMAIN
+from .coordinator import FireServiceRotaClient, FireServiceUpdateCoordinator
async def async_setup_entry(
@@ -26,14 +23,16 @@ async def async_setup_entry(
DATA_CLIENT
]
- coordinator: DataUpdateCoordinator = hass.data[FIRESERVICEROTA_DOMAIN][
+ coordinator: FireServiceUpdateCoordinator = hass.data[FIRESERVICEROTA_DOMAIN][
entry.entry_id
][DATA_COORDINATOR]
async_add_entities([ResponseBinarySensor(coordinator, client, entry)])
-class ResponseBinarySensor(CoordinatorEntity, BinarySensorEntity):
+class ResponseBinarySensor(
+ CoordinatorEntity[FireServiceUpdateCoordinator], BinarySensorEntity
+):
"""Representation of an FireServiceRota sensor."""
_attr_has_entity_name = True
@@ -41,7 +40,7 @@ class ResponseBinarySensor(CoordinatorEntity, BinarySensorEntity):
def __init__(
self,
- coordinator: DataUpdateCoordinator,
+ coordinator: FireServiceUpdateCoordinator,
client: FireServiceRotaClient,
entry: ConfigEntry,
) -> None:
diff --git a/homeassistant/components/fireservicerota/coordinator.py b/homeassistant/components/fireservicerota/coordinator.py
new file mode 100644
index 00000000000..35f839b3bdb
--- /dev/null
+++ b/homeassistant/components/fireservicerota/coordinator.py
@@ -0,0 +1,213 @@
+"""The FireServiceRota integration."""
+
+from __future__ import annotations
+
+from datetime import timedelta
+import logging
+
+from pyfireservicerota import (
+ ExpiredTokenError,
+ FireServiceRota,
+ FireServiceRotaIncidents,
+ InvalidAuthError,
+ InvalidTokenError,
+)
+
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.const import CONF_TOKEN, CONF_URL, CONF_USERNAME, Platform
+from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import ConfigEntryAuthFailed
+from homeassistant.helpers.dispatcher import dispatcher_send
+from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
+
+from .const import DOMAIN, WSS_BWRURL
+
+MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
+
+_LOGGER = logging.getLogger(__name__)
+
+PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.SWITCH]
+
+
+class FireServiceUpdateCoordinator(DataUpdateCoordinator[dict | None]):
+ """Data update coordinator for FireServiceRota."""
+
+ def __init__(
+ self, hass: HomeAssistant, client: FireServiceRotaClient, entry: ConfigEntry
+ ) -> None:
+ """Initialize the FireServiceRota DataUpdateCoordinator."""
+ super().__init__(
+ hass,
+ _LOGGER,
+ name="duty binary sensor",
+ config_entry=entry,
+ update_interval=MIN_TIME_BETWEEN_UPDATES,
+ )
+
+ self.client = client
+
+ async def _async_update_data(self) -> dict | None:
+ """Get the latest availability data."""
+ return await self.client.async_update()
+
+
+class FireServiceRotaOauth:
+ """Handle authentication tokens."""
+
+ def __init__(self, hass, entry, fsr):
+ """Initialize the oauth object."""
+ self._hass = hass
+ self._entry = entry
+
+ self._url = entry.data[CONF_URL]
+ self._username = entry.data[CONF_USERNAME]
+ self._fsr = fsr
+
+ async def async_refresh_tokens(self) -> bool:
+ """Refresh tokens and update config entry."""
+ _LOGGER.debug("Refreshing authentication tokens after expiration")
+
+ try:
+ token_info = await self._hass.async_add_executor_job(
+ self._fsr.refresh_tokens
+ )
+
+ except (InvalidAuthError, InvalidTokenError) as err:
+ raise ConfigEntryAuthFailed(
+ "Error refreshing tokens, triggered reauth workflow"
+ ) from err
+
+ _LOGGER.debug("Saving new tokens in config entry")
+ self._hass.config_entries.async_update_entry(
+ self._entry,
+ data={
+ "auth_implementation": DOMAIN,
+ CONF_URL: self._url,
+ CONF_USERNAME: self._username,
+ CONF_TOKEN: token_info,
+ },
+ )
+
+ return True
+
+
+class FireServiceRotaWebSocket:
+ """Define a FireServiceRota websocket manager object."""
+
+ def __init__(self, hass, entry):
+ """Initialize the websocket object."""
+ self._hass = hass
+ self._entry = entry
+
+ self._fsr_incidents = FireServiceRotaIncidents(on_incident=self._on_incident)
+ self.incident_data = None
+
+ def _construct_url(self) -> str:
+ """Return URL with latest access token."""
+ return WSS_BWRURL.format(
+ self._entry.data[CONF_URL], self._entry.data[CONF_TOKEN]["access_token"]
+ )
+
+ def _on_incident(self, data) -> None:
+ """Received new incident, update data."""
+ _LOGGER.debug("Received new incident via websocket: %s", data)
+ self.incident_data = data
+ dispatcher_send(self._hass, f"{DOMAIN}_{self._entry.entry_id}_update")
+
+ def start_listener(self) -> None:
+ """Start the websocket listener."""
+ _LOGGER.debug("Starting incidents listener")
+ self._fsr_incidents.start(self._construct_url())
+
+ def stop_listener(self) -> None:
+ """Stop the websocket listener."""
+ _LOGGER.debug("Stopping incidents listener")
+ self._fsr_incidents.stop()
+
+
+class FireServiceRotaClient:
+ """Getting the latest data from fireservicerota."""
+
+ def __init__(self, hass, entry):
+ """Initialize the data object."""
+ self._hass = hass
+ self._entry = entry
+
+ self._url = entry.data[CONF_URL]
+ self._tokens = entry.data[CONF_TOKEN]
+
+ self.entry_id = entry.entry_id
+ self.unique_id = entry.unique_id
+
+ self.token_refresh_failure = False
+ self.incident_id = None
+ self.on_duty = False
+
+ self.fsr = FireServiceRota(base_url=self._url, token_info=self._tokens)
+
+ self.oauth = FireServiceRotaOauth(
+ self._hass,
+ self._entry,
+ self.fsr,
+ )
+
+ self.websocket = FireServiceRotaWebSocket(self._hass, self._entry)
+
+ async def setup(self) -> None:
+ """Set up the data client."""
+ await self._hass.async_add_executor_job(self.websocket.start_listener)
+
+ async def update_call(self, func, *args):
+ """Perform update call and return data."""
+ if self.token_refresh_failure:
+ return None
+
+ try:
+ return await self._hass.async_add_executor_job(func, *args)
+ except (ExpiredTokenError, InvalidTokenError):
+ await self._hass.async_add_executor_job(self.websocket.stop_listener)
+ self.token_refresh_failure = True
+
+ if await self.oauth.async_refresh_tokens():
+ self.token_refresh_failure = False
+ await self._hass.async_add_executor_job(self.websocket.start_listener)
+
+ return await self._hass.async_add_executor_job(func, *args)
+
+ async def async_update(self) -> dict | None:
+ """Get the latest availability data."""
+ data = await self.update_call(
+ self.fsr.get_availability, str(self._hass.config.time_zone)
+ )
+
+ if not data:
+ return None
+
+ self.on_duty = bool(data.get("available"))
+
+ _LOGGER.debug("Updated availability data: %s", data)
+ return data
+
+ async def async_response_update(self) -> dict | None:
+ """Get the latest incident response data."""
+
+ if not self.incident_id:
+ return None
+
+ _LOGGER.debug("Updating response data for incident id %s", self.incident_id)
+
+ return await self.update_call(self.fsr.get_incident_response, self.incident_id)
+
+ async def async_set_response(self, value) -> None:
+ """Set incident response status."""
+
+ if not self.incident_id:
+ return
+
+ _LOGGER.debug(
+ "Setting incident response for incident id '%s' to state '%s'",
+ self.incident_id,
+ value,
+ )
+
+ await self.update_call(self.fsr.set_incident_response, self.incident_id, value)
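The update_call wrapper above retries a blocking library call exactly once after refreshing expired tokens. A standalone sketch of that retry-once shape, where fetch, refresh, and TokenError are placeholders standing in for the pyfireservicerota calls and exceptions:

import asyncio


class TokenError(Exception):
    """Stand-in for ExpiredTokenError / InvalidTokenError."""


async def call_with_refresh(fetch, refresh) -> dict | None:
    """Run a blocking call in the executor; refresh tokens and retry once on auth errors."""
    loop = asyncio.get_running_loop()
    try:
        return await loop.run_in_executor(None, fetch)
    except TokenError:
        if await refresh():  # e.g. FireServiceRotaOauth.async_refresh_tokens
            return await loop.run_in_executor(None, fetch)
    return None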
diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json
index 2ecb165554a..d27785dcea5 100644
--- a/homeassistant/components/frontend/manifest.json
+++ b/homeassistant/components/frontend/manifest.json
@@ -21,5 +21,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
- "requirements": ["home-assistant-frontend==20250131.0"]
+ "requirements": ["home-assistant-frontend==20250205.0"]
}
diff --git a/homeassistant/components/frontier_silicon/media_player.py b/homeassistant/components/frontier_silicon/media_player.py
index 8407e0a869d..52998e03703 100644
--- a/homeassistant/components/frontier_silicon/media_player.py
+++ b/homeassistant/components/frontier_silicon/media_player.py
@@ -244,7 +244,7 @@ class AFSAPIDevice(MediaPlayerEntity):
"""Send volume up command."""
volume = await self.fs_device.get_volume()
volume = int(volume or 0) + 1
- await self.fs_device.set_volume(min(volume, self._max_volume))
+ await self.fs_device.set_volume(min(volume, self._max_volume or 1))
async def async_volume_down(self) -> None:
"""Send volume down command."""
diff --git a/homeassistant/components/generic/strings.json b/homeassistant/components/generic/strings.json
index 854ceb93b3e..4a5d672fcde 100644
--- a/homeassistant/components/generic/strings.json
+++ b/homeassistant/components/generic/strings.json
@@ -28,14 +28,14 @@
"user": {
"description": "Enter the settings to connect to the camera.",
"data": {
- "still_image_url": "Still Image URL (e.g. http://...)",
- "stream_source": "Stream Source URL (e.g. rtsp://...)",
+ "still_image_url": "Still image URL (e.g. http://...)",
+ "stream_source": "Stream source URL (e.g. rtsp://...)",
"rtsp_transport": "RTSP transport protocol",
"authentication": "Authentication",
- "limit_refetch_to_url_change": "Limit refetch to url change",
+ "limit_refetch_to_url_change": "Limit refetch to URL change",
"password": "[%key:common::config_flow::data::password%]",
"username": "[%key:common::config_flow::data::username%]",
- "framerate": "Frame Rate (Hz)",
+ "framerate": "Frame rate (Hz)",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
}
},
diff --git a/homeassistant/components/gogogate2/manifest.json b/homeassistant/components/gogogate2/manifest.json
index 40633537ddf..238c145302a 100644
--- a/homeassistant/components/gogogate2/manifest.json
+++ b/homeassistant/components/gogogate2/manifest.json
@@ -14,5 +14,5 @@
},
"iot_class": "local_polling",
"loggers": ["ismartgate"],
- "requirements": ["ismartgate==5.0.1"]
+ "requirements": ["ismartgate==5.0.2"]
}
diff --git a/homeassistant/components/google_generative_ai_conversation/const.py b/homeassistant/components/google_generative_ai_conversation/const.py
index bd60e8d94c1..4d83b935528 100644
--- a/homeassistant/components/google_generative_ai_conversation/const.py
+++ b/homeassistant/components/google_generative_ai_conversation/const.py
@@ -8,7 +8,7 @@ CONF_PROMPT = "prompt"
CONF_RECOMMENDED = "recommended"
CONF_CHAT_MODEL = "chat_model"
-RECOMMENDED_CHAT_MODEL = "models/gemini-1.5-flash-latest"
+RECOMMENDED_CHAT_MODEL = "models/gemini-2.0-flash"
CONF_TEMPERATURE = "temperature"
RECOMMENDED_TEMPERATURE = 1.0
CONF_TOP_P = "top_p"
diff --git a/homeassistant/components/google_generative_ai_conversation/conversation.py b/homeassistant/components/google_generative_ai_conversation/conversation.py
index 53ee4e1f880..8a6c5563601 100644
--- a/homeassistant/components/google_generative_ai_conversation/conversation.py
+++ b/homeassistant/components/google_generative_ai_conversation/conversation.py
@@ -4,7 +4,7 @@ from __future__ import annotations
import codecs
from collections.abc import Callable
-from typing import Any, Literal
+from typing import Any, Literal, cast
from google.api_core.exceptions import GoogleAPIError
import google.generativeai as genai
@@ -149,15 +149,53 @@ def _escape_decode(value: Any) -> Any:
return value
-def _chat_message_convert(
- message: conversation.Content | conversation.NativeContent[genai_types.ContentDict],
-) -> genai_types.ContentDict:
- """Convert any native chat message for this agent to the native format."""
- if message.role == "native":
- return message.content
+def _create_google_tool_response_content(
+ content: list[conversation.ToolResultContent],
+) -> protos.Content:
+ """Create a Google tool response content."""
+ return protos.Content(
+ parts=[
+ protos.Part(
+ function_response=protos.FunctionResponse(
+ name=tool_result.tool_name, response=tool_result.tool_result
+ )
+ )
+ for tool_result in content
+ ]
+ )
- role = "model" if message.role == "assistant" else message.role
- return {"role": role, "parts": message.content}
+
+def _convert_content(
+ content: conversation.UserContent
+ | conversation.AssistantContent
+ | conversation.SystemContent,
+) -> genai_types.ContentDict:
+ """Convert HA content to Google content."""
+ if content.role != "assistant" or not content.tool_calls: # type: ignore[union-attr]
+ role = "model" if content.role == "assistant" else content.role
+ return {"role": role, "parts": content.content}
+
+ # Handle the Assistant content with tool calls.
+ assert type(content) is conversation.AssistantContent
+ parts = []
+
+ if content.content:
+ parts.append(protos.Part(text=content.content))
+
+ if content.tool_calls:
+ parts.extend(
+ [
+ protos.Part(
+ function_call=protos.FunctionCall(
+ name=tool_call.tool_name,
+ args=_escape_decode(tool_call.tool_args),
+ )
+ )
+ for tool_call in content.tool_calls
+ ]
+ )
+
+ return protos.Content({"role": "model", "parts": parts})
class GoogleGenerativeAIConversationEntity(
@@ -220,7 +258,7 @@ class GoogleGenerativeAIConversationEntity(
async def _async_handle_message(
self,
user_input: conversation.ConversationInput,
- session: conversation.ChatLog[genai_types.ContentDict],
+ chat_log: conversation.ChatLog,
) -> conversation.ConversationResult:
"""Call the API."""
@@ -228,7 +266,7 @@ class GoogleGenerativeAIConversationEntity(
options = self.entry.options
try:
- await session.async_update_llm_data(
+ await chat_log.async_update_llm_data(
DOMAIN,
user_input,
options.get(CONF_LLM_HASS_API),
@@ -238,10 +276,10 @@ class GoogleGenerativeAIConversationEntity(
return err.as_conversation_result()
tools: list[dict[str, Any]] | None = None
- if session.llm_api:
+ if chat_log.llm_api:
tools = [
- _format_tool(tool, session.llm_api.custom_serializer)
- for tool in session.llm_api.tools
+ _format_tool(tool, chat_log.llm_api.custom_serializer)
+ for tool in chat_log.llm_api.tools
]
model_name = self.entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
@@ -252,9 +290,36 @@ class GoogleGenerativeAIConversationEntity(
"gemini-1.0" not in model_name and "gemini-pro" not in model_name
)
- prompt, *messages = [
- _chat_message_convert(message) for message in session.async_get_messages()
- ]
+ prompt = chat_log.content[0].content # type: ignore[union-attr]
+ messages: list[genai_types.ContentDict] = []
+
+ # Google groups tool results, we do not. Group them before sending.
+ tool_results: list[conversation.ToolResultContent] = []
+
+ for chat_content in chat_log.content[1:]:
+ if chat_content.role == "tool_result":
+ # mypy doesn't like picking a type based on checking shared property 'role'
+ tool_results.append(cast(conversation.ToolResultContent, chat_content))
+ continue
+
+ if tool_results:
+ messages.append(_create_google_tool_response_content(tool_results))
+ tool_results.clear()
+
+ messages.append(
+ _convert_content(
+ cast(
+ conversation.UserContent
+ | conversation.SystemContent
+ | conversation.AssistantContent,
+ chat_content,
+ )
+ )
+ )
+
+ if tool_results:
+ messages.append(_create_google_tool_response_content(tool_results))
+
model = genai.GenerativeModel(
model_name=model_name,
generation_config={
@@ -282,12 +347,12 @@ class GoogleGenerativeAIConversationEntity(
),
},
tools=tools or None,
- system_instruction=prompt["parts"] if supports_system_instruction else None,
+ system_instruction=prompt if supports_system_instruction else None,
)
if not supports_system_instruction:
messages = [
- {"role": "user", "parts": prompt["parts"]},
+ {"role": "user", "parts": prompt},
{"role": "model", "parts": "Ok"},
*messages,
]
@@ -325,50 +390,40 @@ class GoogleGenerativeAIConversationEntity(
content = " ".join(
[part.text.strip() for part in chat_response.parts if part.text]
)
- if content:
- session.async_add_message(
- conversation.Content(
- role="assistant",
- agent_id=user_input.agent_id,
- content=content,
- )
- )
- function_calls = [
- part.function_call for part in chat_response.parts if part.function_call
- ]
-
- if not function_calls or not session.llm_api:
- break
-
- tool_responses = []
- for function_call in function_calls:
- tool_call = MessageToDict(function_call._pb) # noqa: SLF001
+ tool_calls = []
+ for part in chat_response.parts:
+ if not part.function_call:
+ continue
+ tool_call = MessageToDict(part.function_call._pb) # noqa: SLF001
tool_name = tool_call["name"]
tool_args = _escape_decode(tool_call["args"])
- tool_input = llm.ToolInput(tool_name=tool_name, tool_args=tool_args)
- function_response = await session.async_call_tool(tool_input)
- tool_responses.append(
- protos.Part(
- function_response=protos.FunctionResponse(
- name=tool_name, response=function_response
+ tool_calls.append(
+ llm.ToolInput(tool_name=tool_name, tool_args=tool_args)
+ )
+
+ chat_request = _create_google_tool_response_content(
+ [
+ tool_response
+ async for tool_response in chat_log.async_add_assistant_content(
+ conversation.AssistantContent(
+ agent_id=user_input.agent_id,
+ content=content,
+ tool_calls=tool_calls or None,
)
)
- )
- chat_request = protos.Content(parts=tool_responses)
- session.async_add_message(
- conversation.NativeContent(
- agent_id=user_input.agent_id,
- content=chat_request,
- )
+ ]
)
+ if not tool_calls:
+ break
+
response = intent.IntentResponse(language=user_input.language)
response.async_set_speech(
" ".join([part.text.strip() for part in chat_response.parts if part.text])
)
return conversation.ConversationResult(
- response=response, conversation_id=session.conversation_id
+ response=response, conversation_id=chat_log.conversation_id
)
async def _async_entry_update_listener(
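The new message-building loop groups consecutive tool_result entries into one Google content object before appending the next user or assistant message, with a final flush after the loop. A plain-Python sketch of that grouping step, using dict stand-ins for the conversation content classes:

def group_tool_results(items: list[dict]) -> list[dict]:
    """Batch consecutive tool results; pass other messages through unchanged."""
    out: list[dict] = []
    batch: list[dict] = []
    for item in items:
        if item["role"] == "tool_result":
            batch.append(item)
            continue
        if batch:
            out.append({"role": "tool_batch", "parts": batch})
            batch = []
        out.append(item)
    if batch:  # flush tool results that follow the final assistant message
        out.append({"role": "tool_batch", "parts": batch})
    return out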
diff --git a/homeassistant/components/govee_ble/manifest.json b/homeassistant/components/govee_ble/manifest.json
index 5a123de7066..4d871a991a6 100644
--- a/homeassistant/components/govee_ble/manifest.json
+++ b/homeassistant/components/govee_ble/manifest.json
@@ -131,5 +131,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/govee_ble",
"iot_class": "local_push",
- "requirements": ["govee-ble==0.42.0"]
+ "requirements": ["govee-ble==0.42.1"]
}
diff --git a/homeassistant/components/govee_light_local/light.py b/homeassistant/components/govee_light_local/light.py
index cb2e24fa8a6..c7799a7ffc4 100644
--- a/homeassistant/components/govee_light_local/light.py
+++ b/homeassistant/components/govee_light_local/light.py
@@ -5,7 +5,7 @@ from __future__ import annotations
import logging
from typing import Any
-from govee_local_api import GoveeDevice, GoveeLightCapability
+from govee_local_api import GoveeDevice, GoveeLightFeatures
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
@@ -71,13 +71,13 @@ class GoveeLight(CoordinatorEntity[GoveeLocalApiCoordinator], LightEntity):
capabilities = device.capabilities
color_modes = {ColorMode.ONOFF}
if capabilities:
- if GoveeLightCapability.COLOR_RGB in capabilities:
+ if GoveeLightFeatures.COLOR_RGB & capabilities.features:
color_modes.add(ColorMode.RGB)
- if GoveeLightCapability.COLOR_KELVIN_TEMPERATURE in capabilities:
+ if GoveeLightFeatures.COLOR_KELVIN_TEMPERATURE & capabilities.features:
color_modes.add(ColorMode.COLOR_TEMP)
self._attr_max_color_temp_kelvin = 9000
self._attr_min_color_temp_kelvin = 2000
- if GoveeLightCapability.BRIGHTNESS in capabilities:
+ if GoveeLightFeatures.BRIGHTNESS & capabilities.features:
color_modes.add(ColorMode.BRIGHTNESS)
self._attr_supported_color_modes = filter_supported_color_modes(color_modes)
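govee-local-api 2.0 reworks capabilities into a feature bit mask, which is why the membership tests above become bitwise AND checks. A quick illustration with a stand-in flag enum (the real GoveeLightFeatures values may differ):

from enum import IntFlag


class LightFeatures(IntFlag):
    """Stand-in for GoveeLightFeatures; actual bit values may differ."""

    BRIGHTNESS = 1
    COLOR_RGB = 2
    COLOR_KELVIN_TEMPERATURE = 4


features = LightFeatures.BRIGHTNESS | LightFeatures.COLOR_RGB
assert LightFeatures.COLOR_RGB & features  # bit is set -> truthy
assert not (LightFeatures.COLOR_KELVIN_TEMPERATURE & features)  # bit not set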
diff --git a/homeassistant/components/govee_light_local/manifest.json b/homeassistant/components/govee_light_local/manifest.json
index a94d4e58e9a..e813ab545df 100644
--- a/homeassistant/components/govee_light_local/manifest.json
+++ b/homeassistant/components/govee_light_local/manifest.json
@@ -6,5 +6,5 @@
"dependencies": ["network"],
"documentation": "https://www.home-assistant.io/integrations/govee_light_local",
"iot_class": "local_push",
- "requirements": ["govee-local-api==1.5.3"]
+ "requirements": ["govee-local-api==2.0.0"]
}
diff --git a/homeassistant/components/gpsd/icons.json b/homeassistant/components/gpsd/icons.json
index 59d904f918c..3605bdc6d70 100644
--- a/homeassistant/components/gpsd/icons.json
+++ b/homeassistant/components/gpsd/icons.json
@@ -16,6 +16,12 @@
},
"elevation": {
"default": "mdi:arrow-up-down"
+ },
+ "total_satellites": {
+ "default": "mdi:satellite-variant"
+ },
+ "used_satellites": {
+ "default": "mdi:satellite-variant"
}
}
}
diff --git a/homeassistant/components/gpsd/sensor.py b/homeassistant/components/gpsd/sensor.py
index 1bac41ecaae..70d32f88a65 100644
--- a/homeassistant/components/gpsd/sensor.py
+++ b/homeassistant/components/gpsd/sensor.py
@@ -14,6 +14,7 @@ from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
+ SensorStateClass,
)
from homeassistant.const import (
ATTR_LATITUDE,
@@ -39,12 +40,31 @@ ATTR_CLIMB = "climb"
ATTR_ELEVATION = "elevation"
ATTR_GPS_TIME = "gps_time"
ATTR_SPEED = "speed"
+ATTR_TOTAL_SATELLITES = "total_satellites"
+ATTR_USED_SATELLITES = "used_satellites"
DEFAULT_NAME = "GPS"
_MODE_VALUES = {2: "2d_fix", 3: "3d_fix"}
+def count_total_satellites_fn(agps_thread: AGPS3mechanism) -> int | None:
+ """Count the number of total satellites."""
+ satellites = agps_thread.data_stream.satellites
+ return None if satellites == "n/a" else len(satellites)
+
+
+def count_used_satellites_fn(agps_thread: AGPS3mechanism) -> int | None:
+ """Count the number of used satellites."""
+ satellites = agps_thread.data_stream.satellites
+ if satellites == "n/a":
+ return None
+
+ return sum(
+ 1 for sat in satellites if isinstance(sat, dict) and sat.get("used", False)
+ )
+
+
@dataclass(frozen=True, kw_only=True)
class GpsdSensorDescription(SensorEntityDescription):
"""Class describing GPSD sensor entities."""
@@ -116,6 +136,22 @@ SENSOR_TYPES: tuple[GpsdSensorDescription, ...] = (
suggested_display_precision=2,
entity_registry_enabled_default=False,
),
+ GpsdSensorDescription(
+ key=ATTR_TOTAL_SATELLITES,
+ translation_key=ATTR_TOTAL_SATELLITES,
+ entity_category=EntityCategory.DIAGNOSTIC,
+ state_class=SensorStateClass.MEASUREMENT,
+ value_fn=count_total_satellites_fn,
+ entity_registry_enabled_default=False,
+ ),
+ GpsdSensorDescription(
+ key=ATTR_USED_SATELLITES,
+ translation_key=ATTR_USED_SATELLITES,
+ entity_category=EntityCategory.DIAGNOSTIC,
+ state_class=SensorStateClass.MEASUREMENT,
+ value_fn=count_used_satellites_fn,
+ entity_registry_enabled_default=False,
+ ),
)
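The two new helpers distinguish the gpsd "n/a" placeholder from a real satellite list and only count entries flagged as used. A short worked example with a made-up SKY-style payload in the shape the helpers expect:

satellites = [
    {"PRN": 5, "used": True},
    {"PRN": 12, "used": False},
    {"PRN": 24, "used": True},
]

total = None if satellites == "n/a" else len(satellites)  # 3 satellites in view
used = sum(
    1 for sat in satellites if isinstance(sat, dict) and sat.get("used", False)
)  # 2 satellites used for the fix
print(total, used)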
diff --git a/homeassistant/components/gpsd/strings.json b/homeassistant/components/gpsd/strings.json
index 867edf0b5a8..a5d6c570b54 100644
--- a/homeassistant/components/gpsd/strings.json
+++ b/homeassistant/components/gpsd/strings.json
@@ -50,6 +50,14 @@
},
"mode": { "name": "[%key:common::config_flow::data::mode%]" }
}
+ },
+ "total_satellites": {
+ "name": "Total satellites",
+ "unit_of_measurement": "satellites"
+ },
+ "used_satellites": {
+ "name": "Used satellites",
+ "unit_of_measurement": "satellites"
}
}
}
diff --git a/homeassistant/components/hassio/backup.py b/homeassistant/components/hassio/backup.py
index 495e953df9d..ddaa821587f 100644
--- a/homeassistant/components/hassio/backup.py
+++ b/homeassistant/components/hassio/backup.py
@@ -20,6 +20,7 @@ from aiohasupervisor.models import (
backups as supervisor_backups,
mounts as supervisor_mounts,
)
+from aiohasupervisor.models.backups import LOCATION_CLOUD_BACKUP, LOCATION_LOCAL_STORAGE
from homeassistant.components.backup import (
DATA_MANAGER,
@@ -27,15 +28,19 @@ from homeassistant.components.backup import (
AgentBackup,
BackupAgent,
BackupManagerError,
+ BackupNotFound,
BackupReaderWriter,
BackupReaderWriterError,
CreateBackupEvent,
+ CreateBackupStage,
+ CreateBackupState,
Folder,
IdleEvent,
IncorrectPasswordError,
ManagerBackup,
NewBackup,
RestoreBackupEvent,
+ RestoreBackupStage,
RestoreBackupState,
WrittenBackup,
async_get_manager as async_get_backup_manager,
@@ -47,12 +52,11 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.util import dt as dt_util
+from homeassistant.util.enum import try_parse_enum
from .const import DOMAIN, EVENT_SUPERVISOR_EVENT
from .handler import get_supervisor_client
-LOCATION_CLOUD_BACKUP = ".cloud_backup"
-LOCATION_LOCAL = ".local"
MOUNT_JOBS = ("mount_manager_create_mount", "mount_manager_remove_mount")
RESTORE_JOB_ID_ENV = "SUPERVISOR_RESTORE_JOB_ID"
# Set on backups automatically created when updating an addon
@@ -67,7 +71,9 @@ async def async_get_backup_agents(
"""Return the hassio backup agents."""
client = get_supervisor_client(hass)
mounts = await client.mounts.info()
- agents: list[BackupAgent] = [SupervisorBackupAgent(hass, "local", None)]
+ agents: list[BackupAgent] = [
+ SupervisorBackupAgent(hass, "local", LOCATION_LOCAL_STORAGE)
+ ]
for mount in mounts.mounts:
if mount.usage is not supervisor_mounts.MountUsage.BACKUP:
continue
@@ -107,7 +113,7 @@ def async_register_backup_agents_listener(
def _backup_details_to_agent_backup(
- details: supervisor_backups.BackupComplete, location: str | None
+ details: supervisor_backups.BackupComplete, location: str
) -> AgentBackup:
"""Convert a supervisor backup details object to an agent backup."""
homeassistant_included = details.homeassistant is not None
@@ -120,7 +126,6 @@ def _backup_details_to_agent_backup(
for addon in details.addons
]
extra_metadata = details.extra or {}
- location = location or LOCATION_LOCAL
return AgentBackup(
addons=addons,
backup_id=details.slug,
@@ -143,7 +148,7 @@ class SupervisorBackupAgent(BackupAgent):
domain = DOMAIN
- def __init__(self, hass: HomeAssistant, name: str, location: str | None) -> None:
+ def __init__(self, hass: HomeAssistant, name: str, location: str) -> None:
"""Initialize the backup agent."""
super().__init__()
self._hass = hass
@@ -158,10 +163,15 @@ class SupervisorBackupAgent(BackupAgent):
**kwargs: Any,
) -> AsyncIterator[bytes]:
"""Download a backup file."""
- return await self._client.backups.download_backup(
- backup_id,
- options=supervisor_backups.DownloadBackupOptions(location=self.location),
- )
+ try:
+ return await self._client.backups.download_backup(
+ backup_id,
+ options=supervisor_backups.DownloadBackupOptions(
+ location=self.location
+ ),
+ )
+ except SupervisorNotFoundError as err:
+ raise BackupNotFound from err
async def async_upload_backup(
self,
@@ -196,7 +206,7 @@ class SupervisorBackupAgent(BackupAgent):
backup_list = await self._client.backups.list()
result = []
for backup in backup_list:
- if not backup.locations or self.location not in backup.locations:
+ if self.location not in backup.location_attributes:
continue
details = await self._client.backups.backup_info(backup.slug)
result.append(_backup_details_to_agent_backup(details, self.location))
@@ -212,7 +222,7 @@ class SupervisorBackupAgent(BackupAgent):
details = await self._client.backups.backup_info(backup_id)
except SupervisorNotFoundError:
return None
- if self.location not in details.locations:
+ if self.location not in details.location_attributes:
return None
return _backup_details_to_agent_backup(details, self.location)
@@ -285,8 +295,8 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
# will be handled by async_upload_backup.
# If the lists are the same length, it does not matter which one we send,
# we send the encrypted list to have a well defined behavior.
- encrypted_locations: list[str | None] = []
- decrypted_locations: list[str | None] = []
+ encrypted_locations: list[str] = []
+ decrypted_locations: list[str] = []
agents_settings = manager.config.data.agents
for hassio_agent in hassio_agents:
if password is not None:
@@ -336,31 +346,43 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
self._async_wait_for_backup(
backup,
locations,
+ on_progress=on_progress,
remove_after_upload=locations == [LOCATION_CLOUD_BACKUP],
),
name="backup_manager_create_backup",
eager_start=False, # To ensure the task is not started before we return
)
- return (NewBackup(backup_job_id=backup.job_id), backup_task)
+ return (NewBackup(backup_job_id=backup.job_id.hex), backup_task)
async def _async_wait_for_backup(
self,
backup: supervisor_backups.NewBackup,
- locations: list[str | None],
+ locations: list[str],
*,
+ on_progress: Callable[[CreateBackupEvent], None],
remove_after_upload: bool,
) -> WrittenBackup:
"""Wait for a backup to complete."""
backup_complete = asyncio.Event()
backup_id: str | None = None
+ create_errors: list[dict[str, str]] = []
@callback
def on_job_progress(data: Mapping[str, Any]) -> None:
"""Handle backup progress."""
nonlocal backup_id
+ if not (stage := try_parse_enum(CreateBackupStage, data.get("stage"))):
+ _LOGGER.debug("Unknown create stage: %s", data.get("stage"))
+ else:
+ on_progress(
+ CreateBackupEvent(
+ reason=None, stage=stage, state=CreateBackupState.IN_PROGRESS
+ )
+ )
if data.get("done") is True:
backup_id = data.get("reference")
+ create_errors.extend(data.get("errors", []))
backup_complete.set()
unsub = self._async_listen_job_events(backup.job_id, on_job_progress)
@@ -369,8 +391,11 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
await backup_complete.wait()
finally:
unsub()
- if not backup_id:
- raise BackupReaderWriterError("Backup failed")
+ if not backup_id or create_errors:
+ # We should add more specific error handling here in the future
+ raise BackupReaderWriterError(
+ f"Backup failed: {create_errors or 'no backup_id'}"
+ )
async def open_backup() -> AsyncIterator[bytes]:
try:
@@ -483,7 +508,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
else None
)
- restore_location: str | None
+ restore_location: str
if manager.backup_agents[agent_id].domain != DOMAIN:
# Download the backup to the supervisor. Supervisor will clean up the backup
# two days after the restore is done.
@@ -509,6 +534,8 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
location=restore_location,
),
)
+ except SupervisorNotFoundError as err:
+ raise BackupNotFound from err
except SupervisorBadRequestError as err:
# Supervisor currently does not transmit machine parsable error types
message = err.args[0]
@@ -517,17 +544,30 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
raise HomeAssistantError(message) from err
restore_complete = asyncio.Event()
+ restore_errors: list[dict[str, str]] = []
@callback
def on_job_progress(data: Mapping[str, Any]) -> None:
"""Handle backup restore progress."""
+ if not (stage := try_parse_enum(RestoreBackupStage, data.get("stage"))):
+ _LOGGER.debug("Unknown restore stage: %s", data.get("stage"))
+ else:
+ on_progress(
+ RestoreBackupEvent(
+ reason=None, stage=stage, state=RestoreBackupState.IN_PROGRESS
+ )
+ )
if data.get("done") is True:
restore_complete.set()
+ restore_errors.extend(data.get("errors", []))
unsub = self._async_listen_job_events(job.job_id, on_job_progress)
try:
await self._get_job_state(job.job_id, on_job_progress)
await restore_complete.wait()
+ if restore_errors:
+ # We should add more specific error handling here in the future
+ raise BackupReaderWriterError(f"Restore failed: {restore_errors}")
finally:
unsub()
@@ -537,28 +577,52 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
on_progress: Callable[[RestoreBackupEvent | IdleEvent], None],
) -> None:
"""Check restore status after core restart."""
- if not (restore_job_id := os.environ.get(RESTORE_JOB_ID_ENV)):
+ if not (restore_job_str := os.environ.get(RESTORE_JOB_ID_ENV)):
_LOGGER.debug("No restore job ID found in environment")
return
+ restore_job_id = UUID(restore_job_str)
_LOGGER.debug("Found restore job ID %s in environment", restore_job_id)
+ sent_event = False
+
@callback
def on_job_progress(data: Mapping[str, Any]) -> None:
"""Handle backup restore progress."""
+ nonlocal sent_event
+
+ if not (stage := try_parse_enum(RestoreBackupStage, data.get("stage"))):
+ _LOGGER.debug("Unknown restore stage: %s", data.get("stage"))
+
if data.get("done") is not True:
- on_progress(
- RestoreBackupEvent(
- reason="", stage=None, state=RestoreBackupState.IN_PROGRESS
+ if stage or not sent_event:
+ sent_event = True
+ on_progress(
+ RestoreBackupEvent(
+ reason=None,
+ stage=stage,
+ state=RestoreBackupState.IN_PROGRESS,
+ )
)
- )
return
- on_progress(
- RestoreBackupEvent(
- reason="", stage=None, state=RestoreBackupState.COMPLETED
+ restore_errors = data.get("errors", [])
+ if restore_errors:
+ _LOGGER.warning("Restore backup failed: %s", restore_errors)
+ # We should add more specific error handling here in the future
+ on_progress(
+ RestoreBackupEvent(
+ reason="unknown_error",
+ stage=stage,
+ state=RestoreBackupState.FAILED,
+ )
+ )
+ else:
+ on_progress(
+ RestoreBackupEvent(
+ reason=None, stage=stage, state=RestoreBackupState.COMPLETED
+ )
)
- )
on_progress(IdleEvent())
unsub()
@@ -571,7 +635,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
@callback
def _async_listen_job_events(
- self, job_id: str, on_event: Callable[[Mapping[str, Any]], None]
+ self, job_id: UUID, on_event: Callable[[Mapping[str, Any]], None]
) -> Callable[[], None]:
"""Listen for job events."""
@@ -586,7 +650,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
if (
data.get("event") != "job"
or not (event_data := data.get("data"))
- or event_data.get("uuid") != job_id
+ or event_data.get("uuid") != job_id.hex
):
return
on_event(event_data)
@@ -597,10 +661,10 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
return unsub
async def _get_job_state(
- self, job_id: str, on_event: Callable[[Mapping[str, Any]], None]
+ self, job_id: UUID, on_event: Callable[[Mapping[str, Any]], None]
) -> None:
"""Poll a job for its state."""
- job = await self._client.jobs.get_job(UUID(job_id))
+ job = await self._client.jobs.get_job(job_id)
_LOGGER.debug("Job state: %s", job)
on_event(job.to_dict())
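Supervisor job events carry free-form stage strings, so the progress callbacks now parse them tolerantly: try_parse_enum returns the matching enum member or None instead of raising, and unknown stages are only logged. A self-contained sketch of that behavior, with a stand-in stage enum whose members are illustrative:

from enum import StrEnum


class CreateStage(StrEnum):
    """Stand-in for CreateBackupStage; the real members live in the backup integration."""

    ADDON_REPOSITORIES = "addon_repositories"
    HOME_ASSISTANT = "home_assistant"


def try_parse(enum_cls, value):
    """Return the matching enum member, or None for unknown values."""
    try:
        return enum_cls(value)
    except ValueError:
        return None


assert try_parse(CreateStage, "home_assistant") is CreateStage.HOME_ASSISTANT
assert try_parse(CreateStage, "some_new_stage") is None  # unknown stage: logged, not fatal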
diff --git a/homeassistant/components/hassio/coordinator.py b/homeassistant/components/hassio/coordinator.py
index 2d39e740e63..833068a713c 100644
--- a/homeassistant/components/hassio/coordinator.py
+++ b/homeassistant/components/hassio/coordinator.py
@@ -295,6 +295,8 @@ def async_remove_addons_from_dev_reg(
class HassioDataUpdateCoordinator(DataUpdateCoordinator):
"""Class to retrieve Hass.io status."""
+ config_entry: ConfigEntry
+
def __init__(
self, hass: HomeAssistant, config_entry: ConfigEntry, dev_reg: dr.DeviceRegistry
) -> None:
@@ -302,6 +304,7 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
super().__init__(
hass,
_LOGGER,
+ config_entry=config_entry,
name=DOMAIN,
update_interval=HASSIO_UPDATE_INTERVAL,
# We don't want an immediate refresh since we want to avoid
diff --git a/homeassistant/components/hassio/manifest.json b/homeassistant/components/hassio/manifest.json
index ccc0f23fb43..ad98beb5baa 100644
--- a/homeassistant/components/hassio/manifest.json
+++ b/homeassistant/components/hassio/manifest.json
@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/hassio",
"iot_class": "local_polling",
"quality_scale": "internal",
- "requirements": ["aiohasupervisor==0.2.2b6"],
+ "requirements": ["aiohasupervisor==0.3.0"],
"single_config_entry": true
}
diff --git a/homeassistant/components/heicko/__init__.py b/homeassistant/components/heicko/__init__.py
new file mode 100644
index 00000000000..65c527f5252
--- /dev/null
+++ b/homeassistant/components/heicko/__init__.py
@@ -0,0 +1 @@
+"""Virtual integration: Heicko."""
diff --git a/homeassistant/components/heicko/manifest.json b/homeassistant/components/heicko/manifest.json
new file mode 100644
index 00000000000..d8f939a5bed
--- /dev/null
+++ b/homeassistant/components/heicko/manifest.json
@@ -0,0 +1,6 @@
+{
+ "domain": "heicko",
+ "name": "Heicko",
+ "integration_type": "virtual",
+ "supported_by": "motion_blinds"
+}
diff --git a/homeassistant/components/holiday/manifest.json b/homeassistant/components/holiday/manifest.json
index edf3ebe7f04..6952d48ef32 100644
--- a/homeassistant/components/holiday/manifest.json
+++ b/homeassistant/components/holiday/manifest.json
@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/holiday",
"iot_class": "local_polling",
- "requirements": ["holidays==0.65", "babel==2.15.0"]
+ "requirements": ["holidays==0.66", "babel==2.15.0"]
}
diff --git a/homeassistant/components/homee/const.py b/homeassistant/components/homee/const.py
index d1d5be97ef7..1d7ce27335f 100644
--- a/homeassistant/components/homee/const.py
+++ b/homeassistant/components/homee/const.py
@@ -1,6 +1,7 @@
"""Constants for the homee integration."""
from homeassistant.const import (
+ DEGREE,
LIGHT_LUX,
PERCENTAGE,
REVOLUTIONS_PER_MINUTE,
@@ -32,6 +33,7 @@ HOMEE_UNIT_TO_HA_UNIT = {
"W": UnitOfPower.WATT,
"m/s": UnitOfSpeed.METERS_PER_SECOND,
"km/h": UnitOfSpeed.KILOMETERS_PER_HOUR,
+ "°": DEGREE,
"°F": UnitOfTemperature.FAHRENHEIT,
"°C": UnitOfTemperature.CELSIUS,
"K": UnitOfTemperature.KELVIN,
@@ -51,7 +53,7 @@ OPEN_CLOSE_MAP_REVERSED = {
0.0: "closed",
1.0: "open",
2.0: "partial",
- 3.0: "cosing",
+ 3.0: "closing",
4.0: "opening",
}
WINDOW_MAP = {
diff --git a/homeassistant/components/homewizard/__init__.py b/homeassistant/components/homewizard/__init__.py
index 1f29be8e6b6..36c9681dcd2 100644
--- a/homeassistant/components/homewizard/__init__.py
+++ b/homeassistant/components/homewizard/__init__.py
@@ -25,7 +25,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: HomeWizardConfigEntry) -
api: HomeWizardEnergy
- if token := entry.data.get(CONF_TOKEN):
+ is_battery = entry.unique_id.startswith("HWE-BAT") if entry.unique_id else False
+
+ if (token := entry.data.get(CONF_TOKEN)) and is_battery:
api = HomeWizardEnergyV2(
entry.data[CONF_IP_ADDRESS],
token=token,
@@ -37,7 +39,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: HomeWizardConfigEntry) -
clientsession=async_get_clientsession(hass),
)
- await async_check_v2_support_and_create_issue(hass, entry)
+ if is_battery:
+ await async_check_v2_support_and_create_issue(hass, entry)
coordinator = HWEnergyDeviceUpdateCoordinator(hass, api)
try:
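The setup now only opts into the v2 (token-based) client for battery devices; everything else stays on the v1 client even if a token happens to be stored. A tiny sketch of that selection rule (the unique ids and return strings are just for illustration):

def pick_api(unique_id: str | None, token: str | None) -> str:
    """Return which HomeWizard API client setup would use."""
    is_battery = unique_id.startswith("HWE-BAT") if unique_id else False
    return "v2" if token and is_battery else "v1"


assert pick_api("HWE-BAT_aabbcc", "secret-token") == "v2"
assert pick_api("HWE-P1_aabbcc", "secret-token") == "v1"  # non-battery device stays on v1
assert pick_api("HWE-BAT_aabbcc", None) == "v1"  # battery without a stored token stays on v1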
diff --git a/homeassistant/components/homewizard/config_flow.py b/homeassistant/components/homewizard/config_flow.py
index c94f590f000..6bcc51f939e 100644
--- a/homeassistant/components/homewizard/config_flow.py
+++ b/homeassistant/components/homewizard/config_flow.py
@@ -272,9 +272,14 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN):
) -> ConfigFlowResult:
"""Handle reconfiguration of the integration."""
errors: dict[str, str] = {}
+ reconfigure_entry = self._get_reconfigure_entry()
+
if user_input:
try:
- device_info = await async_try_connect(user_input[CONF_IP_ADDRESS])
+ device_info = await async_try_connect(
+ user_input[CONF_IP_ADDRESS],
+ token=reconfigure_entry.data.get(CONF_TOKEN),
+ )
except RecoverableError as ex:
LOGGER.error(ex)
@@ -288,7 +293,6 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN):
self._get_reconfigure_entry(),
data_updates=user_input,
)
- reconfigure_entry = self._get_reconfigure_entry()
return self.async_show_form(
step_id="reconfigure",
data_schema=vol.Schema(
@@ -306,7 +310,7 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN):
)
-async def async_try_connect(ip_address: str) -> Device:
+async def async_try_connect(ip_address: str, token: str | None = None) -> Device:
"""Try to connect.
Make connection with device to test the connection
@@ -317,7 +321,7 @@ async def async_try_connect(ip_address: str) -> Device:
# Determine if device is v1 or v2 capable
if await has_v2_api(ip_address):
- energy_api = HomeWizardEnergyV2(ip_address)
+ energy_api = HomeWizardEnergyV2(ip_address, token=token)
else:
energy_api = HomeWizardEnergyV1(ip_address)
diff --git a/homeassistant/components/hue/v1/light.py b/homeassistant/components/hue/v1/light.py
index e9669d226f0..33b99a7895b 100644
--- a/homeassistant/components/hue/v1/light.py
+++ b/homeassistant/components/hue/v1/light.py
@@ -408,7 +408,7 @@ class HueLight(CoordinatorEntity, LightEntity):
if self._fixed_color_mode:
return self._fixed_color_mode
- # The light supports both hs/xy and white with adjustabe color_temperature
+ # The light supports both hs/xy and white with adjustable color_temperature
mode = self._color_mode
if mode in ("xy", "hs"):
return ColorMode.HS
diff --git a/homeassistant/components/hunterdouglas_powerview/strings.json b/homeassistant/components/hunterdouglas_powerview/strings.json
index a107e2c5be4..231270d6eef 100644
--- a/homeassistant/components/hunterdouglas_powerview/strings.json
+++ b/homeassistant/components/hunterdouglas_powerview/strings.json
@@ -5,7 +5,7 @@
"title": "Connect to the PowerView Hub",
"data": {
"host": "[%key:common::config_flow::data::ip%]",
- "api_version": "Hub Generation"
+ "api_version": "Hub generation"
},
"data_description": {
"api_version": "API version is detectable, but you can override and force a specific version"
@@ -19,7 +19,7 @@
"flow_title": "{name} ({host})",
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
- "unsupported_device": "Only the primary powerview hub can be added",
+ "unsupported_device": "Only the primary PowerView Hub can be added",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
diff --git a/homeassistant/components/idasen_desk/config_flow.py b/homeassistant/components/idasen_desk/config_flow.py
index 782d4988a3c..aa832fdfe48 100644
--- a/homeassistant/components/idasen_desk/config_flow.py
+++ b/homeassistant/components/idasen_desk/config_flow.py
@@ -87,7 +87,7 @@ class IdasenDeskConfigFlow(ConfigFlow, domain=DOMAIN):
if discovery := self._discovery_info:
self._discovered_devices[discovery.address] = discovery
else:
- current_addresses = self._async_current_ids()
+ current_addresses = self._async_current_ids(include_ignore=False)
for discovery in async_discovered_service_info(self.hass):
if (
discovery.address in current_addresses
diff --git a/homeassistant/components/iometer/__init__.py b/homeassistant/components/iometer/__init__.py
new file mode 100644
index 00000000000..5106d449fed
--- /dev/null
+++ b/homeassistant/components/iometer/__init__.py
@@ -0,0 +1,39 @@
+"""The IOmeter integration."""
+
+from __future__ import annotations
+
+from iometer import IOmeterClient, IOmeterConnectionError
+
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.const import CONF_HOST, Platform
+from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import ConfigEntryNotReady
+from homeassistant.helpers.aiohttp_client import async_get_clientsession
+
+from .coordinator import IOmeterConfigEntry, IOMeterCoordinator
+
+PLATFORMS: list[Platform] = [Platform.SENSOR]
+
+
+async def async_setup_entry(hass: HomeAssistant, entry: IOmeterConfigEntry) -> bool:
+ """Set up IOmeter from a config entry."""
+
+ host = entry.data[CONF_HOST]
+ session = async_get_clientsession(hass)
+ client = IOmeterClient(host=host, session=session)
+ try:
+ await client.get_current_status()
+ except IOmeterConnectionError as err:
+ raise ConfigEntryNotReady from err
+
+ coordinator = IOMeterCoordinator(hass, client)
+ await coordinator.async_config_entry_first_refresh()
+ entry.runtime_data = coordinator
+ await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
+
+ return True
+
+
+async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+ """Unload a config entry."""
+ return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
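The integration stores its coordinator on entry.runtime_data and aliases the entry type as ConfigEntry[IOMeterCoordinator], so platforms get typed access without a hass.data lookup. A simplified stand-alone sketch of that pattern; the classes below are stand-ins, not the real ConfigEntry or coordinator:

from dataclasses import dataclass
from typing import Generic, TypeVar

T = TypeVar("T")


@dataclass
class Entry(Generic[T]):
    """Stand-in for ConfigEntry[T] with typed runtime_data."""

    runtime_data: T


@dataclass
class Coordinator:
    """Stand-in for IOMeterCoordinator."""

    identifier: str


entry: Entry[Coordinator] = Entry(runtime_data=Coordinator(identifier="entry-id"))
print(entry.runtime_data.identifier)  # typed access; no hass.data[DOMAIN][entry_id] indirection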
diff --git a/homeassistant/components/iometer/config_flow.py b/homeassistant/components/iometer/config_flow.py
new file mode 100644
index 00000000000..ee03d09abf7
--- /dev/null
+++ b/homeassistant/components/iometer/config_flow.py
@@ -0,0 +1,91 @@
+"""Config flow for the IOmeter integration."""
+
+from typing import Any, Final
+
+from iometer import IOmeterClient, IOmeterConnectionError
+import voluptuous as vol
+
+from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
+from homeassistant.const import CONF_HOST
+from homeassistant.helpers.aiohttp_client import async_get_clientsession
+from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
+
+from .const import DOMAIN
+
+CONFIG_SCHEMA: Final = vol.Schema({vol.Required(CONF_HOST): str})
+
+
+class IOMeterConfigFlow(ConfigFlow, domain=DOMAIN):
+ """Handles the config flow for a IOmeter bridge and core."""
+
+ def __init__(self) -> None:
+ """Initialize the config flow."""
+ self._host: str
+ self._meter_number: str
+
+ async def async_step_zeroconf(
+ self, discovery_info: ZeroconfServiceInfo
+ ) -> ConfigFlowResult:
+ """Handle zeroconf discovery."""
+ self._host = host = discovery_info.host
+ self._async_abort_entries_match({CONF_HOST: host})
+
+ session = async_get_clientsession(self.hass)
+ client = IOmeterClient(host=host, session=session)
+ try:
+ status = await client.get_current_status()
+ except IOmeterConnectionError:
+ return self.async_abort(reason="cannot_connect")
+
+ self._meter_number = status.meter.number
+
+ await self.async_set_unique_id(status.device.id)
+ self._abort_if_unique_id_configured()
+
+ self.context["title_placeholders"] = {"name": f"IOmeter {self._meter_number}"}
+ return await self.async_step_zeroconf_confirm()
+
+ async def async_step_zeroconf_confirm(
+ self, user_input: dict[str, Any] | None = None
+ ) -> ConfigFlowResult:
+ """Confirm discovery."""
+ if user_input is not None:
+ return await self._async_create_entry()
+
+ self._set_confirm_only()
+ return self.async_show_form(
+ step_id="zeroconf_confirm",
+ description_placeholders={"meter_number": self._meter_number},
+ )
+
+ async def async_step_user(
+ self, user_input: dict[str, Any] | None = None
+ ) -> ConfigFlowResult:
+ """Handle the initial configuration."""
+ errors: dict[str, str] = {}
+
+ if user_input is not None:
+ self._host = user_input[CONF_HOST]
+ session = async_get_clientsession(self.hass)
+ client = IOmeterClient(host=self._host, session=session)
+ try:
+ status = await client.get_current_status()
+ except IOmeterConnectionError:
+ errors["base"] = "cannot_connect"
+ else:
+ self._meter_number = status.meter.number
+ await self.async_set_unique_id(status.device.id)
+ self._abort_if_unique_id_configured()
+ return await self._async_create_entry()
+ return self.async_show_form(
+ step_id="user",
+ data_schema=CONFIG_SCHEMA,
+ errors=errors,
+ )
+
+ async def _async_create_entry(self) -> ConfigFlowResult:
+ """Create entry."""
+ return self.async_create_entry(
+ title=f"IOmeter {self._meter_number}",
+ data={CONF_HOST: self._host},
+ )
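Both the zeroconf and user steps set the device id from the status endpoint as the unique id and abort if it is already configured, so a rediscovered device never creates a second entry even when its IP changes. A rough logic-only sketch of that guard (made-up ids and hosts):

known_ids: set[str] = set()


def add_device(device_id: str, host: str) -> str:
    """Mimic the unique-id guard: abort on a duplicate, create an entry otherwise."""
    if device_id in known_ids:  # _abort_if_unique_id_configured()
        return "abort: already_configured"
    known_ids.add(device_id)
    return f"create entry for {host}"


print(add_device("iometer-0001", "192.0.2.10"))  # create entry for 192.0.2.10
print(add_device("iometer-0001", "192.0.2.23"))  # abort: already_configured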
diff --git a/homeassistant/components/iometer/const.py b/homeassistant/components/iometer/const.py
new file mode 100644
index 00000000000..797aefcd7f0
--- /dev/null
+++ b/homeassistant/components/iometer/const.py
@@ -0,0 +1,5 @@
+"""Constants for the IOmeter integration."""
+
+from typing import Final
+
+DOMAIN: Final = "iometer"
diff --git a/homeassistant/components/iometer/coordinator.py b/homeassistant/components/iometer/coordinator.py
new file mode 100644
index 00000000000..3321b032e4b
--- /dev/null
+++ b/homeassistant/components/iometer/coordinator.py
@@ -0,0 +1,55 @@
+"""DataUpdateCoordinator for IOmeter."""
+
+from dataclasses import dataclass
+from datetime import timedelta
+import logging
+
+from iometer import IOmeterClient, IOmeterConnectionError, Reading, Status
+
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
+
+from .const import DOMAIN
+
+_LOGGER = logging.getLogger(__name__)
+DEFAULT_SCAN_INTERVAL = timedelta(seconds=10)
+
+type IOmeterConfigEntry = ConfigEntry[IOMeterCoordinator]
+
+
+@dataclass
+class IOmeterData:
+ """Class for data update."""
+
+ reading: Reading
+ status: Status
+
+
+class IOMeterCoordinator(DataUpdateCoordinator[IOmeterData]):
+ """Class to manage fetching IOmeter data."""
+
+ config_entry: IOmeterConfigEntry
+ client: IOmeterClient
+
+ def __init__(self, hass: HomeAssistant, client: IOmeterClient) -> None:
+ """Initialize coordinator."""
+
+ super().__init__(
+ hass,
+ _LOGGER,
+ name=DOMAIN,
+ update_interval=DEFAULT_SCAN_INTERVAL,
+ )
+ self.client = client
+ self.identifier = self.config_entry.entry_id
+
+ async def _async_update_data(self) -> IOmeterData:
+ """Update data async."""
+ try:
+ reading = await self.client.get_current_reading()
+ status = await self.client.get_current_status()
+ except IOmeterConnectionError as error:
+ raise UpdateFailed(f"Error communicating with IOmeter: {error}") from error
+
+ return IOmeterData(reading=reading, status=status)
diff --git a/homeassistant/components/iometer/entity.py b/homeassistant/components/iometer/entity.py
new file mode 100644
index 00000000000..86494857e18
--- /dev/null
+++ b/homeassistant/components/iometer/entity.py
@@ -0,0 +1,24 @@
+"""Base class for IOmeter entities."""
+
+from homeassistant.helpers.device_registry import DeviceInfo
+from homeassistant.helpers.update_coordinator import CoordinatorEntity
+
+from .const import DOMAIN
+from .coordinator import IOMeterCoordinator
+
+
+class IOmeterEntity(CoordinatorEntity[IOMeterCoordinator]):
+ """Defines a base IOmeter entity."""
+
+ _attr_has_entity_name = True
+
+ def __init__(self, coordinator: IOMeterCoordinator) -> None:
+ """Initialize IOmeter entity."""
+ super().__init__(coordinator)
+ status = coordinator.data.status
+ self._attr_device_info = DeviceInfo(
+ identifiers={(DOMAIN, status.device.id)},
+ manufacturer="IOmeter GmbH",
+ model="IOmeter",
+ sw_version=f"{status.device.core.version}/{status.device.bridge.version}",
+ )
diff --git a/homeassistant/components/iometer/icons.json b/homeassistant/components/iometer/icons.json
new file mode 100644
index 00000000000..8c71684f859
--- /dev/null
+++ b/homeassistant/components/iometer/icons.json
@@ -0,0 +1,38 @@
+{
+ "entity": {
+ "sensor": {
+ "attachment_status": {
+ "default": "mdi:eye",
+ "state": {
+ "attached": "mdi:check-bold",
+ "detached": "mdi:close",
+ "unknown": "mdi:help"
+ }
+ },
+ "connection_status": {
+ "default": "mdi:eye",
+ "state": {
+ "connected": "mdi:check-bold",
+ "disconnected": "mdi:close",
+ "unknown": "mdi:help"
+ }
+ },
+ "pin_status": {
+ "default": "mdi:eye",
+ "state": {
+ "entered": "mdi:lock-open",
+ "pending": "mdi:lock-clock",
+ "missing": "mdi:lock",
+ "unknown": "mdi:help"
+ }
+ },
+ "power_status": {
+ "default": "mdi:eye",
+ "state": {
+ "battery": "mdi:battery",
+ "wired": "mdi:power-plug"
+ }
+ }
+ }
+ }
+}
diff --git a/homeassistant/components/iometer/manifest.json b/homeassistant/components/iometer/manifest.json
new file mode 100644
index 00000000000..061a2318e04
--- /dev/null
+++ b/homeassistant/components/iometer/manifest.json
@@ -0,0 +1,12 @@
+{
+ "domain": "iometer",
+ "name": "IOmeter",
+ "codeowners": ["@MaestroOnICe"],
+ "config_flow": true,
+ "documentation": "https://www.home-assistant.io/integrations/iometer",
+ "integration_type": "device",
+ "iot_class": "local_polling",
+ "quality_scale": "bronze",
+ "requirements": ["iometer==0.1.0"],
+ "zeroconf": ["_iometer._tcp.local."]
+}
diff --git a/homeassistant/components/iometer/quality_scale.yaml b/homeassistant/components/iometer/quality_scale.yaml
new file mode 100644
index 00000000000..71496d8043c
--- /dev/null
+++ b/homeassistant/components/iometer/quality_scale.yaml
@@ -0,0 +1,74 @@
+rules:
+ # Bronze
+ action-setup:
+ status: exempt
+ comment: This integration does not provide additional actions.
+ appropriate-polling: done
+ brands: done
+ common-modules: done
+ config-flow-test-coverage: done
+ config-flow: done
+ dependency-transparency: done
+ docs-actions:
+ status: exempt
+ comment: This integration does not provide additional actions.
+ docs-high-level-description: done
+ docs-installation-instructions: done
+ docs-removal-instructions: done
+ entity-event-setup:
+ status: exempt
+ comment: This integration does not register any events.
+ entity-unique-id: done
+ has-entity-name: done
+ runtime-data: done
+ test-before-configure: done
+ test-before-setup: done
+ unique-config-entry: done
+
+ # Silver
+ action-exceptions:
+ status: exempt
+ comment: This integration does not provide additional actions.
+ config-entry-unloading: done
+ docs-configuration-parameters:
+ status: exempt
+    comment: This integration does not have an options flow.
+ docs-installation-parameters: done
+ entity-unavailable: done
+ integration-owner: done
+ log-when-unavailable: done
+ parallel-updates:
+ status: exempt
+    comment: This integration polls data using a coordinator, so there is no need for parallel updates.
+ reauthentication-flow:
+ status: exempt
+ comment: This integration requires no authentication.
+ test-coverage: todo
+
+ # Gold
+ devices: todo
+ diagnostics: todo
+ discovery-update-info: todo
+ discovery: done
+ docs-data-update: todo
+ docs-examples: todo
+ docs-known-limitations: todo
+ docs-supported-devices: todo
+ docs-supported-functions: todo
+ docs-troubleshooting: todo
+ docs-use-cases: todo
+ dynamic-devices: todo
+ entity-category: done
+ entity-device-class: done
+ entity-disabled-by-default: done
+ entity-translations: done
+ exception-translations: todo
+ icon-translations: todo
+ reconfiguration-flow: todo
+ repair-issues: todo
+ stale-devices: todo
+
+ # Platinum
+ async-dependency: done
+ inject-websession: done
+ strict-typing: todo
diff --git a/homeassistant/components/iometer/sensor.py b/homeassistant/components/iometer/sensor.py
new file mode 100644
index 00000000000..7d4c1155e8b
--- /dev/null
+++ b/homeassistant/components/iometer/sensor.py
@@ -0,0 +1,146 @@
+"""IOmeter sensors."""
+
+from collections.abc import Callable
+from dataclasses import dataclass
+
+from homeassistant.components.sensor import (
+ SensorDeviceClass,
+ SensorEntity,
+ SensorEntityDescription,
+ SensorStateClass,
+)
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.const import (
+ PERCENTAGE,
+ SIGNAL_STRENGTH_DECIBELS,
+ STATE_UNKNOWN,
+ EntityCategory,
+ UnitOfEnergy,
+ UnitOfPower,
+)
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.helpers.typing import StateType
+
+from .coordinator import IOMeterCoordinator, IOmeterData
+from .entity import IOmeterEntity
+
+
+@dataclass(frozen=True, kw_only=True)
+class IOmeterEntityDescription(SensorEntityDescription):
+ """Describes IOmeter sensor entity."""
+
+ value_fn: Callable[[IOmeterData], str | int | float]
+
+
+SENSOR_TYPES: list[IOmeterEntityDescription] = [
+ IOmeterEntityDescription(
+ key="meter_number",
+ translation_key="meter_number",
+ icon="mdi:meter-electric",
+ value_fn=lambda data: data.status.meter.number,
+ ),
+ IOmeterEntityDescription(
+ key="wifi_rssi",
+ translation_key="wifi_rssi",
+ native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS,
+ device_class=SensorDeviceClass.SIGNAL_STRENGTH,
+ state_class=SensorStateClass.MEASUREMENT,
+ entity_category=EntityCategory.DIAGNOSTIC,
+ entity_registry_enabled_default=False,
+ value_fn=lambda data: data.status.device.bridge.rssi,
+ ),
+ IOmeterEntityDescription(
+ key="core_bridge_rssi",
+ translation_key="core_bridge_rssi",
+ native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS,
+ device_class=SensorDeviceClass.SIGNAL_STRENGTH,
+ state_class=SensorStateClass.MEASUREMENT,
+ entity_category=EntityCategory.DIAGNOSTIC,
+ entity_registry_enabled_default=False,
+ value_fn=lambda data: data.status.device.core.rssi,
+ ),
+ IOmeterEntityDescription(
+ key="power_status",
+ translation_key="power_status",
+ device_class=SensorDeviceClass.ENUM,
+ options=["battery", "wired", "unknown"],
+ value_fn=lambda data: data.status.device.core.power_status or STATE_UNKNOWN,
+ ),
+ IOmeterEntityDescription(
+ key="battery_level",
+ translation_key="battery_level",
+ native_unit_of_measurement=PERCENTAGE,
+ device_class=SensorDeviceClass.BATTERY,
+ state_class=SensorStateClass.MEASUREMENT,
+ value_fn=lambda data: data.status.device.core.battery_level,
+ ),
+ IOmeterEntityDescription(
+ key="pin_status",
+ translation_key="pin_status",
+ device_class=SensorDeviceClass.ENUM,
+ options=["entered", "pending", "missing", "unknown"],
+ value_fn=lambda data: data.status.device.core.pin_status or STATE_UNKNOWN,
+ ),
+ IOmeterEntityDescription(
+ key="total_consumption",
+ translation_key="total_consumption",
+ native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
+ device_class=SensorDeviceClass.ENERGY,
+ state_class=SensorStateClass.TOTAL,
+ value_fn=lambda data: data.reading.get_total_consumption(),
+ ),
+ IOmeterEntityDescription(
+ key="total_production",
+ translation_key="total_production",
+ native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
+ device_class=SensorDeviceClass.ENERGY,
+ state_class=SensorStateClass.TOTAL,
+ value_fn=lambda data: data.reading.get_total_production(),
+ ),
+ IOmeterEntityDescription(
+ key="power",
+ native_unit_of_measurement=UnitOfPower.WATT,
+ device_class=SensorDeviceClass.POWER,
+ state_class=SensorStateClass.MEASUREMENT,
+ value_fn=lambda data: data.reading.get_current_power(),
+ ),
+]
+
+
+async def async_setup_entry(
+ hass: HomeAssistant,
+ config_entry: ConfigEntry,
+ async_add_entities: AddEntitiesCallback,
+) -> None:
+ """Set up the Sensors."""
+ coordinator: IOMeterCoordinator = config_entry.runtime_data
+
+ async_add_entities(
+ IOmeterSensor(
+ coordinator=coordinator,
+ description=description,
+ )
+ for description in SENSOR_TYPES
+ )
+
+
+class IOmeterSensor(IOmeterEntity, SensorEntity):
+ """Defines a IOmeter sensor."""
+
+ entity_description: IOmeterEntityDescription
+
+ def __init__(
+ self,
+ coordinator: IOMeterCoordinator,
+ description: IOmeterEntityDescription,
+ ) -> None:
+ """Initialize the sensor."""
+ super().__init__(coordinator)
+ self.entity_description = description
+ self._attr_unique_id = f"{coordinator.identifier}_{description.key}"
+
+ @property
+ def native_value(self) -> StateType:
+ """Return the sensor value."""
+ return self.entity_description.value_fn(self.coordinator.data)
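Every sensor above is driven by its entity description: the shared IOmeterData snapshot from the coordinator is passed to the description's value_fn, which picks out that sensor's value. A compact sketch of that dispatch with simplified stand-in classes:

from collections.abc import Callable
from dataclasses import dataclass


@dataclass
class Data:
    """Stand-in for IOmeterData."""

    battery_level: int
    wifi_rssi: int


@dataclass
class Description:
    """Stand-in for IOmeterEntityDescription."""

    key: str
    value_fn: Callable[[Data], int]


descriptions = [
    Description("battery_level", lambda d: d.battery_level),
    Description("wifi_rssi", lambda d: d.wifi_rssi),
]
snapshot = Data(battery_level=87, wifi_rssi=-61)
print({d.key: d.value_fn(snapshot) for d in descriptions})
# {'battery_level': 87, 'wifi_rssi': -61}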
diff --git a/homeassistant/components/iometer/strings.json b/homeassistant/components/iometer/strings.json
new file mode 100644
index 00000000000..31deb16aa9c
--- /dev/null
+++ b/homeassistant/components/iometer/strings.json
@@ -0,0 +1,65 @@
+{
+ "config": {
+ "step": {
+ "user": {
+        "description": "Set up your IOmeter device for local data",
+ "data": {
+ "host": "[%key:common::config_flow::data::host%]"
+ },
+ "data_description": {
+ "host": "The hostname or IP address of the IOmeter device to connect to."
+ }
+ },
+ "zeroconf_confirm": {
+ "title": "Discovered IOmeter",
+ "description": "Do you want to set up IOmeter on the meter with meter number: {meter_number}?"
+ }
+ },
+ "abort": {
+ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
+ "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]"
+ },
+ "error": {
+ "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
+ "unknown": "Unexpected error"
+ }
+ },
+ "entity": {
+ "sensor": {
+ "battery_level": {
+ "name": "Battery level"
+ },
+ "meter_number": {
+ "name": "Meter number"
+ },
+ "pin_status": {
+ "name": "PIN status",
+ "state": {
+ "entered": "Entered",
+ "pending": "Pending",
+ "missing": "Missing",
+ "unknown": "Unknown"
+ }
+ },
+ "power_status": {
+ "name": "Power supply",
+ "state": {
+ "battery": "Battery",
+ "wired": "Wired"
+ }
+ },
+ "total_consumption": {
+ "name": "Total consumption"
+ },
+ "total_production": {
+ "name": "Total production"
+ },
+ "core_bridge_rssi": {
+ "name": "Signal strength Core/Bridge"
+ },
+ "wifi_rssi": {
+ "name": "Signal strength Wi-Fi"
+ }
+ }
+ }
+}
diff --git a/homeassistant/components/isy994/sensor.py b/homeassistant/components/isy994/sensor.py
index 789075e5c57..58ba3171bc8 100644
--- a/homeassistant/components/isy994/sensor.py
+++ b/homeassistant/components/isy994/sensor.py
@@ -73,7 +73,7 @@ ISY_CONTROL_TO_DEVICE_CLASS = {
"CV": SensorDeviceClass.VOLTAGE,
"DEWPT": SensorDeviceClass.TEMPERATURE,
"DISTANC": SensorDeviceClass.DISTANCE,
- "ETO": SensorDeviceClass.PRECIPITATION_INTENSITY,
+ "ETO": SensorDeviceClass.PRECIPITATION_INTENSITY, # codespell:ignore eto
"FATM": SensorDeviceClass.WEIGHT,
"FREQ": SensorDeviceClass.FREQUENCY,
"MUSCLEM": SensorDeviceClass.WEIGHT,
diff --git a/homeassistant/components/jellyfin/manifest.json b/homeassistant/components/jellyfin/manifest.json
index 810b9ea45a9..d6b2261acaa 100644
--- a/homeassistant/components/jellyfin/manifest.json
+++ b/homeassistant/components/jellyfin/manifest.json
@@ -1,7 +1,7 @@
{
"domain": "jellyfin",
"name": "Jellyfin",
- "codeowners": ["@j-stienstra", "@ctalkington"],
+ "codeowners": ["@RunC0deRun", "@ctalkington"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/jellyfin",
"integration_type": "service",
diff --git a/homeassistant/components/jvc_projector/strings.json b/homeassistant/components/jvc_projector/strings.json
index b517bf064e1..c6e5736bd2d 100644
--- a/homeassistant/components/jvc_projector/strings.json
+++ b/homeassistant/components/jvc_projector/strings.json
@@ -36,7 +36,7 @@
"entity": {
"binary_sensor": {
"jvc_power": {
- "name": "[%key:component::sensor::entity_component::power::name%]"
+ "name": "[%key:component::binary_sensor::entity_component::power::name%]"
}
},
"select": {
diff --git a/homeassistant/components/lacrosse_view/manifest.json b/homeassistant/components/lacrosse_view/manifest.json
index 86b2f61a872..38e64274deb 100644
--- a/homeassistant/components/lacrosse_view/manifest.json
+++ b/homeassistant/components/lacrosse_view/manifest.json
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/lacrosse_view",
"iot_class": "cloud_polling",
"loggers": ["lacrosse_view"],
- "requirements": ["lacrosse-view==1.0.4"]
+ "requirements": ["lacrosse-view==1.1.1"]
}
diff --git a/homeassistant/components/lacrosse_view/sensor.py b/homeassistant/components/lacrosse_view/sensor.py
index b2ad9672504..fceddeb9b2c 100644
--- a/homeassistant/components/lacrosse_view/sensor.py
+++ b/homeassistant/components/lacrosse_view/sensor.py
@@ -45,7 +45,7 @@ class LaCrosseSensorEntityDescription(SensorEntityDescription):
def get_value(sensor: Sensor, field: str) -> float | int | str | None:
"""Get the value of a sensor field."""
- field_data = sensor.data.get(field)
+ field_data = sensor.data.get(field) if sensor.data is not None else None
if field_data is None:
return None
value = field_data["values"][-1]["s"]
@@ -178,7 +178,7 @@ async def async_setup_entry(
continue
# if the API returns a different unit of measurement from the description, update it
- if sensor.data.get(field) is not None:
+ if sensor.data is not None and sensor.data.get(field) is not None:
native_unit_of_measurement = UNIT_OF_MEASUREMENT_MAP.get(
sensor.data[field].get("unit")
)
@@ -240,7 +240,9 @@ class LaCrosseViewSensor(
@property
def available(self) -> bool:
"""Return True if entity is available."""
+ data = self.coordinator.data[self.index].data
return (
super().available
- and self.entity_description.key in self.coordinator.data[self.index].data
+ and data is not None
+ and self.entity_description.key in data
)
diff --git a/homeassistant/components/lcn/manifest.json b/homeassistant/components/lcn/manifest.json
index 2ac183dcc97..c1dd7751940 100644
--- a/homeassistant/components/lcn/manifest.json
+++ b/homeassistant/components/lcn/manifest.json
@@ -8,5 +8,5 @@
"documentation": "https://www.home-assistant.io/integrations/lcn",
"iot_class": "local_push",
"loggers": ["pypck"],
- "requirements": ["pypck==0.8.3", "lcn-frontend==0.2.3"]
+ "requirements": ["pypck==0.8.5", "lcn-frontend==0.2.3"]
}
diff --git a/homeassistant/components/ld2410_ble/manifest.json b/homeassistant/components/ld2410_ble/manifest.json
index a29a9834c9b..36d0150642e 100644
--- a/homeassistant/components/ld2410_ble/manifest.json
+++ b/homeassistant/components/ld2410_ble/manifest.json
@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/ld2410_ble",
"integration_type": "device",
"iot_class": "local_push",
- "requirements": ["bluetooth-data-tools==1.23.3", "ld2410-ble==0.1.1"]
+ "requirements": ["bluetooth-data-tools==1.23.4", "ld2410-ble==0.1.1"]
}
diff --git a/homeassistant/components/led_ble/manifest.json b/homeassistant/components/led_ble/manifest.json
index 8608c0b2798..309399e6958 100644
--- a/homeassistant/components/led_ble/manifest.json
+++ b/homeassistant/components/led_ble/manifest.json
@@ -35,5 +35,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/led_ble",
"iot_class": "local_polling",
- "requirements": ["bluetooth-data-tools==1.23.3", "led-ble==1.1.4"]
+ "requirements": ["bluetooth-data-tools==1.23.4", "led-ble==1.1.6"]
}
diff --git a/homeassistant/components/lg_thinq/manifest.json b/homeassistant/components/lg_thinq/manifest.json
index 6dd60909c66..b00d28c1d4f 100644
--- a/homeassistant/components/lg_thinq/manifest.json
+++ b/homeassistant/components/lg_thinq/manifest.json
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/lg_thinq",
"iot_class": "cloud_push",
"loggers": ["thinqconnect"],
- "requirements": ["thinqconnect==1.0.2"]
+ "requirements": ["thinqconnect==1.0.4"]
}
diff --git a/homeassistant/components/matter/select.py b/homeassistant/components/matter/select.py
index dd4f8314bef..b2d1c7f8ddb 100644
--- a/homeassistant/components/matter/select.py
+++ b/homeassistant/components/matter/select.py
@@ -308,7 +308,7 @@ DISCOVERY_SCHEMAS = [
platform=Platform.SELECT,
entity_description=MatterSelectEntityDescription(
key="MatterDeviceEnergyManagementMode",
- translation_key="mode",
+ translation_key="device_energy_management_mode",
),
entity_class=MatterModeSelectEntity,
required_attributes=(
diff --git a/homeassistant/components/matter/strings.json b/homeassistant/components/matter/strings.json
index f1a123c61be..f299b5cb628 100644
--- a/homeassistant/components/matter/strings.json
+++ b/homeassistant/components/matter/strings.json
@@ -183,6 +183,9 @@
"mode": {
"name": "Mode"
},
+ "device_energy_management_mode": {
+ "name": "Energy management mode"
+ },
"sensitivity_level": {
"name": "Sensitivity",
"state": {
diff --git a/homeassistant/components/mcp_server/llm_api.py b/homeassistant/components/mcp_server/llm_api.py
index f4292744815..5c29b29153e 100644
--- a/homeassistant/components/mcp_server/llm_api.py
+++ b/homeassistant/components/mcp_server/llm_api.py
@@ -35,13 +35,13 @@ class StatelessAssistAPI(llm.AssistAPI):
"""Return the prompt for the exposed entities."""
prompt = []
- if exposed_entities:
+ if exposed_entities and exposed_entities["entities"]:
prompt.append(
"An overview of the areas and the devices in this smart home:"
)
entities = [
{k: v for k, v in entity_info.items() if k in EXPOSED_ENTITY_FIELDS}
- for entity_info in exposed_entities.values()
+ for entity_info in exposed_entities["entities"].values()
]
prompt.append(yaml_util.dump(list(entities)))
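Note: the change above assumes the exposed-entities payload is now a dict with an "entities" key instead of a flat mapping. A rough sketch of the prompt construction under that assumption; EXPOSED_ENTITY_FIELDS and the entity fields are illustrative, and PyYAML stands in for homeassistant.util.yaml.

import yaml  # PyYAML, standing in for homeassistant.util.yaml

EXPOSED_ENTITY_FIELDS = {"names", "domain", "state", "areas"}  # illustrative field set


def exposed_entities_prompt(exposed_entities: dict | None) -> list[str]:
    """Build the exposed-entities prompt, skipping empty payloads entirely."""
    prompt: list[str] = []
    if exposed_entities and exposed_entities["entities"]:
        prompt.append("An overview of the areas and the devices in this smart home:")
        entities = [
            {k: v for k, v in entity_info.items() if k in EXPOSED_ENTITY_FIELDS}
            for entity_info in exposed_entities["entities"].values()
        ]
        prompt.append(yaml.dump(entities))
    return prompt


print(exposed_entities_prompt({"entities": {}}))  # [] - no header for an empty house
print(
    exposed_entities_prompt(
        {"entities": {"light.kitchen": {"names": "Kitchen", "domain": "light", "state": "on"}}}
    )
)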
diff --git a/homeassistant/components/mill/manifest.json b/homeassistant/components/mill/manifest.json
index 6316eb72096..44c1136b7d5 100644
--- a/homeassistant/components/mill/manifest.json
+++ b/homeassistant/components/mill/manifest.json
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/mill",
"iot_class": "local_polling",
"loggers": ["mill", "mill_local"],
- "requirements": ["millheater==0.12.2", "mill-local==0.3.0"]
+ "requirements": ["millheater==0.12.3", "mill-local==0.3.0"]
}
diff --git a/homeassistant/components/motionmount/entity.py b/homeassistant/components/motionmount/entity.py
index 81d4d0119b5..bbb79729a9e 100644
--- a/homeassistant/components/motionmount/entity.py
+++ b/homeassistant/components/motionmount/entity.py
@@ -30,7 +30,7 @@ class MotionMountEntity(Entity):
self.config_entry = config_entry
# We store the pin, as we might need it during reconnect
- self.pin = config_entry.data[CONF_PIN]
+ self.pin = config_entry.data.get(CONF_PIN)
mac = format_mac(mm.mac.hex())
diff --git a/homeassistant/components/motionmount/sensor.py b/homeassistant/components/motionmount/sensor.py
index 8e55fad4a8b..685c3ebf932 100644
--- a/homeassistant/components/motionmount/sensor.py
+++ b/homeassistant/components/motionmount/sensor.py
@@ -1,6 +1,9 @@
"""Support for MotionMount sensors."""
+from typing import Final
+
import motionmount
+from motionmount import MotionMountSystemError
from homeassistant.components.sensor import SensorDeviceClass, SensorEntity
from homeassistant.core import HomeAssistant
@@ -9,6 +12,14 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import MotionMountConfigEntry
from .entity import MotionMountEntity
+ERROR_MESSAGES: Final = {
+ MotionMountSystemError.MotorError: "motor",
+ MotionMountSystemError.ObstructionDetected: "obstruction",
+ MotionMountSystemError.TVWidthConstraintError: "tv_width_constraint",
+ MotionMountSystemError.HDMICECError: "hdmi_cec",
+ MotionMountSystemError.InternalError: "internal",
+}
+
async def async_setup_entry(
hass: HomeAssistant,
@@ -25,7 +36,14 @@ class MotionMountErrorStatusSensor(MotionMountEntity, SensorEntity):
"""The error status sensor of a MotionMount."""
_attr_device_class = SensorDeviceClass.ENUM
- _attr_options = ["none", "motor", "internal"]
+ _attr_options = [
+ "none",
+ "motor",
+ "hdmi_cec",
+ "obstruction",
+ "tv_width_constraint",
+ "internal",
+ ]
_attr_translation_key = "motionmount_error_status"
def __init__(
@@ -38,13 +56,10 @@ class MotionMountErrorStatusSensor(MotionMountEntity, SensorEntity):
@property
def native_value(self) -> str:
"""Return error status."""
- errors = self.mm.error_status or 0
+ status = self.mm.system_status
- if errors & (1 << 31):
- # Only when but 31 is set are there any errors active at this moment
- if errors & (1 << 10):
- return "motor"
-
- return "internal"
+ for error, message in ERROR_MESSAGES.items():
+ if error in status:
+ return message
return "none"
diff --git a/homeassistant/components/motionmount/strings.json b/homeassistant/components/motionmount/strings.json
index 1fcb6c47c99..75fd0773322 100644
--- a/homeassistant/components/motionmount/strings.json
+++ b/homeassistant/components/motionmount/strings.json
@@ -72,6 +72,9 @@
"state": {
"none": "None",
"motor": "Motor",
+ "hdmi_cec": "HDMI CEC",
+ "obstruction": "Obstruction",
+ "tv_width_constraint": "TV width constraint",
"internal": "Internal"
}
}
diff --git a/homeassistant/components/mqtt/async_client.py b/homeassistant/components/mqtt/async_client.py
index 882e910d7e8..5f90136df44 100644
--- a/homeassistant/components/mqtt/async_client.py
+++ b/homeassistant/components/mqtt/async_client.py
@@ -51,10 +51,10 @@ class AsyncMQTTClient(MQTTClient):
since the client is running in an async event loop
and will never run in multiple threads.
"""
- self._in_callback_mutex = NullLock()
- self._callback_mutex = NullLock()
- self._msgtime_mutex = NullLock()
- self._out_message_mutex = NullLock()
- self._in_message_mutex = NullLock()
- self._reconnect_delay_mutex = NullLock()
- self._mid_generate_mutex = NullLock()
+ self._in_callback_mutex = NullLock() # type: ignore[assignment]
+ self._callback_mutex = NullLock() # type: ignore[assignment]
+ self._msgtime_mutex = NullLock() # type: ignore[assignment]
+ self._out_message_mutex = NullLock() # type: ignore[assignment]
+ self._in_message_mutex = NullLock() # type: ignore[assignment]
+ self._reconnect_delay_mutex = NullLock() # type: ignore[assignment]
+ self._mid_generate_mutex = NullLock() # type: ignore[assignment]
diff --git a/homeassistant/components/mqtt/client.py b/homeassistant/components/mqtt/client.py
index 16a02e4956e..3aca566dbfc 100644
--- a/homeassistant/components/mqtt/client.py
+++ b/homeassistant/components/mqtt/client.py
@@ -15,7 +15,6 @@ import socket
import ssl
import time
from typing import TYPE_CHECKING, Any
-import uuid
import certifi
@@ -117,7 +116,7 @@ MAX_UNSUBSCRIBES_PER_CALL = 500
MAX_PACKETS_TO_READ = 500
-type SocketType = socket.socket | ssl.SSLSocket | mqtt.WebsocketWrapper | Any
+type SocketType = socket.socket | ssl.SSLSocket | mqtt._WebsocketWrapper | Any # noqa: SLF001
type SubscribePayloadType = str | bytes | bytearray # Only bytes if encoding is None
@@ -309,12 +308,13 @@ class MqttClientSetup:
if (client_id := config.get(CONF_CLIENT_ID)) is None:
# PAHO MQTT relies on the MQTT server to generate random client IDs.
# However, that feature is not mandatory so we generate our own.
- client_id = mqtt.base62(uuid.uuid4().int, padding=22)
+ client_id = None
transport: str = config.get(CONF_TRANSPORT, DEFAULT_TRANSPORT)
self._client = AsyncMQTTClient(
+ mqtt.CallbackAPIVersion.VERSION1,
client_id,
protocol=proto,
- transport=transport,
+ transport=transport, # type: ignore[arg-type]
reconnect_on_failure=False,
)
self._client.setup()
@@ -533,7 +533,7 @@ class MQTT:
try:
# Some operating systems do not allow us to set the preferred
# buffer size. In that case we try some other size options.
- sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, new_buffer_size)
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, new_buffer_size) # type: ignore[union-attr]
except OSError as err:
if new_buffer_size <= MIN_BUFFER_SIZE:
_LOGGER.warning(
@@ -1216,7 +1216,9 @@ class MQTT:
if not future.done():
future.set_exception(asyncio.TimeoutError)
- async def _async_wait_for_mid_or_raise(self, mid: int, result_code: int) -> None:
+ async def _async_wait_for_mid_or_raise(
+ self, mid: int | None, result_code: int
+ ) -> None:
"""Wait for ACK from broker or raise on error."""
if result_code != 0:
# pylint: disable-next=import-outside-toplevel
@@ -1232,6 +1234,8 @@ class MQTT:
# Create the mid event if not created, either _mqtt_handle_mid or
# _async_wait_for_mid_or_raise may be executed first.
+ if TYPE_CHECKING:
+ assert mid is not None
future = self._async_get_mid_future(mid)
loop = self.hass.loop
timer_handle = loop.call_later(TIMEOUT_ACK, self._async_timeout_mid, future)
@@ -1269,7 +1273,7 @@ def _matcher_for_topic(subscription: str) -> Callable[[str], bool]:
# pylint: disable-next=import-outside-toplevel
from paho.mqtt.matcher import MQTTMatcher
- matcher = MQTTMatcher()
+ matcher = MQTTMatcher() # type: ignore[no-untyped-call]
matcher[subscription] = True
- return lambda topic: next(matcher.iter_match(topic), False)
+ return lambda topic: next(matcher.iter_match(topic), False) # type: ignore[no-untyped-call]
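Note: the bump to paho-mqtt 2.1.0 (see the manifest change further down) is why the constructor now takes an explicit callback API version and why client_id can simply be None. A minimal standalone sketch of the new constructor call; the broker host is a placeholder and the connect call is left commented out.

import paho.mqtt.client as mqtt  # paho-mqtt >= 2.0

# paho-mqtt 2.x requires the callback API version as the first argument.
# VERSION1 keeps the 1.x-style callback signatures used by the integration.
client = mqtt.Client(
    mqtt.CallbackAPIVersion.VERSION1,
    client_id=None,  # let paho/the broker pick an ID instead of mqtt.base62(...)
    protocol=mqtt.MQTTv311,
    transport="tcp",
    reconnect_on_failure=False,
)


def on_connect(client, userdata, flags, rc):
    """VERSION1 on_connect keeps the old (client, userdata, flags, rc) signature."""
    print("connected with result code", rc)


client.on_connect = on_connect
# client.connect("mqtt.example.invalid", 1883)  # placeholder host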
diff --git a/homeassistant/components/mqtt/light/schema_json.py b/homeassistant/components/mqtt/light/schema_json.py
index 43b0cbf77b3..14e21e61d48 100644
--- a/homeassistant/components/mqtt/light/schema_json.py
+++ b/homeassistant/components/mqtt/light/schema_json.py
@@ -2,7 +2,6 @@
from __future__ import annotations
-from collections.abc import Callable
from contextlib import suppress
import logging
from typing import TYPE_CHECKING, Any, cast
@@ -24,7 +23,6 @@ from homeassistant.components.light import (
ATTR_XY_COLOR,
DEFAULT_MAX_KELVIN,
DEFAULT_MIN_KELVIN,
- DOMAIN as LIGHT_DOMAIN,
ENTITY_ID_FORMAT,
FLASH_LONG,
FLASH_SHORT,
@@ -34,7 +32,6 @@ from homeassistant.components.light import (
LightEntityFeature,
brightness_supported,
color_supported,
- filter_supported_color_modes,
valid_supported_color_modes,
)
from homeassistant.const import (
@@ -48,15 +45,13 @@ from homeassistant.const import (
CONF_XY,
STATE_ON,
)
-from homeassistant.core import async_get_hass, callback
+from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv
-from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from homeassistant.helpers.json import json_dumps
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType, VolSchemaType
from homeassistant.util import color as color_util
from homeassistant.util.json import json_loads_object
-from homeassistant.util.yaml import dump as yaml_dump
from .. import subscription
from ..config import DEFAULT_QOS, DEFAULT_RETAIN, MQTT_RW_SCHEMA
@@ -68,7 +63,6 @@ from ..const import (
CONF_QOS,
CONF_RETAIN,
CONF_STATE_TOPIC,
- DOMAIN as MQTT_DOMAIN,
)
from ..entity import MqttEntity
from ..models import ReceiveMessage
@@ -86,15 +80,10 @@ _LOGGER = logging.getLogger(__name__)
DOMAIN = "mqtt_json"
DEFAULT_BRIGHTNESS = False
-DEFAULT_COLOR_MODE = False
-DEFAULT_COLOR_TEMP = False
DEFAULT_EFFECT = False
DEFAULT_FLASH_TIME_LONG = 10
DEFAULT_FLASH_TIME_SHORT = 2
DEFAULT_NAME = "MQTT JSON Light"
-DEFAULT_RGB = False
-DEFAULT_XY = False
-DEFAULT_HS = False
DEFAULT_BRIGHTNESS_SCALE = 255
DEFAULT_WHITE_SCALE = 255
@@ -110,89 +99,6 @@ CONF_MAX_MIREDS = "max_mireds"
CONF_MIN_MIREDS = "min_mireds"
-def valid_color_configuration(
- setup_from_yaml: bool,
-) -> Callable[[dict[str, Any]], dict[str, Any]]:
- """Test color_mode is not combined with deprecated config."""
-
- def _valid_color_configuration(config: ConfigType) -> ConfigType:
- deprecated = {CONF_COLOR_TEMP, CONF_HS, CONF_RGB, CONF_XY}
- deprecated_flags_used = any(config.get(key) for key in deprecated)
- if config.get(CONF_SUPPORTED_COLOR_MODES):
- if deprecated_flags_used:
- raise vol.Invalid(
- "supported_color_modes must not "
- f"be combined with any of {deprecated}"
- )
- elif deprecated_flags_used:
- deprecated_flags = ", ".join(key for key in deprecated if key in config)
- _LOGGER.warning(
- "Deprecated flags [%s] used in MQTT JSON light config "
- "for handling color mode, please use `supported_color_modes` instead. "
- "Got: %s. This will stop working in Home Assistant Core 2025.3",
- deprecated_flags,
- config,
- )
- if not setup_from_yaml:
- return config
- issue_id = hex(hash(frozenset(config)))
- yaml_config_str = yaml_dump(config)
- learn_more_url = (
- "https://www.home-assistant.io/integrations/"
- f"{LIGHT_DOMAIN}.mqtt/#json-schema"
- )
- hass = async_get_hass()
- async_create_issue(
- hass,
- MQTT_DOMAIN,
- issue_id,
- issue_domain=LIGHT_DOMAIN,
- is_fixable=False,
- severity=IssueSeverity.WARNING,
- learn_more_url=learn_more_url,
- translation_placeholders={
- "deprecated_flags": deprecated_flags,
- "config": yaml_config_str,
- },
- translation_key="deprecated_color_handling",
- )
-
- if CONF_COLOR_MODE in config:
- _LOGGER.warning(
- "Deprecated flag `color_mode` used in MQTT JSON light config "
- ", the `color_mode` flag is not used anymore and should be removed. "
- "Got: %s. This will stop working in Home Assistant Core 2025.3",
- config,
- )
- if not setup_from_yaml:
- return config
- issue_id = hex(hash(frozenset(config)))
- yaml_config_str = yaml_dump(config)
- learn_more_url = (
- "https://www.home-assistant.io/integrations/"
- f"{LIGHT_DOMAIN}.mqtt/#json-schema"
- )
- hass = async_get_hass()
- async_create_issue(
- hass,
- MQTT_DOMAIN,
- issue_id,
- breaks_in_ha_version="2025.3.0",
- issue_domain=LIGHT_DOMAIN,
- is_fixable=False,
- severity=IssueSeverity.WARNING,
- learn_more_url=learn_more_url,
- translation_placeholders={
- "config": yaml_config_str,
- },
- translation_key="deprecated_color_mode_flag",
- )
-
- return config
-
- return _valid_color_configuration
-
-
_PLATFORM_SCHEMA_BASE = (
MQTT_RW_SCHEMA.extend(
{
@@ -200,12 +106,6 @@ _PLATFORM_SCHEMA_BASE = (
vol.Optional(
CONF_BRIGHTNESS_SCALE, default=DEFAULT_BRIGHTNESS_SCALE
): vol.All(vol.Coerce(int), vol.Range(min=1)),
- # CONF_COLOR_MODE was deprecated with HA Core 2024.4 and will be
- # removed with HA Core 2025.3
- vol.Optional(CONF_COLOR_MODE): cv.boolean,
- # CONF_COLOR_TEMP was deprecated with HA Core 2024.4 and will be
- # removed with HA Core 2025.3
- vol.Optional(CONF_COLOR_TEMP, default=DEFAULT_COLOR_TEMP): cv.boolean,
vol.Optional(CONF_COLOR_TEMP_KELVIN, default=False): cv.boolean,
vol.Optional(CONF_EFFECT, default=DEFAULT_EFFECT): cv.boolean,
vol.Optional(CONF_EFFECT_LIST): vol.All(cv.ensure_list, [cv.string]),
@@ -215,9 +115,6 @@ _PLATFORM_SCHEMA_BASE = (
vol.Optional(
CONF_FLASH_TIME_SHORT, default=DEFAULT_FLASH_TIME_SHORT
): cv.positive_int,
- # CONF_HS was deprecated with HA Core 2024.4 and will be
- # removed with HA Core 2025.3
- vol.Optional(CONF_HS, default=DEFAULT_HS): cv.boolean,
vol.Optional(CONF_MAX_MIREDS): cv.positive_int,
vol.Optional(CONF_MIN_MIREDS): cv.positive_int,
vol.Optional(CONF_MAX_KELVIN): cv.positive_int,
@@ -227,9 +124,6 @@ _PLATFORM_SCHEMA_BASE = (
vol.Coerce(int), vol.In([0, 1, 2])
),
vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
- # CONF_RGB was deprecated with HA Core 2024.4 and will be
- # removed with HA Core 2025.3
- vol.Optional(CONF_RGB, default=DEFAULT_RGB): cv.boolean,
vol.Optional(CONF_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_SUPPORTED_COLOR_MODES): vol.All(
cv.ensure_list,
@@ -240,22 +134,29 @@ _PLATFORM_SCHEMA_BASE = (
vol.Optional(CONF_WHITE_SCALE, default=DEFAULT_WHITE_SCALE): vol.All(
vol.Coerce(int), vol.Range(min=1)
),
- # CONF_XY was deprecated with HA Core 2024.4 and will be
- # removed with HA Core 2025.3
- vol.Optional(CONF_XY, default=DEFAULT_XY): cv.boolean,
},
)
.extend(MQTT_ENTITY_COMMON_SCHEMA.schema)
.extend(MQTT_LIGHT_SCHEMA_SCHEMA.schema)
)
+# Support for legacy color_mode handling was removed with HA Core 2025.3
+# The removed attributes can be dropped from the schemas from HA Core 2026.3
DISCOVERY_SCHEMA_JSON = vol.All(
- valid_color_configuration(False),
+ cv.removed(CONF_COLOR_MODE, raise_if_present=False),
+ cv.removed(CONF_COLOR_TEMP, raise_if_present=False),
+ cv.removed(CONF_HS, raise_if_present=False),
+ cv.removed(CONF_RGB, raise_if_present=False),
+ cv.removed(CONF_XY, raise_if_present=False),
_PLATFORM_SCHEMA_BASE.extend({}, extra=vol.REMOVE_EXTRA),
)
PLATFORM_SCHEMA_MODERN_JSON = vol.All(
- valid_color_configuration(True),
+ cv.removed(CONF_COLOR_MODE),
+ cv.removed(CONF_COLOR_TEMP),
+ cv.removed(CONF_HS),
+ cv.removed(CONF_RGB),
+ cv.removed(CONF_XY),
_PLATFORM_SCHEMA_BASE,
)
@@ -272,8 +173,6 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
_topic: dict[str, str | None]
_optimistic: bool
- _deprecated_color_handling: bool = False
-
@staticmethod
def config_schema() -> VolSchemaType:
"""Return the config schema."""
@@ -318,122 +217,65 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
self._attr_color_mode = next(iter(self.supported_color_modes))
else:
self._attr_color_mode = ColorMode.UNKNOWN
- else:
- self._deprecated_color_handling = True
- color_modes = {ColorMode.ONOFF}
- if config[CONF_BRIGHTNESS]:
- color_modes.add(ColorMode.BRIGHTNESS)
- if config[CONF_COLOR_TEMP]:
- color_modes.add(ColorMode.COLOR_TEMP)
- if config[CONF_HS] or config[CONF_RGB] or config[CONF_XY]:
- color_modes.add(ColorMode.HS)
- self._attr_supported_color_modes = filter_supported_color_modes(color_modes)
- if self.supported_color_modes and len(self.supported_color_modes) == 1:
- self._fixed_color_mode = next(iter(self.supported_color_modes))
def _update_color(self, values: dict[str, Any]) -> None:
- if self._deprecated_color_handling:
- # Deprecated color handling
- try:
- red = int(values["color"]["r"])
- green = int(values["color"]["g"])
- blue = int(values["color"]["b"])
- self._attr_hs_color = color_util.color_RGB_to_hs(red, green, blue)
- except KeyError:
- pass
- except ValueError:
- _LOGGER.warning(
- "Invalid RGB color value '%s' received for entity %s",
- values,
- self.entity_id,
+ color_mode: str = values["color_mode"]
+ if not self._supports_color_mode(color_mode):
+ _LOGGER.warning(
+ "Invalid color mode '%s' received for entity %s",
+ color_mode,
+ self.entity_id,
+ )
+ return
+ try:
+ if color_mode == ColorMode.COLOR_TEMP:
+ self._attr_color_temp_kelvin = (
+ values["color_temp"]
+ if self._color_temp_kelvin
+ else color_util.color_temperature_mired_to_kelvin(
+ values["color_temp"]
+ )
)
- return
-
- try:
- x_color = float(values["color"]["x"])
- y_color = float(values["color"]["y"])
- self._attr_hs_color = color_util.color_xy_to_hs(x_color, y_color)
- except KeyError:
- pass
- except ValueError:
- _LOGGER.warning(
- "Invalid XY color value '%s' received for entity %s",
- values,
- self.entity_id,
- )
- return
-
- try:
+ self._attr_color_mode = ColorMode.COLOR_TEMP
+ elif color_mode == ColorMode.HS:
hue = float(values["color"]["h"])
saturation = float(values["color"]["s"])
+ self._attr_color_mode = ColorMode.HS
self._attr_hs_color = (hue, saturation)
- except KeyError:
- pass
- except ValueError:
- _LOGGER.warning(
- "Invalid HS color value '%s' received for entity %s",
- values,
- self.entity_id,
- )
- return
- else:
- color_mode: str = values["color_mode"]
- if not self._supports_color_mode(color_mode):
- _LOGGER.warning(
- "Invalid color mode '%s' received for entity %s",
- color_mode,
- self.entity_id,
- )
- return
- try:
- if color_mode == ColorMode.COLOR_TEMP:
- self._attr_color_temp_kelvin = (
- values["color_temp"]
- if self._color_temp_kelvin
- else color_util.color_temperature_mired_to_kelvin(
- values["color_temp"]
- )
- )
- self._attr_color_mode = ColorMode.COLOR_TEMP
- elif color_mode == ColorMode.HS:
- hue = float(values["color"]["h"])
- saturation = float(values["color"]["s"])
- self._attr_color_mode = ColorMode.HS
- self._attr_hs_color = (hue, saturation)
- elif color_mode == ColorMode.RGB:
- r = int(values["color"]["r"])
- g = int(values["color"]["g"])
- b = int(values["color"]["b"])
- self._attr_color_mode = ColorMode.RGB
- self._attr_rgb_color = (r, g, b)
- elif color_mode == ColorMode.RGBW:
- r = int(values["color"]["r"])
- g = int(values["color"]["g"])
- b = int(values["color"]["b"])
- w = int(values["color"]["w"])
- self._attr_color_mode = ColorMode.RGBW
- self._attr_rgbw_color = (r, g, b, w)
- elif color_mode == ColorMode.RGBWW:
- r = int(values["color"]["r"])
- g = int(values["color"]["g"])
- b = int(values["color"]["b"])
- c = int(values["color"]["c"])
- w = int(values["color"]["w"])
- self._attr_color_mode = ColorMode.RGBWW
- self._attr_rgbww_color = (r, g, b, c, w)
- elif color_mode == ColorMode.WHITE:
- self._attr_color_mode = ColorMode.WHITE
- elif color_mode == ColorMode.XY:
- x = float(values["color"]["x"])
- y = float(values["color"]["y"])
- self._attr_color_mode = ColorMode.XY
- self._attr_xy_color = (x, y)
- except (KeyError, ValueError):
- _LOGGER.warning(
- "Invalid or incomplete color value '%s' received for entity %s",
- values,
- self.entity_id,
- )
+ elif color_mode == ColorMode.RGB:
+ r = int(values["color"]["r"])
+ g = int(values["color"]["g"])
+ b = int(values["color"]["b"])
+ self._attr_color_mode = ColorMode.RGB
+ self._attr_rgb_color = (r, g, b)
+ elif color_mode == ColorMode.RGBW:
+ r = int(values["color"]["r"])
+ g = int(values["color"]["g"])
+ b = int(values["color"]["b"])
+ w = int(values["color"]["w"])
+ self._attr_color_mode = ColorMode.RGBW
+ self._attr_rgbw_color = (r, g, b, w)
+ elif color_mode == ColorMode.RGBWW:
+ r = int(values["color"]["r"])
+ g = int(values["color"]["g"])
+ b = int(values["color"]["b"])
+ c = int(values["color"]["c"])
+ w = int(values["color"]["w"])
+ self._attr_color_mode = ColorMode.RGBWW
+ self._attr_rgbww_color = (r, g, b, c, w)
+ elif color_mode == ColorMode.WHITE:
+ self._attr_color_mode = ColorMode.WHITE
+ elif color_mode == ColorMode.XY:
+ x = float(values["color"]["x"])
+ y = float(values["color"]["y"])
+ self._attr_color_mode = ColorMode.XY
+ self._attr_xy_color = (x, y)
+ except (KeyError, TypeError, ValueError):
+ _LOGGER.warning(
+ "Invalid or incomplete color value '%s' received for entity %s",
+ values,
+ self.entity_id,
+ )
@callback
def _state_received(self, msg: ReceiveMessage) -> None:
@@ -447,18 +289,7 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
elif values["state"] is None:
self._attr_is_on = None
- if (
- self._deprecated_color_handling
- and color_supported(self.supported_color_modes)
- and "color" in values
- ):
- # Deprecated color handling
- if values["color"] is None:
- self._attr_hs_color = None
- else:
- self._update_color(values)
-
- if not self._deprecated_color_handling and "color_mode" in values:
+ if color_supported(self.supported_color_modes) and "color_mode" in values:
self._update_color(values)
if brightness_supported(self.supported_color_modes):
@@ -484,35 +315,6 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
self.entity_id,
)
- if (
- self._deprecated_color_handling
- and self.supported_color_modes
- and ColorMode.COLOR_TEMP in self.supported_color_modes
- ):
- # Deprecated color handling
- try:
- if values["color_temp"] is None:
- self._attr_color_temp_kelvin = None
- else:
- self._attr_color_temp_kelvin = (
- values["color_temp"] # type: ignore[assignment]
- if self._color_temp_kelvin
- else color_util.color_temperature_mired_to_kelvin(
- values["color_temp"] # type: ignore[arg-type]
- )
- )
- except KeyError:
- pass
- except (TypeError, ValueError):
- _LOGGER.warning(
- "Invalid color temp value '%s' received for entity %s",
- values["color_temp"],
- self.entity_id,
- )
- # Allow to switch back to color_temp
- if "color" not in values:
- self._attr_hs_color = None
-
if self.supported_features and LightEntityFeature.EFFECT:
with suppress(KeyError):
self._attr_effect = cast(str, values["effect"])
@@ -565,19 +367,6 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
)
self._attr_xy_color = last_attributes.get(ATTR_XY_COLOR, self.xy_color)
- @property
- def color_mode(self) -> ColorMode | str | None:
- """Return current color mode."""
- if not self._deprecated_color_handling:
- return self._attr_color_mode
- if self._fixed_color_mode:
- # Legacy light with support for a single color mode
- return self._fixed_color_mode
- # Legacy light with support for ct + hs, prioritize hs
- if self.hs_color is not None:
- return ColorMode.HS
- return ColorMode.COLOR_TEMP
-
def _set_flash_and_transition(self, message: dict[str, Any], **kwargs: Any) -> None:
if ATTR_TRANSITION in kwargs:
message["transition"] = kwargs[ATTR_TRANSITION]
@@ -604,17 +393,15 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
def _supports_color_mode(self, color_mode: ColorMode | str) -> bool:
"""Return True if the light natively supports a color mode."""
return (
- not self._deprecated_color_handling
- and self.supported_color_modes is not None
+ self.supported_color_modes is not None
and color_mode in self.supported_color_modes
)
- async def async_turn_on(self, **kwargs: Any) -> None: # noqa: C901
+ async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the device on.
This method is a coroutine.
"""
- brightness: int
should_update = False
hs_color: tuple[float, float]
message: dict[str, Any] = {"state": "ON"}
@@ -623,39 +410,6 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
rgbcw: tuple[int, ...]
xy_color: tuple[float, float]
- if ATTR_HS_COLOR in kwargs and (
- self._config[CONF_HS] or self._config[CONF_RGB] or self._config[CONF_XY]
- ):
- # Legacy color handling
- hs_color = kwargs[ATTR_HS_COLOR]
- message["color"] = {}
- if self._config[CONF_RGB]:
- # If brightness is supported, we don't want to scale the
- # RGB values given using the brightness.
- if self._config[CONF_BRIGHTNESS]:
- brightness = 255
- else:
- # We pop the brightness, to omit it from the payload
- brightness = kwargs.pop(ATTR_BRIGHTNESS, 255)
- rgb = color_util.color_hsv_to_RGB(
- hs_color[0], hs_color[1], brightness / 255 * 100
- )
- message["color"]["r"] = rgb[0]
- message["color"]["g"] = rgb[1]
- message["color"]["b"] = rgb[2]
- if self._config[CONF_XY]:
- xy_color = color_util.color_hs_to_xy(*kwargs[ATTR_HS_COLOR])
- message["color"]["x"] = xy_color[0]
- message["color"]["y"] = xy_color[1]
- if self._config[CONF_HS]:
- message["color"]["h"] = hs_color[0]
- message["color"]["s"] = hs_color[1]
-
- if self._optimistic:
- self._attr_color_temp_kelvin = None
- self._attr_hs_color = kwargs[ATTR_HS_COLOR]
- should_update = True
-
if ATTR_HS_COLOR in kwargs and self._supports_color_mode(ColorMode.HS):
hs_color = kwargs[ATTR_HS_COLOR]
message["color"] = {"h": hs_color[0], "s": hs_color[1]}
diff --git a/homeassistant/components/mqtt/manifest.json b/homeassistant/components/mqtt/manifest.json
index 25e98c01aaf..1cd6ae3e47c 100644
--- a/homeassistant/components/mqtt/manifest.json
+++ b/homeassistant/components/mqtt/manifest.json
@@ -8,6 +8,6 @@
"documentation": "https://www.home-assistant.io/integrations/mqtt",
"iot_class": "local_push",
"quality_scale": "platinum",
- "requirements": ["paho-mqtt==1.6.1"],
+ "requirements": ["paho-mqtt==2.1.0"],
"single_config_entry": true
}
diff --git a/homeassistant/components/mqtt/strings.json b/homeassistant/components/mqtt/strings.json
index 3815b6adbd5..bf0bd594ea4 100644
--- a/homeassistant/components/mqtt/strings.json
+++ b/homeassistant/components/mqtt/strings.json
@@ -1,13 +1,5 @@
{
"issues": {
- "deprecated_color_handling": {
- "title": "Deprecated color handling used for MQTT light",
- "description": "An MQTT light config (with `json` schema) found in `configuration.yaml` uses deprecated color handling flags.\n\nConfiguration found:\n```yaml\n{config}\n```\nDeprecated flags: **{deprecated_flags}**.\n\nUse the `supported_color_modes` option instead and [reload](/developer-tools/yaml) the manually configured MQTT items or restart Home Assistant to fix this issue."
- },
- "deprecated_color_mode_flag": {
- "title": "Deprecated color_mode option flag used for MQTT light",
- "description": "An MQTT light config (with `json` schema) found in `configuration.yaml` uses a deprecated `color_mode` flag.\n\nConfiguration found:\n```yaml\n{config}\n```\n\nRemove the option from your config and [reload](/developer-tools/yaml) the manually configured MQTT items or restart Home Assistant to fix this issue."
- },
"invalid_platform_config": {
"title": "Invalid config found for mqtt {domain} item",
"description": "Home Assistant detected an invalid config for a manually configured item.\n\nPlatform domain: **{domain}**\nConfiguration file: **{config_file}**\nNear line: **{line}**\nConfiguration found:\n```yaml\n{config}\n```\nError: **{error}**.\n\nMake sure the configuration is valid and [reload](/developer-tools/yaml) the manually configured MQTT items or restart Home Assistant to fix this issue."
diff --git a/homeassistant/components/nexia/manifest.json b/homeassistant/components/nexia/manifest.json
index 0013cd63de1..6a439f869c9 100644
--- a/homeassistant/components/nexia/manifest.json
+++ b/homeassistant/components/nexia/manifest.json
@@ -12,5 +12,5 @@
"documentation": "https://www.home-assistant.io/integrations/nexia",
"iot_class": "cloud_polling",
"loggers": ["nexia"],
- "requirements": ["nexia==2.0.8"]
+ "requirements": ["nexia==2.0.9"]
}
diff --git a/homeassistant/components/niko_home_control/__init__.py b/homeassistant/components/niko_home_control/__init__.py
index ae4e8986816..37396e69caa 100644
--- a/homeassistant/components/niko_home_control/__init__.py
+++ b/homeassistant/components/niko_home_control/__init__.py
@@ -2,7 +2,6 @@
from __future__ import annotations
-from nclib.errors import NetcatError
from nhc.controller import NHCController
from homeassistant.config_entries import ConfigEntry
@@ -25,12 +24,8 @@ async def async_setup_entry(
controller = NHCController(entry.data[CONF_HOST])
try:
await controller.connect()
- except NetcatError as err:
+ except (TimeoutError, OSError) as err:
raise ConfigEntryNotReady("cannot connect to controller.") from err
- except OSError as err:
- raise ConfigEntryNotReady(
- "unknown error while connecting to controller."
- ) from err
entry.runtime_data = controller
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
diff --git a/homeassistant/components/niko_home_control/cover.py b/homeassistant/components/niko_home_control/cover.py
index 51e2a8a702d..b3546b517d5 100644
--- a/homeassistant/components/niko_home_control/cover.py
+++ b/homeassistant/components/niko_home_control/cover.py
@@ -37,17 +37,17 @@ class NikoHomeControlCover(NikoHomeControlEntity, CoverEntity):
)
_action: NHCCover
- def open_cover(self, **kwargs: Any) -> None:
+ async def async_open_cover(self, **kwargs: Any) -> None:
"""Open the cover."""
- self._action.open()
+ await self._action.open()
- def close_cover(self, **kwargs: Any) -> None:
+ async def async_close_cover(self, **kwargs: Any) -> None:
"""Close the cover."""
- self._action.close()
+ await self._action.close()
- def stop_cover(self, **kwargs: Any) -> None:
+ async def async_stop_cover(self, **kwargs: Any) -> None:
"""Stop the cover."""
- self._action.stop()
+ await self._action.stop()
def update_state(self):
"""Update HA state."""
diff --git a/homeassistant/components/niko_home_control/light.py b/homeassistant/components/niko_home_control/light.py
index 5c2b372fd25..7c0d11b3388 100644
--- a/homeassistant/components/niko_home_control/light.py
+++ b/homeassistant/components/niko_home_control/light.py
@@ -109,13 +109,13 @@ class NikoHomeControlLight(NikoHomeControlEntity, LightEntity):
self._attr_supported_color_modes = {ColorMode.BRIGHTNESS}
self._attr_brightness = round(action.state * 2.55)
- def turn_on(self, **kwargs: Any) -> None:
+ async def async_turn_on(self, **kwargs: Any) -> None:
"""Instruct the light to turn on."""
- self._action.turn_on(round(kwargs.get(ATTR_BRIGHTNESS, 255) / 2.55))
+ await self._action.turn_on(round(kwargs.get(ATTR_BRIGHTNESS, 255) / 2.55))
- def turn_off(self, **kwargs: Any) -> None:
+ async def async_turn_off(self, **kwargs: Any) -> None:
"""Instruct the light to turn off."""
- self._action.turn_off()
+ await self._action.turn_off()
def update_state(self) -> None:
"""Handle updates from the controller."""
diff --git a/homeassistant/components/niko_home_control/manifest.json b/homeassistant/components/niko_home_control/manifest.json
index 57f83180eb0..b50410cd7de 100644
--- a/homeassistant/components/niko_home_control/manifest.json
+++ b/homeassistant/components/niko_home_control/manifest.json
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/niko_home_control",
"iot_class": "local_push",
"loggers": ["nikohomecontrol"],
- "requirements": ["nhc==0.3.9"]
+ "requirements": ["nhc==0.4.4"]
}
diff --git a/homeassistant/components/nut/manifest.json b/homeassistant/components/nut/manifest.json
index 9e968b5a349..fb6c8561b25 100644
--- a/homeassistant/components/nut/manifest.json
+++ b/homeassistant/components/nut/manifest.json
@@ -7,6 +7,6 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["aionut"],
- "requirements": ["aionut==4.3.3"],
+ "requirements": ["aionut==4.3.4"],
"zeroconf": ["_nut._tcp.local."]
}
diff --git a/homeassistant/components/omnilogic/coordinator.py b/homeassistant/components/omnilogic/coordinator.py
index 72d16f03328..24c8cdf2554 100644
--- a/homeassistant/components/omnilogic/coordinator.py
+++ b/homeassistant/components/omnilogic/coordinator.py
@@ -18,6 +18,8 @@ _LOGGER = logging.getLogger(__name__)
class OmniLogicUpdateCoordinator(DataUpdateCoordinator[dict[tuple, dict[str, Any]]]):
"""Class to manage fetching update data from single endpoint."""
+ config_entry: ConfigEntry
+
def __init__(
self,
hass: HomeAssistant,
@@ -28,11 +30,11 @@ class OmniLogicUpdateCoordinator(DataUpdateCoordinator[dict[tuple, dict[str, Any
) -> None:
"""Initialize the global Omnilogic data updater."""
self.api = api
- self.config_entry = config_entry
super().__init__(
hass=hass,
logger=_LOGGER,
+ config_entry=config_entry,
name=name,
update_interval=timedelta(seconds=polling_interval),
)
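Note: the coordinator base class accepts the config entry directly in super().__init__, which is what the change above relies on. A framework-free sketch of the difference; the base class here is only a stand-in for DataUpdateCoordinator.

from datetime import timedelta


class DataUpdateCoordinator:
    """Very small stand-in for the Home Assistant base coordinator."""

    config_entry = None

    def __init__(self, *, name: str, update_interval: timedelta, config_entry=None) -> None:
        self.name = name
        self.update_interval = update_interval
        self.config_entry = config_entry


class OmniLogicUpdateCoordinator(DataUpdateCoordinator):
    def __init__(self, api, config_entry, name: str, polling_interval: int) -> None:
        self.api = api
        # The entry is handed to the base class instead of being assigned afterwards.
        super().__init__(
            name=name,
            update_interval=timedelta(seconds=polling_interval),
            config_entry=config_entry,
        )


coordinator = OmniLogicUpdateCoordinator(object(), "entry", "omnilogic", 30)
print(coordinator.config_entry)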
diff --git a/homeassistant/components/onboarding/views.py b/homeassistant/components/onboarding/views.py
index edf0b615779..1e29860e3c5 100644
--- a/homeassistant/components/onboarding/views.py
+++ b/homeassistant/components/onboarding/views.py
@@ -378,7 +378,7 @@ class BackupInfoView(BackupOnboardingView):
backups, _ = await manager.async_get_backups()
return self.json(
{
- "backups": [backup.as_frontend_json() for backup in backups.values()],
+ "backups": list(backups.values()),
"state": manager.state,
"last_non_idle_event": manager.last_non_idle_event,
}
diff --git a/homeassistant/components/oncue/manifest.json b/homeassistant/components/oncue/manifest.json
index b4c425a1645..33d56f23669 100644
--- a/homeassistant/components/oncue/manifest.json
+++ b/homeassistant/components/oncue/manifest.json
@@ -12,5 +12,5 @@
"documentation": "https://www.home-assistant.io/integrations/oncue",
"iot_class": "cloud_polling",
"loggers": ["aiooncue"],
- "requirements": ["aiooncue==0.3.7"]
+ "requirements": ["aiooncue==0.3.9"]
}
diff --git a/homeassistant/components/onedrive/__init__.py b/homeassistant/components/onedrive/__init__.py
index 4ae5ac73560..5feefb2cf7d 100644
--- a/homeassistant/components/onedrive/__init__.py
+++ b/homeassistant/components/onedrive/__init__.py
@@ -2,37 +2,38 @@
from __future__ import annotations
+from collections.abc import Awaitable, Callable
from dataclasses import dataclass
import logging
+from typing import cast
-from kiota_abstractions.api_error import APIError
-from kiota_abstractions.authentication import BaseBearerTokenAuthenticationProvider
-from msgraph import GraphRequestAdapter, GraphServiceClient
-from msgraph.generated.drives.item.items.items_request_builder import (
- ItemsRequestBuilder,
+from onedrive_personal_sdk import OneDriveClient
+from onedrive_personal_sdk.exceptions import (
+ AuthenticationError,
+ HttpRequestException,
+ OneDriveException,
)
-from msgraph.generated.models.drive_item import DriveItem
-from msgraph.generated.models.folder import Folder
from homeassistant.config_entries import ConfigEntry
+from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
+from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.config_entry_oauth2_flow import (
OAuth2Session,
async_get_config_entry_implementation,
)
-from homeassistant.helpers.httpx_client import create_async_httpx_client
from homeassistant.helpers.instance_id import async_get as async_get_instance_id
-from .api import OneDriveConfigEntryAccessTokenProvider
-from .const import DATA_BACKUP_AGENT_LISTENERS, DOMAIN, OAUTH_SCOPES
+from .const import DATA_BACKUP_AGENT_LISTENERS, DOMAIN
@dataclass
class OneDriveRuntimeData:
"""Runtime data for the OneDrive integration."""
- items: ItemsRequestBuilder
+ client: OneDriveClient
+ token_function: Callable[[], Awaitable[str]]
backup_folder_id: str
@@ -47,29 +48,20 @@ async def async_setup_entry(hass: HomeAssistant, entry: OneDriveConfigEntry) ->
session = OAuth2Session(hass, entry, implementation)
- auth_provider = BaseBearerTokenAuthenticationProvider(
- access_token_provider=OneDriveConfigEntryAccessTokenProvider(session)
- )
- adapter = GraphRequestAdapter(
- auth_provider=auth_provider,
- client=create_async_httpx_client(hass, follow_redirects=True),
- )
+ async def get_access_token() -> str:
+ await session.async_ensure_token_valid()
+ return cast(str, session.token[CONF_ACCESS_TOKEN])
- graph_client = GraphServiceClient(
- request_adapter=adapter,
- scopes=OAUTH_SCOPES,
- )
- assert entry.unique_id
- drive_item = graph_client.drives.by_drive_id(entry.unique_id)
+ client = OneDriveClient(get_access_token, async_get_clientsession(hass))
# get approot, will be created automatically if it does not exist
try:
- approot = await drive_item.special.by_drive_item_id("approot").get()
- except APIError as err:
- if err.response_status_code == 403:
- raise ConfigEntryAuthFailed(
- translation_domain=DOMAIN, translation_key="authentication_failed"
- ) from err
+ approot = await client.get_approot()
+ except AuthenticationError as err:
+ raise ConfigEntryAuthFailed(
+ translation_domain=DOMAIN, translation_key="authentication_failed"
+ ) from err
+ except (HttpRequestException, OneDriveException, TimeoutError) as err:
_LOGGER.debug("Failed to get approot", exc_info=True)
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
@@ -77,24 +69,24 @@ async def async_setup_entry(hass: HomeAssistant, entry: OneDriveConfigEntry) ->
translation_placeholders={"folder": "approot"},
) from err
- if approot is None or not approot.id:
- _LOGGER.debug("Failed to get approot, was None")
+ instance_id = await async_get_instance_id(hass)
+ backup_folder_name = f"backups_{instance_id[:8]}"
+ try:
+ backup_folder = await client.create_folder(
+ parent_id=approot.id, name=backup_folder_name
+ )
+ except (HttpRequestException, OneDriveException, TimeoutError) as err:
+ _LOGGER.debug("Failed to create backup folder", exc_info=True)
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="failed_to_get_folder",
- translation_placeholders={"folder": "approot"},
- )
-
- instance_id = await async_get_instance_id(hass)
- backup_folder_id = await _async_create_folder_if_not_exists(
- items=drive_item.items,
- base_folder_id=approot.id,
- folder=f"backups_{instance_id[:8]}",
- )
+ translation_placeholders={"folder": backup_folder_name},
+ ) from err
entry.runtime_data = OneDriveRuntimeData(
- items=drive_item.items,
- backup_folder_id=backup_folder_id,
+ client=client,
+ token_function=get_access_token,
+ backup_folder_id=backup_folder.id,
)
_async_notify_backup_listeners_soon(hass)
@@ -116,54 +108,3 @@ def _async_notify_backup_listeners(hass: HomeAssistant) -> None:
@callback
def _async_notify_backup_listeners_soon(hass: HomeAssistant) -> None:
hass.loop.call_soon(_async_notify_backup_listeners, hass)
-
-
-async def _async_create_folder_if_not_exists(
- items: ItemsRequestBuilder,
- base_folder_id: str,
- folder: str,
-) -> str:
- """Check if a folder exists and create it if it does not exist."""
- folder_item: DriveItem | None = None
-
- try:
- folder_item = await items.by_drive_item_id(f"{base_folder_id}:/{folder}:").get()
- except APIError as err:
- if err.response_status_code != 404:
- _LOGGER.debug("Failed to get folder %s", folder, exc_info=True)
- raise ConfigEntryNotReady(
- translation_domain=DOMAIN,
- translation_key="failed_to_get_folder",
- translation_placeholders={"folder": folder},
- ) from err
- # is 404 not found, create folder
- _LOGGER.debug("Creating folder %s", folder)
- request_body = DriveItem(
- name=folder,
- folder=Folder(),
- additional_data={
- "@microsoft_graph_conflict_behavior": "fail",
- },
- )
- try:
- folder_item = await items.by_drive_item_id(base_folder_id).children.post(
- request_body
- )
- except APIError as create_err:
- _LOGGER.debug("Failed to create folder %s", folder, exc_info=True)
- raise ConfigEntryNotReady(
- translation_domain=DOMAIN,
- translation_key="failed_to_create_folder",
- translation_placeholders={"folder": folder},
- ) from create_err
- _LOGGER.debug("Created folder %s", folder)
- else:
- _LOGGER.debug("Found folder %s", folder)
- if folder_item is None or not folder_item.id:
- _LOGGER.debug("Failed to get folder %s, was None", folder)
- raise ConfigEntryNotReady(
- translation_domain=DOMAIN,
- translation_key="failed_to_get_folder",
- translation_placeholders={"folder": folder},
- )
- return folder_item.id
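Note: setup now revolves around a token callback handed to OneDriveClient, after which the approot is fetched and the backup folder created directly. The sketch below mirrors only the calls visible in this diff; the stub client stands in for onedrive_personal_sdk, whose real signatures are not verified here.

import asyncio
from types import SimpleNamespace


class StubOneDriveClient:
    """Stand-in mirroring the onedrive_personal_sdk calls used above."""

    def __init__(self, get_access_token, session=None) -> None:
        self._get_access_token = get_access_token

    async def get_approot(self):
        return SimpleNamespace(id="approot-id")

    async def create_folder(self, parent_id: str, name: str):
        return SimpleNamespace(id=f"{parent_id}/{name}")


async def setup_entry(instance_id: str) -> str:
    async def get_access_token() -> str:
        # The integration refreshes the OAuth2 session before returning the token.
        return "access-token"

    client = StubOneDriveClient(get_access_token)
    approot = await client.get_approot()
    folder = await client.create_folder(parent_id=approot.id, name=f"backups_{instance_id[:8]}")
    return folder.id


print(asyncio.run(setup_entry("0123456789abcdef")))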
diff --git a/homeassistant/components/onedrive/api.py b/homeassistant/components/onedrive/api.py
deleted file mode 100644
index 934a4f74ec9..00000000000
--- a/homeassistant/components/onedrive/api.py
+++ /dev/null
@@ -1,53 +0,0 @@
-"""API for OneDrive bound to Home Assistant OAuth."""
-
-from typing import Any, cast
-
-from kiota_abstractions.authentication import AccessTokenProvider, AllowedHostsValidator
-
-from homeassistant.const import CONF_ACCESS_TOKEN
-from homeassistant.helpers import config_entry_oauth2_flow
-
-
-class OneDriveAccessTokenProvider(AccessTokenProvider):
- """Provide OneDrive authentication tied to an OAuth2 based config entry."""
-
- def __init__(self) -> None:
- """Initialize OneDrive auth."""
- super().__init__()
- # currently allowing all hosts
- self._allowed_hosts_validator = AllowedHostsValidator(allowed_hosts=[])
-
- def get_allowed_hosts_validator(self) -> AllowedHostsValidator:
- """Retrieve the allowed hosts validator."""
- return self._allowed_hosts_validator
-
-
-class OneDriveConfigFlowAccessTokenProvider(OneDriveAccessTokenProvider):
- """Provide OneDrive authentication tied to an OAuth2 based config entry."""
-
- def __init__(self, token: str) -> None:
- """Initialize OneDrive auth."""
- super().__init__()
- self._token = token
-
- async def get_authorization_token( # pylint: disable=dangerous-default-value
- self, uri: str, additional_authentication_context: dict[str, Any] = {}
- ) -> str:
- """Return a valid authorization token."""
- return self._token
-
-
-class OneDriveConfigEntryAccessTokenProvider(OneDriveAccessTokenProvider):
- """Provide OneDrive authentication tied to an OAuth2 based config entry."""
-
- def __init__(self, oauth_session: config_entry_oauth2_flow.OAuth2Session) -> None:
- """Initialize OneDrive auth."""
- super().__init__()
- self._oauth_session = oauth_session
-
- async def get_authorization_token( # pylint: disable=dangerous-default-value
- self, uri: str, additional_authentication_context: dict[str, Any] = {}
- ) -> str:
- """Return a valid authorization token."""
- await self._oauth_session.async_ensure_token_valid()
- return cast(str, self._oauth_session.token[CONF_ACCESS_TOKEN])
diff --git a/homeassistant/components/onedrive/backup.py b/homeassistant/components/onedrive/backup.py
index a7bac5d01fc..78bdcb24b8c 100644
--- a/homeassistant/components/onedrive/backup.py
+++ b/homeassistant/components/onedrive/backup.py
@@ -2,37 +2,22 @@
from __future__ import annotations
-import asyncio
from collections.abc import AsyncIterator, Callable, Coroutine
from functools import wraps
import html
import json
import logging
-from typing import Any, Concatenate, cast
+from typing import Any, Concatenate
-from httpx import Response, TimeoutException
-from kiota_abstractions.api_error import APIError
-from kiota_abstractions.authentication import AnonymousAuthenticationProvider
-from kiota_abstractions.headers_collection import HeadersCollection
-from kiota_abstractions.method import Method
-from kiota_abstractions.native_response_handler import NativeResponseHandler
-from kiota_abstractions.request_information import RequestInformation
-from kiota_http.middleware.options import ResponseHandlerOption
-from msgraph import GraphRequestAdapter
-from msgraph.generated.drives.item.items.item.content.content_request_builder import (
- ContentRequestBuilder,
+from aiohttp import ClientTimeout
+from onedrive_personal_sdk.clients.large_file_upload import LargeFileUploadClient
+from onedrive_personal_sdk.exceptions import (
+ AuthenticationError,
+ HashMismatchError,
+ OneDriveException,
)
-from msgraph.generated.drives.item.items.item.create_upload_session.create_upload_session_post_request_body import (
- CreateUploadSessionPostRequestBody,
-)
-from msgraph.generated.drives.item.items.item.drive_item_item_request_builder import (
- DriveItemItemRequestBuilder,
-)
-from msgraph.generated.models.drive_item import DriveItem
-from msgraph.generated.models.drive_item_uploadable_properties import (
- DriveItemUploadableProperties,
-)
-from msgraph_core.models import LargeFileUploadSession
+from onedrive_personal_sdk.models.items import File, Folder, ItemUpdate
+from onedrive_personal_sdk.models.upload import FileInfo
from homeassistant.components.backup import (
AgentBackup,
@@ -41,14 +26,14 @@ from homeassistant.components.backup import (
suggested_filename,
)
from homeassistant.core import HomeAssistant, callback
-from homeassistant.helpers.httpx_client import get_async_client
+from homeassistant.helpers.aiohttp_client import async_get_clientsession
from . import OneDriveConfigEntry
from .const import DATA_BACKUP_AGENT_LISTENERS, DOMAIN
_LOGGER = logging.getLogger(__name__)
UPLOAD_CHUNK_SIZE = 16 * 320 * 1024 # 5.2MB
-MAX_RETRIES = 5
+TIMEOUT = ClientTimeout(connect=10, total=43200) # 12 hours
async def async_get_backup_agents(
@@ -92,18 +77,18 @@ def handle_backup_errors[_R, **P](
) -> _R:
try:
return await func(self, *args, **kwargs)
- except APIError as err:
- if err.response_status_code == 403:
- self._entry.async_start_reauth(self._hass)
+ except AuthenticationError as err:
+ self._entry.async_start_reauth(self._hass)
+ raise BackupAgentError("Authentication error") from err
+ except OneDriveException as err:
_LOGGER.error(
- "Error during backup in %s: Status %s, message %s",
+ "Error during backup in %s:, message %s",
func.__name__,
- err.response_status_code,
- err.message,
+ err,
)
_LOGGER.debug("Full error: %s", err, exc_info=True)
raise BackupAgentError("Backup operation failed") from err
- except TimeoutException as err:
+ except TimeoutError as err:
_LOGGER.error(
"Error during backup in %s: Timeout",
func.__name__,
@@ -123,7 +108,8 @@ class OneDriveBackupAgent(BackupAgent):
super().__init__()
self._hass = hass
self._entry = entry
- self._items = entry.runtime_data.items
+ self._client = entry.runtime_data.client
+ self._token_function = entry.runtime_data.token_function
self._folder_id = entry.runtime_data.backup_folder_id
self.name = entry.title
assert entry.unique_id
@@ -134,24 +120,12 @@ class OneDriveBackupAgent(BackupAgent):
self, backup_id: str, **kwargs: Any
) -> AsyncIterator[bytes]:
"""Download a backup file."""
- # this forces the query to return a raw httpx response, but breaks typing
- backup = await self._find_item_by_backup_id(backup_id)
- if backup is None or backup.id is None:
+ item = await self._find_item_by_backup_id(backup_id)
+ if item is None:
raise BackupAgentError("Backup not found")
- request_config = (
- ContentRequestBuilder.ContentRequestBuilderGetRequestConfiguration(
- options=[ResponseHandlerOption(NativeResponseHandler())],
- )
- )
- response = cast(
- Response,
- await self._items.by_drive_item_id(backup.id).content.get(
- request_configuration=request_config
- ),
- )
-
- return response.aiter_bytes(chunk_size=1024)
+ stream = await self._client.download_drive_item(item.id, timeout=TIMEOUT)
+ return stream.iter_chunked(1024)
@handle_backup_errors
async def async_upload_backup(
@@ -163,27 +137,20 @@ class OneDriveBackupAgent(BackupAgent):
) -> None:
"""Upload a backup."""
- # upload file in chunks to support large files
- upload_session_request_body = CreateUploadSessionPostRequestBody(
- item=DriveItemUploadableProperties(
- additional_data={
- "@microsoft.graph.conflictBehavior": "fail",
- },
+ file = FileInfo(
+ suggested_filename(backup),
+ backup.size,
+ self._folder_id,
+ await open_stream(),
+ )
+ try:
+ item = await LargeFileUploadClient.upload(
+ self._token_function, file, session=async_get_clientsession(self._hass)
)
- )
- file_item = self._get_backup_file_item(suggested_filename(backup))
- upload_session = await file_item.create_upload_session.post(
- upload_session_request_body
- )
-
- if upload_session is None or upload_session.upload_url is None:
+ except HashMismatchError as err:
raise BackupAgentError(
- translation_domain=DOMAIN, translation_key="backup_no_upload_session"
- )
-
- await self._upload_file(
- upload_session.upload_url, await open_stream(), backup.size
- )
+ "Hash validation failed, backup file might be corrupt"
+ ) from err
# store metadata in description
backup_dict = backup.as_dict()
@@ -191,7 +158,10 @@ class OneDriveBackupAgent(BackupAgent):
description = json.dumps(backup_dict)
_LOGGER.debug("Creating metadata: %s", description)
- await file_item.patch(DriveItem(description=description))
+ await self._client.update_drive_item(
+ path_or_id=item.id,
+ data=ItemUpdate(description=description),
+ )
@handle_backup_errors
async def async_delete_backup(
@@ -200,35 +170,31 @@ class OneDriveBackupAgent(BackupAgent):
**kwargs: Any,
) -> None:
"""Delete a backup file."""
- backup = await self._find_item_by_backup_id(backup_id)
- if backup is None or backup.id is None:
+ item = await self._find_item_by_backup_id(backup_id)
+ if item is None:
return
- await self._items.by_drive_item_id(backup.id).delete()
+ await self._client.delete_drive_item(item.id)
@handle_backup_errors
async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
"""List backups."""
- backups: list[AgentBackup] = []
- items = await self._items.by_drive_item_id(f"{self._folder_id}").children.get()
- if items and (values := items.value):
- for item in values:
- if (description := item.description) is None:
- continue
- if "homeassistant_version" in description:
- backups.append(self._backup_from_description(description))
- return backups
+ return [
+ self._backup_from_description(item.description)
+ for item in await self._client.list_drive_items(self._folder_id)
+ if item.description and "homeassistant_version" in item.description
+ ]
@handle_backup_errors
async def async_get_backup(
self, backup_id: str, **kwargs: Any
) -> AgentBackup | None:
"""Return a backup."""
- backup = await self._find_item_by_backup_id(backup_id)
- if backup is None:
- return None
-
- assert backup.description # already checked in _find_item_by_backup_id
- return self._backup_from_description(backup.description)
+ item = await self._find_item_by_backup_id(backup_id)
+ return (
+ self._backup_from_description(item.description)
+ if item and item.description
+ else None
+ )
def _backup_from_description(self, description: str) -> AgentBackup:
"""Create a backup object from a description."""
@@ -237,91 +203,13 @@ class OneDriveBackupAgent(BackupAgent):
) # OneDrive encodes the description on save automatically
return AgentBackup.from_dict(json.loads(description))
- async def _find_item_by_backup_id(self, backup_id: str) -> DriveItem | None:
- """Find a backup item by its backup ID."""
-
- items = await self._items.by_drive_item_id(f"{self._folder_id}").children.get()
- if items and (values := items.value):
- for item in values:
- if (description := item.description) is None:
- continue
- if backup_id in description:
- return item
- return None
-
- def _get_backup_file_item(self, backup_id: str) -> DriveItemItemRequestBuilder:
- return self._items.by_drive_item_id(f"{self._folder_id}:/{backup_id}:")
-
- async def _upload_file(
- self, upload_url: str, stream: AsyncIterator[bytes], total_size: int
- ) -> None:
- """Use custom large file upload; SDK does not support stream."""
-
- adapter = GraphRequestAdapter(
- auth_provider=AnonymousAuthenticationProvider(),
- client=get_async_client(self._hass),
+ async def _find_item_by_backup_id(self, backup_id: str) -> File | Folder | None:
+ """Find an item by backup ID."""
+ return next(
+ (
+ item
+ for item in await self._client.list_drive_items(self._folder_id)
+ if item.description and backup_id in item.description
+ ),
+ None,
)
-
- async def async_upload(
- start: int, end: int, chunk_data: bytes
- ) -> LargeFileUploadSession:
- info = RequestInformation()
- info.url = upload_url
- info.http_method = Method.PUT
- info.headers = HeadersCollection()
- info.headers.try_add("Content-Range", f"bytes {start}-{end}/{total_size}")
- info.headers.try_add("Content-Length", str(len(chunk_data)))
- info.headers.try_add("Content-Type", "application/octet-stream")
- _LOGGER.debug(info.headers.get_all())
- info.set_stream_content(chunk_data)
- result = await adapter.send_async(info, LargeFileUploadSession, {})
- _LOGGER.debug("Next expected range: %s", result.next_expected_ranges)
- return result
-
- start = 0
- buffer: list[bytes] = []
- buffer_size = 0
- retries = 0
-
- async for chunk in stream:
- buffer.append(chunk)
- buffer_size += len(chunk)
- if buffer_size >= UPLOAD_CHUNK_SIZE:
- chunk_data = b"".join(buffer)
- uploaded_chunks = 0
- while (
- buffer_size > UPLOAD_CHUNK_SIZE
- ): # Loop in case the buffer is >= UPLOAD_CHUNK_SIZE * 2
- slice_start = uploaded_chunks * UPLOAD_CHUNK_SIZE
- try:
- await async_upload(
- start,
- start + UPLOAD_CHUNK_SIZE - 1,
- chunk_data[slice_start : slice_start + UPLOAD_CHUNK_SIZE],
- )
- except APIError as err:
- if (
- err.response_status_code and err.response_status_code < 500
- ): # no retry on 4xx errors
- raise
- if retries < MAX_RETRIES:
- await asyncio.sleep(2**retries)
- retries += 1
- continue
- raise
- except TimeoutException:
- if retries < MAX_RETRIES:
- retries += 1
- continue
- raise
- retries = 0
- start += UPLOAD_CHUNK_SIZE
- uploaded_chunks += 1
- buffer_size -= UPLOAD_CHUNK_SIZE
- buffer = [chunk_data[UPLOAD_CHUNK_SIZE * uploaded_chunks :]]
-
- # upload the remaining bytes
- if buffer:
- _LOGGER.debug("Last chunk")
- chunk_data = b"".join(buffer)
- await async_upload(start, start + len(chunk_data) - 1, chunk_data)
diff --git a/homeassistant/components/onedrive/config_flow.py b/homeassistant/components/onedrive/config_flow.py
index 09c0d1b44cc..900db0177d9 100644
--- a/homeassistant/components/onedrive/config_flow.py
+++ b/homeassistant/components/onedrive/config_flow.py
@@ -4,18 +4,14 @@ from collections.abc import Mapping
import logging
from typing import Any, cast
-from kiota_abstractions.api_error import APIError
-from kiota_abstractions.authentication import BaseBearerTokenAuthenticationProvider
-from kiota_abstractions.method import Method
-from kiota_abstractions.request_information import RequestInformation
-from msgraph import GraphRequestAdapter, GraphServiceClient
+from onedrive_personal_sdk.clients.client import OneDriveClient
+from onedrive_personal_sdk.exceptions import OneDriveException
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
+from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2FlowHandler
-from homeassistant.helpers.httpx_client import get_async_client
-from .api import OneDriveConfigFlowAccessTokenProvider
from .const import DOMAIN, OAUTH_SCOPES
@@ -39,48 +35,24 @@ class OneDriveConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
data: dict[str, Any],
) -> ConfigFlowResult:
"""Handle the initial step."""
- auth_provider = BaseBearerTokenAuthenticationProvider(
- access_token_provider=OneDriveConfigFlowAccessTokenProvider(
- cast(str, data[CONF_TOKEN][CONF_ACCESS_TOKEN])
- )
- )
- adapter = GraphRequestAdapter(
- auth_provider=auth_provider,
- client=get_async_client(self.hass),
+
+ async def get_access_token() -> str:
+ return cast(str, data[CONF_TOKEN][CONF_ACCESS_TOKEN])
+
+ graph_client = OneDriveClient(
+ get_access_token, async_get_clientsession(self.hass)
)
- graph_client = GraphServiceClient(
- request_adapter=adapter,
- scopes=OAUTH_SCOPES,
- )
-
- # need to get adapter from client, as client changes it
- request_adapter = cast(GraphRequestAdapter, graph_client.request_adapter)
-
- request_info = RequestInformation(
- method=Method.GET,
- url_template="{+baseurl}/me/drive/special/approot",
- path_parameters={},
- )
- parent_span = request_adapter.start_tracing_span(request_info, "get_approot")
-
- # get the OneDrive id
- # use low level methods, to avoid files.read permissions
- # which would be required by drives.me.get()
try:
- response = await request_adapter.get_http_response_message(
- request_info=request_info, parent_span=parent_span
- )
- except APIError:
+ approot = await graph_client.get_approot()
+ except OneDriveException:
self.logger.exception("Failed to connect to OneDrive")
return self.async_abort(reason="connection_error")
except Exception:
self.logger.exception("Unknown error")
return self.async_abort(reason="unknown")
- drive: dict = response.json()
-
- await self.async_set_unique_id(drive["parentReference"]["driveId"])
+ await self.async_set_unique_id(approot.parent_reference.drive_id)
if self.source == SOURCE_REAUTH:
reauth_entry = self._get_reauth_entry()
@@ -94,10 +66,11 @@ class OneDriveConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
self._abort_if_unique_id_configured()
- user = drive.get("createdBy", {}).get("user", {}).get("displayName")
-
- title = f"{user}'s OneDrive" if user else "OneDrive"
-
+ title = (
+ f"{approot.created_by.user.display_name}'s OneDrive"
+ if approot.created_by.user and approot.created_by.user.display_name
+ else "OneDrive"
+ )
return self.async_create_entry(title=title, data=data)
async def async_step_reauth(
diff --git a/homeassistant/components/onedrive/manifest.json b/homeassistant/components/onedrive/manifest.json
index 056e31864a4..88d51e6d73a 100644
--- a/homeassistant/components/onedrive/manifest.json
+++ b/homeassistant/components/onedrive/manifest.json
@@ -7,7 +7,7 @@
"documentation": "https://www.home-assistant.io/integrations/onedrive",
"integration_type": "service",
"iot_class": "cloud_polling",
- "loggers": ["msgraph", "msgraph-core", "kiota"],
+ "loggers": ["onedrive_personal_sdk"],
"quality_scale": "bronze",
- "requirements": ["msgraph-sdk==1.16.0"]
+ "requirements": ["onedrive-personal-sdk==0.0.8"]
}
diff --git a/homeassistant/components/onedrive/strings.json b/homeassistant/components/onedrive/strings.json
index 9cbdb2bdeae..7686e83e2a5 100644
--- a/homeassistant/components/onedrive/strings.json
+++ b/homeassistant/components/onedrive/strings.json
@@ -23,31 +23,18 @@
"connection_error": "Failed to connect to OneDrive.",
"wrong_drive": "New account does not contain previously configured OneDrive.",
"unknown": "[%key:common::config_flow::error::unknown%]",
- "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
- "failed_to_create_folder": "Failed to create backup folder"
+ "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
},
"create_entry": {
"default": "[%key:common::config_flow::create_entry::authenticated%]"
}
},
"exceptions": {
- "backup_not_found": {
- "message": "Backup not found"
- },
- "backup_no_content": {
- "message": "Backup has no content"
- },
- "backup_no_upload_session": {
- "message": "Failed to start backup upload"
- },
"authentication_failed": {
"message": "Authentication failed"
},
"failed_to_get_folder": {
"message": "Failed to get {folder} folder"
- },
- "failed_to_create_folder": {
- "message": "Failed to create {folder} folder"
}
}
}
diff --git a/homeassistant/components/openai_conversation/conversation.py b/homeassistant/components/openai_conversation/conversation.py
index aced98eaa97..73dafa1c48d 100644
--- a/homeassistant/components/openai_conversation/conversation.py
+++ b/homeassistant/components/openai_conversation/conversation.py
@@ -70,7 +70,9 @@ def _format_tool(
return ChatCompletionToolParam(type="function", function=tool_spec)
-def _message_convert(message: ChatCompletionMessage) -> ChatCompletionMessageParam:
+def _convert_message_to_param(
+ message: ChatCompletionMessage,
+) -> ChatCompletionMessageParam:
"""Convert from class to TypedDict."""
tool_calls: list[ChatCompletionMessageToolCallParam] = []
if message.tool_calls:
@@ -94,20 +96,42 @@ def _message_convert(message: ChatCompletionMessage) -> ChatCompletionMessagePar
return param
-def _chat_message_convert(
- message: conversation.Content
- | conversation.NativeContent[ChatCompletionMessageParam],
+def _convert_content_to_param(
+ content: conversation.Content,
) -> ChatCompletionMessageParam:
"""Convert any native chat message for this agent to the native format."""
- role = message.role
- if role == "native":
- # mypy doesn't understand that checking role ensures content type
- return message.content # type: ignore[return-value]
- if role == "system":
- role = "developer"
- return cast(
- ChatCompletionMessageParam,
- {"role": role, "content": message.content},
+ if content.role == "tool_result":
+ assert type(content) is conversation.ToolResultContent
+ return ChatCompletionToolMessageParam(
+ role="tool",
+ tool_call_id=content.tool_call_id,
+ content=json.dumps(content.tool_result),
+ )
+ if content.role != "assistant" or not content.tool_calls: # type: ignore[union-attr]
+ role = content.role
+ if role == "system":
+ role = "developer"
+ return cast(
+ ChatCompletionMessageParam,
+ {"role": content.role, "content": content.content}, # type: ignore[union-attr]
+ )
+
+ # Handle the Assistant content including tool calls.
+ assert type(content) is conversation.AssistantContent
+ return ChatCompletionAssistantMessageParam(
+ role="assistant",
+ content=content.content,
+ tool_calls=[
+ ChatCompletionMessageToolCallParam(
+ id=tool_call.id,
+ function=Function(
+ arguments=json.dumps(tool_call.tool_args),
+ name=tool_call.tool_name,
+ ),
+ type="function",
+ )
+ for tool_call in content.tool_calls
+ ],
)
@@ -171,14 +195,14 @@ class OpenAIConversationEntity(
async def _async_handle_message(
self,
user_input: conversation.ConversationInput,
- session: conversation.ChatLog[ChatCompletionMessageParam],
+ chat_log: conversation.ChatLog,
) -> conversation.ConversationResult:
"""Call the API."""
assert user_input.agent_id
options = self.entry.options
try:
- await session.async_update_llm_data(
+ await chat_log.async_update_llm_data(
DOMAIN,
user_input,
options.get(CONF_LLM_HASS_API),
@@ -188,17 +212,14 @@ class OpenAIConversationEntity(
return err.as_conversation_result()
tools: list[ChatCompletionToolParam] | None = None
- if session.llm_api:
+ if chat_log.llm_api:
tools = [
- _format_tool(tool, session.llm_api.custom_serializer)
- for tool in session.llm_api.tools
+ _format_tool(tool, chat_log.llm_api.custom_serializer)
+ for tool in chat_log.llm_api.tools
]
model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
-
- messages = [
- _chat_message_convert(message) for message in session.async_get_messages()
- ]
+ messages = [_convert_content_to_param(content) for content in chat_log.content]
client = self.entry.runtime_data
@@ -213,7 +234,7 @@ class OpenAIConversationEntity(
),
"top_p": options.get(CONF_TOP_P, RECOMMENDED_TOP_P),
"temperature": options.get(CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE),
- "user": session.conversation_id,
+ "user": chat_log.conversation_id,
}
if model.startswith("o"):
@@ -229,43 +250,39 @@ class OpenAIConversationEntity(
LOGGER.debug("Response %s", result)
response = result.choices[0].message
- messages.append(_message_convert(response))
+ messages.append(_convert_message_to_param(response))
- session.async_add_message(
- conversation.Content(
- role=response.role,
- agent_id=user_input.agent_id,
- content=response.content or "",
- ),
+ tool_calls: list[llm.ToolInput] | None = None
+ if response.tool_calls:
+ tool_calls = [
+ llm.ToolInput(
+ id=tool_call.id,
+ tool_name=tool_call.function.name,
+ tool_args=json.loads(tool_call.function.arguments),
+ )
+ for tool_call in response.tool_calls
+ ]
+
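+ # adding the assistant content to the chat log also runs any tool calls;
+ # the yielded tool results are appended to the OpenAI message history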
+ messages.extend(
+ [
+ _convert_content_to_param(tool_response)
+ async for tool_response in chat_log.async_add_assistant_content(
+ conversation.AssistantContent(
+ agent_id=user_input.agent_id,
+ content=response.content or "",
+ tool_calls=tool_calls,
+ )
+ )
+ ]
)
- if not response.tool_calls or not session.llm_api:
+ if not tool_calls:
break
- for tool_call in response.tool_calls:
- tool_input = llm.ToolInput(
- tool_name=tool_call.function.name,
- tool_args=json.loads(tool_call.function.arguments),
- )
- tool_response = await session.async_call_tool(tool_input)
- messages.append(
- ChatCompletionToolMessageParam(
- role="tool",
- tool_call_id=tool_call.id,
- content=json.dumps(tool_response),
- )
- )
- session.async_add_message(
- conversation.NativeContent(
- agent_id=user_input.agent_id,
- content=messages[-1],
- )
- )
-
intent_response = intent.IntentResponse(language=user_input.language)
intent_response.async_set_speech(response.content or "")
return conversation.ConversationResult(
- response=intent_response, conversation_id=session.conversation_id
+ response=intent_response, conversation_id=chat_log.conversation_id
)
async def _async_entry_update_listener(
diff --git a/homeassistant/components/overkiz/manifest.json b/homeassistant/components/overkiz/manifest.json
index eda39821d5c..c25accd87f3 100644
--- a/homeassistant/components/overkiz/manifest.json
+++ b/homeassistant/components/overkiz/manifest.json
@@ -13,7 +13,7 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["boto3", "botocore", "pyhumps", "pyoverkiz", "s3transfer"],
- "requirements": ["pyoverkiz==1.15.5"],
+ "requirements": ["pyoverkiz==1.16.0"],
"zeroconf": [
{
"type": "_kizbox._tcp.local.",
diff --git a/homeassistant/components/ovo_energy/strings.json b/homeassistant/components/ovo_energy/strings.json
index 3dc11e3a601..9d8e449e1d1 100644
--- a/homeassistant/components/ovo_energy/strings.json
+++ b/homeassistant/components/ovo_energy/strings.json
@@ -16,10 +16,10 @@
"data": {
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]",
- "account": "OVO account id (only add if you have multiple accounts)"
+ "account": "OVO account ID (only add if you have multiple accounts)"
},
"description": "Set up an OVO Energy instance to access your energy usage.",
- "title": "Add OVO Energy Account"
+ "title": "Add OVO Energy account"
},
"reauth_confirm": {
"data": {
diff --git a/homeassistant/components/picnic/coordinator.py b/homeassistant/components/picnic/coordinator.py
index b3979580990..de686cad37d 100644
--- a/homeassistant/components/picnic/coordinator.py
+++ b/homeassistant/components/picnic/coordinator.py
@@ -21,6 +21,8 @@ from .const import ADDRESS, CART_DATA, LAST_ORDER_DATA, NEXT_DELIVERY_DATA, SLOT
class PicnicUpdateCoordinator(DataUpdateCoordinator):
"""The coordinator to fetch data from the Picnic API at a set interval."""
+ config_entry: ConfigEntry
+
def __init__(
self,
hass: HomeAssistant,
@@ -29,13 +31,13 @@ class PicnicUpdateCoordinator(DataUpdateCoordinator):
) -> None:
"""Initialize the coordinator with the given Picnic API client."""
self.picnic_api_client = picnic_api_client
- self.config_entry = config_entry
self._user_address = None
logger = logging.getLogger(__name__)
super().__init__(
hass,
logger,
+ config_entry=config_entry,
name="Picnic coordinator",
update_interval=timedelta(minutes=30),
)
diff --git a/homeassistant/components/private_ble_device/manifest.json b/homeassistant/components/private_ble_device/manifest.json
index 90518c81483..445affbcd57 100644
--- a/homeassistant/components/private_ble_device/manifest.json
+++ b/homeassistant/components/private_ble_device/manifest.json
@@ -6,5 +6,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/private_ble_device",
"iot_class": "local_push",
- "requirements": ["bluetooth-data-tools==1.23.3"]
+ "requirements": ["bluetooth-data-tools==1.23.4"]
}
diff --git a/homeassistant/components/rainmachine/strings.json b/homeassistant/components/rainmachine/strings.json
index a564d33e777..aad61458e88 100644
--- a/homeassistant/components/rainmachine/strings.json
+++ b/homeassistant/components/rainmachine/strings.json
@@ -5,7 +5,7 @@
"user": {
"title": "Fill in your information",
"data": {
- "ip_address": "Hostname or IP Address",
+ "ip_address": "Hostname or IP address",
"password": "[%key:common::config_flow::data::password%]",
"port": "[%key:common::config_flow::data::port%]"
}
@@ -157,7 +157,7 @@
},
"unpause_watering": {
"name": "Unpause all watering",
- "description": "Unpauses all paused watering activities.",
+ "description": "Resumes all paused watering activities.",
"fields": {
"device_id": {
"name": "[%key:component::rainmachine::services::pause_watering::fields::device_id::name%]",
@@ -167,7 +167,7 @@
},
"push_flow_meter_data": {
"name": "Push flow meter data",
- "description": "Push flow meter data to the RainMachine device.",
+ "description": "Sends flow meter data from Home Assistant to the RainMachine device.",
"fields": {
"device_id": {
"name": "[%key:component::rainmachine::services::pause_watering::fields::device_id::name%]",
@@ -185,7 +185,7 @@
},
"push_weather_data": {
"name": "Push weather data",
- "description": "Push weather data from Home Assistant to the RainMachine device.\nLocal Weather Push service should be enabled from Settings > Weather > Developer tab for RainMachine to consider the values being sent. Units must be sent in metric; no conversions are performed by the integraion.\nSee details of RainMachine API Here: https://rainmachine.docs.apiary.io/#reference/weather-services/parserdata/post.",
+ "description": "Sends weather data from Home Assistant to the RainMachine device.\nLocal Weather Push service should be enabled from Settings > Weather > Developer tab for RainMachine to consider the values being sent. Units must be sent in metric; no conversions are performed by the integraion.\nSee details of RainMachine API here: https://rainmachine.docs.apiary.io/#reference/weather-services/parserdata/post.",
"fields": {
"device_id": {
"name": "[%key:component::rainmachine::services::pause_watering::fields::device_id::name%]",
@@ -193,7 +193,7 @@
},
"timestamp": {
"name": "Timestamp",
- "description": "UNIX Timestamp for the weather data. If omitted, the RainMachine device's local time at the time of the call is used."
+ "description": "UNIX timestamp for the weather data. If omitted, the RainMachine device's local time at the time of the call is used."
},
"mintemp": {
"name": "Min temp",
@@ -251,7 +251,7 @@
},
"unrestrict_watering": {
"name": "Unrestrict all watering",
- "description": "Unrestrict all watering activities.",
+ "description": "Removes all watering restrictions.",
"fields": {
"device_id": {
"name": "[%key:component::rainmachine::services::pause_watering::fields::device_id::name%]",
diff --git a/homeassistant/components/reolink/manifest.json b/homeassistant/components/reolink/manifest.json
index 83729fef3cd..fb3c096ee41 100644
--- a/homeassistant/components/reolink/manifest.json
+++ b/homeassistant/components/reolink/manifest.json
@@ -19,5 +19,5 @@
"iot_class": "local_push",
"loggers": ["reolink_aio"],
"quality_scale": "platinum",
- "requirements": ["reolink-aio==0.11.8"]
+ "requirements": ["reolink-aio==0.11.9"]
}
diff --git a/homeassistant/components/reolink/number.py b/homeassistant/components/reolink/number.py
index e4b52c85d45..d8fabfaa3b8 100644
--- a/homeassistant/components/reolink/number.py
+++ b/homeassistant/components/reolink/number.py
@@ -424,6 +424,7 @@ NUMBER_ENTITIES = (
ReolinkNumberEntityDescription(
key="image_brightness",
cmd_key="GetImage",
+ cmd_id=26,
translation_key="image_brightness",
entity_category=EntityCategory.CONFIG,
entity_registry_enabled_default=False,
@@ -437,6 +438,7 @@ NUMBER_ENTITIES = (
ReolinkNumberEntityDescription(
key="image_contrast",
cmd_key="GetImage",
+ cmd_id=26,
translation_key="image_contrast",
entity_category=EntityCategory.CONFIG,
entity_registry_enabled_default=False,
@@ -450,6 +452,7 @@ NUMBER_ENTITIES = (
ReolinkNumberEntityDescription(
key="image_saturation",
cmd_key="GetImage",
+ cmd_id=26,
translation_key="image_saturation",
entity_category=EntityCategory.CONFIG,
entity_registry_enabled_default=False,
@@ -463,6 +466,7 @@ NUMBER_ENTITIES = (
ReolinkNumberEntityDescription(
key="image_sharpness",
cmd_key="GetImage",
+ cmd_id=26,
translation_key="image_sharpness",
entity_category=EntityCategory.CONFIG,
entity_registry_enabled_default=False,
@@ -476,6 +480,7 @@ NUMBER_ENTITIES = (
ReolinkNumberEntityDescription(
key="image_hue",
cmd_key="GetImage",
+ cmd_id=26,
translation_key="image_hue",
entity_category=EntityCategory.CONFIG,
entity_registry_enabled_default=False,
diff --git a/homeassistant/components/reolink/select.py b/homeassistant/components/reolink/select.py
index 7a74be2e28c..df8c0269957 100644
--- a/homeassistant/components/reolink/select.py
+++ b/homeassistant/components/reolink/select.py
@@ -80,6 +80,7 @@ SELECT_ENTITIES = (
ReolinkSelectEntityDescription(
key="day_night_mode",
cmd_key="GetIsp",
+ cmd_id=26,
translation_key="day_night_mode",
entity_category=EntityCategory.CONFIG,
get_options=[mode.name for mode in DayNightEnum],
diff --git a/homeassistant/components/rflink/light.py b/homeassistant/components/rflink/light.py
index 2a5b1ccf8d7..af8d2c76844 100644
--- a/homeassistant/components/rflink/light.py
+++ b/homeassistant/components/rflink/light.py
@@ -101,7 +101,7 @@ def entity_class_for_type(entity_type):
entity_device_mapping = {
# sends only 'dim' commands not compatible with on/off switches
TYPE_DIMMABLE: DimmableRflinkLight,
- # sends only 'on/off' commands not advices with dimmers and signal
+ # sends only 'on/off' commands not advised with dimmers and signal
# repetition
TYPE_SWITCHABLE: RflinkLight,
# sends 'dim' and 'on' command to support both dimmers and on/off
diff --git a/homeassistant/components/ring/camera.py b/homeassistant/components/ring/camera.py
index c1a4e67ffd4..e0ae2b52fa0 100644
--- a/homeassistant/components/ring/camera.py
+++ b/homeassistant/components/ring/camera.py
@@ -31,6 +31,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util import dt as dt_util
from . import RingConfigEntry
+from .const import DOMAIN
from .coordinator import RingDataCoordinator
from .entity import RingDeviceT, RingEntity, exception_wrap
@@ -218,8 +219,13 @@ class RingCam(RingEntity[RingDoorBell], Camera):
) -> None:
"""Handle a WebRTC candidate."""
if candidate.sdp_m_line_index is None:
- msg = "The sdp_m_line_index is required for ring webrtc streaming"
- raise HomeAssistantError(msg)
+ raise HomeAssistantError(
+ translation_domain=DOMAIN,
+ translation_key="sdp_m_line_index_required",
+ translation_placeholders={
+ "device": self._device.name,
+ },
+ )
await self._device.on_webrtc_candidate(
session_id, candidate.candidate, candidate.sdp_m_line_index
)
diff --git a/homeassistant/components/ring/coordinator.py b/homeassistant/components/ring/coordinator.py
index f35a6e10b9f..413c48c35eb 100644
--- a/homeassistant/components/ring/coordinator.py
+++ b/homeassistant/components/ring/coordinator.py
@@ -27,7 +27,7 @@ from homeassistant.helpers.update_coordinator import (
UpdateFailed,
)
-from .const import SCAN_INTERVAL
+from .const import DOMAIN, SCAN_INTERVAL
_LOGGER = logging.getLogger(__name__)
@@ -45,26 +45,6 @@ class RingData:
type RingConfigEntry = ConfigEntry[RingData]
-async def _call_api[*_Ts, _R](
- hass: HomeAssistant,
- target: Callable[[*_Ts], Coroutine[Any, Any, _R]],
- *args: *_Ts,
- msg_suffix: str = "",
-) -> _R:
- try:
- return await target(*args)
- except AuthenticationError as err:
- # Raising ConfigEntryAuthFailed will cancel future updates
- # and start a config flow with SOURCE_REAUTH (async_step_reauth)
- raise ConfigEntryAuthFailed from err
- except RingTimeout as err:
- raise UpdateFailed(
- f"Timeout communicating with API{msg_suffix}: {err}"
- ) from err
- except RingError as err:
- raise UpdateFailed(f"Error communicating with API{msg_suffix}: {err}") from err
-
-
class RingDataCoordinator(DataUpdateCoordinator[RingDevices]):
"""Base class for device coordinators."""
@@ -87,12 +67,37 @@ class RingDataCoordinator(DataUpdateCoordinator[RingDevices]):
self.ring_api: Ring = ring_api
self.first_call: bool = True
+ async def _call_api[*_Ts, _R](
+ self,
+ target: Callable[[*_Ts], Coroutine[Any, Any, _R]],
+ *args: *_Ts,
+ ) -> _R:
+ try:
+ return await target(*args)
+ except AuthenticationError as err:
+ # Raising ConfigEntryAuthFailed will cancel future updates
+ # and start a config flow with SOURCE_REAUTH (async_step_reauth)
+ raise ConfigEntryAuthFailed(
+ translation_domain=DOMAIN,
+ translation_key="api_authentication",
+ ) from err
+ except RingTimeout as err:
+ raise UpdateFailed(
+ translation_domain=DOMAIN,
+ translation_key="api_timeout",
+ ) from err
+ except RingError as err:
+ raise UpdateFailed(
+ translation_domain=DOMAIN,
+ translation_key="api_error",
+ ) from err
+
async def _async_update_data(self) -> RingDevices:
"""Fetch data from API endpoint."""
update_method: str = (
"async_update_data" if self.first_call else "async_update_devices"
)
- await _call_api(self.hass, getattr(self.ring_api, update_method))
+ await self._call_api(getattr(self.ring_api, update_method))
self.first_call = False
devices: RingDevices = self.ring_api.devices()
subscribed_device_ids = set(self.async_contexts())
@@ -104,18 +109,14 @@ class RingDataCoordinator(DataUpdateCoordinator[RingDevices]):
async with TaskGroup() as tg:
if device.has_capability("history"):
tg.create_task(
- _call_api(
- self.hass,
+ self._call_api(
lambda device: device.async_history(limit=10),
device,
- msg_suffix=f" for device {device.name}", # device_id is the mac
)
)
tg.create_task(
- _call_api(
- self.hass,
+ self._call_api(
device.async_update_health_data,
- msg_suffix=f" for device {device.name}",
)
)
except ExceptionGroup as eg:
diff --git a/homeassistant/components/ring/entity.py b/homeassistant/components/ring/entity.py
index d48cc35a4f5..5d77bf3a285 100644
--- a/homeassistant/components/ring/entity.py
+++ b/homeassistant/components/ring/entity.py
@@ -2,6 +2,7 @@
from collections.abc import Awaitable, Callable, Coroutine
from dataclasses import dataclass
+import logging
from typing import Any, Concatenate, Generic, TypeVar, cast
from ring_doorbell import (
@@ -36,6 +37,8 @@ _RingCoordinatorT = TypeVar(
bound=(RingDataCoordinator | RingListenCoordinator),
)
+_LOGGER = logging.getLogger(__name__)
+
@dataclass(slots=True)
class DeprecatedInfo:
@@ -62,14 +65,22 @@ def exception_wrap[_RingBaseEntityT: RingBaseEntity[Any, Any], **_P, _R](
return await async_func(self, *args, **kwargs)
except AuthenticationError as err:
self.coordinator.config_entry.async_start_reauth(self.hass)
- raise HomeAssistantError(err) from err
+ raise HomeAssistantError(
+ translation_domain=DOMAIN,
+ translation_key="api_authentication",
+ ) from err
except RingTimeout as err:
raise HomeAssistantError(
- f"Timeout communicating with API {async_func}: {err}"
+ translation_domain=DOMAIN,
+ translation_key="api_timeout",
) from err
except RingError as err:
+ _LOGGER.debug(
+ "Error calling %s in platform %s: ", async_func.__name__, self.platform
+ )
raise HomeAssistantError(
- f"Error communicating with API{async_func}: {err}"
+ translation_domain=DOMAIN,
+ translation_key="api_error",
) from err
return _wrap
diff --git a/homeassistant/components/ring/strings.json b/homeassistant/components/ring/strings.json
index 219463d92d9..2d7e0b17da1 100644
--- a/homeassistant/components/ring/strings.json
+++ b/homeassistant/components/ring/strings.json
@@ -141,6 +141,20 @@
}
}
},
+ "exceptions": {
+ "api_authentication": {
+ "message": "Authentication error communicating with Ring API"
+ },
+ "api_timeout": {
+ "message": "Timeout communicating with Ring API"
+ },
+ "api_error": {
+ "message": "Error communicating with Ring API"
+ },
+ "sdp_m_line_index_required": {
+ "message": "Error negotiating stream for {device}"
+ }
+ },
"issues": {
"deprecated_entity": {
"title": "Detected deprecated {platform} entity usage",
diff --git a/homeassistant/components/roborock/manifest.json b/homeassistant/components/roborock/manifest.json
index 76d7ab98a34..db2654d4baa 100644
--- a/homeassistant/components/roborock/manifest.json
+++ b/homeassistant/components/roborock/manifest.json
@@ -7,7 +7,7 @@
"iot_class": "local_polling",
"loggers": ["roborock"],
"requirements": [
- "python-roborock==2.9.7",
+ "python-roborock==2.11.1",
"vacuum-map-parser-roborock==0.1.2"
]
}
diff --git a/homeassistant/components/roomba/manifest.json b/homeassistant/components/roomba/manifest.json
index edb317f9752..dbfd803f89b 100644
--- a/homeassistant/components/roomba/manifest.json
+++ b/homeassistant/components/roomba/manifest.json
@@ -24,7 +24,7 @@
"documentation": "https://www.home-assistant.io/integrations/roomba",
"iot_class": "local_push",
"loggers": ["paho_mqtt", "roombapy"],
- "requirements": ["roombapy==1.8.1"],
+ "requirements": ["roombapy==1.9.0"],
"zeroconf": [
{
"type": "_amzn-alexa._tcp.local.",
diff --git a/homeassistant/components/rympro/__init__.py b/homeassistant/components/rympro/__init__.py
index f24735f4ed0..20d208cca69 100644
--- a/homeassistant/components/rympro/__init__.py
+++ b/homeassistant/components/rympro/__init__.py
@@ -38,7 +38,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
data={**data, CONF_TOKEN: token},
)
- coordinator = RymProDataUpdateCoordinator(hass, rympro)
+ coordinator = RymProDataUpdateCoordinator(hass, entry, rympro)
await coordinator.async_config_entry_first_refresh()
hass.data.setdefault(DOMAIN, {})
diff --git a/homeassistant/components/rympro/coordinator.py b/homeassistant/components/rympro/coordinator.py
index 19f16005578..55e5f0f90df 100644
--- a/homeassistant/components/rympro/coordinator.py
+++ b/homeassistant/components/rympro/coordinator.py
@@ -7,6 +7,7 @@ import logging
from pyrympro import CannotConnectError, OperationError, RymPro, UnauthorizedError
+from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -20,13 +21,18 @@ _LOGGER = logging.getLogger(__name__)
class RymProDataUpdateCoordinator(DataUpdateCoordinator[dict[int, dict]]):
"""Class to manage fetching RYM Pro data."""
- def __init__(self, hass: HomeAssistant, rympro: RymPro) -> None:
+ config_entry: ConfigEntry
+
+ def __init__(
+ self, hass: HomeAssistant, config_entry: ConfigEntry, rympro: RymPro
+ ) -> None:
"""Initialize global RymPro data updater."""
self.rympro = rympro
interval = timedelta(seconds=SCAN_INTERVAL)
super().__init__(
hass,
_LOGGER,
+ config_entry=config_entry,
name=DOMAIN,
update_interval=interval,
)
@@ -40,7 +46,6 @@ class RymProDataUpdateCoordinator(DataUpdateCoordinator[dict[int, dict]]):
meter_id
)
except UnauthorizedError as error:
- assert self.config_entry
await self.hass.config_entries.async_reload(self.config_entry.entry_id)
raise UpdateFailed(error) from error
except (CannotConnectError, OperationError) as error:
diff --git a/homeassistant/components/schedule/__init__.py b/homeassistant/components/schedule/__init__.py
index 20dc9c1256a..ea569f4e277 100644
--- a/homeassistant/components/schedule/__init__.py
+++ b/homeassistant/components/schedule/__init__.py
@@ -18,7 +18,13 @@ from homeassistant.const import (
STATE_OFF,
STATE_ON,
)
-from homeassistant.core import HomeAssistant, ServiceCall, callback
+from homeassistant.core import (
+ HomeAssistant,
+ ServiceCall,
+ ServiceResponse,
+ SupportsResponse,
+ callback,
+)
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.collection import (
CollectionEntity,
@@ -44,6 +50,7 @@ from .const import (
CONF_TO,
DOMAIN,
LOGGER,
+ SERVICE_GET,
WEEKDAY_TO_CONF,
)
@@ -205,6 +212,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
reload_service_handler,
)
+ component.async_register_entity_service(
+ SERVICE_GET,
+ {},
+ async_get_schedule_service,
+ supports_response=SupportsResponse.ONLY,
+ )
+ await component.async_setup(config)
+
return True
@@ -296,6 +311,10 @@ class Schedule(CollectionEntity):
self.async_on_remove(self._clean_up_listener)
self._update()
+ def get_schedule(self) -> ConfigType:
+ """Return the schedule."""
+ return {d: self._config[d] for d in WEEKDAY_TO_CONF.values()}
+
@callback
def _update(self, _: datetime | None = None) -> None:
"""Update the states of the schedule."""
@@ -390,3 +409,10 @@ class Schedule(CollectionEntity):
data_keys.update(time_range_custom_data.keys())
return frozenset(data_keys)
+
+
+async def async_get_schedule_service(
+ schedule: Schedule, service_call: ServiceCall
+) -> ServiceResponse:
+ """Return the schedule configuration."""
+ return schedule.get_schedule()
diff --git a/homeassistant/components/schedule/const.py b/homeassistant/components/schedule/const.py
index 6687dafefdb..410cd00c3a0 100644
--- a/homeassistant/components/schedule/const.py
+++ b/homeassistant/components/schedule/const.py
@@ -37,3 +37,5 @@ WEEKDAY_TO_CONF: Final = {
5: CONF_SATURDAY,
6: CONF_SUNDAY,
}
+
+SERVICE_GET: Final = "get_schedule"
diff --git a/homeassistant/components/schedule/icons.json b/homeassistant/components/schedule/icons.json
index a9829425570..7d631cfd42d 100644
--- a/homeassistant/components/schedule/icons.json
+++ b/homeassistant/components/schedule/icons.json
@@ -2,6 +2,9 @@
"services": {
"reload": {
"service": "mdi:reload"
+ },
+ "get_schedule": {
+ "service": "mdi:calendar-export"
}
}
}
diff --git a/homeassistant/components/schedule/services.yaml b/homeassistant/components/schedule/services.yaml
index c983a105c93..1cb3f0280af 100644
--- a/homeassistant/components/schedule/services.yaml
+++ b/homeassistant/components/schedule/services.yaml
@@ -1 +1,5 @@
reload:
+get_schedule:
+ target:
+ entity:
+ domain: schedule
diff --git a/homeassistant/components/schedule/strings.json b/homeassistant/components/schedule/strings.json
index a40c5814d36..8638e4a8a84 100644
--- a/homeassistant/components/schedule/strings.json
+++ b/homeassistant/components/schedule/strings.json
@@ -25,6 +25,10 @@
"reload": {
"name": "[%key:common::action::reload%]",
"description": "Reloads schedules from the YAML-configuration."
+ },
+ "get_schedule": {
+ "name": "Get schedule",
+ "description": "Retrieve one or multiple schedules."
}
}
}
diff --git a/homeassistant/components/screenlogic/config_flow.py b/homeassistant/components/screenlogic/config_flow.py
index 0fdf5d96445..b4deb9b36aa 100644
--- a/homeassistant/components/screenlogic/config_flow.py
+++ b/homeassistant/components/screenlogic/config_flow.py
@@ -105,7 +105,7 @@ class ScreenlogicConfigFlow(ConfigFlow, domain=DOMAIN):
async def async_step_gateway_select(self, user_input=None) -> ConfigFlowResult:
"""Handle the selection of a discovered ScreenLogic gateway."""
- existing = self._async_current_ids()
+ existing = self._async_current_ids(include_ignore=False)
unconfigured_gateways = {
mac: gateway[SL_GATEWAY_NAME]
for mac, gateway in self.discovered_gateways.items()
diff --git a/homeassistant/components/screenlogic/coordinator.py b/homeassistant/components/screenlogic/coordinator.py
index a90c9cb2cf4..b3c438dc641 100644
--- a/homeassistant/components/screenlogic/coordinator.py
+++ b/homeassistant/components/screenlogic/coordinator.py
@@ -52,6 +52,8 @@ async def async_get_connect_info(
class ScreenlogicDataUpdateCoordinator(DataUpdateCoordinator[None]):
"""Class to manage the data update for the Screenlogic component."""
+ config_entry: ConfigEntry
+
def __init__(
self,
hass: HomeAssistant,
@@ -60,7 +62,6 @@ class ScreenlogicDataUpdateCoordinator(DataUpdateCoordinator[None]):
gateway: ScreenLogicGateway,
) -> None:
"""Initialize the Screenlogic Data Update Coordinator."""
- self.config_entry = config_entry
self.gateway = gateway
interval = timedelta(
@@ -69,6 +70,7 @@ class ScreenlogicDataUpdateCoordinator(DataUpdateCoordinator[None]):
super().__init__(
hass,
_LOGGER,
+ config_entry=config_entry,
name=DOMAIN,
update_interval=interval,
# Debounced option since the device takes
@@ -91,7 +93,6 @@ class ScreenlogicDataUpdateCoordinator(DataUpdateCoordinator[None]):
async def _async_update_data(self) -> None:
"""Fetch data from the Screenlogic gateway."""
- assert self.config_entry is not None
try:
if not self.gateway.is_connected:
connect_info = await async_get_connect_info(
diff --git a/homeassistant/components/sharkiq/strings.json b/homeassistant/components/sharkiq/strings.json
index 40b569e13b7..3c4c98db38f 100644
--- a/homeassistant/components/sharkiq/strings.json
+++ b/homeassistant/components/sharkiq/strings.json
@@ -1,16 +1,16 @@
{
"config": {
- "flow_title": "Add Shark IQ Account",
+ "flow_title": "Add Shark IQ account",
"step": {
"user": {
- "description": "Sign into your Shark Clean account to control your devices.",
+ "description": "Sign into your SharkClean account to control your devices.",
"data": {
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]",
"region": "Region"
},
"data_description": {
- "region": "Shark IQ uses different services in the EU. Select your region to connect to the correct service for your account."
+ "region": "Shark IQ uses different services in the EU. Select your region to connect to the correct service for your account."
}
},
"reauth_confirm": {
@@ -37,18 +37,18 @@
"region": {
"options": {
"europe": "Europe",
- "elsewhere": "Everywhere Else"
+ "elsewhere": "Everywhere else"
}
}
},
"exceptions": {
"invalid_room": {
- "message": "The room {room} is unavailable to your vacuum. Make sure all rooms match the Shark App, including capitalization."
+ "message": "The room {room} is unavailable to your vacuum. Make sure all rooms match the SharkClean app, including capitalization."
}
},
"services": {
"clean_room": {
- "name": "Clean Room",
+ "name": "Clean room",
"description": "Cleans a specific user-defined room or set of rooms.",
"fields": {
"rooms": {
diff --git a/homeassistant/components/shelly/binary_sensor.py b/homeassistant/components/shelly/binary_sensor.py
index 108a8236733..fb253c682d8 100644
--- a/homeassistant/components/shelly/binary_sensor.py
+++ b/homeassistant/components/shelly/binary_sensor.py
@@ -272,6 +272,18 @@ RPC_SENSORS: Final = {
entity_category=EntityCategory.DIAGNOSTIC,
entity_class=RpcBluTrvBinarySensor,
),
+ "flood": RpcBinarySensorDescription(
+ key="flood",
+ sub_key="alarm",
+ name="Flood",
+ device_class=BinarySensorDeviceClass.MOISTURE,
+ ),
+ "mute": RpcBinarySensorDescription(
+ key="flood",
+ sub_key="mute",
+ name="Mute",
+ entity_category=EntityCategory.DIAGNOSTIC,
+ ),
}
diff --git a/homeassistant/components/shelly/const.py b/homeassistant/components/shelly/const.py
index e78a6f1a59d..c8fa72606d6 100644
--- a/homeassistant/components/shelly/const.py
+++ b/homeassistant/components/shelly/const.py
@@ -116,6 +116,10 @@ BATTERY_DEVICES_WITH_PERMANENT_CONNECTION: Final = [
# Button/Click events for Block & RPC devices
EVENT_SHELLY_CLICK: Final = "shelly.click"
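+# matches Shelly.emitEvent("event_name", ...) calls in script code to extract event names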
+SHELLY_EMIT_EVENT_PATTERN: Final = re.compile(
+ r"(?:Shelly\s*\.\s*emitEvent\s*\(\s*[\"'`])(\w*)"
+)
+
ATTR_CLICK_TYPE: Final = "click_type"
ATTR_CHANNEL: Final = "channel"
ATTR_DEVICE: Final = "device"
diff --git a/homeassistant/components/shelly/event.py b/homeassistant/components/shelly/event.py
index 372d73dea3c..78093bec8aa 100644
--- a/homeassistant/components/shelly/event.py
+++ b/homeassistant/components/shelly/event.py
@@ -6,6 +6,7 @@ from collections.abc import Callable
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, Final
+from aioshelly.ble.const import BLE_SCRIPT_NAME
from aioshelly.block_device import Block
from aioshelly.const import MODEL_I3, RPC_GENERATIONS
@@ -28,10 +29,12 @@ from .const import (
from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator
from .entity import ShellyBlockEntity
from .utils import (
+ async_remove_orphaned_entities,
async_remove_shelly_entity,
get_device_entry_gen,
get_rpc_entity_name,
get_rpc_key_instances,
+ get_rpc_script_event_types,
is_block_momentary_input,
is_rpc_momentary_input,
)
@@ -68,6 +71,13 @@ RPC_EVENT: Final = ShellyRpcEventDescription(
config, status, key
),
)
+SCRIPT_EVENT: Final = ShellyRpcEventDescription(
+ key="script",
+ translation_key="script",
+ device_class=None,
+ entity_registry_enabled_default=False,
+ has_entity_name=True,
+)
async def async_setup_entry(
@@ -95,6 +105,33 @@ async def async_setup_entry(
async_remove_shelly_entity(hass, EVENT_DOMAIN, unique_id)
else:
entities.append(ShellyRpcEvent(coordinator, key, RPC_EVENT))
+
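+ # create event entities for scripts that emit events via Shelly.emitEvent()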
+ script_instances = get_rpc_key_instances(
+ coordinator.device.status, SCRIPT_EVENT.key
+ )
+ for script in script_instances:
+ script_name = get_rpc_entity_name(coordinator.device, script)
+ if script_name == BLE_SCRIPT_NAME:
+ continue
+
+ event_types = await get_rpc_script_event_types(
+ coordinator.device, int(script.split(":")[-1])
+ )
+ if not event_types:
+ continue
+
+ entities.append(ShellyRpcScriptEvent(coordinator, script, event_types))
+
+ # If a script is removed from the device configuration, we need to remove orphaned entities
+ async_remove_orphaned_entities(
+ hass,
+ config_entry.entry_id,
+ coordinator.mac,
+ EVENT_DOMAIN,
+ coordinator.device.status,
+ "script",
+ )
+
else:
coordinator = config_entry.runtime_data.block
if TYPE_CHECKING:
@@ -170,7 +207,7 @@ class ShellyRpcEvent(CoordinatorEntity[ShellyRpcCoordinator], EventEntity):
) -> None:
"""Initialize Shelly entity."""
super().__init__(coordinator)
- self.input_index = int(key.split(":")[-1])
+ self.event_id = int(key.split(":")[-1])
self._attr_device_info = DeviceInfo(
connections={(CONNECTION_NETWORK_MAC, coordinator.mac)}
)
@@ -181,6 +218,7 @@ class ShellyRpcEvent(CoordinatorEntity[ShellyRpcCoordinator], EventEntity):
async def async_added_to_hass(self) -> None:
"""When entity is added to hass."""
await super().async_added_to_hass()
+
self.async_on_remove(
self.coordinator.async_subscribe_input_events(self._async_handle_event)
)
@@ -188,6 +226,42 @@ class ShellyRpcEvent(CoordinatorEntity[ShellyRpcCoordinator], EventEntity):
@callback
def _async_handle_event(self, event: dict[str, Any]) -> None:
"""Handle the demo button event."""
- if event["id"] == self.input_index:
+ if event["id"] == self.event_id:
self._trigger_event(event["event"])
self.async_write_ha_state()
+
+
+class ShellyRpcScriptEvent(ShellyRpcEvent):
+ """Represent RPC script event entity."""
+
+ def __init__(
+ self,
+ coordinator: ShellyRpcCoordinator,
+ key: str,
+ event_types: list[str],
+ ) -> None:
+ """Initialize Shelly script event entity."""
+ super().__init__(coordinator, key, SCRIPT_EVENT)
+
+ self.component = key
+ self._attr_event_types = event_types
+
+ async def async_added_to_hass(self) -> None:
+ """When entity is added to hass."""
+ await super(CoordinatorEntity, self).async_added_to_hass()
+
+ self.async_on_remove(
+ self.coordinator.async_subscribe_events(self._async_handle_event)
+ )
+
+ @callback
+ def _async_handle_event(self, event: dict[str, Any]) -> None:
+ """Handle script event."""
+ if event.get("component") == self.component:
+ event_type = event.get("event")
+ if event_type not in self.event_types:
+ # This can happen if we didn't find this event type in the script
+ return
+
+ self._trigger_event(event_type, event.get("data"))
+ self.async_write_ha_state()
diff --git a/homeassistant/components/shelly/utils.py b/homeassistant/components/shelly/utils.py
index 81766c65388..fa310104424 100644
--- a/homeassistant/components/shelly/utils.py
+++ b/homeassistant/components/shelly/utils.py
@@ -56,6 +56,7 @@ from .const import (
RPC_INPUTS_EVENTS_TYPES,
SHBTN_INPUTS_EVENTS_TYPES,
SHBTN_MODELS,
+ SHELLY_EMIT_EVENT_PATTERN,
SHIX3_1_INPUTS_EVENTS_TYPES,
UPTIME_DEVIATION,
VIRTUAL_COMPONENTS_MAP,
@@ -598,3 +599,10 @@ def get_rpc_ws_url(hass: HomeAssistant) -> str | None:
url = URL(raw_url)
ws_url = url.with_scheme("wss" if url.scheme == "https" else "ws")
return str(ws_url.joinpath(API_WS_URL.removeprefix("/")))
+
+
+async def get_rpc_script_event_types(device: RpcDevice, id: int) -> list[str]:
+ """Return a list of event types for a specific script."""
+ code_response = await device.script_getcode(id)
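+ # collect the unique event names emitted via Shelly.emitEvent() in the script source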
+ matches = SHELLY_EMIT_EVENT_PATTERN.finditer(code_response["data"])
+ return sorted([*{str(event_type.group(1)) for event_type in matches}])
diff --git a/homeassistant/components/smart_rollos/__init__.py b/homeassistant/components/smart_rollos/__init__.py
new file mode 100644
index 00000000000..d4bb8c7fb1b
--- /dev/null
+++ b/homeassistant/components/smart_rollos/__init__.py
@@ -0,0 +1 @@
+"""Virtual integration: Smart Rollos."""
diff --git a/homeassistant/components/smart_rollos/manifest.json b/homeassistant/components/smart_rollos/manifest.json
new file mode 100644
index 00000000000..f093f740bd6
--- /dev/null
+++ b/homeassistant/components/smart_rollos/manifest.json
@@ -0,0 +1,6 @@
+{
+ "domain": "smart_rollos",
+ "name": "Smart Rollos",
+ "integration_type": "virtual",
+ "supported_by": "motion_blinds"
+}
diff --git a/homeassistant/components/smlight/__init__.py b/homeassistant/components/smlight/__init__.py
index cbfb8162d63..11c6ffb73fb 100644
--- a/homeassistant/components/smlight/__init__.py
+++ b/homeassistant/components/smlight/__init__.py
@@ -4,7 +4,7 @@ from __future__ import annotations
from dataclasses import dataclass
-from pysmlight import Api2
+from pysmlight import Api2, Info, Radio
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, Platform
@@ -61,3 +61,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: SmConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: SmConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
+
+
+def get_radio(info: Info, idx: int) -> Radio:
+ """Get the radio object from the info."""
+ assert info.radios is not None
+ return info.radios[idx]
diff --git a/homeassistant/components/smlight/config_flow.py b/homeassistant/components/smlight/config_flow.py
index 34bd0758174..667e6e2884b 100644
--- a/homeassistant/components/smlight/config_flow.py
+++ b/homeassistant/components/smlight/config_flow.py
@@ -14,6 +14,7 @@ from homeassistant.config_entries import SOURCE_USER, ConfigFlow, ConfigFlowResu
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import format_mac
+from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
from .const import DOMAIN
@@ -35,7 +36,8 @@ STEP_AUTH_DATA_SCHEMA = vol.Schema(
class SmlightConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for SMLIGHT Zigbee."""
- host: str
+ _host: str
+ _device_name: str
client: Api2
async def async_step_user(
@@ -45,11 +47,13 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN):
errors: dict[str, str] = {}
if user_input is not None:
- self.host = user_input[CONF_HOST]
- self.client = Api2(self.host, session=async_get_clientsession(self.hass))
+ self._host = user_input[CONF_HOST]
+ self.client = Api2(self._host, session=async_get_clientsession(self.hass))
try:
info = await self.client.get_info()
+ self._host = str(info.device_ip)
+ self._device_name = str(info.hostname)
if info.model not in Devices:
return self.async_abort(reason="unsupported_device")
@@ -93,15 +97,14 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN):
self, discovery_info: ZeroconfServiceInfo
) -> ConfigFlowResult:
"""Handle a discovered Lan coordinator."""
- local_name = discovery_info.hostname[:-1]
- node_name = local_name.removesuffix(".local")
+ mac: str | None = discovery_info.properties.get("mac")
+ self._device_name = discovery_info.hostname.removesuffix(".local.")
+ self._host = discovery_info.host
- self.host = local_name
- self.context["title_placeholders"] = {CONF_NAME: node_name}
- self.client = Api2(self.host, session=async_get_clientsession(self.hass))
+ self.context["title_placeholders"] = {CONF_NAME: self._device_name}
+ self.client = Api2(self._host, session=async_get_clientsession(self.hass))
- mac = discovery_info.properties.get("mac")
- # fallback for legacy firmware
+ # fallback for legacy firmware older than v2.3.x
if mac is None:
try:
info = await self.client.get_info()
@@ -111,7 +114,7 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN):
mac = info.MAC
await self.async_set_unique_id(format_mac(mac))
- self._abort_if_unique_id_configured()
+ self._abort_if_unique_id_configured(updates={CONF_HOST: self._host})
return await self.async_step_confirm_discovery()
@@ -122,7 +125,6 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN):
errors: dict[str, str] = {}
if user_input is not None:
- user_input[CONF_HOST] = self.host
try:
info = await self.client.get_info()
@@ -142,7 +144,7 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN):
return self.async_show_form(
step_id="confirm_discovery",
- description_placeholders={"host": self.host},
+ description_placeholders={"host": self._device_name},
errors=errors,
)
@@ -151,8 +153,8 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN):
) -> ConfigFlowResult:
"""Handle reauth when API Authentication failed."""
- self.host = entry_data[CONF_HOST]
- self.client = Api2(self.host, session=async_get_clientsession(self.hass))
+ self._host = entry_data[CONF_HOST]
+ self.client = Api2(self._host, session=async_get_clientsession(self.hass))
return await self.async_step_reauth_confirm()
@@ -182,6 +184,16 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN):
errors=errors,
)
+ async def async_step_dhcp(
+ self, discovery_info: DhcpServiceInfo
+ ) -> ConfigFlowResult:
+ """Handle DHCP discovery."""
+ await self.async_set_unique_id(format_mac(discovery_info.macaddress))
+ self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.ip})
+ # This should never happen since we only listen to DHCP requests
+ # for configured devices.
+ return self.async_abort(reason="already_configured")
+
async def _async_check_auth_required(self, user_input: dict[str, Any]) -> bool:
"""Check if auth required and attempt to authenticate."""
if await self.client.check_auth_needed():
@@ -200,11 +212,10 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN):
await self.async_set_unique_id(
format_mac(info.MAC), raise_on_progress=self.source != SOURCE_USER
)
- self._abort_if_unique_id_configured()
+ self._abort_if_unique_id_configured(updates={CONF_HOST: self._host})
- if user_input.get(CONF_HOST) is None:
- user_input[CONF_HOST] = self.host
+ user_input[CONF_HOST] = self._host
assert info.model is not None
- title = self.context.get("title_placeholders", {}).get(CONF_NAME) or info.model
+ title = self._device_name or info.model
return self.async_create_entry(title=title, data=user_input)
diff --git a/homeassistant/components/smlight/coordinator.py b/homeassistant/components/smlight/coordinator.py
index 6be36439e9f..341c627afe5 100644
--- a/homeassistant/components/smlight/coordinator.py
+++ b/homeassistant/components/smlight/coordinator.py
@@ -9,7 +9,7 @@ from typing import TYPE_CHECKING
from pysmlight import Api2, Info, Sensors
from pysmlight.const import Settings, SettingsProp
from pysmlight.exceptions import SmlightAuthError, SmlightConnectionError
-from pysmlight.web import Firmware
+from pysmlight.models import FirmwareList
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
@@ -38,8 +38,8 @@ class SmFwData:
"""SMLIGHT firmware data stored in the FirmwareUpdateCoordinator."""
info: Info
- esp_firmware: list[Firmware] | None
- zb_firmware: list[Firmware] | None
+ esp_firmware: FirmwareList
+ zb_firmware: list[FirmwareList]
class SmBaseDataUpdateCoordinator[_DataT](DataUpdateCoordinator[_DataT]):
@@ -144,15 +144,30 @@ class SmFirmwareUpdateCoordinator(SmBaseDataUpdateCoordinator[SmFwData]):
async def _internal_update_data(self) -> SmFwData:
"""Fetch data from the SMLIGHT device."""
info = await self.client.get_info()
+ assert info.radios is not None
esp_firmware = None
- zb_firmware = None
+ zb_firmware: list[FirmwareList] = []
try:
esp_firmware = await self.client.get_firmware_version(info.fw_channel)
- zb_firmware = await self.client.get_firmware_version(
- info.fw_channel, device=info.model, mode="zigbee"
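+ # fetch the Zigbee firmware list separately for each radio of the device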
+ zb_firmware.extend(
+ [
+ await self.client.get_firmware_version(
+ info.fw_channel,
+ device=info.model,
+ mode="zigbee",
+ zb_type=r.zb_type,
+ idx=idx,
+ )
+ for idx, r in enumerate(info.radios)
+ ]
)
+
except SmlightConnectionError as err:
self.async_set_update_error(err)
- return SmFwData(info=info, esp_firmware=esp_firmware, zb_firmware=zb_firmware)
+ return SmFwData(
+ info=info,
+ esp_firmware=esp_firmware,
+ zb_firmware=zb_firmware,
+ )
diff --git a/homeassistant/components/smlight/manifest.json b/homeassistant/components/smlight/manifest.json
index 3691c211838..3f527d1fcd9 100644
--- a/homeassistant/components/smlight/manifest.json
+++ b/homeassistant/components/smlight/manifest.json
@@ -3,10 +3,15 @@
"name": "SMLIGHT SLZB",
"codeowners": ["@tl-sl"],
"config_flow": true,
+ "dhcp": [
+ {
+ "registered_devices": true
+ }
+ ],
"documentation": "https://www.home-assistant.io/integrations/smlight",
"integration_type": "device",
"iot_class": "local_push",
- "requirements": ["pysmlight==0.2.1"],
+ "requirements": ["pysmlight==0.2.3"],
"zeroconf": [
{
"type": "_slzb-06._tcp.local."
diff --git a/homeassistant/components/smlight/update.py b/homeassistant/components/smlight/update.py
index 147b1d766ef..50a123345c6 100644
--- a/homeassistant/components/smlight/update.py
+++ b/homeassistant/components/smlight/update.py
@@ -5,7 +5,7 @@ from __future__ import annotations
import asyncio
from collections.abc import Callable
from dataclasses import dataclass
-from typing import Any, Final
+from typing import Any
from pysmlight.const import Events as SmEvents
from pysmlight.models import Firmware, Info
@@ -22,34 +22,43 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddEntitiesCallback
-from . import SmConfigEntry
+from . import SmConfigEntry, get_radio
from .const import LOGGER
from .coordinator import SmFirmwareUpdateCoordinator, SmFwData
from .entity import SmEntity
+def zigbee_latest_version(data: SmFwData, idx: int) -> Firmware | None:
+ """Get the latest Zigbee firmware version."""
+
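+ # zb_firmware holds one firmware list per radio; idx selects the radio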
+ if idx < len(data.zb_firmware):
+ firmware_list = data.zb_firmware[idx]
+ if firmware_list:
+ return firmware_list[0]
+ return None
+
+
@dataclass(frozen=True, kw_only=True)
class SmUpdateEntityDescription(UpdateEntityDescription):
"""Describes SMLIGHT SLZB-06 update entity."""
- installed_version: Callable[[Info], str | None]
- fw_list: Callable[[SmFwData], list[Firmware] | None]
+ installed_version: Callable[[Info, int], str | None]
+ latest_version: Callable[[SmFwData, int], Firmware | None]
-UPDATE_ENTITIES: Final = [
- SmUpdateEntityDescription(
- key="core_update",
- translation_key="core_update",
- installed_version=lambda x: x.sw_version,
- fw_list=lambda x: x.esp_firmware,
- ),
- SmUpdateEntityDescription(
- key="zigbee_update",
- translation_key="zigbee_update",
- installed_version=lambda x: x.zb_version,
- fw_list=lambda x: x.zb_firmware,
- ),
-]
+CORE_UPDATE_ENTITY = SmUpdateEntityDescription(
+ key="core_update",
+ translation_key="core_update",
+ installed_version=lambda x, idx: x.sw_version,
+ latest_version=lambda x, idx: x.esp_firmware[0] if x.esp_firmware else None,
+)
+
+ZB_UPDATE_ENTITY = SmUpdateEntityDescription(
+ key="zigbee_update",
+ translation_key="zigbee_update",
+ installed_version=lambda x, idx: get_radio(x, idx).zb_version,
+ latest_version=zigbee_latest_version,
+)
async def async_setup_entry(
@@ -58,10 +67,21 @@ async def async_setup_entry(
"""Set up the SMLIGHT update entities."""
coordinator = entry.runtime_data.firmware
- async_add_entities(
- SmUpdateEntity(coordinator, description) for description in UPDATE_ENTITIES
+    # Updates are not available for the legacy API; the user gets a repair issue to update externally
+ if coordinator.legacy_api == 2:
+ return
+
+ entities = [SmUpdateEntity(coordinator, CORE_UPDATE_ENTITY)]
+ radios = coordinator.data.info.radios
+ assert radios is not None
+
+ entities.extend(
+ SmUpdateEntity(coordinator, ZB_UPDATE_ENTITY, idx)
+ for idx, _ in enumerate(radios)
)
+ async_add_entities(entities)
+
class SmUpdateEntity(SmEntity, UpdateEntity):
"""Representation for SLZB-06 update entities."""
@@ -80,42 +100,46 @@ class SmUpdateEntity(SmEntity, UpdateEntity):
self,
coordinator: SmFirmwareUpdateCoordinator,
description: SmUpdateEntityDescription,
+ idx: int = 0,
) -> None:
"""Initialize the entity."""
super().__init__(coordinator)
self.entity_description = description
- self._attr_unique_id = f"{coordinator.unique_id}-{description.key}"
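+        # Suffix the key with the radio index so additional radios get distinct unique IDs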
+ device = description.key + (f"_{idx}" if idx else "")
+ self._attr_unique_id = f"{coordinator.unique_id}-{device}"
self._finished_event = asyncio.Event()
self._firmware: Firmware | None = None
self._unload: list[Callable] = []
+ self.idx = idx
+
+ async def async_added_to_hass(self) -> None:
+ """When entity is added to hass."""
+ await super().async_added_to_hass()
+ self._handle_coordinator_update()
+
+ @callback
+ def _handle_coordinator_update(self) -> None:
+ """Handle coordinator update callbacks."""
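+        # Cache the latest firmware for this entity and only write state once one is available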
+ self._firmware = self.entity_description.latest_version(
+ self.coordinator.data, self.idx
+ )
+ if self._firmware:
+ self.async_write_ha_state()
@property
def installed_version(self) -> str | None:
"""Version installed.."""
data = self.coordinator.data
- version = self.entity_description.installed_version(data.info)
- return version if version != "-1" else None
+ return self.entity_description.installed_version(data.info, self.idx)
@property
def latest_version(self) -> str | None:
"""Latest version available for install."""
- data = self.coordinator.data
- if self.coordinator.legacy_api == 2:
- return None
- fw = self.entity_description.fw_list(data)
-
- if fw and self.entity_description.key == "zigbee_update":
- fw = [f for f in fw if f.type == data.info.zb_type]
-
- if fw:
- self._firmware = fw[0]
- return self._firmware.ver
-
- return None
+ return self._firmware.ver if self._firmware else None
def register_callbacks(self) -> None:
"""Register callbacks for SSE update events."""
@@ -143,9 +167,14 @@ class SmUpdateEntity(SmEntity, UpdateEntity):
def release_notes(self) -> str | None:
"""Return release notes for firmware."""
+ if "zigbee" in self.entity_description.key:
+ notes = f"### {'ZNP' if self.idx else 'EZSP'} Firmware\n\n"
+ else:
+ notes = "### Core Firmware\n\n"
if self._firmware and self._firmware.notes:
- return self._firmware.notes
+ notes += self._firmware.notes
+ return notes
return None
@@ -192,7 +221,7 @@ class SmUpdateEntity(SmEntity, UpdateEntity):
self._attr_update_percentage = None
self.register_callbacks()
- await self.coordinator.client.fw_update(self._firmware)
+ await self.coordinator.client.fw_update(self._firmware, self.idx)
# block until update finished event received
await self._finished_event.wait()
diff --git a/homeassistant/components/sonos/strings.json b/homeassistant/components/sonos/strings.json
index d3774e85213..07d2e2db4e0 100644
--- a/homeassistant/components/sonos/strings.json
+++ b/homeassistant/components/sonos/strings.json
@@ -87,7 +87,7 @@
"services": {
"snapshot": {
"name": "Snapshot",
- "description": "Takes a snapshot of the media player.",
+ "description": "Takes a snapshot of a media player.",
"fields": {
"entity_id": {
"name": "Entity",
@@ -95,13 +95,13 @@
},
"with_group": {
"name": "With group",
- "description": "True or False. Also snapshot the group layout."
+ "description": "Whether the snapshot should include the group layout and the state of other speakers in the group."
}
}
},
"restore": {
"name": "Restore",
- "description": "Restores a snapshot of the media player.",
+ "description": "Restores a snapshot of a media player.",
"fields": {
"entity_id": {
"name": "Entity",
@@ -109,7 +109,7 @@
},
"with_group": {
"name": "[%key:component::sonos::services::snapshot::fields::with_group::name%]",
- "description": "True or False. Also restore the group layout."
+ "description": "Whether the group layout and the state of other speakers in the group should also be restored."
}
}
},
@@ -129,7 +129,7 @@
},
"play_queue": {
"name": "Play queue",
- "description": "Start playing the queue from the first item.",
+ "description": "Starts playing the queue from the first item.",
"fields": {
"queue_position": {
"name": "Queue position",
@@ -153,23 +153,23 @@
"fields": {
"alarm_id": {
"name": "Alarm ID",
- "description": "ID for the alarm to be updated."
+ "description": "The ID of the alarm to be updated."
},
"time": {
"name": "Time",
- "description": "Set time for the alarm."
+ "description": "The time for the alarm."
},
"volume": {
"name": "Volume",
- "description": "Set alarm volume level."
+ "description": "The alarm volume level."
},
"enabled": {
"name": "Alarm enabled",
- "description": "Enable or disable the alarm."
+ "description": "Whether or not to enable the alarm."
},
"include_linked_zones": {
"name": "Include linked zones",
- "description": "Enable or disable including grouped rooms."
+ "description": "Whether the alarm also plays on grouped players."
}
}
},
diff --git a/homeassistant/components/starlink/binary_sensor.py b/homeassistant/components/starlink/binary_sensor.py
index e48d28dcc44..b03648e81c5 100644
--- a/homeassistant/components/starlink/binary_sensor.py
+++ b/homeassistant/components/starlink/binary_sensor.py
@@ -65,6 +65,7 @@ BINARY_SENSORS = [
key="currently_obstructed",
translation_key="currently_obstructed",
device_class=BinarySensorDeviceClass.PROBLEM,
+ entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.status["currently_obstructed"],
),
StarlinkBinarySensorEntityDescription(
@@ -114,4 +115,9 @@ BINARY_SENSORS = [
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.alert["alert_unexpected_location"],
),
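+    # Connectivity sensor: on while the dish reports its state as CONNECTED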
+ StarlinkBinarySensorEntityDescription(
+ key="connection",
+ device_class=BinarySensorDeviceClass.CONNECTIVITY,
+ value_fn=lambda data: data.status["state"] == "CONNECTED",
+ ),
]
diff --git a/homeassistant/components/steamist/manifest.json b/homeassistant/components/steamist/manifest.json
index b15d7f87312..ab81c8b5a53 100644
--- a/homeassistant/components/steamist/manifest.json
+++ b/homeassistant/components/steamist/manifest.json
@@ -16,5 +16,5 @@
"documentation": "https://www.home-assistant.io/integrations/steamist",
"iot_class": "local_polling",
"loggers": ["aiosteamist", "discovery30303"],
- "requirements": ["aiosteamist==1.0.0", "discovery30303==0.3.2"]
+ "requirements": ["aiosteamist==1.0.1", "discovery30303==0.3.3"]
}
diff --git a/homeassistant/components/switchbot/__init__.py b/homeassistant/components/switchbot/__init__.py
index 499a5073872..09bc157d4d2 100644
--- a/homeassistant/components/switchbot/__init__.py
+++ b/homeassistant/components/switchbot/__init__.py
@@ -65,6 +65,7 @@ PLATFORMS_BY_TYPE = {
SupportedModels.RELAY_SWITCH_1PM.value: [Platform.SWITCH, Platform.SENSOR],
SupportedModels.RELAY_SWITCH_1.value: [Platform.SWITCH],
SupportedModels.LEAK.value: [Platform.BINARY_SENSOR, Platform.SENSOR],
+ SupportedModels.REMOTE.value: [Platform.SENSOR],
}
CLASS_BY_DEVICE = {
SupportedModels.CEILING_LIGHT.value: switchbot.SwitchbotCeilingLight,
diff --git a/homeassistant/components/switchbot/const.py b/homeassistant/components/switchbot/const.py
index 854ab32b657..16b41d75541 100644
--- a/homeassistant/components/switchbot/const.py
+++ b/homeassistant/components/switchbot/const.py
@@ -34,6 +34,7 @@ class SupportedModels(StrEnum):
RELAY_SWITCH_1PM = "relay_switch_1pm"
RELAY_SWITCH_1 = "relay_switch_1"
LEAK = "leak"
+ REMOTE = "remote"
CONNECTABLE_SUPPORTED_MODEL_TYPES = {
@@ -60,6 +61,7 @@ NON_CONNECTABLE_SUPPORTED_MODEL_TYPES = {
SwitchbotModel.CONTACT_SENSOR: SupportedModels.CONTACT,
SwitchbotModel.MOTION_SENSOR: SupportedModels.MOTION,
SwitchbotModel.LEAK: SupportedModels.LEAK,
+ SwitchbotModel.REMOTE: SupportedModels.REMOTE,
}
SUPPORTED_MODEL_TYPES = (
diff --git a/homeassistant/components/switchbot/manifest.json b/homeassistant/components/switchbot/manifest.json
index 1b80da43e16..92a1c25d6f5 100644
--- a/homeassistant/components/switchbot/manifest.json
+++ b/homeassistant/components/switchbot/manifest.json
@@ -39,5 +39,5 @@
"documentation": "https://www.home-assistant.io/integrations/switchbot",
"iot_class": "local_push",
"loggers": ["switchbot"],
- "requirements": ["PySwitchbot==0.55.4"]
+ "requirements": ["PySwitchbot==0.56.0"]
}
diff --git a/homeassistant/components/synology_dsm/backup.py b/homeassistant/components/synology_dsm/backup.py
index 62a1b97b717..83c3455bdf1 100644
--- a/homeassistant/components/synology_dsm/backup.py
+++ b/homeassistant/components/synology_dsm/backup.py
@@ -10,7 +10,12 @@ from aiohttp import StreamReader
from synology_dsm.api.file_station import SynoFileStation
from synology_dsm.exceptions import SynologyDSMAPIErrorException
-from homeassistant.components.backup import AgentBackup, BackupAgent, BackupAgentError
+from homeassistant.components.backup import (
+ AgentBackup,
+ BackupAgent,
+ BackupAgentError,
+ suggested_filename,
+)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.aiohttp_client import ChunkAsyncStreamIterator
@@ -28,6 +33,15 @@ from .models import SynologyDSMData
LOGGER = logging.getLogger(__name__)
+def suggested_filenames(backup: AgentBackup) -> tuple[str, str]:
+ """Suggest filenames for the backup.
+
+    Returns a tuple of tar_filename and meta_filename.
+ """
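+    # Derive both filenames from the standard suggested filename, minus its extension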
+ base_name = suggested_filename(backup).rsplit(".", 1)[0]
+ return (f"{base_name}.tar", f"{base_name}_meta.json")
+
+
async def async_get_backup_agents(
hass: HomeAssistant,
) -> list[BackupAgent]:
@@ -95,6 +109,19 @@ class SynologyDSMBackupAgent(BackupAgent):
assert self.api.file_station
return self.api.file_station
+ async def _async_suggested_filenames(
+ self,
+ backup_id: str,
+ ) -> tuple[str, str]:
+ """Suggest filenames for the backup.
+
+ :param backup_id: The ID of the backup that was returned in async_list_backups.
+ :return: A tuple of tar_filename and meta_filename
+ """
+ if (backup := await self.async_get_backup(backup_id)) is None:
+ raise BackupAgentError("Backup not found")
+ return suggested_filenames(backup)
+
async def async_download_backup(
self,
backup_id: str,
@@ -105,10 +132,12 @@ class SynologyDSMBackupAgent(BackupAgent):
:param backup_id: The ID of the backup that was returned in async_list_backups.
:return: An async iterator that yields bytes.
"""
+ (filename_tar, _) = await self._async_suggested_filenames(backup_id)
+
try:
resp = await self._file_station.download_file(
path=self.path,
- filename=f"{backup_id}.tar",
+ filename=filename_tar,
)
except SynologyDSMAPIErrorException as err:
raise BackupAgentError("Failed to download backup") from err
@@ -131,11 +160,13 @@ class SynologyDSMBackupAgent(BackupAgent):
:param backup: Metadata about the backup that should be uploaded.
"""
+ (filename_tar, filename_meta) = suggested_filenames(backup)
+
# upload backup.tar file first
try:
await self._file_station.upload_file(
path=self.path,
- filename=f"{backup.backup_id}.tar",
+ filename=filename_tar,
source=await open_stream(),
create_parents=True,
)
@@ -146,7 +177,7 @@ class SynologyDSMBackupAgent(BackupAgent):
try:
await self._file_station.upload_file(
path=self.path,
- filename=f"{backup.backup_id}_meta.json",
+ filename=filename_meta,
source=json_dumps(backup.as_dict()).encode(),
)
except SynologyDSMAPIErrorException as err:
@@ -162,14 +193,27 @@ class SynologyDSMBackupAgent(BackupAgent):
:param backup_id: The ID of the backup that was returned in async_list_backups.
"""
try:
- await self._file_station.delete_file(
- path=self.path, filename=f"{backup_id}.tar"
+ (filename_tar, filename_meta) = await self._async_suggested_filenames(
+ backup_id
)
- await self._file_station.delete_file(
- path=self.path, filename=f"{backup_id}_meta.json"
- )
- except SynologyDSMAPIErrorException as err:
- raise BackupAgentError("Failed to delete the backup") from err
+ except BackupAgentError:
+            # Backup metadata could not be found, so we can't delete the backup
+ return
+
+ for filename in (filename_tar, filename_meta):
+ try:
+ await self._file_station.delete_file(path=self.path, filename=filename)
+ except SynologyDSMAPIErrorException as err:
+ err_args: dict = err.args[0]
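+                # Ignore "no such file or directory" (API code 900 with detail code 408); re-raise anything else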
+ if int(err_args.get("code", 0)) != 900 or (
+ (err_details := err_args.get("details")) is not None
+ and isinstance(err_details, list)
+ and isinstance(err_details[0], dict)
+ and int(err_details[0].get("code", 0))
+ != 408 # No such file or directory
+ ):
+ LOGGER.error("Failed to delete backup: %s", err)
+ raise BackupAgentError("Failed to delete backup") from err
async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
"""List backups."""
diff --git a/homeassistant/components/system_health/__init__.py b/homeassistant/components/system_health/__init__.py
index ce80f6303d9..7d2224fc6fc 100644
--- a/homeassistant/components/system_health/__init__.py
+++ b/homeassistant/components/system_health/__init__.py
@@ -3,7 +3,7 @@
from __future__ import annotations
import asyncio
-from collections.abc import Awaitable, Callable
+from collections.abc import AsyncGenerator, Awaitable, Callable
import dataclasses
from datetime import datetime
import logging
@@ -101,6 +101,57 @@ async def get_integration_info(
return result
+async def _registered_domain_data(
+ hass: HomeAssistant,
+) -> AsyncGenerator[tuple[str, dict[str, Any]]]:
+ registrations: dict[str, SystemHealthRegistration] = hass.data[DOMAIN]
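+    # Gather the registered system health info concurrently and pair each result with its domain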
+ for domain, domain_data in zip(
+ registrations,
+ await asyncio.gather(
+ *(
+ get_integration_info(hass, registration)
+ for registration in registrations.values()
+ )
+ ),
+ strict=False,
+ ):
+ yield domain, domain_data
+
+
+async def get_info(hass: HomeAssistant) -> dict[str, dict[str, str]]:
+ """Get the full set of system health information."""
+ domains: dict[str, dict[str, Any]] = {}
+
+ async def _get_info_value(value: Any) -> Any:
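+        # Info values may be plain data or coroutines; await the latter and report failures as strings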
+ if not asyncio.iscoroutine(value):
+ return value
+ try:
+ return await value
+ except Exception as exception:
+ _LOGGER.exception("Error fetching system info for %s - %s", domain, key)
+ return f"Exception: {exception}"
+
+ async for domain, domain_data in _registered_domain_data(hass):
+ domain_info: dict[str, Any] = {}
+ for key, value in domain_data["info"].items():
+ info_value = await _get_info_value(value)
+
+ if isinstance(info_value, datetime):
+ domain_info[key] = info_value.isoformat()
+ elif (
+ isinstance(info_value, dict)
+ and "type" in info_value
+ and info_value["type"] == "failed"
+ ):
+ domain_info[key] = f"Failed: {info_value.get('error', 'unknown')}"
+ else:
+ domain_info[key] = info_value
+
+ domains[domain] = domain_info
+
+ return domains
+
+
@callback
def _format_value(val: Any) -> Any:
"""Format a system health value."""
@@ -115,20 +166,10 @@ async def handle_info(
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
) -> None:
"""Handle an info request via a subscription."""
- registrations: dict[str, SystemHealthRegistration] = hass.data[DOMAIN]
data = {}
pending_info: dict[tuple[str, str], asyncio.Task] = {}
- for domain, domain_data in zip(
- registrations,
- await asyncio.gather(
- *(
- get_integration_info(hass, registration)
- for registration in registrations.values()
- )
- ),
- strict=False,
- ):
+ async for domain, domain_data in _registered_domain_data(hass):
for key, value in domain_data["info"].items():
if asyncio.iscoroutine(value):
value = asyncio.create_task(value)
diff --git a/homeassistant/components/tado/climate.py b/homeassistant/components/tado/climate.py
index c8eaec76255..db7b1823bd9 100644
--- a/homeassistant/components/tado/climate.py
+++ b/homeassistant/components/tado/climate.py
@@ -506,7 +506,7 @@ class TadoClimate(TadoZoneEntity, ClimateEntity):
offset,
)
- self._tado.set_temperature_offset(self._device_id, offset)
+ await self._tado.set_temperature_offset(self._device_id, offset)
await self.coordinator.async_request_refresh()
async def async_set_temperature(self, **kwargs: Any) -> None:
diff --git a/homeassistant/components/tado/strings.json b/homeassistant/components/tado/strings.json
index 735fe34bcf4..f1550517457 100644
--- a/homeassistant/components/tado/strings.json
+++ b/homeassistant/components/tado/strings.json
@@ -14,7 +14,7 @@
},
"reconfigure": {
"title": "Reconfigure your Tado",
- "description": "Reconfigure the entry, for your account: `{username}`.",
+ "description": "Reconfigure the entry for your account: `{username}`.",
"data": {
"password": "[%key:common::config_flow::data::password%]"
},
@@ -25,7 +25,7 @@
},
"error": {
"unknown": "[%key:common::config_flow::error::unknown%]",
- "no_homes": "There are no homes linked to this tado account.",
+ "no_homes": "There are no homes linked to this Tado account.",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
}
@@ -33,7 +33,7 @@
"options": {
"step": {
"init": {
- "description": "Fallback mode lets you choose when to fallback to Smart Schedule from your manual zone overlay. (NEXT_TIME_BLOCK:= Change at next Smart Schedule change; MANUAL:= Dont change until you cancel; TADO_DEFAULT:= Change based on your setting in Tado App).",
+        "description": "Fallback mode lets you choose when to fall back to Smart Schedule from your manual zone overlay. (NEXT_TIME_BLOCK:= Change at next Smart Schedule change; MANUAL:= Don't change until you cancel; TADO_DEFAULT:= Change based on your setting in the Tado app).",
"data": {
"fallback": "Choose fallback mode."
},
@@ -102,11 +102,11 @@
},
"time_period": {
"name": "Time period",
- "description": "Choose this or Overlay. Set the time period for the change if you want to be specific. Alternatively use Overlay."
+ "description": "Choose this or 'Overlay'. Set the time period for the change if you want to be specific."
},
"requested_overlay": {
"name": "Overlay",
- "description": "Choose this or Time Period. Allows you to choose an overlay. MANUAL:=Overlay until user removes; NEXT_TIME_BLOCK:=Overlay until next timeblock; TADO_DEFAULT:=Overlay based on tado app setting."
+ "description": "Choose this or 'Time period'. Allows you to choose an overlay. MANUAL:=Overlay until user removes; NEXT_TIME_BLOCK:=Overlay until next timeblock; TADO_DEFAULT:=Overlay based on Tado app setting."
}
}
},
@@ -151,8 +151,8 @@
},
"issues": {
"water_heater_fallback": {
- "title": "Tado Water Heater entities now support fallback options",
- "description": "Due to added support for water heaters entities, these entities may use different overlay. Please configure integration entity and tado app water heater zone overlay options. Otherwise, please configure the integration entity and Tado app water heater zone overlay options (under Settings -> Rooms & Devices -> Hot Water)."
+ "title": "Tado water heater entities now support fallback options",
+      "description": "Due to added support for water heater entities, these entities may use a different overlay. Please configure the integration entity and Tado app water heater zone overlay options (under Settings -> Rooms & Devices -> Hot Water)."
}
}
}
diff --git a/homeassistant/components/tesla_fleet/manifest.json b/homeassistant/components/tesla_fleet/manifest.json
index aecc6a04af3..330745316d7 100644
--- a/homeassistant/components/tesla_fleet/manifest.json
+++ b/homeassistant/components/tesla_fleet/manifest.json
@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/tesla_fleet",
"iot_class": "cloud_polling",
"loggers": ["tesla-fleet-api"],
- "requirements": ["tesla-fleet-api==0.9.2"]
+ "requirements": ["tesla-fleet-api==0.9.8"]
}
diff --git a/homeassistant/components/tesla_fleet/sensor.py b/homeassistant/components/tesla_fleet/sensor.py
index 3e05e7e723b..c1d38bf85c5 100644
--- a/homeassistant/components/tesla_fleet/sensor.py
+++ b/homeassistant/components/tesla_fleet/sensor.py
@@ -303,8 +303,8 @@ VEHICLE_TIME_DESCRIPTIONS: tuple[TeslaFleetTimeEntityDescription, ...] = (
),
)
-ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
- SensorEntityDescription(
+ENERGY_LIVE_DESCRIPTIONS: tuple[TeslaFleetSensorEntityDescription, ...] = (
+ TeslaFleetSensorEntityDescription(
key="solar_power",
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfPower.WATT,
@@ -312,7 +312,7 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
suggested_display_precision=2,
device_class=SensorDeviceClass.POWER,
),
- SensorEntityDescription(
+ TeslaFleetSensorEntityDescription(
key="energy_left",
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
@@ -321,7 +321,7 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
device_class=SensorDeviceClass.ENERGY_STORAGE,
entity_category=EntityCategory.DIAGNOSTIC,
),
- SensorEntityDescription(
+ TeslaFleetSensorEntityDescription(
key="total_pack_energy",
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
@@ -331,14 +331,15 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
),
- SensorEntityDescription(
+ TeslaFleetSensorEntityDescription(
key="percentage_charged",
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=PERCENTAGE,
device_class=SensorDeviceClass.BATTERY,
suggested_display_precision=2,
+ value_fn=lambda value: value or 0,
),
- SensorEntityDescription(
+ TeslaFleetSensorEntityDescription(
key="battery_power",
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfPower.WATT,
@@ -346,7 +347,7 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
suggested_display_precision=2,
device_class=SensorDeviceClass.POWER,
),
- SensorEntityDescription(
+ TeslaFleetSensorEntityDescription(
key="load_power",
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfPower.WATT,
@@ -354,7 +355,7 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
suggested_display_precision=2,
device_class=SensorDeviceClass.POWER,
),
- SensorEntityDescription(
+ TeslaFleetSensorEntityDescription(
key="grid_power",
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfPower.WATT,
@@ -362,7 +363,7 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
suggested_display_precision=2,
device_class=SensorDeviceClass.POWER,
),
- SensorEntityDescription(
+ TeslaFleetSensorEntityDescription(
key="grid_services_power",
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfPower.WATT,
@@ -370,7 +371,7 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
suggested_display_precision=2,
device_class=SensorDeviceClass.POWER,
),
- SensorEntityDescription(
+ TeslaFleetSensorEntityDescription(
key="generator_power",
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfPower.WATT,
@@ -379,7 +380,7 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
device_class=SensorDeviceClass.POWER,
entity_registry_enabled_default=False,
),
- SensorEntityDescription(
+ TeslaFleetSensorEntityDescription(
key="island_status",
options=[
"island_status_unknown",
@@ -550,12 +551,12 @@ class TeslaFleetVehicleTimeSensorEntity(TeslaFleetVehicleEntity, SensorEntity):
class TeslaFleetEnergyLiveSensorEntity(TeslaFleetEnergyLiveEntity, SensorEntity):
"""Base class for Tesla Fleet energy site metric sensors."""
- entity_description: SensorEntityDescription
+ entity_description: TeslaFleetSensorEntityDescription
def __init__(
self,
data: TeslaFleetEnergyData,
- description: SensorEntityDescription,
+ description: TeslaFleetSensorEntityDescription,
) -> None:
"""Initialize the sensor."""
self.entity_description = description
@@ -563,7 +564,7 @@ class TeslaFleetEnergyLiveSensorEntity(TeslaFleetEnergyLiveEntity, SensorEntity)
def _async_update_attrs(self) -> None:
"""Update the attributes of the sensor."""
- self._attr_native_value = self._value
+ self._attr_native_value = self.entity_description.value_fn(self._value)
class TeslaFleetEnergyHistorySensorEntity(TeslaFleetEnergyHistoryEntity, SensorEntity):
diff --git a/homeassistant/components/teslemetry/manifest.json b/homeassistant/components/teslemetry/manifest.json
index 5774d4da228..bfa0d831a16 100644
--- a/homeassistant/components/teslemetry/manifest.json
+++ b/homeassistant/components/teslemetry/manifest.json
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/teslemetry",
"iot_class": "cloud_polling",
"loggers": ["tesla-fleet-api"],
- "requirements": ["tesla-fleet-api==0.9.2", "teslemetry-stream==0.6.10"]
+ "requirements": ["tesla-fleet-api==0.9.8", "teslemetry-stream==0.6.10"]
}
diff --git a/homeassistant/components/teslemetry/select.py b/homeassistant/components/teslemetry/select.py
index baf1d80ac6c..d2e90a4f5c9 100644
--- a/homeassistant/components/teslemetry/select.py
+++ b/homeassistant/components/teslemetry/select.py
@@ -2,18 +2,27 @@
from __future__ import annotations
-from collections.abc import Callable
+from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from itertools import chain
+from typing import Any
+from tesla_fleet_api import VehicleSpecific
from tesla_fleet_api.const import EnergyExportMode, EnergyOperationMode, Scope, Seat
+from teslemetry_stream import TeslemetryStreamVehicle
from homeassistant.components.select import SelectEntity, SelectEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.helpers.restore_state import RestoreEntity
from . import TeslemetryConfigEntry
-from .entity import TeslemetryEnergyInfoEntity, TeslemetryVehicleEntity
+from .entity import (
+ TeslemetryEnergyInfoEntity,
+ TeslemetryRootEntity,
+ TeslemetryVehicleEntity,
+ TeslemetryVehicleStreamEntity,
+)
from .helpers import handle_command, handle_vehicle_command
from .models import TeslemetryEnergyData, TeslemetryVehicleData
@@ -24,53 +33,136 @@ HIGH = "high"
PARALLEL_UPDATES = 0
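+# Map each option label to the integer level expected by the vehicle API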
+LEVEL = {OFF: 0, LOW: 1, MEDIUM: 2, HIGH: 3}
+
@dataclass(frozen=True, kw_only=True)
-class SeatHeaterDescription(SelectEntityDescription):
+class TeslemetrySelectEntityDescription(SelectEntityDescription):
"""Seat Heater entity description."""
- position: Seat
- available_fn: Callable[[TeslemetrySeatHeaterSelectEntity], bool] = lambda _: True
+ select_fn: Callable[[VehicleSpecific, int], Awaitable[Any]]
+ supported_fn: Callable[[dict], bool] = lambda _: True
+ streaming_listener: (
+ Callable[
+ [TeslemetryStreamVehicle, Callable[[int | None], None]],
+ Callable[[], None],
+ ]
+ | None
+ ) = None
+ options: list[str]
-SEAT_HEATER_DESCRIPTIONS: tuple[SeatHeaterDescription, ...] = (
- SeatHeaterDescription(
+VEHICLE_DESCRIPTIONS: tuple[TeslemetrySelectEntityDescription, ...] = (
+ TeslemetrySelectEntityDescription(
key="climate_state_seat_heater_left",
- position=Seat.FRONT_LEFT,
+ select_fn=lambda api, level: api.remote_seat_heater_request(
+ Seat.FRONT_LEFT, level
+ ),
+ streaming_listener=lambda x, y: x.listen_SeatHeaterLeft(y),
+ options=[
+ OFF,
+ LOW,
+ MEDIUM,
+ HIGH,
+ ],
),
- SeatHeaterDescription(
+ TeslemetrySelectEntityDescription(
key="climate_state_seat_heater_right",
- position=Seat.FRONT_RIGHT,
+ select_fn=lambda api, level: api.remote_seat_heater_request(
+ Seat.FRONT_RIGHT, level
+ ),
+ streaming_listener=lambda x, y: x.listen_SeatHeaterRight(y),
+ options=[
+ OFF,
+ LOW,
+ MEDIUM,
+ HIGH,
+ ],
),
- SeatHeaterDescription(
+ TeslemetrySelectEntityDescription(
key="climate_state_seat_heater_rear_left",
- position=Seat.REAR_LEFT,
- available_fn=lambda self: self.get("vehicle_config_rear_seat_heaters") != 0,
+ select_fn=lambda api, level: api.remote_seat_heater_request(
+ Seat.REAR_LEFT, level
+ ),
+ supported_fn=lambda data: data.get("vehicle_config_rear_seat_heaters") != 0,
+ streaming_listener=lambda x, y: x.listen_SeatHeaterRearLeft(y),
entity_registry_enabled_default=False,
+ options=[
+ OFF,
+ LOW,
+ MEDIUM,
+ HIGH,
+ ],
),
- SeatHeaterDescription(
+ TeslemetrySelectEntityDescription(
key="climate_state_seat_heater_rear_center",
- position=Seat.REAR_CENTER,
- available_fn=lambda self: self.get("vehicle_config_rear_seat_heaters") != 0,
+ select_fn=lambda api, level: api.remote_seat_heater_request(
+ Seat.REAR_CENTER, level
+ ),
+ supported_fn=lambda data: data.get("vehicle_config_rear_seat_heaters") != 0,
+ streaming_listener=lambda x, y: x.listen_SeatHeaterRearCenter(y),
entity_registry_enabled_default=False,
+ options=[
+ OFF,
+ LOW,
+ MEDIUM,
+ HIGH,
+ ],
),
- SeatHeaterDescription(
+ TeslemetrySelectEntityDescription(
key="climate_state_seat_heater_rear_right",
- position=Seat.REAR_RIGHT,
- available_fn=lambda self: self.get("vehicle_config_rear_seat_heaters") != 0,
+ select_fn=lambda api, level: api.remote_seat_heater_request(
+ Seat.REAR_RIGHT, level
+ ),
+ supported_fn=lambda data: data.get("vehicle_config_rear_seat_heaters") != 0,
+ streaming_listener=lambda x, y: x.listen_SeatHeaterRearRight(y),
entity_registry_enabled_default=False,
+ options=[
+ OFF,
+ LOW,
+ MEDIUM,
+ HIGH,
+ ],
),
- SeatHeaterDescription(
+ TeslemetrySelectEntityDescription(
key="climate_state_seat_heater_third_row_left",
- position=Seat.THIRD_LEFT,
- available_fn=lambda self: self.get("vehicle_config_third_row_seats") != "None",
+ select_fn=lambda api, level: api.remote_seat_heater_request(
+ Seat.THIRD_LEFT, level
+ ),
+        supported_fn=lambda data: data.get("vehicle_config_third_row_seats") != "None",
entity_registry_enabled_default=False,
+ options=[
+ OFF,
+ LOW,
+ MEDIUM,
+ HIGH,
+ ],
),
- SeatHeaterDescription(
+ TeslemetrySelectEntityDescription(
key="climate_state_seat_heater_third_row_right",
- position=Seat.THIRD_RIGHT,
- available_fn=lambda self: self.get("vehicle_config_third_row_seats") != "None",
+ select_fn=lambda api, level: api.remote_seat_heater_request(
+ Seat.THIRD_RIGHT, level
+ ),
+        supported_fn=lambda data: data.get("vehicle_config_third_row_seats") != "None",
entity_registry_enabled_default=False,
+ options=[
+ OFF,
+ LOW,
+ MEDIUM,
+ HIGH,
+ ],
+ ),
+ TeslemetrySelectEntityDescription(
+ key="climate_state_steering_wheel_heat_level",
+ select_fn=lambda api, level: api.remote_steering_wheel_heat_level_request(
+ level
+ ),
+ streaming_listener=lambda x, y: x.listen_HvacSteeringWheelHeatLevel(y),
+ options=[
+ OFF,
+ LOW,
+ HIGH,
+ ],
),
)
@@ -85,17 +177,18 @@ async def async_setup_entry(
async_add_entities(
chain(
(
- TeslemetrySeatHeaterSelectEntity(
+ TeslemetryPollingSelectEntity(
vehicle, description, entry.runtime_data.scopes
)
- for description in SEAT_HEATER_DESCRIPTIONS
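+                    # Use polling for pre-2021 vehicles, firmware older than 2024.26, or fields without a streaming listener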
+ if vehicle.api.pre2021
+ or vehicle.firmware < "2024.26"
+ or description.streaming_listener is None
+ else TeslemetryStreamingSelectEntity(
+ vehicle, description, entry.runtime_data.scopes
+ )
+ for description in VEHICLE_DESCRIPTIONS
for vehicle in entry.runtime_data.vehicles
- if description.key in vehicle.coordinator.data
- ),
- (
- TeslemetryWheelHeaterSelectEntity(vehicle, entry.runtime_data.scopes)
- for vehicle in entry.runtime_data.vehicles
- if vehicle.coordinator.data.get("climate_state_steering_wheel_heater")
+ if description.supported_fn(vehicle.coordinator.data)
),
(
TeslemetryOperationSelectEntity(energysite, entry.runtime_data.scopes)
@@ -112,22 +205,31 @@ async def async_setup_entry(
)
-class TeslemetrySeatHeaterSelectEntity(TeslemetryVehicleEntity, SelectEntity):
- """Select entity for vehicle seat heater."""
+class TeslemetrySelectEntity(TeslemetryRootEntity, SelectEntity):
+ """Parent vehicle select entity class."""
- entity_description: SeatHeaterDescription
+ entity_description: TeslemetrySelectEntityDescription
+ _climate: bool = False
- _attr_options = [
- OFF,
- LOW,
- MEDIUM,
- HIGH,
- ]
+ async def async_select_option(self, option: str) -> None:
+ """Change the selected option."""
+ self.raise_for_scope(Scope.VEHICLE_CMDS)
+ level = LEVEL[option]
+ # AC must be on to turn on heaters
+ if level and not self._climate:
+ await handle_vehicle_command(self.api.auto_conditioning_start())
+ await handle_vehicle_command(self.entity_description.select_fn(self.api, level))
+ self._attr_current_option = option
+ self.async_write_ha_state()
+
+
+class TeslemetryPollingSelectEntity(TeslemetryVehicleEntity, TeslemetrySelectEntity):
+ """Base polling vehicle select entity class."""
def __init__(
self,
data: TeslemetryVehicleData,
- description: SeatHeaterDescription,
+ description: TeslemetrySelectEntityDescription,
scopes: list[Scope],
) -> None:
"""Initialize the vehicle seat select entity."""
@@ -137,72 +239,63 @@ class TeslemetrySeatHeaterSelectEntity(TeslemetryVehicleEntity, SelectEntity):
def _async_update_attrs(self) -> None:
"""Handle updated data from the coordinator."""
- self._attr_available = self.entity_description.available_fn(self)
- value = self._value
- if not isinstance(value, int):
+ self._climate = bool(self.get("climate_state_is_climate_on"))
+ if not isinstance(self._value, int):
self._attr_current_option = None
else:
- self._attr_current_option = self._attr_options[value]
-
- async def async_select_option(self, option: str) -> None:
- """Change the selected option."""
- self.raise_for_scope(Scope.VEHICLE_CMDS)
- await self.wake_up_if_asleep()
- level = self._attr_options.index(option)
- # AC must be on to turn on seat heater
- if level and not self.get("climate_state_is_climate_on"):
- await handle_vehicle_command(self.api.auto_conditioning_start())
- await handle_vehicle_command(
- self.api.remote_seat_heater_request(self.entity_description.position, level)
- )
- self._attr_current_option = option
- self.async_write_ha_state()
+ self._attr_current_option = self.entity_description.options[self._value]
-class TeslemetryWheelHeaterSelectEntity(TeslemetryVehicleEntity, SelectEntity):
- """Select entity for vehicle steering wheel heater."""
-
- _attr_options = [
- OFF,
- LOW,
- HIGH,
- ]
+class TeslemetryStreamingSelectEntity(
+ TeslemetryVehicleStreamEntity, TeslemetrySelectEntity, RestoreEntity
+):
+ """Base streaming vehicle select entity class."""
def __init__(
self,
data: TeslemetryVehicleData,
+ description: TeslemetrySelectEntityDescription,
scopes: list[Scope],
) -> None:
- """Initialize the vehicle steering wheel select entity."""
+ """Initialize the vehicle seat select entity."""
+ self.entity_description = description
self.scoped = Scope.VEHICLE_CMDS in scopes
- super().__init__(
- data,
- "climate_state_steering_wheel_heat_level",
+ self._attr_current_option = None
+ super().__init__(data, description.key)
+
+ async def async_added_to_hass(self) -> None:
+ """Handle entity which will be added."""
+ await super().async_added_to_hass()
+
+ # Restore state
+ if (state := await self.async_get_last_state()) is not None:
+ if state.state in self.entity_description.options:
+ self._attr_current_option = state.state
+
+ # Listen for streaming data
+ assert self.entity_description.streaming_listener is not None
+ self.async_on_remove(
+ self.entity_description.streaming_listener(
+ self.vehicle.stream_vehicle, self._value_callback
+ )
)
- def _async_update_attrs(self) -> None:
- """Handle updated data from the coordinator."""
+ self.async_on_remove(
+ self.vehicle.stream_vehicle.listen_HvacACEnabled(self._climate_callback)
+ )
- value = self._value
- if not isinstance(value, int):
+ def _value_callback(self, value: int | None) -> None:
+ """Update the value of the entity."""
+ if value is None:
self._attr_current_option = None
else:
- self._attr_current_option = self._attr_options[value]
-
- async def async_select_option(self, option: str) -> None:
- """Change the selected option."""
- self.raise_for_scope(Scope.VEHICLE_CMDS)
- await self.wake_up_if_asleep()
- level = self._attr_options.index(option)
- # AC must be on to turn on steering wheel heater
- if level and not self.get("climate_state_is_climate_on"):
- await handle_vehicle_command(self.api.auto_conditioning_start())
- await handle_vehicle_command(
- self.api.remote_steering_wheel_heat_level_request(level)
- )
- self._attr_current_option = option
+ self._attr_current_option = self.entity_description.options[value]
self.async_write_ha_state()
+ def _climate_callback(self, value: bool | None) -> None:
+        """Update the cached climate state of the entity."""
+ self._climate = bool(value)
+
class TeslemetryOperationSelectEntity(TeslemetryEnergyInfoEntity, SelectEntity):
"""Select entity for operation mode select entities."""
diff --git a/homeassistant/components/tessie/manifest.json b/homeassistant/components/tessie/manifest.json
index 8f7c9890664..ef4d366c779 100644
--- a/homeassistant/components/tessie/manifest.json
+++ b/homeassistant/components/tessie/manifest.json
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/tessie",
"iot_class": "cloud_polling",
"loggers": ["tessie", "tesla-fleet-api"],
- "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.9.2"]
+ "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.9.8"]
}
diff --git a/homeassistant/components/tessie/sensor.py b/homeassistant/components/tessie/sensor.py
index 7f09cef2acd..323fa76ef1f 100644
--- a/homeassistant/components/tessie/sensor.py
+++ b/homeassistant/components/tessie/sensor.py
@@ -258,6 +258,7 @@ DESCRIPTIONS: tuple[TessieSensorEntityDescription, ...] = (
),
)
+
ENERGY_LIVE_DESCRIPTIONS: tuple[TessieSensorEntityDescription, ...] = (
TessieSensorEntityDescription(
key="solar_power",
@@ -292,6 +293,7 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[TessieSensorEntityDescription, ...] = (
native_unit_of_measurement=PERCENTAGE,
device_class=SensorDeviceClass.BATTERY,
suggested_display_precision=2,
+ value_fn=lambda value: value or 0,
),
TessieSensorEntityDescription(
key="battery_power",
diff --git a/homeassistant/components/thermopro/manifest.json b/homeassistant/components/thermopro/manifest.json
index 2c066d785ca..6027e4bc99c 100644
--- a/homeassistant/components/thermopro/manifest.json
+++ b/homeassistant/components/thermopro/manifest.json
@@ -24,5 +24,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/thermopro",
"iot_class": "local_push",
- "requirements": ["thermopro-ble==0.10.1"]
+ "requirements": ["thermopro-ble==0.11.0"]
}
diff --git a/homeassistant/components/todoist/calendar.py b/homeassistant/components/todoist/calendar.py
index 94581439ae9..8c61394d300 100644
--- a/homeassistant/components/todoist/calendar.py
+++ b/homeassistant/components/todoist/calendar.py
@@ -541,9 +541,8 @@ class TodoistProjectData:
return None
# All task Labels (optional parameter).
- task[LABELS] = [
- label.name for label in self._labels if label.name in data.labels
- ]
+ labels = data.labels or []
+ task[LABELS] = [label.name for label in self._labels if label.name in labels]
if self._label_whitelist and (
not any(label in task[LABELS] for label in self._label_whitelist)
):
diff --git a/homeassistant/components/todoist/manifest.json b/homeassistant/components/todoist/manifest.json
index 72d76108353..791f5642aad 100644
--- a/homeassistant/components/todoist/manifest.json
+++ b/homeassistant/components/todoist/manifest.json
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/todoist",
"iot_class": "cloud_polling",
"loggers": ["todoist"],
- "requirements": ["todoist-api-python==2.1.2"]
+ "requirements": ["todoist-api-python==2.1.7"]
}
diff --git a/homeassistant/components/tolo/manifest.json b/homeassistant/components/tolo/manifest.json
index 14125a857f6..613fc810683 100644
--- a/homeassistant/components/tolo/manifest.json
+++ b/homeassistant/components/tolo/manifest.json
@@ -11,5 +11,5 @@
"documentation": "https://www.home-assistant.io/integrations/tolo",
"iot_class": "local_polling",
"loggers": ["tololib"],
- "requirements": ["tololib==1.1.0"]
+ "requirements": ["tololib==1.2.2"]
}
diff --git a/homeassistant/components/tplink/sensor.py b/homeassistant/components/tplink/sensor.py
index 38aab26cf8b..9b21ba775a9 100644
--- a/homeassistant/components/tplink/sensor.py
+++ b/homeassistant/components/tplink/sensor.py
@@ -135,13 +135,17 @@ SENSOR_DESCRIPTIONS: tuple[TPLinkSensorEntityDescription, ...] = (
TPLinkSensorEntityDescription(
key="clean_area",
device_class=SensorDeviceClass.AREA,
+ state_class=SensorStateClass.MEASUREMENT,
),
TPLinkSensorEntityDescription(
+ entity_registry_enabled_default=False,
key="clean_progress",
+ state_class=SensorStateClass.MEASUREMENT,
),
TPLinkSensorEntityDescription(
key="last_clean_time",
device_class=SensorDeviceClass.DURATION,
+ state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfTime.SECONDS,
suggested_unit_of_measurement=UnitOfTime.MINUTES,
convert_fn=_TOTAL_SECONDS_METHOD_CALLER,
@@ -155,20 +159,26 @@ SENSOR_DESCRIPTIONS: tuple[TPLinkSensorEntityDescription, ...] = (
device_class=SensorDeviceClass.TIMESTAMP,
),
TPLinkSensorEntityDescription(
+ entity_registry_enabled_default=False,
key="total_clean_time",
device_class=SensorDeviceClass.DURATION,
+ state_class=SensorStateClass.TOTAL_INCREASING,
native_unit_of_measurement=UnitOfTime.SECONDS,
suggested_unit_of_measurement=UnitOfTime.MINUTES,
convert_fn=_TOTAL_SECONDS_METHOD_CALLER,
),
TPLinkSensorEntityDescription(
+ entity_registry_enabled_default=False,
key="total_clean_area",
device_class=SensorDeviceClass.AREA,
+ state_class=SensorStateClass.TOTAL_INCREASING,
),
TPLinkSensorEntityDescription(
key="total_clean_count",
+ state_class=SensorStateClass.TOTAL_INCREASING,
),
TPLinkSensorEntityDescription(
+ entity_registry_enabled_default=False,
key="main_brush_remaining",
device_class=SensorDeviceClass.DURATION,
native_unit_of_measurement=UnitOfTime.SECONDS,
@@ -176,6 +186,7 @@ SENSOR_DESCRIPTIONS: tuple[TPLinkSensorEntityDescription, ...] = (
convert_fn=_TOTAL_SECONDS_METHOD_CALLER,
),
TPLinkSensorEntityDescription(
+ entity_registry_enabled_default=False,
key="main_brush_used",
device_class=SensorDeviceClass.DURATION,
native_unit_of_measurement=UnitOfTime.SECONDS,
@@ -183,6 +194,7 @@ SENSOR_DESCRIPTIONS: tuple[TPLinkSensorEntityDescription, ...] = (
convert_fn=_TOTAL_SECONDS_METHOD_CALLER,
),
TPLinkSensorEntityDescription(
+ entity_registry_enabled_default=False,
key="side_brush_remaining",
device_class=SensorDeviceClass.DURATION,
native_unit_of_measurement=UnitOfTime.SECONDS,
@@ -190,6 +202,7 @@ SENSOR_DESCRIPTIONS: tuple[TPLinkSensorEntityDescription, ...] = (
convert_fn=_TOTAL_SECONDS_METHOD_CALLER,
),
TPLinkSensorEntityDescription(
+ entity_registry_enabled_default=False,
key="side_brush_used",
device_class=SensorDeviceClass.DURATION,
native_unit_of_measurement=UnitOfTime.SECONDS,
@@ -197,6 +210,7 @@ SENSOR_DESCRIPTIONS: tuple[TPLinkSensorEntityDescription, ...] = (
convert_fn=_TOTAL_SECONDS_METHOD_CALLER,
),
TPLinkSensorEntityDescription(
+ entity_registry_enabled_default=False,
key="filter_remaining",
device_class=SensorDeviceClass.DURATION,
native_unit_of_measurement=UnitOfTime.SECONDS,
@@ -204,6 +218,7 @@ SENSOR_DESCRIPTIONS: tuple[TPLinkSensorEntityDescription, ...] = (
convert_fn=_TOTAL_SECONDS_METHOD_CALLER,
),
TPLinkSensorEntityDescription(
+ entity_registry_enabled_default=False,
key="filter_used",
device_class=SensorDeviceClass.DURATION,
native_unit_of_measurement=UnitOfTime.SECONDS,
@@ -211,6 +226,7 @@ SENSOR_DESCRIPTIONS: tuple[TPLinkSensorEntityDescription, ...] = (
convert_fn=_TOTAL_SECONDS_METHOD_CALLER,
),
TPLinkSensorEntityDescription(
+ entity_registry_enabled_default=False,
key="sensor_remaining",
device_class=SensorDeviceClass.DURATION,
native_unit_of_measurement=UnitOfTime.SECONDS,
@@ -218,6 +234,7 @@ SENSOR_DESCRIPTIONS: tuple[TPLinkSensorEntityDescription, ...] = (
convert_fn=_TOTAL_SECONDS_METHOD_CALLER,
),
TPLinkSensorEntityDescription(
+ entity_registry_enabled_default=False,
key="sensor_used",
device_class=SensorDeviceClass.DURATION,
native_unit_of_measurement=UnitOfTime.SECONDS,
@@ -225,6 +242,7 @@ SENSOR_DESCRIPTIONS: tuple[TPLinkSensorEntityDescription, ...] = (
convert_fn=_TOTAL_SECONDS_METHOD_CALLER,
),
TPLinkSensorEntityDescription(
+ entity_registry_enabled_default=False,
key="charging_contacts_remaining",
device_class=SensorDeviceClass.DURATION,
native_unit_of_measurement=UnitOfTime.SECONDS,
@@ -232,6 +250,7 @@ SENSOR_DESCRIPTIONS: tuple[TPLinkSensorEntityDescription, ...] = (
convert_fn=_TOTAL_SECONDS_METHOD_CALLER,
),
TPLinkSensorEntityDescription(
+ entity_registry_enabled_default=False,
key="charging_contacts_used",
device_class=SensorDeviceClass.DURATION,
native_unit_of_measurement=UnitOfTime.SECONDS,
diff --git a/homeassistant/components/tuya/sensor.py b/homeassistant/components/tuya/sensor.py
index f766c744998..756564c6a03 100644
--- a/homeassistant/components/tuya/sensor.py
+++ b/homeassistant/components/tuya/sensor.py
@@ -45,7 +45,7 @@ class TuyaSensorEntityDescription(SensorEntityDescription):
subkey: str | None = None
-# Commonly used battery sensors, that are re-used in the sensors down below.
+# Commonly used battery sensors, that are reused in the sensors down below.
BATTERY_SENSORS: tuple[TuyaSensorEntityDescription, ...] = (
TuyaSensorEntityDescription(
key=DPCode.BATTERY_PERCENTAGE,
diff --git a/homeassistant/components/ublockout/__init__.py b/homeassistant/components/ublockout/__init__.py
new file mode 100644
index 00000000000..87127e331da
--- /dev/null
+++ b/homeassistant/components/ublockout/__init__.py
@@ -0,0 +1 @@
+"""Virtual integration: Ublockout."""
diff --git a/homeassistant/components/ublockout/manifest.json b/homeassistant/components/ublockout/manifest.json
new file mode 100644
index 00000000000..d5ef46b8ad2
--- /dev/null
+++ b/homeassistant/components/ublockout/manifest.json
@@ -0,0 +1,6 @@
+{
+ "domain": "ublockout",
+ "name": "Ublockout",
+ "integration_type": "virtual",
+ "supported_by": "motion_blinds"
+}
diff --git a/homeassistant/components/unifiprotect/manifest.json b/homeassistant/components/unifiprotect/manifest.json
index 69c7f8b205b..a4bb6d20841 100644
--- a/homeassistant/components/unifiprotect/manifest.json
+++ b/homeassistant/components/unifiprotect/manifest.json
@@ -40,7 +40,7 @@
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["uiprotect", "unifi_discovery"],
- "requirements": ["uiprotect==7.5.0", "unifi-discovery==1.2.0"],
+ "requirements": ["uiprotect==7.5.1", "unifi-discovery==1.2.0"],
"ssdp": [
{
"manufacturer": "Ubiquiti Networks",
diff --git a/homeassistant/components/unifiprotect/strings.json b/homeassistant/components/unifiprotect/strings.json
index cde8c88d169..d5a7d615399 100644
--- a/homeassistant/components/unifiprotect/strings.json
+++ b/homeassistant/components/unifiprotect/strings.json
@@ -3,8 +3,8 @@
"flow_title": "{name} ({ip_address})",
"step": {
"user": {
- "title": "UniFi Protect Setup",
- "description": "You will need a local user created in your UniFi OS Console to log in with. Ubiquiti Cloud Users will not work. For more information: {local_user_documentation_url}",
+ "title": "UniFi Protect setup",
+ "description": "You will need a local user created in your UniFi OS Console to log in with. Ubiquiti Cloud users will not work. For more information: {local_user_documentation_url}",
"data": {
"host": "[%key:common::config_flow::data::host%]",
"port": "[%key:common::config_flow::data::port%]",
@@ -17,17 +17,17 @@
}
},
"reauth_confirm": {
- "title": "UniFi Protect Reauth",
+ "title": "UniFi Protect reauth",
"data": {
- "host": "IP/Host of UniFi Protect Server",
+ "host": "IP/Host of UniFi Protect server",
"port": "[%key:common::config_flow::data::port%]",
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]"
}
},
"discovery_confirm": {
- "title": "UniFi Protect Discovered",
- "description": "Do you want to set up {name} ({ip_address})? You will need a local user created in your UniFi OS Console to log in with. Ubiquiti Cloud Users will not work. For more information: {local_user_documentation_url}",
+ "title": "UniFi Protect discovered",
+ "description": "Do you want to set up {name} ({ip_address})? You will need a local user created in your UniFi OS Console to log in with. Ubiquiti Cloud users will not work. For more information: {local_user_documentation_url}",
"data": {
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]"
@@ -38,7 +38,7 @@
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"protect_version": "Minimum required version is v1.20.0. Please upgrade UniFi Protect and then retry.",
- "cloud_user": "Ubiquiti Cloud users are not Supported. Please use a Local only user."
+ "cloud_user": "Ubiquiti Cloud users are not supported. Please use a local user instead."
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
@@ -49,12 +49,12 @@
"options": {
"step": {
"init": {
- "title": "UniFi Protect Options",
+ "title": "UniFi Protect options",
"description": "Realtime metrics option should only be enabled if you have enabled the diagnostics sensors and want them updated in realtime. If not enabled, they will only update once every 15 minutes.",
"data": {
"disable_rtsp": "Disable the RTSP stream",
"all_updates": "Realtime metrics (WARNING: Greatly increases CPU usage)",
- "override_connection_host": "Override Connection Host",
+ "override_connection_host": "Override connection host",
"max_media": "Max number of event to load for Media Browser (increases RAM usage)",
"allow_ea_channel": "Allow Early Access versions of Protect (WARNING: Will mark your integration as unsupported)"
}
@@ -68,7 +68,7 @@
"step": {
"start": {
"title": "UniFi Protect Early Access enabled",
- "description": "You are either running an Early Access version of UniFi Protect (v{version}) or opt-ed into a release channel that is not the Official Release Channel.\n\nAs these Early Access releases may not be tested yet, using it may cause the UniFi Protect integration to behave unexpectedly. [Read more about Early Access and Home Assistant]({learn_more}).\n\nSubmit to dismiss this message."
+          "description": "You are either running an Early Access version of UniFi Protect (v{version}) or have opted into a release channel that is not the official release channel.\n\nAs these Early Access releases may not be tested yet, using it may cause the UniFi Protect integration to behave unexpectedly. [Read more about Early Access and Home Assistant]({learn_more}).\n\nSubmit to dismiss this message."
},
"confirm": {
"title": "[%key:component::unifiprotect::issues::ea_channel_warning::fix_flow::step::start::title%]",
@@ -123,8 +123,8 @@
}
},
"deprecate_hdr_switch": {
- "title": "HDR Mode Switch Deprecated",
- "description": "UniFi Protect v3 added a new state for HDR (auto). As a result, the HDR Mode Switch has been replaced with an HDR Mode Select, and it is deprecated.\n\nBelow are the detected automations or scripts that use one or more of the deprecated entities:\n{items}\nThe above list may be incomplete and it does not include any template usages inside of dashboards. Please update any templates, automations or scripts accordingly."
+ "title": "HDR Mode switch deprecated",
+ "description": "UniFi Protect v3 added a new state for HDR (auto). As a result, the HDR Mode switch has been replaced with an HDR Mode select, and it is deprecated.\n\nBelow are the detected automations or scripts that use one or more of the deprecated entities:\n{items}\nThe above list may be incomplete and it does not include any template usages inside of dashboards. Please update any templates, automations or scripts accordingly."
}
},
"entity": {
@@ -171,22 +171,22 @@
},
"services": {
"add_doorbell_text": {
- "name": "Add custom doorbell text",
+ "name": "Add doorbell text",
"description": "Adds a new custom message for doorbells.",
"fields": {
"device_id": {
"name": "UniFi Protect NVR",
- "description": "Any device from the UniFi Protect instance you want to change. In case you have multiple Protect Instances."
+          "description": "Any device from the UniFi Protect instance you want to change, in case you have multiple Protect instances."
},
"message": {
"name": "Custom message",
- "description": "New custom message to add for doorbells. Must be less than 30 characters."
+ "description": "New custom message to add. Must be less than 30 characters."
}
}
},
"remove_doorbell_text": {
- "name": "Remove custom doorbell text",
- "description": "Removes an existing message for doorbells.",
+ "name": "Remove doorbell text",
+ "description": "Removes an existing custom message for doorbells.",
"fields": {
"device_id": {
"name": "[%key:component::unifiprotect::services::add_doorbell_text::fields::device_id::name%]",
@@ -194,13 +194,13 @@
},
"message": {
"name": "[%key:component::unifiprotect::services::add_doorbell_text::fields::message::name%]",
- "description": "Existing custom message to remove for doorbells."
+ "description": "Existing custom message to remove."
}
}
},
"set_chime_paired_doorbells": {
"name": "Set chime paired doorbells",
- "description": "Use to set the paired doorbell(s) with a smart chime.",
+ "description": "Pairs doorbell(s) with a smart chime.",
"fields": {
"device_id": {
"name": "Chime",
@@ -213,22 +213,22 @@
}
},
"remove_privacy_zone": {
- "name": "Remove camera privacy zone",
- "description": "Use to remove a privacy zone from a camera.",
+ "name": "Remove privacy zone",
+ "description": "Removes a privacy zone from a camera.",
"fields": {
"device_id": {
"name": "Camera",
- "description": "Camera you want to remove privacy zone from."
+ "description": "Camera you want to remove the privacy zone from."
},
"name": {
- "name": "Privacy Zone Name",
+ "name": "Privacy zone",
"description": "The name of the zone to remove."
}
}
},
"get_user_keyring_info": {
- "name": "Retrieve Keyring Details for Users",
- "description": "Fetch a detailed list of users with NFC and fingerprint associations for automations.",
+ "name": "Get user keyring info",
+ "description": "Fetches a detailed list of users with NFC and fingerprint associations for automations.",
"fields": {
"device_id": {
"name": "UniFi Protect NVR",
diff --git a/homeassistant/components/upb/__init__.py b/homeassistant/components/upb/__init__.py
index c9f3a2df105..ebfc8eaeece 100644
--- a/homeassistant/components/upb/__init__.py
+++ b/homeassistant/components/upb/__init__.py
@@ -27,6 +27,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
file = config_entry.data[CONF_FILE_PATH]
upb = upb_lib.UpbPim({"url": url, "UPStartExportFile": file})
+ await upb.load_upstart_file()
await upb.async_connect()
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN][config_entry.entry_id] = {"upb": upb}
diff --git a/homeassistant/components/upb/config_flow.py b/homeassistant/components/upb/config_flow.py
index 788a0336d73..af1ee7d5ab0 100644
--- a/homeassistant/components/upb/config_flow.py
+++ b/homeassistant/components/upb/config_flow.py
@@ -40,8 +40,9 @@ async def _validate_input(data):
url = _make_url_from_data(data)
upb = upb_lib.UpbPim({"url": url, "UPStartExportFile": file_path})
-
- await upb.async_connect(_connected_callback)
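+    # Register the connected callback explicitly and load the UPStart export before connecting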
+ upb.add_handler("connected", _connected_callback)
+ await upb.load_upstart_file()
+ await upb.async_connect()
if not upb.config_ok:
_LOGGER.error("Missing or invalid UPB file: %s", file_path)
diff --git a/homeassistant/components/upb/manifest.json b/homeassistant/components/upb/manifest.json
index 1e61747b3f1..e5da4c4d621 100644
--- a/homeassistant/components/upb/manifest.json
+++ b/homeassistant/components/upb/manifest.json
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/upb",
"iot_class": "local_push",
"loggers": ["upb_lib"],
- "requirements": ["upb-lib==0.5.9"]
+ "requirements": ["upb-lib==0.6.0"]
}
diff --git a/homeassistant/components/upb/services.yaml b/homeassistant/components/upb/services.yaml
index cf415705d72..985ce11c436 100644
--- a/homeassistant/components/upb/services.yaml
+++ b/homeassistant/components/upb/services.yaml
@@ -49,7 +49,7 @@ link_deactivate:
target:
entity:
integration: upb
- domain: light
+ domain: scene
link_goto:
target:
diff --git a/homeassistant/components/vesync/__init__.py b/homeassistant/components/vesync/__init__.py
index 27e626faeac..1c55d932425 100644
--- a/homeassistant/components/vesync/__init__.py
+++ b/homeassistant/components/vesync/__init__.py
@@ -7,6 +7,7 @@ from pyvesync import VeSync
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform
from homeassistant.core import HomeAssistant, ServiceCall
+from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .common import async_generate_device_list
@@ -91,3 +92,37 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
hass.data.pop(DOMAIN)
return unload_ok
+
+
+async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
+ """Migrate old entry."""
+ _LOGGER.debug(
+ "Migrating VeSync config entry: %s minor version: %s",
+ config_entry.version,
+ config_entry.minor_version,
+ )
+ if config_entry.minor_version == 1:
+ # Migrate switch/outlets entity to a new unique ID
+ _LOGGER.debug("Migrating VeSync config entry from version 1 to version 2")
+ entity_registry = er.async_get(hass)
+ registry_entries = er.async_entries_for_config_entry(
+ entity_registry, config_entry.entry_id
+ )
+ for reg_entry in registry_entries:
+ if "-" not in reg_entry.unique_id and reg_entry.entity_id.startswith(
+ Platform.SWITCH
+ ):
+ _LOGGER.debug(
+ "Migrating switch/outlet entity from unique_id: %s to unique_id: %s",
+ reg_entry.unique_id,
+ reg_entry.unique_id + "-device_status",
+ )
+ entity_registry.async_update_entity(
+ reg_entry.entity_id,
+ new_unique_id=reg_entry.unique_id + "-device_status",
+ )
+ else:
+ _LOGGER.debug("Skipping entity with unique_id: %s", reg_entry.unique_id)
+ hass.config_entries.async_update_entry(config_entry, minor_version=2)
+
+ return True
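The migration pairs with the MINOR_VERSION = 2 bump in the config flow further down: entries stored with minor version 1 get their switch/outlet unique IDs suffixed so they match the new entity-description key, and the entry is then stamped with minor version 2 so the migration runs only once. An illustration of the rewrite (the unique ID value is hypothetical):

    # Old outlet/switch unique IDs contain no "-", which is what the migration keys on.
    old_unique_id = "ABC123SERIAL"
    new_unique_id = f"{old_unique_id}-device_status"  # matches the "device_status" description key
    assert new_unique_id == "ABC123SERIAL-device_status"
    # Entities whose unique_id already contains "-" are skipped and logged.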
diff --git a/homeassistant/components/vesync/common.py b/homeassistant/components/vesync/common.py
index e2f4e1db2e4..f817c1d0714 100644
--- a/homeassistant/components/vesync/common.py
+++ b/homeassistant/components/vesync/common.py
@@ -4,6 +4,8 @@ import logging
from pyvesync import VeSync
from pyvesync.vesyncbasedevice import VeSyncBaseDevice
+from pyvesync.vesyncoutlet import VeSyncOutlet
+from pyvesync.vesyncswitch import VeSyncWallSwitch
from homeassistant.core import HomeAssistant
@@ -54,3 +56,15 @@ def is_humidifier(device: VeSyncBaseDevice) -> bool:
"""Check if the device represents a humidifier."""
return isinstance(device, VeSyncHumidifierDevice)
+
+
+def is_outlet(device: VeSyncBaseDevice) -> bool:
+ """Check if the device represents an outlet."""
+
+ return isinstance(device, VeSyncOutlet)
+
+
+def is_wall_switch(device: VeSyncBaseDevice) -> bool:
+ """Check if the device represents a wall switch, note this doessn't include dimming switches."""
+
+ return isinstance(device, VeSyncWallSwitch)
diff --git a/homeassistant/components/vesync/config_flow.py b/homeassistant/components/vesync/config_flow.py
index e19c46e5490..07543440e91 100644
--- a/homeassistant/components/vesync/config_flow.py
+++ b/homeassistant/components/vesync/config_flow.py
@@ -24,6 +24,7 @@ class VeSyncFlowHandler(ConfigFlow, domain=DOMAIN):
"""Handle a config flow."""
VERSION = 1
+ MINOR_VERSION = 2
@callback
def _show_form(self, errors: dict[str, str] | None = None) -> ConfigFlowResult:
diff --git a/homeassistant/components/vesync/humidifier.py b/homeassistant/components/vesync/humidifier.py
index 40ea015f4d8..5afe7360673 100644
--- a/homeassistant/components/vesync/humidifier.py
+++ b/homeassistant/components/vesync/humidifier.py
@@ -157,11 +157,15 @@ class VeSyncHumidifierHA(VeSyncBaseEntity, HumidifierEntity):
"""Set the mode of the device."""
if mode not in self.available_modes:
raise HomeAssistantError(
- "{mode} is not one of the valid available modes: {self.available_modes}"
+ f"{mode} is not one of the valid available modes: {self.available_modes}"
)
if not self.device.set_humidity_mode(self._get_vs_mode(mode)):
raise HomeAssistantError(f"An error occurred while setting mode {mode}.")
+ if mode == MODE_SLEEP:
+ # We successfully changed the mode. Consider it a success even if the display operation fails.
+ self.device.set_display(False)
+
# Changing mode while humidifier is off actually turns it on, as per the app. But
# the library does not seem to update the device_status. It is also possible that
# other attributes get updated. Scheduling a forced refresh to get device status.
diff --git a/homeassistant/components/vesync/manifest.json b/homeassistant/components/vesync/manifest.json
index cdb5ed96652..b3697844f19 100644
--- a/homeassistant/components/vesync/manifest.json
+++ b/homeassistant/components/vesync/manifest.json
@@ -12,5 +12,5 @@
"documentation": "https://www.home-assistant.io/integrations/vesync",
"iot_class": "cloud_polling",
"loggers": ["pyvesync"],
- "requirements": ["pyvesync==2.1.16"]
+ "requirements": ["pyvesync==2.1.17"]
}
diff --git a/homeassistant/components/vesync/switch.py b/homeassistant/components/vesync/switch.py
index ef8e6c6051f..3d2dc8a8e96 100644
--- a/homeassistant/components/vesync/switch.py
+++ b/homeassistant/components/vesync/switch.py
@@ -1,29 +1,59 @@
"""Support for VeSync switches."""
+from collections.abc import Callable
+from dataclasses import dataclass
import logging
-from typing import Any
+from typing import Any, Final
from pyvesync.vesyncbasedevice import VeSyncBaseDevice
-from homeassistant.components.switch import SwitchEntity
+from homeassistant.components.switch import (
+ SwitchDeviceClass,
+ SwitchEntity,
+ SwitchEntityDescription,
+)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback
-from .const import DEV_TYPE_TO_HA, DOMAIN, VS_COORDINATOR, VS_DEVICES, VS_DISCOVERY
+from .common import is_outlet, is_wall_switch
+from .const import DOMAIN, VS_COORDINATOR, VS_DEVICES, VS_DISCOVERY
from .coordinator import VeSyncDataCoordinator
from .entity import VeSyncBaseEntity
_LOGGER = logging.getLogger(__name__)
+@dataclass(frozen=True, kw_only=True)
+class VeSyncSwitchEntityDescription(SwitchEntityDescription):
+ """A class that describes custom switch entities."""
+
+ is_on: Callable[[VeSyncBaseDevice], bool]
+ exists_fn: Callable[[VeSyncBaseDevice], bool]
+ on_fn: Callable[[VeSyncBaseDevice], bool]
+ off_fn: Callable[[VeSyncBaseDevice], bool]
+
+
+SENSOR_DESCRIPTIONS: Final[tuple[VeSyncSwitchEntityDescription, ...]] = (
+ VeSyncSwitchEntityDescription(
+ key="device_status",
+ is_on=lambda device: device.device_status == "on",
+ # Other types of wall switches support dimming. Those use the light platform.
+ exists_fn=lambda device: is_wall_switch(device) or is_outlet(device),
+ name=None,
+ on_fn=lambda device: device.turn_on(),
+ off_fn=lambda device: device.turn_off(),
+ ),
+)
+
+
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
- """Set up switches."""
+ """Set up switch platform."""
coordinator = hass.data[DOMAIN][VS_COORDINATOR]
@@ -45,53 +75,46 @@ def _setup_entities(
async_add_entities,
coordinator: VeSyncDataCoordinator,
):
- """Check if device is a switch and add entity."""
- entities: list[VeSyncBaseSwitch] = []
- for dev in devices:
- if DEV_TYPE_TO_HA.get(dev.device_type) == "outlet":
- entities.append(VeSyncSwitchHA(dev, coordinator))
- elif DEV_TYPE_TO_HA.get(dev.device_type) == "switch":
- entities.append(VeSyncLightSwitch(dev, coordinator))
-
- async_add_entities(entities, update_before_add=True)
+ """Check if device is online and add entity."""
+ async_add_entities(
+ VeSyncSwitchEntity(dev, description, coordinator)
+ for dev in devices
+ for description in SENSOR_DESCRIPTIONS
+ if description.exists_fn(dev)
+ )
-class VeSyncBaseSwitch(VeSyncBaseEntity, SwitchEntity):
- """Base class for VeSync switch Device Representations."""
+class VeSyncSwitchEntity(SwitchEntity, VeSyncBaseEntity):
+ """VeSync switch entity class."""
- _attr_name = None
+ entity_description: VeSyncSwitchEntityDescription
- def turn_on(self, **kwargs: Any) -> None:
- """Turn the device on."""
- self.device.turn_on()
+ def __init__(
+ self,
+ device: VeSyncBaseDevice,
+ description: VeSyncSwitchEntityDescription,
+ coordinator: VeSyncDataCoordinator,
+ ) -> None:
+ """Initialize the sensor."""
+ super().__init__(device, coordinator)
+ self.entity_description = description
+ self._attr_unique_id = f"{super().unique_id}-{description.key}"
+ if is_outlet(self.device):
+ self._attr_device_class = SwitchDeviceClass.OUTLET
+ elif is_wall_switch(self.device):
+ self._attr_device_class = SwitchDeviceClass.SWITCH
@property
- def is_on(self) -> bool:
- """Return True if device is on."""
- return self.device.device_status == "on"
+ def is_on(self) -> bool | None:
+ """Return the entity value to represent the entity state."""
+ return self.entity_description.is_on(self.device)
def turn_off(self, **kwargs: Any) -> None:
- """Turn the device off."""
- self.device.turn_off()
+ """Turn the entity off."""
+ if self.entity_description.off_fn(self.device):
+ self.schedule_update_ha_state()
-
-class VeSyncSwitchHA(VeSyncBaseSwitch, SwitchEntity):
- """Representation of a VeSync switch."""
-
- def __init__(
- self, plug: VeSyncBaseDevice, coordinator: VeSyncDataCoordinator
- ) -> None:
- """Initialize the VeSync switch device."""
- super().__init__(plug, coordinator)
- self.smartplug = plug
-
-
-class VeSyncLightSwitch(VeSyncBaseSwitch, SwitchEntity):
- """Handle representation of VeSync Light Switch."""
-
- def __init__(
- self, switch: VeSyncBaseDevice, coordinator: VeSyncDataCoordinator
- ) -> None:
- """Initialize Light Switch device class."""
- super().__init__(switch, coordinator)
- self.switch = switch
+ def turn_on(self, **kwargs: Any) -> None:
+ """Turn the entity on."""
+ if self.entity_description.on_fn(self.device):
+ self.schedule_update_ha_state()
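Because the platform is now description driven, an additional on/off feature could be exposed by appending another VeSyncSwitchEntityDescription rather than adding an entity class. A hypothetical sketch (the "display" key, the display_state attribute, and set_display() availability are assumptions, not part of this diff):

    HYPOTHETICAL_DISPLAY_DESCRIPTION = VeSyncSwitchEntityDescription(
        key="display",  # hypothetical key
        translation_key="display",
        is_on=lambda device: bool(getattr(device, "display_state", False)),
        exists_fn=lambda device: hasattr(device, "set_display"),
        on_fn=lambda device: device.set_display(True),
        off_fn=lambda device: device.set_display(False),
    )
    # Appended to SENSOR_DESCRIPTIONS, this would yield one extra switch per matching
    # device with unique_id "<device unique_id>-display".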
diff --git a/homeassistant/components/vicare/binary_sensor.py b/homeassistant/components/vicare/binary_sensor.py
index 61a5abce942..9d216404156 100644
--- a/homeassistant/components/vicare/binary_sensor.py
+++ b/homeassistant/components/vicare/binary_sensor.py
@@ -106,6 +106,12 @@ GLOBAL_SENSORS: tuple[ViCareBinarySensorEntityDescription, ...] = (
device_class=BinarySensorDeviceClass.RUNNING,
value_getter=lambda api: api.getDomesticHotWaterPumpActive(),
),
+ ViCareBinarySensorEntityDescription(
+ key="one_time_charge",
+ translation_key="one_time_charge",
+ device_class=BinarySensorDeviceClass.RUNNING,
+ value_getter=lambda api: api.getOneTimeCharge(),
+ ),
)
diff --git a/homeassistant/components/vicare/icons.json b/homeassistant/components/vicare/icons.json
index 52148b1fa32..c54be7af0d5 100644
--- a/homeassistant/components/vicare/icons.json
+++ b/homeassistant/components/vicare/icons.json
@@ -18,6 +18,9 @@
},
"domestic_hot_water_pump": {
"default": "mdi:pump"
+ },
+ "one_time_charge": {
+ "default": "mdi:shower-head"
}
},
"button": {
diff --git a/homeassistant/components/vicare/strings.json b/homeassistant/components/vicare/strings.json
index 26ca0f5a264..50eeaf038e0 100644
--- a/homeassistant/components/vicare/strings.json
+++ b/homeassistant/components/vicare/strings.json
@@ -63,6 +63,9 @@
},
"domestic_hot_water_pump": {
"name": "DHW pump"
+ },
+ "one_time_charge": {
+ "name": "One-time charge"
}
},
"button": {
diff --git a/homeassistant/components/vodafone_station/__init__.py b/homeassistant/components/vodafone_station/__init__.py
index b4c44ea9130..871afe09a2e 100644
--- a/homeassistant/components/vodafone_station/__init__.py
+++ b/homeassistant/components/vodafone_station/__init__.py
@@ -17,7 +17,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry.data[CONF_HOST],
entry.data[CONF_USERNAME],
entry.data[CONF_PASSWORD],
- entry.unique_id,
+ entry,
)
await coordinator.async_config_entry_first_refresh()
diff --git a/homeassistant/components/vodafone_station/coordinator.py b/homeassistant/components/vodafone_station/coordinator.py
index de794488040..cd640d10cb6 100644
--- a/homeassistant/components/vodafone_station/coordinator.py
+++ b/homeassistant/components/vodafone_station/coordinator.py
@@ -8,13 +8,16 @@ from typing import Any
from aiovodafone import VodafoneStationDevice, VodafoneStationSercommApi, exceptions
from homeassistant.components.device_tracker import DEFAULT_CONSIDER_HOME
+from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
+from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import dt as dt_util
from .const import _LOGGER, DOMAIN, SCAN_INTERVAL
+from .helpers import cleanup_device_tracker
CONSIDER_HOME_SECONDS = DEFAULT_CONSIDER_HOME.total_seconds()
@@ -39,13 +42,15 @@ class UpdateCoordinatorDataType:
class VodafoneStationRouter(DataUpdateCoordinator[UpdateCoordinatorDataType]):
"""Queries router running Vodafone Station firmware."""
+ config_entry: ConfigEntry
+
def __init__(
self,
hass: HomeAssistant,
host: str,
username: str,
password: str,
- config_entry_unique_id: str | None,
+ config_entry: ConfigEntry,
) -> None:
"""Initialize the scanner."""
@@ -53,14 +58,26 @@ class VodafoneStationRouter(DataUpdateCoordinator[UpdateCoordinatorDataType]):
self.api = VodafoneStationSercommApi(host, username, password)
# Last resort as no MAC or S/N can be retrieved via API
- self._id = config_entry_unique_id
+ self._id = config_entry.unique_id
super().__init__(
hass=hass,
logger=_LOGGER,
name=f"{DOMAIN}-{host}-coordinator",
update_interval=timedelta(seconds=SCAN_INTERVAL),
+ config_entry=config_entry,
)
+ device_reg = dr.async_get(self.hass)
+ device_list = dr.async_entries_for_config_entry(
+ device_reg, self.config_entry.entry_id
+ )
+
+ self.previous_devices = {
+ connection[1].upper()
+ for device in device_list
+ for connection in device.connections
+ if connection[0] == dr.CONNECTION_NETWORK_MAC
+ }
def _calculate_update_time_and_consider_home(
self, device: VodafoneStationDevice, utc_point_in_time: datetime
@@ -125,6 +142,18 @@ class VodafoneStationRouter(DataUpdateCoordinator[UpdateCoordinatorDataType]):
)
for dev_info in (raw_data_devices).values()
}
+ current_devices = set(data_devices)
+ _LOGGER.debug(
+ "Loaded current %s devices: %s", len(current_devices), current_devices
+ )
+ if stale_devices := self.previous_devices - current_devices:
+ _LOGGER.debug(
+ "Found %s stale devices: %s", len(stale_devices), stale_devices
+ )
+ await cleanup_device_tracker(self.hass, self.config_entry, data_devices)
+
+ self.previous_devices = current_devices
+
return UpdateCoordinatorDataType(data_devices, data_sensors)
@property
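The coordinator now remembers the MAC addresses seen on the previous refresh and compares them with the latest poll; anything that disappeared is considered stale and handed to cleanup_device_tracker(). The bookkeeping reduces to a set difference (MAC addresses below are illustrative):

    previous_devices = {"AA:BB:CC:DD:EE:FF", "11:22:33:44:55:66"}
    current_devices = {"AA:BB:CC:DD:EE:FF"}  # devices returned by the latest poll
    stale_devices = previous_devices - current_devices
    assert stale_devices == {"11:22:33:44:55:66"}  # these trigger the cleanup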
diff --git a/homeassistant/components/vodafone_station/device_tracker.py b/homeassistant/components/vodafone_station/device_tracker.py
index 3e4d7763bff..4af0b85e003 100644
--- a/homeassistant/components/vodafone_station/device_tracker.py
+++ b/homeassistant/components/vodafone_station/device_tracker.py
@@ -61,6 +61,7 @@ class VodafoneStationTracker(CoordinatorEntity[VodafoneStationRouter], ScannerEn
"""Representation of a Vodafone Station device."""
_attr_translation_key = "device_tracker"
+ _attr_has_entity_name = True
mac_address: str
def __init__(
@@ -72,7 +73,9 @@ class VodafoneStationTracker(CoordinatorEntity[VodafoneStationRouter], ScannerEn
mac = device_info.device.mac
self._attr_mac_address = mac
self._attr_unique_id = mac
- self._attr_hostname = device_info.device.name or mac.replace(":", "_")
+ self._attr_hostname = self._attr_name = device_info.device.name or mac.replace(
+ ":", "_"
+ )
@property
def _device_info(self) -> VodafoneStationDeviceInfo:
diff --git a/homeassistant/components/vodafone_station/helpers.py b/homeassistant/components/vodafone_station/helpers.py
new file mode 100644
index 00000000000..aa0fda3f6be
--- /dev/null
+++ b/homeassistant/components/vodafone_station/helpers.py
@@ -0,0 +1,72 @@
+"""Vodafone Station helpers."""
+
+from typing import Any
+
+from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers import device_registry as dr, entity_registry as er
+
+from .const import _LOGGER
+
+
+async def cleanup_device_tracker(
+ hass: HomeAssistant, config_entry: ConfigEntry, devices: dict[str, Any]
+) -> None:
+ """Cleanup stale device tracker."""
+ entity_reg: er.EntityRegistry = er.async_get(hass)
+
+ entities_removed: bool = False
+
+ device_hosts_macs: set[str] = set()
+ device_hosts_names: set[str] = set()
+ for mac, device_info in devices.items():
+ device_hosts_macs.add(mac)
+ device_hosts_names.add(device_info.device.name)
+
+ for entry in er.async_entries_for_config_entry(entity_reg, config_entry.entry_id):
+ if entry.domain != DEVICE_TRACKER_DOMAIN:
+ continue
+ entry_name = entry.name or entry.original_name
+ entry_host = entry_name.partition(" ")[0] if entry_name else None
+ entry_mac = entry.unique_id.partition("_")[0]
+
+ # Some devices, mainly routers, allow changing the hostname of connected devices.
+ # This can leave entities out of sync with the names shown in the device UI.
+ if (
+ entry_host
+ and entry_host in device_hosts_names
+ and entry_mac in device_hosts_macs
+ ):
+ _LOGGER.debug(
+ "Skipping entity %s [mac=%s, host=%s]",
+ entry_name,
+ entry_mac,
+ entry_host,
+ )
+ continue
+ # Entity is removed so that at the next coordinator update
+ # the correct one will be created
+ _LOGGER.info("Removing entity: %s", entry_name)
+ entity_reg.async_remove(entry.entity_id)
+ entities_removed = True
+
+ if entities_removed:
+ _async_remove_empty_devices(hass, entity_reg, config_entry)
+
+
+def _async_remove_empty_devices(
+ hass: HomeAssistant, entity_reg: er.EntityRegistry, config_entry: ConfigEntry
+) -> None:
+ """Remove devices with no entities."""
+
+ device_reg = dr.async_get(hass)
+ device_list = dr.async_entries_for_config_entry(device_reg, config_entry.entry_id)
+ for device_entry in device_list:
+ if not er.async_entries_for_device(
+ entity_reg,
+ device_entry.id,
+ include_disabled_entities=True,
+ ):
+ _LOGGER.info("Removing device: %s", device_entry.name)
+ device_reg.async_remove_device(device_entry.id)
diff --git a/homeassistant/components/webmin/coordinator.py b/homeassistant/components/webmin/coordinator.py
index 45261787e75..261139faf10 100644
--- a/homeassistant/components/webmin/coordinator.py
+++ b/homeassistant/components/webmin/coordinator.py
@@ -22,6 +22,7 @@ from .helpers import get_instance_from_options, get_sorted_mac_addresses
class WebminUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
"""The Webmin data update coordinator."""
+ config_entry: ConfigEntry
mac_address: str
unique_id: str
@@ -29,7 +30,11 @@ class WebminUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
"""Initialize the Webmin data update coordinator."""
super().__init__(
- hass, logger=LOGGER, name=DOMAIN, update_interval=DEFAULT_SCAN_INTERVAL
+ hass,
+ logger=LOGGER,
+ config_entry=config_entry,
+ name=DOMAIN,
+ update_interval=DEFAULT_SCAN_INTERVAL,
)
self.instance, base_url = get_instance_from_options(hass, config_entry.options)
@@ -53,7 +58,6 @@ class WebminUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
(DOMAIN, format_mac(mac_address)) for mac_address in mac_addresses
}
else:
- assert self.config_entry
self.unique_id = self.config_entry.entry_id
async def _async_update_data(self) -> dict[str, Any]:
diff --git a/homeassistant/components/workday/manifest.json b/homeassistant/components/workday/manifest.json
index 4b9d072f747..cbb11a06aec 100644
--- a/homeassistant/components/workday/manifest.json
+++ b/homeassistant/components/workday/manifest.json
@@ -7,5 +7,5 @@
"iot_class": "local_polling",
"loggers": ["holidays"],
"quality_scale": "internal",
- "requirements": ["holidays==0.65"]
+ "requirements": ["holidays==0.66"]
}
diff --git a/homeassistant/components/yale/manifest.json b/homeassistant/components/yale/manifest.json
index f1cde31d066..5c8e98b1e6e 100644
--- a/homeassistant/components/yale/manifest.json
+++ b/homeassistant/components/yale/manifest.json
@@ -13,5 +13,5 @@
"documentation": "https://www.home-assistant.io/integrations/yale",
"iot_class": "cloud_push",
"loggers": ["socketio", "engineio", "yalexs"],
- "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.6"]
+ "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.7"]
}
diff --git a/homeassistant/components/yalexs_ble/manifest.json b/homeassistant/components/yalexs_ble/manifest.json
index 15b11719fdb..c44f0fdd1e9 100644
--- a/homeassistant/components/yalexs_ble/manifest.json
+++ b/homeassistant/components/yalexs_ble/manifest.json
@@ -12,5 +12,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/yalexs_ble",
"iot_class": "local_push",
- "requirements": ["yalexs-ble==2.5.6"]
+ "requirements": ["yalexs-ble==2.5.7"]
}
diff --git a/homeassistant/components/yolink/strings.json b/homeassistant/components/yolink/strings.json
index cbb092405d7..8ec7612fd73 100644
--- a/homeassistant/components/yolink/strings.json
+++ b/homeassistant/components/yolink/strings.json
@@ -6,7 +6,7 @@
},
"reauth_confirm": {
"title": "[%key:common::config_flow::title::reauth%]",
- "description": "The yolink integration needs to re-authenticate your account"
+ "description": "The YoLink integration needs to re-authenticate your account"
}
},
"abort": {
@@ -99,11 +99,11 @@
"services": {
"play_on_speaker_hub": {
"name": "Play on SpeakerHub",
- "description": "Convert text to audio play on YoLink SpeakerHub",
+ "description": "Converts text to speech for playback on a YoLink SpeakerHub",
"fields": {
"target_device": {
- "name": "SpeakerHub Device",
- "description": "SpeakerHub Device"
+ "name": "SpeakerHub device",
+ "description": "SpeakerHub device for audio playback."
},
"message": {
"name": "Text message",
@@ -115,7 +115,7 @@
},
"volume": {
"name": "Volume",
- "description": "Override the speaker volume during playback of this message only."
+ "description": "Overrides the speaker volume during playback of this message only."
},
"repeat": {
"name": "Repeat",
diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py
index 3c8a1d40dc2..d0a8e821f8d 100644
--- a/homeassistant/generated/config_flows.py
+++ b/homeassistant/generated/config_flows.py
@@ -289,6 +289,7 @@ FLOWS = {
"inkbird",
"insteon",
"intellifire",
+ "iometer",
"ios",
"iotawatt",
"iotty",
diff --git a/homeassistant/generated/dhcp.py b/homeassistant/generated/dhcp.py
index b9d51ac1006..3dba5a98f3c 100644
--- a/homeassistant/generated/dhcp.py
+++ b/homeassistant/generated/dhcp.py
@@ -616,6 +616,10 @@ DHCP: Final[list[dict[str, str | bool]]] = [
"hostname": "hub*",
"macaddress": "286D97*",
},
+ {
+ "domain": "smlight",
+ "registered_devices": True,
+ },
{
"domain": "solaredge",
"hostname": "target",
diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json
index 49546265f17..026eab30f8f 100644
--- a/homeassistant/generated/integrations.json
+++ b/homeassistant/generated/integrations.json
@@ -2521,6 +2521,11 @@
"config_flow": false,
"iot_class": "local_polling"
},
+ "heicko": {
+ "name": "Heicko",
+ "integration_type": "virtual",
+ "supported_by": "motion_blinds"
+ },
"heiwa": {
"name": "Heiwa",
"integration_type": "virtual",
@@ -2924,6 +2929,12 @@
"config_flow": false,
"iot_class": "cloud_push"
},
+ "iometer": {
+ "name": "IOmeter",
+ "integration_type": "device",
+ "config_flow": true,
+ "iot_class": "local_polling"
+ },
"ios": {
"name": "Home Assistant iOS",
"integration_type": "hub",
@@ -5806,6 +5817,11 @@
"config_flow": true,
"iot_class": "cloud_polling"
},
+ "smart_rollos": {
+ "name": "Smart Rollos",
+ "integration_type": "virtual",
+ "supported_by": "motion_blinds"
+ },
"smarther": {
"name": "Smarther",
"integration_type": "virtual",
@@ -6741,6 +6757,11 @@
"integration_type": "virtual",
"supported_by": "overkiz"
},
+ "ublockout": {
+ "name": "Ublockout",
+ "integration_type": "virtual",
+ "supported_by": "motion_blinds"
+ },
"uk_transport": {
"name": "UK Transport",
"integration_type": "hub",
diff --git a/homeassistant/generated/zeroconf.py b/homeassistant/generated/zeroconf.py
index be15d88aec2..8244f19660f 100644
--- a/homeassistant/generated/zeroconf.py
+++ b/homeassistant/generated/zeroconf.py
@@ -614,6 +614,11 @@ ZEROCONF = {
"domain": "homewizard",
},
],
+ "_iometer._tcp.local.": [
+ {
+ "domain": "iometer",
+ },
+ ],
"_ipp._tcp.local.": [
{
"domain": "ipp",
diff --git a/homeassistant/helpers/aiohttp_client.py b/homeassistant/helpers/aiohttp_client.py
index b5f5ee9a961..3d8dc247857 100644
--- a/homeassistant/helpers/aiohttp_client.py
+++ b/homeassistant/helpers/aiohttp_client.py
@@ -15,7 +15,7 @@ import aiohttp
from aiohttp import web
from aiohttp.hdrs import CONTENT_TYPE, USER_AGENT
from aiohttp.web_exceptions import HTTPBadGateway, HTTPGatewayTimeout
-from aiohttp_asyncmdnsresolver.api import AsyncMDNSResolver
+from aiohttp_asyncmdnsresolver.api import AsyncDualMDNSResolver
from homeassistant import config_entries
from homeassistant.components import zeroconf
@@ -377,5 +377,5 @@ def _async_get_connector(
@callback
-def _async_make_resolver(hass: HomeAssistant) -> AsyncMDNSResolver:
- return AsyncMDNSResolver(async_zeroconf=zeroconf.async_get_async_zeroconf(hass))
+def _async_make_resolver(hass: HomeAssistant) -> AsyncDualMDNSResolver:
+ return AsyncDualMDNSResolver(async_zeroconf=zeroconf.async_get_async_zeroconf(hass))
diff --git a/homeassistant/helpers/debounce.py b/homeassistant/helpers/debounce.py
index 83555b56dcb..c46c6806d5d 100644
--- a/homeassistant/helpers/debounce.py
+++ b/homeassistant/helpers/debounce.py
@@ -146,6 +146,10 @@ class Debouncer[_R_co]:
"""Cancel any scheduled call, and prevent new runs."""
self._shutdown_requested = True
self.async_cancel()
+ # Release hard references to parent function
+ # https://github.com/home-assistant/core/issues/137237
+ self._function = None
+ self._job = None
@callback
def async_cancel(self) -> None:
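Clearing _function and _job on shutdown matters because the wrapped callable is usually a bound method, and a bound method keeps its owner alive. A standalone illustration of the reference pattern being broken (not Home Assistant API, just the shape of the cycle):

    class Owner:
        """Holds a callback the way a Debouncer holds its wrapped function."""

        def __init__(self) -> None:
            self._function = self._refresh  # bound method -> strong reference back to self

        async def _refresh(self) -> None:
            """Stand-in for the debounced work."""

        def shutdown(self) -> None:
            # Mirrors the change above: drop the hard reference so the owner can be
            # garbage collected even if something still holds this object.
            self._function = None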
diff --git a/homeassistant/helpers/llm.py b/homeassistant/helpers/llm.py
index 2bca4c8528b..2ef785e7f71 100644
--- a/homeassistant/helpers/llm.py
+++ b/homeassistant/helpers/llm.py
@@ -4,7 +4,7 @@ from __future__ import annotations
from abc import ABC, abstractmethod
from collections.abc import Callable
-from dataclasses import dataclass
+from dataclasses import dataclass, field as dc_field
from datetime import timedelta
from decimal import Decimal
from enum import Enum
@@ -36,6 +36,7 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.util import dt as dt_util, yaml as yaml_util
from homeassistant.util.hass_dict import HassKey
from homeassistant.util.json import JsonObjectType
+from homeassistant.util.ulid import ulid_now
from . import (
area_registry as ar,
@@ -139,6 +140,8 @@ class ToolInput:
tool_name: str
tool_args: dict[str, Any]
+ # Using lambda for default to allow patching in tests
+ id: str = dc_field(default_factory=lambda: ulid_now()) # pylint: disable=unnecessary-lambda
class Tool:
@@ -326,7 +329,7 @@ class AssistAPI(API):
def _async_get_api_prompt(
self, llm_context: LLMContext, exposed_entities: dict | None
) -> str:
- if not exposed_entities:
+ if not exposed_entities or not exposed_entities["entities"]:
return (
"Only if the user wants to control a device, tell them to expose entities "
"to their voice assistant in Home Assistant."
@@ -389,11 +392,11 @@ class AssistAPI(API):
"""Return the prompt for the API for exposed entities."""
prompt = []
- if exposed_entities:
+ if exposed_entities and exposed_entities["entities"]:
prompt.append(
"An overview of the areas and the devices in this smart home:"
)
- prompt.append(yaml_util.dump(list(exposed_entities.values())))
+ prompt.append(yaml_util.dump(list(exposed_entities["entities"].values())))
return prompt
@@ -425,8 +428,9 @@ class AssistAPI(API):
exposed_domains: set[str] | None = None
if exposed_entities is not None:
exposed_domains = {
- split_entity_id(entity_id)[0] for entity_id in exposed_entities
+ info["domain"] for info in exposed_entities["entities"].values()
}
+
intent_handlers = [
intent_handler
for intent_handler in intent_handlers
@@ -438,25 +442,29 @@ class AssistAPI(API):
IntentTool(self.cached_slugify(intent_handler.intent_type), intent_handler)
for intent_handler in intent_handlers
]
- if exposed_domains and CALENDAR_DOMAIN in exposed_domains:
- tools.append(CalendarGetEventsTool())
- if llm_context.assistant is not None:
- for state in self.hass.states.async_all(SCRIPT_DOMAIN):
- if not async_should_expose(
- self.hass, llm_context.assistant, state.entity_id
- ):
- continue
+ if exposed_entities:
+ if exposed_entities[CALENDAR_DOMAIN]:
+ names = []
+ for info in exposed_entities[CALENDAR_DOMAIN].values():
+ names.extend(info["names"].split(", "))
+ tools.append(CalendarGetEventsTool(names))
- tools.append(ScriptTool(self.hass, state.entity_id))
+ tools.extend(
+ ScriptTool(self.hass, script_entity_id)
+ for script_entity_id in exposed_entities[SCRIPT_DOMAIN]
+ )
return tools
def _get_exposed_entities(
hass: HomeAssistant, assistant: str
-) -> dict[str, dict[str, Any]]:
- """Get exposed entities."""
+) -> dict[str, dict[str, dict[str, Any]]]:
+ """Get exposed entities.
+
+ Splits out calendars and scripts.
+ """
area_registry = ar.async_get(hass)
entity_registry = er.async_get(hass)
device_registry = dr.async_get(hass)
@@ -477,12 +485,13 @@ def _get_exposed_entities(
}
entities = {}
+ data: dict[str, dict[str, Any]] = {
+ SCRIPT_DOMAIN: {},
+ CALENDAR_DOMAIN: {},
+ }
for state in hass.states.async_all():
- if (
- not async_should_expose(hass, assistant, state.entity_id)
- or state.domain == SCRIPT_DOMAIN
- ):
+ if not async_should_expose(hass, assistant, state.entity_id):
continue
description: str | None = None
@@ -529,9 +538,13 @@ def _get_exposed_entities(
}:
info["attributes"] = attributes
- entities[state.entity_id] = info
+ if state.domain in data:
+ data[state.domain][state.entity_id] = info
+ else:
+ entities[state.entity_id] = info
- return entities
+ data["entities"] = entities
+ return data
def _selector_serializer(schema: Any) -> Any: # noqa: C901
@@ -813,15 +826,18 @@ class CalendarGetEventsTool(Tool):
name = "calendar_get_events"
description = (
"Get events from a calendar. "
- "When asked when something happens, search the whole week. "
+ "When asked if something happens, search the whole week. "
"Results are RFC 5545 which means 'end' is exclusive."
)
- parameters = vol.Schema(
- {
- vol.Required("calendar"): cv.string,
- vol.Required("range"): vol.In(["today", "week"]),
- }
- )
+
+ def __init__(self, calendars: list[str]) -> None:
+ """Init the get events tool."""
+ self.parameters = vol.Schema(
+ {
+ vol.Required("calendar"): vol.In(calendars),
+ vol.Required("range"): vol.In(["today", "week"]),
+ }
+ )
async def async_call(
self, hass: HomeAssistant, tool_input: ToolInput, llm_context: LLMContext
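_get_exposed_entities() now returns scripts and calendars under their own domain keys, with everything else under "entities", so callers index by domain first. An abbreviated illustration of the new shape (entity IDs and names are hypothetical; only the "names" and "domain" fields referenced in this hunk are shown):

    exposed = {
        "script": {
            "script.good_morning": {"names": "Good Morning", "domain": "script"},
        },
        "calendar": {
            "calendar.family": {"names": "Family", "domain": "calendar"},
        },
        "entities": {
            "light.kitchen": {"names": "Kitchen light", "domain": "light"},
        },
    }
    # The prompt and exposed-domain checks read exposed["entities"]; the calendar and
    # script tools are built from exposed["calendar"] and exposed["script"].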
diff --git a/homeassistant/helpers/update_coordinator.py b/homeassistant/helpers/update_coordinator.py
index 943eadff19a..be765ff422d 100644
--- a/homeassistant/helpers/update_coordinator.py
+++ b/homeassistant/helpers/update_coordinator.py
@@ -6,6 +6,7 @@ from abc import abstractmethod
import asyncio
from collections.abc import Awaitable, Callable, Coroutine, Generator
from datetime import datetime, timedelta
+from functools import partial
import logging
from random import randint
from time import monotonic
@@ -103,7 +104,8 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]):
randint(event.RANDOM_MICROSECOND_MIN, event.RANDOM_MICROSECOND_MAX) / 10**6
)
- self._listeners: dict[CALLBACK_TYPE, tuple[CALLBACK_TYPE, object | None]] = {}
+ self._listeners: dict[int, tuple[CALLBACK_TYPE, object | None]] = {}
+ self._last_listener_id: int = 0
self._unsub_refresh: CALLBACK_TYPE | None = None
self._unsub_shutdown: CALLBACK_TYPE | None = None
self._request_refresh_task: asyncio.TimerHandle | None = None
@@ -148,21 +150,26 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]):
) -> Callable[[], None]:
"""Listen for data updates."""
schedule_refresh = not self._listeners
-
- @callback
- def remove_listener() -> None:
- """Remove update listener."""
- self._listeners.pop(remove_listener)
- if not self._listeners:
- self._unschedule_refresh()
-
- self._listeners[remove_listener] = (update_callback, context)
+ self._last_listener_id += 1
+ self._listeners[self._last_listener_id] = (update_callback, context)
# This is the first listener, set up interval.
if schedule_refresh:
self._schedule_refresh()
- return remove_listener
+ return partial(self.__async_remove_listener_internal, self._last_listener_id)
+
+ @callback
+ def __async_remove_listener_internal(self, listener_id: int) -> None:
+ """Remove a listener.
+
+ This is an internal function that is not to be overridden
+ in subclasses as it may change in the future.
+ """
+ self._listeners.pop(listener_id)
+ if not self._listeners:
+ self._unschedule_refresh()
+ self._debounced_refresh.async_cancel()
@callback
def async_update_listeners(self) -> None:
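For callers nothing changes: async_add_listener() still returns an unsubscribe callable, it is simply a functools.partial over an integer listener id instead of a closure keyed on itself. A sketch of the usual pattern, assuming an existing coordinator instance:

    def _handle_update() -> None:
        """React to refreshed coordinator data."""

    unsub = coordinator.async_add_listener(_handle_update)
    # Later, e.g. when the consuming entity is removed:
    unsub()  # removing the last listener also cancels the pending debounced refresh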
diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt
index bc6002e72b8..cd768198541 100644
--- a/homeassistant/package_constraints.txt
+++ b/homeassistant/package_constraints.txt
@@ -1,44 +1,44 @@
# Automatically generated by gen_requirements_all.py, do not edit
-aiodhcpwatcher==1.0.3
-aiodiscover==2.1.0
+aiodhcpwatcher==1.1.0
+aiodiscover==2.2.2
aiodns==3.2.0
-aiohasupervisor==0.2.2b6
-aiohttp-asyncmdnsresolver==0.0.3
-aiohttp-fast-zlib==0.2.0
-aiohttp==3.11.11
+aiohasupervisor==0.3.0
+aiohttp-asyncmdnsresolver==0.1.0
+aiohttp-fast-zlib==0.2.2
+aiohttp==3.11.12
aiohttp_cors==0.7.0
aiousbwatcher==1.1.1
-aiozoneinfo==0.2.1
+aiozoneinfo==0.2.3
astral==2.2
-async-interrupt==1.2.0
+async-interrupt==1.2.1
async-upnp-client==0.43.0
atomicwrites-homeassistant==1.4.1
-attrs==24.2.0
+attrs==25.1.0
audioop-lts==0.2.1;python_version>='3.13'
av==13.1.0
awesomeversion==24.6.0
bcrypt==4.2.0
-bleak-retry-connector==3.8.0
+bleak-retry-connector==3.8.1
bleak==0.22.3
-bluetooth-adapters==0.21.1
+bluetooth-adapters==0.21.4
bluetooth-auto-recovery==1.4.2
-bluetooth-data-tools==1.23.3
+bluetooth-data-tools==1.23.4
cached-ipaddress==0.8.0
certifi>=2021.5.30
ciso8601==2.3.2
cronsim==2.6
cryptography==44.0.0
-dbus-fast==2.31.0
+dbus-fast==2.33.0
fnv-hash-fast==1.2.2
go2rtc-client==0.1.2
ha-ffmpeg==3.2.2
-habluetooth==3.21.0
-hass-nabucasa==0.88.1
-hassil==2.2.0
-home-assistant-bluetooth==1.13.0
-home-assistant-frontend==20250131.0
-home-assistant-intents==2025.1.28
+habluetooth==3.21.1
+hass-nabucasa==0.89.0
+hassil==2.2.3
+home-assistant-bluetooth==1.13.1
+home-assistant-frontend==20250205.0
+home-assistant-intents==2025.2.5
httpx==0.28.1
ifaddr==0.2.0
Jinja2==3.1.5
@@ -46,14 +46,14 @@ lru-dict==1.3.0
mutagen==1.47.0
orjson==3.10.12
packaging>=23.1
-paho-mqtt==1.6.1
+paho-mqtt==2.1.0
Pillow==11.1.0
propcache==0.2.1
psutil-home-assistant==0.0.1
PyJWT==2.10.1
pymicro-vad==1.0.1
PyNaCl==1.5.0
-pyOpenSSL==24.3.0
+pyOpenSSL==25.0.0
pyserial==3.5
pyspeex-noise==1.0.2
python-slugify==8.0.4
@@ -67,7 +67,7 @@ standard-telnetlib==3.13.0;python_version>='3.13'
typing-extensions>=4.12.2,<5.0
ulid-transform==1.2.0
urllib3>=1.26.5,<2
-uv==0.5.21
+uv==0.5.27
voluptuous-openapi==0.0.6
voluptuous-serialize==2.6.0
voluptuous==0.15.2
diff --git a/pylint/plugins/hass_enforce_class_module.py b/pylint/plugins/hass_enforce_class_module.py
index 09fe61b68c6..cc7b33d9946 100644
--- a/pylint/plugins/hass_enforce_class_module.py
+++ b/pylint/plugins/hass_enforce_class_module.py
@@ -140,7 +140,7 @@ class HassEnforceClassModule(BaseChecker):
for ancestor in top_level_ancestors:
if ancestor.name in _BASE_ENTITY_MODULES and not any(
- anc.name in _MODULE_CLASSES for anc in ancestors
+ parent.name in _MODULE_CLASSES for parent in ancestors
):
self.add_message(
"hass-enforce-class-module",
diff --git a/pyproject.toml b/pyproject.toml
index afed8fd7091..ab927b21b7c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -27,15 +27,15 @@ dependencies = [
# Integrations may depend on hassio integration without listing it to
# change behavior based on presence of supervisor. Deprecated with #127228
# Lib can be removed with 2025.11
- "aiohasupervisor==0.2.2b6",
- "aiohttp==3.11.11",
+ "aiohasupervisor==0.3.0",
+ "aiohttp==3.11.12",
"aiohttp_cors==0.7.0",
- "aiohttp-fast-zlib==0.2.0",
- "aiohttp-asyncmdnsresolver==0.0.3",
- "aiozoneinfo==0.2.1",
+ "aiohttp-fast-zlib==0.2.2",
+ "aiohttp-asyncmdnsresolver==0.1.0",
+ "aiozoneinfo==0.2.3",
"astral==2.2",
- "async-interrupt==1.2.0",
- "attrs==24.2.0",
+ "async-interrupt==1.2.1",
+ "attrs==25.1.0",
"atomicwrites-homeassistant==1.4.1",
"audioop-lts==0.2.1;python_version>='3.13'",
"awesomeversion==24.6.0",
@@ -46,11 +46,11 @@ dependencies = [
"fnv-hash-fast==1.2.2",
# hass-nabucasa is imported by helpers which don't depend on the cloud
# integration
- "hass-nabucasa==0.88.1",
+ "hass-nabucasa==0.89.0",
# When bumping httpx, please check the version pins of
# httpcore, anyio, and h11 in gen_requirements_all
"httpx==0.28.1",
- "home-assistant-bluetooth==1.13.0",
+ "home-assistant-bluetooth==1.13.1",
"ifaddr==0.2.0",
"Jinja2==3.1.5",
"lru-dict==1.3.0",
@@ -59,7 +59,7 @@ dependencies = [
"cryptography==44.0.0",
"Pillow==11.1.0",
"propcache==0.2.1",
- "pyOpenSSL==24.3.0",
+ "pyOpenSSL==25.0.0",
"orjson==3.10.12",
"packaging>=23.1",
"psutil-home-assistant==0.0.1",
@@ -76,7 +76,7 @@ dependencies = [
# Temporary setting an upper bound, to prevent compat issues with urllib3>=2
# https://github.com/home-assistant/core/issues/97248
"urllib3>=1.26.5,<2",
- "uv==0.5.21",
+ "uv==0.5.27",
"voluptuous==0.15.2",
"voluptuous-serialize==2.6.0",
"voluptuous-openapi==0.0.6",
diff --git a/requirements.txt b/requirements.txt
index a58065a3a7a..5e5fabb5723 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,15 +4,15 @@
# Home Assistant Core
aiodns==3.2.0
-aiohasupervisor==0.2.2b6
-aiohttp==3.11.11
+aiohasupervisor==0.3.0
+aiohttp==3.11.12
aiohttp_cors==0.7.0
-aiohttp-fast-zlib==0.2.0
-aiohttp-asyncmdnsresolver==0.0.3
-aiozoneinfo==0.2.1
+aiohttp-fast-zlib==0.2.2
+aiohttp-asyncmdnsresolver==0.1.0
+aiozoneinfo==0.2.3
astral==2.2
-async-interrupt==1.2.0
-attrs==24.2.0
+async-interrupt==1.2.1
+attrs==25.1.0
atomicwrites-homeassistant==1.4.1
audioop-lts==0.2.1;python_version>='3.13'
awesomeversion==24.6.0
@@ -21,9 +21,9 @@ certifi>=2021.5.30
ciso8601==2.3.2
cronsim==2.6
fnv-hash-fast==1.2.2
-hass-nabucasa==0.88.1
+hass-nabucasa==0.89.0
httpx==0.28.1
-home-assistant-bluetooth==1.13.0
+home-assistant-bluetooth==1.13.1
ifaddr==0.2.0
Jinja2==3.1.5
lru-dict==1.3.0
@@ -31,7 +31,7 @@ PyJWT==2.10.1
cryptography==44.0.0
Pillow==11.1.0
propcache==0.2.1
-pyOpenSSL==24.3.0
+pyOpenSSL==25.0.0
orjson==3.10.12
packaging>=23.1
psutil-home-assistant==0.0.1
@@ -45,7 +45,7 @@ standard-telnetlib==3.13.0;python_version>='3.13'
typing-extensions>=4.12.2,<5.0
ulid-transform==1.2.0
urllib3>=1.26.5,<2
-uv==0.5.21
+uv==0.5.27
voluptuous==0.15.2
voluptuous-serialize==2.6.0
voluptuous-openapi==0.0.6
diff --git a/requirements_all.txt b/requirements_all.txt
index d8bd4ec6a64..5c8bf3923c7 100644
--- a/requirements_all.txt
+++ b/requirements_all.txt
@@ -84,7 +84,7 @@ PyQRCode==1.2.1
PyRMVtransport==0.3.3
# homeassistant.components.switchbot
-PySwitchbot==0.55.4
+PySwitchbot==0.56.0
# homeassistant.components.switchmate
PySwitchmate==0.5.1
@@ -176,7 +176,7 @@ aio-georss-gdacs==0.10
aioacaia==0.1.14
# homeassistant.components.airq
-aioairq==0.4.3
+aioairq==0.4.4
# homeassistant.components.airzone_cloud
aioairzone-cloud==0.6.10
@@ -216,10 +216,10 @@ aiobotocore==2.13.1
aiocomelit==0.10.1
# homeassistant.components.dhcp
-aiodhcpwatcher==1.0.3
+aiodhcpwatcher==1.1.0
# homeassistant.components.dhcp
-aiodiscover==2.1.0
+aiodiscover==2.2.2
# homeassistant.components.dnsip
aiodns==3.2.0
@@ -261,7 +261,7 @@ aioguardian==2022.07.0
aioharmony==0.4.1
# homeassistant.components.hassio
-aiohasupervisor==0.2.2b6
+aiohasupervisor==0.3.0
# homeassistant.components.home_connect
aiohomeconnect==0.12.3
@@ -315,10 +315,10 @@ aionanoleaf==0.2.1
aionotion==2024.03.0
# homeassistant.components.nut
-aionut==4.3.3
+aionut==4.3.4
# homeassistant.components.oncue
-aiooncue==0.3.7
+aiooncue==0.3.9
# homeassistant.components.openexchangerates
aioopenexchangerates==0.6.8
@@ -383,7 +383,7 @@ aioslimproto==3.0.0
aiosolaredge==0.2.0
# homeassistant.components.steamist
-aiosteamist==1.0.0
+aiosteamist==1.0.1
# homeassistant.components.cambridge_audio
aiostreammagic==2.10.0
@@ -494,7 +494,7 @@ apsystems-ez1==2.4.0
aqualogic==2.6
# homeassistant.components.aranet
-aranet4==2.5.0
+aranet4==2.5.1
# homeassistant.components.arcam_fmj
arcam-fmj==1.5.2
@@ -597,10 +597,10 @@ bizkaibus==0.1.1
# homeassistant.components.eq3btsmart
# homeassistant.components.esphome
-bleak-esphome==2.6.0
+bleak-esphome==2.7.1
# homeassistant.components.bluetooth
-bleak-retry-connector==3.8.0
+bleak-retry-connector==3.8.1
# homeassistant.components.bluetooth
bleak==0.22.3
@@ -625,7 +625,7 @@ bluemaestro-ble==0.2.3
# bluepy==1.3.0
# homeassistant.components.bluetooth
-bluetooth-adapters==0.21.1
+bluetooth-adapters==0.21.4
# homeassistant.components.bluetooth
bluetooth-auto-recovery==1.4.2
@@ -634,7 +634,7 @@ bluetooth-auto-recovery==1.4.2
# homeassistant.components.ld2410_ble
# homeassistant.components.led_ble
# homeassistant.components.private_ble_device
-bluetooth-data-tools==1.23.3
+bluetooth-data-tools==1.23.4
# homeassistant.components.bond
bond-async==0.2.1
@@ -668,7 +668,7 @@ brunt==1.2.0
bt-proximity==0.2.1
# homeassistant.components.bthome
-bthome-ble==3.12.3
+bthome-ble==3.12.4
# homeassistant.components.bt_home_hub_5
bthomehub5-devicelist==0.1.1
@@ -738,7 +738,7 @@ datadog==0.15.0
datapoint==0.9.9
# homeassistant.components.bluetooth
-dbus-fast==2.31.0
+dbus-fast==2.33.0
# homeassistant.components.debugpy
debugpy==1.8.11
@@ -750,7 +750,7 @@ debugpy==1.8.11
# decora==0.6
# homeassistant.components.ecovacs
-deebot-client==12.0.0b0
+deebot-client==12.0.0
# homeassistant.components.ihc
# homeassistant.components.namecheapdns
@@ -785,7 +785,7 @@ directv==0.4.0
discogs-client==2.3.0
# homeassistant.components.steamist
-discovery30303==0.3.2
+discovery30303==0.3.3
# homeassistant.components.dremel_3d_printer
dremel3dpy==2.1.1
@@ -824,7 +824,7 @@ ecoaliface==0.4.0
eheimdigital==1.0.5
# homeassistant.components.electric_kiwi
-electrickiwi-api==0.8.5
+electrickiwi-api==0.9.12
# homeassistant.components.elevenlabs
elevenlabs==1.9.0
@@ -1052,10 +1052,10 @@ goslide-api==0.7.0
gotailwind==0.3.0
# homeassistant.components.govee_ble
-govee-ble==0.42.0
+govee-ble==0.42.1
# homeassistant.components.govee_light_local
-govee-local-api==1.5.3
+govee-local-api==2.0.0
# homeassistant.components.remote_rpi_gpio
gpiozero==1.6.2
@@ -1103,16 +1103,16 @@ ha-philipsjs==3.2.2
habiticalib==0.3.4
# homeassistant.components.bluetooth
-habluetooth==3.21.0
+habluetooth==3.21.1
# homeassistant.components.cloud
-hass-nabucasa==0.88.1
+hass-nabucasa==0.89.0
# homeassistant.components.splunk
hass-splunk==0.1.1
# homeassistant.components.conversation
-hassil==2.2.0
+hassil==2.2.3
# homeassistant.components.jewish_calendar
hdate==0.11.1
@@ -1143,13 +1143,13 @@ hole==0.8.0
# homeassistant.components.holiday
# homeassistant.components.workday
-holidays==0.65
+holidays==0.66
# homeassistant.components.frontend
-home-assistant-frontend==20250131.0
+home-assistant-frontend==20250205.0
# homeassistant.components.conversation
-home-assistant-intents==2025.1.28
+home-assistant-intents==2025.2.5
# homeassistant.components.homematicip_cloud
homematicip==1.1.7
@@ -1228,6 +1228,9 @@ insteon-frontend-home-assistant==0.5.0
# homeassistant.components.intellifire
intellifire4py==4.1.9
+# homeassistant.components.iometer
+iometer==0.1.0
+
# homeassistant.components.iotty
iottycloud==0.3.0
@@ -1238,7 +1241,7 @@ iperf3==0.1.11
isal==1.7.1
# homeassistant.components.gogogate2
-ismartgate==5.0.1
+ismartgate==5.0.2
# homeassistant.components.israel_rail
israel-rail-api==0.1.2
@@ -1281,7 +1284,7 @@ konnected==1.2.0
krakenex==2.2.2
# homeassistant.components.lacrosse_view
-lacrosse-view==1.0.4
+lacrosse-view==1.1.1
# homeassistant.components.eufy
lakeside==0.13
@@ -1299,7 +1302,7 @@ ld2410-ble==0.1.1
leaone-ble==0.1.0
# homeassistant.components.led_ble
-led-ble==1.1.4
+led-ble==1.1.6
# homeassistant.components.lektrico
lektricowifi==0.0.43
@@ -1405,7 +1408,7 @@ microBeesPy==0.3.5
mill-local==0.3.0
# homeassistant.components.mill
-millheater==0.12.2
+millheater==0.12.3
# homeassistant.components.minio
minio==7.1.12
@@ -1434,9 +1437,6 @@ motioneye-client==0.3.14
# homeassistant.components.bang_olufsen
mozart-api==4.1.1.116.4
-# homeassistant.components.onedrive
-msgraph-sdk==1.16.0
-
# homeassistant.components.mullvad
mullvad-api==1.0.0
@@ -1477,7 +1477,7 @@ nettigo-air-monitor==4.0.0
neurio==0.3.1
# homeassistant.components.nexia
-nexia==2.0.8
+nexia==2.0.9
# homeassistant.components.nextcloud
nextcloudmonitor==1.5.1
@@ -1489,7 +1489,7 @@ nextcord==2.6.0
nextdns==4.0.0
# homeassistant.components.niko_home_control
-nhc==0.3.9
+nhc==0.4.4
# homeassistant.components.nibe_heatpump
nibe==2.14.0
@@ -1558,6 +1558,9 @@ omnilogic==0.4.5
# homeassistant.components.ondilo_ico
ondilo==0.5.0
+# homeassistant.components.onedrive
+onedrive-personal-sdk==0.0.8
+
# homeassistant.components.onvif
onvif-zeep-async==3.2.5
@@ -1613,7 +1616,7 @@ ovoenergy==2.0.0
p1monitor==3.1.0
# homeassistant.components.mqtt
-paho-mqtt==1.6.1
+paho-mqtt==2.1.0
# homeassistant.components.panasonic_bluray
panacotta==0.2
@@ -1909,7 +1912,7 @@ pyebox==1.1.4
pyecoforest==0.4.0
# homeassistant.components.econet
-pyeconet==0.1.23
+pyeconet==0.1.26
# homeassistant.components.ista_ecotrend
pyecotrend-ista==3.3.1
@@ -2190,7 +2193,7 @@ pyotgw==2.2.2
pyotp==2.8.0
# homeassistant.components.overkiz
-pyoverkiz==1.15.5
+pyoverkiz==1.16.0
# homeassistant.components.onewire
pyownet==0.10.0.post1
@@ -2202,7 +2205,7 @@ pypalazzetti==0.1.19
pypca==0.0.7
# homeassistant.components.lcn
-pypck==0.8.3
+pypck==0.8.5
# homeassistant.components.pjlink
pypjlink2==1.2.1
@@ -2310,7 +2313,7 @@ pysmarty2==0.10.1
pysml==0.0.12
# homeassistant.components.smlight
-pysmlight==0.2.1
+pysmlight==0.2.3
# homeassistant.components.snmp
pysnmp==6.2.6
@@ -2388,7 +2391,7 @@ python-gitlab==1.6.0
python-google-drive-api==0.0.2
# homeassistant.components.analytics_insights
-python-homeassistant-analytics==0.8.1
+python-homeassistant-analytics==0.9.0
# homeassistant.components.homewizard
python-homewizard-energy==v8.3.2
@@ -2449,7 +2452,7 @@ python-rabbitair==0.0.8
python-ripple-api==0.0.3
# homeassistant.components.roborock
-python-roborock==2.9.7
+python-roborock==2.11.1
# homeassistant.components.smarttub
python-smarttub==0.0.38
@@ -2513,7 +2516,7 @@ pyvera==0.3.15
pyversasense==0.0.6
# homeassistant.components.vesync
-pyvesync==2.1.16
+pyvesync==2.1.17
# homeassistant.components.vizio
pyvizio==0.1.61
@@ -2603,7 +2606,7 @@ renault-api==0.2.9
renson-endura-delta==1.7.2
# homeassistant.components.reolink
-reolink-aio==0.11.8
+reolink-aio==0.11.9
# homeassistant.components.idteck_prox
rfk101py==0.0.1
@@ -2630,7 +2633,7 @@ rokuecp==0.19.3
romy==0.0.10
# homeassistant.components.roomba
-roombapy==1.8.1
+roombapy==1.9.0
# homeassistant.components.roon
roonapi==0.1.6
@@ -2854,7 +2857,7 @@ temperusb==1.6.1
# homeassistant.components.tesla_fleet
# homeassistant.components.teslemetry
# homeassistant.components.tessie
-tesla-fleet-api==0.9.2
+tesla-fleet-api==0.9.8
# homeassistant.components.powerwall
tesla-powerwall==0.5.2
@@ -2875,13 +2878,13 @@ tessie-api==0.1.1
thermobeacon-ble==0.7.0
# homeassistant.components.thermopro
-thermopro-ble==0.10.1
+thermopro-ble==0.11.0
# homeassistant.components.thingspeak
thingspeak==1.0.0
# homeassistant.components.lg_thinq
-thinqconnect==1.0.2
+thinqconnect==1.0.4
# homeassistant.components.tikteck
tikteck==0.4
@@ -2893,10 +2896,10 @@ tilt-ble==0.2.3
tmb==0.0.4
# homeassistant.components.todoist
-todoist-api-python==2.1.2
+todoist-api-python==2.1.7
# homeassistant.components.tolo
-tololib==1.1.0
+tololib==1.2.2
# homeassistant.components.toon
toonapi==0.3.0
@@ -2941,7 +2944,7 @@ typedmonarchmoney==0.4.4
uasiren==0.0.1
# homeassistant.components.unifiprotect
-uiprotect==7.5.0
+uiprotect==7.5.1
# homeassistant.components.landisgyr_heat_meter
ultraheat-api==0.5.7
@@ -2959,7 +2962,7 @@ unifiled==0.11
universal-silabs-flasher==0.0.25
# homeassistant.components.upb
-upb-lib==0.5.9
+upb-lib==0.6.0
# homeassistant.components.upcloud
upcloud-api==2.6.0
@@ -3091,7 +3094,7 @@ yalesmartalarmclient==0.4.3
# homeassistant.components.august
# homeassistant.components.yale
# homeassistant.components.yalexs_ble
-yalexs-ble==2.5.6
+yalexs-ble==2.5.7
# homeassistant.components.august
# homeassistant.components.yale
diff --git a/requirements_test.txt b/requirements_test.txt
index cf0a1e5473f..16983de5706 100644
--- a/requirements_test.txt
+++ b/requirements_test.txt
@@ -8,48 +8,47 @@
-c homeassistant/package_constraints.txt
-r requirements_test_pre_commit.txt
astroid==3.3.8
-coverage==7.6.8
+coverage==7.6.10
freezegun==1.5.1
-license-expression==30.4.0
+license-expression==30.4.1
mock-open==1.4.0
mypy-dev==1.16.0a1
pre-commit==4.0.0
pydantic==2.10.6
-pylint==3.3.3
-pylint-per-file-ignores==1.3.2
-pipdeptree==2.23.4
-pytest-asyncio==0.24.0
-pytest-aiohttp==1.0.5
+pylint==3.3.4
+pylint-per-file-ignores==1.4.0
+pipdeptree==2.25.0
+pytest-asyncio==0.25.3
+pytest-aiohttp==1.1.0
pytest-cov==6.0.0
-pytest-freezer==0.4.8
-pytest-github-actions-annotate-failures==0.2.0
+pytest-freezer==0.4.9
+pytest-github-actions-annotate-failures==0.3.0
pytest-socket==0.7.0
pytest-sugar==1.0.0
pytest-timeout==2.3.1
pytest-unordered==0.6.1
-pytest-picked==0.5.0
+pytest-picked==0.5.1
pytest-xdist==3.6.1
pytest==8.3.4
requests-mock==1.12.1
respx==0.22.0
-syrupy==4.8.0
-tqdm==4.66.5
+syrupy==4.8.1
+tqdm==4.67.1
types-aiofiles==24.1.0.20241221
types-atomicwrites==1.4.5.1
types-croniter==5.0.1.20241205
-types-beautifulsoup4==4.12.0.20241020
+types-beautifulsoup4==4.12.0.20250204
types-caldav==1.3.0.20241107
types-chardet==0.1.5
-types-decorator==5.1.8.20240310
-types-paho-mqtt==1.6.0.20240321
+types-decorator==5.1.8.20250121
types-pexpect==4.9.0.20241208
types-pillow==10.2.0.20240822
types-protobuf==5.29.1.20241207
types-psutil==6.1.0.20241221
-types-pyserial==3.5.0.20241221
+types-pyserial==3.5.0.20250130
types-python-dateutil==2.9.0.20241206
types-python-slugify==8.0.2.20240310
-types-pytz==2024.2.0.20241221
+types-pytz==2025.1.0.20250204
types-PyYAML==6.0.12.20241230
types-requests==2.31.0.3
types-xmltodict==0.13.0.3
diff --git a/requirements_test_all.txt b/requirements_test_all.txt
index 7515a1cc342..38d21a52091 100644
--- a/requirements_test_all.txt
+++ b/requirements_test_all.txt
@@ -81,7 +81,7 @@ PyQRCode==1.2.1
PyRMVtransport==0.3.3
# homeassistant.components.switchbot
-PySwitchbot==0.55.4
+PySwitchbot==0.56.0
# homeassistant.components.syncthru
PySyncThru==0.8.0
@@ -164,7 +164,7 @@ aio-georss-gdacs==0.10
aioacaia==0.1.14
# homeassistant.components.airq
-aioairq==0.4.3
+aioairq==0.4.4
# homeassistant.components.airzone_cloud
aioairzone-cloud==0.6.10
@@ -204,10 +204,10 @@ aiobotocore==2.13.1
aiocomelit==0.10.1
# homeassistant.components.dhcp
-aiodhcpwatcher==1.0.3
+aiodhcpwatcher==1.1.0
# homeassistant.components.dhcp
-aiodiscover==2.1.0
+aiodiscover==2.2.2
# homeassistant.components.dnsip
aiodns==3.2.0
@@ -246,7 +246,7 @@ aioguardian==2022.07.0
aioharmony==0.4.1
# homeassistant.components.hassio
-aiohasupervisor==0.2.2b6
+aiohasupervisor==0.3.0
# homeassistant.components.home_connect
aiohomeconnect==0.12.3
@@ -297,10 +297,10 @@ aionanoleaf==0.2.1
aionotion==2024.03.0
# homeassistant.components.nut
-aionut==4.3.3
+aionut==4.3.4
# homeassistant.components.oncue
-aiooncue==0.3.7
+aiooncue==0.3.9
# homeassistant.components.openexchangerates
aioopenexchangerates==0.6.8
@@ -365,7 +365,7 @@ aioslimproto==3.0.0
aiosolaredge==0.2.0
# homeassistant.components.steamist
-aiosteamist==1.0.0
+aiosteamist==1.0.1
# homeassistant.components.cambridge_audio
aiostreammagic==2.10.0
@@ -464,7 +464,7 @@ aprslib==0.7.2
apsystems-ez1==2.4.0
# homeassistant.components.aranet
-aranet4==2.5.0
+aranet4==2.5.1
# homeassistant.components.arcam_fmj
arcam-fmj==1.5.2
@@ -528,10 +528,10 @@ bimmer-connected[china]==0.17.2
# homeassistant.components.eq3btsmart
# homeassistant.components.esphome
-bleak-esphome==2.6.0
+bleak-esphome==2.7.1
# homeassistant.components.bluetooth
-bleak-retry-connector==3.8.0
+bleak-retry-connector==3.8.1
# homeassistant.components.bluetooth
bleak==0.22.3
@@ -549,7 +549,7 @@ bluecurrent-api==1.2.3
bluemaestro-ble==0.2.3
# homeassistant.components.bluetooth
-bluetooth-adapters==0.21.1
+bluetooth-adapters==0.21.4
# homeassistant.components.bluetooth
bluetooth-auto-recovery==1.4.2
@@ -558,7 +558,7 @@ bluetooth-auto-recovery==1.4.2
# homeassistant.components.ld2410_ble
# homeassistant.components.led_ble
# homeassistant.components.private_ble_device
-bluetooth-data-tools==1.23.3
+bluetooth-data-tools==1.23.4
# homeassistant.components.bond
bond-async==0.2.1
@@ -585,7 +585,7 @@ brottsplatskartan==1.0.5
brunt==1.2.0
# homeassistant.components.bthome
-bthome-ble==3.12.3
+bthome-ble==3.12.4
# homeassistant.components.buienradar
buienradar==1.0.6
@@ -634,13 +634,13 @@ datadog==0.15.0
datapoint==0.9.9
# homeassistant.components.bluetooth
-dbus-fast==2.31.0
+dbus-fast==2.33.0
# homeassistant.components.debugpy
debugpy==1.8.11
# homeassistant.components.ecovacs
-deebot-client==12.0.0b0
+deebot-client==12.0.0
# homeassistant.components.ihc
# homeassistant.components.namecheapdns
@@ -672,7 +672,7 @@ dio-chacon-wifi-api==1.2.1
directv==0.4.0
# homeassistant.components.steamist
-discovery30303==0.3.2
+discovery30303==0.3.3
# homeassistant.components.dremel_3d_printer
dremel3dpy==2.1.1
@@ -702,7 +702,7 @@ easyenergy==2.1.2
eheimdigital==1.0.5
# homeassistant.components.electric_kiwi
-electrickiwi-api==0.8.5
+electrickiwi-api==0.9.12
# homeassistant.components.elevenlabs
elevenlabs==1.9.0
@@ -902,10 +902,10 @@ goslide-api==0.7.0
gotailwind==0.3.0
# homeassistant.components.govee_ble
-govee-ble==0.42.0
+govee-ble==0.42.1
# homeassistant.components.govee_light_local
-govee-local-api==1.5.3
+govee-local-api==2.0.0
# homeassistant.components.gpsd
gps3==0.33.3
@@ -944,13 +944,13 @@ ha-philipsjs==3.2.2
habiticalib==0.3.4
# homeassistant.components.bluetooth
-habluetooth==3.21.0
+habluetooth==3.21.1
# homeassistant.components.cloud
-hass-nabucasa==0.88.1
+hass-nabucasa==0.89.0
# homeassistant.components.conversation
-hassil==2.2.0
+hassil==2.2.3
# homeassistant.components.jewish_calendar
hdate==0.11.1
@@ -972,13 +972,13 @@ hole==0.8.0
# homeassistant.components.holiday
# homeassistant.components.workday
-holidays==0.65
+holidays==0.66
# homeassistant.components.frontend
-home-assistant-frontend==20250131.0
+home-assistant-frontend==20250205.0
# homeassistant.components.conversation
-home-assistant-intents==2025.1.28
+home-assistant-intents==2025.2.5
# homeassistant.components.homematicip_cloud
homematicip==1.1.7
@@ -1042,6 +1042,9 @@ insteon-frontend-home-assistant==0.5.0
# homeassistant.components.intellifire
intellifire4py==4.1.9
+# homeassistant.components.iometer
+iometer==0.1.0
+
# homeassistant.components.iotty
iottycloud==0.3.0
@@ -1049,7 +1052,7 @@ iottycloud==0.3.0
isal==1.7.1
# homeassistant.components.gogogate2
-ismartgate==5.0.1
+ismartgate==5.0.2
# homeassistant.components.israel_rail
israel-rail-api==0.1.2
@@ -1083,7 +1086,7 @@ konnected==1.2.0
krakenex==2.2.2
# homeassistant.components.lacrosse_view
-lacrosse-view==1.0.4
+lacrosse-view==1.1.1
# homeassistant.components.laundrify
laundrify-aio==1.2.2
@@ -1098,7 +1101,7 @@ ld2410-ble==0.1.1
leaone-ble==0.1.0
# homeassistant.components.led_ble
-led-ble==1.1.4
+led-ble==1.1.6
# homeassistant.components.lektrico
lektricowifi==0.0.43
@@ -1177,7 +1180,7 @@ microBeesPy==0.3.5
mill-local==0.3.0
# homeassistant.components.mill
-millheater==0.12.2
+millheater==0.12.3
# homeassistant.components.minio
minio==7.1.12
@@ -1206,9 +1209,6 @@ motioneye-client==0.3.14
# homeassistant.components.bang_olufsen
mozart-api==4.1.1.116.4
-# homeassistant.components.onedrive
-msgraph-sdk==1.16.0
-
# homeassistant.components.mullvad
mullvad-api==1.0.0
@@ -1240,7 +1240,7 @@ netmap==0.7.0.2
nettigo-air-monitor==4.0.0
# homeassistant.components.nexia
-nexia==2.0.8
+nexia==2.0.9
# homeassistant.components.nextcloud
nextcloudmonitor==1.5.1
@@ -1252,7 +1252,7 @@ nextcord==2.6.0
nextdns==4.0.0
# homeassistant.components.niko_home_control
-nhc==0.3.9
+nhc==0.4.4
# homeassistant.components.nibe_heatpump
nibe==2.14.0
@@ -1306,6 +1306,9 @@ omnilogic==0.4.5
# homeassistant.components.ondilo_ico
ondilo==0.5.0
+# homeassistant.components.onedrive
+onedrive-personal-sdk==0.0.8
+
# homeassistant.components.onvif
onvif-zeep-async==3.2.5
@@ -1343,7 +1346,7 @@ ovoenergy==2.0.0
p1monitor==3.1.0
# homeassistant.components.mqtt
-paho-mqtt==1.6.1
+paho-mqtt==2.1.0
# homeassistant.components.panasonic_viera
panasonic-viera==0.4.2
@@ -1556,7 +1559,7 @@ pydroid-ipcam==2.0.0
pyecoforest==0.4.0
# homeassistant.components.econet
-pyeconet==0.1.23
+pyeconet==0.1.26
# homeassistant.components.ista_ecotrend
pyecotrend-ista==3.3.1
@@ -1786,7 +1789,7 @@ pyotgw==2.2.2
pyotp==2.8.0
# homeassistant.components.overkiz
-pyoverkiz==1.15.5
+pyoverkiz==1.16.0
# homeassistant.components.onewire
pyownet==0.10.0.post1
@@ -1795,7 +1798,7 @@ pyownet==0.10.0.post1
pypalazzetti==0.1.19
# homeassistant.components.lcn
-pypck==0.8.3
+pypck==0.8.5
# homeassistant.components.pjlink
pypjlink2==1.2.1
@@ -1882,7 +1885,7 @@ pysmarty2==0.10.1
pysml==0.0.12
# homeassistant.components.smlight
-pysmlight==0.2.1
+pysmlight==0.2.3
# homeassistant.components.snmp
pysnmp==6.2.6
@@ -1933,7 +1936,7 @@ python-fullykiosk==0.0.14
python-google-drive-api==0.0.2
# homeassistant.components.analytics_insights
-python-homeassistant-analytics==0.8.1
+python-homeassistant-analytics==0.9.0
# homeassistant.components.homewizard
python-homewizard-energy==v8.3.2
@@ -1982,7 +1985,7 @@ python-picnic-api==1.1.0
python-rabbitair==0.0.8
# homeassistant.components.roborock
-python-roborock==2.9.7
+python-roborock==2.11.1
# homeassistant.components.smarttub
python-smarttub==0.0.38
@@ -2031,7 +2034,7 @@ pyuptimerobot==22.2.0
pyvera==0.3.15
# homeassistant.components.vesync
-pyvesync==2.1.16
+pyvesync==2.1.17
# homeassistant.components.vizio
pyvizio==0.1.61
@@ -2106,7 +2109,7 @@ renault-api==0.2.9
renson-endura-delta==1.7.2
# homeassistant.components.reolink
-reolink-aio==0.11.8
+reolink-aio==0.11.9
# homeassistant.components.rflink
rflink==0.0.66
@@ -2121,7 +2124,7 @@ rokuecp==0.19.3
romy==0.0.10
# homeassistant.components.roomba
-roombapy==1.8.1
+roombapy==1.9.0
# homeassistant.components.roon
roonapi==0.1.6
@@ -2294,7 +2297,7 @@ temperusb==1.6.1
# homeassistant.components.tesla_fleet
# homeassistant.components.teslemetry
# homeassistant.components.tessie
-tesla-fleet-api==0.9.2
+tesla-fleet-api==0.9.8
# homeassistant.components.powerwall
tesla-powerwall==0.5.2
@@ -2312,19 +2315,19 @@ tessie-api==0.1.1
thermobeacon-ble==0.7.0
# homeassistant.components.thermopro
-thermopro-ble==0.10.1
+thermopro-ble==0.11.0
# homeassistant.components.lg_thinq
-thinqconnect==1.0.2
+thinqconnect==1.0.4
# homeassistant.components.tilt_ble
tilt-ble==0.2.3
# homeassistant.components.todoist
-todoist-api-python==2.1.2
+todoist-api-python==2.1.7
# homeassistant.components.tolo
-tololib==1.1.0
+tololib==1.2.2
# homeassistant.components.toon
toonapi==0.3.0
@@ -2366,7 +2369,7 @@ typedmonarchmoney==0.4.4
uasiren==0.0.1
# homeassistant.components.unifiprotect
-uiprotect==7.5.0
+uiprotect==7.5.1
# homeassistant.components.landisgyr_heat_meter
ultraheat-api==0.5.7
@@ -2375,7 +2378,7 @@ ultraheat-api==0.5.7
unifi-discovery==1.2.0
# homeassistant.components.upb
-upb-lib==0.5.9
+upb-lib==0.6.0
# homeassistant.components.upcloud
upcloud-api==2.6.0
@@ -2489,7 +2492,7 @@ yalesmartalarmclient==0.4.3
# homeassistant.components.august
# homeassistant.components.yale
# homeassistant.components.yalexs_ble
-yalexs-ble==2.5.6
+yalexs-ble==2.5.7
# homeassistant.components.august
# homeassistant.components.yale
diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt
index 4dd3bc46010..1cf3d91defa 100644
--- a/requirements_test_pre_commit.txt
+++ b/requirements_test_pre_commit.txt
@@ -1,5 +1,5 @@
# Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit
-codespell==2.3.0
+codespell==2.4.1
ruff==0.9.1
yamllint==1.35.1
diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile
index 2c433ba362e..5598c839257 100644
--- a/script/hassfest/docker/Dockerfile
+++ b/script/hassfest/docker/Dockerfile
@@ -14,7 +14,7 @@ WORKDIR "/github/workspace"
COPY . /usr/src/homeassistant
# Uv is only needed during build
-RUN --mount=from=ghcr.io/astral-sh/uv:0.5.21,source=/uv,target=/bin/uv \
+RUN --mount=from=ghcr.io/astral-sh/uv:0.5.27,source=/uv,target=/bin/uv \
# Uv creates a lock file in /tmp
--mount=type=tmpfs,target=/tmp \
# Required for PyTurboJPEG
@@ -24,8 +24,8 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.5.21,source=/uv,target=/bin/uv \
--no-cache \
-c /usr/src/homeassistant/homeassistant/package_constraints.txt \
-r /usr/src/homeassistant/requirements.txt \
- stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.9.1 \
- PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.2.0 home-assistant-intents==2025.1.28 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2
+ stdlib-list==0.10.0 pipdeptree==2.25.0 tqdm==4.67.1 ruff==0.9.1 \
+ PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.2.3 home-assistant-intents==2025.2.5 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2
LABEL "name"="hassfest"
LABEL "maintainer"="Home Assistant "
diff --git a/script/hassfest/quality_scale_validation/runtime_data.py b/script/hassfest/quality_scale_validation/runtime_data.py
index cfc4c5224de..3562d897967 100644
--- a/script/hassfest/quality_scale_validation/runtime_data.py
+++ b/script/hassfest/quality_scale_validation/runtime_data.py
@@ -10,7 +10,7 @@ from homeassistant.const import Platform
from script.hassfest import ast_parse_module
from script.hassfest.model import Config, Integration
-_ANNOTATION_MATCH = re.compile(r"^[A-Za-z]+ConfigEntry$")
+_ANNOTATION_MATCH = re.compile(r"^[A-Za-z][A-Za-z0-9]+ConfigEntry$")
_FUNCTIONS: dict[str, dict[str, int]] = {
"__init__": { # based on ComponentProtocol
"async_migrate_entry": 2,
diff --git a/script/licenses.py b/script/licenses.py
index 464a2fc456b..aa15a58f3bd 100644
--- a/script/licenses.py
+++ b/script/licenses.py
@@ -199,7 +199,6 @@ EXCEPTIONS = {
"pigpio", # https://github.com/joan2937/pigpio/pull/608
"pymitv", # MIT
"pybbox", # https://github.com/HydrelioxGitHub/pybbox/pull/5
- "pyeconet", # https://github.com/w1ll1am23/pyeconet/pull/41
"pysabnzbd", # https://github.com/jeradM/pysabnzbd/pull/6
"pyvera", # https://github.com/maximvelichko/pyvera/pull/164
"repoze.lru",
diff --git a/tests/components/anthropic/test_conversation.py b/tests/components/anthropic/test_conversation.py
index fa5bcb8137a..bb77e2ff926 100644
--- a/tests/components/anthropic/test_conversation.py
+++ b/tests/components/anthropic/test_conversation.py
@@ -236,6 +236,7 @@ async def test_function_call(
mock_tool.async_call.assert_awaited_once_with(
hass,
llm.ToolInput(
+ id="toolu_0123456789AbCdEfGhIjKlM",
tool_name="test_tool",
tool_args={"param1": "test_value"},
),
@@ -373,6 +374,7 @@ async def test_function_exception(
mock_tool.async_call.assert_awaited_once_with(
hass,
llm.ToolInput(
+ id="toolu_0123456789AbCdEfGhIjKlM",
tool_name="test_tool",
tool_args={"param1": "test_value"},
),
diff --git a/tests/components/assist_pipeline/snapshots/test_init.ambr b/tests/components/assist_pipeline/snapshots/test_init.ambr
index 526e1bff151..11e6bc2339a 100644
--- a/tests/components/assist_pipeline/snapshots/test_init.ambr
+++ b/tests/components/assist_pipeline/snapshots/test_init.ambr
@@ -3,6 +3,7 @@
list([
dict({
'data': dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
         'pipeline': <ANY>,
}),
@@ -32,7 +33,7 @@
}),
dict({
'data': dict({
- 'conversation_id': None,
+ 'conversation_id': 'mock-ulid',
'device_id': None,
'engine': 'conversation.home_assistant',
'intent_input': 'test transcript',
@@ -94,6 +95,7 @@
list([
dict({
'data': dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
         'pipeline': <ANY>,
}),
@@ -123,7 +125,7 @@
}),
dict({
'data': dict({
- 'conversation_id': None,
+ 'conversation_id': 'mock-ulid',
'device_id': None,
'engine': 'conversation.home_assistant',
'intent_input': 'test transcript',
@@ -185,6 +187,7 @@
list([
dict({
'data': dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
         'pipeline': <ANY>,
}),
@@ -214,7 +217,7 @@
}),
dict({
'data': dict({
- 'conversation_id': None,
+ 'conversation_id': 'mock-ulid',
'device_id': None,
'engine': 'conversation.home_assistant',
'intent_input': 'test transcript',
@@ -276,6 +279,7 @@
list([
dict({
'data': dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
         'pipeline': <ANY>,
}),
@@ -329,7 +333,7 @@
}),
dict({
'data': dict({
- 'conversation_id': None,
+ 'conversation_id': 'mock-ulid',
'device_id': None,
'engine': 'conversation.home_assistant',
'intent_input': 'test transcript',
@@ -391,6 +395,7 @@
list([
dict({
'data': dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
         'pipeline': <ANY>,
}),
@@ -427,6 +432,7 @@
list([
dict({
'data': dict({
+ 'conversation_id': 'mock-conversation-id',
'language': 'en',
         'pipeline': <ANY>,
}),
@@ -434,7 +440,7 @@
}),
dict({
'data': dict({
- 'conversation_id': None,
+ 'conversation_id': 'mock-conversation-id',
'device_id': None,
'engine': 'conversation.home_assistant',
'intent_input': 'test input',
@@ -478,6 +484,7 @@
list([
dict({
'data': dict({
+ 'conversation_id': 'mock-conversation-id',
'language': 'en',
         'pipeline': <ANY>,
}),
@@ -485,7 +492,7 @@
}),
dict({
'data': dict({
- 'conversation_id': None,
+ 'conversation_id': 'mock-conversation-id',
'device_id': None,
'engine': 'conversation.home_assistant',
'intent_input': 'test input',
@@ -529,6 +536,7 @@
list([
dict({
'data': dict({
+ 'conversation_id': 'mock-conversation-id',
'language': 'en',
         'pipeline': <ANY>,
}),
@@ -536,7 +544,7 @@
}),
dict({
'data': dict({
- 'conversation_id': None,
+ 'conversation_id': 'mock-conversation-id',
'device_id': None,
'engine': 'conversation.home_assistant',
'intent_input': 'test input',
@@ -580,6 +588,7 @@
list([
dict({
'data': dict({
+ 'conversation_id': 'mock-conversation-id',
'language': 'en',
         'pipeline': <ANY>,
}),
diff --git a/tests/components/assist_pipeline/snapshots/test_websocket.ambr b/tests/components/assist_pipeline/snapshots/test_websocket.ambr
index 5f06172404b..f677fa6d8cf 100644
--- a/tests/components/assist_pipeline/snapshots/test_websocket.ambr
+++ b/tests/components/assist_pipeline/snapshots/test_websocket.ambr
@@ -1,6 +1,7 @@
# serializer version: 1
# name: test_audio_pipeline
dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
     'pipeline': <ANY>,
'runner_data': dict({
@@ -31,7 +32,7 @@
# ---
# name: test_audio_pipeline.3
dict({
- 'conversation_id': None,
+ 'conversation_id': 'mock-ulid',
'device_id': None,
'engine': 'conversation.home_assistant',
'intent_input': 'test transcript',
@@ -84,6 +85,7 @@
# ---
# name: test_audio_pipeline_debug
dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
     'pipeline': <ANY>,
'runner_data': dict({
@@ -114,7 +116,7 @@
# ---
# name: test_audio_pipeline_debug.3
dict({
- 'conversation_id': None,
+ 'conversation_id': 'mock-ulid',
'device_id': None,
'engine': 'conversation.home_assistant',
'intent_input': 'test transcript',
@@ -179,6 +181,7 @@
# ---
# name: test_audio_pipeline_with_enhancements
dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
     'pipeline': <ANY>,
'runner_data': dict({
@@ -209,7 +212,7 @@
# ---
# name: test_audio_pipeline_with_enhancements.3
dict({
- 'conversation_id': None,
+ 'conversation_id': 'mock-ulid',
'device_id': None,
'engine': 'conversation.home_assistant',
'intent_input': 'test transcript',
@@ -262,6 +265,7 @@
# ---
# name: test_audio_pipeline_with_wake_word_no_timeout
dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
     'pipeline': <ANY>,
'runner_data': dict({
@@ -314,7 +318,7 @@
# ---
# name: test_audio_pipeline_with_wake_word_no_timeout.5
dict({
- 'conversation_id': None,
+ 'conversation_id': 'mock-ulid',
'device_id': None,
'engine': 'conversation.home_assistant',
'intent_input': 'test transcript',
@@ -367,6 +371,7 @@
# ---
# name: test_audio_pipeline_with_wake_word_timeout
dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
     'pipeline': <ANY>,
'runner_data': dict({
@@ -399,6 +404,7 @@
# ---
# name: test_device_capture
dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
     'pipeline': <ANY>,
'runner_data': dict({
@@ -425,6 +431,7 @@
# ---
# name: test_device_capture_override
dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
     'pipeline': <ANY>,
'runner_data': dict({
@@ -473,6 +480,7 @@
# ---
# name: test_device_capture_queue_full
dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
     'pipeline': <ANY>,
'runner_data': dict({
@@ -512,6 +520,7 @@
# ---
# name: test_intent_failed
dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
     'pipeline': <ANY>,
'runner_data': dict({
@@ -522,7 +531,7 @@
# ---
# name: test_intent_failed.1
dict({
- 'conversation_id': None,
+ 'conversation_id': 'mock-ulid',
'device_id': None,
'engine': 'conversation.home_assistant',
'intent_input': 'Are the lights on?',
@@ -535,6 +544,7 @@
# ---
# name: test_intent_timeout
dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
     'pipeline': <ANY>,
'runner_data': dict({
@@ -545,7 +555,7 @@
# ---
# name: test_intent_timeout.1
dict({
- 'conversation_id': None,
+ 'conversation_id': 'mock-ulid',
'device_id': None,
'engine': 'conversation.home_assistant',
'intent_input': 'Are the lights on?',
@@ -564,6 +574,7 @@
# ---
# name: test_pipeline_empty_tts_output
dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
     'pipeline': <ANY>,
'runner_data': dict({
@@ -574,7 +585,7 @@
# ---
# name: test_pipeline_empty_tts_output.1
dict({
- 'conversation_id': None,
+ 'conversation_id': 'mock-ulid',
'device_id': None,
'engine': 'conversation.home_assistant',
'intent_input': 'never mind',
@@ -611,6 +622,7 @@
# ---
# name: test_stt_cooldown_different_ids
dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
     'pipeline': <ANY>,
'runner_data': dict({
@@ -621,6 +633,7 @@
# ---
# name: test_stt_cooldown_different_ids.1
dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
     'pipeline': <ANY>,
'runner_data': dict({
@@ -631,6 +644,7 @@
# ---
# name: test_stt_cooldown_same_id
dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
     'pipeline': <ANY>,
'runner_data': dict({
@@ -641,6 +655,7 @@
# ---
# name: test_stt_cooldown_same_id.1
dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
     'pipeline': <ANY>,
'runner_data': dict({
@@ -651,6 +666,7 @@
# ---
# name: test_stt_stream_failed
dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
     'pipeline': <ANY>,
'runner_data': dict({
@@ -677,6 +693,7 @@
# ---
# name: test_text_only_pipeline[extra_msg0]
dict({
+ 'conversation_id': 'mock-conversation-id',
'language': 'en',
     'pipeline': <ANY>,
'runner_data': dict({
@@ -723,6 +740,7 @@
# ---
# name: test_text_only_pipeline[extra_msg1]
dict({
+ 'conversation_id': 'mock-conversation-id',
'language': 'en',
     'pipeline': <ANY>,
'runner_data': dict({
@@ -775,6 +793,7 @@
# ---
# name: test_tts_failed
dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
     'pipeline': <ANY>,
'runner_data': dict({
@@ -796,6 +815,7 @@
# ---
# name: test_wake_word_cooldown_different_entities
dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
     'pipeline': <ANY>,
'runner_data': dict({
@@ -806,6 +826,7 @@
# ---
# name: test_wake_word_cooldown_different_entities.1
dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
     'pipeline': <ANY>,
'runner_data': dict({
@@ -857,6 +878,7 @@
# ---
# name: test_wake_word_cooldown_different_ids
dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
     'pipeline': <ANY>,
'runner_data': dict({
@@ -867,6 +889,7 @@
# ---
# name: test_wake_word_cooldown_different_ids.1
dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
     'pipeline': <ANY>,
'runner_data': dict({
@@ -921,6 +944,7 @@
# ---
# name: test_wake_word_cooldown_same_id
dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
     'pipeline': <ANY>,
'runner_data': dict({
@@ -931,6 +955,7 @@
# ---
# name: test_wake_word_cooldown_same_id.1
dict({
+ 'conversation_id': 'mock-ulid',
'language': 'en',
     'pipeline': <ANY>,
'runner_data': dict({
diff --git a/tests/components/assist_pipeline/test_init.py b/tests/components/assist_pipeline/test_init.py
index a2cb9ef382a..1651950c173 100644
--- a/tests/components/assist_pipeline/test_init.py
+++ b/tests/components/assist_pipeline/test_init.py
@@ -1,11 +1,12 @@
"""Test Voice Assistant init."""
import asyncio
+from collections.abc import Generator
from dataclasses import asdict
import itertools as it
from pathlib import Path
import tempfile
-from unittest.mock import ANY, patch
+from unittest.mock import ANY, Mock, patch
import wave
import hass_nabucasa
@@ -41,6 +42,14 @@ from .conftest import (
from tests.typing import ClientSessionGenerator, WebSocketGenerator
+@pytest.fixture(autouse=True)
+def mock_ulid() -> Generator[Mock]:
+ """Mock the ulid of chat sessions."""
+ with patch("homeassistant.helpers.chat_session.ulid_now") as mock_ulid_now:
+ mock_ulid_now.return_value = "mock-ulid"
+ yield mock_ulid_now
+
+
def process_events(events: list[assist_pipeline.PipelineEvent]) -> list[dict]:
"""Process events to remove dynamic values."""
processed = []
@@ -684,7 +693,7 @@ async def test_wake_word_detection_aborted(
pipeline = assist_pipeline.pipeline.async_get_pipeline(hass, pipeline_id)
pipeline_input = assist_pipeline.pipeline.PipelineInput(
- conversation_id=None,
+ conversation_id="mock-conversation-id",
device_id=None,
stt_metadata=stt.SpeechMetadata(
language="",
@@ -771,7 +780,7 @@ async def test_tts_audio_output(
pipeline_input = assist_pipeline.pipeline.PipelineInput(
tts_input="This is a test.",
- conversation_id=None,
+ conversation_id="mock-conversation-id",
device_id=None,
run=assist_pipeline.pipeline.PipelineRun(
hass,
@@ -828,7 +837,7 @@ async def test_tts_wav_preferred_format(
pipeline_input = assist_pipeline.pipeline.PipelineInput(
tts_input="This is a test.",
- conversation_id=None,
+ conversation_id="mock-conversation-id",
device_id=None,
run=assist_pipeline.pipeline.PipelineRun(
hass,
@@ -896,7 +905,7 @@ async def test_tts_dict_preferred_format(
pipeline_input = assist_pipeline.pipeline.PipelineInput(
tts_input="This is a test.",
- conversation_id=None,
+ conversation_id="mock-conversation-id",
device_id=None,
run=assist_pipeline.pipeline.PipelineRun(
hass,
@@ -982,6 +991,7 @@ async def test_sentence_trigger_overrides_conversation_agent(
pipeline_input = assist_pipeline.pipeline.PipelineInput(
intent_input="test trigger sentence",
+ conversation_id="mock-conversation-id",
run=assist_pipeline.pipeline.PipelineRun(
hass,
context=Context(),
@@ -1059,6 +1069,7 @@ async def test_prefer_local_intents(
pipeline_input = assist_pipeline.pipeline.PipelineInput(
intent_input="I'd like to order a stout please",
+ conversation_id="mock-conversation-id",
run=assist_pipeline.pipeline.PipelineRun(
hass,
context=Context(),
@@ -1136,6 +1147,7 @@ async def test_stt_language_used_instead_of_conversation_language(
pipeline_input = assist_pipeline.pipeline.PipelineInput(
intent_input="test input",
+ conversation_id="mock-conversation-id",
run=assist_pipeline.pipeline.PipelineRun(
hass,
context=Context(),
@@ -1210,6 +1222,7 @@ async def test_tts_language_used_instead_of_conversation_language(
pipeline_input = assist_pipeline.pipeline.PipelineInput(
intent_input="test input",
+ conversation_id="mock-conversation-id",
run=assist_pipeline.pipeline.PipelineRun(
hass,
context=Context(),
@@ -1284,6 +1297,7 @@ async def test_pipeline_language_used_instead_of_conversation_language(
pipeline_input = assist_pipeline.pipeline.PipelineInput(
intent_input="test input",
+ conversation_id="mock-conversation-id",
run=assist_pipeline.pipeline.PipelineRun(
hass,
context=Context(),
diff --git a/tests/components/assist_pipeline/test_websocket.py b/tests/components/assist_pipeline/test_websocket.py
index c1caf6f86a4..2cd56f094dd 100644
--- a/tests/components/assist_pipeline/test_websocket.py
+++ b/tests/components/assist_pipeline/test_websocket.py
@@ -2,8 +2,9 @@
import asyncio
import base64
+from collections.abc import Generator
from typing import Any
-from unittest.mock import ANY, patch
+from unittest.mock import ANY, Mock, patch
import pytest
from syrupy.assertion import SnapshotAssertion
@@ -35,6 +36,14 @@ from tests.common import MockConfigEntry
from tests.typing import WebSocketGenerator
+@pytest.fixture(autouse=True)
+def mock_ulid() -> Generator[Mock]:
+ """Mock the ulid of chat sessions."""
+ with patch("homeassistant.helpers.chat_session.ulid_now") as mock_ulid_now:
+ mock_ulid_now.return_value = "mock-ulid"
+ yield mock_ulid_now
+
+
@pytest.mark.parametrize(
"extra_msg",
[
diff --git a/tests/components/assist_satellite/conftest.py b/tests/components/assist_satellite/conftest.py
index 0cc0e94e149..79e4061bacc 100644
--- a/tests/components/assist_satellite/conftest.py
+++ b/tests/components/assist_satellite/conftest.py
@@ -94,7 +94,9 @@ class MockAssistSatellite(AssistSatelliteEntity):
self, start_announcement: AssistSatelliteConfiguration
) -> None:
"""Start a conversation from the satellite."""
- self.start_conversations.append((self._extra_system_prompt, start_announcement))
+ self.start_conversations.append(
+ (self._conversation_id, self._extra_system_prompt, start_announcement)
+ )
@pytest.fixture
diff --git a/tests/components/assist_satellite/test_entity.py b/tests/components/assist_satellite/test_entity.py
index 46facb80844..b3437bf5c5d 100644
--- a/tests/components/assist_satellite/test_entity.py
+++ b/tests/components/assist_satellite/test_entity.py
@@ -1,7 +1,8 @@
"""Test the Assist Satellite entity."""
import asyncio
-from unittest.mock import patch
+from collections.abc import Generator
+from unittest.mock import Mock, patch
import pytest
@@ -31,6 +32,14 @@ from . import ENTITY_ID
from .conftest import MockAssistSatellite
+@pytest.fixture
+def mock_chat_session_conversation_id() -> Generator[Mock]:
+ """Mock the ulid library."""
+ with patch("homeassistant.helpers.chat_session.ulid_now") as mock_ulid_now:
+ mock_ulid_now.return_value = "mock-conversation-id"
+ yield mock_ulid_now
+
+
@pytest.fixture(autouse=True)
async def set_pipeline_tts(hass: HomeAssistant, init_components: ConfigEntry) -> None:
"""Set up a pipeline with a TTS engine."""
@@ -487,6 +496,7 @@ async def test_vad_sensitivity_entity_not_found(
"extra_system_prompt": "Better system prompt",
},
(
+ "mock-conversation-id",
"Better system prompt",
AssistSatelliteAnnouncement(
message="Hello",
@@ -502,6 +512,7 @@ async def test_vad_sensitivity_entity_not_found(
"start_media_id": "media-source://given",
},
(
+ "mock-conversation-id",
"Hello",
AssistSatelliteAnnouncement(
message="Hello",
@@ -514,6 +525,7 @@ async def test_vad_sensitivity_entity_not_found(
(
{"start_media_id": "http://example.com/given.mp3"},
(
+ "mock-conversation-id",
None,
AssistSatelliteAnnouncement(
message="",
@@ -525,6 +537,7 @@ async def test_vad_sensitivity_entity_not_found(
),
],
)
+@pytest.mark.usefixtures("mock_chat_session_conversation_id")
async def test_start_conversation(
hass: HomeAssistant,
init_components: ConfigEntry,
diff --git a/tests/components/backup/common.py b/tests/components/backup/common.py
index a7888dbd08c..1e7278134d4 100644
--- a/tests/components/backup/common.py
+++ b/tests/components/backup/common.py
@@ -5,7 +5,7 @@ from __future__ import annotations
from collections.abc import AsyncIterator, Callable, Coroutine, Iterable
from pathlib import Path
from typing import Any
-from unittest.mock import ANY, AsyncMock, Mock, patch
+from unittest.mock import AsyncMock, Mock, patch
from homeassistant.components.backup import (
DOMAIN,
@@ -29,7 +29,7 @@ TEST_BACKUP_ABC123 = AgentBackup(
backup_id="abc123",
database_included=True,
date="1970-01-01T00:00:00.000Z",
- extra_metadata={"instance_id": ANY, "with_automatic_settings": True},
+ extra_metadata={"instance_id": "our_uuid", "with_automatic_settings": True},
folders=[Folder.MEDIA, Folder.SHARE],
homeassistant_included=True,
homeassistant_version="2024.12.0",
diff --git a/tests/components/backup/conftest.py b/tests/components/backup/conftest.py
index d0d9ac7e0e1..eb38399eb79 100644
--- a/tests/components/backup/conftest.py
+++ b/tests/components/backup/conftest.py
@@ -18,6 +18,16 @@ from .common import TEST_BACKUP_PATH_ABC123, TEST_BACKUP_PATH_DEF456
from tests.common import get_fixture_path
+@pytest.fixture(name="instance_id", autouse=True)
+def instance_id_fixture(hass: HomeAssistant) -> Generator[None]:
+ """Mock instance ID."""
+ with patch(
+ "homeassistant.components.backup.manager.instance_id.async_get",
+ return_value="our_uuid",
+ ):
+ yield
+
+
@pytest.fixture(name="mocked_json_bytes")
def mocked_json_bytes_fixture() -> Generator[Mock]:
"""Mock json_bytes."""
diff --git a/tests/components/backup/snapshots/test_backup.ambr b/tests/components/backup/snapshots/test_backup.ambr
index 68b00632a6b..28ee9b834c1 100644
--- a/tests/components/backup/snapshots/test_backup.ambr
+++ b/tests/components/backup/snapshots/test_backup.ambr
@@ -71,6 +71,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
+ 'extra_metadata': dict({
+ 'instance_id': 'our_uuid',
+ 'with_automatic_settings': True,
+ }),
'failed_agent_ids': list([
]),
'folders': list([
@@ -94,6 +98,10 @@
'backup_id': 'def456',
'database_included': False,
'date': '1980-01-01T00:00:00.000Z',
+ 'extra_metadata': dict({
+ 'instance_id': 'unknown_uuid',
+ 'with_automatic_settings': True,
+ }),
'failed_agent_ids': list([
]),
'folders': list([
diff --git a/tests/components/backup/snapshots/test_websocket.ambr b/tests/components/backup/snapshots/test_websocket.ambr
index 08c19906241..421432fb66e 100644
--- a/tests/components/backup/snapshots/test_websocket.ambr
+++ b/tests/components/backup/snapshots/test_websocket.ambr
@@ -229,6 +229,28 @@
'type': 'result',
})
# ---
+# name: test_can_decrypt_on_download_with_agent_error[BackupAgentError]
+ dict({
+ 'error': dict({
+ 'code': 'home_assistant_error',
+ 'message': 'Unknown error',
+ }),
+ 'id': 1,
+ 'success': False,
+ 'type': 'result',
+ })
+# ---
+# name: test_can_decrypt_on_download_with_agent_error[BackupNotFound]
+ dict({
+ 'error': dict({
+ 'code': 'backup_not_found',
+ 'message': 'Backup not found',
+ }),
+ 'id': 1,
+ 'success': False,
+ 'type': 'result',
+ })
+# ---
# name: test_config_info[storage_data0]
dict({
'id': 1,
@@ -3040,6 +3062,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
+ 'extra_metadata': dict({
+ 'instance_id': 'our_uuid',
+ 'with_automatic_settings': True,
+ }),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3117,6 +3143,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
+ 'extra_metadata': dict({
+ 'instance_id': 'our_uuid',
+ 'with_automatic_settings': True,
+ }),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3175,6 +3205,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
+ 'extra_metadata': dict({
+ 'instance_id': 'our_uuid',
+ 'with_automatic_settings': True,
+ }),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3217,6 +3251,10 @@
'backup_id': 'def456',
'database_included': False,
'date': '1980-01-01T00:00:00.000Z',
+ 'extra_metadata': dict({
+ 'instance_id': 'unknown_uuid',
+ 'with_automatic_settings': True,
+ }),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3270,6 +3308,10 @@
'backup_id': 'def456',
'database_included': False,
'date': '1980-01-01T00:00:00.000Z',
+ 'extra_metadata': dict({
+ 'instance_id': 'unknown_uuid',
+ 'with_automatic_settings': True,
+ }),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3321,6 +3363,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
+ 'extra_metadata': dict({
+ 'instance_id': 'our_uuid',
+ 'with_automatic_settings': True,
+ }),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3379,6 +3425,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
+ 'extra_metadata': dict({
+ 'instance_id': 'our_uuid',
+ 'with_automatic_settings': True,
+ }),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3438,6 +3488,8 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00Z',
+ 'extra_metadata': dict({
+ }),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3497,6 +3549,8 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00Z',
+ 'extra_metadata': dict({
+ }),
'failed_agent_ids': list([
'test.remote',
]),
@@ -3556,6 +3610,8 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00Z',
+ 'extra_metadata': dict({
+ }),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3614,6 +3670,8 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00Z',
+ 'extra_metadata': dict({
+ }),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3672,6 +3730,8 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00Z',
+ 'extra_metadata': dict({
+ }),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3730,6 +3790,8 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00Z',
+ 'extra_metadata': dict({
+ }),
'failed_agent_ids': list([
'test.remote',
]),
@@ -3789,6 +3851,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
+ 'extra_metadata': dict({
+ 'instance_id': 'our_uuid',
+ 'with_automatic_settings': True,
+ }),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3828,6 +3894,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
+ 'extra_metadata': dict({
+ 'instance_id': 'our_uuid',
+ 'with_automatic_settings': True,
+ }),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3883,6 +3953,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
+ 'extra_metadata': dict({
+ 'instance_id': 'our_uuid',
+ 'with_automatic_settings': True,
+ }),
'failed_agent_ids': list([
]),
'folders': list([
@@ -3923,6 +3997,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
+ 'extra_metadata': dict({
+ 'instance_id': 'our_uuid',
+ 'with_automatic_settings': True,
+ }),
'failed_agent_ids': list([
]),
'folders': list([
@@ -4199,6 +4277,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
+ 'extra_metadata': dict({
+ 'instance_id': 'our_uuid',
+ 'with_automatic_settings': True,
+ }),
'failed_agent_ids': list([
]),
'folders': list([
@@ -4246,6 +4328,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
+ 'extra_metadata': dict({
+ 'instance_id': 'our_uuid',
+ 'with_automatic_settings': True,
+ }),
'failed_agent_ids': list([
]),
'folders': list([
@@ -4297,6 +4383,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
+ 'extra_metadata': dict({
+ 'instance_id': 'our_uuid',
+ 'with_automatic_settings': True,
+ }),
'failed_agent_ids': list([
]),
'folders': list([
@@ -4339,6 +4429,10 @@
'backup_id': 'def456',
'database_included': False,
'date': '1980-01-01T00:00:00.000Z',
+ 'extra_metadata': dict({
+ 'instance_id': 'unknown_uuid',
+ 'with_automatic_settings': True,
+ }),
'failed_agent_ids': list([
]),
'folders': list([
@@ -4367,6 +4461,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
+ 'extra_metadata': dict({
+ 'instance_id': 'our_uuid',
+ 'with_automatic_settings': True,
+ }),
'failed_agent_ids': list([
]),
'folders': list([
@@ -4415,6 +4513,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
+ 'extra_metadata': dict({
+ 'instance_id': 'our_uuid',
+ 'with_automatic_settings': True,
+ }),
'failed_agent_ids': list([
]),
'folders': list([
diff --git a/tests/components/backup/test_backup.py b/tests/components/backup/test_backup.py
index c441cae292c..38b61ce65ea 100644
--- a/tests/components/backup/test_backup.py
+++ b/tests/components/backup/test_backup.py
@@ -103,9 +103,7 @@ async def test_upload(
assert resp.status == 201
assert open_mock.call_count == 1
assert move_mock.call_count == 1
- assert (
- move_mock.mock_calls[0].args[1].name == "Test_-_1970-01-01_00.00_00000000.tar"
- )
+ assert move_mock.mock_calls[0].args[1].name == "Test_1970-01-01_00.00_00000000.tar"
@pytest.mark.usefixtures("read_backup")
diff --git a/tests/components/backup/test_http.py b/tests/components/backup/test_http.py
index aac39c04d31..24fd15fc4fe 100644
--- a/tests/components/backup/test_http.py
+++ b/tests/components/backup/test_http.py
@@ -11,7 +11,13 @@ from unittest.mock import patch
from aiohttp import web
import pytest
-from homeassistant.components.backup import AddonInfo, AgentBackup, Folder
+from homeassistant.components.backup import (
+ AddonInfo,
+ AgentBackup,
+ BackupAgentError,
+ BackupNotFound,
+ Folder,
+)
from homeassistant.components.backup.const import DATA_MANAGER, DOMAIN
from homeassistant.core import HomeAssistant
@@ -141,6 +147,50 @@ async def test_downloading_remote_encrypted_backup(
await _test_downloading_encrypted_backup(hass_client, "domain.test")
+@pytest.mark.parametrize(
+ ("error", "status"),
+ [
+ (BackupAgentError, 500),
+ (BackupNotFound, 404),
+ ],
+)
+@patch.object(BackupAgentTest, "async_download_backup")
+async def test_downloading_remote_encrypted_backup_with_error(
+ download_mock,
+ hass: HomeAssistant,
+ hass_client: ClientSessionGenerator,
+ error: Exception,
+ status: int,
+) -> None:
+ """Test downloading a local backup file."""
+ await setup_backup_integration(hass)
+ hass.data[DATA_MANAGER].backup_agents["domain.test"] = BackupAgentTest(
+ "test",
+ [
+ AgentBackup(
+ addons=[AddonInfo(name="Test", slug="test", version="1.0.0")],
+ backup_id="abc123",
+ database_included=True,
+ date="1970-01-01T00:00:00Z",
+ extra_metadata={},
+ folders=[Folder.MEDIA, Folder.SHARE],
+ homeassistant_included=True,
+ homeassistant_version="2024.12.0",
+ name="Test",
+ protected=True,
+ size=13,
+ )
+ ],
+ )
+
+ download_mock.side_effect = error
+ client = await hass_client()
+ resp = await client.get(
+ "/api/backup/download/abc123?agent_id=domain.test&password=blah"
+ )
+ assert resp.status == status
+
+
async def _test_downloading_encrypted_backup(
hass_client: ClientSessionGenerator,
agent_id: str,
diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py
index b98cec47e8d..bdcb9f068b6 100644
--- a/tests/components/backup/test_manager.py
+++ b/tests/components/backup/test_manager.py
@@ -46,6 +46,7 @@ from homeassistant.components.backup.manager import (
RestoreBackupState,
WrittenBackup,
)
+from homeassistant.components.backup.util import password_to_key
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import issue_registry as ir
@@ -136,7 +137,7 @@ async def test_create_backup_service(
agent_ids=["backup.local"],
backup_name="Custom backup 2025.1.0",
extra_metadata={
- "instance_id": hass.data["core.uuid"],
+ "instance_id": "our_uuid",
"with_automatic_settings": False,
},
include_addons=None,
@@ -359,8 +360,14 @@ async def test_create_backup_when_busy(
@pytest.mark.parametrize(
("parameters", "expected_error"),
[
- ({"agent_ids": []}, "At least one agent must be selected"),
- ({"agent_ids": ["non_existing"]}, "Invalid agents selected: ['non_existing']"),
+ (
+ {"agent_ids": []},
+ "At least one available backup agent must be selected, got []",
+ ),
+ (
+ {"agent_ids": ["non_existing"]},
+ "At least one available backup agent must be selected, got ['non_existing']",
+ ),
(
{"include_addons": ["ssl"], "include_all_addons": True},
"Cannot include all addons and specify specific addons",
@@ -410,6 +417,8 @@ async def test_create_backup_wrong_parameters(
"name",
"expected_name",
"expected_filename",
+ "expected_agent_ids",
+ "expected_failed_agent_ids",
"temp_file_unlink_call_count",
),
[
@@ -418,7 +427,9 @@ async def test_create_backup_wrong_parameters(
"backups",
None,
"Custom backup 2025.1.0",
- "Custom_backup_2025.1.0_-_2025-01-30_05.42_12345678.tar",
+ "Custom_backup_2025.1.0_2025-01-30_05.42_12345678.tar",
+ [LOCAL_AGENT_ID],
+ [],
0,
),
(
@@ -427,6 +438,8 @@ async def test_create_backup_wrong_parameters(
None,
"Custom backup 2025.1.0",
"abc123.tar", # We don't use friendly name for temporary backups
+ ["test.remote"],
+ [],
1,
),
(
@@ -434,7 +447,9 @@ async def test_create_backup_wrong_parameters(
"backups",
None,
"Custom backup 2025.1.0",
- "Custom_backup_2025.1.0_-_2025-01-30_05.42_12345678.tar",
+ "Custom_backup_2025.1.0_2025-01-30_05.42_12345678.tar",
+ [LOCAL_AGENT_ID, "test.remote"],
+ [],
0,
),
(
@@ -442,7 +457,9 @@ async def test_create_backup_wrong_parameters(
"backups",
"custom_name",
"custom_name",
- "custom_name_-_2025-01-30_05.42_12345678.tar",
+ "custom_name_2025-01-30_05.42_12345678.tar",
+ [LOCAL_AGENT_ID],
+ [],
0,
),
(
@@ -451,6 +468,8 @@ async def test_create_backup_wrong_parameters(
"custom_name",
"custom_name",
"abc123.tar", # We don't use friendly name for temporary backups
+ ["test.remote"],
+ [],
1,
),
(
@@ -458,7 +477,20 @@ async def test_create_backup_wrong_parameters(
"backups",
"custom_name",
"custom_name",
- "custom_name_-_2025-01-30_05.42_12345678.tar",
+ "custom_name_2025-01-30_05.42_12345678.tar",
+ [LOCAL_AGENT_ID, "test.remote"],
+ [],
+ 0,
+ ),
+ (
+ # Test we create a backup when at least one agent is available
+ [LOCAL_AGENT_ID, "test.unavailable"],
+ "backups",
+ "custom_name",
+ "custom_name",
+ "custom_name_2025-01-30_05.42_12345678.tar",
+ [LOCAL_AGENT_ID],
+ ["test.unavailable"],
0,
),
],
@@ -486,6 +518,8 @@ async def test_initiate_backup(
name: str | None,
expected_name: str,
expected_filename: str,
+ expected_agent_ids: list[str],
+ expected_failed_agent_ids: list[str],
temp_file_unlink_call_count: int,
) -> None:
"""Test generate backup."""
@@ -595,7 +629,7 @@ async def test_initiate_backup(
"compressed": True,
"date": ANY,
"extra": {
- "instance_id": hass.data["core.uuid"],
+ "instance_id": "our_uuid",
"with_automatic_settings": False,
},
"homeassistant": {
@@ -620,12 +654,13 @@ async def test_initiate_backup(
"addons": [],
"agents": {
agent_id: {"protected": bool(password), "size": ANY}
- for agent_id in agent_ids
+ for agent_id in expected_agent_ids
},
"backup_id": backup_id,
"database_included": include_database,
"date": ANY,
- "failed_agent_ids": [],
+ "extra_metadata": {"instance_id": "our_uuid", "with_automatic_settings": False},
+ "failed_agent_ids": expected_failed_agent_ids,
"folders": [],
"homeassistant_included": True,
"homeassistant_version": "2025.1.0",
@@ -675,6 +710,10 @@ async def test_initiate_backup_with_agent_error(
"backup_id": "backup1",
"database_included": True,
"date": "1970-01-01T00:00:00.000Z",
+ "extra_metadata": {
+ "instance_id": "our_uuid",
+ "with_automatic_settings": True,
+ },
"failed_agent_ids": [],
"folders": [
"media",
@@ -691,6 +730,10 @@ async def test_initiate_backup_with_agent_error(
"backup_id": "backup2",
"database_included": False,
"date": "1980-01-01T00:00:00.000Z",
+ "extra_metadata": {
+ "instance_id": "unknown_uuid",
+ "with_automatic_settings": True,
+ },
"failed_agent_ids": [],
"folders": [
"media",
@@ -713,6 +756,10 @@ async def test_initiate_backup_with_agent_error(
"backup_id": "backup3",
"database_included": True,
"date": "1970-01-01T00:00:00.000Z",
+ "extra_metadata": {
+ "instance_id": "our_uuid",
+ "with_automatic_settings": True,
+ },
"failed_agent_ids": [],
"folders": [
"media",
@@ -836,6 +883,7 @@ async def test_initiate_backup_with_agent_error(
"backup_id": "abc123",
"database_included": True,
"date": ANY,
+ "extra_metadata": {"instance_id": "our_uuid", "with_automatic_settings": False},
"failed_agent_ids": ["test.remote"],
"folders": [],
"homeassistant_included": True,
@@ -945,6 +993,7 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
@pytest.mark.parametrize(
(
+ "automatic_agents",
"create_backup_command",
"create_backup_side_effect",
"agent_upload_side_effect",
@@ -954,6 +1003,7 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
[
# No error
(
+ ["test.remote"],
{"type": "backup/generate", "agent_ids": ["test.remote"]},
None,
None,
@@ -961,14 +1011,38 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
{},
),
(
+ ["test.remote"],
{"type": "backup/generate_with_automatic_settings"},
None,
None,
True,
{},
),
+ # One agent unavailable
+ (
+ ["test.remote", "test.unknown"],
+ {"type": "backup/generate", "agent_ids": ["test.remote", "test.unknown"]},
+ None,
+ None,
+ True,
+ {},
+ ),
+ (
+ ["test.remote", "test.unknown"],
+ {"type": "backup/generate_with_automatic_settings"},
+ None,
+ None,
+ True,
+ {
+ (DOMAIN, "automatic_backup_failed"): {
+ "translation_key": "automatic_backup_failed_upload_agents",
+ "translation_placeholders": {"failed_agents": "test.unknown"},
+ }
+ },
+ ),
# Error raised in async_initiate_backup
(
+ ["test.remote"],
{"type": "backup/generate", "agent_ids": ["test.remote"]},
Exception("Boom!"),
None,
@@ -976,6 +1050,7 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
{},
),
(
+ ["test.remote"],
{"type": "backup/generate_with_automatic_settings"},
Exception("Boom!"),
None,
@@ -989,6 +1064,7 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
),
# Error raised when awaiting the backup task
(
+ ["test.remote"],
{"type": "backup/generate", "agent_ids": ["test.remote"]},
delayed_boom,
None,
@@ -996,6 +1072,7 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
{},
),
(
+ ["test.remote"],
{"type": "backup/generate_with_automatic_settings"},
delayed_boom,
None,
@@ -1009,6 +1086,7 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
),
# Error raised in async_upload_backup
(
+ ["test.remote"],
{"type": "backup/generate", "agent_ids": ["test.remote"]},
None,
Exception("Boom!"),
@@ -1016,6 +1094,7 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
{},
),
(
+ ["test.remote"],
{"type": "backup/generate_with_automatic_settings"},
None,
Exception("Boom!"),
@@ -1033,6 +1112,7 @@ async def test_create_backup_failure_raises_issue(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
create_backup: AsyncMock,
+ automatic_agents: list[str],
create_backup_command: dict[str, Any],
create_backup_side_effect: Exception | None,
agent_upload_side_effect: Exception | None,
@@ -1063,7 +1143,7 @@ async def test_create_backup_failure_raises_issue(
await ws_client.send_json_auto_id(
{
"type": "backup/config/update",
- "create_backup": {"agent_ids": ["test.remote"]},
+ "create_backup": {"agent_ids": automatic_agents},
}
)
result = await ws_client.receive_json()
@@ -1597,7 +1677,7 @@ async def test_exception_platform_post(hass: HomeAssistant) -> None:
"agent_id=backup.local&agent_id=test.remote",
2,
1,
- ["Test_-_1970-01-01_00.00_00000000.tar"],
+ ["Test_1970-01-01_00.00_00000000.tar"],
{TEST_BACKUP_ABC123.backup_id: TEST_BACKUP_ABC123},
b"test",
0,
@@ -1606,7 +1686,7 @@ async def test_exception_platform_post(hass: HomeAssistant) -> None:
"agent_id=backup.local",
1,
1,
- ["Test_-_1970-01-01_00.00_00000000.tar"],
+ ["Test_1970-01-01_00.00_00000000.tar"],
{},
None,
0,
@@ -1770,6 +1850,10 @@ async def test_receive_backup_agent_error(
"backup_id": "backup1",
"database_included": True,
"date": "1970-01-01T00:00:00.000Z",
+ "extra_metadata": {
+ "instance_id": "our_uuid",
+ "with_automatic_settings": True,
+ },
"failed_agent_ids": [],
"folders": [
"media",
@@ -1786,6 +1870,10 @@ async def test_receive_backup_agent_error(
"backup_id": "backup2",
"database_included": False,
"date": "1980-01-01T00:00:00.000Z",
+ "extra_metadata": {
+ "instance_id": "unknown_uuid",
+ "with_automatic_settings": True,
+ },
"failed_agent_ids": [],
"folders": [
"media",
@@ -1808,6 +1896,10 @@ async def test_receive_backup_agent_error(
"backup_id": "backup3",
"database_included": True,
"date": "1970-01-01T00:00:00.000Z",
+ "extra_metadata": {
+ "instance_id": "our_uuid",
+ "with_automatic_settings": True,
+ },
"failed_agent_ids": [],
"folders": [
"media",
@@ -3116,17 +3208,21 @@ async def test_restore_backup_file_error(
@pytest.mark.parametrize(
- ("commands", "password", "protected_backup"),
+ ("commands", "agent_ids", "password", "protected_backup", "inner_tar_key"),
[
(
[],
+ ["backup.local", "test.remote"],
None,
{"backup.local": False, "test.remote": False},
+ None,
),
(
[],
+ ["backup.local", "test.remote"],
"hunter2",
{"backup.local": True, "test.remote": True},
+ password_to_key("hunter2"),
),
(
[
@@ -3138,8 +3234,10 @@ async def test_restore_backup_file_error(
},
}
],
+ ["backup.local", "test.remote"],
"hunter2",
{"backup.local": False, "test.remote": False},
+ None, # None of the agents are protected
),
(
[
@@ -3151,8 +3249,10 @@ async def test_restore_backup_file_error(
},
}
],
+ ["backup.local", "test.remote"],
"hunter2",
{"backup.local": False, "test.remote": True},
+ None, # Local agent is not protected
),
(
[
@@ -3164,8 +3264,10 @@ async def test_restore_backup_file_error(
},
}
],
+ ["backup.local", "test.remote"],
"hunter2",
{"backup.local": True, "test.remote": False},
+ password_to_key("hunter2"), # Local agent is protected
),
(
[
@@ -3177,8 +3279,10 @@ async def test_restore_backup_file_error(
},
}
],
+ ["backup.local", "test.remote"],
"hunter2",
{"backup.local": True, "test.remote": True},
+ password_to_key("hunter2"),
),
(
[
@@ -3190,8 +3294,40 @@ async def test_restore_backup_file_error(
},
}
],
+ ["backup.local", "test.remote"],
None,
{"backup.local": False, "test.remote": False},
+ None, # No password supplied
+ ),
+ (
+ [
+ {
+ "type": "backup/config/update",
+ "agents": {
+ "backup.local": {"protected": False},
+ "test.remote": {"protected": True},
+ },
+ }
+ ],
+ ["test.remote"],
+ "hunter2",
+ {"test.remote": True},
+ password_to_key("hunter2"),
+ ),
+ (
+ [
+ {
+ "type": "backup/config/update",
+ "agents": {
+ "backup.local": {"protected": False},
+ "test.remote": {"protected": False},
+ },
+ }
+ ],
+ ["test.remote"],
+ "hunter2",
+ {"test.remote": False},
+ password_to_key("hunter2"), # Temporary backup protected when password set
),
],
)
@@ -3200,13 +3336,15 @@ async def test_initiate_backup_per_agent_encryption(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
generate_backup_id: MagicMock,
+ mocked_tarfile: Mock,
path_glob: MagicMock,
commands: dict[str, Any],
+ agent_ids: list[str],
password: str | None,
protected_backup: dict[str, bool],
+ inner_tar_key: bytes | None,
) -> None:
"""Test generate backup where encryption is selectively set on agents."""
- agent_ids = ["backup.local", "test.remote"]
local_agent = local_backup_platform.CoreLocalBackupAgent(hass)
remote_agent = BackupAgentTest("remote", backups=[])
@@ -3282,6 +3420,10 @@ async def test_initiate_backup_per_agent_encryption(
await hass.async_block_till_done()
+ mocked_tarfile.return_value.create_inner_tar.assert_called_once_with(
+ ANY, gzip=True, key=inner_tar_key
+ )
+
result = await ws_client.receive_json()
assert result["event"] == {
"manager_state": BackupManagerState.CREATE_BACKUP,
@@ -3325,6 +3467,7 @@ async def test_initiate_backup_per_agent_encryption(
"backup_id": backup_id,
"database_included": True,
"date": ANY,
+ "extra_metadata": {"instance_id": "our_uuid", "with_automatic_settings": False},
"failed_agent_ids": [],
"folders": [],
"homeassistant_included": True,
diff --git a/tests/components/backup/test_util.py b/tests/components/backup/test_util.py
index 3bcb53f7c86..504e0d56d58 100644
--- a/tests/components/backup/test_util.py
+++ b/tests/components/backup/test_util.py
@@ -2,6 +2,7 @@
from __future__ import annotations
+import asyncio
from collections.abc import AsyncIterator
import dataclasses
import tarfile
@@ -189,6 +190,73 @@ async def test_decrypted_backup_streamer(hass: HomeAssistant) -> None:
assert decrypted_output == decrypted_backup_data + expected_padding
+async def test_decrypted_backup_streamer_interrupt_stuck_reader(
+ hass: HomeAssistant,
+) -> None:
+ """Test the decrypted backup streamer."""
+ encrypted_backup_path = get_fixture_path("test_backups/c0cb53bd.tar", DOMAIN)
+ backup = AgentBackup(
+ addons=["addon_1", "addon_2"],
+ backup_id="1234",
+ date="2024-12-02T07:23:58.261875-05:00",
+ database_included=False,
+ extra_metadata={},
+ folders=[],
+ homeassistant_included=True,
+ homeassistant_version="2024.12.0.dev0",
+ name="test",
+ protected=True,
+ size=encrypted_backup_path.stat().st_size,
+ )
+
+ stuck = asyncio.Event()
+
+ async def send_backup() -> AsyncIterator[bytes]:
+ f = encrypted_backup_path.open("rb")
+ while chunk := f.read(1024):
+ await stuck.wait()
+ yield chunk
+
+ async def open_backup() -> AsyncIterator[bytes]:
+ return send_backup()
+
+ decryptor = DecryptedBackupStreamer(hass, backup, open_backup, "hunter2")
+ await decryptor.open_stream()
+ await decryptor.wait()
+
+
+async def test_decrypted_backup_streamer_interrupt_stuck_writer(
+ hass: HomeAssistant,
+) -> None:
+ """Test the decrypted backup streamer."""
+ encrypted_backup_path = get_fixture_path("test_backups/c0cb53bd.tar", DOMAIN)
+ backup = AgentBackup(
+ addons=["addon_1", "addon_2"],
+ backup_id="1234",
+ date="2024-12-02T07:23:58.261875-05:00",
+ database_included=False,
+ extra_metadata={},
+ folders=[],
+ homeassistant_included=True,
+ homeassistant_version="2024.12.0.dev0",
+ name="test",
+ protected=True,
+ size=encrypted_backup_path.stat().st_size,
+ )
+
+ async def send_backup() -> AsyncIterator[bytes]:
+ f = encrypted_backup_path.open("rb")
+ while chunk := f.read(1024):
+ yield chunk
+
+ async def open_backup() -> AsyncIterator[bytes]:
+ return send_backup()
+
+ decryptor = DecryptedBackupStreamer(hass, backup, open_backup, "hunter2")
+ await decryptor.open_stream()
+ await decryptor.wait()
+
+
async def test_decrypted_backup_streamer_wrong_password(hass: HomeAssistant) -> None:
"""Test the decrypted backup streamer with wrong password."""
encrypted_backup_path = get_fixture_path("test_backups/c0cb53bd.tar", DOMAIN)
@@ -279,6 +347,77 @@ async def test_encrypted_backup_streamer(hass: HomeAssistant) -> None:
assert encrypted_output == encrypted_backup_data + expected_padding
+async def test_encrypted_backup_streamer_interrupt_stuck_reader(
+ hass: HomeAssistant,
+) -> None:
+ """Test the encrypted backup streamer."""
+ decrypted_backup_path = get_fixture_path(
+ "test_backups/c0cb53bd.tar.decrypted", DOMAIN
+ )
+ backup = AgentBackup(
+ addons=["addon_1", "addon_2"],
+ backup_id="1234",
+ date="2024-12-02T07:23:58.261875-05:00",
+ database_included=False,
+ extra_metadata={},
+ folders=[],
+ homeassistant_included=True,
+ homeassistant_version="2024.12.0.dev0",
+ name="test",
+ protected=False,
+ size=decrypted_backup_path.stat().st_size,
+ )
+
+ stuck = asyncio.Event()
+
+ async def send_backup() -> AsyncIterator[bytes]:
+ f = decrypted_backup_path.open("rb")
+ while chunk := f.read(1024):
+ await stuck.wait()
+ yield chunk
+
+ async def open_backup() -> AsyncIterator[bytes]:
+ return send_backup()
+
+ decryptor = EncryptedBackupStreamer(hass, backup, open_backup, "hunter2")
+ await decryptor.open_stream()
+ await decryptor.wait()
+
+
+async def test_encrypted_backup_streamer_interrupt_stuck_writer(
+ hass: HomeAssistant,
+) -> None:
+ """Test the encrypted backup streamer."""
+ decrypted_backup_path = get_fixture_path(
+ "test_backups/c0cb53bd.tar.decrypted", DOMAIN
+ )
+ backup = AgentBackup(
+ addons=["addon_1", "addon_2"],
+ backup_id="1234",
+ date="2024-12-02T07:23:58.261875-05:00",
+ database_included=False,
+ extra_metadata={},
+ folders=[],
+ homeassistant_included=True,
+ homeassistant_version="2024.12.0.dev0",
+ name="test",
+ protected=True,
+ size=decrypted_backup_path.stat().st_size,
+ )
+
+ async def send_backup() -> AsyncIterator[bytes]:
+ f = decrypted_backup_path.open("rb")
+ while chunk := f.read(1024):
+ yield chunk
+
+ async def open_backup() -> AsyncIterator[bytes]:
+ return send_backup()
+
+ decryptor = EncryptedBackupStreamer(hass, backup, open_backup, "hunter2")
+ await decryptor.open_stream()
+ await decryptor.wait()
+
+
async def test_encrypted_backup_streamer_random_nonce(hass: HomeAssistant) -> None:
"""Test the encrypted backup streamer."""
decrypted_backup_path = get_fixture_path(
@@ -390,10 +529,10 @@ async def test_encrypted_backup_streamer_error(hass: HomeAssistant) -> None:
@pytest.mark.parametrize(
("name", "resulting_filename"),
[
- ("test", "test_-_2025-01-30_13.42_12345678.tar"),
- (" leading spaces", "leading_spaces_-_2025-01-30_13.42_12345678.tar"),
- ("trailing spaces ", "trailing_spaces_-_2025-01-30_13.42_12345678.tar"),
- ("double spaces ", "double_spaces_-_2025-01-30_13.42_12345678.tar"),
+ ("test", "test_2025-01-30_13.42_12345678.tar"),
+ (" leading spaces", "leading_spaces_2025-01-30_13.42_12345678.tar"),
+ ("trailing spaces ", "trailing_spaces_2025-01-30_13.42_12345678.tar"),
+ ("double spaces ", "double_spaces_2025-01-30_13.42_12345678.tar"),
],
)
def test_suggested_filename(name: str, resulting_filename: str) -> None:
diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py
index 613c0b69b6b..5af6d595938 100644
--- a/tests/components/backup/test_websocket.py
+++ b/tests/components/backup/test_websocket.py
@@ -12,6 +12,7 @@ from homeassistant.components.backup import (
AgentBackup,
BackupAgentError,
BackupAgentPlatformProtocol,
+ BackupNotFound,
BackupReaderWriterError,
Folder,
store,
@@ -2967,3 +2968,39 @@ async def test_can_decrypt_on_download(
}
)
assert await client.receive_json() == snapshot
+
+
+@pytest.mark.parametrize(
+ "error",
+ [
+ BackupAgentError,
+ BackupNotFound,
+ ],
+)
+@pytest.mark.usefixtures("mock_backups")
+async def test_can_decrypt_on_download_with_agent_error(
+ hass: HomeAssistant,
+ hass_ws_client: WebSocketGenerator,
+ snapshot: SnapshotAssertion,
+ error: Exception,
+) -> None:
+ """Test can decrypt on download."""
+
+ await setup_backup_integration(
+ hass,
+ with_hassio=False,
+ backups={"test.remote": [TEST_BACKUP_ABC123]},
+ remote_agents=["remote"],
+ )
+ client = await hass_ws_client(hass)
+
+ with patch.object(BackupAgentTest, "async_download_backup", side_effect=error):
+ await client.send_json_auto_id(
+ {
+ "type": "backup/can_decrypt_on_download",
+ "backup_id": TEST_BACKUP_ABC123.backup_id,
+ "agent_id": "test.remote",
+ "password": "hunter2",
+ }
+ )
+ assert await client.receive_json() == snapshot
diff --git a/tests/components/bluetooth/test_config_flow.py b/tests/components/bluetooth/test_config_flow.py
index b8f90b3a4aa..f0136396c22 100644
--- a/tests/components/bluetooth/test_config_flow.py
+++ b/tests/components/bluetooth/test_config_flow.py
@@ -20,7 +20,7 @@ from homeassistant.components.bluetooth.const import (
)
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
-from homeassistant.helpers import device_registry as dr
+from homeassistant.helpers import area_registry as ar, device_registry as dr
from homeassistant.setup import async_setup_component
from . import FakeRemoteScanner, MockBleakClient, _get_manager
@@ -517,8 +517,10 @@ async def test_options_flow_local_no_passive_support(hass: HomeAssistant) -> Non
@pytest.mark.usefixtures("one_adapter")
-async def test_async_step_user_linux_adapter_is_ignored(hass: HomeAssistant) -> None:
- """Test we give a hint that the adapter is ignored."""
+async def test_async_step_user_linux_adapter_replace_ignored(
+ hass: HomeAssistant,
+) -> None:
+ """Test we can replace an ignored adapter from user flow."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id="00:00:00:00:00:01",
@@ -530,14 +532,26 @@ async def test_async_step_user_linux_adapter_is_ignored(hass: HomeAssistant) ->
context={"source": config_entries.SOURCE_USER},
data={},
)
- assert result["type"] is FlowResultType.ABORT
- assert result["reason"] == "no_adapters"
- assert result["description_placeholders"] == {"ignored_adapters": "1"}
+ with (
+ patch("homeassistant.components.bluetooth.async_setup", return_value=True),
+ patch(
+ "homeassistant.components.bluetooth.async_setup_entry", return_value=True
+ ) as mock_setup_entry,
+ ):
+ result2 = await hass.config_entries.flow.async_configure(
+ result["flow_id"], user_input={}
+ )
+ assert result2["type"] is FlowResultType.CREATE_ENTRY
+ assert result2["title"] == "ACME Bluetooth Adapter 5.0 (00:00:00:00:00:01)"
+ assert result2["data"] == {}
+ assert len(mock_setup_entry.mock_calls) == 1
@pytest.mark.usefixtures("enable_bluetooth")
async def test_async_step_integration_discovery_remote_adapter(
- hass: HomeAssistant, device_registry: dr.DeviceRegistry
+ hass: HomeAssistant,
+ device_registry: dr.DeviceRegistry,
+ area_registry: ar.AreaRegistry,
) -> None:
"""Test remote adapter configuration via integration discovery."""
entry = MockConfigEntry(domain="test")
@@ -547,10 +561,12 @@ async def test_async_step_integration_discovery_remote_adapter(
)
scanner = FakeRemoteScanner("esp32", "esp32", connector, True)
manager = _get_manager()
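+ # Create an area and suggest it for the via-device; the scanner's BLE device should inherit it.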
+ area_entry = area_registry.async_get_or_create("test")
cancel_scanner = manager.async_register_scanner(scanner)
device_entry = device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
identifiers={("test", "BB:BB:BB:BB:BB:BB")},
+ suggested_area=area_entry.id,
)
result = await hass.config_entries.flow.async_init(
@@ -585,6 +601,7 @@ async def test_async_step_integration_discovery_remote_adapter(
)
assert ble_device_entry is not None
assert ble_device_entry.via_device_id == device_entry.id
+ assert ble_device_entry.area_id == area_entry.id
await hass.config_entries.async_unload(new_entry.entry_id)
await hass.config_entries.async_unload(entry.entry_id)
diff --git a/tests/components/bluetooth_le_tracker/test_device_tracker.py b/tests/components/bluetooth_le_tracker/test_device_tracker.py
index da90980640b..738cae90c22 100644
--- a/tests/components/bluetooth_le_tracker/test_device_tracker.py
+++ b/tests/components/bluetooth_le_tracker/test_device_tracker.py
@@ -215,7 +215,7 @@ async def test_see_device_if_time_updated(hass: HomeAssistant) -> None:
@pytest.mark.usefixtures("mock_bluetooth", "mock_device_tracker_conf")
async def test_preserve_new_tracked_device_name(hass: HomeAssistant) -> None:
- """Test preserving tracked device name across new seens."""
+ """Test preserving tracked device name across new seens.""" # codespell:ignore seens
address = "DE:AD:BE:EF:13:37"
name = "Mock device name"
diff --git a/tests/components/cloud/snapshots/test_http_api.ambr b/tests/components/cloud/snapshots/test_http_api.ambr
new file mode 100644
index 00000000000..9b2f2e0eb33
--- /dev/null
+++ b/tests/components/cloud/snapshots/test_http_api.ambr
@@ -0,0 +1,49 @@
+# serializer version: 1
+# name: test_download_support_package
+ '''
+ ## System Information
+
+ version | core-2025.2.0
+ --- | ---
+ installation_type | Home Assistant Core
+ dev | False
+ hassio | False
+ docker | False
+ user | hass
+ virtualenv | False
+ python_version | 3.13.1
+ os_name | Linux
+ os_version | 6.12.9
+ arch | x86_64
+ timezone | US/Pacific
+ config_dir | config
+
+ mock_no_info_integration
+
+ No information available
+
+
+ cloud
+
+ logged_in | True
+ --- | ---
+ subscription_expiration | 2025-01-17T11:19:31+00:00
+ relayer_connected | True
+ relayer_region | xx-earth-616
+ remote_enabled | True
+ remote_connected | False
+ alexa_enabled | True
+ google_enabled | False
+ cloud_ice_servers_enabled | True
+ remote_server | us-west-1
+ certificate_status | CertificateStatus.READY
+ instance_id | 12345678901234567890
+ can_reach_cert_server | Exception: Unexpected exception
+ can_reach_cloud_auth | Failed: unreachable
+ can_reach_cloud | ok
+
+
+
+
+ '''
+# ---
diff --git a/tests/components/cloud/test_backup.py b/tests/components/cloud/test_backup.py
index c2513168ab9..5b2b8751311 100644
--- a/tests/components/cloud/test_backup.py
+++ b/tests/components/cloud/test_backup.py
@@ -174,6 +174,7 @@ async def test_agents_list_backups(
"backup_id": "23e64aec",
"date": "2024-11-22T11:48:48.727189+01:00",
"database_included": True,
+ "extra_metadata": {},
"folders": [],
"homeassistant_included": True,
"homeassistant_version": "2024.12.0.dev0",
@@ -223,6 +224,7 @@ async def test_agents_list_backups_fail_cloud(
"backup_id": "23e64aec",
"date": "2024-11-22T11:48:48.727189+01:00",
"database_included": True,
+ "extra_metadata": {},
"folders": [],
"homeassistant_included": True,
"homeassistant_version": "2024.12.0.dev0",
diff --git a/tests/components/cloud/test_http_api.py b/tests/components/cloud/test_http_api.py
index 910fa03d46c..e4a526ceadd 100644
--- a/tests/components/cloud/test_http_api.py
+++ b/tests/components/cloud/test_http_api.py
@@ -1,10 +1,11 @@
"""Tests for the HTTP API for the cloud component."""
+from collections.abc import Callable, Coroutine
from copy import deepcopy
from http import HTTPStatus
import json
from typing import Any
-from unittest.mock import AsyncMock, MagicMock, Mock, patch
+from unittest.mock import AsyncMock, MagicMock, Mock, PropertyMock, patch
import aiohttp
from hass_nabucasa import thingtalk
@@ -15,9 +16,12 @@ from hass_nabucasa.auth import (
UnknownError,
)
from hass_nabucasa.const import STATE_CONNECTED
+from hass_nabucasa.remote import CertificateStatus
from hass_nabucasa.voice import TTS_VOICES
import pytest
+from syrupy.assertion import SnapshotAssertion
+from homeassistant.components import system_health
from homeassistant.components.alexa import errors as alexa_errors
# pylint: disable-next=hass-component-root-import
@@ -30,8 +34,10 @@ from homeassistant.components.websocket_api import ERR_INVALID_FORMAT
from homeassistant.core import HomeAssistant, State
from homeassistant.helpers import entity_registry as er
from homeassistant.setup import async_setup_component
+from homeassistant.util import dt as dt_util
from homeassistant.util.location import LocationInfo
+from tests.common import mock_platform
from tests.components.google_assistant import MockConfig
from tests.test_util.aiohttp import AiohttpClientMocker
from tests.typing import ClientSessionGenerator, WebSocketGenerator
@@ -113,6 +119,7 @@ async def setup_cloud_fixture(hass: HomeAssistant, cloud: MagicMock) -> None:
"user_pool_id": "user_pool_id",
"region": "region",
"relayer_server": "relayer",
+ "acme_server": "cert-server",
"accounts_server": "api-test.hass.io",
"google_actions": {"filter": {"include_domains": "light"}},
"alexa": {
@@ -1860,3 +1867,96 @@ async def test_logout_view_dispatch_event(
assert async_dispatcher_send_mock.call_count == 1
assert async_dispatcher_send_mock.mock_calls[0][1][1] == "cloud_event"
assert async_dispatcher_send_mock.mock_calls[0][1][2] == {"type": "logout"}
+
+
+async def test_download_support_package(
+ hass: HomeAssistant,
+ cloud: MagicMock,
+ set_cloud_prefs: Callable[[dict[str, Any]], Coroutine[Any, Any, None]],
+ hass_client: ClientSessionGenerator,
+ aioclient_mock: AiohttpClientMocker,
+ snapshot: SnapshotAssertion,
+) -> None:
+ """Test downloading a support package file."""
+ aioclient_mock.get("https://cloud.bla.com/status", text="")
+ aioclient_mock.get(
+ "https://cert-server/directory", exc=Exception("Unexpected exception")
+ )
+ aioclient_mock.get(
+ "https://cognito-idp.us-east-1.amazonaws.com/AAAA/.well-known/jwks.json",
+ exc=aiohttp.ClientError,
+ )
+
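+ # Register a mock system health platform that reports no info, exercising the
+ # "No information available" branch of the support package output.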
+ def async_register_mock_platform(
+ hass: HomeAssistant, register: system_health.SystemHealthRegistration
+ ) -> None:
+ async def mock_empty_info(hass: HomeAssistant) -> dict[str, Any]:
+ return {}
+
+ register.async_register_info(mock_empty_info, "/config/mock_integration")
+
+ mock_platform(
+ hass,
+ "mock_no_info_integration.system_health",
+ MagicMock(async_register=async_register_mock_platform),
+ )
+ hass.config.components.add("mock_no_info_integration")
+
+ assert await async_setup_component(hass, "system_health", {})
+
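+ # Pin the generated UUID so the cloud instance_id in the snapshot stays deterministic.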
+ with patch("uuid.UUID.hex", new_callable=PropertyMock) as hexmock:
+ hexmock.return_value = "12345678901234567890"
+ assert await async_setup_component(
+ hass,
+ DOMAIN,
+ {
+ DOMAIN: {
+ "user_pool_id": "AAAA",
+ "region": "us-east-1",
+ "acme_server": "cert-server",
+ "relayer_server": "cloud.bla.com",
+ },
+ },
+ )
+ await hass.async_block_till_done()
+
+ await cloud.login("test-user", "test-pass")
+
+ cloud.remote.snitun_server = "us-west-1"
+ cloud.remote.certificate_status = CertificateStatus.READY
+ cloud.expiration_date = dt_util.parse_datetime("2025-01-17T11:19:31.0+00:00")
+
+ await cloud.client.async_system_message({"region": "xx-earth-616"})
+ await set_cloud_prefs(
+ {
+ "alexa_enabled": True,
+ "google_enabled": False,
+ "remote_enabled": True,
+ "cloud_ice_servers_enabled": True,
+ }
+ )
+
+ cloud_client = await hass_client()
+ with (
+ patch.object(hass.config, "config_dir", new="config"),
+ patch(
+ "homeassistant.components.homeassistant.system_health.system_info.async_get_system_info",
+ return_value={
+ "installation_type": "Home Assistant Core",
+ "version": "2025.2.0",
+ "dev": False,
+ "hassio": False,
+ "virtualenv": False,
+ "python_version": "3.13.1",
+ "docker": False,
+ "arch": "x86_64",
+ "timezone": "US/Pacific",
+ "os_name": "Linux",
+ "os_version": "6.12.9",
+ "user": "hass",
+ },
+ ),
+ ):
+ req = await cloud_client.get("/api/cloud/support_package")
+ assert req.status == HTTPStatus.OK
+ assert await req.text() == snapshot
diff --git a/tests/components/cloud/test_tts.py b/tests/components/cloud/test_tts.py
index bf9fd7302ae..81b10866dff 100644
--- a/tests/components/cloud/test_tts.py
+++ b/tests/components/cloud/test_tts.py
@@ -12,7 +12,12 @@ import voluptuous as vol
from homeassistant.components.assist_pipeline.pipeline import STORAGE_KEY
from homeassistant.components.cloud.const import DEFAULT_TTS_DEFAULT_VOICE, DOMAIN
-from homeassistant.components.cloud.tts import PLATFORM_SCHEMA, SUPPORT_LANGUAGES, Voice
+from homeassistant.components.cloud.tts import (
+ DEFAULT_VOICES,
+ PLATFORM_SCHEMA,
+ SUPPORT_LANGUAGES,
+ Voice,
+)
from homeassistant.components.media_player import (
ATTR_MEDIA_CONTENT_ID,
DOMAIN as DOMAIN_MP,
@@ -61,6 +66,19 @@ def test_default_exists() -> None:
assert DEFAULT_TTS_DEFAULT_VOICE[1] in TTS_VOICES[DEFAULT_TTS_DEFAULT_VOICE[0]]
+def test_all_languages_have_default() -> None:
+ """Test all languages have a default voice."""
+ assert set(SUPPORT_LANGUAGES).difference(DEFAULT_VOICES) == set()
+ assert set(DEFAULT_VOICES).difference(SUPPORT_LANGUAGES) == set()
+
+
+@pytest.mark.parametrize(("language", "voice"), DEFAULT_VOICES.items())
+def test_default_voice_is_valid(language: str, voice: str) -> None:
+ """Test that the default voice is valid."""
+ assert language in TTS_VOICES
+ assert voice in TTS_VOICES[language]
+
+
def test_schema() -> None:
"""Test schema."""
assert "nl-NL" in SUPPORT_LANGUAGES
diff --git a/tests/components/config/test_config_entries.py b/tests/components/config/test_config_entries.py
index ee000c5ada2..f5241f65200 100644
--- a/tests/components/config/test_config_entries.py
+++ b/tests/components/config/test_config_entries.py
@@ -3,6 +3,7 @@
from collections import OrderedDict
from collections.abc import Generator
from http import HTTPStatus
+from typing import Any
from unittest.mock import ANY, AsyncMock, patch
from aiohttp.test_utils import TestClient
@@ -12,12 +13,13 @@ import voluptuous as vol
from homeassistant import config_entries as core_ce, data_entry_flow, loader
from homeassistant.components.config import config_entries
-from homeassistant.config_entries import HANDLERS, ConfigFlow
+from homeassistant.config_entries import HANDLERS, ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS
from homeassistant.core import HomeAssistant, callback
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers import config_entry_flow, config_validation as cv
from homeassistant.helpers.discovery_flow import DiscoveryKey
+from homeassistant.helpers.service_info.hassio import HassioServiceInfo
from homeassistant.loader import IntegrationNotFound
from homeassistant.setup import async_setup_component
from homeassistant.util.dt import utcnow
@@ -729,27 +731,62 @@ async def test_get_progress_index(
mock_platform(hass, "test.config_flow", None)
ws_client = await hass_ws_client(hass)
+ mock_integration(
+ hass, MockModule("test", async_setup_entry=AsyncMock(return_value=True))
+ )
+
+ entry = MockConfigEntry(domain="test", title="Test", entry_id="1234")
+ entry.add_to_hass(hass)
+
class TestFlow(core_ce.ConfigFlow):
VERSION = 5
- async def async_step_hassio(self, discovery_info):
+ async def async_step_hassio(
+ self, discovery_info: HassioServiceInfo
+ ) -> ConfigFlowResult:
+ """Handle a Hass.io discovery."""
return await self.async_step_account()
- async def async_step_account(self, user_input=None):
+ async def async_step_account(self, user_input: dict[str, Any] | None = None):
+ """Show a form to the user."""
return self.async_show_form(step_id="account")
+ async def async_step_user(self, user_input: dict[str, Any] | None = None):
+ """Handle a config flow initialized by the user."""
+ return await self.async_step_account()
+
+ async def async_step_reconfigure(
+ self, user_input: dict[str, Any] | None = None
+ ):
+ """Handle a reconfiguration flow initialized by the user."""
+ nonlocal entry
+ assert self._get_reconfigure_entry() is entry
+ return await self.async_step_account()
+
with patch.dict(HANDLERS, {"test": TestFlow}):
- form = await hass.config_entries.flow.async_init(
+ form_hassio = await hass.config_entries.flow.async_init(
"test", context={"source": core_ce.SOURCE_HASSIO}
)
+ form_user = await hass.config_entries.flow.async_init(
+ "test", context={"source": core_ce.SOURCE_USER}
+ )
+ form_reconfigure = await hass.config_entries.flow.async_init(
+ "test", context={"source": core_ce.SOURCE_RECONFIGURE, "entry_id": "1234"}
+ )
+
+ for form in (form_hassio, form_user, form_reconfigure):
+ assert form["type"] == data_entry_flow.FlowResultType.FORM
+ assert form["step_id"] == "account"
await ws_client.send_json({"id": 5, "type": "config_entries/flow/progress"})
response = await ws_client.receive_json()
assert response["success"]
+
+ # Active flows with SOURCE_USER and SOURCE_RECONFIGURE should be filtered out
assert response["result"] == [
{
- "flow_id": form["flow_id"],
+ "flow_id": form_hassio["flow_id"],
"handler": "test",
"step_id": "account",
"context": {"source": core_ce.SOURCE_HASSIO},
diff --git a/tests/components/conversation/snapshots/test_session.ambr b/tests/components/conversation/snapshots/test_chat_log.ambr
similarity index 100%
rename from tests/components/conversation/snapshots/test_session.ambr
rename to tests/components/conversation/snapshots/test_chat_log.ambr
diff --git a/tests/components/conversation/test_session.py b/tests/components/conversation/test_chat_log.py
similarity index 68%
rename from tests/components/conversation/test_session.py
rename to tests/components/conversation/test_chat_log.py
index 3943f41a62b..c22a90e6928 100644
--- a/tests/components/conversation/test_session.py
+++ b/tests/components/conversation/test_chat_log.py
@@ -9,13 +9,13 @@ from syrupy.assertion import SnapshotAssertion
import voluptuous as vol
from homeassistant.components.conversation import (
- Content,
+ AssistantContent,
ConversationInput,
ConverseError,
- NativeContent,
+ ToolResultContent,
async_get_chat_log,
)
-from homeassistant.components.conversation.session import DATA_CHAT_HISTORY
+from homeassistant.components.conversation.chat_log import DATA_CHAT_HISTORY
from homeassistant.core import Context, HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import chat_session, llm
@@ -40,7 +40,7 @@ def mock_conversation_input(hass: HomeAssistant) -> ConversationInput:
@pytest.fixture
def mock_ulid() -> Generator[Mock]:
"""Mock the ulid library."""
- with patch("homeassistant.util.ulid.ulid_now") as mock_ulid_now:
+ with patch("homeassistant.helpers.chat_session.ulid_now") as mock_ulid_now:
mock_ulid_now.return_value = "mock-ulid"
yield mock_ulid_now
@@ -56,11 +56,10 @@ async def test_cleanup(
):
conversation_id = session.conversation_id
# Add message so it persists
- chat_log.async_add_message(
- Content(
- role="assistant",
- agent_id=mock_conversation_input.agent_id,
- content="",
+ chat_log.async_add_assistant_content_without_tools(
+ AssistantContent(
+ agent_id="mock-agent-id",
+ content="Hey!",
)
)
@@ -79,7 +78,7 @@ async def test_cleanup(
assert conversation_id not in hass.data[DATA_CHAT_HISTORY]
-async def test_add_message(
+async def test_default_content(
hass: HomeAssistant, mock_conversation_input: ConversationInput
) -> None:
"""Test filtering of messages."""
@@ -87,95 +86,11 @@ async def test_add_message(
chat_session.async_get_chat_session(hass) as session,
async_get_chat_log(hass, session, mock_conversation_input) as chat_log,
):
- assert len(chat_log.messages) == 2
-
- with pytest.raises(ValueError):
- chat_log.async_add_message(
- Content(role="system", agent_id=None, content="")
- )
-
- # No 2 user messages in a row
- assert chat_log.messages[1].role == "user"
-
- with pytest.raises(ValueError):
- chat_log.async_add_message(Content(role="user", agent_id=None, content=""))
-
- # No 2 assistant messages in a row
- chat_log.async_add_message(Content(role="assistant", agent_id=None, content=""))
- assert len(chat_log.messages) == 3
- assert chat_log.messages[-1].role == "assistant"
-
- with pytest.raises(ValueError):
- chat_log.async_add_message(
- Content(role="assistant", agent_id=None, content="")
- )
-
-
-async def test_message_filtering(
- hass: HomeAssistant, mock_conversation_input: ConversationInput
-) -> None:
- """Test filtering of messages."""
- with (
- chat_session.async_get_chat_session(hass) as session,
- async_get_chat_log(hass, session, mock_conversation_input) as chat_log,
- ):
- messages = chat_log.async_get_messages(agent_id=None)
- assert len(messages) == 2
- assert messages[0] == Content(
- role="system",
- agent_id=None,
- content="",
- )
- assert messages[1] == Content(
- role="user",
- agent_id="mock-agent-id",
- content=mock_conversation_input.text,
- )
- # Cannot add a second user message in a row
- with pytest.raises(ValueError):
- chat_log.async_add_message(
- Content(
- role="user",
- agent_id="mock-agent-id",
- content="Hey!",
- )
- )
-
- chat_log.async_add_message(
- Content(
- role="assistant",
- agent_id="mock-agent-id",
- content="Hey!",
- )
- )
- # Different agent, native messages will be filtered out.
- chat_log.async_add_message(
- NativeContent(agent_id="another-mock-agent-id", content=1)
- )
- chat_log.async_add_message(NativeContent(agent_id="mock-agent-id", content=1))
- # A non-native message from another agent is not filtered out.
- chat_log.async_add_message(
- Content(
- role="assistant",
- agent_id="another-mock-agent-id",
- content="Hi!",
- )
- )
-
- assert len(chat_log.messages) == 6
-
- messages = chat_log.async_get_messages(agent_id="mock-agent-id")
- assert len(messages) == 5
-
- assert messages[2] == Content(
- role="assistant",
- agent_id="mock-agent-id",
- content="Hey!",
- )
- assert messages[3] == NativeContent(agent_id="mock-agent-id", content=1)
- assert messages[4] == Content(
- role="assistant", agent_id="another-mock-agent-id", content="Hi!"
- )
+ assert len(chat_log.content) == 2
+ assert chat_log.content[0].role == "system"
+ assert chat_log.content[0].content == ""
+ assert chat_log.content[1].role == "user"
+ assert chat_log.content[1].content == mock_conversation_input.text
async def test_llm_api(
@@ -268,12 +183,10 @@ async def test_template_variables(
),
)
- assert chat_log.user_name == "Test User"
-
- assert "The instance name is test home." in chat_log.messages[0].content
- assert "The user name is Test User." in chat_log.messages[0].content
- assert "The user id is 12345." in chat_log.messages[0].content
- assert "The calling platform is test." in chat_log.messages[0].content
+ assert "The instance name is test home." in chat_log.content[0].content
+ assert "The user name is Test User." in chat_log.content[0].content
+ assert "The user id is 12345." in chat_log.content[0].content
+ assert "The calling platform is test." in chat_log.content[0].content
async def test_extra_systen_prompt(
@@ -296,16 +209,15 @@ async def test_extra_systen_prompt(
user_llm_hass_api=None,
user_llm_prompt=None,
)
- chat_log.async_add_message(
- Content(
- role="assistant",
+ chat_log.async_add_assistant_content_without_tools(
+ AssistantContent(
agent_id="mock-agent-id",
content="Hey!",
)
)
assert chat_log.extra_system_prompt == extra_system_prompt
- assert chat_log.messages[0].content.endswith(extra_system_prompt)
+ assert chat_log.content[0].content.endswith(extra_system_prompt)
# Verify that follow-up conversations with no system prompt take previous one
conversation_id = chat_log.conversation_id
@@ -323,7 +235,7 @@ async def test_extra_systen_prompt(
)
assert chat_log.extra_system_prompt == extra_system_prompt
- assert chat_log.messages[0].content.endswith(extra_system_prompt)
+ assert chat_log.content[0].content.endswith(extra_system_prompt)
# Verify that we take new system prompts
mock_conversation_input.extra_system_prompt = extra_system_prompt2
@@ -338,17 +250,16 @@ async def test_extra_systen_prompt(
user_llm_hass_api=None,
user_llm_prompt=None,
)
- chat_log.async_add_message(
- Content(
- role="assistant",
+ chat_log.async_add_assistant_content_without_tools(
+ AssistantContent(
agent_id="mock-agent-id",
content="Hey!",
)
)
assert chat_log.extra_system_prompt == extra_system_prompt2
- assert chat_log.messages[0].content.endswith(extra_system_prompt2)
- assert extra_system_prompt not in chat_log.messages[0].content
+ assert chat_log.content[0].content.endswith(extra_system_prompt2)
+ assert extra_system_prompt not in chat_log.content[0].content
# Verify that follow-up conversations with no system prompt take previous one
mock_conversation_input.extra_system_prompt = None
@@ -365,7 +276,7 @@ async def test_extra_systen_prompt(
)
assert chat_log.extra_system_prompt == extra_system_prompt2
- assert chat_log.messages[0].content.endswith(extra_system_prompt2)
+ assert chat_log.content[0].content.endswith(extra_system_prompt2)
async def test_tool_call(
@@ -383,8 +294,7 @@ async def test_tool_call(
mock_tool.async_call.return_value = "Test response"
with patch(
- "homeassistant.components.conversation.session.llm.AssistAPI._async_get_tools",
- return_value=[],
+ "homeassistant.helpers.llm.AssistAPI._async_get_tools", return_value=[]
) as mock_get_tools:
mock_get_tools.return_value = [mock_tool]
@@ -398,14 +308,34 @@ async def test_tool_call(
user_llm_hass_api="assist",
user_llm_prompt=None,
)
- result = await chat_log.async_call_tool(
- llm.ToolInput(
- tool_name="test_tool",
- tool_args={"param1": "Test Param"},
- )
+ content = AssistantContent(
+ agent_id=mock_conversation_input.agent_id,
+ content="",
+ tool_calls=[
+ llm.ToolInput(
+ id="mock-tool-call-id",
+ tool_name="test_tool",
+ tool_args={"param1": "Test Param"},
+ )
+ ],
)
- assert result == "Test response"
+ with pytest.raises(ValueError):
+ chat_log.async_add_assistant_content_without_tools(content)
+
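+ # async_add_assistant_content executes the tool calls and yields a ToolResultContent for each.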
+ result = None
+ async for tool_result_content in chat_log.async_add_assistant_content(
+ content
+ ):
+ assert result is None
+ result = tool_result_content
+
+ assert result == ToolResultContent(
+ agent_id=mock_conversation_input.agent_id,
+ tool_call_id="mock-tool-call-id",
+ tool_result="Test response",
+ tool_name="test_tool",
+ )
async def test_tool_call_exception(
@@ -423,8 +353,7 @@ async def test_tool_call_exception(
mock_tool.async_call.side_effect = HomeAssistantError("Test error")
with patch(
- "homeassistant.components.conversation.session.llm.AssistAPI._async_get_tools",
- return_value=[],
+ "homeassistant.helpers.llm.AssistAPI._async_get_tools", return_value=[]
) as mock_get_tools:
mock_get_tools.return_value = [mock_tool]
@@ -438,11 +367,26 @@ async def test_tool_call_exception(
user_llm_hass_api="assist",
user_llm_prompt=None,
)
- result = await chat_log.async_call_tool(
- llm.ToolInput(
- tool_name="test_tool",
- tool_args={"param1": "Test Param"},
+ result = None
+ async for tool_result_content in chat_log.async_add_assistant_content(
+ AssistantContent(
+ agent_id=mock_conversation_input.agent_id,
+ content="",
+ tool_calls=[
+ llm.ToolInput(
+ id="mock-tool-call-id",
+ tool_name="test_tool",
+ tool_args={"param1": "Test Param"},
+ )
+ ],
)
- )
+ ):
+ assert result is None
+ result = tool_result_content
- assert result == {"error": "HomeAssistantError", "error_text": "Test error"}
+ assert result == ToolResultContent(
+ agent_id=mock_conversation_input.agent_id,
+ tool_call_id="mock-tool-call-id",
+ tool_result={"error": "HomeAssistantError", "error_text": "Test error"},
+ tool_name="test_tool",
+ )
diff --git a/tests/components/eheimdigital/conftest.py b/tests/components/eheimdigital/conftest.py
index ef52eade9ae..afb97b97569 100644
--- a/tests/components/eheimdigital/conftest.py
+++ b/tests/components/eheimdigital/conftest.py
@@ -34,6 +34,7 @@ def classic_led_ctrl_mock():
)
classic_led_ctrl_mock.name = "Mock classicLEDcontrol+e"
classic_led_ctrl_mock.aquarium_name = "Mock Aquarium"
+ classic_led_ctrl_mock.sw_version = "1.0.0_1.0.0"
classic_led_ctrl_mock.light_mode = LightMode.DAYCL_MODE
classic_led_ctrl_mock.light_level = (10, 39)
return classic_led_ctrl_mock
@@ -47,6 +48,7 @@ def heater_mock():
heater_mock.device_type = EheimDeviceType.VERSION_EHEIM_EXT_HEATER
heater_mock.name = "Mock Heater"
heater_mock.aquarium_name = "Mock Aquarium"
+ heater_mock.sw_version = "1.0.0_1.0.0"
heater_mock.temperature_unit = HeaterUnit.CELSIUS
heater_mock.current_temperature = 24.2
heater_mock.target_temperature = 25.5
diff --git a/tests/components/electric_kiwi/__init__.py b/tests/components/electric_kiwi/__init__.py
index 7f5e08a56b5..936557ac3bf 100644
--- a/tests/components/electric_kiwi/__init__.py
+++ b/tests/components/electric_kiwi/__init__.py
@@ -1 +1,13 @@
"""Tests for the Electric Kiwi integration."""
+
+from homeassistant.core import HomeAssistant
+
+from tests.common import MockConfigEntry
+
+
+async def init_integration(hass: HomeAssistant, entry: MockConfigEntry) -> None:
+ """Fixture for setting up the integration with args."""
+ entry.add_to_hass(hass)
+
+ await hass.config_entries.async_setup(entry.entry_id)
+ await hass.async_block_till_done()
diff --git a/tests/components/electric_kiwi/conftest.py b/tests/components/electric_kiwi/conftest.py
index 010efcb7b5f..cc967631be4 100644
--- a/tests/components/electric_kiwi/conftest.py
+++ b/tests/components/electric_kiwi/conftest.py
@@ -2,11 +2,18 @@
from __future__ import annotations
-from collections.abc import Awaitable, Callable, Generator
+from collections.abc import Generator
from time import time
from unittest.mock import AsyncMock, patch
-from electrickiwi_api.model import AccountBalance, Hop, HopIntervals
+from electrickiwi_api.model import (
+ AccountSummary,
+ CustomerConnection,
+ Hop,
+ HopIntervals,
+ Service,
+ Session,
+)
import pytest
from homeassistant.components.application_credentials import (
@@ -23,37 +30,55 @@ CLIENT_ID = "1234"
CLIENT_SECRET = "5678"
REDIRECT_URI = "https://example.com/auth/external/callback"
-type YieldFixture = Generator[AsyncMock]
-type ComponentSetup = Callable[[], Awaitable[bool]]
+
+@pytest.fixture(autouse=True)
+async def setup_credentials(hass: HomeAssistant) -> None:
+ """Fixture to setup application credentials component."""
+ await async_setup_component(hass, "application_credentials", {})
+ await async_import_client_credential(
+ hass,
+ DOMAIN,
+ ClientCredential(CLIENT_ID, CLIENT_SECRET),
+ )
@pytest.fixture(autouse=True)
-async def request_setup(current_request_with_host: None) -> None:
- """Request setup."""
-
-
-@pytest.fixture
-def component_setup(
- hass: HomeAssistant, config_entry: MockConfigEntry
-) -> ComponentSetup:
- """Fixture for setting up the integration."""
-
- async def _setup_func() -> bool:
- assert await async_setup_component(hass, "application_credentials", {})
- await hass.async_block_till_done()
- await async_import_client_credential(
- hass,
- DOMAIN,
- ClientCredential(CLIENT_ID, CLIENT_SECRET),
- DOMAIN,
+def electrickiwi_api() -> Generator[AsyncMock]:
+ """Mock ek api and return values."""
+ with (
+ patch(
+ "homeassistant.components.electric_kiwi.ElectricKiwiApi",
+ autospec=True,
+ ) as mock_client,
+ patch(
+ "homeassistant.components.electric_kiwi.config_flow.ElectricKiwiApi",
+ new=mock_client,
+ ),
+ ):
+ client = mock_client.return_value
+ client.customer_number = 123456
+ client.electricity = Service(
+ identifier="00000000DDA",
+ service="electricity",
+ service_status="Y",
+ is_primary_service=True,
)
- await hass.async_block_till_done()
- config_entry.add_to_hass(hass)
- result = await hass.config_entries.async_setup(config_entry.entry_id)
- await hass.async_block_till_done()
- return result
-
- return _setup_func
+ client.get_active_session.return_value = Session.from_dict(
+ load_json_value_fixture("session.json", DOMAIN)
+ )
+ client.get_hop_intervals.return_value = HopIntervals.from_dict(
+ load_json_value_fixture("hop_intervals.json", DOMAIN)
+ )
+ client.get_hop.return_value = Hop.from_dict(
+ load_json_value_fixture("get_hop.json", DOMAIN)
+ )
+ client.get_account_summary.return_value = AccountSummary.from_dict(
+ load_json_value_fixture("account_summary.json", DOMAIN)
+ )
+ client.get_connection_details.return_value = CustomerConnection.from_dict(
+ load_json_value_fixture("connection_details.json", DOMAIN)
+ )
+ yield client
@pytest.fixture(name="config_entry")
@@ -63,7 +88,7 @@ def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry:
title="Electric Kiwi",
domain=DOMAIN,
data={
- "id": "12345",
+ "id": "123456",
"auth_implementation": DOMAIN,
"token": {
"refresh_token": "mock-refresh-token",
@@ -74,6 +99,54 @@ def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry:
},
},
unique_id=DOMAIN,
+ version=1,
+ minor_version=1,
+ )
+
+
+@pytest.fixture(name="config_entry2")
+def mock_config_entry2(hass: HomeAssistant) -> MockConfigEntry:
+ """Create mocked config entry."""
+ return MockConfigEntry(
+ title="Electric Kiwi",
+ domain=DOMAIN,
+ data={
+ "id": "123457",
+ "auth_implementation": DOMAIN,
+ "token": {
+ "refresh_token": "mock-refresh-token",
+ "access_token": "mock-access-token",
+ "type": "Bearer",
+ "expires_in": 60,
+ "expires_at": time() + 60,
+ },
+ },
+ unique_id="1234567",
+ version=1,
+ minor_version=1,
+ )
+
+
+@pytest.fixture(name="migrated_config_entry")
+def mock_migrated_config_entry(hass: HomeAssistant) -> MockConfigEntry:
+ """Create mocked config entry."""
+ return MockConfigEntry(
+ title="Electric Kiwi",
+ domain=DOMAIN,
+ data={
+ "id": "123456",
+ "auth_implementation": DOMAIN,
+ "token": {
+ "refresh_token": "mock-refresh-token",
+ "access_token": "mock-access-token",
+ "type": "Bearer",
+ "expires_in": 60,
+ "expires_at": time() + 60,
+ },
+ },
+ unique_id="123456",
+ version=1,
+ minor_version=2,
)
@@ -87,35 +160,10 @@ def mock_setup_entry() -> Generator[AsyncMock]:
@pytest.fixture(name="ek_auth")
-def electric_kiwi_auth() -> YieldFixture:
+def electric_kiwi_auth() -> Generator[AsyncMock]:
"""Patch access to electric kiwi access token."""
with patch(
- "homeassistant.components.electric_kiwi.api.AsyncConfigEntryAuth"
+ "homeassistant.components.electric_kiwi.api.ConfigEntryElectricKiwiAuth"
) as mock_auth:
mock_auth.return_value.async_get_access_token = AsyncMock("auth_token")
yield mock_auth
-
-
-@pytest.fixture(name="ek_api")
-def ek_api() -> YieldFixture:
- """Mock ek api and return values."""
- with patch(
- "homeassistant.components.electric_kiwi.ElectricKiwiApi", autospec=True
- ) as mock_ek_api:
- mock_ek_api.return_value.customer_number = 123456
- mock_ek_api.return_value.connection_id = 123456
- mock_ek_api.return_value.set_active_session.return_value = None
- mock_ek_api.return_value.get_hop_intervals.return_value = (
- HopIntervals.from_dict(
- load_json_value_fixture("hop_intervals.json", DOMAIN)
- )
- )
- mock_ek_api.return_value.get_hop.return_value = Hop.from_dict(
- load_json_value_fixture("get_hop.json", DOMAIN)
- )
- mock_ek_api.return_value.get_account_balance.return_value = (
- AccountBalance.from_dict(
- load_json_value_fixture("account_balance.json", DOMAIN)
- )
- )
- yield mock_ek_api
diff --git a/tests/components/electric_kiwi/fixtures/account_balance.json b/tests/components/electric_kiwi/fixtures/account_balance.json
deleted file mode 100644
index 25bc57784ee..00000000000
--- a/tests/components/electric_kiwi/fixtures/account_balance.json
+++ /dev/null
@@ -1,28 +0,0 @@
-{
- "data": {
- "connections": [
- {
- "hop_percentage": "3.5",
- "id": 3,
- "running_balance": "184.09",
- "start_date": "2020-10-04",
- "unbilled_days": 15
- }
- ],
- "last_billed_amount": "-66.31",
- "last_billed_date": "2020-10-03",
- "next_billing_date": "2020-11-03",
- "is_prepay": "N",
- "summary": {
- "credits": "0.0",
- "electricity_used": "184.09",
- "other_charges": "0.00",
- "payments": "-220.0"
- },
- "total_account_balance": "-102.22",
- "total_billing_days": 30,
- "total_running_balance": "184.09",
- "type": "account_running_balance"
- },
- "status": 1
-}
diff --git a/tests/components/electric_kiwi/fixtures/account_summary.json b/tests/components/electric_kiwi/fixtures/account_summary.json
new file mode 100644
index 00000000000..6a05d6a3fe7
--- /dev/null
+++ b/tests/components/electric_kiwi/fixtures/account_summary.json
@@ -0,0 +1,43 @@
+{
+ "data": {
+ "type": "account_summary",
+ "total_running_balance": "184.09",
+ "total_account_balance": "-102.22",
+ "total_billing_days": 31,
+ "next_billing_date": "2025-02-19",
+ "service_names": ["power"],
+ "services": {
+ "power": {
+ "connections": [
+ {
+ "id": 515363,
+ "running_balance": "12.98",
+ "unbilled_days": 5,
+ "hop_percentage": "11.2",
+ "start_date": "2025-01-19",
+ "service_label": "Power"
+ }
+ ]
+ }
+ },
+ "date_to_pay": "",
+ "invoice_id": "",
+ "total_invoiced_charges": "",
+ "default_to_pay": "",
+ "invoice_exists": 1,
+ "display_date": "2025-01-19",
+ "last_billed_date": "2025-01-18",
+ "last_billed_amount": "-21.02",
+ "summary": {
+ "electricity_used": "12.98",
+ "other_charges": "0.00",
+ "payments": "0.00",
+ "credits": "0.00",
+ "mobile_charges": "0.00",
+ "broadband_charges": "0.00",
+ "addon_unbilled_charges": {}
+ },
+ "is_prepay": "N"
+ },
+ "status": 1
+}
diff --git a/tests/components/electric_kiwi/fixtures/connection_details.json b/tests/components/electric_kiwi/fixtures/connection_details.json
new file mode 100644
index 00000000000..5b446659aab
--- /dev/null
+++ b/tests/components/electric_kiwi/fixtures/connection_details.json
@@ -0,0 +1,73 @@
+{
+ "data": {
+ "type": "connection",
+ "id": 515363,
+ "customer_id": 273941,
+ "customer_number": 34030646,
+ "icp_identifier": "00000000DDA",
+ "address": "",
+ "short_address": "",
+ "physical_address_unit": "",
+ "physical_address_number": "555",
+ "physical_address_street": "RACECOURSE ROAD",
+ "physical_address_suburb": "",
+ "physical_address_town": "Blah",
+ "physical_address_region": "Blah",
+ "physical_address_postcode": "0000",
+ "is_active": "Y",
+ "pricing_plan": {
+ "id": 51423,
+ "usage": "0.0000",
+ "fixed": "0.6000",
+ "usage_rate_inc_gst": "0.0000",
+ "supply_rate_inc_gst": "0.6900",
+ "plan_description": "MoveMaster Anytime Residential (Low User)",
+ "plan_type": "movemaster_tou",
+ "signup_price_plan_blurb": "Better rates every day during off-peak, and all day on weekends. Plus half price nights (11pm-7am) and our best solar buyback.",
+ "signup_price_plan_label": "MoveMaster",
+ "app_price_plan_label": "Your MoveMaster rates are...",
+ "solar_rate_excl_gst": "0.1250",
+ "solar_rate_incl_gst": "0.1438",
+ "pricing_type": "tou_plus",
+ "tou_plus": {
+ "fixed_rate_excl_gst": "0.6000",
+ "fixed_rate_incl_gst": "0.6900",
+ "interval_types": ["peak", "off_peak_shoulder", "off_peak_night"],
+ "peak": {
+ "price_excl_gst": "0.5390",
+ "price_incl_gst": "0.6199",
+ "display_text": {
+ "Weekdays": "7am-9am, 5pm-9pm"
+ },
+ "tou_plus_label": "Peak"
+ },
+ "off_peak_shoulder": {
+ "price_excl_gst": "0.3234",
+ "price_incl_gst": "0.3719",
+ "display_text": {
+ "Weekdays": "9am-5pm, 9pm-11pm",
+ "Weekends": "7am-11pm"
+ },
+ "tou_plus_label": "Off-peak shoulder"
+ },
+ "off_peak_night": {
+ "price_excl_gst": "0.2695",
+ "price_incl_gst": "0.3099",
+ "display_text": {
+ "Every day": "11pm-7am"
+ },
+ "tou_plus_label": "Off-peak night"
+ }
+ }
+ },
+ "hop": {
+ "start_time": "9:00 PM",
+ "end_time": "10:00 PM",
+ "interval_start": "43",
+ "interval_end": "44"
+ },
+ "start_date": "2022-03-03",
+ "end_date": "",
+ "property_type": "residential"
+ }
+}
diff --git a/tests/components/electric_kiwi/fixtures/get_hop.json b/tests/components/electric_kiwi/fixtures/get_hop.json
index d29825391e9..2b126bfc017 100644
--- a/tests/components/electric_kiwi/fixtures/get_hop.json
+++ b/tests/components/electric_kiwi/fixtures/get_hop.json
@@ -1,16 +1,18 @@
{
"data": {
- "connection_id": "3",
- "customer_number": 1000001,
- "end": {
- "end_time": "5:00 PM",
- "interval": "34"
- },
+ "type": "hop_customer",
+ "customer_id": 123456,
+ "service_type": "electricity",
+ "connection_id": 515363,
+ "billing_id": 1247975,
"start": {
- "start_time": "4:00 PM",
- "interval": "33"
+ "interval": "33",
+ "start_time": "4:00 PM"
},
- "type": "hop_customer"
+ "end": {
+ "interval": "34",
+ "end_time": "5:00 PM"
+ }
},
"status": 1
}
diff --git a/tests/components/electric_kiwi/fixtures/hop_intervals.json b/tests/components/electric_kiwi/fixtures/hop_intervals.json
index 15ecc174f13..860630b000a 100644
--- a/tests/components/electric_kiwi/fixtures/hop_intervals.json
+++ b/tests/components/electric_kiwi/fixtures/hop_intervals.json
@@ -1,249 +1,250 @@
{
"data": {
- "hop_duration": "60",
"type": "hop_intervals",
+ "hop_duration": "60",
"intervals": {
"1": {
- "active": 1,
+ "start_time": "12:00 AM",
"end_time": "1:00 AM",
- "start_time": "12:00 AM"
+ "active": 1
},
"2": {
- "active": 1,
+ "start_time": "12:30 AM",
"end_time": "1:30 AM",
- "start_time": "12:30 AM"
+ "active": 1
},
"3": {
- "active": 1,
+ "start_time": "1:00 AM",
"end_time": "2:00 AM",
- "start_time": "1:00 AM"
+ "active": 1
},
"4": {
- "active": 1,
+ "start_time": "1:30 AM",
"end_time": "2:30 AM",
- "start_time": "1:30 AM"
+ "active": 1
},
"5": {
- "active": 1,
+ "start_time": "2:00 AM",
"end_time": "3:00 AM",
- "start_time": "2:00 AM"
+ "active": 1
},
"6": {
- "active": 1,
+ "start_time": "2:30 AM",
"end_time": "3:30 AM",
- "start_time": "2:30 AM"
+ "active": 1
},
"7": {
- "active": 1,
+ "start_time": "3:00 AM",
"end_time": "4:00 AM",
- "start_time": "3:00 AM"
+ "active": 1
},
"8": {
- "active": 1,
+ "start_time": "3:30 AM",
"end_time": "4:30 AM",
- "start_time": "3:30 AM"
+ "active": 1
},
"9": {
- "active": 1,
+ "start_time": "4:00 AM",
"end_time": "5:00 AM",
- "start_time": "4:00 AM"
+ "active": 1
},
"10": {
- "active": 1,
+ "start_time": "4:30 AM",
"end_time": "5:30 AM",
- "start_time": "4:30 AM"
+ "active": 1
},
"11": {
- "active": 1,
+ "start_time": "5:00 AM",
"end_time": "6:00 AM",
- "start_time": "5:00 AM"
+ "active": 1
},
"12": {
- "active": 1,
+ "start_time": "5:30 AM",
"end_time": "6:30 AM",
- "start_time": "5:30 AM"
+ "active": 1
},
"13": {
- "active": 1,
+ "start_time": "6:00 AM",
"end_time": "7:00 AM",
- "start_time": "6:00 AM"
+ "active": 1
},
"14": {
- "active": 1,
+ "start_time": "6:30 AM",
"end_time": "7:30 AM",
- "start_time": "6:30 AM"
+ "active": 0
},
"15": {
- "active": 1,
+ "start_time": "7:00 AM",
"end_time": "8:00 AM",
- "start_time": "7:00 AM"
+ "active": 0
},
"16": {
- "active": 1,
+ "start_time": "7:30 AM",
"end_time": "8:30 AM",
- "start_time": "7:30 AM"
+ "active": 0
},
"17": {
- "active": 1,
+ "start_time": "8:00 AM",
"end_time": "9:00 AM",
- "start_time": "8:00 AM"
+ "active": 0
},
"18": {
- "active": 1,
+ "start_time": "8:30 AM",
"end_time": "9:30 AM",
- "start_time": "8:30 AM"
+ "active": 0
},
"19": {
- "active": 1,
+ "start_time": "9:00 AM",
"end_time": "10:00 AM",
- "start_time": "9:00 AM"
+ "active": 1
},
"20": {
- "active": 1,
+ "start_time": "9:30 AM",
"end_time": "10:30 AM",
- "start_time": "9:30 AM"
+ "active": 1
},
"21": {
- "active": 1,
+ "start_time": "10:00 AM",
"end_time": "11:00 AM",
- "start_time": "10:00 AM"
+ "active": 1
},
"22": {
- "active": 1,
+ "start_time": "10:30 AM",
"end_time": "11:30 AM",
- "start_time": "10:30 AM"
+ "active": 1
},
"23": {
- "active": 1,
+ "start_time": "11:00 AM",
"end_time": "12:00 PM",
- "start_time": "11:00 AM"
+ "active": 1
},
"24": {
- "active": 1,
+ "start_time": "11:30 AM",
"end_time": "12:30 PM",
- "start_time": "11:30 AM"
+ "active": 1
},
"25": {
- "active": 1,
+ "start_time": "12:00 PM",
"end_time": "1:00 PM",
- "start_time": "12:00 PM"
+ "active": 1
},
"26": {
- "active": 1,
+ "start_time": "12:30 PM",
"end_time": "1:30 PM",
- "start_time": "12:30 PM"
+ "active": 1
},
"27": {
- "active": 1,
+ "start_time": "1:00 PM",
"end_time": "2:00 PM",
- "start_time": "1:00 PM"
+ "active": 1
},
"28": {
- "active": 1,
+ "start_time": "1:30 PM",
"end_time": "2:30 PM",
- "start_time": "1:30 PM"
+ "active": 1
},
"29": {
- "active": 1,
+ "start_time": "2:00 PM",
"end_time": "3:00 PM",
- "start_time": "2:00 PM"
+ "active": 1
},
"30": {
- "active": 1,
+ "start_time": "2:30 PM",
"end_time": "3:30 PM",
- "start_time": "2:30 PM"
+ "active": 1
},
"31": {
- "active": 1,
+ "start_time": "3:00 PM",
"end_time": "4:00 PM",
- "start_time": "3:00 PM"
+ "active": 1
},
"32": {
- "active": 1,
+ "start_time": "3:30 PM",
"end_time": "4:30 PM",
- "start_time": "3:30 PM"
+ "active": 1
},
"33": {
- "active": 1,
+ "start_time": "4:00 PM",
"end_time": "5:00 PM",
- "start_time": "4:00 PM"
+ "active": 1
},
"34": {
- "active": 1,
+ "start_time": "4:30 PM",
"end_time": "5:30 PM",
- "start_time": "4:30 PM"
+ "active": 0
},
"35": {
- "active": 1,
+ "start_time": "5:00 PM",
"end_time": "6:00 PM",
- "start_time": "5:00 PM"
+ "active": 0
},
"36": {
- "active": 1,
+ "start_time": "5:30 PM",
"end_time": "6:30 PM",
- "start_time": "5:30 PM"
+ "active": 0
},
"37": {
- "active": 1,
+ "start_time": "6:00 PM",
"end_time": "7:00 PM",
- "start_time": "6:00 PM"
+ "active": 0
},
"38": {
- "active": 1,
+ "start_time": "6:30 PM",
"end_time": "7:30 PM",
- "start_time": "6:30 PM"
+ "active": 0
},
"39": {
- "active": 1,
+ "start_time": "7:00 PM",
"end_time": "8:00 PM",
- "start_time": "7:00 PM"
+ "active": 0
},
"40": {
- "active": 1,
+ "start_time": "7:30 PM",
"end_time": "8:30 PM",
- "start_time": "7:30 PM"
+ "active": 0
},
"41": {
- "active": 1,
+ "start_time": "8:00 PM",
"end_time": "9:00 PM",
- "start_time": "8:00 PM"
+ "active": 0
},
"42": {
- "active": 1,
+ "start_time": "8:30 PM",
"end_time": "9:30 PM",
- "start_time": "8:30 PM"
+ "active": 0
},
"43": {
- "active": 1,
+ "start_time": "9:00 PM",
"end_time": "10:00 PM",
- "start_time": "9:00 PM"
+ "active": 1
},
"44": {
- "active": 1,
+ "start_time": "9:30 PM",
"end_time": "10:30 PM",
- "start_time": "9:30 PM"
+ "active": 1
},
"45": {
- "active": 1,
- "end_time": "11:00 AM",
- "start_time": "10:00 PM"
+ "start_time": "10:00 PM",
+ "end_time": "11:00 PM",
+ "active": 1
},
"46": {
- "active": 1,
+ "start_time": "10:30 PM",
"end_time": "11:30 PM",
- "start_time": "10:30 PM"
+ "active": 1
},
"47": {
- "active": 1,
+ "start_time": "11:00 PM",
"end_time": "12:00 AM",
- "start_time": "11:00 PM"
+ "active": 1
},
"48": {
- "active": 1,
+ "start_time": "11:30 PM",
"end_time": "12:30 AM",
- "start_time": "11:30 PM"
+ "active": 0
}
- }
+ },
+ "service_type": "electricity"
},
"status": 1
}
diff --git a/tests/components/electric_kiwi/fixtures/session.json b/tests/components/electric_kiwi/fixtures/session.json
new file mode 100644
index 00000000000..ee04aaca549
--- /dev/null
+++ b/tests/components/electric_kiwi/fixtures/session.json
@@ -0,0 +1,23 @@
+{
+ "data": {
+ "data": {
+ "type": "session",
+ "avatar": [],
+ "customer_number": 123456,
+ "customer_name": "Joe Dirt",
+ "email": "joe@dirt.kiwi",
+ "customer_status": "Y",
+ "services": [
+ {
+ "service": "Electricity",
+ "identifier": "00000000DDA",
+ "is_primary_service": true,
+ "service_status": "Y"
+ }
+ ],
+ "res_partner_id": 285554,
+ "nuid": "EK_GUID"
+ }
+ },
+ "status": 1
+}
diff --git a/tests/components/electric_kiwi/fixtures/session_no_services.json b/tests/components/electric_kiwi/fixtures/session_no_services.json
new file mode 100644
index 00000000000..62ae7aea20a
--- /dev/null
+++ b/tests/components/electric_kiwi/fixtures/session_no_services.json
@@ -0,0 +1,16 @@
+{
+ "data": {
+ "data": {
+ "type": "session",
+ "avatar": [],
+ "customer_number": 123456,
+ "customer_name": "Joe Dirt",
+ "email": "joe@dirt.kiwi",
+ "customer_status": "Y",
+ "services": [],
+ "res_partner_id": 285554,
+ "nuid": "EK_GUID"
+ }
+ },
+ "status": 1
+}
diff --git a/tests/components/electric_kiwi/test_config_flow.py b/tests/components/electric_kiwi/test_config_flow.py
index 681320972b5..ab643a0ddf1 100644
--- a/tests/components/electric_kiwi/test_config_flow.py
+++ b/tests/components/electric_kiwi/test_config_flow.py
@@ -3,70 +3,40 @@
from __future__ import annotations
from http import HTTPStatus
-from unittest.mock import AsyncMock, MagicMock
+from unittest.mock import AsyncMock
+from electrickiwi_api.exceptions import ApiException
import pytest
-from homeassistant import config_entries
-from homeassistant.components.application_credentials import (
- ClientCredential,
- async_import_client_credential,
-)
from homeassistant.components.electric_kiwi.const import (
DOMAIN,
OAUTH2_AUTHORIZE,
OAUTH2_TOKEN,
SCOPE_VALUES,
)
+from homeassistant.config_entries import SOURCE_USER
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers import config_entry_oauth2_flow
-from homeassistant.setup import async_setup_component
-from .conftest import CLIENT_ID, CLIENT_SECRET, REDIRECT_URI
+from .conftest import CLIENT_ID, REDIRECT_URI
from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMocker
from tests.typing import ClientSessionGenerator
-pytestmark = pytest.mark.usefixtures("mock_setup_entry")
-
-@pytest.fixture
-async def setup_credentials(hass: HomeAssistant) -> None:
- """Fixture to setup application credentials component."""
- await async_setup_component(hass, "application_credentials", {})
- await async_import_client_credential(
- hass,
- DOMAIN,
- ClientCredential(CLIENT_ID, CLIENT_SECRET),
- )
-
-
-async def test_config_flow_no_credentials(hass: HomeAssistant) -> None:
- """Test config flow base case with no credentials registered."""
- result = await hass.config_entries.flow.async_init(
- DOMAIN, context={"source": config_entries.SOURCE_USER}
- )
- assert result.get("type") is FlowResultType.ABORT
- assert result.get("reason") == "missing_credentials"
-
-
-@pytest.mark.usefixtures("current_request_with_host")
+@pytest.mark.usefixtures("current_request_with_host", "electrickiwi_api")
async def test_full_flow(
hass: HomeAssistant,
hass_client_no_auth: ClientSessionGenerator,
aioclient_mock: AiohttpClientMocker,
- setup_credentials: None,
mock_setup_entry: AsyncMock,
) -> None:
"""Check full flow."""
- await async_import_client_credential(
- hass, DOMAIN, ClientCredential(CLIENT_ID, CLIENT_SECRET)
- )
result = await hass.config_entries.flow.async_init(
- DOMAIN, context={"source": config_entries.SOURCE_USER, "entry_id": DOMAIN}
+ DOMAIN, context={"source": SOURCE_USER}
)
state = config_entry_oauth2_flow._encode_jwt(
hass,
@@ -76,13 +46,13 @@ async def test_full_flow(
},
)
- URL_SCOPE = SCOPE_VALUES.replace(" ", "+")
+ url_scope = SCOPE_VALUES.replace(" ", "+")
assert result["url"] == (
f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}"
f"&redirect_uri={REDIRECT_URI}"
f"&state={state}"
- f"&scope={URL_SCOPE}"
+ f"&scope={url_scope}"
)
client = await hass_client_no_auth()
@@ -90,6 +60,7 @@ async def test_full_flow(
assert resp.status == HTTPStatus.OK
assert resp.headers["content-type"] == "text/html; charset=utf-8"
+ aioclient_mock.clear_requests()
aioclient_mock.post(
OAUTH2_TOKEN,
json={
@@ -106,20 +77,73 @@ async def test_full_flow(
assert len(mock_setup_entry.mock_calls) == 1
+@pytest.mark.usefixtures("current_request_with_host")
+async def test_flow_failure(
+ hass: HomeAssistant,
+ hass_client_no_auth: ClientSessionGenerator,
+ aioclient_mock: AiohttpClientMocker,
+ electrickiwi_api: AsyncMock,
+) -> None:
+ """Check failure on creation of entry."""
+
+ result = await hass.config_entries.flow.async_init(
+ DOMAIN, context={"source": SOURCE_USER}
+ )
+ state = config_entry_oauth2_flow._encode_jwt(
+ hass,
+ {
+ "flow_id": result["flow_id"],
+ "redirect_uri": REDIRECT_URI,
+ },
+ )
+
+ url_scope = SCOPE_VALUES.replace(" ", "+")
+
+ assert result["url"] == (
+ f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}"
+ f"&redirect_uri={REDIRECT_URI}"
+ f"&state={state}"
+ f"&scope={url_scope}"
+ )
+
+ client = await hass_client_no_auth()
+ resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
+ assert resp.status == HTTPStatus.OK
+ assert resp.headers["content-type"] == "text/html; charset=utf-8"
+
+ aioclient_mock.clear_requests()
+ aioclient_mock.post(
+ OAUTH2_TOKEN,
+ json={
+ "refresh_token": "mock-refresh-token",
+ "access_token": "mock-access-token",
+ "type": "Bearer",
+ "expires_in": 60,
+ },
+ )
+
+ electrickiwi_api.get_active_session.side_effect = ApiException()
+
+ result = await hass.config_entries.flow.async_configure(result["flow_id"])
+
+ assert len(hass.config_entries.async_entries(DOMAIN)) == 0
+ assert result.get("type") is FlowResultType.ABORT
+ assert result.get("reason") == "connection_error"
+
+
@pytest.mark.usefixtures("current_request_with_host")
async def test_existing_entry(
hass: HomeAssistant,
hass_client_no_auth: ClientSessionGenerator,
aioclient_mock: AiohttpClientMocker,
- setup_credentials: None,
- config_entry: MockConfigEntry,
+ migrated_config_entry: MockConfigEntry,
) -> None:
"""Check existing entry."""
- config_entry.add_to_hass(hass)
+ migrated_config_entry.add_to_hass(hass)
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
result = await hass.config_entries.flow.async_init(
- DOMAIN, context={"source": config_entries.SOURCE_USER, "entry_id": DOMAIN}
+ DOMAIN, context={"source": SOURCE_USER, "entry_id": DOMAIN}
)
state = config_entry_oauth2_flow._encode_jwt(
@@ -145,7 +169,9 @@ async def test_existing_entry(
},
)
- await hass.config_entries.flow.async_configure(result["flow_id"])
+ result = await hass.config_entries.flow.async_configure(result["flow_id"])
+ assert result.get("type") is FlowResultType.ABORT
+ assert result.get("reason") == "already_configured"
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
@@ -154,13 +180,13 @@ async def test_reauthentication(
hass: HomeAssistant,
hass_client_no_auth: ClientSessionGenerator,
aioclient_mock: AiohttpClientMocker,
- mock_setup_entry: MagicMock,
- config_entry: MockConfigEntry,
- setup_credentials: None,
+ mock_setup_entry: AsyncMock,
+ migrated_config_entry: MockConfigEntry,
) -> None:
"""Test Electric Kiwi reauthentication."""
- config_entry.add_to_hass(hass)
- result = await config_entry.start_reauth_flow(hass)
+ migrated_config_entry.add_to_hass(hass)
+
+ result = await migrated_config_entry.start_reauth_flow(hass)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "reauth_confirm"
@@ -189,8 +215,11 @@ async def test_reauthentication(
},
)
- await hass.config_entries.flow.async_configure(result["flow_id"])
+ result = await hass.config_entries.flow.async_configure(result["flow_id"])
await hass.async_block_till_done()
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
assert len(mock_setup_entry.mock_calls) == 1
+
+ assert result.get("type") is FlowResultType.ABORT
+ assert result.get("reason") == "reauth_successful"
diff --git a/tests/components/electric_kiwi/test_init.py b/tests/components/electric_kiwi/test_init.py
new file mode 100644
index 00000000000..947f788ad55
--- /dev/null
+++ b/tests/components/electric_kiwi/test_init.py
@@ -0,0 +1,135 @@
+"""Test the Electric Kiwi init."""
+
+import http
+from unittest.mock import AsyncMock, patch
+
+from aiohttp import RequestInfo
+from aiohttp.client_exceptions import ClientResponseError
+from electrickiwi_api.exceptions import ApiException, AuthException
+import pytest
+
+from homeassistant.components.electric_kiwi.const import DOMAIN
+from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
+from homeassistant.config_entries import ConfigEntryState
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers import entity_registry as er
+
+from . import init_integration
+
+from tests.common import MockConfigEntry
+
+
+async def test_async_setup_entry(
+ hass: HomeAssistant, config_entry: MockConfigEntry
+) -> None:
+ """Test a successful setup entry and unload of entry."""
+ await init_integration(hass, config_entry)
+
+ assert len(hass.config_entries.async_entries(DOMAIN)) == 1
+ assert config_entry.state is ConfigEntryState.LOADED
+
+ assert await hass.config_entries.async_unload(config_entry.entry_id)
+ await hass.async_block_till_done()
+
+ assert config_entry.state is ConfigEntryState.NOT_LOADED
+
+
+async def test_async_setup_multiple_entries(
+ hass: HomeAssistant,
+ config_entry: MockConfigEntry,
+ config_entry2: MockConfigEntry,
+) -> None:
+ """Test a successful setup and unload of multiple entries."""
+
+ for entry in (config_entry, config_entry2):
+ await init_integration(hass, entry)
+
+ assert len(hass.config_entries.async_entries(DOMAIN)) == 2
+
+ for entry in (config_entry, config_entry2):
+ assert await hass.config_entries.async_unload(entry.entry_id)
+ await hass.async_block_till_done()
+
+ assert entry.state is ConfigEntryState.NOT_LOADED
+
+
+@pytest.mark.parametrize(
+ ("status", "expected_state"),
+ [
+ (
+ http.HTTPStatus.UNAUTHORIZED,
+ ConfigEntryState.SETUP_ERROR,
+ ),
+ (
+ http.HTTPStatus.INTERNAL_SERVER_ERROR,
+ ConfigEntryState.SETUP_RETRY,
+ ),
+ ],
+ ids=["failure_requires_reauth", "transient_failure"],
+)
+async def test_refresh_token_validity_failures(
+ hass: HomeAssistant,
+ config_entry: MockConfigEntry,
+ status: http.HTTPStatus,
+ expected_state: ConfigEntryState,
+) -> None:
+ """Test token refresh failure status."""
+ with patch(
+ "homeassistant.helpers.config_entry_oauth2_flow.OAuth2Session.async_ensure_token_valid",
+ side_effect=ClientResponseError(
+ RequestInfo("", "POST", {}, ""), None, status=status
+ ),
+ ) as mock_async_ensure_token_valid:
+ await init_integration(hass, config_entry)
+ mock_async_ensure_token_valid.assert_called_once()
+
+ assert len(hass.config_entries.async_entries(DOMAIN)) == 1
+
+ entries = hass.config_entries.async_entries(DOMAIN)
+ assert entries[0].state is expected_state
+
+
+async def test_unique_id_migration(
+ hass: HomeAssistant,
+ config_entry: MockConfigEntry,
+ entity_registry: er.EntityRegistry,
+) -> None:
+ """Test that the unique ID is migrated to the customer number."""
+
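+ # The migration should bump the minor version, set the unique ID to the customer number,
+ # and rewrite entity unique IDs from the connection id to the ICP identifier.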
+ config_entry.add_to_hass(hass)
+ entity_registry.async_get_or_create(
+ SENSOR_DOMAIN, DOMAIN, "123456_515363_sensor", config_entry=config_entry
+ )
+ await hass.config_entries.async_setup(config_entry.entry_id)
+ await hass.async_block_till_done()
+ new_entry = hass.config_entries.async_get_entry(config_entry.entry_id)
+ assert new_entry.minor_version == 2
+ assert new_entry.unique_id == "123456"
+ entity_entry = entity_registry.async_get(
+ "sensor.electric_kiwi_123456_515363_sensor"
+ )
+ assert entity_entry.unique_id == "123456_00000000DDA_sensor"
+
+
+async def test_unique_id_migration_failure(
+ hass: HomeAssistant, config_entry: MockConfigEntry, electrickiwi_api: AsyncMock
+) -> None:
+ """Test that the unique ID is migrated to the customer number."""
+ electrickiwi_api.set_active_session.side_effect = ApiException()
+ await init_integration(hass, config_entry)
+
+ assert config_entry.minor_version == 1
+ assert config_entry.unique_id == DOMAIN
+ assert config_entry.state is ConfigEntryState.MIGRATION_ERROR
+
+
+async def test_unique_id_migration_auth_failure(
+ hass: HomeAssistant, config_entry: MockConfigEntry, electrickiwi_api: AsyncMock
+) -> None:
+ """Test that the unique ID is migrated to the customer number."""
+ electrickiwi_api.set_active_session.side_effect = AuthException()
+ await init_integration(hass, config_entry)
+
+ assert config_entry.minor_version == 1
+ assert config_entry.unique_id == DOMAIN
+ assert config_entry.state is ConfigEntryState.MIGRATION_ERROR
diff --git a/tests/components/electric_kiwi/test_sensor.py b/tests/components/electric_kiwi/test_sensor.py
index a85eb16a986..3e58b33a998 100644
--- a/tests/components/electric_kiwi/test_sensor.py
+++ b/tests/components/electric_kiwi/test_sensor.py
@@ -20,7 +20,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_registry import EntityRegistry
from homeassistant.util import dt as dt_util
-from .conftest import ComponentSetup, YieldFixture
+from . import init_integration
from tests.common import MockConfigEntry
@@ -47,10 +47,9 @@ def restore_timezone():
async def test_hop_sensors(
hass: HomeAssistant,
config_entry: MockConfigEntry,
- ek_api: YieldFixture,
- ek_auth: YieldFixture,
+ electrickiwi_api: Mock,
+ ek_auth: AsyncMock,
entity_registry: EntityRegistry,
- component_setup: ComponentSetup,
sensor: str,
sensor_state: str,
) -> None:
@@ -61,7 +60,7 @@ async def test_hop_sensors(
sensor state should be set to today at 4pm or if now is past 4pm,
then tomorrow at 4pm.
"""
- assert await component_setup()
+ await init_integration(hass, config_entry)
assert config_entry.state is ConfigEntryState.LOADED
entity = entity_registry.async_get(sensor)
@@ -70,8 +69,7 @@ async def test_hop_sensors(
state = hass.states.get(sensor)
assert state
- api = ek_api(Mock())
- hop_data = await api.get_hop()
+ hop_data = await electrickiwi_api.get_hop()
value = _check_and_move_time(hop_data, sensor_state)
@@ -98,20 +96,19 @@ async def test_hop_sensors(
),
(
"sensor.next_billing_date",
- "2020-11-03T00:00:00",
+ "2025-02-19T00:00:00",
SensorDeviceClass.DATE,
None,
),
- ("sensor.hour_of_power_savings", "3.5", None, SensorStateClass.MEASUREMENT),
+ ("sensor.hour_of_power_savings", "11.2", None, SensorStateClass.MEASUREMENT),
],
)
async def test_account_sensors(
hass: HomeAssistant,
config_entry: MockConfigEntry,
- ek_api: YieldFixture,
- ek_auth: YieldFixture,
+ electrickiwi_api: AsyncMock,
+ ek_auth: AsyncMock,
entity_registry: EntityRegistry,
- component_setup: ComponentSetup,
sensor: str,
sensor_state: str,
device_class: str,
@@ -119,7 +116,7 @@ async def test_account_sensors(
) -> None:
"""Test Account sensors for the Electric Kiwi integration."""
- assert await component_setup()
+ await init_integration(hass, config_entry)
assert config_entry.state is ConfigEntryState.LOADED
entity = entity_registry.async_get(sensor)
@@ -133,9 +130,9 @@ async def test_account_sensors(
assert state.attributes.get(ATTR_STATE_CLASS) == state_class
-async def test_check_and_move_time(ek_api: AsyncMock) -> None:
+async def test_check_and_move_time(electrickiwi_api: AsyncMock) -> None:
"""Test correct time is returned depending on time of day."""
- hop = await ek_api(Mock()).get_hop()
+ hop = await electrickiwi_api.get_hop()
test_time = datetime(2023, 6, 21, 18, 0, 0, tzinfo=TEST_TIMEZONE)
dt_util.set_default_time_zone(TEST_TIMEZONE)
diff --git a/tests/components/enphase_envoy/test_config_flow.py b/tests/components/enphase_envoy/test_config_flow.py
index a3da14b3835..efbe6da9b13 100644
--- a/tests/components/enphase_envoy/test_config_flow.py
+++ b/tests/components/enphase_envoy/test_config_flow.py
@@ -439,7 +439,7 @@ async def test_zero_conf_old_blank_entry(
mock_setup_entry: AsyncMock,
mock_envoy: AsyncMock,
) -> None:
- """Test re-using old blank entry."""
+ """Test reusing old blank entry."""
entry = MockConfigEntry(
domain=DOMAIN,
data={
@@ -478,7 +478,7 @@ async def test_zero_conf_old_blank_entry_standard_title(
mock_setup_entry: AsyncMock,
mock_envoy: AsyncMock,
) -> None:
- """Test re-using old blank entry was Envoy as title."""
+ """Test reusing old blank entry was Envoy as title."""
entry = MockConfigEntry(
domain=DOMAIN,
data={
@@ -519,7 +519,7 @@ async def test_zero_conf_old_blank_entry_user_title(
mock_setup_entry: AsyncMock,
mock_envoy: AsyncMock,
) -> None:
- """Test re-using old blank entry with user title."""
+ """Test reusing old blank entry with user title."""
entry = MockConfigEntry(
domain=DOMAIN,
data={
diff --git a/tests/components/fireservicerota/test_config_flow.py b/tests/components/fireservicerota/test_config_flow.py
index 5555a8d649c..8d150034ec9 100644
--- a/tests/components/fireservicerota/test_config_flow.py
+++ b/tests/components/fireservicerota/test_config_flow.py
@@ -66,7 +66,7 @@ async def test_invalid_credentials(hass: HomeAssistant) -> None:
"""Test that invalid credentials throws an error."""
with patch(
- "homeassistant.components.fireservicerota.FireServiceRota.request_tokens",
+ "homeassistant.components.fireservicerota.coordinator.FireServiceRota.request_tokens",
side_effect=InvalidAuthError,
):
result = await hass.config_entries.flow.async_init(
diff --git a/tests/components/google_drive/snapshots/test_backup.ambr b/tests/components/google_drive/snapshots/test_backup.ambr
index 9e1ec00b52e..2f3df3eed7f 100644
--- a/tests/components/google_drive/snapshots/test_backup.ambr
+++ b/tests/components/google_drive/snapshots/test_backup.ambr
@@ -140,7 +140,7 @@
tuple(
dict({
'description': '{"addons": [{"name": "Test", "slug": "test", "version": "1.0.0"}], "backup_id": "test-backup", "date": "2025-01-01T01:23:45.678Z", "database_included": true, "extra_metadata": {"with_automatic_settings": false}, "folders": [], "homeassistant_included": true, "homeassistant_version": "2024.12.0", "name": "Test", "protected": false, "size": 987}',
- 'name': 'Test_-_2025-01-01_01.23_45678000.tar',
+ 'name': 'Test_2025-01-01_01.23_45678000.tar',
'parents': list([
'HA folder ID',
]),
@@ -211,7 +211,7 @@
tuple(
dict({
'description': '{"addons": [{"name": "Test", "slug": "test", "version": "1.0.0"}], "backup_id": "test-backup", "date": "2025-01-01T01:23:45.678Z", "database_included": true, "extra_metadata": {"with_automatic_settings": false}, "folders": [], "homeassistant_included": true, "homeassistant_version": "2024.12.0", "name": "Test", "protected": false, "size": 987}',
- 'name': 'Test_-_2025-01-01_01.23_45678000.tar',
+ 'name': 'Test_2025-01-01_01.23_45678000.tar',
'parents': list([
'new folder id',
]),
diff --git a/tests/components/google_drive/test_backup.py b/tests/components/google_drive/test_backup.py
index 7e455ebb535..115a30a3eb6 100644
--- a/tests/components/google_drive/test_backup.py
+++ b/tests/components/google_drive/test_backup.py
@@ -47,6 +47,7 @@ TEST_AGENT_BACKUP_RESULT = {
"backup_id": "test-backup",
"database_included": True,
"date": "2025-01-01T01:23:45.678Z",
+ "extra_metadata": {"with_automatic_settings": False},
"folders": [],
"homeassistant_included": True,
"homeassistant_version": "2024.12.0",
diff --git a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr
index 21458abb7c8..c89981e67bb 100644
--- a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr
+++ b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr
@@ -244,7 +244,7 @@
'top_k': 64,
'top_p': 0.95,
}),
- 'model_name': 'models/gemini-1.5-flash-latest',
+ 'model_name': 'models/gemini-2.0-flash',
'safety_settings': dict({
'DANGEROUS': 'BLOCK_MEDIUM_AND_ABOVE',
'HARASSMENT': 'BLOCK_MEDIUM_AND_ABOVE',
@@ -296,7 +296,7 @@
'top_k': 64,
'top_p': 0.95,
}),
- 'model_name': 'models/gemini-1.5-flash-latest',
+ 'model_name': 'models/gemini-2.0-flash',
'safety_settings': dict({
'DANGEROUS': 'BLOCK_MEDIUM_AND_ABOVE',
'HARASSMENT': 'BLOCK_MEDIUM_AND_ABOVE',
@@ -348,7 +348,7 @@
'top_k': 64,
'top_p': 0.95,
}),
- 'model_name': 'models/gemini-1.5-flash-latest',
+ 'model_name': 'models/gemini-2.0-flash',
'safety_settings': dict({
'DANGEROUS': 'BLOCK_MEDIUM_AND_ABOVE',
'HARASSMENT': 'BLOCK_MEDIUM_AND_ABOVE',
@@ -401,7 +401,7 @@
'top_k': 64,
'top_p': 0.95,
}),
- 'model_name': 'models/gemini-1.5-flash-latest',
+ 'model_name': 'models/gemini-2.0-flash',
'safety_settings': dict({
'DANGEROUS': 'BLOCK_MEDIUM_AND_ABOVE',
'HARASSMENT': 'BLOCK_MEDIUM_AND_ABOVE',
@@ -454,7 +454,7 @@
'top_k': 64,
'top_p': 0.95,
}),
- 'model_name': 'models/gemini-1.5-flash-latest',
+ 'model_name': 'models/gemini-2.0-flash',
'safety_settings': dict({
'DANGEROUS': 'BLOCK_MEDIUM_AND_ABOVE',
'HARASSMENT': 'BLOCK_MEDIUM_AND_ABOVE',
@@ -565,7 +565,7 @@
'top_k': 64,
'top_p': 0.95,
}),
- 'model_name': 'models/gemini-1.5-flash-latest',
+ 'model_name': 'models/gemini-2.0-flash',
'safety_settings': dict({
'DANGEROUS': 'BLOCK_MEDIUM_AND_ABOVE',
'HARASSMENT': 'BLOCK_MEDIUM_AND_ABOVE',
diff --git a/tests/components/google_generative_ai_conversation/snapshots/test_diagnostics.ambr b/tests/components/google_generative_ai_conversation/snapshots/test_diagnostics.ambr
index 316bf74b72a..b445499ad49 100644
--- a/tests/components/google_generative_ai_conversation/snapshots/test_diagnostics.ambr
+++ b/tests/components/google_generative_ai_conversation/snapshots/test_diagnostics.ambr
@@ -5,7 +5,7 @@
'api_key': '**REDACTED**',
}),
'options': dict({
- 'chat_model': 'models/gemini-1.5-flash-latest',
+ 'chat_model': 'models/gemini-2.0-flash',
'dangerous_block_threshold': 'BLOCK_MEDIUM_AND_ABOVE',
'harassment_block_threshold': 'BLOCK_MEDIUM_AND_ABOVE',
'hate_block_threshold': 'BLOCK_MEDIUM_AND_ABOVE',
diff --git a/tests/components/google_generative_ai_conversation/snapshots/test_init.ambr b/tests/components/google_generative_ai_conversation/snapshots/test_init.ambr
index f68f4c6bf14..c9e02a6d009 100644
--- a/tests/components/google_generative_ai_conversation/snapshots/test_init.ambr
+++ b/tests/components/google_generative_ai_conversation/snapshots/test_init.ambr
@@ -6,7 +6,7 @@
tuple(
),
dict({
- 'model_name': 'models/gemini-1.5-flash-latest',
+ 'model_name': 'models/gemini-2.0-flash',
}),
),
tuple(
@@ -32,7 +32,7 @@
tuple(
),
dict({
- 'model_name': 'models/gemini-1.5-flash-latest',
+ 'model_name': 'models/gemini-2.0-flash',
}),
),
tuple(
diff --git a/tests/components/google_generative_ai_conversation/test_config_flow.py b/tests/components/google_generative_ai_conversation/test_config_flow.py
index d4992c732e1..ee5291196c3 100644
--- a/tests/components/google_generative_ai_conversation/test_config_flow.py
+++ b/tests/components/google_generative_ai_conversation/test_config_flow.py
@@ -39,6 +39,12 @@ from tests.common import MockConfigEntry
@pytest.fixture
def mock_models():
"""Mock the model list API."""
+ model_20_flash = Mock(
+ display_name="Gemini 2.0 Flash",
+ supported_generation_methods=["generateContent"],
+ )
+ model_20_flash.name = "models/gemini-2.0-flash"
+
model_15_flash = Mock(
display_name="Gemini 1.5 Flash",
supported_generation_methods=["generateContent"],
@@ -58,7 +64,7 @@ def mock_models():
model_10_pro.name = "models/gemini-pro"
with patch(
"homeassistant.components.google_generative_ai_conversation.config_flow.genai.list_models",
- return_value=iter([model_15_flash, model_15_pro, model_10_pro]),
+ return_value=iter([model_20_flash, model_15_flash, model_15_pro, model_10_pro]),
):
yield
diff --git a/tests/components/google_generative_ai_conversation/test_conversation.py b/tests/components/google_generative_ai_conversation/test_conversation.py
index a87056275dc..72a5390f4b1 100644
--- a/tests/components/google_generative_ai_conversation/test_conversation.py
+++ b/tests/components/google_generative_ai_conversation/test_conversation.py
@@ -36,6 +36,13 @@ def freeze_the_time():
yield
+@pytest.fixture(autouse=True)
+def mock_ulid_tools():
+ """Mock generated ULIDs for tool calls."""
+ with patch("homeassistant.helpers.llm.ulid_now", return_value="mock-tool-call"):
+ yield
+
+
@pytest.mark.parametrize(
"agent_id", [None, "conversation.google_generative_ai_conversation"]
)
@@ -177,6 +184,7 @@ async def test_chat_history(
"homeassistant.components.google_generative_ai_conversation.conversation.llm.AssistAPI._async_get_tools"
)
@pytest.mark.usefixtures("mock_init_component")
+@pytest.mark.usefixtures("mock_ulid_tools")
async def test_function_call(
mock_get_tools,
hass: HomeAssistant,
@@ -256,6 +264,7 @@ async def test_function_call(
mock_tool.async_call.assert_awaited_once_with(
hass,
llm.ToolInput(
+ id="mock-tool-call",
tool_name="test_tool",
tool_args={
"param1": ["test_value", "param1's value"],
@@ -287,9 +296,7 @@ async def test_function_call(
detail_event = trace_events[1]
assert "Answer in plain text" in detail_event["data"]["messages"][0]["content"]
assert [
- p.function_response.name
- for p in detail_event["data"]["messages"][2]["content"].parts
- if p.function_response
+ p["tool_name"] for p in detail_event["data"]["messages"][2]["tool_calls"]
] == ["test_tool"]
@@ -362,6 +369,7 @@ async def test_function_call_without_parameters(
mock_tool.async_call.assert_awaited_once_with(
hass,
llm.ToolInput(
+ id="mock-tool-call",
tool_name="test_tool",
tool_args={},
),
@@ -451,6 +459,7 @@ async def test_function_exception(
mock_tool.async_call.assert_awaited_once_with(
hass,
llm.ToolInput(
+ id="mock-tool-call",
tool_name="test_tool",
tool_args={"param1": 1},
),
@@ -605,6 +614,7 @@ async def test_template_variables(
mock_chat.send_message_async.return_value = chat_response
mock_part = MagicMock()
mock_part.text = "Model response"
+ mock_part.function_call = None
chat_response.parts = [mock_part]
result = await conversation.async_converse(
hass, "hello", None, context, agent_id=mock_config_entry.entry_id
diff --git a/tests/components/govee_light_local/conftest.py b/tests/components/govee_light_local/conftest.py
index 6a8ee99b764..61a6394bd6a 100644
--- a/tests/components/govee_light_local/conftest.py
+++ b/tests/components/govee_light_local/conftest.py
@@ -4,7 +4,8 @@ from asyncio import Event
from collections.abc import Generator
from unittest.mock import AsyncMock, MagicMock, patch
-from govee_local_api import GoveeLightCapability
+from govee_local_api import GoveeLightCapabilities
+from govee_local_api.light_capabilities import COMMON_FEATURES
import pytest
from homeassistant.components.govee_light_local.coordinator import GoveeController
@@ -34,8 +35,6 @@ def fixture_mock_setup_entry() -> Generator[AsyncMock]:
yield mock_setup_entry
-DEFAULT_CAPABILITEIS: set[GoveeLightCapability] = {
- GoveeLightCapability.COLOR_RGB,
- GoveeLightCapability.COLOR_KELVIN_TEMPERATURE,
- GoveeLightCapability.BRIGHTNESS,
-}
+DEFAULT_CAPABILITIES: GoveeLightCapabilities = GoveeLightCapabilities(
+ features=COMMON_FEATURES, segments=[], scenes={}
+)
diff --git a/tests/components/govee_light_local/test_config_flow.py b/tests/components/govee_light_local/test_config_flow.py
index 2e7144fae3a..103159f1a2b 100644
--- a/tests/components/govee_light_local/test_config_flow.py
+++ b/tests/components/govee_light_local/test_config_flow.py
@@ -10,7 +10,7 @@ from homeassistant.components.govee_light_local.const import DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
-from .conftest import DEFAULT_CAPABILITEIS
+from .conftest import DEFAULT_CAPABILITIES
def _get_devices(mock_govee_api: AsyncMock) -> list[GoveeDevice]:
@@ -20,7 +20,7 @@ def _get_devices(mock_govee_api: AsyncMock) -> list[GoveeDevice]:
ip="192.168.1.100",
fingerprint="asdawdqwdqwd1",
sku="H615A",
- capabilities=DEFAULT_CAPABILITEIS,
+ capabilities=DEFAULT_CAPABILITIES,
)
]
diff --git a/tests/components/govee_light_local/test_light.py b/tests/components/govee_light_local/test_light.py
index 4a1125643fa..24bdbba9e11 100644
--- a/tests/components/govee_light_local/test_light.py
+++ b/tests/components/govee_light_local/test_light.py
@@ -10,7 +10,7 @@ from homeassistant.components.light import ATTR_SUPPORTED_COLOR_MODES, ColorMode
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant
-from .conftest import DEFAULT_CAPABILITEIS
+from .conftest import DEFAULT_CAPABILITIES
from tests.common import MockConfigEntry
@@ -26,7 +26,7 @@ async def test_light_known_device(
ip="192.168.1.100",
fingerprint="asdawdqwdqwd",
sku="H615A",
- capabilities=DEFAULT_CAPABILITEIS,
+ capabilities=DEFAULT_CAPABILITIES,
)
]
@@ -96,7 +96,7 @@ async def test_light_remove(hass: HomeAssistant, mock_govee_api: AsyncMock) -> N
ip="192.168.1.100",
fingerprint="asdawdqwdqwd1",
sku="H615A",
- capabilities=DEFAULT_CAPABILITEIS,
+ capabilities=DEFAULT_CAPABILITIES,
)
]
@@ -152,7 +152,7 @@ async def test_light_setup_retry_eaddrinuse(
ip="192.168.1.100",
fingerprint="asdawdqwdqwd",
sku="H615A",
- capabilities=DEFAULT_CAPABILITEIS,
+ capabilities=DEFAULT_CAPABILITIES,
)
]
@@ -180,7 +180,7 @@ async def test_light_setup_error(
ip="192.168.1.100",
fingerprint="asdawdqwdqwd",
sku="H615A",
- capabilities=DEFAULT_CAPABILITEIS,
+ capabilities=DEFAULT_CAPABILITIES,
)
]
@@ -204,7 +204,7 @@ async def test_light_on_off(hass: HomeAssistant, mock_govee_api: MagicMock) -> N
ip="192.168.1.100",
fingerprint="asdawdqwdqwd",
sku="H615A",
- capabilities=DEFAULT_CAPABILITEIS,
+ capabilities=DEFAULT_CAPABILITIES,
)
]
@@ -260,7 +260,7 @@ async def test_light_brightness(hass: HomeAssistant, mock_govee_api: MagicMock)
ip="192.168.1.100",
fingerprint="asdawdqwdqwd",
sku="H615A",
- capabilities=DEFAULT_CAPABILITEIS,
+ capabilities=DEFAULT_CAPABILITIES,
)
]
@@ -335,7 +335,7 @@ async def test_light_color(hass: HomeAssistant, mock_govee_api: MagicMock) -> No
ip="192.168.1.100",
fingerprint="asdawdqwdqwd",
sku="H615A",
- capabilities=DEFAULT_CAPABILITEIS,
+ capabilities=DEFAULT_CAPABILITIES,
)
]
diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py
index d001a358640..0dd2adc99ed 100644
--- a/tests/components/hassio/test_backup.py
+++ b/tests/components/hassio/test_backup.py
@@ -26,6 +26,7 @@ from aiohasupervisor.models import (
jobs as supervisor_jobs,
mounts as supervisor_mounts,
)
+from aiohasupervisor.models.backups import LOCATION_CLOUD_BACKUP, LOCATION_LOCAL_STORAGE
from aiohasupervisor.models.mounts import MountsInfo
from freezegun.api import FrozenDateTimeFactory
import pytest
@@ -39,11 +40,7 @@ from homeassistant.components.backup import (
Folder,
)
from homeassistant.components.hassio import DOMAIN
-from homeassistant.components.hassio.backup import (
- LOCATION_CLOUD_BACKUP,
- LOCATION_LOCAL,
- RESTORE_JOB_ID_ENV,
-)
+from homeassistant.components.hassio.backup import RESTORE_JOB_ID_ENV
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
@@ -60,17 +57,12 @@ TEST_BACKUP = supervisor_backups.Backup(
homeassistant=True,
),
date=datetime.fromisoformat("1970-01-01T00:00:00Z"),
- location=None,
location_attributes={
- LOCATION_LOCAL: supervisor_backups.BackupLocationAttributes(
+ LOCATION_LOCAL_STORAGE: supervisor_backups.BackupLocationAttributes(
protected=False, size_bytes=1048576
)
},
- locations={None},
name="Test",
- protected=False,
- size=1.0,
- size_bytes=1048576,
slug="abc123",
type=supervisor_backups.BackupType.PARTIAL,
)
@@ -89,14 +81,9 @@ TEST_BACKUP_DETAILS = supervisor_backups.BackupComplete(
folders=[supervisor_backups.Folder.SHARE],
homeassistant_exclude_database=False,
homeassistant="2024.12.0",
- location=TEST_BACKUP.location,
location_attributes=TEST_BACKUP.location_attributes,
- locations=TEST_BACKUP.locations,
name=TEST_BACKUP.name,
- protected=TEST_BACKUP.protected,
repositories=[],
- size=TEST_BACKUP.size,
- size_bytes=TEST_BACKUP.size_bytes,
slug=TEST_BACKUP.slug,
supervisor_version="2024.11.2",
type=TEST_BACKUP.type,
@@ -106,21 +93,16 @@ TEST_BACKUP_2 = supervisor_backups.Backup(
compressed=False,
content=supervisor_backups.BackupContent(
addons=["ssl"],
- folders=["share"],
+ folders=[supervisor_backups.Folder.SHARE],
homeassistant=False,
),
date=datetime.fromisoformat("1970-01-01T00:00:00Z"),
- location=None,
location_attributes={
- LOCATION_LOCAL: supervisor_backups.BackupLocationAttributes(
+ LOCATION_LOCAL_STORAGE: supervisor_backups.BackupLocationAttributes(
protected=False, size_bytes=1048576
)
},
- locations={None},
name="Test",
- protected=False,
- size=1.0,
- size_bytes=1048576,
slug="abc123",
type=supervisor_backups.BackupType.PARTIAL,
)
@@ -136,17 +118,12 @@ TEST_BACKUP_DETAILS_2 = supervisor_backups.BackupComplete(
compressed=TEST_BACKUP_2.compressed,
date=TEST_BACKUP_2.date,
extra=None,
- folders=["share"],
+ folders=[supervisor_backups.Folder.SHARE],
homeassistant_exclude_database=False,
homeassistant=None,
- location=TEST_BACKUP_2.location,
location_attributes=TEST_BACKUP_2.location_attributes,
- locations=TEST_BACKUP_2.locations,
name=TEST_BACKUP_2.name,
- protected=TEST_BACKUP_2.protected,
repositories=[],
- size=TEST_BACKUP_2.size,
- size_bytes=TEST_BACKUP_2.size_bytes,
slug=TEST_BACKUP_2.slug,
supervisor_version="2024.11.2",
type=TEST_BACKUP_2.type,
@@ -156,21 +133,16 @@ TEST_BACKUP_3 = supervisor_backups.Backup(
compressed=False,
content=supervisor_backups.BackupContent(
addons=["ssl"],
- folders=["share"],
+ folders=[supervisor_backups.Folder.SHARE],
homeassistant=True,
),
date=datetime.fromisoformat("1970-01-01T00:00:00Z"),
- location="share",
location_attributes={
- LOCATION_LOCAL: supervisor_backups.BackupLocationAttributes(
+ LOCATION_LOCAL_STORAGE: supervisor_backups.BackupLocationAttributes(
protected=False, size_bytes=1048576
)
},
- locations={"share"},
name="Test",
- protected=False,
- size=1.0,
- size_bytes=1048576,
slug="abc123",
type=supervisor_backups.BackupType.PARTIAL,
)
@@ -186,17 +158,12 @@ TEST_BACKUP_DETAILS_3 = supervisor_backups.BackupComplete(
compressed=TEST_BACKUP_3.compressed,
date=TEST_BACKUP_3.date,
extra=None,
- folders=["share"],
+ folders=[supervisor_backups.Folder.SHARE],
homeassistant_exclude_database=False,
homeassistant=None,
- location=TEST_BACKUP_3.location,
location_attributes=TEST_BACKUP_3.location_attributes,
- locations=TEST_BACKUP_3.locations,
name=TEST_BACKUP_3.name,
- protected=TEST_BACKUP_3.protected,
repositories=[],
- size=TEST_BACKUP_3.size,
- size_bytes=TEST_BACKUP_3.size_bytes,
slug=TEST_BACKUP_3.slug,
supervisor_version="2024.11.2",
type=TEST_BACKUP_3.type,
@@ -207,21 +174,16 @@ TEST_BACKUP_4 = supervisor_backups.Backup(
compressed=False,
content=supervisor_backups.BackupContent(
addons=["ssl"],
- folders=["share"],
+ folders=[supervisor_backups.Folder.SHARE],
homeassistant=True,
),
date=datetime.fromisoformat("1970-01-01T00:00:00Z"),
- location=None,
location_attributes={
- LOCATION_LOCAL: supervisor_backups.BackupLocationAttributes(
+ LOCATION_LOCAL_STORAGE: supervisor_backups.BackupLocationAttributes(
protected=False, size_bytes=1048576
)
},
- locations={None},
name="Test",
- protected=False,
- size=1.0,
- size_bytes=1048576,
slug="abc123",
type=supervisor_backups.BackupType.PARTIAL,
)
@@ -234,23 +196,18 @@ TEST_BACKUP_DETAILS_4 = supervisor_backups.BackupComplete(
version="9.14.0",
)
],
- compressed=TEST_BACKUP.compressed,
- date=TEST_BACKUP.date,
+ compressed=TEST_BACKUP_4.compressed,
+ date=TEST_BACKUP_4.date,
extra=None,
- folders=["share"],
+ folders=[supervisor_backups.Folder.SHARE],
homeassistant_exclude_database=True,
homeassistant="2024.12.0",
- location=TEST_BACKUP.location,
- location_attributes=TEST_BACKUP.location_attributes,
- locations=TEST_BACKUP.locations,
- name=TEST_BACKUP.name,
- protected=TEST_BACKUP.protected,
+ location_attributes=TEST_BACKUP_4.location_attributes,
+ name=TEST_BACKUP_4.name,
repositories=[],
- size=TEST_BACKUP.size,
- size_bytes=TEST_BACKUP.size_bytes,
- slug=TEST_BACKUP.slug,
+ slug=TEST_BACKUP_4.slug,
supervisor_version="2024.11.2",
- type=TEST_BACKUP.type,
+ type=TEST_BACKUP_4.type,
)
TEST_BACKUP_5 = supervisor_backups.Backup(
@@ -261,17 +218,12 @@ TEST_BACKUP_5 = supervisor_backups.Backup(
homeassistant=True,
),
date=datetime.fromisoformat("1970-01-01T00:00:00Z"),
- location=LOCATION_CLOUD_BACKUP,
location_attributes={
LOCATION_CLOUD_BACKUP: supervisor_backups.BackupLocationAttributes(
protected=False, size_bytes=1048576
)
},
- locations={LOCATION_CLOUD_BACKUP},
name="Test",
- protected=False,
- size=1.0,
- size_bytes=1048576,
slug="abc123",
type=supervisor_backups.BackupType.PARTIAL,
)
@@ -290,14 +242,9 @@ TEST_BACKUP_DETAILS_5 = supervisor_backups.BackupComplete(
folders=[supervisor_backups.Folder.SHARE],
homeassistant_exclude_database=False,
homeassistant="2024.12.0",
- location=TEST_BACKUP_5.location,
location_attributes=TEST_BACKUP_5.location_attributes,
- locations=TEST_BACKUP_5.locations,
name=TEST_BACKUP_5.name,
- protected=TEST_BACKUP_5.protected,
repositories=[],
- size=TEST_BACKUP_5.size,
- size_bytes=TEST_BACKUP_5.size_bytes,
slug=TEST_BACKUP_5.slug,
supervisor_version="2024.11.2",
type=TEST_BACKUP_5.type,
@@ -312,6 +259,7 @@ TEST_JOB_NOT_DONE = supervisor_jobs.Job(
stage="copy_additional_locations",
done=False,
errors=[],
+ created=datetime.fromisoformat("1970-01-01T00:00:00Z"),
child_jobs=[],
)
TEST_JOB_DONE = supervisor_jobs.Job(
@@ -322,6 +270,26 @@ TEST_JOB_DONE = supervisor_jobs.Job(
stage="copy_additional_locations",
done=True,
errors=[],
+ created=datetime.fromisoformat("1970-01-01T00:00:00Z"),
+ child_jobs=[],
+)
+TEST_RESTORE_JOB_DONE_WITH_ERROR = supervisor_jobs.Job(
+ name="backup_manager_partial_restore",
+ reference="1ef41507",
+ uuid=UUID(TEST_JOB_ID),
+ progress=0.0,
+ stage="copy_additional_locations",
+ done=True,
+ errors=[
+ supervisor_jobs.JobError(
+ type="BackupInvalidError",
+ message=(
+ "Backup was made on supervisor version 2025.02.2.dev3105, "
+ "can't restore on 2025.01.2.dev3105"
+ ),
+ )
+ ],
+ created=datetime.fromisoformat("1970-01-01T00:00:00Z"),
child_jobs=[],
)
@@ -346,7 +314,7 @@ async def hassio_enabled(
@pytest.fixture
-async def setup_integration(
+async def setup_backup_integration(
hass: HomeAssistant, hassio_enabled: None, supervisor_client: AsyncMock
) -> None:
"""Set up Backup integration."""
@@ -476,7 +444,7 @@ async def test_agent_info(
}
-@pytest.mark.usefixtures("hassio_client", "setup_integration")
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
@pytest.mark.parametrize(
("backup", "backup_details", "expected_response"),
[
@@ -491,6 +459,7 @@ async def test_agent_info(
"backup_id": "abc123",
"database_included": True,
"date": "1970-01-01T00:00:00+00:00",
+ "extra_metadata": {},
"failed_agent_ids": [],
"folders": ["share"],
"homeassistant_included": True,
@@ -510,6 +479,7 @@ async def test_agent_info(
"backup_id": "abc123",
"database_included": False,
"date": "1970-01-01T00:00:00+00:00",
+ "extra_metadata": {},
"failed_agent_ids": [],
"folders": ["share"],
"homeassistant_included": False,
@@ -540,7 +510,7 @@ async def test_agent_list_backups(
assert response["result"]["backups"] == [expected_response]
-@pytest.mark.usefixtures("hassio_client", "setup_integration")
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
async def test_agent_download(
hass: HomeAssistant,
hass_client: ClientSessionGenerator,
@@ -560,30 +530,40 @@ async def test_agent_download(
assert await resp.content.read() == b"backup data"
supervisor_client.backups.download_backup.assert_called_once_with(
- "abc123", options=supervisor_backups.DownloadBackupOptions(location=None)
+ "abc123",
+ options=supervisor_backups.DownloadBackupOptions(
+ location=LOCATION_LOCAL_STORAGE
+ ),
)
-@pytest.mark.usefixtures("hassio_client", "setup_integration")
+@pytest.mark.parametrize(
+ ("backup_info", "backup_id", "agent_id"),
+ [
+ (TEST_BACKUP_DETAILS_3, "unknown", "hassio.local"),
+ (TEST_BACKUP_DETAILS_3, TEST_BACKUP_DETAILS_3.slug, "hassio.local"),
+ (TEST_BACKUP_DETAILS, TEST_BACKUP_DETAILS_3.slug, "hassio.local"),
+ ],
+)
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
async def test_agent_download_unavailable_backup(
hass: HomeAssistant,
hass_client: ClientSessionGenerator,
supervisor_client: AsyncMock,
+ agent_id: str,
+ backup_id: str,
+ backup_info: supervisor_backups.BackupComplete,
) -> None:
"""Test agent download backup which does not exist."""
client = await hass_client()
- backup_id = "abc123"
- supervisor_client.backups.list.return_value = [TEST_BACKUP_3]
- supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS_3
- supervisor_client.backups.download_backup.return_value.__aiter__.return_value = (
- iter((b"backup data",))
- )
+ supervisor_client.backups.backup_info.return_value = backup_info
+ supervisor_client.backups.download_backup.side_effect = SupervisorNotFoundError
- resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=hassio.local")
+ resp = await client.get(f"/api/backup/download/{backup_id}?agent_id={agent_id}")
assert resp.status == 404
-@pytest.mark.usefixtures("hassio_client", "setup_integration")
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
async def test_agent_upload(
hass: HomeAssistant,
hass_client: ClientSessionGenerator,
@@ -632,7 +612,7 @@ async def test_agent_upload(
supervisor_client.backups.remove_backup.assert_not_called()
-@pytest.mark.usefixtures("hassio_client", "setup_integration")
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
async def test_agent_get_backup(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
@@ -662,6 +642,7 @@ async def test_agent_get_backup(
"backup_id": "abc123",
"database_included": True,
"date": "1970-01-01T00:00:00+00:00",
+ "extra_metadata": {},
"failed_agent_ids": [],
"folders": ["share"],
"homeassistant_included": True,
@@ -673,7 +654,7 @@ async def test_agent_get_backup(
supervisor_client.backups.backup_info.assert_called_once_with(backup_id)
-@pytest.mark.usefixtures("hassio_client", "setup_integration")
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
@pytest.mark.parametrize(
("backup_info_side_effect", "expected_response"),
[
@@ -717,7 +698,7 @@ async def test_agent_get_backup_with_error(
supervisor_client.backups.backup_info.assert_called_once_with(backup_id)
-@pytest.mark.usefixtures("hassio_client", "setup_integration")
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
async def test_agent_delete_backup(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
@@ -738,11 +719,14 @@ async def test_agent_delete_backup(
assert response["success"]
assert response["result"] == {"agent_errors": {}}
supervisor_client.backups.remove_backup.assert_called_once_with(
- backup_id, options=supervisor_backups.RemoveBackupOptions(location={None})
+ backup_id,
+ options=supervisor_backups.RemoveBackupOptions(
+ location={LOCATION_LOCAL_STORAGE}
+ ),
)
-@pytest.mark.usefixtures("hassio_client", "setup_integration")
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
@pytest.mark.parametrize(
("remove_side_effect", "expected_response"),
[
@@ -784,11 +768,14 @@ async def test_agent_delete_with_error(
assert response == {"id": 1, "type": "result"} | expected_response
supervisor_client.backups.remove_backup.assert_called_once_with(
- backup_id, options=supervisor_backups.RemoveBackupOptions(location={None})
+ backup_id,
+ options=supervisor_backups.RemoveBackupOptions(
+ location={LOCATION_LOCAL_STORAGE}
+ ),
)
-@pytest.mark.usefixtures("hassio_client", "setup_integration")
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
@pytest.mark.parametrize(
("event_data", "mount_info_calls"),
[
@@ -859,17 +846,17 @@ DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions(
"supervisor.backup_request_date": "2025-01-30T05:42:12.345678-08:00",
"with_automatic_settings": False,
},
- filename=PurePath("Test_-_2025-01-30_05.42_12345678.tar"),
+ filename=PurePath("Test_2025-01-30_05.42_12345678.tar"),
folders={"ssl"},
homeassistant_exclude_database=False,
homeassistant=True,
- location=[None],
+ location=[LOCATION_LOCAL_STORAGE],
name="Test",
password=None,
)
-@pytest.mark.usefixtures("hassio_client", "setup_integration")
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
@pytest.mark.parametrize(
("extra_generate_options", "expected_supervisor_options"),
[
@@ -919,7 +906,7 @@ async def test_reader_writer_create(
"""Test generating a backup."""
client = await hass_ws_client(hass)
freezer.move_to("2025-01-30 13:42:12.345678")
- supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
+ supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE
@@ -984,7 +971,114 @@ async def test_reader_writer_create(
assert response["event"] == {"manager_state": "idle"}
-@pytest.mark.usefixtures("hassio_client", "setup_integration")
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
+async def test_reader_writer_create_report_progress(
+ hass: HomeAssistant,
+ hass_ws_client: WebSocketGenerator,
+ freezer: FrozenDateTimeFactory,
+ supervisor_client: AsyncMock,
+) -> None:
+ """Test generating a backup."""
+ client = await hass_ws_client(hass)
+ freezer.move_to("2025-01-30 13:42:12.345678")
+ supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
+ supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
+ supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE
+
+ await client.send_json_auto_id({"type": "backup/subscribe_events"})
+ response = await client.receive_json()
+ assert response["event"] == {"manager_state": "idle"}
+ response = await client.receive_json()
+ assert response["success"]
+
+ await client.send_json_auto_id(
+ {"type": "backup/generate", "agent_ids": ["hassio.local"], "name": "Test"}
+ )
+ response = await client.receive_json()
+ assert response["event"] == {
+ "manager_state": "create_backup",
+ "reason": None,
+ "stage": None,
+ "state": "in_progress",
+ }
+
+ response = await client.receive_json()
+ assert response["success"]
+ assert response["result"] == {"backup_job_id": TEST_JOB_ID}
+
+ supervisor_client.backups.partial_backup.assert_called_once_with(
+ DEFAULT_BACKUP_OPTIONS
+ )
+
+ supervisor_event_base = {"uuid": TEST_JOB_ID, "reference": "test_slug"}
+ supervisor_events = [
+ supervisor_event_base | {"done": False, "stage": "addon_repositories"},
+ supervisor_event_base | {"done": False, "stage": None}, # Will be skipped
+ supervisor_event_base | {"done": False, "stage": "unknown"}, # Will be skipped
+ supervisor_event_base | {"done": False, "stage": "home_assistant"},
+ supervisor_event_base | {"done": False, "stage": "addons"},
+ supervisor_event_base | {"done": True, "stage": "finishing_file"},
+ ]
+ expected_manager_events = [
+ "addon_repositories",
+ "home_assistant",
+ "addons",
+ "finishing_file",
+ ]
+
+ for supervisor_event in supervisor_events:
+ await client.send_json_auto_id(
+ {
+ "type": "supervisor/event",
+ "data": {"event": "job", "data": supervisor_event},
+ }
+ )
+
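+    # Command acks and manager state events arrive interleaved; collect both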
+ acks = 0
+ events = []
+ for _ in range(len(supervisor_events) + len(expected_manager_events)):
+ response = await client.receive_json()
+ if "event" in response:
+ events.append(response)
+ continue
+ assert response["success"]
+ acks += 1
+
+ assert acks == len(supervisor_events)
+ assert len(events) == len(expected_manager_events)
+
+ for i, event in enumerate(events):
+ assert event["event"] == {
+ "manager_state": "create_backup",
+ "reason": None,
+ "stage": expected_manager_events[i],
+ "state": "in_progress",
+ }
+
+ response = await client.receive_json()
+ assert response["event"] == {
+ "manager_state": "create_backup",
+ "reason": None,
+ "stage": "upload_to_agents",
+ "state": "in_progress",
+ }
+
+ response = await client.receive_json()
+ assert response["event"] == {
+ "manager_state": "create_backup",
+ "reason": None,
+ "stage": None,
+ "state": "completed",
+ }
+
+ supervisor_client.backups.download_backup.assert_not_called()
+ supervisor_client.backups.remove_backup.assert_not_called()
+
+ response = await client.receive_json()
+ assert response["event"] == {"manager_state": "idle"}
+
+
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
async def test_reader_writer_create_job_done(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
@@ -994,7 +1088,7 @@ async def test_reader_writer_create_job_done(
"""Test generating a backup, and backup job finishes early."""
client = await hass_ws_client(hass)
freezer.move_to("2025-01-30 13:42:12.345678")
- supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
+ supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
supervisor_client.jobs.get_job.return_value = TEST_JOB_DONE
@@ -1063,7 +1157,7 @@ async def test_reader_writer_create_job_done(
None,
["hassio.local", "hassio.share1", "hassio.share2", "hassio.share3"],
None,
- [None, "share1", "share2", "share3"],
+ [LOCATION_LOCAL_STORAGE, "share1", "share2", "share3"],
False,
[],
),
@@ -1072,7 +1166,7 @@ async def test_reader_writer_create_job_done(
"hunter2",
["hassio.local", "hassio.share1", "hassio.share2", "hassio.share3"],
"hunter2",
- [None, "share1", "share2", "share3"],
+ [LOCATION_LOCAL_STORAGE, "share1", "share2", "share3"],
True,
[],
),
@@ -1090,7 +1184,7 @@ async def test_reader_writer_create_job_done(
"hunter2",
["share1", "share2", "share3"],
True,
- [None],
+ [LOCATION_LOCAL_STORAGE],
),
(
[
@@ -1107,7 +1201,7 @@ async def test_reader_writer_create_job_done(
"hunter2",
["share2", "share3"],
True,
- [None, "share1"],
+ [LOCATION_LOCAL_STORAGE, "share1"],
),
(
[
@@ -1123,7 +1217,7 @@ async def test_reader_writer_create_job_done(
"hunter2",
["hassio.local", "hassio.share1", "hassio.share2", "hassio.share3"],
None,
- [None, "share1", "share2"],
+ [LOCATION_LOCAL_STORAGE, "share1", "share2"],
True,
["share3"],
),
@@ -1139,7 +1233,7 @@ async def test_reader_writer_create_job_done(
"hunter2",
["hassio.local"],
None,
- [None],
+ [LOCATION_LOCAL_STORAGE],
False,
[],
),
@@ -1177,15 +1271,14 @@ async def test_reader_writer_create_per_agent_encryption(
for i in range(1, 4)
],
)
- supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
+ supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
supervisor_client.backups.backup_info.return_value = replace(
TEST_BACKUP_DETAILS,
extra=DEFAULT_BACKUP_OPTIONS.extra,
- locations=create_locations,
location_attributes={
- location or LOCATION_LOCAL: supervisor_backups.BackupLocationAttributes(
+ location: supervisor_backups.BackupLocationAttributes(
protected=create_protected,
- size_bytes=TEST_BACKUP_DETAILS.size_bytes,
+ size_bytes=1048576,
)
for location in create_locations
},
@@ -1265,7 +1358,7 @@ async def test_reader_writer_create_per_agent_encryption(
upload_locations
)
for call in supervisor_client.backups.upload_backup.mock_calls:
- assert call.args[1].filename == PurePath("Test_-_2025-01-30_05.42_12345678.tar")
+ assert call.args[1].filename == PurePath("Test_2025-01-30_05.42_12345678.tar")
upload_call_locations: set = call.args[1].location
assert len(upload_call_locations) == 1
assert upload_call_locations.pop() in upload_locations
@@ -1275,7 +1368,7 @@ async def test_reader_writer_create_per_agent_encryption(
assert response["event"] == {"manager_state": "idle"}
-@pytest.mark.usefixtures("hassio_client", "setup_integration")
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
@pytest.mark.parametrize(
("side_effect", "error_code", "error_message", "expected_reason"),
[
@@ -1342,15 +1435,44 @@ async def test_reader_writer_create_partial_backup_error(
assert supervisor_client.backups.partial_backup.call_count == 1
-@pytest.mark.usefixtures("hassio_client", "setup_integration")
+@pytest.mark.parametrize(
+ "supervisor_event",
+ [
+ # Missing backup reference
+ {
+ "event": "job",
+ "data": {
+ "done": True,
+ "uuid": TEST_JOB_ID,
+ },
+ },
+ # Errors
+ {
+ "event": "job",
+ "data": {
+ "done": True,
+ "errors": [
+ {
+ "type": "BackupMountDownError",
+ "message": "test_mount is down, cannot back-up to it",
+ }
+ ],
+ "uuid": TEST_JOB_ID,
+ "reference": "test_slug",
+ },
+ },
+ ],
+)
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
async def test_reader_writer_create_missing_reference_error(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
supervisor_client: AsyncMock,
+ supervisor_event: dict[str, Any],
) -> None:
"""Test missing reference error when generating a backup."""
client = await hass_ws_client(hass)
- supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
+ supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE
await client.send_json_auto_id({"type": "backup/subscribe_events"})
@@ -1377,13 +1499,7 @@ async def test_reader_writer_create_missing_reference_error(
assert supervisor_client.backups.partial_backup.call_count == 1
await client.send_json_auto_id(
- {
- "type": "supervisor/event",
- "data": {
- "event": "job",
- "data": {"done": True, "uuid": TEST_JOB_ID},
- },
- }
+ {"type": "supervisor/event", "data": supervisor_event}
)
response = await client.receive_json()
assert response["success"]
@@ -1406,7 +1522,7 @@ async def test_reader_writer_create_missing_reference_error(
assert response["event"] == {"manager_state": "idle"}
-@pytest.mark.usefixtures("hassio_client", "setup_integration")
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
@pytest.mark.parametrize("exception", [SupervisorError("Boom!"), Exception("Boom!")])
@pytest.mark.parametrize(
("method", "download_call_count", "remove_call_count"),
@@ -1423,7 +1539,7 @@ async def test_reader_writer_create_download_remove_error(
) -> None:
"""Test download and remove error when generating a backup."""
client = await hass_ws_client(hass)
- supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
+ supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS_5
supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE
method_mock = getattr(supervisor_client.backups, method)
@@ -1500,7 +1616,7 @@ async def test_reader_writer_create_download_remove_error(
assert response["event"] == {"manager_state": "idle"}
-@pytest.mark.usefixtures("hassio_client", "setup_integration")
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
@pytest.mark.parametrize("exception", [SupervisorError("Boom!"), Exception("Boom!")])
async def test_reader_writer_create_info_error(
hass: HomeAssistant,
@@ -1510,7 +1626,7 @@ async def test_reader_writer_create_info_error(
) -> None:
"""Test backup info error when generating a backup."""
client = await hass_ws_client(hass)
- supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
+ supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
supervisor_client.backups.backup_info.side_effect = exception
supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE
@@ -1577,7 +1693,7 @@ async def test_reader_writer_create_info_error(
assert response["event"] == {"manager_state": "idle"}
-@pytest.mark.usefixtures("hassio_client", "setup_integration")
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
async def test_reader_writer_create_remote_backup(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
@@ -1587,7 +1703,7 @@ async def test_reader_writer_create_remote_backup(
"""Test generating a backup which will be uploaded to a remote agent."""
client = await hass_ws_client(hass)
freezer.move_to("2025-01-30 13:42:12.345678")
- supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
+ supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS_5
supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE
@@ -1661,7 +1777,7 @@ async def test_reader_writer_create_remote_backup(
)
-@pytest.mark.usefixtures("hassio_client", "setup_integration")
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
@pytest.mark.parametrize(
("extra_generate_options", "expected_error"),
[
@@ -1690,7 +1806,7 @@ async def test_reader_writer_create_wrong_parameters(
) -> None:
"""Test generating a backup."""
client = await hass_ws_client(hass)
- supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
+ supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
await client.send_json_auto_id({"type": "backup/subscribe_events"})
@@ -1731,7 +1847,7 @@ async def test_reader_writer_create_wrong_parameters(
supervisor_client.backups.partial_backup.assert_not_called()
-@pytest.mark.usefixtures("hassio_client", "setup_integration")
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
async def test_agent_receive_remote_backup(
hass: HomeAssistant,
hass_client: ClientSessionGenerator,
@@ -1807,7 +1923,7 @@ async def test_agent_receive_remote_backup(
),
],
)
-@pytest.mark.usefixtures("hassio_client", "setup_integration")
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
async def test_reader_writer_restore(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
@@ -1817,7 +1933,7 @@ async def test_reader_writer_restore(
) -> None:
"""Test restoring a backup."""
client = await hass_ws_client(hass)
- supervisor_client.backups.partial_restore.return_value.job_id = TEST_JOB_ID
+ supervisor_client.backups.partial_restore.return_value.job_id = UUID(TEST_JOB_ID)
supervisor_client.backups.list.return_value = [TEST_BACKUP]
supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
supervisor_client.jobs.get_job.return_value = get_job_result
@@ -1848,7 +1964,7 @@ async def test_reader_writer_restore(
background=True,
folders=None,
homeassistant=True,
- location=None,
+ location=LOCATION_LOCAL_STORAGE,
password=None,
),
)
@@ -1874,31 +1990,140 @@ async def test_reader_writer_restore(
assert response["result"] is None
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
+async def test_reader_writer_restore_report_progress(
+ hass: HomeAssistant,
+ hass_ws_client: WebSocketGenerator,
+ supervisor_client: AsyncMock,
+) -> None:
+ """Test restoring a backup."""
+ client = await hass_ws_client(hass)
+ supervisor_client.backups.partial_restore.return_value.job_id = UUID(TEST_JOB_ID)
+ supervisor_client.backups.list.return_value = [TEST_BACKUP]
+ supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
+ supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE
+
+ await client.send_json_auto_id({"type": "backup/subscribe_events"})
+ response = await client.receive_json()
+ assert response["event"] == {
+ "manager_state": "idle",
+ }
+ response = await client.receive_json()
+ assert response["success"]
+
+ await client.send_json_auto_id(
+ {"type": "backup/restore", "agent_id": "hassio.local", "backup_id": "abc123"}
+ )
+ response = await client.receive_json()
+ assert response["event"] == {
+ "manager_state": "restore_backup",
+ "reason": None,
+ "stage": None,
+ "state": "in_progress",
+ }
+
+ supervisor_client.backups.partial_restore.assert_called_once_with(
+ "abc123",
+ supervisor_backups.PartialRestoreOptions(
+ addons=None,
+ background=True,
+ folders=None,
+ homeassistant=True,
+ location=LOCATION_LOCAL_STORAGE,
+ password=None,
+ ),
+ )
+
+ supervisor_event_base = {"uuid": TEST_JOB_ID, "reference": "test_slug"}
+ supervisor_events = [
+ supervisor_event_base | {"done": False, "stage": "addon_repositories"},
+ supervisor_event_base | {"done": False, "stage": None}, # Will be skipped
+ supervisor_event_base | {"done": False, "stage": "unknown"}, # Will be skipped
+ supervisor_event_base | {"done": False, "stage": "home_assistant"},
+ supervisor_event_base | {"done": True, "stage": "addons"},
+ ]
+ expected_manager_events = [
+ "addon_repositories",
+ "home_assistant",
+ "addons",
+ ]
+
+ for supervisor_event in supervisor_events:
+ await client.send_json_auto_id(
+ {
+ "type": "supervisor/event",
+ "data": {"event": "job", "data": supervisor_event},
+ }
+ )
+
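+    # Command acks and manager state events arrive interleaved; collect both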
+ acks = 0
+ events = []
+ for _ in range(len(supervisor_events) + len(expected_manager_events)):
+ response = await client.receive_json()
+ if "event" in response:
+ events.append(response)
+ continue
+ assert response["success"]
+ acks += 1
+
+ assert acks == len(supervisor_events)
+ assert len(events) == len(expected_manager_events)
+
+ for i, event in enumerate(events):
+ assert event["event"] == {
+ "manager_state": "restore_backup",
+ "reason": None,
+ "stage": expected_manager_events[i],
+ "state": "in_progress",
+ }
+
+ response = await client.receive_json()
+ assert response["event"] == {
+ "manager_state": "restore_backup",
+ "reason": None,
+ "stage": None,
+ "state": "completed",
+ }
+
+ response = await client.receive_json()
+ assert response["event"] == {"manager_state": "idle"}
+
+ response = await client.receive_json()
+ assert response["success"]
+ assert response["result"] is None
+
+
@pytest.mark.parametrize(
- ("supervisor_error_string", "expected_error_code", "expected_reason"),
+ ("supervisor_error", "expected_error_code", "expected_reason"),
[
- ("Invalid password for backup", "password_incorrect", "password_incorrect"),
(
- "Backup was made on supervisor version 2025.12.0, can't restore on 2024.12.0. Must update supervisor first.",
+ SupervisorBadRequestError("Invalid password for backup"),
+ "password_incorrect",
+ "password_incorrect",
+ ),
+ (
+ SupervisorBadRequestError(
+ "Backup was made on supervisor version 2025.12.0, can't "
+ "restore on 2024.12.0. Must update supervisor first."
+ ),
"home_assistant_error",
"unknown_error",
),
+ (SupervisorNotFoundError(), "backup_not_found", "backup_not_found"),
],
)
-@pytest.mark.usefixtures("hassio_client", "setup_integration")
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
async def test_reader_writer_restore_error(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
supervisor_client: AsyncMock,
- supervisor_error_string: str,
+ supervisor_error: Exception,
expected_error_code: str,
expected_reason: str,
) -> None:
"""Test restoring a backup."""
client = await hass_ws_client(hass)
- supervisor_client.backups.partial_restore.side_effect = SupervisorBadRequestError(
- supervisor_error_string
- )
+ supervisor_client.backups.partial_restore.side_effect = supervisor_error
supervisor_client.backups.list.return_value = [TEST_BACKUP]
supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
@@ -1926,7 +2151,7 @@ async def test_reader_writer_restore_error(
background=True,
folders=None,
homeassistant=True,
- location=None,
+ location=LOCATION_LOCAL_STORAGE,
password=None,
),
)
@@ -1946,6 +2171,97 @@ async def test_reader_writer_restore_error(
assert response["error"]["code"] == expected_error_code
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
+async def test_reader_writer_restore_late_error(
+ hass: HomeAssistant,
+ hass_ws_client: WebSocketGenerator,
+ supervisor_client: AsyncMock,
+) -> None:
+ """Test restoring a backup with error."""
+ client = await hass_ws_client(hass)
+ supervisor_client.backups.partial_restore.return_value.job_id = UUID(TEST_JOB_ID)
+ supervisor_client.backups.list.return_value = [TEST_BACKUP]
+ supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
+ supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE
+
+ await client.send_json_auto_id({"type": "backup/subscribe_events"})
+ response = await client.receive_json()
+ assert response["event"] == {"manager_state": "idle"}
+ response = await client.receive_json()
+ assert response["success"]
+
+ await client.send_json_auto_id(
+ {"type": "backup/restore", "agent_id": "hassio.local", "backup_id": "abc123"}
+ )
+ response = await client.receive_json()
+ assert response["event"] == {
+ "manager_state": "restore_backup",
+ "reason": None,
+ "stage": None,
+ "state": "in_progress",
+ }
+
+ supervisor_client.backups.partial_restore.assert_called_once_with(
+ "abc123",
+ supervisor_backups.PartialRestoreOptions(
+ addons=None,
+ background=True,
+ folders=None,
+ homeassistant=True,
+ location=LOCATION_LOCAL_STORAGE,
+ password=None,
+ ),
+ )
+
+ event = {
+ "event": "job",
+ "data": {
+ "name": "backup_manager_partial_restore",
+ "reference": "7c54aeed",
+ "uuid": TEST_JOB_ID,
+ "progress": 0,
+ "stage": None,
+ "done": True,
+ "parent_id": None,
+ "errors": [
+ {
+ "type": "BackupInvalidError",
+ "message": (
+ "Backup was made on supervisor version 2025.02.2.dev3105, can't"
+ " restore on 2025.01.2.dev3105. Must update supervisor first."
+ ),
+ }
+ ],
+ "created": "2025-02-03T08:27:49.297997+00:00",
+ },
+ }
+ await client.send_json_auto_id({"type": "supervisor/event", "data": event})
+ response = await client.receive_json()
+ assert response["success"]
+
+ response = await client.receive_json()
+ assert response["event"] == {
+ "manager_state": "restore_backup",
+ "reason": "backup_reader_writer_error",
+ "stage": None,
+ "state": "failed",
+ }
+
+ response = await client.receive_json()
+ assert response["event"] == {"manager_state": "idle"}
+
+ response = await client.receive_json()
+ assert not response["success"]
+ assert response["error"] == {
+ "code": "home_assistant_error",
+ "message": (
+ "Restore failed: [{'type': 'BackupInvalidError', 'message': \"Backup "
+ "was made on supervisor version 2025.02.2.dev3105, can't restore on "
+ '2025.01.2.dev3105. Must update supervisor first."}]'
+ ),
+ }
+
+
@pytest.mark.parametrize(
("backup", "backup_details", "parameters", "expected_error"),
[
@@ -1969,7 +2285,7 @@ async def test_reader_writer_restore_error(
),
],
)
-@pytest.mark.usefixtures("hassio_client", "setup_integration")
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
async def test_reader_writer_restore_wrong_parameters(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
@@ -1999,15 +2315,40 @@ async def test_reader_writer_restore_wrong_parameters(
}
+@pytest.mark.parametrize(
+ ("get_job_result", "last_non_idle_event"),
+ [
+ (
+ TEST_JOB_DONE,
+ {
+ "manager_state": "restore_backup",
+ "reason": None,
+ "stage": None,
+ "state": "completed",
+ },
+ ),
+ (
+ TEST_RESTORE_JOB_DONE_WITH_ERROR,
+ {
+ "manager_state": "restore_backup",
+ "reason": "unknown_error",
+ "stage": None,
+ "state": "failed",
+ },
+ ),
+ ],
+)
@pytest.mark.usefixtures("hassio_client")
async def test_restore_progress_after_restart(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
supervisor_client: AsyncMock,
+ get_job_result: supervisor_jobs.Job,
+ last_non_idle_event: dict[str, Any],
) -> None:
"""Test restore backup progress after restart."""
- supervisor_client.jobs.get_job.return_value = TEST_JOB_DONE
+ supervisor_client.jobs.get_job.return_value = get_job_result
with patch.dict(os.environ, MOCK_ENVIRON | {RESTORE_JOB_ID_ENV: TEST_JOB_ID}):
assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}})
@@ -2017,11 +2358,88 @@ async def test_restore_progress_after_restart(
await client.send_json_auto_id({"type": "backup/info"})
response = await client.receive_json()
+ assert response["success"]
+ assert response["result"]["last_non_idle_event"] == last_non_idle_event
+ assert response["result"]["state"] == "idle"
+
+
+@pytest.mark.usefixtures("hassio_client")
+async def test_restore_progress_after_restart_report_progress(
+ hass: HomeAssistant,
+ hass_ws_client: WebSocketGenerator,
+ supervisor_client: AsyncMock,
+) -> None:
+ """Test restore backup progress after restart."""
+
+ supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE
+
+ with patch.dict(os.environ, MOCK_ENVIRON | {RESTORE_JOB_ID_ENV: TEST_JOB_ID}):
+ assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}})
+
+ client = await hass_ws_client(hass)
+
+ await client.send_json_auto_id({"type": "backup/subscribe_events"})
+ response = await client.receive_json()
+ assert response["event"] == {
+ "manager_state": "restore_backup",
+ "reason": None,
+ "stage": None,
+ "state": "in_progress",
+ }
+ response = await client.receive_json()
+ assert response["success"]
+
+ supervisor_event_base = {"uuid": TEST_JOB_ID, "reference": "test_slug"}
+ supervisor_events = [
+ supervisor_event_base | {"done": False, "stage": "addon_repositories"},
+ supervisor_event_base | {"done": False, "stage": None}, # Will be skipped
+ supervisor_event_base | {"done": False, "stage": "unknown"}, # Will be skipped
+ supervisor_event_base | {"done": False, "stage": "home_assistant"},
+ supervisor_event_base | {"done": True, "stage": "addons"},
+ ]
+ expected_manager_events = ["addon_repositories", "home_assistant", "addons"]
+ expected_manager_states = ["in_progress", "in_progress", "completed"]
+
+ for supervisor_event in supervisor_events:
+ await client.send_json_auto_id(
+ {
+ "type": "supervisor/event",
+ "data": {"event": "job", "data": supervisor_event},
+ }
+ )
+
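+    # Command acks and manager state events arrive interleaved; collect both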
+ acks = 0
+ events = []
+ for _ in range(len(supervisor_events) + len(expected_manager_events)):
+ response = await client.receive_json()
+ if "event" in response:
+ events.append(response)
+ continue
+ assert response["success"]
+ acks += 1
+
+ assert acks == len(supervisor_events)
+ assert len(events) == len(expected_manager_events)
+
+ for i, event in enumerate(events):
+ assert event["event"] == {
+ "manager_state": "restore_backup",
+ "reason": None,
+ "stage": expected_manager_events[i],
+ "state": expected_manager_states[i],
+ }
+
+ response = await client.receive_json()
+ assert response["event"] == {"manager_state": "idle"}
+
+ await client.send_json_auto_id({"type": "backup/info"})
+ response = await client.receive_json()
+
assert response["success"]
assert response["result"]["last_non_idle_event"] == {
"manager_state": "restore_backup",
- "reason": "",
- "stage": None,
+ "reason": None,
+ "stage": "addons",
"state": "completed",
}
assert response["result"]["state"] == "idle"
diff --git a/tests/components/homewizard/conftest.py b/tests/components/homewizard/conftest.py
index f9c5e617904..b8367f87e57 100644
--- a/tests/components/homewizard/conftest.py
+++ b/tests/components/homewizard/conftest.py
@@ -160,7 +160,7 @@ def mock_config_entry_v2() -> MockConfigEntry:
CONF_IP_ADDRESS: "127.0.0.1",
CONF_TOKEN: "00112233445566778899ABCDEFABCDEF",
},
- unique_id="HWE-P1_5c2fafabcdef",
+ unique_id="HWE-BAT_5c2fafabcdef",
)
diff --git a/tests/components/homewizard/test_init.py b/tests/components/homewizard/test_init.py
index 77366da84c5..9139ef80d12 100644
--- a/tests/components/homewizard/test_init.py
+++ b/tests/components/homewizard/test_init.py
@@ -2,6 +2,7 @@
from datetime import timedelta
from unittest.mock import MagicMock
+import weakref
from freezegun.api import FrozenDateTimeFactory
from homewizard_energy.errors import DisabledError, UnauthorizedError
@@ -9,6 +10,7 @@ import pytest
from homeassistant.components.homewizard.const import DOMAIN
from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState
+from homeassistant.const import CONF_IP_ADDRESS, CONF_TOKEN
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry, async_fire_time_changed
@@ -24,6 +26,9 @@ async def test_load_unload_v1(
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
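+ # Track runtime_data with a weak reference to verify it is released after unload.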
+ weak_ref = weakref.ref(mock_config_entry.runtime_data)
+ assert weak_ref() is not None
+
assert mock_config_entry.state is ConfigEntryState.LOADED
assert len(mock_homewizardenergy.combined.mock_calls) == 1
@@ -31,6 +36,7 @@ async def test_load_unload_v1(
await hass.async_block_till_done()
assert mock_config_entry.state is ConfigEntryState.NOT_LOADED
+ assert weak_ref() is None
async def test_load_unload_v2(
@@ -52,6 +58,36 @@ async def test_load_unload_v2(
assert mock_config_entry_v2.state is ConfigEntryState.NOT_LOADED
+async def test_load_unload_v2_as_v1(
+ hass: HomeAssistant,
+ mock_homewizardenergy: MagicMock,
+) -> None:
+ """Test loading and unloading of integration with v2 config, but without using it."""
+
+ # Simulate v2 config but as a P1 Meter
+ mock_config_entry = MockConfigEntry(
+ title="Device",
+ domain=DOMAIN,
+ data={
+ CONF_IP_ADDRESS: "127.0.0.1",
+ CONF_TOKEN: "00112233445566778899ABCDEFABCDEF",
+ },
+ unique_id="HWE-P1_5c2fafabcdef",
+ )
+
+ mock_config_entry.add_to_hass(hass)
+ await hass.config_entries.async_setup(mock_config_entry.entry_id)
+ await hass.async_block_till_done()
+
+ assert mock_config_entry.state is ConfigEntryState.LOADED
+ assert len(mock_homewizardenergy.combined.mock_calls) == 1
+
+ await hass.config_entries.async_unload(mock_config_entry.entry_id)
+ await hass.async_block_till_done()
+
+ assert mock_config_entry.state is ConfigEntryState.NOT_LOADED
+
+
async def test_load_failed_host_unavailable(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
diff --git a/tests/components/homewizard/test_repair.py b/tests/components/homewizard/test_repair.py
index a59d6f415dd..763af48b1a2 100644
--- a/tests/components/homewizard/test_repair.py
+++ b/tests/components/homewizard/test_repair.py
@@ -36,6 +36,10 @@ async def test_repair_acquires_token(
client = await hass_client()
mock_config_entry.add_to_hass(hass)
+ hass.config_entries.async_update_entry(
+ mock_config_entry, unique_id="HWE-BAT_5c2fafabcdef"
+ )
+ await hass.async_block_till_done()
with patch("homeassistant.components.homewizard.has_v2_api", return_value=True):
await hass.config_entries.async_setup(mock_config_entry.entry_id)
diff --git a/tests/components/hue/test_light_v2.py b/tests/components/hue/test_light_v2.py
index 2b978ffc33f..c831d40d261 100644
--- a/tests/components/hue/test_light_v2.py
+++ b/tests/components/hue/test_light_v2.py
@@ -392,7 +392,7 @@ async def test_light_availability(
assert test_light is not None
assert test_light.state == "on"
- # Change availability by modififying the zigbee_connectivity status
+ # Change availability by modifying the zigbee_connectivity status
for status in ("connectivity_issue", "disconnected", "connected"):
mock_bridge_v2.api.emit_event(
"update",
diff --git a/tests/components/idasen_desk/test_config_flow.py b/tests/components/idasen_desk/test_config_flow.py
index baeed6be1ab..15baac1b055 100644
--- a/tests/components/idasen_desk/test_config_flow.py
+++ b/tests/components/idasen_desk/test_config_flow.py
@@ -50,6 +50,49 @@ async def test_user_step_success(hass: HomeAssistant, mock_desk_api: MagicMock)
assert len(mock_setup_entry.mock_calls) == 1
+async def test_user_step_replaces_ignored_device(
+ hass: HomeAssistant, mock_desk_api: MagicMock
+) -> None:
+ """Test user step replaces ignored devices."""
+ entry = MockConfigEntry(
+ domain=DOMAIN,
+ unique_id=IDASEN_DISCOVERY_INFO.address,
+ source=config_entries.SOURCE_IGNORE,
+ data={CONF_ADDRESS: IDASEN_DISCOVERY_INFO.address},
+ )
+ entry.add_to_hass(hass)
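+ # Pre-register an ignored entry for the same address; the user flow should replace it.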
+
+ with patch(
+ "homeassistant.components.idasen_desk.config_flow.async_discovered_service_info",
+ return_value=[NOT_IDASEN_DISCOVERY_INFO, IDASEN_DISCOVERY_INFO],
+ ):
+ result = await hass.config_entries.flow.async_init(
+ DOMAIN, context={"source": config_entries.SOURCE_USER}
+ )
+ assert result["type"] is FlowResultType.FORM
+ assert result["step_id"] == "user"
+ assert result["errors"] == {}
+
+ with patch(
+ "homeassistant.components.idasen_desk.async_setup_entry", return_value=True
+ ) as mock_setup_entry:
+ result2 = await hass.config_entries.flow.async_configure(
+ result["flow_id"],
+ {
+ CONF_ADDRESS: IDASEN_DISCOVERY_INFO.address,
+ },
+ )
+ await hass.async_block_till_done()
+
+ assert result2["type"] is FlowResultType.CREATE_ENTRY
+ assert result2["title"] == IDASEN_DISCOVERY_INFO.name
+ assert result2["data"] == {
+ CONF_ADDRESS: IDASEN_DISCOVERY_INFO.address,
+ }
+ assert result2["result"].unique_id == IDASEN_DISCOVERY_INFO.address
+ assert len(mock_setup_entry.mock_calls) == 1
+
+
async def test_user_step_no_devices_found(hass: HomeAssistant) -> None:
"""Test user step with no devices found."""
with patch(
diff --git a/tests/components/iometer/__init__.py b/tests/components/iometer/__init__.py
new file mode 100644
index 00000000000..5c08438925e
--- /dev/null
+++ b/tests/components/iometer/__init__.py
@@ -0,0 +1 @@
+"""Tests for the IOmeter integration."""
diff --git a/tests/components/iometer/conftest.py b/tests/components/iometer/conftest.py
new file mode 100644
index 00000000000..ee45021952e
--- /dev/null
+++ b/tests/components/iometer/conftest.py
@@ -0,0 +1,57 @@
+"""Common fixtures for the IOmeter tests."""
+
+from collections.abc import Generator
+from unittest.mock import AsyncMock, patch
+
+from iometer import Reading, Status
+import pytest
+
+from homeassistant.components.iometer.const import DOMAIN
+from homeassistant.const import CONF_HOST
+
+from tests.common import MockConfigEntry, load_fixture
+
+
+@pytest.fixture
+def mock_setup_entry() -> Generator[AsyncMock]:
+ """Override async_setup_entry."""
+ with patch(
+ "homeassistant.components.iometer.async_setup_entry",
+ return_value=True,
+ ) as mock_setup_entry:
+ yield mock_setup_entry
+
+
+@pytest.fixture
+def mock_iometer_client() -> Generator[AsyncMock]:
+ """Mock a new IOmeter client."""
+ with (
+ patch(
+ "homeassistant.components.iometer.IOmeterClient",
+ autospec=True,
+ ) as mock_client,
+ patch(
+ "homeassistant.components.iometer.config_flow.IOmeterClient",
+ new=mock_client,
+ ),
+ ):
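+ # The integration and its config flow share the same mocked client instance.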
+ client = mock_client.return_value
+ client.host = "10.0.0.2"
+ client.get_current_reading.return_value = Reading.from_json(
+ load_fixture("reading.json", DOMAIN)
+ )
+ client.get_current_status.return_value = Status.from_json(
+ load_fixture("status.json", DOMAIN)
+ )
+ yield client
+
+
+@pytest.fixture
+def mock_config_entry() -> MockConfigEntry:
+ """Mock a IOmeter config entry."""
+ return MockConfigEntry(
+ domain=DOMAIN,
+ title="IOmeter-1ISK0000000000",
+ data={CONF_HOST: "10.0.0.2"},
+ unique_id="658c2b34-2017-45f2-a12b-731235f8bb97",
+ )
diff --git a/tests/components/iometer/fixtures/reading.json b/tests/components/iometer/fixtures/reading.json
new file mode 100644
index 00000000000..82190c88883
--- /dev/null
+++ b/tests/components/iometer/fixtures/reading.json
@@ -0,0 +1,14 @@
+{
+ "__typename": "iometer.reading.v1",
+ "meter": {
+ "number": "1ISK0000000000",
+ "reading": {
+ "time": "2024-11-11T11:11:11Z",
+ "registers": [
+ { "obis": "01-00:01.08.00*ff", "value": 1234.5, "unit": "Wh" },
+ { "obis": "01-00:02.08.00*ff", "value": 5432.1, "unit": "Wh" },
+ { "obis": "01-00:10.07.00*ff", "value": 100, "unit": "W" }
+ ]
+ }
+ }
+}
diff --git a/tests/components/iometer/fixtures/status.json b/tests/components/iometer/fixtures/status.json
new file mode 100644
index 00000000000..4d3001d8454
--- /dev/null
+++ b/tests/components/iometer/fixtures/status.json
@@ -0,0 +1,19 @@
+{
+ "__typename": "iometer.status.v1",
+ "meter": {
+ "number": "1ISK0000000000"
+ },
+ "device": {
+ "bridge": { "rssi": -30, "version": "build-65" },
+ "id": "658c2b34-2017-45f2-a12b-731235f8bb97",
+ "core": {
+ "connectionStatus": "connected",
+ "rssi": -30,
+ "version": "build-58",
+ "powerStatus": "battery",
+ "batteryLevel": 100,
+ "attachmentStatus": "attached",
+ "pinStatus": "entered"
+ }
+ }
+}
diff --git a/tests/components/iometer/test_config_flow.py b/tests/components/iometer/test_config_flow.py
new file mode 100644
index 00000000000..49fce459282
--- /dev/null
+++ b/tests/components/iometer/test_config_flow.py
@@ -0,0 +1,171 @@
+"""Test the IOmeter config flow."""
+
+from ipaddress import ip_address
+from unittest.mock import AsyncMock
+
+from iometer import IOmeterConnectionError
+
+from homeassistant.components import zeroconf
+from homeassistant.components.iometer.const import DOMAIN
+from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF
+from homeassistant.const import CONF_HOST
+from homeassistant.core import HomeAssistant
+from homeassistant.data_entry_flow import FlowResultType
+
+from tests.common import MockConfigEntry
+
+IP_ADDRESS = "10.0.0.2"
+IOMETER_DEVICE_ID = "658c2b34-2017-45f2-a12b-731235f8bb97"
+
+ZEROCONF_DISCOVERY = zeroconf.ZeroconfServiceInfo(
+ ip_address=ip_address(IP_ADDRESS),
+ ip_addresses=[ip_address(IP_ADDRESS)],
+ hostname="IOmeter-EC63E8.local.",
+ name="IOmeter-EC63E8",
+ port=80,
+ type="_iometer._tcp.",
+ properties={},
+)
+
+
+async def test_user_flow(
+ hass: HomeAssistant,
+ mock_iometer_client: AsyncMock,
+) -> None:
+ """Test full user configuration flow."""
+ result = await hass.config_entries.flow.async_init(
+ DOMAIN,
+ context={"source": SOURCE_USER},
+ )
+ await hass.async_block_till_done()
+ assert result["type"] is FlowResultType.FORM
+ assert result["step_id"] == "user"
+
+ result = await hass.config_entries.flow.async_configure(
+ result["flow_id"],
+ user_input={CONF_HOST: IP_ADDRESS},
+ )
+
+ await hass.async_block_till_done()
+ assert result["type"] is FlowResultType.CREATE_ENTRY
+ assert result["title"] == "IOmeter 1ISK0000000000"
+ assert result["data"] == {CONF_HOST: IP_ADDRESS}
+ assert result["result"].unique_id == IOMETER_DEVICE_ID
+
+
+async def test_zeroconf_flow(
+ hass: HomeAssistant,
+ mock_iometer_client: AsyncMock,
+) -> None:
+ """Test zeroconf flow."""
+ result = await hass.config_entries.flow.async_init(
+ DOMAIN,
+ context={"source": SOURCE_ZEROCONF},
+ data=ZEROCONF_DISCOVERY,
+ )
+ await hass.async_block_till_done()
+ assert result["type"] is FlowResultType.FORM
+ assert result["step_id"] == "zeroconf_confirm"
+
+ result = await hass.config_entries.flow.async_configure(
+ result["flow_id"],
+ {},
+ )
+ await hass.async_block_till_done()
+ assert result["type"] is FlowResultType.CREATE_ENTRY
+ assert result["title"] == "IOmeter 1ISK0000000000"
+ assert result["data"] == {CONF_HOST: IP_ADDRESS}
+ assert result["result"].unique_id == IOMETER_DEVICE_ID
+
+
+async def test_zeroconf_flow_abort_duplicate(
+ hass: HomeAssistant,
+ mock_config_entry: MockConfigEntry,
+) -> None:
+ """Test zeroconf flow aborts with duplicate."""
+ mock_config_entry.add_to_hass(hass)
+ result = await hass.config_entries.flow.async_init(
+ DOMAIN,
+ context={"source": SOURCE_ZEROCONF},
+ data=ZEROCONF_DISCOVERY,
+ )
+ assert result["type"] is FlowResultType.ABORT
+ assert result["reason"] == "already_configured"
+
+
+async def test_zeroconf_flow_connection_error(
+ hass: HomeAssistant,
+ mock_iometer_client: AsyncMock,
+) -> None:
+ """Test zeroconf flow."""
+ mock_iometer_client.get_current_status.side_effect = IOmeterConnectionError()
+ result = await hass.config_entries.flow.async_init(
+ DOMAIN,
+ context={"source": SOURCE_ZEROCONF},
+ data=ZEROCONF_DISCOVERY,
+ )
+ await hass.async_block_till_done()
+ assert result["type"] is FlowResultType.ABORT
+ assert result["reason"] == "cannot_connect"
+
+
+async def test_user_flow_connection_error(
+ hass: HomeAssistant,
+ mock_iometer_client: AsyncMock,
+ mock_setup_entry: AsyncMock,
+) -> None:
+ """Test flow error."""
+ mock_iometer_client.get_current_status.side_effect = IOmeterConnectionError()
+
+ result = await hass.config_entries.flow.async_init(
+ DOMAIN,
+ context={"source": SOURCE_USER},
+ )
+ await hass.async_block_till_done()
+ assert result["type"] is FlowResultType.FORM
+ assert result["step_id"] == "user"
+
+ result = await hass.config_entries.flow.async_configure(
+ result["flow_id"],
+ {CONF_HOST: IP_ADDRESS},
+ )
+ await hass.async_block_till_done()
+
+ assert result["type"] is FlowResultType.FORM
+ assert result["errors"] == {"base": "cannot_connect"}
+
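+ # Clear the failure so retrying the same step completes the flow.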
+ mock_iometer_client.get_current_status.side_effect = None
+
+ result = await hass.config_entries.flow.async_configure(
+ result["flow_id"],
+ {CONF_HOST: IP_ADDRESS},
+ )
+ await hass.async_block_till_done()
+ assert result["type"] is FlowResultType.CREATE_ENTRY
+
+
+async def test_flow_abort_duplicate(
+ hass: HomeAssistant,
+ mock_iometer_client: AsyncMock,
+ mock_setup_entry: AsyncMock,
+ mock_config_entry: MockConfigEntry,
+) -> None:
+ """Test duplicate flow."""
+ mock_config_entry.add_to_hass(hass)
+
+ result = await hass.config_entries.flow.async_init(
+ DOMAIN,
+ context={"source": SOURCE_USER},
+ )
+ await hass.async_block_till_done()
+ assert result["type"] is FlowResultType.FORM
+ assert result["step_id"] == "user"
+
+ result = await hass.config_entries.flow.async_configure(
+ result["flow_id"],
+ {CONF_HOST: IP_ADDRESS},
+ )
+ await hass.async_block_till_done()
+
+ assert result["type"] is FlowResultType.ABORT
+ assert result["reason"] == "already_configured"
diff --git a/tests/components/kitchen_sink/test_backup.py b/tests/components/kitchen_sink/test_backup.py
index a664b91393d..7c693abcda8 100644
--- a/tests/components/kitchen_sink/test_backup.py
+++ b/tests/components/kitchen_sink/test_backup.py
@@ -2,7 +2,7 @@
from collections.abc import AsyncGenerator
from io import StringIO
-from unittest.mock import patch
+from unittest.mock import ANY, patch
import pytest
@@ -106,6 +106,7 @@ async def test_agents_list_backups(
"backup_id": "abc123",
"database_included": False,
"date": "1970-01-01T00:00:00Z",
+ "extra_metadata": {},
"failed_agent_ids": [],
"folders": ["media", "share"],
"homeassistant_included": True,
@@ -187,6 +188,7 @@ async def test_agents_upload(
"backup_id": "test-backup",
"database_included": True,
"date": "1970-01-01T00:00:00.000Z",
+ "extra_metadata": {"instance_id": ANY, "with_automatic_settings": False},
"failed_agent_ids": [],
"folders": ["media", "share"],
"homeassistant_included": True,
diff --git a/tests/components/lg_thinq/fixtures/air_conditioner/profile.json b/tests/components/lg_thinq/fixtures/air_conditioner/profile.json
index 0d45dc5c9f4..85ce95da0ed 100644
--- a/tests/components/lg_thinq/fixtures/air_conditioner/profile.json
+++ b/tests/components/lg_thinq/fixtures/air_conditioner/profile.json
@@ -57,6 +57,16 @@
"type": "number"
}
},
+ "filterInfo": {
+ "filterLifetime": {
+ "mode": ["r"],
+ "type": "number"
+ },
+ "usedTime": {
+ "mode": ["r"],
+ "type": "number"
+ }
+ },
"operation": {
"airCleanOperationMode": {
"mode": ["w"],
@@ -124,6 +134,52 @@
}
}
},
+ "temperatureInUnits": [
+ {
+ "currentTemperature": {
+ "type": "number",
+ "mode": ["r"]
+ },
+ "targetTemperature": {
+ "type": "number",
+ "mode": ["r"]
+ },
+ "coolTargetTemperature": {
+ "type": "range",
+ "mode": ["w"],
+ "value": {
+ "w": {
+ "max": 30,
+ "min": 18,
+ "step": 1
+ }
+ }
+ },
+ "unit": "C"
+ },
+ {
+ "currentTemperature": {
+ "type": "number",
+ "mode": ["r"]
+ },
+ "targetTemperature": {
+ "type": "number",
+ "mode": ["r"]
+ },
+ "coolTargetTemperature": {
+ "type": "range",
+ "mode": ["w"],
+ "value": {
+ "w": {
+ "max": 86,
+ "min": 64,
+ "step": 2
+ }
+ }
+ },
+ "unit": "F"
+ }
+ ],
"timer": {
"relativeHourToStart": {
"mode": ["r", "w"],
@@ -149,6 +205,24 @@
"mode": ["r", "w"],
"type": "number"
}
+ },
+ "windDirection": {
+ "rotateUpDown": {
+ "type": "boolean",
+ "mode": ["r", "w"],
+ "value": {
+ "r": [true, false],
+ "w": [true, false]
+ }
+ },
+ "rotateLeftRight": {
+ "type": "boolean",
+ "mode": ["r", "w"],
+ "value": {
+ "r": [true, false],
+ "w": [true, false]
+ }
+ }
}
}
}
diff --git a/tests/components/lg_thinq/fixtures/air_conditioner/status.json b/tests/components/lg_thinq/fixtures/air_conditioner/status.json
index 90d15d1ae16..8440e7da28c 100644
--- a/tests/components/lg_thinq/fixtures/air_conditioner/status.json
+++ b/tests/components/lg_thinq/fixtures/air_conditioner/status.json
@@ -32,6 +32,19 @@
"targetTemperature": 19,
"unit": "C"
},
+ "temperatureInUnits": [
+ {
+ "currentTemperature": 25,
+ "targetTemperature": 19,
+ "unit": "C"
+ },
+ {
+ "currentTemperature": 77,
+ "targetTemperature": 66,
+ "unit": "F"
+ }
+ ],
+
"timer": {
"relativeStartTimer": "UNSET",
"relativeStopTimer": "UNSET",
@@ -39,5 +52,9 @@
"absoluteStopTimer": "UNSET",
"absoluteHourToStart": 13,
"absoluteMinuteToStart": 14
+ },
+ "windDirection": {
+ "rotateUpDown": false,
+ "rotateLeftRight": false
}
}
diff --git a/tests/components/lg_thinq/snapshots/test_climate.ambr b/tests/components/lg_thinq/snapshots/test_climate.ambr
index e9470c3de03..9369367a1f7 100644
--- a/tests/components/lg_thinq/snapshots/test_climate.ambr
+++ b/tests/components/lg_thinq/snapshots/test_climate.ambr
@@ -43,7 +43,7 @@
'original_name': None,
'platform': 'lg_thinq',
'previous_unique_id': None,
- 'supported_features': ,
+ 'supported_features': ,
'translation_key': ,
'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_climate_air_conditioner',
'unit_of_measurement': None,
@@ -72,7 +72,9 @@
'preset_modes': list([
'air_clean',
]),
- 'supported_features': ,
+ 'supported_features': ,
+ 'target_temp_high': None,
+ 'target_temp_low': None,
'target_temp_step': 1,
'temperature': 19,
}),
diff --git a/tests/components/lg_thinq/snapshots/test_sensor.ambr b/tests/components/lg_thinq/snapshots/test_sensor.ambr
index 2c58b109e61..fe1929944f9 100644
--- a/tests/components/lg_thinq/snapshots/test_sensor.ambr
+++ b/tests/components/lg_thinq/snapshots/test_sensor.ambr
@@ -1,4 +1,51 @@
# serializer version: 1
+# name: test_all_entities[sensor.test_air_conditioner_filter_remaining-entry]
+ EntityRegistryEntrySnapshot({
+ 'aliases': set({
+ }),
+ 'area_id': None,
+ 'capabilities': None,
+ 'config_entry_id': ,
+ 'device_class': None,
+ 'device_id': ,
+ 'disabled_by': None,
+ 'domain': 'sensor',
+ 'entity_category': None,
+ 'entity_id': 'sensor.test_air_conditioner_filter_remaining',
+ 'has_entity_name': True,
+ 'hidden_by': None,
+ 'icon': None,
+ 'id': ,
+ 'labels': set({
+ }),
+ 'name': None,
+ 'options': dict({
+ }),
+ 'original_device_class': None,
+ 'original_icon': None,
+ 'original_name': 'Filter remaining',
+ 'platform': 'lg_thinq',
+ 'previous_unique_id': None,
+ 'supported_features': 0,
+ 'translation_key': ,
+ 'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_filter_lifetime',
+ 'unit_of_measurement': ,
+ })
+# ---
+# name: test_all_entities[sensor.test_air_conditioner_filter_remaining-state]
+ StateSnapshot({
+ 'attributes': ReadOnlyDict({
+ 'friendly_name': 'Test air conditioner Filter remaining',
+ 'unit_of_measurement': ,
+ }),
+ 'context': ,
+ 'entity_id': 'sensor.test_air_conditioner_filter_remaining',
+ 'last_changed': ,
+ 'last_reported': ,
+ 'last_updated': ,
+ 'state': '540',
+ })
+# ---
# name: test_all_entities[sensor.test_air_conditioner_humidity-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
diff --git a/tests/components/meteo_france/conftest.py b/tests/components/meteo_france/conftest.py
index 123fc00e42a..eb28ec0a838 100644
--- a/tests/components/meteo_france/conftest.py
+++ b/tests/components/meteo_france/conftest.py
@@ -2,13 +2,48 @@
from unittest.mock import patch
+from meteofrance_api.model import CurrentPhenomenons, Forecast, Rain
import pytest
+from homeassistant.components.meteo_france.const import CONF_CITY, DOMAIN
+from homeassistant.config_entries import SOURCE_USER
+from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE
+from homeassistant.core import HomeAssistant
+
+from tests.common import MockConfigEntry, load_json_object_fixture
+
@pytest.fixture(autouse=True)
def patch_requests():
"""Stub out services that makes requests."""
- patch_client = patch("homeassistant.components.meteo_france.MeteoFranceClient")
+ with patch("homeassistant.components.meteo_france.MeteoFranceClient") as mock_data:
+ mock_data = mock_data.return_value
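+ # Return canned forecast, rain and weather-alert data loaded from JSON fixtures.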
+ mock_data.get_forecast.return_value = Forecast(
+ load_json_object_fixture("raw_forecast.json", DOMAIN)
+ )
+ mock_data.get_rain.return_value = Rain(
+ load_json_object_fixture("raw_rain.json", DOMAIN)
+ )
+ mock_data.get_warning_current_phenomenoms.return_value = CurrentPhenomenons(
+ load_json_object_fixture("raw_warning_current_phenomenoms.json", DOMAIN)
+ )
+ yield mock_data
- with patch_client:
- yield
+
+@pytest.fixture(name="config_entry")
+def get_config_entry(hass: HomeAssistant) -> MockConfigEntry:
+ """Create and register mock config entry."""
+ entry_data = {
+ CONF_CITY: "La Clusaz",
+ CONF_LATITUDE: 45.90417,
+ CONF_LONGITUDE: 6.42306,
+ }
+ config_entry = MockConfigEntry(
+ domain=DOMAIN,
+ source=SOURCE_USER,
+ unique_id=f"{entry_data[CONF_LATITUDE], entry_data[CONF_LONGITUDE]}",
+ title=entry_data[CONF_CITY],
+ data=entry_data,
+ )
+ config_entry.add_to_hass(hass)
+ return config_entry
diff --git a/tests/components/meteo_france/fixtures/raw_forecast.json b/tests/components/meteo_france/fixtures/raw_forecast.json
new file mode 100644
index 00000000000..3c0552136d2
--- /dev/null
+++ b/tests/components/meteo_france/fixtures/raw_forecast.json
@@ -0,0 +1,53 @@
+{
+ "updated_on": 1737995400,
+ "position": {
+ "country": "FR - France",
+ "dept": "74",
+ "insee": "74080",
+ "lat": 45.90417,
+ "lon": 6.42306,
+ "name": "La Clusaz",
+ "rain_product_available": 1,
+ "timezone": "Europe/Paris"
+ },
+ "daily_forecast": [
+ {
+ "T": { "max": 10.4, "min": 6.9, "sea": null },
+ "dt": 1737936000,
+ "humidity": { "max": 90, "min": 65 },
+ "precipitation": { "24h": 1.3 },
+ "sun": { "rise": 1737963392, "set": 1737996163 },
+ "uv": 1,
+ "weather12H": { "desc": "Eclaircies", "icon": "p2j" }
+ }
+ ],
+ "forecast": [
+ {
+ "T": { "value": 9.1, "windchill": 5.4 },
+ "clouds": 70,
+ "dt": 1737990000,
+ "humidity": 75,
+ "iso0": 1250,
+ "rain": { "1h": 0 },
+ "rain snow limit": "Non pertinent",
+ "sea_level": 988.7,
+ "snow": { "1h": 0 },
+ "uv": 1,
+ "weather": { "desc": "Eclaircies", "icon": "p2j" },
+ "wind": {
+ "direction": 200,
+ "gust": 18,
+ "icon": "SSO",
+ "speed": 8
+ }
+ }
+ ],
+ "probability_forecast": [
+ {
+ "dt": 1737990000,
+ "freezing": 0,
+ "rain": { "3h": null, "6h": null },
+ "snow": { "3h": null, "6h": null }
+ }
+ ]
+}
diff --git a/tests/components/meteo_france/fixtures/raw_rain.json b/tests/components/meteo_france/fixtures/raw_rain.json
new file mode 100644
index 00000000000..a9f17b8a98e
--- /dev/null
+++ b/tests/components/meteo_france/fixtures/raw_rain.json
@@ -0,0 +1,24 @@
+{
+ "position": {
+ "lat": 48.807166,
+ "lon": 2.239895,
+ "alti": 76,
+ "name": "Meudon",
+ "country": "FR - France",
+ "dept": "92",
+ "timezone": "Europe/Paris"
+ },
+ "updated_on": 1589995200,
+ "quality": 0,
+ "forecast": [
+ { "dt": 1589996100, "rain": 1, "desc": "Temps sec" },
+ { "dt": 1589996400, "rain": 1, "desc": "Temps sec" },
+ { "dt": 1589996700, "rain": 1, "desc": "Temps sec" },
+ { "dt": 1589997000, "rain": 2, "desc": "Pluie faible" },
+ { "dt": 1589997300, "rain": 3, "desc": "Pluie modérée" },
+ { "dt": 1589997600, "rain": 2, "desc": "Pluie faible" },
+ { "dt": 1589998200, "rain": 1, "desc": "Temps sec" },
+ { "dt": 1589998800, "rain": 1, "desc": "Temps sec" },
+ { "dt": 1589999400, "rain": 1, "desc": "Temps sec" }
+ ]
+}
diff --git a/tests/components/meteo_france/fixtures/raw_warning_current_phenomenoms.json b/tests/components/meteo_france/fixtures/raw_warning_current_phenomenoms.json
new file mode 100644
index 00000000000..8d84e512fb6
--- /dev/null
+++ b/tests/components/meteo_france/fixtures/raw_warning_current_phenomenoms.json
@@ -0,0 +1,13 @@
+{
+ "update_time": 1591279200,
+ "end_validity_time": 1591365600,
+ "domain_id": "32",
+ "phenomenons_max_colors": [
+ { "phenomenon_id": "6", "phenomenon_max_color_id": 1 },
+ { "phenomenon_id": "4", "phenomenon_max_color_id": 1 },
+ { "phenomenon_id": "5", "phenomenon_max_color_id": 3 },
+ { "phenomenon_id": "2", "phenomenon_max_color_id": 1 },
+ { "phenomenon_id": "1", "phenomenon_max_color_id": 1 },
+ { "phenomenon_id": "3", "phenomenon_max_color_id": 2 }
+ ]
+}
diff --git a/tests/components/meteo_france/snapshots/test_sensor.ambr b/tests/components/meteo_france/snapshots/test_sensor.ambr
new file mode 100644
index 00000000000..85fdec0fcea
--- /dev/null
+++ b/tests/components/meteo_france/snapshots/test_sensor.ambr
@@ -0,0 +1,764 @@
+# serializer version: 1
+# name: test_sensor[sensor.32_weather_alert-entry]
+ EntityRegistryEntrySnapshot({
+ 'aliases': set({
+ }),
+ 'area_id': None,
+ 'capabilities': None,
+ 'config_entry_id': ,
+ 'device_class': None,
+ 'device_id': ,
+ 'disabled_by': None,
+ 'domain': 'sensor',
+ 'entity_category': None,
+ 'entity_id': 'sensor.32_weather_alert',
+ 'has_entity_name': False,
+ 'hidden_by': None,
+ 'icon': None,
+ 'id': ,
+ 'labels': set({
+ }),
+ 'name': None,
+ 'options': dict({
+ }),
+ 'original_device_class': None,
+ 'original_icon': 'mdi:weather-cloudy-alert',
+ 'original_name': '32 Weather alert',
+ 'platform': 'meteo_france',
+ 'previous_unique_id': None,
+ 'supported_features': 0,
+ 'translation_key': None,
+ 'unique_id': '32 Weather alert',
+ 'unit_of_measurement': None,
+ })
+# ---
+# name: test_sensor[sensor.32_weather_alert-state]
+ StateSnapshot({
+ 'attributes': ReadOnlyDict({
+ 'Canicule': 'Vert',
+ 'Inondation': 'Vert',
+ 'Neige-verglas': 'Orange',
+ 'Orages': 'Jaune',
+ 'Pluie-inondation': 'Vert',
+ 'Vent violent': 'Vert',
+ 'attribution': 'Data provided by Météo-France',
+ 'friendly_name': '32 Weather alert',
+ 'icon': 'mdi:weather-cloudy-alert',
+ }),
+ 'context': ,
+ 'entity_id': 'sensor.32_weather_alert',
+ 'last_changed': ,
+ 'last_reported': ,
+ 'last_updated': ,
+ 'state': 'Orange',
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_cloud_cover-entry]
+ EntityRegistryEntrySnapshot({
+ 'aliases': set({
+ }),
+ 'area_id': None,
+ 'capabilities': None,
+ 'config_entry_id': ,
+ 'device_class': None,
+ 'device_id': ,
+ 'disabled_by': None,
+ 'domain': 'sensor',
+ 'entity_category': None,
+ 'entity_id': 'sensor.la_clusaz_cloud_cover',
+ 'has_entity_name': False,
+ 'hidden_by': None,
+ 'icon': None,
+ 'id': ,
+ 'labels': set({
+ }),
+ 'name': None,
+ 'options': dict({
+ }),
+ 'original_device_class': None,
+ 'original_icon': 'mdi:weather-partly-cloudy',
+ 'original_name': 'La Clusaz Cloud cover',
+ 'platform': 'meteo_france',
+ 'previous_unique_id': None,
+ 'supported_features': 0,
+ 'translation_key': None,
+ 'unique_id': '45.90417,6.42306_cloud',
+ 'unit_of_measurement': '%',
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_cloud_cover-state]
+ StateSnapshot({
+ 'attributes': ReadOnlyDict({
+ 'attribution': 'Data provided by Météo-France',
+ 'friendly_name': 'La Clusaz Cloud cover',
+ 'icon': 'mdi:weather-partly-cloudy',
+ 'unit_of_measurement': '%',
+ }),
+ 'context': ,
+ 'entity_id': 'sensor.la_clusaz_cloud_cover',
+ 'last_changed': ,
+ 'last_reported': ,
+ 'last_updated': ,
+ 'state': '70',
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_daily_original_condition-entry]
+ EntityRegistryEntrySnapshot({
+ 'aliases': set({
+ }),
+ 'area_id': None,
+ 'capabilities': None,
+ 'config_entry_id': ,
+ 'device_class': None,
+ 'device_id': ,
+ 'disabled_by': None,
+ 'domain': 'sensor',
+ 'entity_category': None,
+ 'entity_id': 'sensor.la_clusaz_daily_original_condition',
+ 'has_entity_name': False,
+ 'hidden_by': None,
+ 'icon': None,
+ 'id': ,
+ 'labels': set({
+ }),
+ 'name': None,
+ 'options': dict({
+ }),
+ 'original_device_class': None,
+ 'original_icon': None,
+ 'original_name': 'La Clusaz Daily original condition',
+ 'platform': 'meteo_france',
+ 'previous_unique_id': None,
+ 'supported_features': 0,
+ 'translation_key': None,
+ 'unique_id': '45.90417,6.42306_daily_original_condition',
+ 'unit_of_measurement': None,
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_daily_original_condition-state]
+ StateSnapshot({
+ 'attributes': ReadOnlyDict({
+ 'attribution': 'Data provided by Météo-France',
+ 'friendly_name': 'La Clusaz Daily original condition',
+ }),
+ 'context': ,
+ 'entity_id': 'sensor.la_clusaz_daily_original_condition',
+ 'last_changed': ,
+ 'last_reported': ,
+ 'last_updated': ,
+ 'state': 'Eclaircies',
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_daily_precipitation-entry]
+ EntityRegistryEntrySnapshot({
+ 'aliases': set({
+ }),
+ 'area_id': None,
+ 'capabilities': None,
+ 'config_entry_id': ,
+ 'device_class': None,
+ 'device_id': ,
+ 'disabled_by': None,
+ 'domain': 'sensor',
+ 'entity_category': None,
+ 'entity_id': 'sensor.la_clusaz_daily_precipitation',
+ 'has_entity_name': False,
+ 'hidden_by': None,
+ 'icon': None,
+ 'id': ,
+ 'labels': set({
+ }),
+ 'name': None,
+ 'options': dict({
+ }),
+ 'original_device_class': ,
+ 'original_icon': None,
+ 'original_name': 'La Clusaz Daily precipitation',
+ 'platform': 'meteo_france',
+ 'previous_unique_id': None,
+ 'supported_features': 0,
+ 'translation_key': None,
+ 'unique_id': '45.90417,6.42306_precipitation',
+ 'unit_of_measurement': ,
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_daily_precipitation-state]
+ StateSnapshot({
+ 'attributes': ReadOnlyDict({
+ 'attribution': 'Data provided by Météo-France',
+ 'device_class': 'precipitation',
+ 'friendly_name': 'La Clusaz Daily precipitation',
+ 'unit_of_measurement': ,
+ }),
+ 'context': ,
+ 'entity_id': 'sensor.la_clusaz_daily_precipitation',
+ 'last_changed': ,
+ 'last_reported': ,
+ 'last_updated': ,
+ 'state': '1.3',
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_freeze_chance-entry]
+ EntityRegistryEntrySnapshot({
+ 'aliases': set({
+ }),
+ 'area_id': None,
+ 'capabilities': None,
+ 'config_entry_id': ,
+ 'device_class': None,
+ 'device_id': ,
+ 'disabled_by': None,
+ 'domain': 'sensor',
+ 'entity_category': None,
+ 'entity_id': 'sensor.la_clusaz_freeze_chance',
+ 'has_entity_name': False,
+ 'hidden_by': None,
+ 'icon': None,
+ 'id': ,
+ 'labels': set({
+ }),
+ 'name': None,
+ 'options': dict({
+ }),
+ 'original_device_class': None,
+ 'original_icon': 'mdi:snowflake',
+ 'original_name': 'La Clusaz Freeze chance',
+ 'platform': 'meteo_france',
+ 'previous_unique_id': None,
+ 'supported_features': 0,
+ 'translation_key': None,
+ 'unique_id': '45.90417,6.42306_freeze_chance',
+ 'unit_of_measurement': '%',
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_freeze_chance-state]
+ StateSnapshot({
+ 'attributes': ReadOnlyDict({
+ 'attribution': 'Data provided by Météo-France',
+ 'friendly_name': 'La Clusaz Freeze chance',
+ 'icon': 'mdi:snowflake',
+ 'unit_of_measurement': '%',
+ }),
+ 'context': ,
+ 'entity_id': 'sensor.la_clusaz_freeze_chance',
+ 'last_changed': ,
+ 'last_reported': ,
+ 'last_updated': ,
+ 'state': '0',
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_humidity-entry]
+ EntityRegistryEntrySnapshot({
+ 'aliases': set({
+ }),
+ 'area_id': None,
+ 'capabilities': dict({
+ 'state_class': ,
+ }),
+ 'config_entry_id': ,
+ 'device_class': None,
+ 'device_id': ,
+ 'disabled_by': None,
+ 'domain': 'sensor',
+ 'entity_category': None,
+ 'entity_id': 'sensor.la_clusaz_humidity',
+ 'has_entity_name': False,
+ 'hidden_by': None,
+ 'icon': None,
+ 'id': ,
+ 'labels': set({
+ }),
+ 'name': None,
+ 'options': dict({
+ }),
+ 'original_device_class': ,
+ 'original_icon': None,
+ 'original_name': 'La Clusaz Humidity',
+ 'platform': 'meteo_france',
+ 'previous_unique_id': None,
+ 'supported_features': 0,
+ 'translation_key': None,
+ 'unique_id': '45.90417,6.42306_humidity',
+ 'unit_of_measurement': '%',
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_humidity-state]
+ StateSnapshot({
+ 'attributes': ReadOnlyDict({
+ 'attribution': 'Data provided by Météo-France',
+ 'device_class': 'humidity',
+ 'friendly_name': 'La Clusaz Humidity',
+ 'state_class': ,
+ 'unit_of_measurement': '%',
+ }),
+ 'context': ,
+ 'entity_id': 'sensor.la_clusaz_humidity',
+ 'last_changed': ,
+ 'last_reported': ,
+ 'last_updated': ,
+ 'state': '75',
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_original_condition-entry]
+ EntityRegistryEntrySnapshot({
+ 'aliases': set({
+ }),
+ 'area_id': None,
+ 'capabilities': None,
+ 'config_entry_id': ,
+ 'device_class': None,
+ 'device_id': ,
+ 'disabled_by': None,
+ 'domain': 'sensor',
+ 'entity_category': None,
+ 'entity_id': 'sensor.la_clusaz_original_condition',
+ 'has_entity_name': False,
+ 'hidden_by': None,
+ 'icon': None,
+ 'id': ,
+ 'labels': set({
+ }),
+ 'name': None,
+ 'options': dict({
+ }),
+ 'original_device_class': None,
+ 'original_icon': None,
+ 'original_name': 'La Clusaz Original condition',
+ 'platform': 'meteo_france',
+ 'previous_unique_id': None,
+ 'supported_features': 0,
+ 'translation_key': None,
+ 'unique_id': '45.90417,6.42306_original_condition',
+ 'unit_of_measurement': None,
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_original_condition-state]
+ StateSnapshot({
+ 'attributes': ReadOnlyDict({
+ 'attribution': 'Data provided by Météo-France',
+ 'friendly_name': 'La Clusaz Original condition',
+ }),
+ 'context': ,
+ 'entity_id': 'sensor.la_clusaz_original_condition',
+ 'last_changed': ,
+ 'last_reported': ,
+ 'last_updated': ,
+ 'state': 'Eclaircies',
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_pressure-entry]
+ EntityRegistryEntrySnapshot({
+ 'aliases': set({
+ }),
+ 'area_id': None,
+ 'capabilities': dict({
+ 'state_class': ,
+ }),
+ 'config_entry_id': ,
+ 'device_class': None,
+ 'device_id': ,
+ 'disabled_by': None,
+ 'domain': 'sensor',
+ 'entity_category': None,
+ 'entity_id': 'sensor.la_clusaz_pressure',
+ 'has_entity_name': False,
+ 'hidden_by': None,
+ 'icon': None,
+ 'id': ,
+ 'labels': set({
+ }),
+ 'name': None,
+ 'options': dict({
+ }),
+ 'original_device_class': ,
+ 'original_icon': None,
+ 'original_name': 'La Clusaz Pressure',
+ 'platform': 'meteo_france',
+ 'previous_unique_id': None,
+ 'supported_features': 0,
+ 'translation_key': None,
+ 'unique_id': '45.90417,6.42306_pressure',
+ 'unit_of_measurement': ,
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_pressure-state]
+ StateSnapshot({
+ 'attributes': ReadOnlyDict({
+ 'attribution': 'Data provided by Météo-France',
+ 'device_class': 'pressure',
+ 'friendly_name': 'La Clusaz Pressure',
+ 'state_class': ,
+ 'unit_of_measurement': ,
+ }),
+ 'context': ,
+ 'entity_id': 'sensor.la_clusaz_pressure',
+ 'last_changed': ,
+ 'last_reported': ,
+ 'last_updated': ,
+ 'state': '988.7',
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_rain_chance-entry]
+ EntityRegistryEntrySnapshot({
+ 'aliases': set({
+ }),
+ 'area_id': None,
+ 'capabilities': None,
+ 'config_entry_id': ,
+ 'device_class': None,
+ 'device_id': ,
+ 'disabled_by': None,
+ 'domain': 'sensor',
+ 'entity_category': None,
+ 'entity_id': 'sensor.la_clusaz_rain_chance',
+ 'has_entity_name': False,
+ 'hidden_by': None,
+ 'icon': None,
+ 'id': ,
+ 'labels': set({
+ }),
+ 'name': None,
+ 'options': dict({
+ }),
+ 'original_device_class': None,
+ 'original_icon': 'mdi:weather-rainy',
+ 'original_name': 'La Clusaz Rain chance',
+ 'platform': 'meteo_france',
+ 'previous_unique_id': None,
+ 'supported_features': 0,
+ 'translation_key': None,
+ 'unique_id': '45.90417,6.42306_rain_chance',
+ 'unit_of_measurement': '%',
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_rain_chance-state]
+ StateSnapshot({
+ 'attributes': ReadOnlyDict({
+ 'attribution': 'Data provided by Météo-France',
+ 'friendly_name': 'La Clusaz Rain chance',
+ 'icon': 'mdi:weather-rainy',
+ 'unit_of_measurement': '%',
+ }),
+ 'context': ,
+ 'entity_id': 'sensor.la_clusaz_rain_chance',
+ 'last_changed': ,
+ 'last_reported': ,
+ 'last_updated': ,
+ 'state': 'unknown',
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_snow_chance-entry]
+ EntityRegistryEntrySnapshot({
+ 'aliases': set({
+ }),
+ 'area_id': None,
+ 'capabilities': None,
+ 'config_entry_id': ,
+ 'device_class': None,
+ 'device_id': ,
+ 'disabled_by': None,
+ 'domain': 'sensor',
+ 'entity_category': None,
+ 'entity_id': 'sensor.la_clusaz_snow_chance',
+ 'has_entity_name': False,
+ 'hidden_by': None,
+ 'icon': None,
+ 'id': ,
+ 'labels': set({
+ }),
+ 'name': None,
+ 'options': dict({
+ }),
+ 'original_device_class': None,
+ 'original_icon': 'mdi:weather-snowy',
+ 'original_name': 'La Clusaz Snow chance',
+ 'platform': 'meteo_france',
+ 'previous_unique_id': None,
+ 'supported_features': 0,
+ 'translation_key': None,
+ 'unique_id': '45.90417,6.42306_snow_chance',
+ 'unit_of_measurement': '%',
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_snow_chance-state]
+ StateSnapshot({
+ 'attributes': ReadOnlyDict({
+ 'attribution': 'Data provided by Météo-France',
+ 'friendly_name': 'La Clusaz Snow chance',
+ 'icon': 'mdi:weather-snowy',
+ 'unit_of_measurement': '%',
+ }),
+ 'context': ,
+ 'entity_id': 'sensor.la_clusaz_snow_chance',
+ 'last_changed': ,
+ 'last_reported': ,
+ 'last_updated': ,
+ 'state': 'unknown',
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_temperature-entry]
+ EntityRegistryEntrySnapshot({
+ 'aliases': set({
+ }),
+ 'area_id': None,
+ 'capabilities': dict({
+ 'state_class': ,
+ }),
+ 'config_entry_id': ,
+ 'device_class': None,
+ 'device_id': ,
+ 'disabled_by': None,
+ 'domain': 'sensor',
+ 'entity_category': None,
+ 'entity_id': 'sensor.la_clusaz_temperature',
+ 'has_entity_name': False,
+ 'hidden_by': None,
+ 'icon': None,
+ 'id': ,
+ 'labels': set({
+ }),
+ 'name': None,
+ 'options': dict({
+ }),
+ 'original_device_class': ,
+ 'original_icon': None,
+ 'original_name': 'La Clusaz Temperature',
+ 'platform': 'meteo_france',
+ 'previous_unique_id': None,
+ 'supported_features': 0,
+ 'translation_key': None,
+ 'unique_id': '45.90417,6.42306_temperature',
+ 'unit_of_measurement': ,
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_temperature-state]
+ StateSnapshot({
+ 'attributes': ReadOnlyDict({
+ 'attribution': 'Data provided by Météo-France',
+ 'device_class': 'temperature',
+ 'friendly_name': 'La Clusaz Temperature',
+ 'state_class': ,
+ 'unit_of_measurement': ,
+ }),
+ 'context': ,
+ 'entity_id': 'sensor.la_clusaz_temperature',
+ 'last_changed': ,
+ 'last_reported': ,
+ 'last_updated': ,
+ 'state': '9.1',
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_uv-entry]
+ EntityRegistryEntrySnapshot({
+ 'aliases': set({
+ }),
+ 'area_id': None,
+ 'capabilities': None,
+ 'config_entry_id': ,
+ 'device_class': None,
+ 'device_id': ,
+ 'disabled_by': None,
+ 'domain': 'sensor',
+ 'entity_category': None,
+ 'entity_id': 'sensor.la_clusaz_uv',
+ 'has_entity_name': False,
+ 'hidden_by': None,
+ 'icon': None,
+ 'id': ,
+ 'labels': set({
+ }),
+ 'name': None,
+ 'options': dict({
+ }),
+ 'original_device_class': None,
+ 'original_icon': 'mdi:sunglasses',
+ 'original_name': 'La Clusaz UV',
+ 'platform': 'meteo_france',
+ 'previous_unique_id': None,
+ 'supported_features': 0,
+ 'translation_key': None,
+ 'unique_id': '45.90417,6.42306_uv',
+ 'unit_of_measurement': 'UV index',
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_uv-state]
+ StateSnapshot({
+ 'attributes': ReadOnlyDict({
+ 'attribution': 'Data provided by Météo-France',
+ 'friendly_name': 'La Clusaz UV',
+ 'icon': 'mdi:sunglasses',
+ 'unit_of_measurement': 'UV index',
+ }),
+ 'context': ,
+ 'entity_id': 'sensor.la_clusaz_uv',
+ 'last_changed': ,
+ 'last_reported': ,
+ 'last_updated': ,
+ 'state': '1',
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_wind_gust-entry]
+ EntityRegistryEntrySnapshot({
+ 'aliases': set({
+ }),
+ 'area_id': None,
+ 'capabilities': dict({
+ 'state_class': ,
+ }),
+ 'config_entry_id': ,
+ 'device_class': None,
+ 'device_id': ,
+ 'disabled_by': None,
+ 'domain': 'sensor',
+ 'entity_category': None,
+ 'entity_id': 'sensor.la_clusaz_wind_gust',
+ 'has_entity_name': False,
+ 'hidden_by': None,
+ 'icon': None,
+ 'id': ,
+ 'labels': set({
+ }),
+ 'name': None,
+ 'options': dict({
+ }),
+ 'original_device_class': ,
+ 'original_icon': 'mdi:weather-windy-variant',
+ 'original_name': 'La Clusaz Wind gust',
+ 'platform': 'meteo_france',
+ 'previous_unique_id': None,
+ 'supported_features': 0,
+ 'translation_key': None,
+ 'unique_id': '45.90417,6.42306_wind_gust',
+ 'unit_of_measurement': ,
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_wind_gust-state]
+ StateSnapshot({
+ 'attributes': ReadOnlyDict({
+ 'attribution': 'Data provided by Météo-France',
+ 'device_class': 'wind_speed',
+ 'friendly_name': 'La Clusaz Wind gust',
+ 'icon': 'mdi:weather-windy-variant',
+ 'state_class': ,
+ 'unit_of_measurement': ,
+ }),
+ 'context': ,
+ 'entity_id': 'sensor.la_clusaz_wind_gust',
+ 'last_changed': ,
+ 'last_reported': ,
+ 'last_updated': ,
+ 'state': '65',
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_wind_speed-entry]
+ EntityRegistryEntrySnapshot({
+ 'aliases': set({
+ }),
+ 'area_id': None,
+ 'capabilities': dict({
+ 'state_class': ,
+ }),
+ 'config_entry_id': ,
+ 'device_class': None,
+ 'device_id': ,
+ 'disabled_by': None,
+ 'domain': 'sensor',
+ 'entity_category': None,
+ 'entity_id': 'sensor.la_clusaz_wind_speed',
+ 'has_entity_name': False,
+ 'hidden_by': None,
+ 'icon': None,
+ 'id': ,
+ 'labels': set({
+ }),
+ 'name': None,
+ 'options': dict({
+ }),
+ 'original_device_class': ,
+ 'original_icon': None,
+ 'original_name': 'La Clusaz Wind speed',
+ 'platform': 'meteo_france',
+ 'previous_unique_id': None,
+ 'supported_features': 0,
+ 'translation_key': None,
+ 'unique_id': '45.90417,6.42306_wind_speed',
+ 'unit_of_measurement': ,
+ })
+# ---
+# name: test_sensor[sensor.la_clusaz_wind_speed-state]
+ StateSnapshot({
+ 'attributes': ReadOnlyDict({
+ 'attribution': 'Data provided by Météo-France',
+ 'device_class': 'wind_speed',
+ 'friendly_name': 'La Clusaz Wind speed',
+ 'state_class': ,
+ 'unit_of_measurement': ,
+ }),
+ 'context': ,
+ 'entity_id': 'sensor.la_clusaz_wind_speed',
+ 'last_changed': ,
+ 'last_reported': ,
+ 'last_updated': ,
+ 'state': '29',
+ })
+# ---
+# name: test_sensor[sensor.meudon_next_rain-entry]
+ EntityRegistryEntrySnapshot({
+ 'aliases': set({
+ }),
+ 'area_id': None,
+ 'capabilities': None,
+ 'config_entry_id': ,
+ 'device_class': None,
+ 'device_id': ,
+ 'disabled_by': None,
+ 'domain': 'sensor',
+ 'entity_category': None,
+ 'entity_id': 'sensor.meudon_next_rain',
+ 'has_entity_name': False,
+ 'hidden_by': None,
+ 'icon': None,
+ 'id': ,
+ 'labels': set({
+ }),
+ 'name': None,
+ 'options': dict({
+ }),
+ 'original_device_class': ,
+ 'original_icon': None,
+ 'original_name': 'Meudon Next rain',
+ 'platform': 'meteo_france',
+ 'previous_unique_id': None,
+ 'supported_features': 0,
+ 'translation_key': None,
+ 'unique_id': '48.807166,2.239895_next_rain',
+ 'unit_of_measurement': None,
+ })
+# ---
+# name: test_sensor[sensor.meudon_next_rain-state]
+ StateSnapshot({
+ 'attributes': ReadOnlyDict({
+ '1_hour_forecast': dict({
+ '0 min': 'Temps sec',
+ '10 min': 'Temps sec',
+ '15 min': 'Pluie faible',
+ '20 min': 'Pluie modérée',
+ '25 min': 'Pluie faible',
+ '35 min': 'Temps sec',
+ '45 min': 'Temps sec',
+ '5 min': 'Temps sec',
+ '55 min': 'Temps sec',
+ }),
+ 'attribution': 'Data provided by Météo-France',
+ 'device_class': 'timestamp',
+ 'forecast_time_ref': '2020-05-20T17:35:00+00:00',
+ 'friendly_name': 'Meudon Next rain',
+ }),
+ 'context': ,
+ 'entity_id': 'sensor.meudon_next_rain',
+ 'last_changed': ,
+ 'last_reported': ,
+ 'last_updated': ,
+ 'state': '2020-05-20T17:50:00+00:00',
+ })
+# ---
diff --git a/tests/components/meteo_france/snapshots/test_weather.ambr b/tests/components/meteo_france/snapshots/test_weather.ambr
new file mode 100644
index 00000000000..9e7d7631479
--- /dev/null
+++ b/tests/components/meteo_france/snapshots/test_weather.ambr
@@ -0,0 +1,59 @@
+# serializer version: 1
+# name: test_weather[weather.la_clusaz-entry]
+ EntityRegistryEntrySnapshot({
+ 'aliases': set({
+ }),
+ 'area_id': None,
+ 'capabilities': None,
+ 'config_entry_id': ,
+ 'device_class': None,
+ 'device_id': ,
+ 'disabled_by': None,
+ 'domain': 'weather',
+ 'entity_category': None,
+ 'entity_id': 'weather.la_clusaz',
+ 'has_entity_name': False,
+ 'hidden_by': None,
+ 'icon': None,
+ 'id': ,
+ 'labels': set({
+ }),
+ 'name': None,
+ 'options': dict({
+ }),
+ 'original_device_class': None,
+ 'original_icon': None,
+ 'original_name': 'La Clusaz',
+ 'platform': 'meteo_france',
+ 'previous_unique_id': None,
+ 'supported_features': ,
+ 'translation_key': None,
+ 'unique_id': '45.90417,6.42306',
+ 'unit_of_measurement': None,
+ })
+# ---
+# name: test_weather[weather.la_clusaz-state]
+ StateSnapshot({
+ 'attributes': ReadOnlyDict({
+ 'attribution': 'Data provided by Météo-France',
+ 'friendly_name': 'La Clusaz',
+ 'humidity': 75,
+ 'precipitation_unit': ,
+ 'pressure': 988.7,
+ 'pressure_unit': ,
+ 'supported_features': ,
+ 'temperature': 9.1,
+ 'temperature_unit': ,
+ 'visibility_unit': ,
+ 'wind_bearing': 200,
+ 'wind_speed': 28.8,
+ 'wind_speed_unit': ,
+ }),
+ 'context': ,
+ 'entity_id': 'weather.la_clusaz',
+ 'last_changed': ,
+ 'last_reported': ,
+ 'last_updated': ,
+ 'state': 'partlycloudy',
+ })
+# ---
diff --git a/tests/components/meteo_france/test_sensor.py b/tests/components/meteo_france/test_sensor.py
new file mode 100644
index 00000000000..be77de0008b
--- /dev/null
+++ b/tests/components/meteo_france/test_sensor.py
@@ -0,0 +1,32 @@
+"""Test Météo France weather entity."""
+
+from collections.abc import Generator
+from unittest.mock import patch
+
+import pytest
+from syrupy.assertion import SnapshotAssertion
+
+from homeassistant.const import Platform
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers import entity_registry as er
+
+from tests.common import MockConfigEntry, snapshot_platform
+
+
+@pytest.fixture(autouse=True)
+def override_platforms() -> Generator[None]:
+ """Override PLATFORMS."""
+ with patch("homeassistant.components.meteo_france.PLATFORMS", [Platform.SENSOR]):
+ yield
+
+
+@pytest.mark.usefixtures("entity_registry_enabled_by_default")
+async def test_sensor(
+ hass: HomeAssistant,
+ config_entry: MockConfigEntry,
+ entity_registry: er.EntityRegistry,
+ snapshot: SnapshotAssertion,
+) -> None:
+ """Test the sensor entity."""
+ await hass.config_entries.async_setup(config_entry.entry_id)
+ await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id)
diff --git a/tests/components/meteo_france/test_weather.py b/tests/components/meteo_france/test_weather.py
new file mode 100644
index 00000000000..cd55ac31b27
--- /dev/null
+++ b/tests/components/meteo_france/test_weather.py
@@ -0,0 +1,31 @@
+"""Test Météo France weather entity."""
+
+from collections.abc import Generator
+from unittest.mock import patch
+
+import pytest
+from syrupy.assertion import SnapshotAssertion
+
+from homeassistant.const import Platform
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers import entity_registry as er
+
+from tests.common import MockConfigEntry, snapshot_platform
+
+
+@pytest.fixture(autouse=True)
+def override_platforms() -> Generator[None]:
+ """Override PLATFORMS."""
+ with patch("homeassistant.components.meteo_france.PLATFORMS", [Platform.WEATHER]):
+ yield
+
+
+async def test_weather(
+ hass: HomeAssistant,
+ config_entry: MockConfigEntry,
+ entity_registry: er.EntityRegistry,
+ snapshot: SnapshotAssertion,
+) -> None:
+ """Test the weather entity."""
+ await hass.config_entries.async_setup(config_entry.entry_id)
+ await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id)
diff --git a/tests/components/motionmount/test_sensor.py b/tests/components/motionmount/test_sensor.py
new file mode 100644
index 00000000000..bb68c67ce62
--- /dev/null
+++ b/tests/components/motionmount/test_sensor.py
@@ -0,0 +1,48 @@
+"""Tests for the MotionMount Sensor platform."""
+
+from unittest.mock import patch
+
+from motionmount import MotionMountSystemError
+import pytest
+
+from homeassistant.core import HomeAssistant
+
+from . import ZEROCONF_NAME
+
+from tests.common import MockConfigEntry
+
+MAC = bytes.fromhex("c4dd57f8a55f")
+
+
+@pytest.mark.parametrize(
+ ("system_status", "state"),
+ [
+ (None, "none"),
+ (MotionMountSystemError.MotorError, "motor"),
+ (MotionMountSystemError.ObstructionDetected, "obstruction"),
+ (MotionMountSystemError.TVWidthConstraintError, "tv_width_constraint"),
+ (MotionMountSystemError.HDMICECError, "hdmi_cec"),
+ (MotionMountSystemError.InternalError, "internal"),
+ ],
+)
+async def test_error_status_sensor_states(
+ hass: HomeAssistant,
+ mock_config_entry: MockConfigEntry,
+ system_status: MotionMountSystemError,
+ state: str,
+) -> None:
+ """Tests the state attributes."""
+ with patch(
+ "homeassistant.components.motionmount.motionmount.MotionMount",
+ autospec=True,
+ ) as motionmount_mock:
+ motionmount_mock.return_value.name = ZEROCONF_NAME
+ motionmount_mock.return_value.mac = MAC
+ motionmount_mock.return_value.is_authenticated = True
+ motionmount_mock.return_value.system_status = [system_status]
+
+ mock_config_entry.add_to_hass(hass)
+
+ assert await hass.config_entries.async_setup(mock_config_entry.entry_id)
+
+ assert hass.states.get("sensor.my_motionmount_error_status").state == state
diff --git a/tests/components/mqtt/conftest.py b/tests/components/mqtt/conftest.py
index 2a1e4012f51..87bbcecebe5 100644
--- a/tests/components/mqtt/conftest.py
+++ b/tests/components/mqtt/conftest.py
@@ -38,7 +38,7 @@ def temp_dir_prefix() -> str:
return "test"
-@pytest.fixture
+@pytest.fixture(autouse=True)
def mock_temp_dir(temp_dir_prefix: str) -> Generator[str]:
"""Mock the certificate temp directory."""
with patch(
diff --git a/tests/components/mqtt/test_client.py b/tests/components/mqtt/test_client.py
index ad64b39a480..2faa9310548 100644
--- a/tests/components/mqtt/test_client.py
+++ b/tests/components/mqtt/test_client.py
@@ -2082,7 +2082,7 @@ async def test_server_sock_buffer_size_with_websocket(
client.setblocking(False)
server.setblocking(False)
- class FakeWebsocket(paho_mqtt.WebsocketWrapper):
+ class FakeWebsocket(paho_mqtt._WebsocketWrapper):
def _do_handshake(self, *args, **kwargs):
pass
diff --git a/tests/components/mqtt/test_light_json.py b/tests/components/mqtt/test_light_json.py
index 512e4091438..7ddd04a09a6 100644
--- a/tests/components/mqtt/test_light_json.py
+++ b/tests/components/mqtt/test_light_json.py
@@ -100,7 +100,6 @@ from homeassistant.const import (
STATE_UNKNOWN,
)
from homeassistant.core import HomeAssistant, State
-from homeassistant.helpers.json import json_dumps
from homeassistant.util.json import json_loads
from .test_common import (
@@ -195,172 +194,6 @@ async def test_fail_setup_if_no_command_topic(
assert "required key not provided" in caplog.text
-@pytest.mark.parametrize(
- "hass_config",
- [
- help_custom_config(light.DOMAIN, COLOR_MODES_CONFIG, ({"color_temp": True},)),
- help_custom_config(light.DOMAIN, COLOR_MODES_CONFIG, ({"hs": True},)),
- help_custom_config(light.DOMAIN, COLOR_MODES_CONFIG, ({"rgb": True},)),
- help_custom_config(light.DOMAIN, COLOR_MODES_CONFIG, ({"xy": True},)),
- ],
-)
-async def test_fail_setup_if_color_mode_deprecated(
- mqtt_mock_entry: MqttMockHAClientGenerator,
- caplog: pytest.LogCaptureFixture,
-) -> None:
- """Test if setup fails if color mode is combined with deprecated config keys."""
- assert await mqtt_mock_entry()
- assert "supported_color_modes must not be combined with any of" in caplog.text
-
-
-@pytest.mark.parametrize(
- ("hass_config", "color_modes"),
- [
- (
- help_custom_config(light.DOMAIN, DEFAULT_CONFIG, ({"color_temp": True},)),
- ("color_temp",),
- ),
- (help_custom_config(light.DOMAIN, DEFAULT_CONFIG, ({"hs": True},)), ("hs",)),
- (help_custom_config(light.DOMAIN, DEFAULT_CONFIG, ({"rgb": True},)), ("rgb",)),
- (help_custom_config(light.DOMAIN, DEFAULT_CONFIG, ({"xy": True},)), ("xy",)),
- (
- help_custom_config(
- light.DOMAIN, DEFAULT_CONFIG, ({"color_temp": True, "rgb": True},)
- ),
- ("color_temp, rgb", "rgb, color_temp"),
- ),
- ],
- ids=["color_temp", "hs", "rgb", "xy", "color_temp, rgb"],
-)
-async def test_warning_if_color_mode_flags_are_used(
- mqtt_mock_entry: MqttMockHAClientGenerator,
- caplog: pytest.LogCaptureFixture,
- color_modes: tuple[str, ...],
-) -> None:
- """Test warnings deprecated config keys without supported color modes defined."""
- with patch(
- "homeassistant.components.mqtt.light.schema_json.async_create_issue"
- ) as mock_async_create_issue:
- assert await mqtt_mock_entry()
- assert any(
- (
- f"Deprecated flags [{color_modes_case}] used in MQTT JSON light config "
- "for handling color mode, please use `supported_color_modes` instead."
- in caplog.text
- )
- for color_modes_case in color_modes
- )
- mock_async_create_issue.assert_called_once()
-
-
-@pytest.mark.parametrize(
- ("config", "color_modes"),
- [
- (
- help_custom_config(light.DOMAIN, DEFAULT_CONFIG, ({"color_temp": True},)),
- ("color_temp",),
- ),
- (help_custom_config(light.DOMAIN, DEFAULT_CONFIG, ({"hs": True},)), ("hs",)),
- (help_custom_config(light.DOMAIN, DEFAULT_CONFIG, ({"rgb": True},)), ("rgb",)),
- (help_custom_config(light.DOMAIN, DEFAULT_CONFIG, ({"xy": True},)), ("xy",)),
- (
- help_custom_config(
- light.DOMAIN, DEFAULT_CONFIG, ({"color_temp": True, "rgb": True},)
- ),
- ("color_temp, rgb", "rgb, color_temp"),
- ),
- ],
- ids=["color_temp", "hs", "rgb", "xy", "color_temp, rgb"],
-)
-async def test_warning_on_discovery_if_color_mode_flags_are_used(
- hass: HomeAssistant,
- mqtt_mock_entry: MqttMockHAClientGenerator,
- caplog: pytest.LogCaptureFixture,
- config: dict[str, Any],
- color_modes: tuple[str, ...],
-) -> None:
- """Test warnings deprecated config keys with discovery."""
- with patch(
- "homeassistant.components.mqtt.light.schema_json.async_create_issue"
- ) as mock_async_create_issue:
- assert await mqtt_mock_entry()
-
- config_payload = json_dumps(config[mqtt.DOMAIN][light.DOMAIN][0])
- async_fire_mqtt_message(
- hass,
- "homeassistant/light/bla/config",
- config_payload,
- )
- await hass.async_block_till_done()
- assert any(
- (
- f"Deprecated flags [{color_modes_case}] used in MQTT JSON light config "
- "for handling color mode, please "
- "use `supported_color_modes` instead" in caplog.text
- )
- for color_modes_case in color_modes
- )
- mock_async_create_issue.assert_not_called()
-
-
-@pytest.mark.parametrize(
- "hass_config",
- [
- help_custom_config(
- light.DOMAIN,
- DEFAULT_CONFIG,
- ({"color_mode": True, "supported_color_modes": ["color_temp"]},),
- ),
- ],
- ids=["color_temp"],
-)
-async def test_warning_if_color_mode_option_flag_is_used(
- mqtt_mock_entry: MqttMockHAClientGenerator,
- caplog: pytest.LogCaptureFixture,
-) -> None:
- """Test warning deprecated color_mode option flag is used."""
- with patch(
- "homeassistant.components.mqtt.light.schema_json.async_create_issue"
- ) as mock_async_create_issue:
- assert await mqtt_mock_entry()
- assert "Deprecated flag `color_mode` used in MQTT JSON light config" in caplog.text
- mock_async_create_issue.assert_called_once()
-
-
-@pytest.mark.parametrize(
- "config",
- [
- help_custom_config(
- light.DOMAIN,
- DEFAULT_CONFIG,
- ({"color_mode": True, "supported_color_modes": ["color_temp"]},),
- ),
- ],
- ids=["color_temp"],
-)
-async def test_warning_on_discovery_if_color_mode_option_flag_is_used(
- hass: HomeAssistant,
- mqtt_mock_entry: MqttMockHAClientGenerator,
- caplog: pytest.LogCaptureFixture,
- config: dict[str, Any],
-) -> None:
- """Test warning deprecated color_mode option flag is used."""
- with patch(
- "homeassistant.components.mqtt.light.schema_json.async_create_issue"
- ) as mock_async_create_issue:
- assert await mqtt_mock_entry()
-
- config_payload = json_dumps(config[mqtt.DOMAIN][light.DOMAIN][0])
- async_fire_mqtt_message(
- hass,
- "homeassistant/light/bla/config",
- config_payload,
- )
- await hass.async_block_till_done()
- assert "Deprecated flag `color_mode` used in MQTT JSON light config" in caplog.text
- mock_async_create_issue.assert_not_called()
-
-
@pytest.mark.parametrize(
("hass_config", "error"),
[
@@ -400,82 +233,6 @@ async def test_fail_setup_if_color_modes_invalid(
assert error in caplog.text
-@pytest.mark.parametrize(
- ("hass_config", "kelvin", "color_temp_payload_value"),
- [
- (
- {
- mqtt.DOMAIN: {
- light.DOMAIN: {
- "schema": "json",
- "name": "test",
- "command_topic": "test_light/set",
- "state_topic": "test_light",
- "color_mode": True,
- "color_temp_kelvin": False,
- "supported_color_modes": "color_temp",
- }
- }
- },
- 5208,
- 192,
- ),
- (
- {
- mqtt.DOMAIN: {
- light.DOMAIN: {
- "schema": "json",
- "name": "test",
- "command_topic": "test_light/set",
- "state_topic": "test_light",
- "color_mode": True,
- "color_temp_kelvin": True,
- "supported_color_modes": "color_temp",
- }
- }
- },
- 5208,
- 5208,
- ),
- ],
- ids=["mireds", "kelvin"],
-)
-async def test_single_color_mode(
- hass: HomeAssistant,
- mqtt_mock_entry: MqttMockHAClientGenerator,
- kelvin: int,
- color_temp_payload_value: int,
-) -> None:
- """Test setup with single color_mode."""
- await mqtt_mock_entry()
- state = hass.states.get("light.test")
- assert state.state == STATE_UNKNOWN
-
- await common.async_turn_on(
- hass, "light.test", brightness=50, color_temp_kelvin=kelvin
- )
-
- payload = {
- "state": "ON",
- "brightness": 50,
- "color_mode": "color_temp",
- "color_temp": color_temp_payload_value,
- }
- async_fire_mqtt_message(
- hass,
- "test_light",
- json_dumps(payload),
- )
- color_modes = [light.ColorMode.COLOR_TEMP]
- state = hass.states.get("light.test")
- assert state.state == STATE_ON
-
- assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes
- assert state.attributes.get(light.ATTR_COLOR_TEMP_KELVIN) == 5208
- assert state.attributes.get(light.ATTR_BRIGHTNESS) == 50
- assert state.attributes.get(light.ATTR_COLOR_MODE) == color_modes[0]
-
-
@pytest.mark.parametrize("hass_config", [COLOR_MODES_CONFIG])
async def test_turn_on_with_unknown_color_mode_optimistic(
hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator
@@ -550,34 +307,6 @@ async def test_controlling_state_with_unknown_color_mode(
assert state.attributes.get(light.ATTR_COLOR_MODE) == light.ColorMode.COLOR_TEMP
-@pytest.mark.parametrize(
- "hass_config",
- [
- {
- mqtt.DOMAIN: {
- light.DOMAIN: {
- "schema": "json",
- "name": "test",
- "command_topic": "test_light_rgb/set",
- "rgb": True,
- }
- }
- }
- ],
-)
-async def test_legacy_rgb_light(
- hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator
-) -> None:
- """Test legacy RGB light flags expected features and color modes."""
- await mqtt_mock_entry()
-
- state = hass.states.get("light.test")
- color_modes = [light.ColorMode.HS]
- assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes
- expected_features = light.SUPPORT_FLASH | light.SUPPORT_TRANSITION
- assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == expected_features
-
-
@pytest.mark.parametrize(
"hass_config",
[
@@ -642,203 +371,9 @@ async def test_no_color_brightness_color_temp_if_no_topics(
"name": "test",
"state_topic": "test_light_rgb",
"command_topic": "test_light_rgb/set",
- "brightness": True,
- "color_temp": True,
- "effect": True,
- "rgb": True,
- "xy": True,
- "hs": True,
- "qos": "0",
- }
- }
- }
- ],
-)
-async def test_controlling_state_via_topic(
- hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator
-) -> None:
- """Test the controlling of the state via topic."""
- await mqtt_mock_entry()
-
- state = hass.states.get("light.test")
- assert state.state == STATE_UNKNOWN
- color_modes = [light.ColorMode.COLOR_TEMP, light.ColorMode.HS]
- assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes
- expected_features = (
- light.SUPPORT_EFFECT | light.SUPPORT_FLASH | light.SUPPORT_TRANSITION
- )
- assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == expected_features
- assert state.attributes.get("rgb_color") is None
- assert state.attributes.get("brightness") is None
- assert state.attributes.get("color_temp_kelvin") is None
- assert state.attributes.get("effect") is None
- assert state.attributes.get("xy_color") is None
- assert state.attributes.get("hs_color") is None
- assert not state.attributes.get(ATTR_ASSUMED_STATE)
-
- # Turn on the light
- async_fire_mqtt_message(
- hass,
- "test_light_rgb",
- '{"state":"ON",'
- '"color":{"r":255,"g":255,"b":255},'
- '"brightness":255,'
- '"color_temp":155,'
- '"effect":"colorloop"}',
- )
-
- state = hass.states.get("light.test")
- assert state.state == STATE_ON
- assert state.attributes.get("rgb_color") == (255, 255, 255)
- assert state.attributes.get("brightness") == 255
- assert state.attributes.get("color_temp_kelvin") is None # rgb color has priority
- assert state.attributes.get("effect") == "colorloop"
- assert state.attributes.get("xy_color") == (0.323, 0.329)
- assert state.attributes.get("hs_color") == (0.0, 0.0)
-
- # Turn on the light
- async_fire_mqtt_message(
- hass,
- "test_light_rgb",
- '{"state":"ON",'
- '"brightness":255,'
- '"color":null,'
- '"color_temp":155,'
- '"effect":"colorloop"}',
- )
-
- state = hass.states.get("light.test")
- assert state.state == STATE_ON
- assert state.attributes.get("rgb_color") == (
- 255,
- 253,
- 249,
- ) # temp converted to color
- assert state.attributes.get("brightness") == 255
- assert state.attributes.get("color_temp_kelvin") == 6451
- assert state.attributes.get("effect") == "colorloop"
- assert state.attributes.get("xy_color") == (0.328, 0.333) # temp converted to color
- assert state.attributes.get("hs_color") == (44.098, 2.43) # temp converted to color
-
- # Turn the light off
- async_fire_mqtt_message(hass, "test_light_rgb", '{"state":"OFF"}')
-
- state = hass.states.get("light.test")
- assert state.state == STATE_OFF
-
- async_fire_mqtt_message(hass, "test_light_rgb", '{"state":"ON", "brightness":100}')
-
- light_state = hass.states.get("light.test")
-
- assert light_state.attributes["brightness"] == 100
-
- async_fire_mqtt_message(
- hass, "test_light_rgb", '{"state":"ON", "color":{"r":125,"g":125,"b":125}}'
- )
-
- light_state = hass.states.get("light.test")
- assert light_state.attributes.get("rgb_color") == (255, 255, 255)
-
- async_fire_mqtt_message(
- hass, "test_light_rgb", '{"state":"ON", "color":{"x":0.135,"y":0.135}}'
- )
-
- light_state = hass.states.get("light.test")
- assert light_state.attributes.get("xy_color") == (0.141, 0.141)
-
- async_fire_mqtt_message(
- hass, "test_light_rgb", '{"state":"ON", "color":{"h":180,"s":50}}'
- )
-
- light_state = hass.states.get("light.test")
- assert light_state.attributes.get("hs_color") == (180.0, 50.0)
-
- async_fire_mqtt_message(hass, "test_light_rgb", '{"state":"ON", "color":null}')
-
- light_state = hass.states.get("light.test")
- assert "hs_color" in light_state.attributes # Color temp approximation
-
- async_fire_mqtt_message(hass, "test_light_rgb", '{"state":"ON", "color_temp":155}')
-
- light_state = hass.states.get("light.test")
- assert light_state.attributes.get("color_temp_kelvin") == 6451 # 155 mired
-
- async_fire_mqtt_message(hass, "test_light_rgb", '{"state":"ON", "color_temp":null}')
-
- light_state = hass.states.get("light.test")
- assert light_state.attributes.get("color_temp_kelvin") is None
-
- async_fire_mqtt_message(
- hass, "test_light_rgb", '{"state":"ON", "effect":"colorloop"}'
- )
-
- light_state = hass.states.get("light.test")
- assert light_state.attributes.get("effect") == "colorloop"
-
- async_fire_mqtt_message(
- hass,
- "test_light_rgb",
- '{"state":"ON",'
- '"color":{"r":255,"g":255,"b":255},'
- '"brightness":128,'
- '"color_temp":155,'
- '"effect":"colorloop"}',
- )
- light_state = hass.states.get("light.test")
- assert light_state.state == STATE_ON
- assert light_state.attributes.get("brightness") == 128
-
- async_fire_mqtt_message(
- hass,
- "test_light_rgb",
- '{"state":"OFF","brightness":0}',
- )
- light_state = hass.states.get("light.test")
- assert light_state.state == STATE_OFF
- assert light_state.attributes.get("brightness") is None
-
- # Simulate the lights color temp has been changed
- # while it was switched off
- async_fire_mqtt_message(
- hass,
- "test_light_rgb",
- '{"state":"OFF","color_temp":201}',
- )
- light_state = hass.states.get("light.test")
- assert light_state.state == STATE_OFF
- # Color temp attribute is not exposed while the lamp is off
- assert light_state.attributes.get("color_temp_kelvin") is None
-
- # test previous zero brightness received was ignored and brightness is restored
- # see if the latest color_temp value received is restored
- async_fire_mqtt_message(hass, "test_light_rgb", '{"state":"ON"}')
- light_state = hass.states.get("light.test")
- assert light_state.attributes.get("brightness") == 128
- assert light_state.attributes.get("color_temp_kelvin") == 4975 # 201 mired
-
- # A `0` brightness value is ignored when a light is turned on
- async_fire_mqtt_message(hass, "test_light_rgb", '{"state":"ON","brightness":0}')
- light_state = hass.states.get("light.test")
- assert light_state.attributes.get("brightness") == 128
-
-
-@pytest.mark.parametrize(
- "hass_config",
- [
- {
- mqtt.DOMAIN: {
- light.DOMAIN: {
- "schema": "json",
- "name": "test",
- "state_topic": "test_light_rgb",
- "command_topic": "test_light_rgb/set",
- "brightness": True,
- "color_temp": True,
"color_temp_kelvin": True,
"effect": True,
- "rgb": True,
- "xy": True,
- "hs": True,
+ "supported_color_modes": ["color_temp", "hs"],
"qos": "0",
}
}
@@ -856,9 +391,11 @@ async def test_controlling_state_color_temp_kelvin(
color_modes = [light.ColorMode.COLOR_TEMP, light.ColorMode.HS]
assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes
expected_features = (
- light.SUPPORT_EFFECT | light.SUPPORT_FLASH | light.SUPPORT_TRANSITION
+ light.LightEntityFeature.EFFECT
+ | light.LightEntityFeature.FLASH
+ | light.LightEntityFeature.TRANSITION
)
- assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == expected_features
+ assert state.attributes.get(ATTR_SUPPORTED_FEATURES) is expected_features
assert state.attributes.get("rgb_color") is None
assert state.attributes.get("brightness") is None
assert state.attributes.get("color_temp_kelvin") is None
@@ -872,7 +409,8 @@ async def test_controlling_state_color_temp_kelvin(
hass,
"test_light_rgb",
'{"state":"ON",'
- '"color":{"r":255,"g":255,"b":255},'
+ '"color":{"h": 44.098, "s": 2.43},'
+ '"color_mode": "hs",'
'"brightness":255,'
'"color_temp":155,'
'"effect":"colorloop"}',
@@ -880,12 +418,12 @@ async def test_controlling_state_color_temp_kelvin(
state = hass.states.get("light.test")
assert state.state == STATE_ON
- assert state.attributes.get("rgb_color") == (255, 255, 255)
+ assert state.attributes.get("rgb_color") == (255, 253, 249)
assert state.attributes.get("brightness") == 255
assert state.attributes.get("color_temp_kelvin") is None # rgb color has priority
assert state.attributes.get("effect") == "colorloop"
- assert state.attributes.get("xy_color") == (0.323, 0.329)
- assert state.attributes.get("hs_color") == (0.0, 0.0)
+ assert state.attributes.get("xy_color") == (0.328, 0.333)
+ assert state.attributes.get("hs_color") == (44.098, 2.43)
# Turn on the light
async_fire_mqtt_message(
@@ -894,6 +432,7 @@ async def test_controlling_state_color_temp_kelvin(
'{"state":"ON",'
'"brightness":255,'
'"color":null,'
+ '"color_mode":"color_temp",'
'"color_temp":6451,' # Kelvin
'"effect":"colorloop"}',
)
@@ -920,7 +459,7 @@ async def test_controlling_state_color_temp_kelvin(
)
],
)
-async def test_controlling_state_via_topic2(
+async def test_controlling_state_via_topic(
hass: HomeAssistant,
mqtt_mock_entry: MqttMockHAClientGenerator,
caplog: pytest.LogCaptureFixture,
@@ -981,6 +520,11 @@ async def test_controlling_state_via_topic2(
state = hass.states.get("light.test")
assert state.attributes["brightness"] == 100
+ # Zero brightness value is ignored
+ async_fire_mqtt_message(hass, "test_light_rgb", '{"state":"ON", "brightness":0}')
+ state = hass.states.get("light.test")
+ assert state.attributes["brightness"] == 100
+
# RGB color
async_fire_mqtt_message(
hass,
@@ -1083,242 +627,6 @@ async def test_controlling_state_via_topic2(
{
mqtt.DOMAIN: {
light.DOMAIN: {
- "schema": "json",
- "name": "test",
- "command_topic": "test_light_rgb/set",
- "state_topic": "test_light_rgb/set",
- "rgb": True,
- "color_temp": True,
- "brightness": True,
- }
- }
- }
- ],
-)
-async def test_controlling_the_state_with_legacy_color_handling(
- hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator
-) -> None:
- """Test state updates for lights with a legacy color handling."""
- supported_color_modes = ["color_temp", "hs"]
- await mqtt_mock_entry()
-
- state = hass.states.get("light.test")
- assert state.state == STATE_UNKNOWN
- expected_features = light.SUPPORT_FLASH | light.SUPPORT_TRANSITION
- assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == expected_features
- assert state.attributes.get("brightness") is None
- assert state.attributes.get("color_mode") is None
- assert state.attributes.get("color_temp_kelvin") is None
- assert state.attributes.get("effect") is None
- assert state.attributes.get("hs_color") is None
- assert state.attributes.get("rgb_color") is None
- assert state.attributes.get("rgbw_color") is None
- assert state.attributes.get("rgbww_color") is None
- assert state.attributes.get("supported_color_modes") == supported_color_modes
- assert state.attributes.get("xy_color") is None
- assert not state.attributes.get(ATTR_ASSUMED_STATE)
-
- for _ in range(2):
- # Returned state after the light was turned on
- # Receiving legacy color mode: rgb.
- async_fire_mqtt_message(
- hass,
- "test_light_rgb/set",
- '{ "state": "ON", "brightness": 255, "level": 100, "hue": 16,'
- '"saturation": 100, "color": { "r": 255, "g": 67, "b": 0 }, '
- '"bulb_mode": "color", "color_mode": "rgb" }',
- )
-
- state = hass.states.get("light.test")
- assert state.state == STATE_ON
- assert state.attributes.get("brightness") == 255
- assert state.attributes.get("color_mode") == "hs"
- assert state.attributes.get("color_temp_kelvin") is None
- assert state.attributes.get("effect") is None
- assert state.attributes.get("hs_color") == (15.765, 100.0)
- assert state.attributes.get("rgb_color") == (255, 67, 0)
- assert state.attributes.get("rgbw_color") is None
- assert state.attributes.get("rgbww_color") is None
- assert state.attributes.get("xy_color") == (0.674, 0.322)
-
- # Returned state after the lights color mode was changed
- # Receiving legacy color mode: color_temp
- async_fire_mqtt_message(
- hass,
- "test_light_rgb/set",
- '{ "state": "ON", "brightness": 255, "level": 100, '
- '"kelvin": 92, "color_temp": 353, "bulb_mode": "white", '
- '"color_mode": "color_temp" }',
- )
-
- state = hass.states.get("light.test")
- assert state.state == STATE_ON
- assert state.attributes.get("brightness") == 255
- assert state.attributes.get("color_mode") == "color_temp"
- assert state.attributes.get("color_temp_kelvin") == 2832
- assert state.attributes.get("effect") is None
- assert state.attributes.get("hs_color") == (28.125, 61.661)
- assert state.attributes.get("rgb_color") == (255, 171, 98)
- assert state.attributes.get("rgbw_color") is None
- assert state.attributes.get("rgbww_color") is None
- assert state.attributes.get("xy_color") == (0.512, 0.385)
-
-
-@pytest.mark.parametrize(
- "hass_config",
- [
- {
- mqtt.DOMAIN: {
- light.DOMAIN: {
- "schema": "json",
- "name": "test",
- "command_topic": "test_light_rgb/set",
- "brightness": True,
- "color_temp": True,
- "effect": True,
- "hs": True,
- "rgb": True,
- "xy": True,
- "qos": 2,
- }
- }
- }
- ],
-)
-async def test_sending_mqtt_commands_and_optimistic(
- hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator
-) -> None:
- """Test the sending of command in optimistic mode."""
- fake_state = State(
- "light.test",
- "on",
- {
- "brightness": 95,
- "hs_color": [100, 100],
- "effect": "random",
- "color_temp_kelvin": 10000,
- },
- )
- mock_restore_cache(hass, (fake_state,))
-
- mqtt_mock = await mqtt_mock_entry()
-
- state = hass.states.get("light.test")
- assert state.state == STATE_ON
- assert state.attributes.get("brightness") == 95
- assert state.attributes.get("hs_color") == (100, 100)
- assert state.attributes.get("effect") == "random"
- assert state.attributes.get("color_temp_kelvin") is None # hs_color has priority
- color_modes = [light.ColorMode.COLOR_TEMP, light.ColorMode.HS]
- assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes
- expected_features = (
- light.SUPPORT_EFFECT | light.SUPPORT_FLASH | light.SUPPORT_TRANSITION
- )
- assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == expected_features
- assert state.attributes.get(ATTR_ASSUMED_STATE)
-
- await common.async_turn_on(hass, "light.test")
-
- mqtt_mock.async_publish.assert_called_once_with(
- "test_light_rgb/set", '{"state":"ON"}', 2, False
- )
- mqtt_mock.async_publish.reset_mock()
- state = hass.states.get("light.test")
- assert state.state == STATE_ON
-
- await common.async_turn_on(hass, "light.test", color_temp_kelvin=11111)
-
- mqtt_mock.async_publish.assert_called_once_with(
- "test_light_rgb/set",
- JsonValidator('{"state": "ON", "color_temp": 90}'),
- 2,
- False,
- )
- mqtt_mock.async_publish.reset_mock()
- state = hass.states.get("light.test")
- assert state.state == STATE_ON
- assert state.attributes.get("color_mode") == light.ColorMode.COLOR_TEMP
- assert state.attributes.get("color_temp_kelvin") == 11111
-
- await common.async_turn_off(hass, "light.test")
-
- mqtt_mock.async_publish.assert_called_once_with(
- "test_light_rgb/set", '{"state":"OFF"}', 2, False
- )
- mqtt_mock.async_publish.reset_mock()
- state = hass.states.get("light.test")
- assert state.state == STATE_OFF
-
- mqtt_mock.reset_mock()
- await common.async_turn_on(
- hass, "light.test", brightness=50, xy_color=(0.123, 0.123)
- )
- mqtt_mock.async_publish.assert_called_once_with(
- "test_light_rgb/set",
- JsonValidator(
- '{"state": "ON", "color": {"r": 0, "g": 124, "b": 255,'
- ' "x": 0.14, "y": 0.133, "h": 210.824, "s": 100.0},'
- ' "brightness": 50}'
- ),
- 2,
- False,
- )
- mqtt_mock.async_publish.reset_mock()
- state = hass.states.get("light.test")
- assert state.attributes.get("color_mode") == light.ColorMode.HS
- assert state.attributes["brightness"] == 50
- assert state.attributes["hs_color"] == (210.824, 100.0)
- assert state.attributes["rgb_color"] == (0, 124, 255)
- assert state.attributes["xy_color"] == (0.14, 0.133)
-
- await common.async_turn_on(hass, "light.test", brightness=50, hs_color=(359, 78))
- mqtt_mock.async_publish.assert_called_once_with(
- "test_light_rgb/set",
- JsonValidator(
- '{"state": "ON", "color": {"r": 255, "g": 56, "b": 59,'
- ' "x": 0.654, "y": 0.301, "h": 359.0, "s": 78.0},'
- ' "brightness": 50}'
- ),
- 2,
- False,
- )
- mqtt_mock.async_publish.reset_mock()
- state = hass.states.get("light.test")
- assert state.state == STATE_ON
- assert state.attributes.get("color_mode") == light.ColorMode.HS
- assert state.attributes["brightness"] == 50
- assert state.attributes["hs_color"] == (359.0, 78.0)
- assert state.attributes["rgb_color"] == (255, 56, 59)
- assert state.attributes["xy_color"] == (0.654, 0.301)
-
- await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0))
- mqtt_mock.async_publish.assert_called_once_with(
- "test_light_rgb/set",
- JsonValidator(
- '{"state": "ON", "color": {"r": 255, "g": 128, "b": 0,'
- ' "x": 0.611, "y": 0.375, "h": 30.118, "s": 100.0}}'
- ),
- 2,
- False,
- )
- mqtt_mock.async_publish.reset_mock()
- state = hass.states.get("light.test")
- assert state.state == STATE_ON
- assert state.attributes.get("color_mode") == light.ColorMode.HS
- assert state.attributes["brightness"] == 50
- assert state.attributes["hs_color"] == (30.118, 100)
- assert state.attributes["rgb_color"] == (255, 128, 0)
- assert state.attributes["xy_color"] == (0.611, 0.375)
-
-
-@pytest.mark.parametrize(
- "hass_config",
- [
- {
- mqtt.DOMAIN: {
- light.DOMAIN: {
- "brightness": True,
- "color_mode": True,
"command_topic": "test_light_rgb/set",
"effect": True,
"name": "test",
@@ -1338,7 +646,7 @@ async def test_sending_mqtt_commands_and_optimistic(
}
],
)
-async def test_sending_mqtt_commands_and_optimistic2(
+async def test_sending_mqtt_commands_and_optimistic(
hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator
) -> None:
"""Test the sending of command in optimistic mode for a light supporting color mode."""
@@ -1560,8 +868,7 @@ async def test_sending_mqtt_commands_and_optimistic2(
"schema": "json",
"name": "test",
"command_topic": "test_light_rgb/set",
- "brightness": True,
- "hs": True,
+ "supported_color_modes": ["hs"],
}
}
}
@@ -1623,7 +930,7 @@ async def test_sending_hs_color(
"schema": "json",
"name": "test",
"command_topic": "test_light_rgb/set",
- "rgb": True,
+ "supported_color_modes": ["rgb"],
}
}
}
@@ -1678,7 +985,6 @@ async def test_sending_rgb_color_no_brightness(
{
mqtt.DOMAIN: {
light.DOMAIN: {
- "color_mode": True,
"command_topic": "test_light_rgb/set",
"name": "test",
"schema": "json",
@@ -1761,8 +1067,8 @@ async def test_sending_rgb_color_no_brightness2(
"schema": "json",
"name": "test",
"command_topic": "test_light_rgb/set",
+ "supported_color_modes": ["rgb"],
"brightness": True,
- "rgb": True,
}
}
}
@@ -1829,9 +1135,9 @@ async def test_sending_rgb_color_with_brightness(
"schema": "json",
"name": "test",
"command_topic": "test_light_rgb/set",
+ "supported_color_modes": ["rgb"],
"brightness": True,
"brightness_scale": 100,
- "rgb": True,
}
}
}
@@ -1899,9 +1205,7 @@ async def test_sending_rgb_color_with_scaled_brightness(
"schema": "json",
"name": "test",
"command_topic": "test_light_rgb/set",
- "brightness": True,
"brightness_scale": 100,
- "color_mode": True,
"supported_color_modes": ["hs", "white"],
"white_scale": 50,
}
@@ -1946,8 +1250,7 @@ async def test_sending_scaled_white(
"schema": "json",
"name": "test",
"command_topic": "test_light_rgb/set",
- "brightness": True,
- "xy": True,
+ "supported_color_modes": ["xy"],
}
}
}
@@ -1973,7 +1276,7 @@ async def test_sending_xy_color(
call(
"test_light_rgb/set",
JsonValidator(
- '{"state": "ON", "color": {"x": 0.14, "y": 0.133},'
+ '{"state": "ON", "color": {"x": 0.123, "y": 0.123},'
' "brightness": 50}'
),
0,
@@ -2190,7 +1493,7 @@ async def test_transition(
"name": "test",
"state_topic": "test_light_bright_scale",
"command_topic": "test_light_bright_scale/set",
- "brightness": True,
+ "supported_color_modes": ["brightness"],
"brightness_scale": 99,
}
}
@@ -2255,7 +1558,6 @@ async def test_brightness_scale(
"command_topic": "test_light_bright_scale/set",
"brightness": True,
"brightness_scale": 99,
- "color_mode": True,
"supported_color_modes": ["hs", "white"],
"white_scale": 50,
}
@@ -2315,8 +1617,7 @@ async def test_white_scale(
"state_topic": "test_light_rgb",
"command_topic": "test_light_rgb/set",
"brightness": True,
- "color_temp": True,
- "rgb": True,
+ "supported_color_modes": ["hs", "color_temp"],
"qos": "0",
}
}
@@ -2349,62 +1650,64 @@ async def test_invalid_values(
'{"state":"ON",'
'"color":{"r":255,"g":255,"b":255},'
'"brightness": 255,'
+ '"color_mode": "color_temp",'
'"color_temp": 100,'
'"effect": "rainbow"}',
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
- assert state.attributes.get("rgb_color") == (255, 255, 255)
+    # Color converted from color_temp to rgb
+ assert state.attributes.get("rgb_color") == (202, 218, 255)
assert state.attributes.get("brightness") == 255
- assert state.attributes.get("color_temp_kelvin") is None
+ assert state.attributes.get("color_temp_kelvin") == 10000
# Empty color value
async_fire_mqtt_message(
hass,
"test_light_rgb",
- '{"state":"ON", "color":{}}',
+ '{"state":"ON", "color":{}, "color_mode": "rgb"}',
)
# Color should not have changed
state = hass.states.get("light.test")
assert state.state == STATE_ON
- assert state.attributes.get("rgb_color") == (255, 255, 255)
+ assert state.attributes.get("rgb_color") == (202, 218, 255)
# Bad HS color values
async_fire_mqtt_message(
hass,
"test_light_rgb",
- '{"state":"ON", "color":{"h":"bad","s":"val"}}',
+ '{"state":"ON", "color":{"h":"bad","s":"val"}, "color_mode": "hs"}',
)
# Color should not have changed
state = hass.states.get("light.test")
assert state.state == STATE_ON
- assert state.attributes.get("rgb_color") == (255, 255, 255)
+ assert state.attributes.get("rgb_color") == (202, 218, 255)
# Bad RGB color values
async_fire_mqtt_message(
hass,
"test_light_rgb",
- '{"state":"ON", "color":{"r":"bad","g":"val","b":"test"}}',
+ '{"state":"ON", "color":{"r":"bad","g":"val","b":"test"}, "color_mode": "rgb"}',
)
# Color should not have changed
state = hass.states.get("light.test")
assert state.state == STATE_ON
- assert state.attributes.get("rgb_color") == (255, 255, 255)
+ assert state.attributes.get("rgb_color") == (202, 218, 255)
# Bad XY color values
async_fire_mqtt_message(
hass,
"test_light_rgb",
- '{"state":"ON", "color":{"x":"bad","y":"val"}}',
+ '{"state":"ON", "color":{"x":"bad","y":"val"}, "color_mode": "xy"}',
)
# Color should not have changed
state = hass.states.get("light.test")
assert state.state == STATE_ON
- assert state.attributes.get("rgb_color") == (255, 255, 255)
+ assert state.attributes.get("rgb_color") == (202, 218, 255)
# Bad brightness values
async_fire_mqtt_message(
@@ -2418,7 +1721,9 @@ async def test_invalid_values(
# Unset color and set a valid color temperature
async_fire_mqtt_message(
- hass, "test_light_rgb", '{"state":"ON", "color": null, "color_temp": 100}'
+ hass,
+ "test_light_rgb",
+ '{"state":"ON", "color": null, "color_temp": 100, "color_mode": "color_temp"}',
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
@@ -2426,11 +1731,14 @@ async def test_invalid_values(
# Bad color temperature
async_fire_mqtt_message(
- hass, "test_light_rgb", '{"state":"ON", "color_temp": "badValue"}'
+ hass,
+ "test_light_rgb",
+ '{"state":"ON", "color_temp": "badValue", "color_mode": "color_temp"}',
)
assert (
- "Invalid color temp value 'badValue' received for entity light.test"
- in caplog.text
+ "Invalid or incomplete color value '{'state': 'ON', 'color_temp': "
+ "'badValue', 'color_mode': 'color_temp'}' "
+ "received for entity light.test" in caplog.text
)
# Color temperature should not have changed
@@ -2927,7 +2235,6 @@ async def test_setup_manual_entity_from_yaml(
DEFAULT_CONFIG,
(
{
- "color_mode": True,
"effect": True,
"supported_color_modes": [
"color_temp",
diff --git a/tests/components/mqtt/test_util.py b/tests/components/mqtt/test_util.py
index dd72902056d..f751096bca2 100644
--- a/tests/components/mqtt/test_util.py
+++ b/tests/components/mqtt/test_util.py
@@ -4,7 +4,6 @@ import asyncio
from collections.abc import Callable
from datetime import timedelta
from pathlib import Path
-from random import getrandbits
import shutil
import tempfile
from unittest.mock import MagicMock, patch
@@ -53,7 +52,7 @@ async def test_canceling_debouncer_on_shutdown(
assert not mock_debouncer.is_set()
mqtt_client_mock.subscribe.assert_not_called()
- # Note thet the broker connection will not be disconnected gracefully
+ # Note that the broker connection will not be disconnected gracefully
await hass.async_block_till_done()
async_fire_time_changed(hass, utcnow() + timedelta(seconds=5))
await asyncio.sleep(0)
@@ -199,7 +198,6 @@ async def test_reading_non_exitisting_certificate_file() -> None:
)
-@pytest.mark.parametrize("temp_dir_prefix", "unknown")
async def test_return_default_get_file_path(
hass: HomeAssistant, mock_temp_dir: str
) -> None:
@@ -211,12 +209,8 @@ async def test_return_default_get_file_path(
and mqtt.util.get_file_path("some_option", "mydefault") == "mydefault"
)
- with patch(
- "homeassistant.components.mqtt.util.TEMP_DIR_NAME",
- f"home-assistant-mqtt-other-{getrandbits(10):03x}",
- ) as temp_dir_name:
- tempdir = Path(tempfile.gettempdir()) / temp_dir_name
- assert await hass.async_add_executor_job(_get_file_path, tempdir)
+ temp_dir = Path(tempfile.gettempdir()) / mock_temp_dir
+ assert await hass.async_add_executor_job(_get_file_path, temp_dir)
async def test_waiting_for_client_not_loaded(
diff --git a/tests/components/ollama/test_conversation.py b/tests/components/ollama/test_conversation.py
index 202f7385697..b8e299f5e77 100644
--- a/tests/components/ollama/test_conversation.py
+++ b/tests/components/ollama/test_conversation.py
@@ -18,6 +18,13 @@ from homeassistant.helpers import intent, llm
from tests.common import MockConfigEntry
+@pytest.fixture(autouse=True)
+def mock_ulid_tools():
+ """Mock generated ULIDs for tool calls."""
+ with patch("homeassistant.helpers.llm.ulid_now", return_value="mock-tool-call"):
+ yield
+
+
@pytest.mark.parametrize("agent_id", [None, "conversation.mock_title"])
async def test_chat(
hass: HomeAssistant,
@@ -205,6 +212,7 @@ async def test_function_call(
mock_tool.async_call.assert_awaited_once_with(
hass,
llm.ToolInput(
+ id="mock-tool-call",
tool_name="test_tool",
tool_args=expected_tool_args,
),
@@ -285,6 +293,7 @@ async def test_function_exception(
mock_tool.async_call.assert_awaited_once_with(
hass,
llm.ToolInput(
+ id="mock-tool-call",
tool_name="test_tool",
tool_args={"param1": "test_value"},
),
diff --git a/tests/components/onboarding/snapshots/test_views.ambr b/tests/components/onboarding/snapshots/test_views.ambr
index b57c6cf96dd..2d084bd9ade 100644
--- a/tests/components/onboarding/snapshots/test_views.ambr
+++ b/tests/components/onboarding/snapshots/test_views.ambr
@@ -19,6 +19,10 @@
'backup_id': 'abc123',
'database_included': True,
'date': '1970-01-01T00:00:00.000Z',
+ 'extra_metadata': dict({
+ 'instance_id': 'abc123',
+ 'with_automatic_settings': True,
+ }),
'failed_agent_ids': list([
]),
'folders': list([
@@ -42,6 +46,10 @@
'backup_id': 'def456',
'database_included': False,
'date': '1980-01-01T00:00:00.000Z',
+ 'extra_metadata': dict({
+ 'instance_id': 'unknown_uuid',
+ 'with_automatic_settings': True,
+ }),
'failed_agent_ids': list([
]),
'folders': list([
diff --git a/tests/components/onedrive/conftest.py b/tests/components/onedrive/conftest.py
index 205f5837ee7..0d6ee09d587 100644
--- a/tests/components/onedrive/conftest.py
+++ b/tests/components/onedrive/conftest.py
@@ -1,18 +1,9 @@
"""Fixtures for OneDrive tests."""
from collections.abc import AsyncIterator, Generator
-from html import escape
-from json import dumps
import time
from unittest.mock import AsyncMock, MagicMock, patch
-from httpx import Response
-from msgraph.generated.models.drive_item import DriveItem
-from msgraph.generated.models.drive_item_collection_response import (
- DriveItemCollectionResponse,
-)
-from msgraph.generated.models.upload_session import UploadSession
-from msgraph_core.models import LargeFileUploadSession
import pytest
from homeassistant.components.application_credentials import (
@@ -23,7 +14,13 @@ from homeassistant.components.onedrive.const import DOMAIN, OAUTH_SCOPES
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
-from .const import BACKUP_METADATA, CLIENT_ID, CLIENT_SECRET
+from .const import (
+ CLIENT_ID,
+ CLIENT_SECRET,
+ MOCK_APPROOT,
+ MOCK_BACKUP_FILE,
+ MOCK_BACKUP_FOLDER,
+)
from tests.common import MockConfigEntry
@@ -71,95 +68,46 @@ def mock_config_entry(expires_at: int, scopes: list[str]) -> MockConfigEntry:
@pytest.fixture
-def mock_adapter() -> Generator[MagicMock]:
- """Return a mocked GraphAdapter."""
- with (
- patch(
- "homeassistant.components.onedrive.config_flow.GraphRequestAdapter",
- autospec=True,
- ) as mock_adapter,
- patch(
- "homeassistant.components.onedrive.backup.GraphRequestAdapter",
- new=mock_adapter,
- ),
- ):
- adapter = mock_adapter.return_value
- adapter.get_http_response_message.return_value = Response(
- status_code=200,
- json={
- "parentReference": {"driveId": "mock_drive_id"},
- "createdBy": {"user": {"displayName": "John Doe"}},
- },
- )
- yield adapter
- adapter.send_async.return_value = LargeFileUploadSession(
- next_expected_ranges=["2-"]
- )
-
-
-@pytest.fixture(autouse=True)
-def mock_graph_client(mock_adapter: MagicMock) -> Generator[MagicMock]:
+def mock_onedrive_client_init() -> Generator[MagicMock]:
"""Return a mocked GraphServiceClient."""
with (
patch(
- "homeassistant.components.onedrive.config_flow.GraphServiceClient",
+ "homeassistant.components.onedrive.config_flow.OneDriveClient",
autospec=True,
- ) as graph_client,
+ ) as onedrive_client,
patch(
- "homeassistant.components.onedrive.GraphServiceClient",
- new=graph_client,
+ "homeassistant.components.onedrive.OneDriveClient",
+ new=onedrive_client,
),
):
- client = graph_client.return_value
+ yield onedrive_client
- client.request_adapter = mock_adapter
- drives = client.drives.by_drive_id.return_value
- drives.special.by_drive_item_id.return_value.get = AsyncMock(
- return_value=DriveItem(id="approot")
- )
+@pytest.fixture(autouse=True)
+def mock_onedrive_client(mock_onedrive_client_init: MagicMock) -> MagicMock:
+ """Return a mocked GraphServiceClient."""
+ client = mock_onedrive_client_init.return_value
+ client.get_approot.return_value = MOCK_APPROOT
+ client.create_folder.return_value = MOCK_BACKUP_FOLDER
+ client.list_drive_items.return_value = [MOCK_BACKUP_FILE]
+ client.get_drive_item.return_value = MOCK_BACKUP_FILE
- drive_items = drives.items.by_drive_item_id.return_value
- drive_items.get = AsyncMock(return_value=DriveItem(id="folder_id"))
- drive_items.children.post = AsyncMock(return_value=DriveItem(id="folder_id"))
- drive_items.children.get = AsyncMock(
- return_value=DriveItemCollectionResponse(
- value=[
- DriveItem(
- id=BACKUP_METADATA["backup_id"],
- description=escape(dumps(BACKUP_METADATA)),
- ),
- DriveItem(),
- ]
- )
- )
- drive_items.delete = AsyncMock(return_value=None)
- drive_items.create_upload_session.post = AsyncMock(
- return_value=UploadSession(upload_url="https://test.tld")
- )
- drive_items.patch = AsyncMock(return_value=None)
-
- async def generate_bytes() -> AsyncIterator[bytes]:
- """Asynchronous generator that yields bytes."""
+ class MockStreamReader:
+ async def iter_chunked(self, chunk_size: int) -> AsyncIterator[bytes]:
yield b"backup data"
- drive_items.content.get = AsyncMock(
- return_value=Response(status_code=200, content=generate_bytes())
- )
+ client.download_drive_item.return_value = MockStreamReader()
- yield client
+ return client
@pytest.fixture
-def mock_drive_items(mock_graph_client: MagicMock) -> MagicMock:
- """Return a mocked DriveItems."""
- return mock_graph_client.drives.by_drive_id.return_value.items.by_drive_item_id.return_value
-
-
-@pytest.fixture
-def mock_get_special_folder(mock_graph_client: MagicMock) -> MagicMock:
- """Mock the get special folder method."""
- return mock_graph_client.drives.by_drive_id.return_value.special.by_drive_item_id.return_value.get
+def mock_large_file_upload_client() -> Generator[AsyncMock]:
+ """Return a mocked LargeFileUploadClient upload."""
+ with patch(
+ "homeassistant.components.onedrive.backup.LargeFileUploadClient.upload"
+ ) as mock_upload:
+ yield mock_upload
@pytest.fixture
@@ -179,10 +127,3 @@ def mock_instance_id() -> Generator[AsyncMock]:
return_value="9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0",
):
yield
-
-
-@pytest.fixture(autouse=True)
-def mock_asyncio_sleep() -> Generator[AsyncMock]:
- """Mock asyncio.sleep."""
- with patch("homeassistant.components.onedrive.backup.asyncio.sleep", AsyncMock()):
- yield
diff --git a/tests/components/onedrive/const.py b/tests/components/onedrive/const.py
index c187feef30a..ee3a5ce3dc4 100644
--- a/tests/components/onedrive/const.py
+++ b/tests/components/onedrive/const.py
@@ -1,5 +1,18 @@
"""Consts for OneDrive tests."""
+from html import escape
+from json import dumps
+
+from onedrive_personal_sdk.models.items import (
+ AppRoot,
+ Contributor,
+ File,
+ Folder,
+ Hashes,
+ ItemParentReference,
+ User,
+)
+
CLIENT_ID = "1234"
CLIENT_SECRET = "5678"
@@ -17,3 +30,48 @@ BACKUP_METADATA = {
"protected": False,
"size": 34519040,
}
+
+CONTRIBUTOR = Contributor(
+ user=User(
+ display_name="John Doe",
+ id="id",
+ email="john@doe.com",
+ )
+)
+
+MOCK_APPROOT = AppRoot(
+ id="id",
+ child_count=0,
+ size=0,
+ name="name",
+ parent_reference=ItemParentReference(
+ drive_id="mock_drive_id", id="id", path="path"
+ ),
+ created_by=CONTRIBUTOR,
+)
+
+MOCK_BACKUP_FOLDER = Folder(
+ id="id",
+ name="name",
+ size=0,
+ child_count=0,
+ parent_reference=ItemParentReference(
+ drive_id="mock_drive_id", id="id", path="path"
+ ),
+ created_by=CONTRIBUTOR,
+)
+
+MOCK_BACKUP_FILE = File(
+ id="id",
+ name="23e64aec.tar",
+ size=34519040,
+ parent_reference=ItemParentReference(
+ drive_id="mock_drive_id", id="id", path="path"
+ ),
+ hashes=Hashes(
+ quick_xor_hash="hash",
+ ),
+ mime_type="application/x-tar",
+ description=escape(dumps(BACKUP_METADATA)),
+ created_by=CONTRIBUTOR,
+)
diff --git a/tests/components/onedrive/test_backup.py b/tests/components/onedrive/test_backup.py
index 0114d924e1a..0277c3da02e 100644
--- a/tests/components/onedrive/test_backup.py
+++ b/tests/components/onedrive/test_backup.py
@@ -3,15 +3,14 @@
from __future__ import annotations
from collections.abc import AsyncGenerator
-from html import escape
from io import StringIO
-from json import dumps
from unittest.mock import Mock, patch
-from httpx import TimeoutException
-from kiota_abstractions.api_error import APIError
-from msgraph.generated.models.drive_item import DriveItem
-from msgraph_core.models import LargeFileUploadSession
+from onedrive_personal_sdk.exceptions import (
+ AuthenticationError,
+ HashMismatchError,
+ OneDriveException,
+)
import pytest
from homeassistant.components.backup import DOMAIN as BACKUP_DOMAIN, AgentBackup
@@ -89,6 +88,7 @@ async def test_agents_list_backups(
"backup_id": "23e64aec",
"date": "2024-11-22T11:48:48.727189+01:00",
"database_included": True,
+ "extra_metadata": {},
"folders": [],
"homeassistant_included": True,
"homeassistant_version": "2024.12.0.dev0",
@@ -102,14 +102,10 @@ async def test_agents_list_backups(
async def test_agents_get_backup(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
- mock_drive_items: MagicMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test agent get backup."""
- mock_drive_items.get = AsyncMock(
- return_value=DriveItem(description=escape(dumps(BACKUP_METADATA)))
- )
backup_id = BACKUP_METADATA["backup_id"]
client = await hass_ws_client(hass)
await client.send_json_auto_id({"type": "backup/details", "backup_id": backup_id})
@@ -128,6 +124,7 @@ async def test_agents_get_backup(
"backup_id": "23e64aec",
"date": "2024-11-22T11:48:48.727189+01:00",
"database_included": True,
+ "extra_metadata": {},
"folders": [],
"homeassistant_included": True,
"homeassistant_version": "2024.12.0.dev0",
@@ -140,7 +137,7 @@ async def test_agents_get_backup(
async def test_agents_delete(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
- mock_drive_items: MagicMock,
+ mock_onedrive_client: MagicMock,
) -> None:
"""Test agent delete backup."""
client = await hass_ws_client(hass)
@@ -155,37 +152,15 @@ async def test_agents_delete(
assert response["success"]
assert response["result"] == {"agent_errors": {}}
- mock_drive_items.delete.assert_called_once()
-
-
-async def test_agents_delete_not_found_does_not_throw(
- hass: HomeAssistant,
- hass_ws_client: WebSocketGenerator,
- mock_drive_items: MagicMock,
-) -> None:
- """Test agent delete backup."""
- mock_drive_items.children.get = AsyncMock(return_value=[])
- client = await hass_ws_client(hass)
-
- await client.send_json_auto_id(
- {
- "type": "backup/delete",
- "backup_id": BACKUP_METADATA["backup_id"],
- }
- )
- response = await client.receive_json()
-
- assert response["success"]
- assert response["result"] == {"agent_errors": {}}
- assert mock_drive_items.delete.call_count == 0
+ mock_onedrive_client.delete_drive_item.assert_called_once()
async def test_agents_upload(
hass_client: ClientSessionGenerator,
caplog: pytest.LogCaptureFixture,
- mock_drive_items: MagicMock,
+ mock_onedrive_client: MagicMock,
+ mock_large_file_upload_client: AsyncMock,
mock_config_entry: MockConfigEntry,
- mock_adapter: MagicMock,
) -> None:
"""Test agent upload backup."""
client = await hass_client()
@@ -200,7 +175,6 @@ async def test_agents_upload(
return_value=test_backup,
),
patch("pathlib.Path.open") as mocked_open,
- patch("homeassistant.components.onedrive.backup.UPLOAD_CHUNK_SIZE", 3),
):
mocked_open.return_value.read = Mock(side_effect=[b"test", b""])
fetch_backup.return_value = test_backup
@@ -211,31 +185,22 @@ async def test_agents_upload(
assert resp.status == 201
assert f"Uploading backup {test_backup.backup_id}" in caplog.text
- mock_drive_items.create_upload_session.post.assert_called_once()
- mock_drive_items.patch.assert_called_once()
- assert mock_adapter.send_async.call_count == 2
- assert mock_adapter.method_calls[0].args[0].content == b"tes"
- assert mock_adapter.method_calls[0].args[0].headers.get("Content-Range") == {
- "bytes 0-2/34519040"
- }
- assert mock_adapter.method_calls[1].args[0].content == b"t"
- assert mock_adapter.method_calls[1].args[0].headers.get("Content-Range") == {
- "bytes 3-3/34519040"
- }
+ mock_large_file_upload_client.assert_called_once()
+ mock_onedrive_client.update_drive_item.assert_called_once()
-async def test_broken_upload_session(
+async def test_agents_upload_corrupt_upload(
hass_client: ClientSessionGenerator,
caplog: pytest.LogCaptureFixture,
- mock_drive_items: MagicMock,
+ mock_onedrive_client: MagicMock,
+ mock_large_file_upload_client: AsyncMock,
mock_config_entry: MockConfigEntry,
) -> None:
- """Test broken upload session."""
+ """Test hash validation fails."""
+ mock_large_file_upload_client.side_effect = HashMismatchError("test")
client = await hass_client()
test_backup = AgentBackup.from_dict(BACKUP_METADATA)
- mock_drive_items.create_upload_session.post = AsyncMock(return_value=None)
-
with (
patch(
"homeassistant.components.backup.manager.BackupManager.async_get_backup",
@@ -254,152 +219,18 @@ async def test_broken_upload_session(
)
assert resp.status == 201
- assert "Failed to start backup upload" in caplog.text
-
-
-@pytest.mark.parametrize(
- "side_effect",
- [
- APIError(response_status_code=500),
- TimeoutException("Timeout"),
- ],
-)
-async def test_agents_upload_errors_retried(
- hass_client: ClientSessionGenerator,
- caplog: pytest.LogCaptureFixture,
- mock_drive_items: MagicMock,
- mock_config_entry: MockConfigEntry,
- mock_adapter: MagicMock,
- side_effect: Exception,
-) -> None:
- """Test agent upload backup."""
- client = await hass_client()
- test_backup = AgentBackup.from_dict(BACKUP_METADATA)
-
- mock_adapter.send_async.side_effect = [
- side_effect,
- LargeFileUploadSession(next_expected_ranges=["2-"]),
- LargeFileUploadSession(next_expected_ranges=["2-"]),
- ]
-
- with (
- patch(
- "homeassistant.components.backup.manager.BackupManager.async_get_backup",
- ) as fetch_backup,
- patch(
- "homeassistant.components.backup.manager.read_backup",
- return_value=test_backup,
- ),
- patch("pathlib.Path.open") as mocked_open,
- patch("homeassistant.components.onedrive.backup.UPLOAD_CHUNK_SIZE", 3),
- ):
- mocked_open.return_value.read = Mock(side_effect=[b"test", b""])
- fetch_backup.return_value = test_backup
- resp = await client.post(
- f"/api/backup/upload?agent_id={DOMAIN}.{mock_config_entry.unique_id}",
- data={"file": StringIO("test")},
- )
-
- assert resp.status == 201
- assert mock_adapter.send_async.call_count == 3
assert f"Uploading backup {test_backup.backup_id}" in caplog.text
- mock_drive_items.patch.assert_called_once()
-
-
-async def test_agents_upload_4xx_errors_not_retried(
- hass_client: ClientSessionGenerator,
- caplog: pytest.LogCaptureFixture,
- mock_drive_items: MagicMock,
- mock_config_entry: MockConfigEntry,
- mock_adapter: MagicMock,
-) -> None:
- """Test agent upload backup."""
- client = await hass_client()
- test_backup = AgentBackup.from_dict(BACKUP_METADATA)
-
- mock_adapter.send_async.side_effect = APIError(response_status_code=404)
-
- with (
- patch(
- "homeassistant.components.backup.manager.BackupManager.async_get_backup",
- ) as fetch_backup,
- patch(
- "homeassistant.components.backup.manager.read_backup",
- return_value=test_backup,
- ),
- patch("pathlib.Path.open") as mocked_open,
- patch("homeassistant.components.onedrive.backup.UPLOAD_CHUNK_SIZE", 3),
- ):
- mocked_open.return_value.read = Mock(side_effect=[b"test", b""])
- fetch_backup.return_value = test_backup
- resp = await client.post(
- f"/api/backup/upload?agent_id={DOMAIN}.{mock_config_entry.unique_id}",
- data={"file": StringIO("test")},
- )
-
- assert resp.status == 201
- assert mock_adapter.send_async.call_count == 1
- assert f"Uploading backup {test_backup.backup_id}" in caplog.text
- assert mock_drive_items.patch.call_count == 0
- assert "Backup operation failed" in caplog.text
-
-
-@pytest.mark.parametrize(
- ("side_effect", "error"),
- [
- (APIError(response_status_code=500), "Backup operation failed"),
- (TimeoutException("Timeout"), "Backup operation timed out"),
- ],
-)
-async def test_agents_upload_fails_after_max_retries(
- hass_client: ClientSessionGenerator,
- caplog: pytest.LogCaptureFixture,
- mock_drive_items: MagicMock,
- mock_config_entry: MockConfigEntry,
- mock_adapter: MagicMock,
- side_effect: Exception,
- error: str,
-) -> None:
- """Test agent upload backup."""
- client = await hass_client()
- test_backup = AgentBackup.from_dict(BACKUP_METADATA)
-
- mock_adapter.send_async.side_effect = side_effect
-
- with (
- patch(
- "homeassistant.components.backup.manager.BackupManager.async_get_backup",
- ) as fetch_backup,
- patch(
- "homeassistant.components.backup.manager.read_backup",
- return_value=test_backup,
- ),
- patch("pathlib.Path.open") as mocked_open,
- patch("homeassistant.components.onedrive.backup.UPLOAD_CHUNK_SIZE", 3),
- ):
- mocked_open.return_value.read = Mock(side_effect=[b"test", b""])
- fetch_backup.return_value = test_backup
- resp = await client.post(
- f"/api/backup/upload?agent_id={DOMAIN}.{mock_config_entry.unique_id}",
- data={"file": StringIO("test")},
- )
-
- assert resp.status == 201
- assert mock_adapter.send_async.call_count == 6
- assert f"Uploading backup {test_backup.backup_id}" in caplog.text
- assert mock_drive_items.patch.call_count == 0
- assert error in caplog.text
+ mock_large_file_upload_client.assert_called_once()
+ assert mock_onedrive_client.update_drive_item.call_count == 0
+ assert "Hash validation failed, backup file might be corrupt" in caplog.text
async def test_agents_download(
hass_client: ClientSessionGenerator,
- mock_drive_items: MagicMock,
+ mock_onedrive_client: MagicMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test agent download backup."""
- mock_drive_items.get = AsyncMock(
- return_value=DriveItem(description=escape(dumps(BACKUP_METADATA)))
- )
client = await hass_client()
backup_id = BACKUP_METADATA["backup_id"]
@@ -408,29 +239,30 @@ async def test_agents_download(
)
assert resp.status == 200
assert await resp.content.read() == b"backup data"
- mock_drive_items.content.get.assert_called_once()
@pytest.mark.parametrize(
("side_effect", "error"),
[
(
- APIError(response_status_code=500),
+ OneDriveException(),
"Backup operation failed",
),
- (TimeoutException("Timeout"), "Backup operation timed out"),
+ (TimeoutError(), "Backup operation timed out"),
],
)
async def test_delete_error(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
- mock_drive_items: MagicMock,
+ mock_onedrive_client: MagicMock,
mock_config_entry: MockConfigEntry,
side_effect: Exception,
error: str,
) -> None:
"""Test error during delete."""
- mock_drive_items.delete = AsyncMock(side_effect=side_effect)
+ mock_onedrive_client.delete_drive_item.side_effect = AsyncMock(
+ side_effect=side_effect
+ )
client = await hass_ws_client(hass)
@@ -448,14 +280,35 @@ async def test_delete_error(
}
+async def test_agents_delete_not_found_does_not_throw(
+ hass: HomeAssistant,
+ hass_ws_client: WebSocketGenerator,
+ mock_onedrive_client: MagicMock,
+) -> None:
+ """Test agent delete backup."""
+ mock_onedrive_client.list_drive_items.return_value = []
+ client = await hass_ws_client(hass)
+
+ await client.send_json_auto_id(
+ {
+ "type": "backup/delete",
+ "backup_id": BACKUP_METADATA["backup_id"],
+ }
+ )
+ response = await client.receive_json()
+
+ assert response["success"]
+ assert response["result"] == {"agent_errors": {}}
+
+
async def test_agents_backup_not_found(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
- mock_drive_items: MagicMock,
+ mock_onedrive_client: MagicMock,
) -> None:
"""Test backup not found."""
- mock_drive_items.children.get = AsyncMock(return_value=[])
+ mock_onedrive_client.list_drive_items.return_value = []
backup_id = BACKUP_METADATA["backup_id"]
client = await hass_ws_client(hass)
await client.send_json_auto_id({"type": "backup/details", "backup_id": backup_id})
@@ -468,13 +321,13 @@ async def test_agents_backup_not_found(
async def test_reauth_on_403(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
- mock_drive_items: MagicMock,
+ mock_onedrive_client: MagicMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test we re-authenticate on 403."""
- mock_drive_items.children.get = AsyncMock(
- side_effect=APIError(response_status_code=403)
+ mock_onedrive_client.list_drive_items.side_effect = AuthenticationError(
+ 403, "Auth failed"
)
backup_id = BACKUP_METADATA["backup_id"]
client = await hass_ws_client(hass)
@@ -483,7 +336,7 @@ async def test_reauth_on_403(
assert response["success"]
assert response["result"]["agent_errors"] == {
- f"{DOMAIN}.{mock_config_entry.unique_id}": "Backup operation failed"
+ f"{DOMAIN}.{mock_config_entry.unique_id}": "Authentication error"
}
await hass.async_block_till_done()
diff --git a/tests/components/onedrive/test_config_flow.py b/tests/components/onedrive/test_config_flow.py
index 8be6aadfd0f..fb0d58b86c6 100644
--- a/tests/components/onedrive/test_config_flow.py
+++ b/tests/components/onedrive/test_config_flow.py
@@ -3,8 +3,7 @@
from http import HTTPStatus
from unittest.mock import AsyncMock, MagicMock
-from httpx import Response
-from kiota_abstractions.api_error import APIError
+from onedrive_personal_sdk.exceptions import OneDriveException
import pytest
from homeassistant import config_entries
@@ -20,7 +19,7 @@ from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers import config_entry_oauth2_flow
from . import setup_integration
-from .const import CLIENT_ID
+from .const import CLIENT_ID, MOCK_APPROOT
from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMocker
@@ -71,6 +70,7 @@ async def test_full_flow(
hass_client_no_auth: ClientSessionGenerator,
aioclient_mock: AiohttpClientMocker,
mock_setup_entry: AsyncMock,
+ mock_onedrive_client_init: MagicMock,
) -> None:
"""Check full flow."""
@@ -80,6 +80,10 @@ async def test_full_flow(
await _do_get_token(hass, result, hass_client_no_auth, aioclient_mock)
result = await hass.config_entries.flow.async_configure(result["flow_id"])
+ # Ensure the token callback is set up correctly
+ token_callback = mock_onedrive_client_init.call_args[0][0]
+ assert await token_callback() == "mock-access-token"
+
assert result["type"] is FlowResultType.CREATE_ENTRY
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
assert len(mock_setup_entry.mock_calls) == 1
@@ -89,25 +93,52 @@ async def test_full_flow(
assert result["data"][CONF_TOKEN]["refresh_token"] == "mock-refresh-token"
+@pytest.mark.usefixtures("current_request_with_host")
+async def test_full_flow_with_owner_not_found(
+ hass: HomeAssistant,
+ hass_client_no_auth: ClientSessionGenerator,
+ aioclient_mock: AiohttpClientMocker,
+ mock_setup_entry: AsyncMock,
+ mock_onedrive_client: MagicMock,
+) -> None:
+ """Ensure we get a default title if the drive's owner can't be read."""
+
+ mock_onedrive_client.get_approot.return_value.created_by.user = None
+
+ result = await hass.config_entries.flow.async_init(
+ DOMAIN, context={"source": config_entries.SOURCE_USER}
+ )
+ await _do_get_token(hass, result, hass_client_no_auth, aioclient_mock)
+ result = await hass.config_entries.flow.async_configure(result["flow_id"])
+
+ assert result["type"] is FlowResultType.CREATE_ENTRY
+ assert len(hass.config_entries.async_entries(DOMAIN)) == 1
+ assert len(mock_setup_entry.mock_calls) == 1
+ assert result["title"] == "OneDrive"
+ assert result["result"].unique_id == "mock_drive_id"
+ assert result["data"][CONF_TOKEN][CONF_ACCESS_TOKEN] == "mock-access-token"
+ assert result["data"][CONF_TOKEN]["refresh_token"] == "mock-refresh-token"
+
+
@pytest.mark.usefixtures("current_request_with_host")
@pytest.mark.parametrize(
("exception", "error"),
[
(Exception, "unknown"),
- (APIError, "connection_error"),
+ (OneDriveException, "connection_error"),
],
)
async def test_flow_errors(
hass: HomeAssistant,
hass_client_no_auth: ClientSessionGenerator,
aioclient_mock: AiohttpClientMocker,
- mock_adapter: MagicMock,
+ mock_onedrive_client: MagicMock,
exception: Exception,
error: str,
) -> None:
"""Test errors during flow."""
- mock_adapter.get_http_response_message.side_effect = exception
+ mock_onedrive_client.get_approot.side_effect = exception
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
@@ -172,15 +203,12 @@ async def test_reauth_flow_id_changed(
aioclient_mock: AiohttpClientMocker,
mock_setup_entry: AsyncMock,
mock_config_entry: MockConfigEntry,
- mock_adapter: MagicMock,
+ mock_onedrive_client: MagicMock,
) -> None:
"""Test that the reauth flow fails on a different drive id."""
- mock_adapter.get_http_response_message.return_value = Response(
- status_code=200,
- json={
- "parentReference": {"driveId": "other_drive_id"},
- },
- )
+ app_root = MOCK_APPROOT
+ app_root.parent_reference.drive_id = "other_drive_id"
+ mock_onedrive_client.get_approot.return_value = app_root
await setup_integration(hass, mock_config_entry)
diff --git a/tests/components/onedrive/test_init.py b/tests/components/onedrive/test_init.py
index bc5c22c3ce6..a6ad55442aa 100644
--- a/tests/components/onedrive/test_init.py
+++ b/tests/components/onedrive/test_init.py
@@ -2,7 +2,7 @@
from unittest.mock import MagicMock
-from kiota_abstractions.api_error import APIError
+from onedrive_personal_sdk.exceptions import AuthenticationError, OneDriveException
import pytest
from homeassistant.config_entries import ConfigEntryState
@@ -16,10 +16,15 @@ from tests.common import MockConfigEntry
async def test_load_unload_config_entry(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
+ mock_onedrive_client_init: MagicMock,
) -> None:
"""Test loading and unloading the integration."""
await setup_integration(hass, mock_config_entry)
+ # Ensure the token callback is set up correctly
+ token_callback = mock_onedrive_client_init.call_args[0][0]
+ assert await token_callback() == "mock-access-token"
+
assert mock_config_entry.state is ConfigEntryState.LOADED
await hass.config_entries.async_unload(mock_config_entry.entry_id)
@@ -31,82 +36,31 @@ async def test_load_unload_config_entry(
@pytest.mark.parametrize(
("side_effect", "state"),
[
- (APIError(response_status_code=403), ConfigEntryState.SETUP_ERROR),
- (APIError(response_status_code=500), ConfigEntryState.SETUP_RETRY),
+ (AuthenticationError(403, "Auth failed"), ConfigEntryState.SETUP_ERROR),
+ (OneDriveException(), ConfigEntryState.SETUP_RETRY),
],
)
async def test_approot_errors(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
- mock_get_special_folder: MagicMock,
+ mock_onedrive_client: MagicMock,
side_effect: Exception,
state: ConfigEntryState,
) -> None:
"""Test errors during approot retrieval."""
- mock_get_special_folder.side_effect = side_effect
+ mock_onedrive_client.get_approot.side_effect = side_effect
await setup_integration(hass, mock_config_entry)
assert mock_config_entry.state is state
-async def test_faulty_approot(
+async def test_get_integration_folder_error(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
- mock_get_special_folder: MagicMock,
+ mock_onedrive_client: MagicMock,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test faulty approot retrieval."""
- mock_get_special_folder.return_value = None
- await setup_integration(hass, mock_config_entry)
- assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY
- assert "Failed to get approot folder" in caplog.text
-
-
-async def test_faulty_integration_folder(
- hass: HomeAssistant,
- mock_config_entry: MockConfigEntry,
- mock_drive_items: MagicMock,
- caplog: pytest.LogCaptureFixture,
-) -> None:
- """Test faulty approot retrieval."""
- mock_drive_items.get.return_value = None
+ mock_onedrive_client.create_folder.side_effect = OneDriveException()
await setup_integration(hass, mock_config_entry)
assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY
assert "Failed to get backups_9f86d081 folder" in caplog.text
-
-
-async def test_500_error_during_backup_folder_get(
- hass: HomeAssistant,
- mock_config_entry: MockConfigEntry,
- mock_drive_items: MagicMock,
- caplog: pytest.LogCaptureFixture,
-) -> None:
- """Test error during backup folder creation."""
- mock_drive_items.get.side_effect = APIError(response_status_code=500)
- await setup_integration(hass, mock_config_entry)
- assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY
- assert "Failed to get backups_9f86d081 folder" in caplog.text
-
-
-async def test_error_during_backup_folder_creation(
- hass: HomeAssistant,
- mock_config_entry: MockConfigEntry,
- mock_drive_items: MagicMock,
- caplog: pytest.LogCaptureFixture,
-) -> None:
- """Test error during backup folder creation."""
- mock_drive_items.get.side_effect = APIError(response_status_code=404)
- mock_drive_items.children.post.side_effect = APIError()
- await setup_integration(hass, mock_config_entry)
- assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY
- assert "Failed to create backups_9f86d081 folder" in caplog.text
-
-
-async def test_successful_backup_folder_creation(
- hass: HomeAssistant,
- mock_config_entry: MockConfigEntry,
- mock_drive_items: MagicMock,
-) -> None:
- """Test successful backup folder creation."""
- mock_drive_items.get.side_effect = APIError(response_status_code=404)
- await setup_integration(hass, mock_config_entry)
- assert mock_config_entry.state is ConfigEntryState.LOADED
diff --git a/tests/components/openai_conversation/test_conversation.py b/tests/components/openai_conversation/test_conversation.py
index 9ee19cd330c..39ca1b53e28 100644
--- a/tests/components/openai_conversation/test_conversation.py
+++ b/tests/components/openai_conversation/test_conversation.py
@@ -195,6 +195,7 @@ async def test_function_call(
mock_tool.async_call.assert_awaited_once_with(
hass,
llm.ToolInput(
+ id="call_AbCdEfGhIjKlMnOpQrStUvWx",
tool_name="test_tool",
tool_args={"param1": "test_value"},
),
@@ -359,6 +360,7 @@ async def test_function_exception(
mock_tool.async_call.assert_awaited_once_with(
hass,
llm.ToolInput(
+ id="call_AbCdEfGhIjKlMnOpQrStUvWx",
tool_name="test_tool",
tool_args={"param1": "test_value"},
),
diff --git a/tests/components/recorder/db_schema_9.py b/tests/components/recorder/db_schema_9.py
index 784e326e1c3..6cf7085e279 100644
--- a/tests/components/recorder/db_schema_9.py
+++ b/tests/components/recorder/db_schema_9.py
@@ -19,8 +19,7 @@ from sqlalchemy import (
Text,
distinct,
)
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.orm import relationship
+from sqlalchemy.orm import declarative_base, relationship
from sqlalchemy.orm.session import Session
from homeassistant.core import Context, Event, EventOrigin, State, split_entity_id
diff --git a/tests/components/ring/test_camera.py b/tests/components/ring/test_camera.py
index 4b4f019fdf7..54638df9a46 100644
--- a/tests/components/ring/test_camera.py
+++ b/tests/components/ring/test_camera.py
@@ -436,9 +436,9 @@ async def test_camera_webrtc(
assert response
assert response.get("success") is False
assert response["error"]["code"] == "home_assistant_error"
- msg = "The sdp_m_line_index is required for ring webrtc streaming"
- assert msg in response["error"].get("message")
- assert msg in caplog.text
+ error_msg = f"Error negotiating stream for {front_camera_mock.name}"
+ assert error_msg in response["error"].get("message")
+ assert error_msg in caplog.text
front_camera_mock.on_webrtc_candidate.assert_called_once()
# Answer message
diff --git a/tests/components/ring/test_init.py b/tests/components/ring/test_init.py
index 7c3b93e5114..66decb5ce15 100644
--- a/tests/components/ring/test_init.py
+++ b/tests/components/ring/test_init.py
@@ -16,7 +16,7 @@ from homeassistant.components.ring.const import (
CONF_LISTEN_CREDENTIALS,
SCAN_INTERVAL,
)
-from homeassistant.components.ring.coordinator import RingEventListener
+from homeassistant.components.ring.coordinator import RingConfigEntry, RingEventListener
from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState
from homeassistant.const import CONF_DEVICE_ID, CONF_TOKEN, CONF_USERNAME
from homeassistant.core import HomeAssistant
@@ -80,12 +80,12 @@ async def test_auth_failed_on_setup(
("error_type", "log_msg"),
[
(
- RingTimeout,
- "Timeout communicating with API: ",
+ RingTimeout("Some internal error info"),
+ "Timeout communicating with Ring API",
),
(
- RingError,
- "Error communicating with API: ",
+ RingError("Some internal error info"),
+ "Error communicating with Ring API",
),
],
ids=["timeout-error", "other-error"],
@@ -95,6 +95,7 @@ async def test_error_on_setup(
mock_ring_client,
mock_config_entry: MockConfigEntry,
caplog: pytest.LogCaptureFixture,
+ freezer: FrozenDateTimeFactory,
error_type,
log_msg,
) -> None:
@@ -166,11 +167,11 @@ async def test_auth_failure_on_device_update(
[
(
RingTimeout,
- "Error fetching devices data: Timeout communicating with API: ",
+ "Error fetching devices data: Timeout communicating with Ring API",
),
(
RingError,
- "Error fetching devices data: Error communicating with API: ",
+ "Error fetching devices data: Error communicating with Ring API",
),
],
ids=["timeout-error", "other-error"],
@@ -178,7 +179,7 @@ async def test_auth_failure_on_device_update(
async def test_error_on_global_update(
hass: HomeAssistant,
mock_ring_client,
- mock_config_entry: MockConfigEntry,
+ mock_config_entry: RingConfigEntry,
freezer: FrozenDateTimeFactory,
caplog: pytest.LogCaptureFixture,
error_type,
@@ -189,15 +190,35 @@ async def test_error_on_global_update(
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
- mock_ring_client.async_update_devices.side_effect = error_type
+ coordinator = mock_config_entry.runtime_data.devices_coordinator
+ assert coordinator
- freezer.tick(SCAN_INTERVAL)
- async_fire_time_changed(hass)
- await hass.async_block_till_done(wait_background_tasks=True)
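+    # Wrap the coordinator refresh so we can assert it keeps running
+    # even when repeated errors are only logged once.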
+ with patch.object(
+ coordinator, "_async_update_data", wraps=coordinator._async_update_data
+ ) as refresh_spy:
+ error = error_type("Some internal error info 1")
+ mock_ring_client.async_update_devices.side_effect = error
- assert log_msg in caplog.text
+ freezer.tick(SCAN_INTERVAL * 2)
+ async_fire_time_changed(hass)
+ await hass.async_block_till_done(wait_background_tasks=True)
- assert hass.config_entries.async_get_entry(mock_config_entry.entry_id)
+ refresh_spy.assert_called()
+ assert coordinator.last_exception.__cause__ == error
+ assert log_msg in caplog.text
+
+ # Check log is not being spammed.
+ refresh_spy.reset_mock()
+ error2 = error_type("Some internal error info 2")
+ caplog.clear()
+ mock_ring_client.async_update_devices.side_effect = error2
+ freezer.tick(SCAN_INTERVAL * 2)
+ async_fire_time_changed(hass)
+ await hass.async_block_till_done(wait_background_tasks=True)
+
+ refresh_spy.assert_called()
+ assert coordinator.last_exception.__cause__ == error2
+ assert log_msg not in caplog.text
@pytest.mark.parametrize(
@@ -205,11 +226,11 @@ async def test_error_on_global_update(
[
(
RingTimeout,
- "Error fetching devices data: Timeout communicating with API for device Front: ",
+ "Error fetching devices data: Timeout communicating with Ring API",
),
(
RingError,
- "Error fetching devices data: Error communicating with API for device Front: ",
+ "Error fetching devices data: Error communicating with Ring API",
),
],
ids=["timeout-error", "other-error"],
@@ -218,7 +239,7 @@ async def test_error_on_device_update(
hass: HomeAssistant,
mock_ring_client,
mock_ring_devices,
- mock_config_entry: MockConfigEntry,
+ mock_config_entry: RingConfigEntry,
freezer: FrozenDateTimeFactory,
caplog: pytest.LogCaptureFixture,
error_type,
@@ -229,15 +250,36 @@ async def test_error_on_device_update(
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
- front_door_doorbell = mock_ring_devices.get_device(765432)
- front_door_doorbell.async_history.side_effect = error_type
+ coordinator = mock_config_entry.runtime_data.devices_coordinator
+ assert coordinator
- freezer.tick(SCAN_INTERVAL)
- async_fire_time_changed(hass)
- await hass.async_block_till_done(wait_background_tasks=True)
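+    # Wrap the coordinator refresh so we can assert it keeps running
+    # even when repeated errors are only logged once.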
+ with patch.object(
+ coordinator, "_async_update_data", wraps=coordinator._async_update_data
+ ) as refresh_spy:
+ error = error_type("Some internal error info 1")
+ front_door_doorbell = mock_ring_devices.get_device(765432)
+ front_door_doorbell.async_history.side_effect = error
- assert log_msg in caplog.text
- assert hass.config_entries.async_get_entry(mock_config_entry.entry_id)
+ freezer.tick(SCAN_INTERVAL * 2)
+ async_fire_time_changed(hass)
+ await hass.async_block_till_done(wait_background_tasks=True)
+
+ refresh_spy.assert_called()
+ assert coordinator.last_exception.__cause__ == error
+ assert log_msg in caplog.text
+
+ # Check log is not being spammed.
+ error2 = error_type("Some internal error info 2")
+ front_door_doorbell.async_history.side_effect = error2
+ refresh_spy.reset_mock()
+ caplog.clear()
+ freezer.tick(SCAN_INTERVAL * 2)
+ async_fire_time_changed(hass)
+ await hass.async_block_till_done(wait_background_tasks=True)
+
+ refresh_spy.assert_called()
+ assert coordinator.last_exception.__cause__ == error2
+ assert log_msg not in caplog.text
@pytest.mark.parametrize(
diff --git a/tests/components/schedule/snapshots/test_init.ambr b/tests/components/schedule/snapshots/test_init.ambr
new file mode 100644
index 00000000000..93cde4f5733
--- /dev/null
+++ b/tests/components/schedule/snapshots/test_init.ambr
@@ -0,0 +1,59 @@
+# serializer version: 1
+# name: test_service_get[schedule.from_storage-get-after-update]
+ dict({
+ 'friday': list([
+ ]),
+ 'monday': list([
+ ]),
+ 'saturday': list([
+ ]),
+ 'sunday': list([
+ ]),
+ 'thursday': list([
+ ]),
+ 'tuesday': list([
+ ]),
+ 'wednesday': list([
+ dict({
+ 'from': datetime.time(17, 0),
+ 'to': datetime.time(19, 0),
+ }),
+ ]),
+ })
+# ---
+# name: test_service_get[schedule.from_storage-get]
+ dict({
+ 'friday': list([
+ dict({
+ 'data': dict({
+ 'party_level': 'epic',
+ }),
+ 'from': datetime.time(17, 0),
+ 'to': datetime.time(23, 59, 59),
+ }),
+ ]),
+ 'monday': list([
+ ]),
+ 'saturday': list([
+ dict({
+ 'from': datetime.time(0, 0),
+ 'to': datetime.time(23, 59, 59),
+ }),
+ ]),
+ 'sunday': list([
+ dict({
+ 'data': dict({
+ 'entry': 'VIPs only',
+ }),
+ 'from': datetime.time(0, 0),
+ 'to': datetime.time(23, 59, 59, 999999),
+ }),
+ ]),
+ 'thursday': list([
+ ]),
+ 'tuesday': list([
+ ]),
+ 'wednesday': list([
+ ]),
+ })
+# ---
diff --git a/tests/components/schedule/test_init.py b/tests/components/schedule/test_init.py
index 18346122bfd..fef2ff745cd 100644
--- a/tests/components/schedule/test_init.py
+++ b/tests/components/schedule/test_init.py
@@ -8,10 +8,12 @@ from unittest.mock import patch
from freezegun.api import FrozenDateTimeFactory
import pytest
+from syrupy.assertion import SnapshotAssertion
from homeassistant.components.schedule import STORAGE_VERSION, STORAGE_VERSION_MINOR
from homeassistant.components.schedule.const import (
ATTR_NEXT_EVENT,
+ CONF_ALL_DAYS,
CONF_DATA,
CONF_FRIDAY,
CONF_FROM,
@@ -23,12 +25,14 @@ from homeassistant.components.schedule.const import (
CONF_TUESDAY,
CONF_WEDNESDAY,
DOMAIN,
+ SERVICE_GET,
)
from homeassistant.const import (
ATTR_EDITABLE,
ATTR_FRIENDLY_NAME,
ATTR_ICON,
ATTR_NAME,
+ CONF_ENTITY_ID,
CONF_ICON,
CONF_ID,
CONF_NAME,
@@ -754,3 +758,66 @@ async def test_ws_create(
assert result["party_mode"][CONF_MONDAY] == [
{CONF_FROM: "12:00:00", CONF_TO: saved_to}
]
+
+
+async def test_service_get(
+ hass: HomeAssistant,
+ hass_ws_client: WebSocketGenerator,
+ snapshot: SnapshotAssertion,
+ schedule_setup: Callable[..., Coroutine[Any, Any, bool]],
+) -> None:
+ """Test getting a single schedule via service."""
+ assert await schedule_setup()
+
+ entity_id = "schedule.from_storage"
+
+ # Test retrieving a single schedule via service call
+ service_result = await hass.services.async_call(
+ DOMAIN,
+ SERVICE_GET,
+ {
+ CONF_ENTITY_ID: entity_id,
+ },
+ blocking=True,
+ return_response=True,
+ )
+ result = service_result.get(entity_id)
+
+ assert set(result) == CONF_ALL_DAYS
+ assert result == snapshot(name=f"{entity_id}-get")
+
+ # Now we update the schedule via WS
+ client = await hass_ws_client(hass)
+ await client.send_json(
+ {
+ "id": 1,
+ "type": f"{DOMAIN}/update",
+ f"{DOMAIN}_id": entity_id.rsplit(".", maxsplit=1)[-1],
+ CONF_NAME: "Party pooper",
+ CONF_ICON: "mdi:party-pooper",
+ CONF_MONDAY: [],
+ CONF_TUESDAY: [],
+ CONF_WEDNESDAY: [{CONF_FROM: "17:00:00", CONF_TO: "19:00:00"}],
+ CONF_THURSDAY: [],
+ CONF_FRIDAY: [],
+ CONF_SATURDAY: [],
+ CONF_SUNDAY: [],
+ }
+ )
+ resp = await client.receive_json()
+ assert resp["success"]
+
+ # Test retrieving the schedule via service call after WS update
+ service_result = await hass.services.async_call(
+ DOMAIN,
+ SERVICE_GET,
+ {
+ CONF_ENTITY_ID: entity_id,
+ },
+ blocking=True,
+ return_response=True,
+ )
+ result = service_result.get(entity_id)
+
+ assert set(result) == CONF_ALL_DAYS
+ assert result == snapshot(name=f"{entity_id}-get-after-update")
diff --git a/tests/components/screenlogic/test_config_flow.py b/tests/components/screenlogic/test_config_flow.py
index 5ce777a47fa..ad8ef125dac 100644
--- a/tests/components/screenlogic/test_config_flow.py
+++ b/tests/components/screenlogic/test_config_flow.py
@@ -86,6 +86,53 @@ async def test_flow_discover_none(hass: HomeAssistant) -> None:
assert result["step_id"] == "gateway_entry"
+async def test_flow_replace_ignored(hass: HomeAssistant) -> None:
+ """Test we can replace ignored entries."""
+ entry = MockConfigEntry(
+ domain=DOMAIN,
+ unique_id="00:c0:33:01:01:01",
+ source=config_entries.SOURCE_IGNORE,
+ )
+ entry.add_to_hass(hass)
+
+ with patch(
+ "homeassistant.components.screenlogic.config_flow.discovery.async_discover",
+ return_value=[
+ {
+ SL_GATEWAY_IP: "1.1.1.1",
+ SL_GATEWAY_PORT: 80,
+ SL_GATEWAY_TYPE: 12,
+ SL_GATEWAY_SUBTYPE: 2,
+ SL_GATEWAY_NAME: "Pentair: 01-01-01",
+ },
+ ],
+ ):
+ result = await hass.config_entries.flow.async_init(
+ DOMAIN, context={"source": config_entries.SOURCE_USER}
+ )
+
+ assert result["type"] is FlowResultType.FORM
+ assert result["errors"] == {}
+ assert result["step_id"] == "gateway_select"
+
+ with patch(
+ "homeassistant.components.screenlogic.async_setup_entry",
+ return_value=True,
+ ) as mock_setup_entry:
+ result2 = await hass.config_entries.flow.async_configure(
+ result["flow_id"], user_input={GATEWAY_SELECT_KEY: "00:c0:33:01:01:01"}
+ )
+ await hass.async_block_till_done()
+
+ assert result2["type"] is FlowResultType.CREATE_ENTRY
+ assert result2["title"] == "Pentair: 01-01-01"
+ assert result2["data"] == {
+ CONF_IP_ADDRESS: "1.1.1.1",
+ CONF_PORT: 80,
+ }
+ assert len(mock_setup_entry.mock_calls) == 1
+
+
async def test_flow_discover_error(hass: HomeAssistant) -> None:
"""Test when discovery errors."""
diff --git a/tests/components/shelly/conftest.py b/tests/components/shelly/conftest.py
index 85cd558e918..b3074742949 100644
--- a/tests/components/shelly/conftest.py
+++ b/tests/components/shelly/conftest.py
@@ -2,6 +2,15 @@
from unittest.mock import AsyncMock, Mock, PropertyMock, patch
+from aioshelly.ble.const import (
+ BLE_CODE,
+ BLE_SCAN_RESULT_EVENT,
+ BLE_SCAN_RESULT_VERSION,
+ BLE_SCRIPT_NAME,
+ VAR_ACTIVE,
+ VAR_EVENT_TYPE,
+ VAR_VERSION,
+)
from aioshelly.block_device import BlockDevice, BlockUpdateType
from aioshelly.const import MODEL_1, MODEL_25, MODEL_PLUS_2PM
from aioshelly.rpc_device import RpcDevice, RpcUpdateType
@@ -180,6 +189,7 @@ MOCK_CONFIG = {
"xcounts": {"expr": None, "unit": None},
"xfreq": {"expr": None, "unit": None},
},
+ "flood:0": {"id": 0, "name": "Test name"},
"light:0": {"name": "test light_0"},
"light:1": {"name": "test light_1"},
"light:2": {"name": "test light_2"},
@@ -200,6 +210,9 @@ MOCK_CONFIG = {
"wifi": {"sta": {"enable": True}, "sta1": {"enable": False}},
"ws": {"enable": False, "server": None},
"voltmeter:100": {"xvoltage": {"unit": "ppm"}},
+ "script:1": {"id": 1, "name": "test_script.js", "enable": True},
+ "script:2": {"id": 2, "name": "test_script_2.js", "enable": False},
+ "script:3": {"id": 3, "name": BLE_SCRIPT_NAME, "enable": False},
}
@@ -326,6 +339,7 @@ MOCK_STATUS_RPC = {
"em1:1": {"act_power": 123.3},
"em1data:0": {"total_act_energy": 123456.4},
"em1data:1": {"total_act_energy": 987654.3},
+ "flood:0": {"id": 0, "alarm": False, "mute": False},
"thermostat:0": {
"id": 0,
"enable": True,
@@ -333,6 +347,15 @@ MOCK_STATUS_RPC = {
"current_C": 12.3,
"output": True,
},
+ "script:1": {
+ "id": 1,
+ "running": True,
+ "mem_used": 826,
+ "mem_peak": 1666,
+ "mem_free": 24360,
+ },
+ "script:2": {"id": 2, "running": False},
+ "script:3": {"id": 3, "running": False},
"humidity:0": {"rh": 44.4},
"sys": {
"available_updates": {
@@ -345,6 +368,28 @@ MOCK_STATUS_RPC = {
"wifi": {"rssi": -63},
}
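+# Script sources returned by the mocked script_getcode: an event-emitting
+# script, a script without events, and the aioshelly BLE scanner script.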
+MOCK_SCRIPTS = [
+    """
+function eventHandler(event, userdata) {
+ if (typeof event.component !== "string")
+ return;
+
+ let component = event.component.substring(0, 5);
+ if (component === "input") {
+ let id = Number(event.component.substring(6));
+ Shelly.emitEvent("input_event", { id: id });
+ }
+}
+
+Shelly.addEventHandler(eventHandler);
+Shelly.emitEvent("script_start");
+""",
+ 'console.log("Hello World!")',
+ BLE_CODE.replace(VAR_ACTIVE, "true")
+ .replace(VAR_EVENT_TYPE, BLE_SCAN_RESULT_EVENT)
+ .replace(VAR_VERSION, str(BLE_SCAN_RESULT_VERSION)),
+]
+
@pytest.fixture(autouse=True)
def mock_coap():
@@ -428,6 +473,9 @@ def _mock_rpc_device(version: str | None = None):
firmware_version="some fw string",
initialized=True,
connected=True,
+ script_getcode=AsyncMock(
+ side_effect=lambda script_id: {"data": MOCK_SCRIPTS[script_id - 1]}
+ ),
)
type(device).name = PropertyMock(return_value="Test name")
return device
diff --git a/tests/components/shelly/snapshots/test_binary_sensor.ambr b/tests/components/shelly/snapshots/test_binary_sensor.ambr
index 8dcb7b00a42..942bcaad8ab 100644
--- a/tests/components/shelly/snapshots/test_binary_sensor.ambr
+++ b/tests/components/shelly/snapshots/test_binary_sensor.ambr
@@ -46,3 +46,96 @@
'state': 'off',
})
# ---
+# name: test_rpc_flood_entities[binary_sensor.test_name_flood-entry]
+ EntityRegistryEntrySnapshot({
+ 'aliases': set({
+ }),
+ 'area_id': None,
+ 'capabilities': None,
+ 'config_entry_id': ,
+ 'device_class': None,
+ 'device_id': ,
+ 'disabled_by': None,
+ 'domain': 'binary_sensor',
+ 'entity_category': None,
+ 'entity_id': 'binary_sensor.test_name_flood',
+ 'has_entity_name': False,
+ 'hidden_by': None,
+ 'icon': None,
+ 'id': ,
+ 'labels': set({
+ }),
+ 'name': None,
+ 'options': dict({
+ }),
+ 'original_device_class': ,
+ 'original_icon': None,
+ 'original_name': 'Test name flood',
+ 'platform': 'shelly',
+ 'previous_unique_id': None,
+ 'supported_features': 0,
+ 'translation_key': None,
+ 'unique_id': '123456789ABC-flood:0-flood',
+ 'unit_of_measurement': None,
+ })
+# ---
+# name: test_rpc_flood_entities[binary_sensor.test_name_flood-state]
+ StateSnapshot({
+ 'attributes': ReadOnlyDict({
+ 'device_class': 'moisture',
+ 'friendly_name': 'Test name flood',
+ }),
+ 'context': ,
+ 'entity_id': 'binary_sensor.test_name_flood',
+ 'last_changed': ,
+ 'last_reported': ,
+ 'last_updated': ,
+ 'state': 'off',
+ })
+# ---
+# name: test_rpc_flood_entities[binary_sensor.test_name_mute-entry]
+ EntityRegistryEntrySnapshot({
+ 'aliases': set({
+ }),
+ 'area_id': None,
+ 'capabilities': None,
+ 'config_entry_id': ,
+ 'device_class': None,
+ 'device_id': ,
+ 'disabled_by': None,
+ 'domain': 'binary_sensor',
+ 'entity_category': ,
+ 'entity_id': 'binary_sensor.test_name_mute',
+ 'has_entity_name': False,
+ 'hidden_by': None,
+ 'icon': None,
+ 'id': ,
+ 'labels': set({
+ }),
+ 'name': None,
+ 'options': dict({
+ }),
+ 'original_device_class': None,
+ 'original_icon': None,
+ 'original_name': 'Test name mute',
+ 'platform': 'shelly',
+ 'previous_unique_id': None,
+ 'supported_features': 0,
+ 'translation_key': None,
+ 'unique_id': '123456789ABC-flood:0-mute',
+ 'unit_of_measurement': None,
+ })
+# ---
+# name: test_rpc_flood_entities[binary_sensor.test_name_mute-state]
+ StateSnapshot({
+ 'attributes': ReadOnlyDict({
+ 'friendly_name': 'Test name mute',
+ }),
+ 'context': ,
+ 'entity_id': 'binary_sensor.test_name_mute',
+ 'last_changed': ,
+ 'last_reported': ,
+ 'last_updated': ,
+ 'state': 'off',
+ })
+# ---
diff --git a/tests/components/shelly/snapshots/test_event.ambr b/tests/components/shelly/snapshots/test_event.ambr
new file mode 100644
index 00000000000..51129b7e249
--- /dev/null
+++ b/tests/components/shelly/snapshots/test_event.ambr
@@ -0,0 +1,69 @@
+# serializer version: 1
+# name: test_rpc_script_1_event[event.test_name_test_script_js-entry]
+ EntityRegistryEntrySnapshot({
+ 'aliases': set({
+ }),
+ 'area_id': None,
+ 'capabilities': dict({
+ 'event_types': list([
+ 'input_event',
+ 'script_start',
+ ]),
+ }),
+ 'config_entry_id': ,
+ 'device_class': None,
+ 'device_id': ,
+ 'disabled_by': None,
+ 'domain': 'event',
+ 'entity_category': None,
+ 'entity_id': 'event.test_name_test_script_js',
+ 'has_entity_name': True,
+ 'hidden_by': None,
+ 'icon': None,
+ 'id': ,
+ 'labels': set({
+ }),
+ 'name': None,
+ 'options': dict({
+ }),
+ 'original_device_class': None,
+ 'original_icon': None,
+ 'original_name': 'test_script.js',
+ 'platform': 'shelly',
+ 'previous_unique_id': None,
+ 'supported_features': 0,
+ 'translation_key': 'script',
+ 'unique_id': '123456789ABC-script:1',
+ 'unit_of_measurement': None,
+ })
+# ---
+# name: test_rpc_script_1_event[event.test_name_test_script_js-state]
+ StateSnapshot({
+ 'attributes': ReadOnlyDict({
+ 'event_type': None,
+ 'event_types': list([
+ 'input_event',
+ 'script_start',
+ ]),
+ 'friendly_name': 'Test name test_script.js',
+ }),
+ 'context': ,
+ 'entity_id': 'event.test_name_test_script_js',
+ 'last_changed': ,
+ 'last_reported': ,
+ 'last_updated': ,
+ 'state': 'unknown',
+ })
+# ---
+# name: test_rpc_script_2_event[event.test_name_test_script_2_js-entry]
+ None
+# ---
+# name: test_rpc_script_2_event[event.test_name_test_script_2_js-state]
+ None
+# ---
+# name: test_rpc_script_ble_event[event.test_name_aioshelly_ble_integration-entry]
+ None
+# ---
+# name: test_rpc_script_ble_event[event.test_name_aioshelly_ble_integration-state]
+ None
+# ---
diff --git a/tests/components/shelly/test_binary_sensor.py b/tests/components/shelly/test_binary_sensor.py
index bff6d199d0e..7f2d07b1ccc 100644
--- a/tests/components/shelly/test_binary_sensor.py
+++ b/tests/components/shelly/test_binary_sensor.py
@@ -496,3 +496,22 @@ async def test_blu_trv_binary_sensor_entity(
entry = entity_registry.async_get(entity_id)
assert entry == snapshot(name=f"{entity_id}-entry")
+
+
+async def test_rpc_flood_entities(
+ hass: HomeAssistant,
+ mock_rpc_device: Mock,
+ entity_registry: EntityRegistry,
+ snapshot: SnapshotAssertion,
+) -> None:
+ """Test RPC flood sensor entities."""
+ await init_integration(hass, 4)
+
+ for entity in ("flood", "mute"):
+ entity_id = f"{BINARY_SENSOR_DOMAIN}.test_name_{entity}"
+
+ state = hass.states.get(entity_id)
+ assert state == snapshot(name=f"{entity_id}-state")
+
+ entry = entity_registry.async_get(entity_id)
+ assert entry == snapshot(name=f"{entity_id}-entry")
diff --git a/tests/components/shelly/test_event.py b/tests/components/shelly/test_event.py
index 2465b016808..e184c154697 100644
--- a/tests/components/shelly/test_event.py
+++ b/tests/components/shelly/test_event.py
@@ -2,9 +2,11 @@
from unittest.mock import Mock
+from aioshelly.ble.const import BLE_SCRIPT_NAME
from aioshelly.const import MODEL_I3
import pytest
from pytest_unordered import unordered
+from syrupy import SnapshotAssertion
from homeassistant.components.event import (
ATTR_EVENT_TYPE,
@@ -64,6 +66,99 @@ async def test_rpc_button(
assert state.attributes.get(ATTR_EVENT_TYPE) == "single_push"
+@pytest.mark.usefixtures("entity_registry_enabled_by_default")
+async def test_rpc_script_1_event(
+ hass: HomeAssistant,
+ mock_rpc_device: Mock,
+ entity_registry: EntityRegistry,
+ monkeypatch: pytest.MonkeyPatch,
+ snapshot: SnapshotAssertion,
+) -> None:
+ """Test script event."""
+ await init_integration(hass, 2)
+ entity_id = "event.test_name_test_script_js"
+
+ state = hass.states.get(entity_id)
+ assert state == snapshot(name=f"{entity_id}-state")
+
+ entry = entity_registry.async_get(entity_id)
+ assert entry == snapshot(name=f"{entity_id}-entry")
+
+ inject_rpc_device_event(
+ monkeypatch,
+ mock_rpc_device,
+ {
+ "events": [
+ {
+ "component": "script:1",
+ "id": 1,
+ "event": "script_start",
+ "ts": 1668522399.2,
+ }
+ ],
+ "ts": 1668522399.2,
+ },
+ )
+ await hass.async_block_till_done()
+
+ state = hass.states.get(entity_id)
+ assert state.attributes.get(ATTR_EVENT_TYPE) == "script_start"
+
+ inject_rpc_device_event(
+ monkeypatch,
+ mock_rpc_device,
+ {
+ "events": [
+ {
+ "component": "script:1",
+ "id": 1,
+ "event": "unknown_event",
+ "ts": 1668522399.2,
+ }
+ ],
+ "ts": 1668522399.2,
+ },
+ )
+ await hass.async_block_till_done()
+
+ state = hass.states.get(entity_id)
+ assert state.attributes.get(ATTR_EVENT_TYPE) != "unknown_event"
+
+
+@pytest.mark.usefixtures("entity_registry_enabled_by_default")
+async def test_rpc_script_2_event(
+ hass: HomeAssistant,
+ entity_registry: EntityRegistry,
+ snapshot: SnapshotAssertion,
+) -> None:
+ """Test that scripts without any emitEvent will not get an event entity."""
+ await init_integration(hass, 2)
+ entity_id = "event.test_name_test_script_2_js"
+
+ state = hass.states.get(entity_id)
+ assert state == snapshot(name=f"{entity_id}-state")
+
+ entry = entity_registry.async_get(entity_id)
+ assert entry == snapshot(name=f"{entity_id}-entry")
+
+
+@pytest.mark.usefixtures("entity_registry_enabled_by_default")
+async def test_rpc_script_ble_event(
+ hass: HomeAssistant,
+ entity_registry: EntityRegistry,
+ snapshot: SnapshotAssertion,
+) -> None:
+ """Test that the ble script will not get an event entity."""
+ await init_integration(hass, 2)
+ entity_id = f"event.test_name_{BLE_SCRIPT_NAME}"
+
+ state = hass.states.get(entity_id)
+ assert state == snapshot(name=f"{entity_id}-state")
+
+ entry = entity_registry.async_get(entity_id)
+ assert entry == snapshot(name=f"{entity_id}-entry")
+
+
async def test_rpc_event_removal(
hass: HomeAssistant,
mock_rpc_device: Mock,
diff --git a/tests/components/smlight/conftest.py b/tests/components/smlight/conftest.py
index 665a55ba880..0b1bf24c19a 100644
--- a/tests/components/smlight/conftest.py
+++ b/tests/components/smlight/conftest.py
@@ -18,7 +18,8 @@ from tests.common import (
load_json_object_fixture,
)
-MOCK_HOST = "slzb-06.local"
+MOCK_DEVICE_NAME = "slzb-06"
+MOCK_HOST = "192.168.1.161"
MOCK_USERNAME = "test-user"
MOCK_PASSWORD = "test-pass"
@@ -91,7 +92,10 @@ def mock_smlight_client(request: pytest.FixtureRequest) -> Generator[MagicMock]:
"""Return the firmware version."""
fw_list = []
if kwargs.get("mode") == "zigbee":
- fw_list = load_json_array_fixture("zb_firmware.json", DOMAIN)
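+            # zb_type 0 -> coordinator firmware fixture, otherwise router fixture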
+ if kwargs.get("zb_type") == 0:
+ fw_list = load_json_array_fixture("zb_firmware.json", DOMAIN)
+ else:
+ fw_list = load_json_array_fixture("zb_firmware_router.json", DOMAIN)
else:
fw_list = load_json_array_fixture("esp_firmware.json", DOMAIN)
diff --git a/tests/components/smlight/fixtures/esp_firmware.json b/tests/components/smlight/fixtures/esp_firmware.json
index 6ea0e1a8b44..f0ee9eb989a 100644
--- a/tests/components/smlight/fixtures/esp_firmware.json
+++ b/tests/components/smlight/fixtures/esp_firmware.json
@@ -2,10 +2,10 @@
{
"mode": "ESP",
"type": null,
- "notes": "CHANGELOG (Current 2.5.2 vs. Previous 2.3.6):\\r\\nFixed incorrect device type detection for some devices\\r\\nFixed web interface not working on some devices\\r\\nFixed disabled SSID/pass fields\\r\\n",
+ "notes": "CHANGELOG (Current 2.7.5 vs. Previous 2.3.6):\\r\\nFixed incorrect device type detection for some devices\\r\\nFixed web interface not working on some devices\\r\\nFixed disabled SSID/pass fields\\r\\n",
"rev": "20240830",
"link": "https://smlight.tech/flasher/firmware/bin/slzb06x/core/slzb-06-v2.5.2-ota.bin",
- "ver": "v2.5.2",
+ "ver": "v2.7.5",
"dev": false,
"prod": true,
"baud": null
diff --git a/tests/components/smlight/fixtures/info-2.3.6.json b/tests/components/smlight/fixtures/info-2.3.6.json
new file mode 100644
index 00000000000..e3defb4410e
--- /dev/null
+++ b/tests/components/smlight/fixtures/info-2.3.6.json
@@ -0,0 +1,19 @@
+{
+ "coord_mode": 0,
+ "device_ip": "192.168.1.161",
+ "fs_total": 3456,
+ "fw_channel": "dev",
+ "legacy_api": 0,
+ "hostname": "SLZB-06p7",
+ "MAC": "AA:BB:CC:DD:EE:FF",
+ "model": "SLZB-06p7",
+ "ram_total": 296,
+ "sw_version": "v2.3.6",
+ "wifi_mode": 0,
+ "zb_flash_size": 704,
+ "zb_channel": 0,
+ "zb_hw": "CC2652P7",
+ "zb_ram_size": 152,
+ "zb_version": "20240314",
+ "zb_type": 0
+}
diff --git a/tests/components/smlight/fixtures/info-MR1.json b/tests/components/smlight/fixtures/info-MR1.json
new file mode 100644
index 00000000000..df1c0b0f789
--- /dev/null
+++ b/tests/components/smlight/fixtures/info-MR1.json
@@ -0,0 +1,41 @@
+{
+ "coord_mode": 0,
+ "device_ip": "192.168.1.161",
+ "fs_total": 3456,
+ "fw_channel": "dev",
+ "legacy_api": 0,
+ "hostname": "SLZB-MR1",
+ "MAC": "AA:BB:CC:DD:EE:FF",
+ "model": "SLZB-MR1",
+ "ram_total": 296,
+ "sw_version": "v2.7.3",
+ "wifi_mode": 0,
+ "zb_flash_size": 704,
+ "zb_channel": 0,
+ "zb_hw": "CC2652P7",
+ "zb_ram_size": 152,
+ "zb_version": "20240314",
+ "zb_type": 0,
+ "radios": [
+ {
+ "chip_index": 0,
+ "zb_hw": "EFR32MG21",
+ "zb_version": 20241127,
+ "zb_type": 0,
+ "zb_channel": 0,
+ "zb_ram_size": 152,
+ "zb_flash_size": 704,
+ "radioModes": [true, true, true, false, false]
+ },
+ {
+ "chip_index": 1,
+ "zb_hw": "CC2652P7",
+ "zb_version": 20240314,
+ "zb_type": 1,
+ "zb_channel": 0,
+ "zb_ram_size": 152,
+ "zb_flash_size": 704,
+ "radioModes": [true, true, true, false, false]
+ }
+ ]
+}
diff --git a/tests/components/smlight/fixtures/zb_firmware.json b/tests/components/smlight/fixtures/zb_firmware.json
index ca9d10f87ac..b35bb20d64e 100644
--- a/tests/components/smlight/fixtures/zb_firmware.json
+++ b/tests/components/smlight/fixtures/zb_firmware.json
@@ -3,24 +3,13 @@
"mode": "ZB",
"type": 0,
"notes": "SMLIGHT latest Coordinator release for CC2674P10 chips [16-Jul-2024]:
- +20dB TRANSMIT POWER SUPPORT;
- SDK 7.41 based (latest);
",
- "rev": "20240716",
+ "rev": "20250201",
"link": "https://smlight.tech/flasher/firmware/bin/slzb06x/zigbee/slzb06p10/znp-SLZB-06P10-20240716.bin",
- "ver": "20240716",
+ "ver": "20250201",
"dev": false,
"prod": true,
"baud": 115200
},
- {
- "mode": "ZB",
- "type": 1,
-    "notes": "SMLIGHT latest ROUTER release for CC2674P10 chips [16-Jul-2024]:\\r\\n- SDK 7.41 based (latest);\\r\\nTerms of use",
- "rev": "20240716",
- "link": "https://smlight.tech/flasher/firmware/bin/slzb06x/zigbee/slzb06p10/zr-ZR_SLZB-06P10-20240716.bin",
- "ver": "20240716",
- "dev": false,
- "prod": true,
- "baud": 0
- },
{
"mode": "ZB",
"type": 0,
diff --git a/tests/components/smlight/fixtures/zb_firmware_router.json b/tests/components/smlight/fixtures/zb_firmware_router.json
new file mode 100644
index 00000000000..320fef89347
--- /dev/null
+++ b/tests/components/smlight/fixtures/zb_firmware_router.json
@@ -0,0 +1,13 @@
+[
+ {
+ "mode": "ZB",
+ "type": 1,
+    "notes": "SMLIGHT latest ROUTER release for CC2652P7 chips [16-Jul-2024]:\\r\\n- SDK 7.41 based (latest);\\r\\nTerms of use - by downloading and installing this firmware, you agree to the aforementioned terms.",
+ "rev": "20240716",
+ "link": "https://smlight.tech/flasher/firmware/bin/slzb06x/zigbee/slzb06p10/znp-SLZB-06P10-20240716.bin",
+ "ver": "20240716",
+ "dev": false,
+ "prod": true,
+ "baud": 115200
+ }
+]
diff --git a/tests/components/smlight/snapshots/test_init.ambr b/tests/components/smlight/snapshots/test_init.ambr
index 598166e537b..457a529065c 100644
--- a/tests/components/smlight/snapshots/test_init.ambr
+++ b/tests/components/smlight/snapshots/test_init.ambr
@@ -3,7 +3,7 @@
DeviceRegistryEntrySnapshot({
'area_id': None,
'config_entries': ,
- 'configuration_url': 'http://slzb-06.local',
+ 'configuration_url': 'http://192.168.1.161',
'connections': set({
tuple(
'mac',
diff --git a/tests/components/smlight/snapshots/test_update.ambr b/tests/components/smlight/snapshots/test_update.ambr
index ed0085dcdc8..8c6757d5b91 100644
--- a/tests/components/smlight/snapshots/test_update.ambr
+++ b/tests/components/smlight/snapshots/test_update.ambr
@@ -42,7 +42,7 @@
'friendly_name': 'Mock Title Core firmware',
'in_progress': False,
'installed_version': 'v2.3.6',
- 'latest_version': 'v2.5.2',
+ 'latest_version': 'v2.7.5',
'release_summary': None,
'release_url': None,
'skipped_version': None,
@@ -101,7 +101,7 @@
'friendly_name': 'Mock Title Zigbee firmware',
'in_progress': False,
'installed_version': '20240314',
- 'latest_version': '20240716',
+ 'latest_version': '20250201',
'release_summary': None,
'release_url': None,
'skipped_version': None,
diff --git a/tests/components/smlight/test_config_flow.py b/tests/components/smlight/test_config_flow.py
index 4dad06b0fa3..a1c9c9d6945 100644
--- a/tests/components/smlight/test_config_flow.py
+++ b/tests/components/smlight/test_config_flow.py
@@ -8,19 +8,20 @@ from pysmlight.exceptions import SmlightAuthError, SmlightConnectionError
import pytest
from homeassistant.components.smlight.const import DOMAIN
-from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF
+from homeassistant.config_entries import SOURCE_DHCP, SOURCE_USER, SOURCE_ZEROCONF
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
+from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
-from .conftest import MOCK_HOST, MOCK_PASSWORD, MOCK_USERNAME
+from .conftest import MOCK_DEVICE_NAME, MOCK_HOST, MOCK_PASSWORD, MOCK_USERNAME
from tests.common import MockConfigEntry
DISCOVERY_INFO = ZeroconfServiceInfo(
- ip_address=ip_address("127.0.0.1"),
- ip_addresses=[ip_address("127.0.0.1")],
+ ip_address=ip_address("192.168.1.161"),
+ ip_addresses=[ip_address("192.168.1.161")],
hostname="slzb-06.local.",
name="mock_name",
port=6638,
@@ -29,8 +30,8 @@ DISCOVERY_INFO = ZeroconfServiceInfo(
)
DISCOVERY_INFO_LEGACY = ZeroconfServiceInfo(
- ip_address=ip_address("127.0.0.1"),
- ip_addresses=[ip_address("127.0.0.1")],
+ ip_address=ip_address("192.168.1.161"),
+ ip_addresses=[ip_address("192.168.1.161")],
hostname="slzb-06.local.",
name="mock_name",
port=6638,
@@ -52,7 +53,7 @@ async def test_user_flow(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> No
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
- CONF_HOST: MOCK_HOST,
+ CONF_HOST: "slzb-06p7.local",
},
)
@@ -76,7 +77,7 @@ async def test_zeroconf_flow(
DOMAIN, context={"source": SOURCE_ZEROCONF}, data=DISCOVERY_INFO
)
- assert result["description_placeholders"] == {"host": MOCK_HOST}
+ assert result["description_placeholders"] == {"host": MOCK_DEVICE_NAME}
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "confirm_discovery"
@@ -113,7 +114,7 @@ async def test_zeroconf_flow_auth(
DOMAIN, context={"source": SOURCE_ZEROCONF}, data=DISCOVERY_INFO
)
- assert result["description_placeholders"] == {"host": MOCK_HOST}
+ assert result["description_placeholders"] == {"host": MOCK_DEVICE_NAME}
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "confirm_discovery"
@@ -167,7 +168,7 @@ async def test_zeroconf_unsupported_abort(
DOMAIN, context={"source": SOURCE_ZEROCONF}, data=DISCOVERY_INFO
)
- assert result["description_placeholders"] == {"host": MOCK_HOST}
+ assert result["description_placeholders"] == {"host": MOCK_DEVICE_NAME}
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "confirm_discovery"
@@ -489,7 +490,7 @@ async def test_zeroconf_legacy_mac(
data=DISCOVERY_INFO_LEGACY,
)
- assert result["description_placeholders"] == {"host": MOCK_HOST}
+ assert result["description_placeholders"] == {"host": MOCK_DEVICE_NAME}
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
@@ -507,6 +508,76 @@ async def test_zeroconf_legacy_mac(
assert len(mock_smlight_client.get_info.mock_calls) == 3
+@pytest.mark.usefixtures("mock_smlight_client")
+async def test_zeroconf_updates_host(
+ hass: HomeAssistant,
+ mock_setup_entry: AsyncMock,
+ mock_config_entry: MockConfigEntry,
+) -> None:
+ """Test zeroconf discovery updates host ip."""
+ mock_config_entry.add_to_hass(hass)
+
+ service_info = DISCOVERY_INFO
+ service_info.ip_address = ip_address("192.168.1.164")
+
+ result = await hass.config_entries.flow.async_init(
+ DOMAIN, context={"source": SOURCE_ZEROCONF}, data=service_info
+ )
+
+ assert result["type"] is FlowResultType.ABORT
+ assert result["reason"] == "already_configured"
+
+ assert mock_config_entry.data[CONF_HOST] == "192.168.1.164"
+
+
+@pytest.mark.usefixtures("mock_smlight_client")
+async def test_dhcp_discovery_updates_host(
+ hass: HomeAssistant,
+ mock_setup_entry: AsyncMock,
+ mock_config_entry: MockConfigEntry,
+) -> None:
+ """Test dhcp discovery updates host ip."""
+ mock_config_entry.add_to_hass(hass)
+
+ service_info = DhcpServiceInfo(
+ ip="192.168.1.164",
+ hostname="slzb-06",
+ macaddress="aabbccddeeff",
+ )
+ result = await hass.config_entries.flow.async_init(
+ DOMAIN, context={"source": SOURCE_DHCP}, data=service_info
+ )
+
+ assert result["type"] is FlowResultType.ABORT
+ assert result["reason"] == "already_configured"
+
+ assert mock_config_entry.data[CONF_HOST] == "192.168.1.164"
+
+
+@pytest.mark.usefixtures("mock_smlight_client")
+async def test_dhcp_discovery_aborts(
+ hass: HomeAssistant,
+ mock_setup_entry: AsyncMock,
+ mock_config_entry: MockConfigEntry,
+) -> None:
+    """Test dhcp discovery aborts when the entry is already configured."""
+ mock_config_entry.add_to_hass(hass)
+
+ service_info = DhcpServiceInfo(
+ ip="192.168.1.161",
+ hostname="slzb-06",
+ macaddress="000000000000",
+ )
+ result = await hass.config_entries.flow.async_init(
+ DOMAIN, context={"source": SOURCE_DHCP}, data=service_info
+ )
+
+ assert result["type"] is FlowResultType.ABORT
+ assert result["reason"] == "already_configured"
+
+ assert mock_config_entry.data[CONF_HOST] == "192.168.1.161"
+
+
async def test_reauth_flow(
hass: HomeAssistant,
mock_smlight_client: MagicMock,
diff --git a/tests/components/smlight/test_init.py b/tests/components/smlight/test_init.py
index d0c5e494ae8..0acbab9f3a4 100644
--- a/tests/components/smlight/test_init.py
+++ b/tests/components/smlight/test_init.py
@@ -85,6 +85,7 @@ async def test_async_setup_no_internet(
freezer: FrozenDateTimeFactory,
) -> None:
"""Test we still load integration when no internet is available."""
+ side_effect = mock_smlight_client.get_firmware_version.side_effect
mock_smlight_client.get_firmware_version.side_effect = SmlightConnectionError
await setup_integration(hass, mock_config_entry_host)
@@ -101,7 +102,7 @@ async def test_async_setup_no_internet(
assert entity is not None
assert entity.state == STATE_UNKNOWN
- mock_smlight_client.get_firmware_version.side_effect = None
+ mock_smlight_client.get_firmware_version.side_effect = side_effect
freezer.tick(SCAN_FIRMWARE_INTERVAL)
async_fire_time_changed(hass)
diff --git a/tests/components/smlight/test_update.py b/tests/components/smlight/test_update.py
index 4fca7369116..632f1b5f26b 100644
--- a/tests/components/smlight/test_update.py
+++ b/tests/components/smlight/test_update.py
@@ -4,13 +4,13 @@ from datetime import timedelta
from unittest.mock import MagicMock, patch
from freezegun.api import FrozenDateTimeFactory
-from pysmlight import Firmware, Info
+from pysmlight import Firmware, Info, Radio
from pysmlight.const import Events as SmEvents
from pysmlight.sse import MessageEvent
import pytest
from syrupy.assertion import SnapshotAssertion
-from homeassistant.components.smlight.const import SCAN_FIRMWARE_INTERVAL
+from homeassistant.components.smlight.const import DOMAIN, SCAN_FIRMWARE_INTERVAL
from homeassistant.components.update import (
ATTR_IN_PROGRESS,
ATTR_INSTALLED_VERSION,
@@ -27,7 +27,12 @@ from homeassistant.helpers import entity_registry as er
from . import get_mock_event_function
from .conftest import setup_integration
-from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform
+from tests.common import (
+ MockConfigEntry,
+ async_fire_time_changed,
+ load_json_object_fixture,
+ snapshot_platform,
+)
from tests.typing import WebSocketGenerator
pytestmark = [
@@ -62,12 +67,14 @@ MOCK_FIRMWARE_FAIL = MessageEvent(
MOCK_FIRMWARE_NOTES = [
Firmware(
- ver="v2.3.6",
+ ver="v2.7.2",
mode="ESP",
notes=None,
)
]
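+# Second Zigbee radio (chip_index=1) reporting the updated firmware version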
+MOCK_RADIO = Radio(chip_index=1, zb_channel=0, zb_type=0, zb_version="20240716")
+
@pytest.fixture
def platforms() -> list[Platform]:
@@ -103,7 +110,7 @@ async def test_update_firmware(
state = hass.states.get(entity_id)
assert state.state == STATE_ON
assert state.attributes[ATTR_INSTALLED_VERSION] == "v2.3.6"
- assert state.attributes[ATTR_LATEST_VERSION] == "v2.5.2"
+ assert state.attributes[ATTR_LATEST_VERSION] == "v2.7.5"
await hass.services.async_call(
PLATFORM,
@@ -126,7 +133,7 @@ async def test_update_firmware(
event_function(MOCK_FIRMWARE_DONE)
mock_smlight_client.get_info.return_value = Info(
- sw_version="v2.5.2",
+ sw_version="v2.7.5",
)
freezer.tick(timedelta(seconds=5))
@@ -135,8 +142,50 @@ async def test_update_firmware(
state = hass.states.get(entity_id)
assert state.state == STATE_OFF
- assert state.attributes[ATTR_INSTALLED_VERSION] == "v2.5.2"
- assert state.attributes[ATTR_LATEST_VERSION] == "v2.5.2"
+ assert state.attributes[ATTR_INSTALLED_VERSION] == "v2.7.5"
+ assert state.attributes[ATTR_LATEST_VERSION] == "v2.7.5"
+
+
+async def test_update_zigbee2_firmware(
+ hass: HomeAssistant,
+ freezer: FrozenDateTimeFactory,
+ mock_config_entry: MockConfigEntry,
+ mock_smlight_client: MagicMock,
+) -> None:
+ """Test update of zigbee2 firmware where available."""
+ mock_smlight_client.get_info.return_value = Info.from_dict(
+ load_json_object_fixture("info-MR1.json", DOMAIN)
+ )
+ await setup_integration(hass, mock_config_entry)
+ entity_id = "update.mock_title_zigbee_firmware_2"
+ state = hass.states.get(entity_id)
+ assert state.state == STATE_ON
+ assert state.attributes[ATTR_INSTALLED_VERSION] == "20240314"
+ assert state.attributes[ATTR_LATEST_VERSION] == "20240716"
+
+ await hass.services.async_call(
+ PLATFORM,
+ SERVICE_INSTALL,
+ {ATTR_ENTITY_ID: entity_id},
+ blocking=False,
+ )
+
+ assert len(mock_smlight_client.fw_update.mock_calls) == 1
+
+ event_function = get_mock_event_function(mock_smlight_client, SmEvents.FW_UPD_done)
+
+ event_function(MOCK_FIRMWARE_DONE)
+ with patch(
+ "homeassistant.components.smlight.update.get_radio", return_value=MOCK_RADIO
+ ):
+ freezer.tick(timedelta(seconds=5))
+ async_fire_time_changed(hass)
+ await hass.async_block_till_done()
+
+ state = hass.states.get(entity_id)
+ assert state.state == STATE_OFF
+ assert state.attributes[ATTR_INSTALLED_VERSION] == "20240716"
+ assert state.attributes[ATTR_LATEST_VERSION] == "20240716"
async def test_update_legacy_firmware_v2(
@@ -156,7 +205,7 @@ async def test_update_legacy_firmware_v2(
state = hass.states.get(entity_id)
assert state.state == STATE_ON
assert state.attributes[ATTR_INSTALLED_VERSION] == "v2.0.18"
- assert state.attributes[ATTR_LATEST_VERSION] == "v2.5.2"
+ assert state.attributes[ATTR_LATEST_VERSION] == "v2.7.5"
await hass.services.async_call(
PLATFORM,
@@ -172,7 +221,7 @@ async def test_update_legacy_firmware_v2(
event_function(MOCK_FIRMWARE_DONE)
mock_smlight_client.get_info.return_value = Info(
- sw_version="v2.5.2",
+ sw_version="v2.7.5",
)
freezer.tick(SCAN_FIRMWARE_INTERVAL)
@@ -181,8 +230,8 @@ async def test_update_legacy_firmware_v2(
state = hass.states.get(entity_id)
assert state.state == STATE_OFF
- assert state.attributes[ATTR_INSTALLED_VERSION] == "v2.5.2"
- assert state.attributes[ATTR_LATEST_VERSION] == "v2.5.2"
+ assert state.attributes[ATTR_INSTALLED_VERSION] == "v2.7.5"
+ assert state.attributes[ATTR_LATEST_VERSION] == "v2.7.5"
async def test_update_firmware_failed(
@@ -196,7 +245,7 @@ async def test_update_firmware_failed(
state = hass.states.get(entity_id)
assert state.state == STATE_ON
assert state.attributes[ATTR_INSTALLED_VERSION] == "v2.3.6"
- assert state.attributes[ATTR_LATEST_VERSION] == "v2.5.2"
+ assert state.attributes[ATTR_LATEST_VERSION] == "v2.7.5"
await hass.services.async_call(
PLATFORM,
@@ -233,7 +282,7 @@ async def test_update_reboot_timeout(
state = hass.states.get(entity_id)
assert state.state == STATE_ON
assert state.attributes[ATTR_INSTALLED_VERSION] == "v2.3.6"
- assert state.attributes[ATTR_LATEST_VERSION] == "v2.5.2"
+ assert state.attributes[ATTR_LATEST_VERSION] == "v2.7.5"
with (
patch(
@@ -267,18 +316,29 @@ async def test_update_reboot_timeout(
mock_warning.assert_called_once()
+@pytest.mark.parametrize(
+ "entity_id",
+ [
+ "update.mock_title_core_firmware",
+ "update.mock_title_zigbee_firmware",
+ "update.mock_title_zigbee_firmware_2",
+ ],
+)
async def test_update_release_notes(
hass: HomeAssistant,
+ entity_id: str,
freezer: FrozenDateTimeFactory,
mock_config_entry: MockConfigEntry,
mock_smlight_client: MagicMock,
hass_ws_client: WebSocketGenerator,
) -> None:
"""Test firmware release notes."""
+ mock_smlight_client.get_info.return_value = Info.from_dict(
+ load_json_object_fixture("info-MR1.json", DOMAIN)
+ )
await setup_integration(hass, mock_config_entry)
ws_client = await hass_ws_client(hass)
await hass.async_block_till_done()
- entity_id = "update.mock_title_core_firmware"
state = hass.states.get(entity_id)
assert state
@@ -294,16 +354,30 @@ async def test_update_release_notes(
result = await ws_client.receive_json()
assert result["result"] is not None
+
+async def test_update_blank_release_notes(
+ hass: HomeAssistant,
+ mock_config_entry: MockConfigEntry,
+ mock_smlight_client: MagicMock,
+ hass_ws_client: WebSocketGenerator,
+) -> None:
+ """Test firmware missing release notes."""
+
+ entity_id = "update.mock_title_core_firmware"
mock_smlight_client.get_firmware_version.side_effect = None
mock_smlight_client.get_firmware_version.return_value = MOCK_FIRMWARE_NOTES
- freezer.tick(SCAN_FIRMWARE_INTERVAL)
- async_fire_time_changed(hass)
+ await setup_integration(hass, mock_config_entry)
+ ws_client = await hass_ws_client(hass)
await hass.async_block_till_done()
+ state = hass.states.get(entity_id)
+ assert state
+ assert state.state == STATE_ON
+
await ws_client.send_json(
{
- "id": 2,
+ "id": 1,
"type": "update/release_notes",
"entity_id": entity_id,
}
diff --git a/tests/components/switchbot/__init__.py b/tests/components/switchbot/__init__.py
index 9ecffd395a3..4d6794b962f 100644
--- a/tests/components/switchbot/__init__.py
+++ b/tests/components/switchbot/__init__.py
@@ -274,3 +274,23 @@ LEAK_SERVICE_INFO = BluetoothServiceInfoBleak(
connectable=False,
tx_power=-127,
)
+
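+# Remote advertisement used by test_remote; it decodes to an 86% battery and -60 dBm signal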
+REMOTE_SERVICE_INFO = BluetoothServiceInfoBleak(
+ name="Any",
+ manufacturer_data={89: b"\xaa\xbb\xcc\xdd\xee\xff"},
+ service_data={"00000d00-0000-1000-8000-00805f9b34fb": b"b V\x00"},
+ service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"],
+ address="AA:BB:CC:DD:EE:FF",
+ rssi=-60,
+ source="local",
+ advertisement=generate_advertisement_data(
+ local_name="Any",
+ manufacturer_data={89: b"\xaa\xbb\xcc\xdd\xee\xff"},
+ service_data={"00000d00-0000-1000-8000-00805f9b34fb": b"b V\x00"},
+ service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"],
+ ),
+ device=generate_ble_device("AA:BB:CC:DD:EE:FF", "Any"),
+ time=0,
+ connectable=False,
+ tx_power=-127,
+)
diff --git a/tests/components/switchbot/test_sensor.py b/tests/components/switchbot/test_sensor.py
index acf1bacc054..6a7111a054e 100644
--- a/tests/components/switchbot/test_sensor.py
+++ b/tests/components/switchbot/test_sensor.py
@@ -23,6 +23,7 @@ from homeassistant.setup import async_setup_component
from . import (
LEAK_SERVICE_INFO,
+ REMOTE_SERVICE_INFO,
WOHAND_SERVICE_INFO,
WOMETERTHPC_SERVICE_INFO,
WORELAY_SWITCH_1PM_SERVICE_INFO,
@@ -194,3 +195,42 @@ async def test_leak_sensor(hass: HomeAssistant) -> None:
assert await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
+
+
+@pytest.mark.usefixtures("entity_registry_enabled_by_default")
+async def test_remote(hass: HomeAssistant) -> None:
+ """Test setting up the remote sensor."""
+ await async_setup_component(hass, DOMAIN, {})
+ inject_bluetooth_service_info(hass, REMOTE_SERVICE_INFO)
+
+ entry = MockConfigEntry(
+ domain=DOMAIN,
+ data={
+ CONF_ADDRESS: "aa:bb:cc:dd:ee:ff",
+ CONF_NAME: "test-name",
+ CONF_SENSOR_TYPE: "remote",
+ },
+ unique_id="aabbccddeeff",
+ )
+ entry.add_to_hass(hass)
+
+ assert await hass.config_entries.async_setup(entry.entry_id)
+ await hass.async_block_till_done()
+
+ assert len(hass.states.async_all("sensor")) == 2
+
+ battery_sensor = hass.states.get("sensor.test_name_battery")
+ battery_sensor_attrs = battery_sensor.attributes
+ assert battery_sensor.state == "86"
+ assert battery_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Battery"
+ assert battery_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "%"
+ assert battery_sensor_attrs[ATTR_STATE_CLASS] == "measurement"
+
+ rssi_sensor = hass.states.get("sensor.test_name_bluetooth_signal")
+ rssi_sensor_attrs = rssi_sensor.attributes
+ assert rssi_sensor.state == "-60"
+ assert rssi_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Bluetooth signal"
+ assert rssi_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "dBm"
+
+ assert await hass.config_entries.async_unload(entry.entry_id)
+ await hass.async_block_till_done()
diff --git a/tests/components/switchbot_cloud/__init__.py b/tests/components/switchbot_cloud/__init__.py
index ce570499b3a..42fe3e4f543 100644
--- a/tests/components/switchbot_cloud/__init__.py
+++ b/tests/components/switchbot_cloud/__init__.py
@@ -7,7 +7,7 @@ from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry
-def configure_integration(hass: HomeAssistant) -> MockConfigEntry:
+async def configure_integration(hass: HomeAssistant) -> MockConfigEntry:
"""Configure the integration."""
config = {
CONF_API_TOKEN: "test-token",
@@ -17,5 +17,7 @@ def configure_integration(hass: HomeAssistant) -> MockConfigEntry:
domain=DOMAIN, data=config, entry_id="123456", unique_id="123456"
)
entry.add_to_hass(hass)
+ await hass.config_entries.async_setup(entry.entry_id)
+ await hass.async_block_till_done()
return entry
diff --git a/tests/components/switchbot_cloud/fixtures/meter_status.json b/tests/components/switchbot_cloud/fixtures/meter_status.json
new file mode 100644
index 00000000000..8b5bcd0c031
--- /dev/null
+++ b/tests/components/switchbot_cloud/fixtures/meter_status.json
@@ -0,0 +1,9 @@
+{
+ "version": "V3.3",
+ "temperature": 21.8,
+ "battery": 100,
+ "humidity": 32,
+ "deviceId": "meter-id-1",
+ "deviceType": "Meter",
+ "hubDeviceId": "test-hub-id"
+}
diff --git a/tests/components/switchbot_cloud/snapshots/test_sensor.ambr b/tests/components/switchbot_cloud/snapshots/test_sensor.ambr
new file mode 100644
index 00000000000..a9b6fb20bfb
--- /dev/null
+++ b/tests/components/switchbot_cloud/snapshots/test_sensor.ambr
@@ -0,0 +1,307 @@
+# serializer version: 1
+# name: test_meter[sensor.meter_1_battery-entry]
+ EntityRegistryEntrySnapshot({
+ 'aliases': set({
+ }),
+ 'area_id': None,
+ 'capabilities': dict({
+ 'state_class': ,
+ }),
+ 'config_entry_id': ,
+ 'device_class': None,
+ 'device_id': ,
+ 'disabled_by': None,
+ 'domain': 'sensor',
+ 'entity_category': None,
+ 'entity_id': 'sensor.meter_1_battery',
+ 'has_entity_name': True,
+ 'hidden_by': None,
+ 'icon': None,
+ 'id': ,
+ 'labels': set({
+ }),
+ 'name': None,
+ 'options': dict({
+ }),
+ 'original_device_class': ,
+ 'original_icon': None,
+ 'original_name': 'Battery',
+ 'platform': 'switchbot_cloud',
+ 'previous_unique_id': None,
+ 'supported_features': 0,
+ 'translation_key': None,
+ 'unique_id': 'meter-id-1_battery',
+ 'unit_of_measurement': '%',
+ })
+# ---
+# name: test_meter[sensor.meter_1_battery-state]
+ StateSnapshot({
+ 'attributes': ReadOnlyDict({
+ 'device_class': 'battery',
+ 'friendly_name': 'meter-1 Battery',
+ 'state_class': ,
+ 'unit_of_measurement': '%',
+ }),
+ 'context': ,
+ 'entity_id': 'sensor.meter_1_battery',
+ 'last_changed': ,
+ 'last_reported': ,
+ 'last_updated': ,
+ 'state': '100',
+ })
+# ---
+# name: test_meter[sensor.meter_1_humidity-entry]
+ EntityRegistryEntrySnapshot({
+ 'aliases': set({
+ }),
+ 'area_id': None,
+ 'capabilities': dict({
+ 'state_class': ,
+ }),
+ 'config_entry_id': ,
+ 'device_class': None,
+ 'device_id': ,
+ 'disabled_by': None,
+ 'domain': 'sensor',
+ 'entity_category': None,
+ 'entity_id': 'sensor.meter_1_humidity',
+ 'has_entity_name': True,
+ 'hidden_by': None,
+ 'icon': None,
+ 'id': ,
+ 'labels': set({
+ }),
+ 'name': None,
+ 'options': dict({
+ }),
+ 'original_device_class': ,
+ 'original_icon': None,
+ 'original_name': 'Humidity',
+ 'platform': 'switchbot_cloud',
+ 'previous_unique_id': None,
+ 'supported_features': 0,
+ 'translation_key': None,
+ 'unique_id': 'meter-id-1_humidity',
+ 'unit_of_measurement': '%',
+ })
+# ---
+# name: test_meter[sensor.meter_1_humidity-state]
+ StateSnapshot({
+ 'attributes': ReadOnlyDict({
+ 'device_class': 'humidity',
+ 'friendly_name': 'meter-1 Humidity',
+ 'state_class': ,
+ 'unit_of_measurement': '%',
+ }),
+ 'context': ,
+ 'entity_id': 'sensor.meter_1_humidity',
+ 'last_changed': ,
+ 'last_reported': ,
+ 'last_updated': ,
+ 'state': '32',
+ })
+# ---
+# name: test_meter[sensor.meter_1_temperature-entry]
+ EntityRegistryEntrySnapshot({
+ 'aliases': set({
+ }),
+ 'area_id': None,
+ 'capabilities': dict({
+ 'state_class': ,
+ }),
+ 'config_entry_id': ,
+ 'device_class': None,
+ 'device_id': ,
+ 'disabled_by': None,
+ 'domain': 'sensor',
+ 'entity_category': None,
+ 'entity_id': 'sensor.meter_1_temperature',
+ 'has_entity_name': True,
+ 'hidden_by': None,
+ 'icon': None,
+ 'id': ,
+ 'labels': set({
+ }),
+ 'name': None,
+ 'options': dict({
+ }),
+ 'original_device_class': ,
+ 'original_icon': None,
+ 'original_name': 'Temperature',
+ 'platform': 'switchbot_cloud',
+ 'previous_unique_id': None,
+ 'supported_features': 0,
+ 'translation_key': None,
+ 'unique_id': 'meter-id-1_temperature',
+ 'unit_of_measurement': ,
+ })
+# ---
+# name: test_meter[sensor.meter_1_temperature-state]
+ StateSnapshot({
+ 'attributes': ReadOnlyDict({
+ 'device_class': 'temperature',
+ 'friendly_name': 'meter-1 Temperature',
+ 'state_class': ,
+ 'unit_of_measurement': ,
+ }),
+ 'context': ,
+ 'entity_id': 'sensor.meter_1_temperature',
+ 'last_changed': ,
+ 'last_reported': ,
+ 'last_updated': ,
+ 'state': '21.8',
+ })
+# ---
+# name: test_meter_no_coordinator_data[sensor.meter_1_battery-entry]
+ EntityRegistryEntrySnapshot({
+ 'aliases': set({
+ }),
+ 'area_id': None,
+ 'capabilities': dict({
+ 'state_class':