https://github.com/home-assistant/core.git
Merge branch 'dev' into aranet-threshold-level
commit c522db9138

.github/workflows/builder.yml (vendored, 2 changes)

@@ -324,7 +324,7 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Install Cosign
-        uses: sigstore/cosign-installer@v3.7.0
+        uses: sigstore/cosign-installer@v3.8.0
         with:
           cosign-release: "v2.2.3"

.github/workflows/ci.yaml (vendored, 4 changes)

@@ -975,6 +975,7 @@ jobs:
             ${cov_params[@]} \
             -o console_output_style=count \
             -p no:sugar \
+            --exclude-warning-annotations \
             $(sed -n "${{ matrix.group }},1p" pytest_buckets.txt) \
             2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
       - name: Upload pytest output
@@ -1098,6 +1099,7 @@ jobs:
             -o console_output_style=count \
             --durations=10 \
             -p no:sugar \
+            --exclude-warning-annotations \
             --dburl=mysql://root:password@127.0.0.1/homeassistant-test \
             tests/components/history \
             tests/components/logbook \
@@ -1228,6 +1230,7 @@ jobs:
             --durations=0 \
             --durations-min=10 \
             -p no:sugar \
+            --exclude-warning-annotations \
             --dburl=postgresql://postgres:password@127.0.0.1/homeassistant-test \
             tests/components/history \
             tests/components/logbook \
@@ -1374,6 +1377,7 @@ jobs:
             --durations=0 \
             --durations-min=1 \
             -p no:sugar \
+            --exclude-warning-annotations \
             tests/components/${{ matrix.group }} \
             2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
       - name: Upload pytest output

@@ -8,7 +8,7 @@ repos:
       - id: ruff-format
         files: ^((homeassistant|pylint|script|tests)/.+)?[^/]+\.(py|pyi)$
   - repo: https://github.com/codespell-project/codespell
-    rev: v2.3.0
+    rev: v2.4.1
     hooks:
       - id: codespell
         args:

CODEOWNERS (generated, 6 changes)

@@ -731,6 +731,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/intent/ @home-assistant/core @synesthesiam
 /tests/components/intent/ @home-assistant/core @synesthesiam
 /homeassistant/components/intesishome/ @jnimmo
+/homeassistant/components/iometer/ @MaestroOnICe
+/tests/components/iometer/ @MaestroOnICe
 /homeassistant/components/ios/ @robbiet480
 /tests/components/ios/ @robbiet480
 /homeassistant/components/iotawatt/ @gtdiehl @jyavenard
@@ -765,8 +767,8 @@ build.json @home-assistant/supervisor
 /tests/components/ituran/ @shmuelzon
 /homeassistant/components/izone/ @Swamp-Ig
 /tests/components/izone/ @Swamp-Ig
-/homeassistant/components/jellyfin/ @j-stienstra @ctalkington
-/tests/components/jellyfin/ @j-stienstra @ctalkington
+/homeassistant/components/jellyfin/ @RunC0deRun @ctalkington
+/tests/components/jellyfin/ @RunC0deRun @ctalkington
 /homeassistant/components/jewish_calendar/ @tsvi
 /tests/components/jewish_calendar/ @tsvi
 /homeassistant/components/juicenet/ @jesserockz

Dockerfile (generated, 2 changes)

@@ -13,7 +13,7 @@ ENV \
 ARG QEMU_CPU

 # Install uv
-RUN pip3 install uv==0.5.21
+RUN pip3 install uv==0.5.27

 WORKDIR /usr/src

@@ -4,12 +4,11 @@ from __future__ import annotations

 from airgradient import AirGradientClient

-from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_HOST, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.aiohttp_client import async_get_clientsession

-from .coordinator import AirGradientCoordinator
+from .coordinator import AirGradientConfigEntry, AirGradientCoordinator

 PLATFORMS: list[Platform] = [
     Platform.BUTTON,
@@ -21,9 +20,6 @@ PLATFORMS: list[Platform] = [
 ]


-type AirGradientConfigEntry = ConfigEntry[AirGradientCoordinator]
-
-
 async def async_setup_entry(hass: HomeAssistant, entry: AirGradientConfigEntry) -> bool:
     """Set up Airgradient from a config entry."""

@@ -31,7 +27,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirGradientConfigEntry)
         entry.data[CONF_HOST], session=async_get_clientsession(hass)
     )

-    coordinator = AirGradientCoordinator(hass, client)
+    coordinator = AirGradientCoordinator(hass, entry, client)

     await coordinator.async_config_entry_first_refresh()

@@ -4,18 +4,17 @@ from __future__ import annotations

 from dataclasses import dataclass
 from datetime import timedelta
-from typing import TYPE_CHECKING

 from airgradient import AirGradientClient, AirGradientError, Config, Measures

+from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import device_registry as dr
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

 from .const import DOMAIN, LOGGER

-if TYPE_CHECKING:
-    from . import AirGradientConfigEntry
+type AirGradientConfigEntry = ConfigEntry[AirGradientCoordinator]


 @dataclass
@@ -32,11 +31,17 @@ class AirGradientCoordinator(DataUpdateCoordinator[AirGradientData]):
     config_entry: AirGradientConfigEntry
     _current_version: str

-    def __init__(self, hass: HomeAssistant, client: AirGradientClient) -> None:
+    def __init__(
+        self,
+        hass: HomeAssistant,
+        config_entry: AirGradientConfigEntry,
+        client: AirGradientClient,
+    ) -> None:
         """Initialize coordinator."""
         super().__init__(
             hass,
             logger=LOGGER,
+            config_entry=config_entry,
             name=f"AirGradient {client.host}",
             update_interval=timedelta(minutes=1),
         )
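
The two airgradient hunks above apply the pattern being rolled out across these integrations: the typed config-entry alias moves next to the coordinator, and the entry is passed explicitly to the DataUpdateCoordinator base class. A minimal sketch of that shape, using illustrative names (MyCoordinator, MyConfigEntry) rather than the integration's own code:

from datetime import timedelta
import logging

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

LOGGER = logging.getLogger(__name__)

# PEP 695 alias, lazily evaluated: entry.runtime_data is typed as the coordinator.
type MyConfigEntry = ConfigEntry[MyCoordinator]


class MyCoordinator(DataUpdateCoordinator[dict]):
    """Coordinator explicitly tied to its config entry."""

    config_entry: MyConfigEntry

    def __init__(self, hass: HomeAssistant, config_entry: MyConfigEntry) -> None:
        super().__init__(
            hass,
            logger=LOGGER,
            config_entry=config_entry,  # passed through instead of looked up implicitly
            name="example",
            update_interval=timedelta(minutes=1),
        )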

@@ -7,5 +7,5 @@
   "integration_type": "hub",
   "iot_class": "local_polling",
   "loggers": ["aioairq"],
-  "requirements": ["aioairq==0.4.3"]
+  "requirements": ["aioairq==0.4.4"]
 }

@@ -7,6 +7,6 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["python_homeassistant_analytics"],
-  "requirements": ["python-homeassistant-analytics==0.8.1"],
+  "requirements": ["python-homeassistant-analytics==0.9.0"],
   "single_config_entry": true
 }

@@ -39,7 +39,7 @@
       "idle": "[%key:common::state::idle%]",
       "cook": "Cooking",
       "low_water": "Low water",
-      "ota": "Ota",
+      "ota": "OTA update",
       "provisioning": "Provisioning",
       "high_temp": "High temperature",
       "device_failure": "Device failure"

@@ -272,6 +272,7 @@ class AnthropicConversationEntity(
                 continue

             tool_input = llm.ToolInput(
+                id=tool_call.id,
                 tool_name=tool_call.name,
                 tool_args=cast(dict[str, Any], tool_call.input),
             )

@@ -134,7 +134,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
         unique_id for said entry. When a new (zeroconf) service or device is
         discovered, the identifier is first used to look up if it belongs to an
         existing config entry. If that's the case, the unique_id from that entry is
-        re-used, otherwise the newly discovered identifier is used instead.
+        reused, otherwise the newly discovered identifier is used instead.
         """
         assert self.atv
         all_identifiers = set(self.atv.all_identifiers)

@@ -19,5 +19,5 @@
   "documentation": "https://www.home-assistant.io/integrations/aranet",
   "integration_type": "device",
   "iot_class": "local_push",
-  "requirements": ["aranet4==2.5.0"]
+  "requirements": ["aranet4==2.5.1"]
 }

@@ -9,6 +9,7 @@ import voluptuous as vol

 from homeassistant.components import stt
 from homeassistant.core import Context, HomeAssistant
+from homeassistant.helpers import chat_session
 from homeassistant.helpers.typing import ConfigType

 from .const import (
@@ -114,24 +115,25 @@ async def async_pipeline_from_audio_stream(

     Raises PipelineNotFound if no pipeline is found.
     """
-    pipeline_input = PipelineInput(
-        conversation_id=conversation_id,
-        device_id=device_id,
-        stt_metadata=stt_metadata,
-        stt_stream=stt_stream,
-        wake_word_phrase=wake_word_phrase,
-        conversation_extra_system_prompt=conversation_extra_system_prompt,
-        run=PipelineRun(
-            hass,
-            context=context,
-            pipeline=async_get_pipeline(hass, pipeline_id=pipeline_id),
-            start_stage=start_stage,
-            end_stage=end_stage,
-            event_callback=event_callback,
-            tts_audio_output=tts_audio_output,
-            wake_word_settings=wake_word_settings,
-            audio_settings=audio_settings or AudioSettings(),
-        ),
-    )
-    await pipeline_input.validate()
-    await pipeline_input.execute()
+    with chat_session.async_get_chat_session(hass, conversation_id) as session:
+        pipeline_input = PipelineInput(
+            conversation_id=session.conversation_id,
+            device_id=device_id,
+            stt_metadata=stt_metadata,
+            stt_stream=stt_stream,
+            wake_word_phrase=wake_word_phrase,
+            conversation_extra_system_prompt=conversation_extra_system_prompt,
+            run=PipelineRun(
+                hass,
+                context=context,
+                pipeline=async_get_pipeline(hass, pipeline_id=pipeline_id),
+                start_stage=start_stage,
+                end_stage=end_stage,
+                event_callback=event_callback,
+                tts_audio_output=tts_audio_output,
+                wake_word_settings=wake_word_settings,
+                audio_settings=audio_settings or AudioSettings(),
+            ),
+        )
+        await pipeline_input.validate()
+        await pipeline_input.execute()

@@ -624,7 +624,7 @@ class PipelineRun:
             return
         pipeline_data.pipeline_debug[self.pipeline.id][self.id].events.append(event)

-    def start(self, device_id: str | None) -> None:
+    def start(self, conversation_id: str, device_id: str | None) -> None:
         """Emit run start event."""
         self._device_id = device_id
         self._start_debug_recording_thread()
@@ -632,6 +632,7 @@ class PipelineRun:
         data = {
             "pipeline": self.pipeline.id,
             "language": self.language,
+            "conversation_id": conversation_id,
         }
         if self.runner_data is not None:
             data["runner_data"] = self.runner_data
@@ -1015,7 +1016,7 @@ class PipelineRun:
     async def recognize_intent(
         self,
         intent_input: str,
-        conversation_id: str | None,
+        conversation_id: str,
         device_id: str | None,
         conversation_extra_system_prompt: str | None,
     ) -> str:
@@ -1063,11 +1064,11 @@ class PipelineRun:
             agent_id=self.intent_agent,
             extra_system_prompt=conversation_extra_system_prompt,
         )
-        processed_locally = self.intent_agent == conversation.HOME_ASSISTANT_AGENT
-
-        agent_id = user_input.agent_id
+        agent_id = self.intent_agent
+        processed_locally = agent_id == conversation.HOME_ASSISTANT_AGENT
         intent_response: intent.IntentResponse | None = None
-        if user_input.agent_id != conversation.HOME_ASSISTANT_AGENT:
+        if not processed_locally:
             # Sentence triggers override conversation agent
             if (
                 trigger_response_text
@@ -1105,9 +1106,8 @@ class PipelineRun:
                 speech: str = intent_response.speech.get("plain", {}).get(
                     "speech", ""
                 )
-                chat_log.async_add_message(
-                    conversation.Content(
-                        role="assistant",
+                chat_log.async_add_assistant_content_without_tools(
+                    conversation.AssistantContent(
                         agent_id=agent_id,
                         content=speech,
                     )
@@ -1409,12 +1409,15 @@ def _pipeline_debug_recording_thread_proc(
         wav_writer.close()


-@dataclass
+@dataclass(kw_only=True)
 class PipelineInput:
     """Input to a pipeline run."""

     run: PipelineRun

+    conversation_id: str
+    """Identifier for the conversation."""
+
     stt_metadata: stt.SpeechMetadata | None = None
     """Metadata of stt input audio. Required when start_stage = stt."""

@@ -1430,9 +1433,6 @@ class PipelineInput:
     tts_input: str | None = None
     """Input for text-to-speech. Required when start_stage = tts."""

-    conversation_id: str | None = None
-    """Identifier for the conversation."""
-
     conversation_extra_system_prompt: str | None = None
     """Extra prompt information for the conversation agent."""

@@ -1441,7 +1441,7 @@ class PipelineInput:

     async def execute(self) -> None:
         """Run pipeline."""
-        self.run.start(device_id=self.device_id)
+        self.run.start(conversation_id=self.conversation_id, device_id=self.device_id)
         current_stage: PipelineStage | None = self.run.start_stage
         stt_audio_buffer: list[EnhancedAudioChunk] = []
         stt_processed_stream: AsyncIterable[EnhancedAudioChunk] | None = None
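
The thread through all of the pipeline hunks above is that the conversation ID is no longer threaded through by hand and recovered from the intent stage's output; it is resolved once, up front, via chat_session. A short sketch of the call shape, assuming only what the diff shows (must run inside the event loop):

from homeassistant.core import HomeAssistant
from homeassistant.helpers import chat_session


def resolve_conversation_id(hass: HomeAssistant, requested_id: str | None) -> str:
    # If requested_id is unknown or expired, the session carries a freshly
    # created ID; otherwise the existing conversation is continued.
    with chat_session.async_get_chat_session(hass, requested_id) as session:
        return session.conversation_id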

@@ -14,7 +14,11 @@ import voluptuous as vol
 from homeassistant.components import conversation, stt, tts, websocket_api
 from homeassistant.const import ATTR_DEVICE_ID, ATTR_SECONDS, MATCH_ALL
 from homeassistant.core import HomeAssistant, callback
-from homeassistant.helpers import config_validation as cv, entity_registry as er
+from homeassistant.helpers import (
+    chat_session,
+    config_validation as cv,
+    entity_registry as er,
+)
 from homeassistant.util import language as language_util

 from .const import (
@@ -145,7 +149,6 @@ async def websocket_run(

     # Arguments to PipelineInput
     input_args: dict[str, Any] = {
-        "conversation_id": msg.get("conversation_id"),
        "device_id": msg.get("device_id"),
    }

@@ -233,38 +236,42 @@ async def websocket_run(
         audio_settings=audio_settings or AudioSettings(),
     )

-    pipeline_input = PipelineInput(**input_args)
-
-    try:
-        await pipeline_input.validate()
-    except PipelineError as error:
-        # Report more specific error when possible
-        connection.send_error(msg["id"], error.code, error.message)
-        return
-
-    # Confirm subscription
-    connection.send_result(msg["id"])
-
-    run_task = hass.async_create_task(pipeline_input.execute())
-
-    # Cancel pipeline if user unsubscribes
-    connection.subscriptions[msg["id"]] = run_task.cancel
-
-    try:
-        # Task contains a timeout
-        async with asyncio.timeout(timeout):
-            await run_task
-    except TimeoutError:
-        pipeline_input.run.process_event(
-            PipelineEvent(
-                PipelineEventType.ERROR,
-                {"code": "timeout", "message": "Timeout running pipeline"},
-            )
-        )
-    finally:
-        if unregister_handler is not None:
-            # Unregister binary handler
-            unregister_handler()
+    with chat_session.async_get_chat_session(
+        hass, msg.get("conversation_id")
+    ) as session:
+        input_args["conversation_id"] = session.conversation_id
+        pipeline_input = PipelineInput(**input_args)
+
+        try:
+            await pipeline_input.validate()
+        except PipelineError as error:
+            # Report more specific error when possible
+            connection.send_error(msg["id"], error.code, error.message)
+            return
+
+        # Confirm subscription
+        connection.send_result(msg["id"])
+
+        run_task = hass.async_create_task(pipeline_input.execute())
+
+        # Cancel pipeline if user unsubscribes
+        connection.subscriptions[msg["id"]] = run_task.cancel
+
+        try:
+            # Task contains a timeout
+            async with asyncio.timeout(timeout):
+                await run_task
+        except TimeoutError:
+            pipeline_input.run.process_event(
+                PipelineEvent(
+                    PipelineEventType.ERROR,
+                    {"code": "timeout", "message": "Timeout running pipeline"},
+                )
+            )
+        finally:
+            if unregister_handler is not None:
+                # Unregister binary handler
+                unregister_handler()


 @callback

@@ -8,7 +8,7 @@ from dataclasses import dataclass
 from enum import StrEnum
 import logging
-import time
-from typing import Any, Final, Literal, final
+from typing import Any, Literal, final

 from homeassistant.components import conversation, media_source, stt, tts
 from homeassistant.components.assist_pipeline import (
@@ -28,14 +28,12 @@ from homeassistant.components.tts import (
 )
 from homeassistant.core import Context, callback
 from homeassistant.exceptions import HomeAssistantError
-from homeassistant.helpers import entity
+from homeassistant.helpers import chat_session, entity
 from homeassistant.helpers.entity import EntityDescription

 from .const import AssistSatelliteEntityFeature
 from .errors import AssistSatelliteError, SatelliteBusyError

-_CONVERSATION_TIMEOUT_SEC: Final = 5 * 60  # 5 minutes
-
 _LOGGER = logging.getLogger(__name__)

@@ -114,7 +112,6 @@ class AssistSatelliteEntity(entity.Entity):
     _attr_vad_sensitivity_entity_id: str | None = None

     _conversation_id: str | None = None
-    _conversation_id_time: float | None = None

     _run_has_tts: bool = False
     _is_announcing = False
@@ -260,8 +257,27 @@ class AssistSatelliteEntity(entity.Entity):
         else:
             self._extra_system_prompt = start_message or None

+        with (
+            # Not passing in a conversation ID will force a new one to be created
+            chat_session.async_get_chat_session(self.hass) as session,
+            conversation.async_get_chat_log(self.hass, session) as chat_log,
+        ):
+            self._conversation_id = session.conversation_id
+
+            if start_message:
+                chat_log.async_add_assistant_content_without_tools(
+                    conversation.AssistantContent(
+                        agent_id=self.entity_id, content=start_message
+                    )
+                )
+
         try:
             await self.async_start_conversation(announcement)
         except Exception:
             # Clear prompt on error
+            self._conversation_id = None
             self._extra_system_prompt = None
             raise
         finally:
             self._is_announcing = False
@@ -325,51 +341,52 @@ class AssistSatelliteEntity(entity.Entity):

         assert self._context is not None

-        # Reset conversation id if necessary
-        if self._conversation_id_time and (
-            (time.monotonic() - self._conversation_id_time) > _CONVERSATION_TIMEOUT_SEC
-        ):
-            self._conversation_id = None
-            self._conversation_id_time = None
-
         # Set entity state based on pipeline events
         self._run_has_tts = False

         assert self.platform.config_entry is not None
-        self._pipeline_task = self.platform.config_entry.async_create_background_task(
-            self.hass,
-            async_pipeline_from_audio_stream(
-                self.hass,
-                context=self._context,
-                event_callback=self._internal_on_pipeline_event,
-                stt_metadata=stt.SpeechMetadata(
-                    language="",  # set in async_pipeline_from_audio_stream
-                    format=stt.AudioFormats.WAV,
-                    codec=stt.AudioCodecs.PCM,
-                    bit_rate=stt.AudioBitRates.BITRATE_16,
-                    sample_rate=stt.AudioSampleRates.SAMPLERATE_16000,
-                    channel=stt.AudioChannels.CHANNEL_MONO,
-                ),
-                stt_stream=audio_stream,
-                pipeline_id=self._resolve_pipeline(),
-                conversation_id=self._conversation_id,
-                device_id=device_id,
-                tts_audio_output=self.tts_options,
-                wake_word_phrase=wake_word_phrase,
-                audio_settings=AudioSettings(
-                    silence_seconds=self._resolve_vad_sensitivity()
-                ),
-                start_stage=start_stage,
-                end_stage=end_stage,
-                conversation_extra_system_prompt=extra_system_prompt,
-            ),
-            f"{self.entity_id}_pipeline",
-        )
-
-        try:
-            await self._pipeline_task
-        finally:
-            self._pipeline_task = None
+        with chat_session.async_get_chat_session(
+            self.hass, self._conversation_id
+        ) as session:
+            # Store the conversation ID. If it is no longer valid, get_chat_session will reset it
+            self._conversation_id = session.conversation_id
+            self._pipeline_task = (
+                self.platform.config_entry.async_create_background_task(
+                    self.hass,
+                    async_pipeline_from_audio_stream(
+                        self.hass,
+                        context=self._context,
+                        event_callback=self._internal_on_pipeline_event,
+                        stt_metadata=stt.SpeechMetadata(
+                            language="",  # set in async_pipeline_from_audio_stream
+                            format=stt.AudioFormats.WAV,
+                            codec=stt.AudioCodecs.PCM,
+                            bit_rate=stt.AudioBitRates.BITRATE_16,
+                            sample_rate=stt.AudioSampleRates.SAMPLERATE_16000,
+                            channel=stt.AudioChannels.CHANNEL_MONO,
+                        ),
+                        stt_stream=audio_stream,
+                        pipeline_id=self._resolve_pipeline(),
+                        conversation_id=session.conversation_id,
+                        device_id=device_id,
+                        tts_audio_output=self.tts_options,
+                        wake_word_phrase=wake_word_phrase,
+                        audio_settings=AudioSettings(
+                            silence_seconds=self._resolve_vad_sensitivity()
+                        ),
+                        start_stage=start_stage,
+                        end_stage=end_stage,
+                        conversation_extra_system_prompt=extra_system_prompt,
+                    ),
+                    f"{self.entity_id}_pipeline",
+                )
+            )
+
+            try:
+                await self._pipeline_task
+            finally:
+                self._pipeline_task = None

     async def _cancel_running_pipeline(self) -> None:
         """Cancel the current pipeline if it's running."""
@@ -393,11 +410,6 @@ class AssistSatelliteEntity(entity.Entity):
             self._set_state(AssistSatelliteState.LISTENING)
         elif event.type is PipelineEventType.INTENT_START:
             self._set_state(AssistSatelliteState.PROCESSING)
-        elif event.type is PipelineEventType.INTENT_END:
-            assert event.data is not None
-            # Update timeout
-            self._conversation_id_time = time.monotonic()
-            self._conversation_id = event.data["intent_output"]["conversation_id"]
         elif event.type is PipelineEventType.TTS_START:
             # Wait until tts_response_finished is called to return to waiting state
             self._run_has_tts = True
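
The announcement hunk above also pre-seeds the chat log so the conversation agent later sees the announcement as an assistant turn. A condensed sketch of just that piece, with hass and the entity ID as stand-ins (not code from this commit):

from homeassistant.components import conversation
from homeassistant.core import HomeAssistant
from homeassistant.helpers import chat_session


def seed_announcement(hass: HomeAssistant, start_message: str) -> str:
    with (
        # No conversation ID is passed in, so a brand-new session is created
        chat_session.async_get_chat_session(hass) as session,
        conversation.async_get_chat_log(hass, session) as chat_log,
    ):
        chat_log.async_add_assistant_content_without_tools(
            conversation.AssistantContent(
                agent_id="assist_satellite.example",  # hypothetical entity id
                content=start_message,
            )
        )
        return session.conversation_id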

@@ -28,5 +28,5 @@
   "documentation": "https://www.home-assistant.io/integrations/august",
   "iot_class": "cloud_push",
   "loggers": ["pubnub", "yalexs"],
-  "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.6"]
+  "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.7"]
 }

@@ -26,15 +26,18 @@ from .manager import (
     BackupReaderWriterError,
     CoreBackupReaderWriter,
     CreateBackupEvent,
+    CreateBackupStage,
     CreateBackupState,
     IdleEvent,
     IncorrectPasswordError,
     ManagerBackup,
     NewBackup,
     RestoreBackupEvent,
+    RestoreBackupStage,
     RestoreBackupState,
     WrittenBackup,
 )
-from .models import AddonInfo, AgentBackup, Folder
+from .models import AddonInfo, AgentBackup, BackupNotFound, Folder
 from .util import suggested_filename, suggested_filename_from_name_date
 from .websocket import async_register_websocket_handlers

@@ -45,10 +48,13 @@ __all__ = [
     "BackupAgentError",
     "BackupAgentPlatformProtocol",
     "BackupManagerError",
+    "BackupNotFound",
     "BackupPlatformProtocol",
     "BackupReaderWriter",
     "BackupReaderWriterError",
     "CreateBackupEvent",
+    "CreateBackupStage",
     "CreateBackupState",
+    "Folder",
     "IdleEvent",
     "IncorrectPasswordError",
@@ -56,6 +62,7 @@ __all__ = [
     "ManagerBackup",
     "NewBackup",
     "RestoreBackupEvent",
+    "RestoreBackupStage",
     "RestoreBackupState",
     "WrittenBackup",
     "async_get_manager",

@@ -11,13 +11,7 @@ from propcache.api import cached_property

 from homeassistant.core import HomeAssistant, callback

-from .models import AgentBackup, BackupError
-
-
-class BackupAgentError(BackupError):
-    """Base class for backup agent errors."""
-
-    error_code = "backup_agent_error"
+from .models import AgentBackup, BackupAgentError


 class BackupAgentUnreachableError(BackupAgentError):
@@ -27,12 +21,6 @@ class BackupAgentUnreachableError(BackupAgentError):
     _message = "The backup agent is unreachable."


-class BackupNotFound(BackupAgentError):
-    """Raised when a backup is not found."""
-
-    error_code = "backup_not_found"
-
-
 class BackupAgent(abc.ABC):
     """Backup agent interface."""

@@ -11,9 +11,9 @@ from typing import Any
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.hassio import is_hassio

-from .agent import BackupAgent, BackupNotFound, LocalBackupAgent
+from .agent import BackupAgent, LocalBackupAgent
 from .const import DOMAIN, LOGGER
-from .models import AgentBackup
+from .models import AgentBackup, BackupNotFound
 from .util import read_backup, suggested_filename

@@ -21,6 +21,7 @@ from . import util
 from .agent import BackupAgent
 from .const import DATA_MANAGER
 from .manager import BackupManager
+from .models import BackupNotFound


 @callback
@@ -69,13 +70,16 @@ class DownloadBackupView(HomeAssistantView):
             CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar"
         }

-        if not password or not backup.protected:
-            return await self._send_backup_no_password(
-                request, headers, backup_id, agent_id, agent, manager
+        try:
+            if not password or not backup.protected:
+                return await self._send_backup_no_password(
+                    request, headers, backup_id, agent_id, agent, manager
+                )
+            return await self._send_backup_with_password(
+                hass, request, headers, backup_id, agent_id, password, agent, manager
             )
-        return await self._send_backup_with_password(
-            hass, request, headers, backup_id, agent_id, password, agent, manager
-        )
+        except BackupNotFound:
+            return Response(status=HTTPStatus.NOT_FOUND)

     async def _send_backup_no_password(
         self,

@@ -9,6 +9,7 @@ from dataclasses import dataclass, replace
 from enum import StrEnum
 import hashlib
 import io
+from itertools import chain
 import json
 from pathlib import Path, PurePath
 import shutil
@@ -50,7 +51,14 @@ from .const import (
     EXCLUDE_FROM_BACKUP,
     LOGGER,
 )
-from .models import AgentBackup, BackupError, BackupManagerError, BaseBackup, Folder
+from .models import (
+    AgentBackup,
+    BackupError,
+    BackupManagerError,
+    BackupReaderWriterError,
+    BaseBackup,
+    Folder,
+)
 from .store import BackupStore
 from .util import (
     AsyncIteratorReader,
@@ -274,12 +282,6 @@ class BackupReaderWriter(abc.ABC):
         """Get restore events after core restart."""


-class BackupReaderWriterError(BackupError):
-    """Backup reader/writer error."""
-
-    error_code = "backup_reader_writer_error"
-
-
 class IncorrectPasswordError(BackupReaderWriterError):
     """Raised when the password is incorrect."""

@@ -826,7 +828,7 @@ class BackupManager:
             password=None,
         )
         await written_backup.release_stream()
-        self.known_backups.add(written_backup.backup, agent_errors)
+        self.known_backups.add(written_backup.backup, agent_errors, [])
         return written_backup.backup.backup_id

     async def async_create_backup(
@@ -950,12 +952,23 @@ class BackupManager:
         with_automatic_settings: bool,
     ) -> NewBackup:
         """Initiate generating a backup."""
-        if not agent_ids:
-            raise BackupManagerError("At least one agent must be selected")
-        if invalid_agents := [
+        unavailable_agents = [
             agent_id for agent_id in agent_ids if agent_id not in self.backup_agents
-        ]:
-            raise BackupManagerError(f"Invalid agents selected: {invalid_agents}")
+        ]
+        if not (
+            available_agents := [
+                agent_id for agent_id in agent_ids if agent_id in self.backup_agents
+            ]
+        ):
+            raise BackupManagerError(
+                f"At least one available backup agent must be selected, got {agent_ids}"
+            )
+        if unavailable_agents:
+            LOGGER.warning(
+                "Backup agents %s are not available, will backupp to %s",
+                unavailable_agents,
+                available_agents,
+            )
         if include_all_addons and include_addons:
             raise BackupManagerError(
                 "Cannot include all addons and specify specific addons"
@@ -972,7 +985,7 @@ class BackupManager:
             new_backup,
             self._backup_task,
         ) = await self._reader_writer.async_create_backup(
-            agent_ids=agent_ids,
+            agent_ids=available_agents,
             backup_name=backup_name,
             extra_metadata=extra_metadata
             | {
@@ -991,7 +1004,9 @@ class BackupManager:
             raise BackupManagerError(str(err)) from err

         backup_finish_task = self._backup_finish_task = self.hass.async_create_task(
-            self._async_finish_backup(agent_ids, with_automatic_settings, password),
+            self._async_finish_backup(
+                available_agents, unavailable_agents, with_automatic_settings, password
+            ),
             name="backup_manager_finish_backup",
         )
         if not raise_task_error:
@@ -1008,7 +1023,11 @@ class BackupManager:
         return new_backup

     async def _async_finish_backup(
-        self, agent_ids: list[str], with_automatic_settings: bool, password: str | None
+        self,
+        available_agents: list[str],
+        unavailable_agents: list[str],
+        with_automatic_settings: bool,
+        password: str | None,
     ) -> None:
         """Finish a backup."""
         if TYPE_CHECKING:
@@ -1027,7 +1046,7 @@ class BackupManager:
             LOGGER.debug(
                 "Generated new backup with backup_id %s, uploading to agents %s",
                 written_backup.backup.backup_id,
-                agent_ids,
+                available_agents,
             )
             self.async_on_backup_event(
                 CreateBackupEvent(
@@ -1040,13 +1059,15 @@ class BackupManager:
             try:
                 agent_errors = await self._async_upload_backup(
                     backup=written_backup.backup,
-                    agent_ids=agent_ids,
+                    agent_ids=available_agents,
                     open_stream=written_backup.open_stream,
                     password=password,
                 )
             finally:
                 await written_backup.release_stream()
-            self.known_backups.add(written_backup.backup, agent_errors)
+            self.known_backups.add(
+                written_backup.backup, agent_errors, unavailable_agents
+            )
             if not agent_errors:
                 if with_automatic_settings:
                     # create backup was successful, update last_completed_automatic_backup
@@ -1055,7 +1076,7 @@ class BackupManager:
             backup_success = True

         if with_automatic_settings:
-            self._update_issue_after_agent_upload(agent_errors)
+            self._update_issue_after_agent_upload(agent_errors, unavailable_agents)
         # delete old backups more numerous than copies
         # try this regardless of agent errors above
         await delete_backups_exceeding_configured_count(self)
@@ -1215,10 +1236,10 @@ class BackupManager:
         )

     def _update_issue_after_agent_upload(
-        self, agent_errors: dict[str, Exception]
+        self, agent_errors: dict[str, Exception], unavailable_agents: list[str]
     ) -> None:
         """Update issue registry after a backup is uploaded to agents."""
-        if not agent_errors:
+        if not agent_errors and not unavailable_agents:
             ir.async_delete_issue(self.hass, DOMAIN, "automatic_backup_failed")
             return
         ir.async_create_issue(
@@ -1232,7 +1253,13 @@ class BackupManager:
             translation_key="automatic_backup_failed_upload_agents",
             translation_placeholders={
                 "failed_agents": ", ".join(
-                    self.backup_agents[agent_id].name for agent_id in agent_errors
+                    chain(
+                        (
+                            self.backup_agents[agent_id].name
+                            for agent_id in agent_errors
+                        ),
+                        unavailable_agents,
+                    )
                 )
             },
         )
@@ -1301,11 +1328,12 @@ class KnownBackups:
         self,
         backup: AgentBackup,
         agent_errors: dict[str, Exception],
+        unavailable_agents: list[str],
     ) -> None:
         """Add a backup."""
         self._backups[backup.backup_id] = KnownBackup(
             backup_id=backup.backup_id,
-            failed_agent_ids=list(agent_errors),
+            failed_agent_ids=list(chain(agent_errors, unavailable_agents)),
         )
         self._manager.store.save()

@@ -1411,7 +1439,11 @@ class CoreBackupReaderWriter(BackupReaderWriter):
         manager = self._hass.data[DATA_MANAGER]

         agent_config = manager.config.data.agents.get(self._local_agent_id)
-        if agent_config and not agent_config.protected:
+        if (
+            self._local_agent_id in agent_ids
+            and agent_config
+            and not agent_config.protected
+        ):
             password = None

         backup = AgentBackup(
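
Net effect of the manager hunks: instead of failing the whole backup when some requested agents are unknown, the manager splits the request and records the missing agents alongside upload failures. The partitioning itself is plain Python; a standalone sketch with invented agent IDs:

def partition_agents(
    agent_ids: list[str], known_agents: set[str]
) -> tuple[list[str], list[str]]:
    """Split requested agent IDs into (available, unavailable)."""
    available = [agent_id for agent_id in agent_ids if agent_id in known_agents]
    unavailable = [agent_id for agent_id in agent_ids if agent_id not in known_agents]
    return available, unavailable


# Hypothetical IDs for illustration only:
available, unavailable = partition_agents(
    ["backup.local", "backup.cloud"], known_agents={"backup.local"}
)
assert available == ["backup.local"]
assert unavailable == ["backup.cloud"]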

@@ -41,12 +41,6 @@ class BaseBackup:
     homeassistant_version: str | None  # None if homeassistant_included is False
     name: str

-    def as_frontend_json(self) -> dict:
-        """Return a dict representation of this backup for sending to frontend."""
-        return {
-            key: val for key, val in asdict(self).items() if key != "extra_metadata"
-        }
-

 @dataclass(frozen=True, kw_only=True)
 class AgentBackup(BaseBackup):
@@ -83,7 +77,25 @@ class BackupError(HomeAssistantError):
     error_code = "unknown"


+class BackupAgentError(BackupError):
+    """Base class for backup agent errors."""
+
+    error_code = "backup_agent_error"
+
+
 class BackupManagerError(BackupError):
     """Backup manager error."""

     error_code = "backup_manager_error"


+class BackupReaderWriterError(BackupError):
+    """Backup reader/writer error."""
+
+    error_code = "backup_reader_writer_error"
+
+
+class BackupNotFound(BackupAgentError, BackupManagerError):
+    """Raised when a backup is not found."""
+
+    error_code = "backup_not_found"
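
Because BackupNotFound now inherits from both BackupAgentError and BackupManagerError, callers that only know about one branch of the hierarchy still catch it. A standalone restatement to make the catch semantics concrete (base classes simplified to plain Exception):

class BackupError(Exception):
    error_code = "unknown"


class BackupAgentError(BackupError):
    error_code = "backup_agent_error"


class BackupManagerError(BackupError):
    error_code = "backup_manager_error"


class BackupNotFound(BackupAgentError, BackupManagerError):
    error_code = "backup_not_found"


try:
    raise BackupNotFound
except BackupManagerError as err:  # `except BackupAgentError` would match too
    assert err.error_code == "backup_not_found"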

@@ -4,6 +4,7 @@ from __future__ import annotations

 import asyncio
 from collections.abc import AsyncIterator, Callable, Coroutine
+from concurrent.futures import CancelledError, Future
 import copy
 from dataclasses import dataclass, replace
 from io import BytesIO
@@ -12,6 +13,7 @@ import os
 from pathlib import Path, PurePath
 from queue import SimpleQueue
 import tarfile
+import threading
 from typing import IO, Any, Self, cast

 import aiohttp
@@ -22,7 +24,6 @@ from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.util import dt as dt_util
 from homeassistant.util.json import JsonObjectType, json_loads_object
-from homeassistant.util.thread import ThreadWithException

 from .const import BUF_SIZE, LOGGER
 from .models import AddonInfo, AgentBackup, Folder
@@ -121,7 +122,7 @@ def read_backup(backup_path: Path) -> AgentBackup:
 def suggested_filename_from_name_date(name: str, date_str: str) -> str:
     """Suggest a filename for the backup."""
     date = dt_util.parse_datetime(date_str, raise_on_error=True)
-    return "_".join(f"{name} - {date.strftime('%Y-%m-%d %H.%M %S%f')}.tar".split())
+    return "_".join(f"{name} {date.strftime('%Y-%m-%d %H.%M %S%f')}.tar".split())


 def suggested_filename(backup: AgentBackup) -> str:
@@ -167,23 +168,38 @@ class AsyncIteratorReader:

     def __init__(self, hass: HomeAssistant, stream: AsyncIterator[bytes]) -> None:
         """Initialize the wrapper."""
+        self._aborted = False
         self._hass = hass
         self._stream = stream
         self._buffer: bytes | None = None
+        self._next_future: Future[bytes | None] | None = None
         self._pos: int = 0

     async def _next(self) -> bytes | None:
         """Get the next chunk from the iterator."""
         return await anext(self._stream, None)

+    def abort(self) -> None:
+        """Abort the reader."""
+        self._aborted = True
+        if self._next_future is not None:
+            self._next_future.cancel()
+
     def read(self, n: int = -1, /) -> bytes:
         """Read data from the iterator."""
         result = bytearray()
         while n < 0 or len(result) < n:
             if not self._buffer:
-                self._buffer = asyncio.run_coroutine_threadsafe(
+                self._next_future = asyncio.run_coroutine_threadsafe(
                     self._next(), self._hass.loop
-                ).result()
+                )
+                if self._aborted:
+                    self._next_future.cancel()
+                    raise AbortCipher
+                try:
+                    self._buffer = self._next_future.result()
+                except CancelledError as err:
+                    raise AbortCipher from err
                 self._pos = 0
             if not self._buffer:
                 # The stream is exhausted
@@ -205,9 +221,11 @@ class AsyncIteratorWriter:

     def __init__(self, hass: HomeAssistant) -> None:
         """Initialize the wrapper."""
+        self._aborted = False
         self._hass = hass
         self._pos: int = 0
         self._queue: asyncio.Queue[bytes | None] = asyncio.Queue(maxsize=1)
+        self._write_future: Future[bytes | None] | None = None

     def __aiter__(self) -> Self:
         """Return the iterator."""
@@ -219,13 +237,28 @@ class AsyncIteratorWriter:
             return data
         raise StopAsyncIteration

+    def abort(self) -> None:
+        """Abort the writer."""
+        self._aborted = True
+        if self._write_future is not None:
+            self._write_future.cancel()
+
     def tell(self) -> int:
         """Return the current position in the iterator."""
         return self._pos

     def write(self, s: bytes, /) -> int:
         """Write data to the iterator."""
-        asyncio.run_coroutine_threadsafe(self._queue.put(s), self._hass.loop).result()
+        self._write_future = asyncio.run_coroutine_threadsafe(
+            self._queue.put(s), self._hass.loop
+        )
+        if self._aborted:
+            self._write_future.cancel()
+            raise AbortCipher
+        try:
+            self._write_future.result()
+        except CancelledError as err:
+            raise AbortCipher from err
         self._pos += len(s)
         return len(s)

@@ -415,7 +448,9 @@ def _encrypt_backup(
 class _CipherWorkerStatus:
     done: asyncio.Event
     error: Exception | None = None
-    thread: ThreadWithException
+    reader: AsyncIteratorReader
+    thread: threading.Thread
+    writer: AsyncIteratorWriter


 class _CipherBackupStreamer:
@@ -468,11 +503,13 @@ class _CipherBackupStreamer:
         stream = await self._open_stream()
         reader = AsyncIteratorReader(self._hass, stream)
         writer = AsyncIteratorWriter(self._hass)
-        worker = ThreadWithException(
+        worker = threading.Thread(
             target=self._cipher_func,
             args=[reader, writer, self._password, on_done, self.size(), self._nonces],
         )
-        worker_status = _CipherWorkerStatus(done=asyncio.Event(), thread=worker)
+        worker_status = _CipherWorkerStatus(
+            done=asyncio.Event(), reader=reader, thread=worker, writer=writer
+        )
         self._workers.append(worker_status)
         worker.start()
         return writer
@@ -480,9 +517,8 @@ class _CipherBackupStreamer:
     async def wait(self) -> None:
         """Wait for the worker threads to finish."""
         for worker in self._workers:
-            if not worker.thread.is_alive():
-                continue
-            worker.thread.raise_exc(AbortCipher)
+            worker.reader.abort()
+            worker.writer.abort()
         await asyncio.gather(*(worker.done.wait() for worker in self._workers))
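
The util.py changes replace ThreadWithException (which injected an exception into the worker thread) with a cooperative scheme: the blocking thread always waits on a concurrent.futures.Future, and abort() cancels that future so the thread unwinds through AbortCipher. A simplified standalone sketch of the mechanism, not the module's full code:

import asyncio
from collections.abc import Coroutine
from concurrent.futures import CancelledError, Future


class AbortCipher(Exception):
    """Raised inside the worker thread when the stream is aborted."""


class AbortableBridge:
    """Runs coroutines on the loop from a worker thread, cancellable from outside."""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self._aborted = False
        self._future: Future | None = None

    def abort(self) -> None:  # called from the event loop side
        self._aborted = True
        if self._future is not None:
            self._future.cancel()

    def call(self, coro: Coroutine) -> object:  # called from the worker thread
        self._future = asyncio.run_coroutine_threadsafe(coro, self._loop)
        if self._aborted:  # abort() may have run before _future was assigned
            self._future.cancel()
            raise AbortCipher
        try:
            return self._future.result()  # blocks until done or cancelled
        except CancelledError as err:
            raise AbortCipher from err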

@@ -15,7 +15,7 @@ from .manager import (
     IncorrectPasswordError,
     ManagerStateEvent,
 )
-from .models import Folder
+from .models import BackupNotFound, Folder


 @callback
@@ -57,7 +57,7 @@ async def handle_info(
             "agent_errors": {
                 agent_id: str(err) for agent_id, err in agent_errors.items()
             },
-            "backups": [backup.as_frontend_json() for backup in backups.values()],
+            "backups": list(backups.values()),
             "last_attempted_automatic_backup": manager.config.data.last_attempted_automatic_backup,
             "last_completed_automatic_backup": manager.config.data.last_completed_automatic_backup,
             "last_non_idle_event": manager.last_non_idle_event,
@@ -91,7 +91,7 @@ async def handle_details(
             "agent_errors": {
                 agent_id: str(err) for agent_id, err in agent_errors.items()
             },
-            "backup": backup.as_frontend_json() if backup else None,
+            "backup": backup,
         },
     )

@@ -151,6 +151,8 @@ async def handle_restore(
             restore_folders=msg.get("restore_folders"),
             restore_homeassistant=msg["restore_homeassistant"],
         )
+    except BackupNotFound:
+        connection.send_error(msg["id"], "backup_not_found", "Backup not found")
     except IncorrectPasswordError:
         connection.send_error(msg["id"], "password_incorrect", "Incorrect password")
     else:
@@ -179,6 +181,8 @@ async def handle_can_decrypt_on_download(
             agent_id=msg["agent_id"],
             password=msg.get("password"),
         )
+    except BackupNotFound:
+        connection.send_error(msg["id"], "backup_not_found", "Backup not found")
     except IncorrectPasswordError:
         connection.send_error(msg["id"], "password_incorrect", "Incorrect password")
     except DecryptOnDowloadNotSupported:

@@ -19,6 +19,8 @@ from .const import (
 )
 from .entity import BangOlufsenEntity

+PARALLEL_UPDATES = 0
+

 async def async_setup_entry(
     hass: HomeAssistant,

@@ -1,8 +1,6 @@
 """The bluesound component."""

-from dataclasses import dataclass
-
-from pyblu import Player, SyncStatus
+from pyblu import Player
 from pyblu.errors import PlayerUnreachableError

 from homeassistant.config_entries import ConfigEntry
@@ -14,7 +12,11 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.typing import ConfigType

 from .const import DOMAIN
-from .coordinator import BluesoundCoordinator
+from .coordinator import (
+    BluesoundConfigEntry,
+    BluesoundCoordinator,
+    BluesoundRuntimeData,
+)

 CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

@@ -23,18 +25,6 @@ PLATFORMS = [
 ]


-@dataclass
-class BluesoundRuntimeData:
-    """Bluesound data class."""
-
-    player: Player
-    sync_status: SyncStatus
-    coordinator: BluesoundCoordinator
-
-
-type BluesoundConfigEntry = ConfigEntry[BluesoundRuntimeData]
-
-
 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     """Set up the Bluesound."""
     return True
@@ -53,7 +43,7 @@ async def async_setup_entry(
     except PlayerUnreachableError as ex:
         raise ConfigEntryNotReady(f"Error connecting to {host}:{port}") from ex

-    coordinator = BluesoundCoordinator(hass, player, sync_status)
+    coordinator = BluesoundCoordinator(hass, config_entry, player, sync_status)
     await coordinator.async_config_entry_first_refresh()

     config_entry.runtime_data = BluesoundRuntimeData(player, sync_status, coordinator)

@@ -12,6 +12,7 @@ import logging
 from pyblu import Input, Player, Preset, Status, SyncStatus
 from pyblu.errors import PlayerUnreachableError

+from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

@@ -21,6 +22,15 @@ NODE_OFFLINE_CHECK_TIMEOUT = timedelta(minutes=3)
 PRESET_AND_INPUTS_INTERVAL = timedelta(minutes=15)


+@dataclass
+class BluesoundRuntimeData:
+    """Bluesound data class."""
+
+    player: Player
+    sync_status: SyncStatus
+    coordinator: BluesoundCoordinator
+
+
 @dataclass
 class BluesoundData:
     """Define a class to hold Bluesound data."""
@@ -31,6 +41,9 @@ class BluesoundData:
     inputs: list[Input]


+type BluesoundConfigEntry = ConfigEntry[BluesoundRuntimeData]
+
+
 def cancel_task(task: asyncio.Task) -> Callable[[], Coroutine[None, None, None]]:
     """Cancel a task."""

@@ -45,8 +58,14 @@ def cancel_task(task: asyncio.Task) -> Callable[[], Coroutine[None, None, None]]
 class BluesoundCoordinator(DataUpdateCoordinator[BluesoundData]):
     """Define an object to hold Bluesound data."""

+    config_entry: BluesoundConfigEntry
+
     def __init__(
-        self, hass: HomeAssistant, player: Player, sync_status: SyncStatus
+        self,
+        hass: HomeAssistant,
+        config_entry: BluesoundConfigEntry,
+        player: Player,
+        sync_status: SyncStatus,
     ) -> None:
         """Initialize."""
         self.player = player
@@ -55,12 +74,11 @@ class BluesoundCoordinator(DataUpdateCoordinator[BluesoundData]):
         super().__init__(
             hass,
             logger=_LOGGER,
+            config_entry=config_entry,
             name=sync_status.name,
         )

     async def _async_setup(self) -> None:
-        assert self.config_entry is not None
-
         preset = await self.player.presets()
         inputs = await self.player.inputs()
         status = await self.player.status()
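
With BluesoundRuntimeData and the BluesoundConfigEntry alias now living in coordinator.py, platforms get fully typed access through entry.runtime_data. A hypothetical platform-setup sketch showing the consuming side (not a file from this commit):

from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .coordinator import BluesoundConfigEntry


async def async_setup_entry(
    hass: HomeAssistant,
    entry: BluesoundConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    # entry.runtime_data is typed as BluesoundRuntimeData via the alias
    coordinator = entry.runtime_data.coordinator
    player = entry.runtime_data.player
    # ... build entities from coordinator data and call async_add_entities(...)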

@@ -28,7 +28,7 @@
     "services": {
       "join": {
         "name": "Join",
-        "description": "Group player together.",
+        "description": "Groups players together under a single master speaker.",
         "fields": {
           "master": {
             "name": "Master",
@@ -36,23 +36,23 @@
           },
           "entity_id": {
             "name": "Entity",
-            "description": "Name of entity that will coordinate the grouping. Platform dependent."
+            "description": "Name of entity that will group to master speaker. Platform dependent."
           }
         }
       },
       "unjoin": {
         "name": "Unjoin",
-        "description": "Unjoin the player from a group.",
+        "description": "Separates a player from a group.",
         "fields": {
           "entity_id": {
             "name": "Entity",
-            "description": "Name of entity that will be unjoined from their group. Platform dependent."
+            "description": "Name of entity that will be separated from their group. Platform dependent."
           }
         }
       },
       "set_sleep_timer": {
         "name": "Set sleep timer",
-        "description": "Set a Bluesound timer. It will increase timer in steps: 15, 30, 45, 60, 90, 0.",
+        "description": "Sets a Bluesound timer that will turn off the speaker. It will increase in steps: 15, 30, 45, 60, 90, 0.",
         "fields": {
           "entity_id": {
             "name": "Entity",
@@ -62,7 +62,7 @@
       },
       "clear_sleep_timer": {
         "name": "Clear sleep timer",
-        "description": "Clear a Bluesound timer.",
+        "description": "Clears a Bluesound timer.",
         "fields": {
           "entity_id": {
             "name": "Entity",

@@ -5,7 +5,7 @@ from __future__ import annotations
 import datetime
 import logging
 import platform
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Any

 from bleak_retry_connector import BleakSlotManager
 from bluetooth_adapters import (
@@ -302,7 +302,6 @@ async def async_update_device(
     entry: ConfigEntry,
     adapter: str,
     details: AdapterDetails,
-    via_device_domain: str | None = None,
     via_device_id: str | None = None,
 ) -> None:
     """Update device registry entry.
@@ -322,10 +321,11 @@ async def async_update_device(
         sw_version=details.get(ADAPTER_SW_VERSION),
         hw_version=details.get(ADAPTER_HW_VERSION),
     )
-    if via_device_id:
-        device_registry.async_update_device(
-            device_entry.id, via_device_id=via_device_id
-        )
+    if via_device_id and (via_device_entry := device_registry.async_get(via_device_id)):
+        kwargs: dict[str, Any] = {"via_device_id": via_device_id}
+        if not device_entry.area_id and via_device_entry.area_id:
+            kwargs["area_id"] = via_device_entry.area_id
+        device_registry.async_update_device(device_entry.id, **kwargs)


 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
@@ -360,7 +360,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         entry,
         source_entry.title,
         details,
-        source_domain,
         entry.data.get(CONF_SOURCE_DEVICE_ID),
     )
     return True

@@ -140,7 +140,7 @@ class BluetoothConfigFlow(ConfigFlow, domain=DOMAIN):
                 title=adapter_title(adapter, details), data={}
             )

-        configured_addresses = self._async_current_ids()
+        configured_addresses = self._async_current_ids(include_ignore=False)
         bluetooth_adapters = get_adapters()
         await bluetooth_adapters.refresh()
         self._adapters = bluetooth_adapters.adapters
@@ -155,12 +155,8 @@ class BluetoothConfigFlow(ConfigFlow, domain=DOMAIN):
             and not (system == "Linux" and details[ADAPTER_ADDRESS] == DEFAULT_ADDRESS)
         ]
         if not unconfigured_adapters:
-            ignored_adapters = len(
-                self._async_current_entries(include_ignore=True)
-            ) - len(self._async_current_entries(include_ignore=False))
             return self.async_abort(
                 reason="no_adapters",
-                description_placeholders={"ignored_adapters": str(ignored_adapters)},
             )
         if len(unconfigured_adapters) == 1:
             self._adapter = list(self._adapters)[0]

@@ -16,11 +16,11 @@
   "quality_scale": "internal",
   "requirements": [
     "bleak==0.22.3",
-    "bleak-retry-connector==3.8.0",
-    "bluetooth-adapters==0.21.1",
+    "bleak-retry-connector==3.8.1",
+    "bluetooth-adapters==0.21.4",
     "bluetooth-auto-recovery==1.4.2",
-    "bluetooth-data-tools==1.23.3",
-    "dbus-fast==2.31.0",
-    "habluetooth==3.21.0"
+    "bluetooth-data-tools==1.23.4",
+    "dbus-fast==2.33.0",
+    "habluetooth==3.21.1"
   ]
 }

@@ -23,7 +23,7 @@
     },
     "abort": {
       "already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
-      "no_adapters": "No unconfigured Bluetooth adapters found. There are {ignored_adapters} ignored adapters."
+      "no_adapters": "No unconfigured Bluetooth adapters found."
     }
   },
   "options": {

@@ -20,5 +20,5 @@
   "dependencies": ["bluetooth_adapters"],
   "documentation": "https://www.home-assistant.io/integrations/bthome",
   "iot_class": "local_push",
-  "requirements": ["bthome-ble==3.12.3"]
+  "requirements": ["bthome-ble==3.12.4"]
 }

@@ -67,6 +67,11 @@ SENSOR_DESCRIPTIONS = {
         state_class=SensorStateClass.MEASUREMENT,
         entity_category=EntityCategory.DIAGNOSTIC,
     ),
+    # Channel (-)
+    (BTHomeExtendedSensorDeviceClass.CHANNEL, None): SensorEntityDescription(
+        key=str(BTHomeExtendedSensorDeviceClass.CHANNEL),
+        state_class=SensorStateClass.MEASUREMENT,
+    ),
     # Conductivity (µS/cm)
     (
         BTHomeSensorDeviceClass.CONDUCTIVITY,

@@ -29,6 +29,7 @@ from homeassistant.components.google_assistant import helpers as google_helpers
 from homeassistant.components.homeassistant import exposed_entities
 from homeassistant.components.http import KEY_HASS, HomeAssistantView, require_admin
 from homeassistant.components.http.data_validator import RequestDataValidator
+from homeassistant.components.system_health import get_info as get_system_health_info
 from homeassistant.const import CLOUD_NEVER_EXPOSED_ENTITIES
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError
@@ -107,6 +108,7 @@ def async_setup(hass: HomeAssistant) -> None:
     hass.http.register_view(CloudRegisterView)
     hass.http.register_view(CloudResendConfirmView)
     hass.http.register_view(CloudForgotPasswordView)
+    hass.http.register_view(DownloadSupportPackageView)

     _CLOUD_ERRORS.update(
         {
@@ -389,6 +391,59 @@ class CloudForgotPasswordView(HomeAssistantView):
         return self.json_message("ok")


+class DownloadSupportPackageView(HomeAssistantView):
+    """Download support package view."""
+
+    url = "/api/cloud/support_package"
+    name = "api:cloud:support_package"
+
+    def _generate_markdown(
+        self, hass_info: dict[str, Any], domains_info: dict[str, dict[str, str]]
+    ) -> str:
+        def get_domain_table_markdown(domain_info: dict[str, Any]) -> str:
+            if len(domain_info) == 0:
+                return "No information available\n"
+
+            markdown = ""
+            first = True
+            for key, value in domain_info.items():
+                markdown += f"{key} | {value}\n"
+                if first:
+                    markdown += "--- | ---\n"
+                    first = False
+            return markdown + "\n"
+
+        markdown = "## System Information\n\n"
+        markdown += get_domain_table_markdown(hass_info)
+
+        for domain, domain_info in domains_info.items():
+            domain_info_md = get_domain_table_markdown(domain_info)
+            markdown += (
+                f"<details><summary>{domain}</summary>\n\n"
+                f"{domain_info_md}"
+                "</details>\n\n"
+            )
+
+        return markdown
+
+    async def get(self, request: web.Request) -> web.Response:
+        """Download support package file."""
+
+        hass = request.app[KEY_HASS]
+        domain_health = await get_system_health_info(hass)
+
+        hass_info = domain_health.pop("homeassistant", {})
+        markdown = self._generate_markdown(hass_info, domain_health)
+
+        return web.Response(
+            body=markdown,
+            content_type="text/markdown",
+            headers={
+                "Content-Disposition": 'attachment; filename="support_package.md"'
+            },
+        )
+
+
 @websocket_api.require_admin
 @websocket_api.websocket_command({vol.Required("type"): "cloud/remove_data"})
 @websocket_api.async_response
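
For reference, the table helper in the new view emits a pipe table whose separator row lands right after the first key/value pair, so that pair renders as the table header. A standalone restatement with a hypothetical input to show the shape of the markdown it produces:

def get_domain_table_markdown(domain_info: dict[str, str]) -> str:
    if len(domain_info) == 0:
        return "No information available\n"
    markdown = ""
    first = True
    for key, value in domain_info.items():
        markdown += f"{key} | {value}\n"
        if first:
            # Separator after the first row makes that row the table header
            markdown += "--- | ---\n"
            first = False
    return markdown + "\n"


print(get_domain_table_markdown(
    {"version": "2025.2.0", "installation_type": "Home Assistant OS"}  # invented values
))
# version | 2025.2.0
# --- | ---
# installation_type | Home Assistant OS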

@@ -13,6 +13,6 @@
   "integration_type": "system",
   "iot_class": "cloud_push",
   "loggers": ["hass_nabucasa"],
-  "requirements": ["hass-nabucasa==0.88.1"],
+  "requirements": ["hass-nabucasa==0.89.0"],
   "single_config_entry": true
 }
@ -38,6 +38,156 @@ ATTR_GENDER = "gender"
DEPRECATED_VOICES = {"XiaoxuanNeural": "XiaozhenNeural"}
SUPPORT_LANGUAGES = list(TTS_VOICES)

DEFAULT_VOICES = {
    "af-ZA": "AdriNeural",
    "am-ET": "MekdesNeural",
    "ar-AE": "FatimaNeural",
    "ar-BH": "LailaNeural",
    "ar-DZ": "AminaNeural",
    "ar-EG": "SalmaNeural",
    "ar-IQ": "RanaNeural",
    "ar-JO": "SanaNeural",
    "ar-KW": "NouraNeural",
    "ar-LB": "LaylaNeural",
    "ar-LY": "ImanNeural",
    "ar-MA": "MounaNeural",
    "ar-OM": "AbdullahNeural",
    "ar-QA": "AmalNeural",
    "ar-SA": "ZariyahNeural",
    "ar-SY": "AmanyNeural",
    "ar-TN": "ReemNeural",
    "ar-YE": "MaryamNeural",
    "az-AZ": "BabekNeural",
    "bg-BG": "KalinaNeural",
    "bn-BD": "NabanitaNeural",
    "bn-IN": "TanishaaNeural",
    "bs-BA": "GoranNeural",
    "ca-ES": "JoanaNeural",
    "cs-CZ": "VlastaNeural",
    "cy-GB": "NiaNeural",
    "da-DK": "ChristelNeural",
    "de-AT": "IngridNeural",
    "de-CH": "LeniNeural",
    "de-DE": "KatjaNeural",
    "el-GR": "AthinaNeural",
    "en-AU": "NatashaNeural",
    "en-CA": "ClaraNeural",
    "en-GB": "LibbyNeural",
    "en-HK": "YanNeural",
    "en-IE": "EmilyNeural",
    "en-IN": "NeerjaNeural",
    "en-KE": "AsiliaNeural",
    "en-NG": "EzinneNeural",
    "en-NZ": "MollyNeural",
    "en-PH": "RosaNeural",
    "en-SG": "LunaNeural",
    "en-TZ": "ImaniNeural",
    "en-US": "JennyNeural",
    "en-ZA": "LeahNeural",
    "es-AR": "ElenaNeural",
    "es-BO": "SofiaNeural",
    "es-CL": "CatalinaNeural",
    "es-CO": "SalomeNeural",
    "es-CR": "MariaNeural",
    "es-CU": "BelkysNeural",
    "es-DO": "RamonaNeural",
    "es-EC": "AndreaNeural",
    "es-ES": "ElviraNeural",
    "es-GQ": "TeresaNeural",
    "es-GT": "MartaNeural",
    "es-HN": "KarlaNeural",
    "es-MX": "DaliaNeural",
    "es-NI": "YolandaNeural",
    "es-PA": "MargaritaNeural",
    "es-PE": "CamilaNeural",
    "es-PR": "KarinaNeural",
    "es-PY": "TaniaNeural",
    "es-SV": "LorenaNeural",
    "es-US": "PalomaNeural",
    "es-UY": "ValentinaNeural",
    "es-VE": "PaolaNeural",
    "et-EE": "AnuNeural",
    "eu-ES": "AinhoaNeural",
    "fa-IR": "DilaraNeural",
    "fi-FI": "SelmaNeural",
    "fil-PH": "BlessicaNeural",
    "fr-BE": "CharlineNeural",
    "fr-CA": "SylvieNeural",
    "fr-CH": "ArianeNeural",
    "fr-FR": "DeniseNeural",
    "ga-IE": "OrlaNeural",
    "gl-ES": "SabelaNeural",
    "gu-IN": "DhwaniNeural",
    "he-IL": "HilaNeural",
    "hi-IN": "SwaraNeural",
    "hr-HR": "GabrijelaNeural",
    "hu-HU": "NoemiNeural",
    "hy-AM": "AnahitNeural",
    "id-ID": "GadisNeural",
    "is-IS": "GudrunNeural",
    "it-IT": "ElsaNeural",
    "ja-JP": "NanamiNeural",
    "jv-ID": "SitiNeural",
    "ka-GE": "EkaNeural",
    "kk-KZ": "AigulNeural",
    "km-KH": "SreymomNeural",
    "kn-IN": "SapnaNeural",
    "ko-KR": "SunHiNeural",
    "lo-LA": "KeomanyNeural",
    "lt-LT": "OnaNeural",
    "lv-LV": "EveritaNeural",
    "mk-MK": "MarijaNeural",
    "ml-IN": "SobhanaNeural",
    "mn-MN": "BataaNeural",
    "mr-IN": "AarohiNeural",
    "ms-MY": "YasminNeural",
    "mt-MT": "GraceNeural",
    "my-MM": "NilarNeural",
    "nb-NO": "IselinNeural",
    "ne-NP": "HemkalaNeural",
    "nl-BE": "DenaNeural",
    "nl-NL": "ColetteNeural",
    "pl-PL": "AgnieszkaNeural",
    "ps-AF": "LatifaNeural",
    "pt-BR": "FranciscaNeural",
    "pt-PT": "RaquelNeural",
    "ro-RO": "AlinaNeural",
    "ru-RU": "SvetlanaNeural",
    "si-LK": "ThiliniNeural",
    "sk-SK": "ViktoriaNeural",
    "sl-SI": "PetraNeural",
    "so-SO": "UbaxNeural",
    "sq-AL": "AnilaNeural",
    "sr-RS": "SophieNeural",
    "su-ID": "TutiNeural",
    "sv-SE": "SofieNeural",
    "sw-KE": "ZuriNeural",
    "sw-TZ": "RehemaNeural",
    "ta-IN": "PallaviNeural",
    "ta-LK": "SaranyaNeural",
    "ta-MY": "KaniNeural",
    "ta-SG": "VenbaNeural",
    "te-IN": "ShrutiNeural",
    "th-TH": "AcharaNeural",
    "tr-TR": "EmelNeural",
    "uk-UA": "PolinaNeural",
    "ur-IN": "GulNeural",
    "ur-PK": "UzmaNeural",
    "uz-UZ": "MadinaNeural",
    "vi-VN": "HoaiMyNeural",
    "wuu-CN": "XiaotongNeural",
    "yue-CN": "XiaoMinNeural",
    "zh-CN": "XiaoxiaoNeural",
    "zh-CN-henan": "YundengNeural",
    "zh-CN-liaoning": "XiaobeiNeural",
    "zh-CN-shaanxi": "XiaoniNeural",
    "zh-CN-shandong": "YunxiangNeural",
    "zh-CN-sichuan": "YunxiNeural",
    "zh-HK": "HiuMaanNeural",
    "zh-TW": "HsiaoChenNeural",
    "zu-ZA": "ThandoNeural",
}
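
Note: the new mapping makes default-voice selection a plain dict lookup; for example:

    assert DEFAULT_VOICES["en-US"] == "JennyNeural"
    assert DEFAULT_VOICES["de-DE"] == "KatjaNeural"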

_LOGGER = logging.getLogger(__name__)


@ -186,12 +336,13 @@ class CloudTTSEntity(TextToSpeechEntity):
        """Load TTS from Home Assistant Cloud."""
        gender: Gender | str | None = options.get(ATTR_GENDER)
        gender = handle_deprecated_gender(self.hass, gender)
        original_voice: str | None = options.get(ATTR_VOICE)
        if original_voice is None and language == self._language:
            original_voice = self._voice
        original_voice: str = options.get(
            ATTR_VOICE,
            self._voice if language == self._language else DEFAULT_VOICES[language],
        )
        voice = handle_deprecated_voice(self.hass, original_voice)
        if voice not in TTS_VOICES[language]:
            default_voice = TTS_VOICES[language][0]
            default_voice = DEFAULT_VOICES[language]
            _LOGGER.debug(
                "Unsupported voice %s detected, falling back to default %s for %s",
                voice,
@ -266,12 +417,13 @@ class CloudProvider(Provider):
        assert self.hass is not None
        gender: Gender | str | None = options.get(ATTR_GENDER)
        gender = handle_deprecated_gender(self.hass, gender)
        original_voice: str | None = options.get(ATTR_VOICE)
        if original_voice is None and language == self._language:
            original_voice = self._voice
        original_voice: str = options.get(
            ATTR_VOICE,
            self._voice if language == self._language else DEFAULT_VOICES[language],
        )
        voice = handle_deprecated_voice(self.hass, original_voice)
        if voice not in TTS_VOICES[language]:
            default_voice = TTS_VOICES[language][0]
            default_voice = DEFAULT_VOICES[language]
            _LOGGER.debug(
                "Unsupported voice %s detected, falling back to default %s for %s",
                voice,
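
Note: the changed selection logic in both the entity and the legacy-provider code paths boils down to this standalone sketch (resolve_voice is an illustrative name, not a function in this module; DEFAULT_VOICES and TTS_VOICES are the module-level mappings; deprecated-voice handling omitted):

    def resolve_voice(language: str, requested: str | None,
                      configured_language: str, configured_voice: str) -> str:
        """Requested voice, else the configured one, else the per-language default."""
        voice = requested if requested is not None else (
            configured_voice if language == configured_language
            else DEFAULT_VOICES[language]
        )
        if voice not in TTS_VOICES[language]:
            # Fall back to the curated default rather than TTS_VOICES[language][0]
            voice = DEFAULT_VOICES[language]
        return voice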

@ -302,7 +302,8 @@ def config_entries_progress(
        [
            flw
            for flw in hass.config_entries.flow.async_progress()
            if flw["context"]["source"] != config_entries.SOURCE_USER
            if flw["context"]["source"]
            not in (config_entries.SOURCE_RECONFIGURE, config_entries.SOURCE_USER)
        ],
    )

@ -30,6 +30,16 @@ from .agent_manager import (
    async_get_agent,
    get_agent_manager,
)
from .chat_log import (
    AssistantContent,
    ChatLog,
    Content,
    ConverseError,
    SystemContent,
    ToolResultContent,
    UserContent,
    async_get_chat_log,
)
from .const import (
    ATTR_AGENT_ID,
    ATTR_CONVERSATION_ID,
@ -48,13 +58,13 @@ from .default_agent import DefaultAgent, async_setup_default_agent
from .entity import ConversationEntity
from .http import async_setup as async_setup_conversation_http
from .models import AbstractConversationAgent, ConversationInput, ConversationResult
from .session import ChatLog, Content, ConverseError, NativeContent, async_get_chat_log
from .trace import ConversationTraceEventType, async_conversation_trace_append

__all__ = [
    "DOMAIN",
    "HOME_ASSISTANT_AGENT",
    "OLD_HOME_ASSISTANT_AGENT",
    "AssistantContent",
    "ChatLog",
    "Content",
    "ConversationEntity",
@ -63,7 +73,9 @@ __all__ = [
    "ConversationResult",
    "ConversationTraceEventType",
    "ConverseError",
    "NativeContent",
    "SystemContent",
    "ToolResultContent",
    "UserContent",
    "async_conversation_trace_append",
    "async_converse",
    "async_get_agent_info",

@ -2,19 +2,16 @@

from __future__ import annotations

from collections.abc import Generator
from collections.abc import AsyncGenerator, Generator
from contextlib import contextmanager
from dataclasses import dataclass, field, replace
from datetime import datetime
import logging
from typing import Literal

import voluptuous as vol

from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError, TemplateError
from homeassistant.helpers import chat_session, intent, llm, template
from homeassistant.util import dt as dt_util
from homeassistant.util.hass_dict import HassKey
from homeassistant.util.json import JsonObjectType

@ -31,7 +28,7 @@ LOGGER = logging.getLogger(__name__)
def async_get_chat_log(
    hass: HomeAssistant,
    session: chat_session.ChatSession,
    user_input: ConversationInput,
    user_input: ConversationInput | None = None,
) -> Generator[ChatLog]:
    """Return chat log for a specific chat session."""
    all_history = hass.data.get(DATA_CHAT_HISTORY)
@ -42,9 +39,24 @@ def async_get_chat_log(
    history = all_history.get(session.conversation_id)

    if history:
        history = replace(history, messages=history.messages.copy())
        history = replace(history, content=history.content.copy())
    else:
        history = ChatLog(hass, session.conversation_id, user_input.agent_id)
        history = ChatLog(hass, session.conversation_id)

    if user_input is not None:
        history.async_add_user_content(UserContent(content=user_input.text))

    last_message = history.content[-1]

    yield history

    if history.content[-1] is last_message:
        LOGGER.debug(
            "History opened but no assistant message was added, ignoring update"
        )
        return

    if session.conversation_id not in all_history:

        @callback
        def do_cleanup() -> None:
@ -53,22 +65,6 @@ def async_get_chat_log(

        session.async_on_cleanup(do_cleanup)

        message: Content = Content(
            role="user",
            agent_id=user_input.agent_id,
            content=user_input.text,
        )
        history.async_add_message(message)

        yield history

        if history.messages[-1] is message:
            LOGGER.debug(
                "History opened but no assistant message was added, ignoring update"
            )
            return

    history.last_updated = dt_util.utcnow()
    all_history[session.conversation_id] = history
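
Note: callers open the log through a chat session; a minimal usage sketch, assuming the context-manager APIs shown in this diff and in homeassistant.helpers.chat_session (variable names are illustrative):

    with (
        chat_session.async_get_chat_session(hass, user_input.conversation_id) as session,
        async_get_chat_log(hass, session, user_input) as chat_log,
    ):
        ...  # run the agent, then append AssistantContent to chat_log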

@ -94,63 +90,103 @@ class ConverseError(HomeAssistantError):
        )


@dataclass
class Content:
@dataclass(frozen=True)
class SystemContent:
    """Base class for chat messages."""

    role: Literal["system", "assistant", "user"]
    agent_id: str | None
    role: str = field(init=False, default="system")
    content: str


@dataclass(frozen=True)
class NativeContent[_NativeT]:
    """Native content."""
class UserContent:
    """User content."""

    role: str = field(init=False, default="native")
    role: str = field(init=False, default="user")
    content: str


@dataclass(frozen=True)
class AssistantContent:
    """Assistant content."""

    role: str = field(init=False, default="assistant")
    agent_id: str
    content: _NativeT
    content: str
    tool_calls: list[llm.ToolInput] | None = None


@dataclass(frozen=True)
class ToolResultContent:
    """Tool result content."""

    role: str = field(init=False, default="tool_result")
    agent_id: str
    tool_call_id: str
    tool_name: str
    tool_result: JsonObjectType


Content = SystemContent | UserContent | AssistantContent | ToolResultContent
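
Note: a short sketch of constructing these frozen dataclasses (the values are made up); the role field is fixed per class and cannot be passed in:

    system = SystemContent(content="You are a voice assistant")
    user = UserContent(content="Turn on the kitchen light")
    assistant = AssistantContent(agent_id="conversation.home_assistant", content="Done")
    assert (system.role, user.role, assistant.role) == ("system", "user", "assistant")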


@dataclass
class ChatLog[_NativeT]:
class ChatLog:
    """Class holding the chat history of a specific conversation."""

    hass: HomeAssistant
    conversation_id: str
    agent_id: str | None
    user_name: str | None = None
    messages: list[Content | NativeContent[_NativeT]] = field(
        default_factory=lambda: [Content(role="system", agent_id=None, content="")]
    )
    content: list[Content] = field(default_factory=lambda: [SystemContent(content="")])
    extra_system_prompt: str | None = None
    llm_api: llm.APIInstance | None = None
    last_updated: datetime = field(default_factory=dt_util.utcnow)

    @callback
    def async_add_message(self, message: Content | NativeContent[_NativeT]) -> None:
        """Process intent."""
        if message.role == "system":
            raise ValueError("Cannot add system messages to history")
        if message.role != "native" and self.messages[-1].role == message.role:
            raise ValueError("Cannot add two assistant or user messages in a row")

        self.messages.append(message)
    def async_add_user_content(self, content: UserContent) -> None:
        """Add user content to the log."""
        self.content.append(content)

    @callback
    def async_get_messages(
        self, agent_id: str | None = None
    ) -> list[Content | NativeContent[_NativeT]]:
        """Get messages for a specific agent ID.
    def async_add_assistant_content_without_tools(
        self, content: AssistantContent
    ) -> None:
        """Add assistant content to the log."""
        if content.tool_calls is not None:
            raise ValueError("Tool calls not allowed")
        self.content.append(content)

        This will filter out any native message tied to other agent IDs.
        It can still include assistant/user messages generated by other agents.
        """
        return [
            message
            for message in self.messages
            if message.role != "native" or message.agent_id == agent_id
        ]
    async def async_add_assistant_content(
        self, content: AssistantContent
    ) -> AsyncGenerator[ToolResultContent]:
        """Add assistant content."""
        self.content.append(content)

        if content.tool_calls is None:
            return

        if self.llm_api is None:
            raise ValueError("No LLM API configured")

        for tool_input in content.tool_calls:
            LOGGER.debug(
                "Tool call: %s(%s)", tool_input.tool_name, tool_input.tool_args
            )

            try:
                tool_result = await self.llm_api.async_call_tool(tool_input)
            except (HomeAssistantError, vol.Invalid) as e:
                tool_result = {"error": type(e).__name__}
                if str(e):
                    tool_result["error_text"] = str(e)
            LOGGER.debug("Tool response: %s", tool_result)

            response_content = ToolResultContent(
                agent_id=content.agent_id,
                tool_call_id=tool_input.id,
                tool_name=tool_input.tool_name,
                tool_result=tool_result,
            )
            self.content.append(response_content)
            yield response_content
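
Note: because async_add_assistant_content is an async generator, callers must iterate it for the tool calls to actually run; a minimal sketch (names illustrative):

    async for tool_result in chat_log.async_add_assistant_content(
        AssistantContent(agent_id=agent_id, content="", tool_calls=tool_calls)
    ):
        LOGGER.debug("%s -> %s", tool_result.tool_name, tool_result.tool_result)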

    async def async_update_llm_data(
        self,
@ -250,36 +286,16 @@ class ChatLog[_NativeT]:
        prompt = "\n".join(prompt_parts)

        self.llm_api = llm_api
        self.user_name = user_name
        self.extra_system_prompt = extra_system_prompt
        self.messages[0] = Content(
            role="system",
            agent_id=user_input.agent_id,
            content=prompt,
        )
        self.content[0] = SystemContent(content=prompt)

        LOGGER.debug("Prompt: %s", self.messages)
        LOGGER.debug("Prompt: %s", self.content)
        LOGGER.debug("Tools: %s", self.llm_api.tools if self.llm_api else None)

        trace.async_conversation_trace_append(
            trace.ConversationTraceEventType.AGENT_DETAIL,
            {
                "messages": self.messages,
                "messages": self.content,
                "tools": self.llm_api.tools if self.llm_api else None,
            },
        )

    async def async_call_tool(self, tool_input: llm.ToolInput) -> JsonObjectType:
        """Invoke LLM tool for the configured LLM API."""
        if not self.llm_api:
            raise ValueError("No LLM API configured")
        LOGGER.debug("Tool call: %s(%s)", tool_input.tool_name, tool_input.tool_args)

        try:
            tool_response = await self.llm_api.async_call_tool(tool_input)
        except (HomeAssistantError, vol.Invalid) as e:
            tool_response = {"error": type(e).__name__}
            if str(e):
                tool_response["error_text"] = str(e)
        LOGGER.debug("Tool response: %s", tool_response)
        return tool_response
@ -55,6 +55,7 @@ from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.event import async_track_state_added_domain
from homeassistant.util.json import JsonObjectType, json_loads_object

from .chat_log import AssistantContent, async_get_chat_log
from .const import (
    DATA_DEFAULT_ENTITY,
    DEFAULT_EXPOSED_ATTRIBUTES,
@ -63,7 +64,6 @@ from .const import (
)
from .entity import ConversationEntity
from .models import ConversationInput, ConversationResult
from .session import Content, async_get_chat_log
from .trace import ConversationTraceEventType, async_conversation_trace_append

_LOGGER = logging.getLogger(__name__)
@ -379,10 +379,9 @@ class DefaultAgent(ConversationEntity):
        )

        speech: str = response.speech.get("plain", {}).get("speech", "")
        chat_log.async_add_message(
            Content(
                role="assistant",
                agent_id=user_input.agent_id,
        chat_log.async_add_assistant_content_without_tools(
            AssistantContent(
                agent_id=user_input.agent_id,  # type: ignore[arg-type]
                content=speech,
            )
        )

@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/conversation",
  "integration_type": "system",
  "quality_scale": "internal",
  "requirements": ["hassil==2.2.0", "home-assistant-intents==2025.1.28"]
  "requirements": ["hassil==2.2.3", "home-assistant-intents==2025.2.5"]
}

@ -14,8 +14,8 @@
  ],
  "quality_scale": "internal",
  "requirements": [
    "aiodhcpwatcher==1.0.3",
    "aiodiscover==2.1.0",
    "aiodhcpwatcher==1.1.0",
    "aiodiscover==2.2.2",
    "cached-ipaddress==0.8.0"
  ]
}

@ -3,7 +3,7 @@
Data is fetched from DWD:
https://rcccm.dwd.de/DE/wetter/warnungen_aktuell/objekt_einbindung/objekteinbindung.html

Warnungen vor extremem Unwetter (Stufe 4)  # codespell:ignore vor
Warnungen vor extremem Unwetter (Stufe 4)  # codespell:ignore vor,extremem
Unwetterwarnungen (Stufe 3)
Warnungen vor markantem Wetter (Stufe 2)  # codespell:ignore vor
Wetterwarnungen (Stufe 1)

@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/econet",
  "iot_class": "cloud_push",
  "loggers": ["paho_mqtt", "pyeconet"],
  "requirements": ["pyeconet==0.1.23"]
  "requirements": ["pyeconet==0.1.26"]
}

@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/ecovacs",
  "iot_class": "cloud_push",
  "loggers": ["sleekxmppfs", "sucks", "deebot_client"],
  "requirements": ["py-sucks==0.9.10", "deebot-client==12.0.0b0"]
  "requirements": ["py-sucks==0.9.10", "deebot-client==12.0.0"]
}

@ -4,12 +4,16 @@ from __future__ import annotations

import aiohttp
from electrickiwi_api import ElectricKiwiApi
from electrickiwi_api.exceptions import ApiException
from electrickiwi_api.exceptions import ApiException, AuthException

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow
from homeassistant.helpers import (
    aiohttp_client,
    config_entry_oauth2_flow,
    entity_registry as er,
)

from . import api
from .coordinator import (
@ -44,7 +48,9 @@ async def async_setup_entry(
        raise ConfigEntryNotReady from err

    ek_api = ElectricKiwiApi(
        api.AsyncConfigEntryAuth(aiohttp_client.async_get_clientsession(hass), session)
        api.ConfigEntryElectricKiwiAuth(
            aiohttp_client.async_get_clientsession(hass), session
        )
    )
    hop_coordinator = ElectricKiwiHOPDataCoordinator(hass, entry, ek_api)
    account_coordinator = ElectricKiwiAccountDataCoordinator(hass, entry, ek_api)
@ -53,6 +59,8 @@ async def async_setup_entry(
        await ek_api.set_active_session()
        await hop_coordinator.async_config_entry_first_refresh()
        await account_coordinator.async_config_entry_first_refresh()
    except AuthException as err:
        raise ConfigEntryAuthFailed from err
    except ApiException as err:
        raise ConfigEntryNotReady from err

@ -70,3 +78,53 @@ async def async_unload_entry(
) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


async def async_migrate_entry(
    hass: HomeAssistant, config_entry: ElectricKiwiConfigEntry
) -> bool:
    """Migrate old entry."""
    if config_entry.version == 1 and config_entry.minor_version == 1:
        implementation = (
            await config_entry_oauth2_flow.async_get_config_entry_implementation(
                hass, config_entry
            )
        )

        session = config_entry_oauth2_flow.OAuth2Session(
            hass, config_entry, implementation
        )

        ek_api = ElectricKiwiApi(
            api.ConfigEntryElectricKiwiAuth(
                aiohttp_client.async_get_clientsession(hass), session
            )
        )
        try:
            await ek_api.set_active_session()
            connection_details = await ek_api.get_connection_details()
        except AuthException:
            config_entry.async_start_reauth(hass)
            return False
        except ApiException:
            return False
        unique_id = str(ek_api.customer_number)
        identifier = ek_api.electricity.identifier
        hass.config_entries.async_update_entry(
            config_entry, unique_id=unique_id, minor_version=2
        )
        entity_registry = er.async_get(hass)
        entity_entries = er.async_entries_for_config_entry(
            entity_registry, config_entry_id=config_entry.entry_id
        )

        for entity in entity_entries:
            assert entity.config_entry_id
            entity_registry.async_update_entity(
                entity.entity_id,
                new_unique_id=entity.unique_id.replace(
                    f"{unique_id}_{connection_details.id}", f"{unique_id}_{identifier}"
                ),
            )

    return True
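
Note: the unique_id rewrite swaps the old connection id for the electricity identifier; with made-up values:

    # Hypothetical: customer 12345, old connection id "111", new identifier "00022"
    old_unique_id = "12345_111_total_running_balance"
    new_unique_id = old_unique_id.replace("12345_111", "12345_00022")
    # -> "12345_00022_total_running_balance"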

@ -2,17 +2,16 @@

from __future__ import annotations

from typing import cast

from aiohttp import ClientSession
from electrickiwi_api import AbstractAuth

from homeassistant.helpers import config_entry_oauth2_flow
from homeassistant.core import HomeAssistant
from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow

from .const import API_BASE_URL


class AsyncConfigEntryAuth(AbstractAuth):
class ConfigEntryElectricKiwiAuth(AbstractAuth):
    """Provide Electric Kiwi authentication tied to an OAuth2 based config entry."""

    def __init__(
@ -29,4 +28,21 @@ class AsyncConfigEntryAuth(AbstractAuth):
        """Return a valid access token."""
        await self._oauth_session.async_ensure_token_valid()

        return cast(str, self._oauth_session.token["access_token"])
        return str(self._oauth_session.token["access_token"])


class ConfigFlowElectricKiwiAuth(AbstractAuth):
    """Provide Electric Kiwi authentication tied to an OAuth2 based config flow."""

    def __init__(
        self,
        hass: HomeAssistant,
        token: str,
    ) -> None:
        """Initialize ConfigFlowElectricKiwiAuth."""
        super().__init__(aiohttp_client.async_get_clientsession(hass), API_BASE_URL)
        self._token = token

    async def async_get_access_token(self) -> str:
        """Return the token for the Electric Kiwi API."""
        return self._token
@ -6,9 +6,14 @@ from collections.abc import Mapping
import logging
from typing import Any

from homeassistant.config_entries import ConfigFlowResult
from electrickiwi_api import ElectricKiwiApi
from electrickiwi_api.exceptions import ApiException

from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
from homeassistant.const import CONF_NAME
from homeassistant.helpers import config_entry_oauth2_flow

from . import api
from .const import DOMAIN, SCOPE_VALUES


@ -17,6 +22,8 @@ class ElectricKiwiOauth2FlowHandler(
):
    """Config flow to handle Electric Kiwi OAuth2 authentication."""

    VERSION = 1
    MINOR_VERSION = 2
    DOMAIN = DOMAIN

    @property
@ -40,12 +47,30 @@ class ElectricKiwiOauth2FlowHandler(
    ) -> ConfigFlowResult:
        """Dialog that informs the user that reauth is required."""
        if user_input is None:
            return self.async_show_form(step_id="reauth_confirm")
            return self.async_show_form(
                step_id="reauth_confirm",
                description_placeholders={CONF_NAME: self._get_reauth_entry().title},
            )
        return await self.async_step_user()

    async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult:
        """Create an entry for Electric Kiwi."""
        existing_entry = await self.async_set_unique_id(DOMAIN)
        if existing_entry:
            return self.async_update_reload_and_abort(existing_entry, data=data)
        return await super().async_oauth_create_entry(data)
        ek_api = ElectricKiwiApi(
            api.ConfigFlowElectricKiwiAuth(self.hass, data["token"]["access_token"])
        )

        try:
            session = await ek_api.get_active_session()
        except ApiException:
            return self.async_abort(reason="connection_error")

        unique_id = str(session.data.customer_number)
        await self.async_set_unique_id(unique_id)
        if self.source == SOURCE_REAUTH:
            self._abort_if_unique_id_mismatch(reason="wrong_account")
            return self.async_update_reload_and_abort(
                self._get_reauth_entry(), data=data
            )

        self._abort_if_unique_id_configured()
        return self.async_create_entry(title=unique_id, data=data)

@ -8,4 +8,4 @@ OAUTH2_AUTHORIZE = "https://welcome.electrickiwi.co.nz/oauth/authorize"
OAUTH2_TOKEN = "https://welcome.electrickiwi.co.nz/oauth/token"
API_BASE_URL = "https://api.electrickiwi.co.nz"

SCOPE_VALUES = "read_connection_detail read_billing_frequency read_account_running_balance read_consumption_summary read_consumption_averages read_hop_intervals_config read_hop_connection save_hop_connection read_session"
SCOPE_VALUES = "read_customer_details read_connection_detail read_connection read_billing_address get_bill_address read_billing_frequency read_billing_details read_billing_bills read_billing_bill read_billing_bill_id read_billing_bill_file read_account_running_balance read_customer_account_summary read_consumption_summary download_consumption_file read_consumption_averages get_consumption_averages read_hop_intervals_config read_hop_intervals read_hop_connection read_hop_specific_connection save_hop_connection save_hop_specific_connection read_outage_contact get_outage_contact_info_for_icp read_session read_session_data_login"

@ -10,7 +10,7 @@ import logging

from electrickiwi_api import ElectricKiwiApi
from electrickiwi_api.exceptions import ApiException, AuthException
from electrickiwi_api.model import AccountBalance, Hop, HopIntervals
from electrickiwi_api.model import AccountSummary, Hop, HopIntervals

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
@ -34,7 +34,7 @@ class ElectricKiwiRuntimeData:
type ElectricKiwiConfigEntry = ConfigEntry[ElectricKiwiRuntimeData]


class ElectricKiwiAccountDataCoordinator(DataUpdateCoordinator[AccountBalance]):
class ElectricKiwiAccountDataCoordinator(DataUpdateCoordinator[AccountSummary]):
    """ElectricKiwi Account Data object."""

    def __init__(
@ -51,13 +51,13 @@ class ElectricKiwiAccountDataCoordinator(DataUpdateCoordinator[AccountBalance]):
            name="Electric Kiwi Account Data",
            update_interval=ACCOUNT_SCAN_INTERVAL,
        )
        self._ek_api = ek_api
        self.ek_api = ek_api

    async def _async_update_data(self) -> AccountBalance:
    async def _async_update_data(self) -> AccountSummary:
        """Fetch data from Account balance API endpoint."""
        try:
            async with asyncio.timeout(60):
                return await self._ek_api.get_account_balance()
                return await self.ek_api.get_account_summary()
        except AuthException as auth_err:
            raise ConfigEntryAuthFailed from auth_err
        except ApiException as api_err:
@ -85,7 +85,7 @@ class ElectricKiwiHOPDataCoordinator(DataUpdateCoordinator[Hop]):
            # Polling interval. Will only be polled if there are subscribers.
            update_interval=HOP_SCAN_INTERVAL,
        )
        self._ek_api = ek_api
        self.ek_api = ek_api
        self.hop_intervals: HopIntervals | None = None

    def get_hop_options(self) -> dict[str, int]:
@ -100,7 +100,7 @@ class ElectricKiwiHOPDataCoordinator(DataUpdateCoordinator[Hop]):
    async def async_update_hop(self, hop_interval: int) -> Hop:
        """Update selected hop and data."""
        try:
            self.async_set_updated_data(await self._ek_api.post_hop(hop_interval))
            self.async_set_updated_data(await self.ek_api.post_hop(hop_interval))
        except AuthException as auth_err:
            raise ConfigEntryAuthFailed from auth_err
        except ApiException as api_err:
@ -118,7 +118,7 @@ class ElectricKiwiHOPDataCoordinator(DataUpdateCoordinator[Hop]):
        try:
            async with asyncio.timeout(60):
                if self.hop_intervals is None:
                    hop_intervals: HopIntervals = await self._ek_api.get_hop_intervals()
                    hop_intervals: HopIntervals = await self.ek_api.get_hop_intervals()
                    hop_intervals.intervals = OrderedDict(
                        filter(
                            lambda pair: pair[1].active == 1,
@ -127,7 +127,7 @@ class ElectricKiwiHOPDataCoordinator(DataUpdateCoordinator[Hop]):
                    )

                    self.hop_intervals = hop_intervals
                return await self._ek_api.get_hop()
                return await self.ek_api.get_hop()
        except AuthException as auth_err:
            raise ConfigEntryAuthFailed from auth_err
        except ApiException as api_err:

@ -7,5 +7,5 @@
  "documentation": "https://www.home-assistant.io/integrations/electric_kiwi",
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "requirements": ["electrickiwi-api==0.8.5"]
  "requirements": ["electrickiwi-api==0.9.12"]
}

@ -53,8 +53,8 @@ class ElectricKiwiSelectHOPEntity(
        """Initialise the HOP selection entity."""
        super().__init__(coordinator)
        self._attr_unique_id = (
            f"{coordinator._ek_api.customer_number}"  # noqa: SLF001
            f"_{coordinator._ek_api.connection_id}_{description.key}"  # noqa: SLF001
            f"{coordinator.ek_api.customer_number}"
            f"_{coordinator.ek_api.electricity.identifier}_{description.key}"
        )
        self.entity_description = description
        self.values_dict = coordinator.get_hop_options()

@ -6,7 +6,7 @@ from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime, timedelta

from electrickiwi_api.model import AccountBalance, Hop
from electrickiwi_api.model import AccountSummary, Hop

from homeassistant.components.sensor import (
    SensorDeviceClass,
@ -39,7 +39,15 @@ ATTR_HOP_PERCENTAGE = "hop_percentage"
class ElectricKiwiAccountSensorEntityDescription(SensorEntityDescription):
    """Describes Electric Kiwi sensor entity."""

    value_func: Callable[[AccountBalance], float | datetime]
    value_func: Callable[[AccountSummary], float | datetime]


def _get_hop_percentage(account_balance: AccountSummary) -> float:
    """Return the hop percentage from account summary."""
    if power := account_balance.services.get("power"):
        if connection := power.connections[0]:
            return float(connection.hop_percentage)
    return 0.0


ACCOUNT_SENSOR_TYPES: tuple[ElectricKiwiAccountSensorEntityDescription, ...] = (
@ -72,9 +80,7 @@ ACCOUNT_SENSOR_TYPES: tuple[ElectricKiwiAccountSensorEntityDescription, ...] = (
        translation_key="hop_power_savings",
        native_unit_of_measurement=PERCENTAGE,
        state_class=SensorStateClass.MEASUREMENT,
        value_func=lambda account_balance: float(
            account_balance.connections[0].hop_percentage
        ),
        value_func=_get_hop_percentage,
    ),
)

@ -165,8 +171,8 @@ class ElectricKiwiAccountEntity(
        super().__init__(coordinator)

        self._attr_unique_id = (
            f"{coordinator._ek_api.customer_number}"  # noqa: SLF001
            f"_{coordinator._ek_api.connection_id}_{description.key}"  # noqa: SLF001
            f"{coordinator.ek_api.customer_number}"
            f"_{coordinator.ek_api.electricity.identifier}_{description.key}"
        )
        self.entity_description = description

@ -194,8 +200,8 @@ class ElectricKiwiHOPEntity(
        super().__init__(coordinator)

        self._attr_unique_id = (
            f"{coordinator._ek_api.customer_number}"  # noqa: SLF001
            f"_{coordinator._ek_api.connection_id}_{description.key}"  # noqa: SLF001
            f"{coordinator.ek_api.customer_number}"
            f"_{coordinator.ek_api.electricity.identifier}_{description.key}"
        )
        self.entity_description = description


@ -21,7 +21,8 @@
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
      "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]",
      "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]",
      "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]"
      "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]",
      "connection_error": "[%key:common::config_flow::error::cannot_connect%]"
    },
    "create_entry": {
      "default": "[%key:common::config_flow::create_entry::authenticated%]"

@ -22,5 +22,5 @@
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["eq3btsmart"],
  "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.6.0"]
  "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.7.1"]
}

@ -18,7 +18,7 @@
  "requirements": [
    "aioesphomeapi==29.0.0",
    "esphome-dashboard-api==1.2.3",
    "bleak-esphome==2.6.0"
    "bleak-esphome==2.7.1"
  ],
  "zeroconf": ["_esphomelib._tcp.local."]
}

@ -1,33 +1,27 @@
"""The FAA Delays integration."""

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ID, Platform
from homeassistant.core import HomeAssistant

from .const import DOMAIN
from .coordinator import FAADataUpdateCoordinator
from .coordinator import FAAConfigEntry, FAADataUpdateCoordinator

PLATFORMS = [Platform.BINARY_SENSOR]


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: FAAConfigEntry) -> bool:
    """Set up FAA Delays from a config entry."""
    code = entry.data[CONF_ID]

    coordinator = FAADataUpdateCoordinator(hass, code)
    coordinator = FAADataUpdateCoordinator(hass, entry, code)
    await coordinator.async_config_entry_first_refresh()

    hass.data.setdefault(DOMAIN, {})
    hass.data[DOMAIN][entry.entry_id] = coordinator
    entry.runtime_data = coordinator

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

    return True


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: FAAConfigEntry) -> bool:
    """Unload a config entry."""
    unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
    if unload_ok:
        hass.data[DOMAIN].pop(entry.entry_id)
    return unload_ok
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
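
Note: the FAA Delays and Fast.com refactors in the surrounding hunks follow the same typed-ConfigEntry pattern: a type alias declares what entry.runtime_data holds, replacing hass.data[DOMAIN] bookkeeping. A generic sketch of the idiom (MyCoordinator and MyConfigEntry are placeholder names, not from either integration):

    from homeassistant.config_entries import ConfigEntry
    from homeassistant.core import HomeAssistant

    class MyCoordinator:
        """Placeholder for a DataUpdateCoordinator subclass."""
        def __init__(self, hass: HomeAssistant, entry: "MyConfigEntry") -> None:
            self.hass = hass

    type MyConfigEntry = ConfigEntry[MyCoordinator]

    async def async_setup_entry(hass: HomeAssistant, entry: MyConfigEntry) -> bool:
        entry.runtime_data = MyCoordinator(hass, entry)  # typed, no manual cleanup
        return True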

@ -12,13 +12,12 @@ from homeassistant.components.binary_sensor import (
    BinarySensorEntity,
    BinarySensorEntityDescription,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from . import FAADataUpdateCoordinator
from . import FAAConfigEntry, FAADataUpdateCoordinator
from .const import DOMAIN


@ -84,10 +83,10 @@ FAA_BINARY_SENSORS: tuple[FaaDelaysBinarySensorEntityDescription, ...] = (


async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
    hass: HomeAssistant, entry: FAAConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
    """Set up a FAA sensor based on a config entry."""
    coordinator = hass.data[DOMAIN][entry.entry_id]
    coordinator = entry.runtime_data

    entities = [
        FAABinarySensor(coordinator, entry.entry_id, description)

@ -7,6 +7,7 @@ import logging
from aiohttp import ClientConnectionError
from faadelays import Airport

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import aiohttp_client
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@ -15,14 +16,20 @@ from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

type FAAConfigEntry = ConfigEntry[FAADataUpdateCoordinator]


class FAADataUpdateCoordinator(DataUpdateCoordinator[Airport]):
    """Class to manage fetching FAA API data from a single endpoint."""

    def __init__(self, hass: HomeAssistant, code: str) -> None:
    def __init__(self, hass: HomeAssistant, entry: FAAConfigEntry, code: str) -> None:
        """Initialize the coordinator."""
        super().__init__(
            hass, _LOGGER, name=DOMAIN, update_interval=timedelta(minutes=1)
            hass,
            _LOGGER,
            config_entry=entry,
            name=DOMAIN,
            update_interval=timedelta(minutes=1),
        )
        self.session = aiohttp_client.async_get_clientsession(hass)
        self.data = Airport(code, self.session)

@ -4,20 +4,20 @@ from __future__ import annotations

import logging

from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant
from homeassistant.helpers.start import async_at_started

from .const import DOMAIN, PLATFORMS
from .coordinator import FastdotcomDataUpdateCoordinator
from .const import PLATFORMS
from .coordinator import FastdotcomConfigEntry, FastdotcomDataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: FastdotcomConfigEntry) -> bool:
    """Set up Fast.com from a config entry."""
    coordinator = FastdotcomDataUpdateCoordinator(hass)
    hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
    coordinator = FastdotcomDataUpdateCoordinator(hass, entry)
    entry.runtime_data = coordinator

    await hass.config_entries.async_forward_entry_setups(
        entry,
@ -36,8 +36,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    return True


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: FastdotcomConfigEntry) -> bool:
    """Unload Fast.com config entry."""
    if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
        hass.data[DOMAIN].pop(entry.entry_id)
    return unload_ok
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

@ -6,20 +6,24 @@ from datetime import timedelta

from fastdotcom import fast_com

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DEFAULT_INTERVAL, DOMAIN, LOGGER

type FastdotcomConfigEntry = ConfigEntry[FastdotcomDataUpdateCoordinator]


class FastdotcomDataUpdateCoordinator(DataUpdateCoordinator[float]):
    """Class to manage fetching Fast.com data API."""

    def __init__(self, hass: HomeAssistant) -> None:
    def __init__(self, hass: HomeAssistant, entry: FastdotcomConfigEntry) -> None:
        """Initialize the coordinator for Fast.com."""
        super().__init__(
            hass,
            LOGGER,
            config_entry=entry,
            name=DOMAIN,
            update_interval=timedelta(hours=DEFAULT_INTERVAL),
        )

@ -4,21 +4,13 @@ from __future__ import annotations

from typing import Any

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

from .const import DOMAIN
from .coordinator import FastdotcomDataUpdateCoordinator
from .coordinator import FastdotcomConfigEntry


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, config_entry: ConfigEntry
    hass: HomeAssistant, config_entry: FastdotcomConfigEntry
) -> dict[str, Any]:
    """Return diagnostics for the config entry."""
    coordinator: FastdotcomDataUpdateCoordinator = hass.data[DOMAIN][
        config_entry.entry_id
    ]

    return {
        "coordinator_data": coordinator.data,
    }
    return {"coordinator_data": config_entry.runtime_data.data}

@ -7,7 +7,6 @@ from homeassistant.components.sensor import (
    SensorEntity,
    SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import UnitOfDataRate
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
@ -15,17 +14,16 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import FastdotcomDataUpdateCoordinator
from .coordinator import FastdotcomConfigEntry, FastdotcomDataUpdateCoordinator


async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    entry: FastdotcomConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the Fast.com sensor."""
    coordinator: FastdotcomDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
    async_add_entities([SpeedtestSensor(entry.entry_id, coordinator)])
    async_add_entities([SpeedtestSensor(entry.entry_id, entry.runtime_data)])


class SpeedtestSensor(CoordinatorEntity[FastdotcomDataUpdateCoordinator], SensorEntity):

@ -21,9 +21,11 @@ from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import raise_if_invalid_filename
from homeassistant.util.hass_dict import HassKey
from homeassistant.util.ulid import ulid_hex

DOMAIN = "file_upload"
_DATA: HassKey[FileUploadData] = HassKey(DOMAIN)

ONE_MEGABYTE = 1024 * 1024
MAX_SIZE = 100 * ONE_MEGABYTE
@ -41,7 +43,7 @@ def process_uploaded_file(hass: HomeAssistant, file_id: str) -> Iterator[Path]:
    if DOMAIN not in hass.data:
        raise ValueError("File does not exist")

    file_upload_data: FileUploadData = hass.data[DOMAIN]
    file_upload_data = hass.data[_DATA]

    if not file_upload_data.has_file(file_id):
        raise ValueError("File does not exist")

@ -149,10 +151,10 @@ class FileUploadView(HomeAssistantView):
        hass = request.app[KEY_HASS]
        file_id = ulid_hex()

        if DOMAIN not in hass.data:
            hass.data[DOMAIN] = await FileUploadData.create(hass)
        if _DATA not in hass.data:
            hass.data[_DATA] = await FileUploadData.create(hass)

        file_upload_data: FileUploadData = hass.data[DOMAIN]
        file_upload_data = hass.data[_DATA]
        file_dir = file_upload_data.file_dir(file_id)
        queue: SimpleQueue[tuple[bytes, asyncio.Future[None] | None] | None] = (
            SimpleQueue()
@ -206,7 +208,7 @@ class FileUploadView(HomeAssistantView):
            raise web.HTTPNotFound

        file_id = data["file_id"]
        file_upload_data: FileUploadData = hass.data[DOMAIN]
        file_upload_data = hass.data[_DATA]

        if file_upload_data.files.pop(file_id, None) is None:
            raise web.HTTPNotFound

@ -3,29 +3,16 @@
from __future__ import annotations

from datetime import timedelta
import logging

from pyfireservicerota import (
    ExpiredTokenError,
    FireServiceRota,
    FireServiceRotaIncidents,
    InvalidAuthError,
    InvalidTokenError,
)

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_TOKEN, CONF_URL, CONF_USERNAME, Platform
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.dispatcher import dispatcher_send
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

from .const import DATA_CLIENT, DATA_COORDINATOR, DOMAIN, WSS_BWRURL
from .const import DATA_CLIENT, DATA_COORDINATOR, DOMAIN
from .coordinator import FireServiceRotaClient, FireServiceUpdateCoordinator

MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)

_LOGGER = logging.getLogger(__name__)

PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.SWITCH]


@ -40,17 +27,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    if client.token_refresh_failure:
        return False

    async def async_update_data():
        return await client.async_update()

    coordinator = DataUpdateCoordinator(
        hass,
        _LOGGER,
        config_entry=entry,
        name="duty binary sensor",
        update_method=async_update_data,
        update_interval=MIN_TIME_BETWEEN_UPDATES,
    )
    coordinator = FireServiceUpdateCoordinator(hass, client, entry)

    await coordinator.async_config_entry_first_refresh()

@ -74,165 +51,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    if unload_ok:
        del hass.data[DOMAIN][entry.entry_id]
    return unload_ok


class FireServiceRotaOauth:
    """Handle authentication tokens."""

    def __init__(self, hass, entry, fsr):
        """Initialize the oauth object."""
        self._hass = hass
        self._entry = entry

        self._url = entry.data[CONF_URL]
        self._username = entry.data[CONF_USERNAME]
        self._fsr = fsr

    async def async_refresh_tokens(self) -> bool:
        """Refresh tokens and update config entry."""
        _LOGGER.debug("Refreshing authentication tokens after expiration")

        try:
            token_info = await self._hass.async_add_executor_job(
                self._fsr.refresh_tokens
            )

        except (InvalidAuthError, InvalidTokenError) as err:
            raise ConfigEntryAuthFailed(
                "Error refreshing tokens, triggered reauth workflow"
            ) from err

        _LOGGER.debug("Saving new tokens in config entry")
        self._hass.config_entries.async_update_entry(
            self._entry,
            data={
                "auth_implementation": DOMAIN,
                CONF_URL: self._url,
                CONF_USERNAME: self._username,
                CONF_TOKEN: token_info,
            },
        )

        return True


class FireServiceRotaWebSocket:
    """Define a FireServiceRota websocket manager object."""

    def __init__(self, hass, entry):
        """Initialize the websocket object."""
        self._hass = hass
        self._entry = entry

        self._fsr_incidents = FireServiceRotaIncidents(on_incident=self._on_incident)
        self.incident_data = None

    def _construct_url(self) -> str:
        """Return URL with latest access token."""
        return WSS_BWRURL.format(
            self._entry.data[CONF_URL], self._entry.data[CONF_TOKEN]["access_token"]
        )

    def _on_incident(self, data) -> None:
        """Received new incident, update data."""
        _LOGGER.debug("Received new incident via websocket: %s", data)
        self.incident_data = data
        dispatcher_send(self._hass, f"{DOMAIN}_{self._entry.entry_id}_update")

    def start_listener(self) -> None:
        """Start the websocket listener."""
        _LOGGER.debug("Starting incidents listener")
        self._fsr_incidents.start(self._construct_url())

    def stop_listener(self) -> None:
        """Stop the websocket listener."""
        _LOGGER.debug("Stopping incidents listener")
        self._fsr_incidents.stop()


class FireServiceRotaClient:
    """Getting the latest data from fireservicerota."""

    def __init__(self, hass, entry):
        """Initialize the data object."""
        self._hass = hass
        self._entry = entry

        self._url = entry.data[CONF_URL]
        self._tokens = entry.data[CONF_TOKEN]

        self.entry_id = entry.entry_id
        self.unique_id = entry.unique_id

        self.token_refresh_failure = False
        self.incident_id = None
        self.on_duty = False

        self.fsr = FireServiceRota(base_url=self._url, token_info=self._tokens)

        self.oauth = FireServiceRotaOauth(
            self._hass,
            self._entry,
            self.fsr,
        )

        self.websocket = FireServiceRotaWebSocket(self._hass, self._entry)

    async def setup(self) -> None:
        """Set up the data client."""
        await self._hass.async_add_executor_job(self.websocket.start_listener)

    async def update_call(self, func, *args):
        """Perform update call and return data."""
        if self.token_refresh_failure:
            return None

        try:
            return await self._hass.async_add_executor_job(func, *args)
        except (ExpiredTokenError, InvalidTokenError):
            await self._hass.async_add_executor_job(self.websocket.stop_listener)
            self.token_refresh_failure = True

            if await self.oauth.async_refresh_tokens():
                self.token_refresh_failure = False
                await self._hass.async_add_executor_job(self.websocket.start_listener)

                return await self._hass.async_add_executor_job(func, *args)

    async def async_update(self) -> dict | None:
        """Get the latest availability data."""
        data = await self.update_call(
            self.fsr.get_availability, str(self._hass.config.time_zone)
        )

        if not data:
            return None

        self.on_duty = bool(data.get("available"))

        _LOGGER.debug("Updated availability data: %s", data)
        return data

    async def async_response_update(self) -> dict | None:
        """Get the latest incident response data."""

        if not self.incident_id:
            return None

        _LOGGER.debug("Updating response data for incident id %s", self.incident_id)

        return await self.update_call(self.fsr.get_incident_response, self.incident_id)

    async def async_set_response(self, value) -> None:
        """Set incident response status."""

        if not self.incident_id:
            return

        _LOGGER.debug(
            "Setting incident response for incident id '%s' to state '%s'",
            self.incident_id,
            value,
        )

        await self.update_call(self.fsr.set_incident_response, self.incident_id, value)

@ -8,13 +8,10 @@ from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import (
    CoordinatorEntity,
    DataUpdateCoordinator,
)
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from . import FireServiceRotaClient
from .const import DATA_CLIENT, DATA_COORDINATOR, DOMAIN as FIRESERVICEROTA_DOMAIN
from .coordinator import FireServiceRotaClient, FireServiceUpdateCoordinator


async def async_setup_entry(
@ -26,14 +23,16 @@ async def async_setup_entry(
        DATA_CLIENT
    ]

    coordinator: DataUpdateCoordinator = hass.data[FIRESERVICEROTA_DOMAIN][
    coordinator: FireServiceUpdateCoordinator = hass.data[FIRESERVICEROTA_DOMAIN][
        entry.entry_id
    ][DATA_COORDINATOR]

    async_add_entities([ResponseBinarySensor(coordinator, client, entry)])


class ResponseBinarySensor(CoordinatorEntity, BinarySensorEntity):
class ResponseBinarySensor(
    CoordinatorEntity[FireServiceUpdateCoordinator], BinarySensorEntity
):
    """Representation of an FireServiceRota sensor."""

    _attr_has_entity_name = True
@ -41,7 +40,7 @@ class ResponseBinarySensor(CoordinatorEntity, BinarySensorEntity):

    def __init__(
        self,
        coordinator: DataUpdateCoordinator,
        coordinator: FireServiceUpdateCoordinator,
        client: FireServiceRotaClient,
        entry: ConfigEntry,
    ) -> None:

213
homeassistant/components/fireservicerota/coordinator.py
Normal file
@@ -0,0 +1,213 @@
+"""The FireServiceRota integration."""
+
+from __future__ import annotations
+
+from datetime import timedelta
+import logging
+
+from pyfireservicerota import (
+    ExpiredTokenError,
+    FireServiceRota,
+    FireServiceRotaIncidents,
+    InvalidAuthError,
+    InvalidTokenError,
+)
+
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.const import CONF_TOKEN, CONF_URL, CONF_USERNAME, Platform
+from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import ConfigEntryAuthFailed
+from homeassistant.helpers.dispatcher import dispatcher_send
+from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
+
+from .const import DOMAIN, WSS_BWRURL
+
+MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
+
+_LOGGER = logging.getLogger(__name__)
+
+PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.SWITCH]
+
+
+class FireServiceUpdateCoordinator(DataUpdateCoordinator[dict | None]):
+    """Data update coordinator for FireServiceRota."""
+
+    def __init__(
+        self, hass: HomeAssistant, client: FireServiceRotaClient, entry: ConfigEntry
+    ) -> None:
+        """Initialize the FireServiceRota DataUpdateCoordinator."""
+        super().__init__(
+            hass,
+            _LOGGER,
+            name="duty binary sensor",
+            config_entry=entry,
+            update_interval=MIN_TIME_BETWEEN_UPDATES,
+        )
+
+        self.client = client
+
+    async def _async_update_data(self) -> dict | None:
+        """Get the latest availability data."""
+        return await self.client.async_update()
+
+
+class FireServiceRotaOauth:
+    """Handle authentication tokens."""
+
+    def __init__(self, hass, entry, fsr):
+        """Initialize the oauth object."""
+        self._hass = hass
+        self._entry = entry
+
+        self._url = entry.data[CONF_URL]
+        self._username = entry.data[CONF_USERNAME]
+        self._fsr = fsr
+
+    async def async_refresh_tokens(self) -> bool:
+        """Refresh tokens and update config entry."""
+        _LOGGER.debug("Refreshing authentication tokens after expiration")
+
+        try:
+            token_info = await self._hass.async_add_executor_job(
+                self._fsr.refresh_tokens
+            )
+
+        except (InvalidAuthError, InvalidTokenError) as err:
+            raise ConfigEntryAuthFailed(
+                "Error refreshing tokens, triggered reauth workflow"
+            ) from err
+
+        _LOGGER.debug("Saving new tokens in config entry")
+        self._hass.config_entries.async_update_entry(
+            self._entry,
+            data={
+                "auth_implementation": DOMAIN,
+                CONF_URL: self._url,
+                CONF_USERNAME: self._username,
+                CONF_TOKEN: token_info,
+            },
+        )
+
+        return True
+
+
+class FireServiceRotaWebSocket:
+    """Define a FireServiceRota websocket manager object."""
+
+    def __init__(self, hass, entry):
+        """Initialize the websocket object."""
+        self._hass = hass
+        self._entry = entry
+
+        self._fsr_incidents = FireServiceRotaIncidents(on_incident=self._on_incident)
+        self.incident_data = None
+
+    def _construct_url(self) -> str:
+        """Return URL with latest access token."""
+        return WSS_BWRURL.format(
+            self._entry.data[CONF_URL], self._entry.data[CONF_TOKEN]["access_token"]
+        )
+
+    def _on_incident(self, data) -> None:
+        """Received new incident, update data."""
+        _LOGGER.debug("Received new incident via websocket: %s", data)
+        self.incident_data = data
+        dispatcher_send(self._hass, f"{DOMAIN}_{self._entry.entry_id}_update")
+
+    def start_listener(self) -> None:
+        """Start the websocket listener."""
+        _LOGGER.debug("Starting incidents listener")
+        self._fsr_incidents.start(self._construct_url())
+
+    def stop_listener(self) -> None:
+        """Stop the websocket listener."""
+        _LOGGER.debug("Stopping incidents listener")
+        self._fsr_incidents.stop()
+
+
+class FireServiceRotaClient:
+    """Getting the latest data from fireservicerota."""
+
+    def __init__(self, hass, entry):
+        """Initialize the data object."""
+        self._hass = hass
+        self._entry = entry
+
+        self._url = entry.data[CONF_URL]
+        self._tokens = entry.data[CONF_TOKEN]
+
+        self.entry_id = entry.entry_id
+        self.unique_id = entry.unique_id
+
+        self.token_refresh_failure = False
+        self.incident_id = None
+        self.on_duty = False
+
+        self.fsr = FireServiceRota(base_url=self._url, token_info=self._tokens)
+
+        self.oauth = FireServiceRotaOauth(
+            self._hass,
+            self._entry,
+            self.fsr,
+        )
+
+        self.websocket = FireServiceRotaWebSocket(self._hass, self._entry)
+
+    async def setup(self) -> None:
+        """Set up the data client."""
+        await self._hass.async_add_executor_job(self.websocket.start_listener)
+
+    async def update_call(self, func, *args):
+        """Perform update call and return data."""
+        if self.token_refresh_failure:
+            return None
+
+        try:
+            return await self._hass.async_add_executor_job(func, *args)
+        except (ExpiredTokenError, InvalidTokenError):
+            await self._hass.async_add_executor_job(self.websocket.stop_listener)
+            self.token_refresh_failure = True
+
+            if await self.oauth.async_refresh_tokens():
+                self.token_refresh_failure = False
+                await self._hass.async_add_executor_job(self.websocket.start_listener)
+
+                return await self._hass.async_add_executor_job(func, *args)
+
+    async def async_update(self) -> dict | None:
+        """Get the latest availability data."""
+        data = await self.update_call(
+            self.fsr.get_availability, str(self._hass.config.time_zone)
+        )
+
+        if not data:
+            return None
+
+        self.on_duty = bool(data.get("available"))
+
+        _LOGGER.debug("Updated availability data: %s", data)
+        return data
+
+    async def async_response_update(self) -> dict | None:
+        """Get the latest incident response data."""
+
+        if not self.incident_id:
+            return None
+
+        _LOGGER.debug("Updating response data for incident id %s", self.incident_id)
+
+        return await self.update_call(self.fsr.get_incident_response, self.incident_id)
+
+    async def async_set_response(self, value) -> None:
+        """Set incident response status."""
+
+        if not self.incident_id:
+            return
+
+        _LOGGER.debug(
+            "Setting incident response for incident id '%s' to state '%s'",
+            self.incident_id,
+            value,
+        )
+
+        await self.update_call(self.fsr.set_incident_response, self.incident_id, value)
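
The most intricate part of the new coordinator module is FireServiceRotaClient.update_call: on an expired token it stops the websocket, refreshes the tokens once, and retries the call a single time. A stripped-down, runnable sketch of that control flow, with an invented FakeApi standing in for pyfireservicerota:

    import asyncio


    class ExpiredTokenError(Exception):
        pass


    class FakeApi:
        """Invented stand-in: fails once with an expired token, then succeeds."""

        def __init__(self) -> None:
            self.expired = True

        def get_availability(self) -> dict:
            if self.expired:
                raise ExpiredTokenError
            return {"available": True}

        def refresh_tokens(self) -> bool:
            self.expired = False
            return True


    async def update_call(api: FakeApi):
        """Mirror the coordinator's call -> refresh -> retry-once flow."""
        try:
            return api.get_availability()
        except ExpiredTokenError:
            if api.refresh_tokens():
                return api.get_availability()
        return None


    print(asyncio.run(update_call(FakeApi())))  # {'available': True}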

homeassistant/components/frontend/manifest.json
@@ -21,5 +21,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20250131.0"]
+  "requirements": ["home-assistant-frontend==20250205.0"]
 }

homeassistant/components/frontier_silicon/media_player.py
@@ -244,7 +244,7 @@ class AFSAPIDevice(MediaPlayerEntity):
         """Send volume up command."""
         volume = await self.fs_device.get_volume()
         volume = int(volume or 0) + 1
-        await self.fs_device.set_volume(min(volume, self._max_volume))
+        await self.fs_device.set_volume(min(volume, self._max_volume or 1))

     async def async_volume_down(self) -> None:
         """Send volume down command."""

homeassistant/components/generic/strings.json
@@ -28,14 +28,14 @@
     "user": {
       "description": "Enter the settings to connect to the camera.",
       "data": {
-        "still_image_url": "Still Image URL (e.g. http://...)",
-        "stream_source": "Stream Source URL (e.g. rtsp://...)",
+        "still_image_url": "Still image URL (e.g. http://...)",
+        "stream_source": "Stream source URL (e.g. rtsp://...)",
         "rtsp_transport": "RTSP transport protocol",
         "authentication": "Authentication",
-        "limit_refetch_to_url_change": "Limit refetch to url change",
+        "limit_refetch_to_url_change": "Limit refetch to URL change",
         "password": "[%key:common::config_flow::data::password%]",
         "username": "[%key:common::config_flow::data::username%]",
-        "framerate": "Frame Rate (Hz)",
+        "framerate": "Frame rate (Hz)",
         "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
       }
     },

homeassistant/components/gogogate2/manifest.json
@@ -14,5 +14,5 @@
   },
   "iot_class": "local_polling",
   "loggers": ["ismartgate"],
-  "requirements": ["ismartgate==5.0.1"]
+  "requirements": ["ismartgate==5.0.2"]
 }

homeassistant/components/google_generative_ai_conversation/const.py
@@ -8,7 +8,7 @@ CONF_PROMPT = "prompt"

 CONF_RECOMMENDED = "recommended"
 CONF_CHAT_MODEL = "chat_model"
-RECOMMENDED_CHAT_MODEL = "models/gemini-1.5-flash-latest"
+RECOMMENDED_CHAT_MODEL = "models/gemini-2.0-flash"
 CONF_TEMPERATURE = "temperature"
 RECOMMENDED_TEMPERATURE = 1.0
 CONF_TOP_P = "top_p"

homeassistant/components/google_generative_ai_conversation/conversation.py
@@ -4,7 +4,7 @@ from __future__ import annotations

 import codecs
 from collections.abc import Callable
-from typing import Any, Literal
+from typing import Any, Literal, cast

 from google.api_core.exceptions import GoogleAPIError
 import google.generativeai as genai
@@ -149,15 +149,53 @@ def _escape_decode(value: Any) -> Any:
     return value


-def _chat_message_convert(
-    message: conversation.Content | conversation.NativeContent[genai_types.ContentDict],
-) -> genai_types.ContentDict:
-    """Convert any native chat message for this agent to the native format."""
-    if message.role == "native":
-        return message.content
+def _create_google_tool_response_content(
+    content: list[conversation.ToolResultContent],
+) -> protos.Content:
+    """Create a Google tool response content."""
+    return protos.Content(
+        parts=[
+            protos.Part(
+                function_response=protos.FunctionResponse(
+                    name=tool_result.tool_name, response=tool_result.tool_result
+                )
+            )
+            for tool_result in content
+        ]
+    )

-    role = "model" if message.role == "assistant" else message.role
-    return {"role": role, "parts": message.content}

+def _convert_content(
+    content: conversation.UserContent
+    | conversation.AssistantContent
+    | conversation.SystemContent,
+) -> genai_types.ContentDict:
+    """Convert HA content to Google content."""
+    if content.role != "assistant" or not content.tool_calls:  # type: ignore[union-attr]
+        role = "model" if content.role == "assistant" else content.role
+        return {"role": role, "parts": content.content}
+
+    # Handle the Assistant content with tool calls.
+    assert type(content) is conversation.AssistantContent
+    parts = []
+
+    if content.content:
+        parts.append(protos.Part(text=content.content))
+
+    if content.tool_calls:
+        parts.extend(
+            [
+                protos.Part(
+                    function_call=protos.FunctionCall(
+                        name=tool_call.tool_name,
+                        args=_escape_decode(tool_call.tool_args),
+                    )
+                )
+                for tool_call in content.tool_calls
+            ]
+        )
+
+    return protos.Content({"role": "model", "parts": parts})


 class GoogleGenerativeAIConversationEntity(
@@ -220,7 +258,7 @@ class GoogleGenerativeAIConversationEntity(
     async def _async_handle_message(
         self,
         user_input: conversation.ConversationInput,
-        session: conversation.ChatLog[genai_types.ContentDict],
+        chat_log: conversation.ChatLog,
     ) -> conversation.ConversationResult:
         """Call the API."""

@@ -228,7 +266,7 @@ class GoogleGenerativeAIConversationEntity(
         options = self.entry.options

         try:
-            await session.async_update_llm_data(
+            await chat_log.async_update_llm_data(
                 DOMAIN,
                 user_input,
                 options.get(CONF_LLM_HASS_API),
@@ -238,10 +276,10 @@ class GoogleGenerativeAIConversationEntity(
             return err.as_conversation_result()

         tools: list[dict[str, Any]] | None = None
-        if session.llm_api:
+        if chat_log.llm_api:
             tools = [
-                _format_tool(tool, session.llm_api.custom_serializer)
-                for tool in session.llm_api.tools
+                _format_tool(tool, chat_log.llm_api.custom_serializer)
+                for tool in chat_log.llm_api.tools
             ]

         model_name = self.entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
@@ -252,9 +290,36 @@ class GoogleGenerativeAIConversationEntity(
             "gemini-1.0" not in model_name and "gemini-pro" not in model_name
         )

-        prompt, *messages = [
-            _chat_message_convert(message) for message in session.async_get_messages()
-        ]
+        prompt = chat_log.content[0].content  # type: ignore[union-attr]
+        messages: list[genai_types.ContentDict] = []
+
+        # Google groups tool results, we do not. Group them before sending.
+        tool_results: list[conversation.ToolResultContent] = []
+
+        for chat_content in chat_log.content[1:]:
+            if chat_content.role == "tool_result":
+                # mypy doesn't like picking a type based on checking shared property 'role'
+                tool_results.append(cast(conversation.ToolResultContent, chat_content))
+                continue
+
+            if tool_results:
+                messages.append(_create_google_tool_response_content(tool_results))
+                tool_results.clear()
+
+            messages.append(
+                _convert_content(
+                    cast(
+                        conversation.UserContent
+                        | conversation.SystemContent
+                        | conversation.AssistantContent,
+                        chat_content,
+                    )
+                )
+            )
+
+        if tool_results:
+            messages.append(_create_google_tool_response_content(tool_results))

         model = genai.GenerativeModel(
             model_name=model_name,
             generation_config={
@@ -282,12 +347,12 @@ class GoogleGenerativeAIConversationEntity(
                 ),
             },
             tools=tools or None,
-            system_instruction=prompt["parts"] if supports_system_instruction else None,
+            system_instruction=prompt if supports_system_instruction else None,
         )

         if not supports_system_instruction:
             messages = [
-                {"role": "user", "parts": prompt["parts"]},
+                {"role": "user", "parts": prompt},
                 {"role": "model", "parts": "Ok"},
                 *messages,
             ]
@@ -325,50 +390,40 @@ class GoogleGenerativeAIConversationEntity(
             content = " ".join(
                 [part.text.strip() for part in chat_response.parts if part.text]
             )
-            if content:
-                session.async_add_message(
-                    conversation.Content(
-                        role="assistant",
-                        agent_id=user_input.agent_id,
-                        content=content,
-                    )
-                )

-            function_calls = [
-                part.function_call for part in chat_response.parts if part.function_call
-            ]
-
-            if not function_calls or not session.llm_api:
-                break
-
-            tool_responses = []
-            for function_call in function_calls:
-                tool_call = MessageToDict(function_call._pb)  # noqa: SLF001
+            tool_calls = []
+            for part in chat_response.parts:
+                if not part.function_call:
+                    continue
+                tool_call = MessageToDict(part.function_call._pb)  # noqa: SLF001
                 tool_name = tool_call["name"]
                 tool_args = _escape_decode(tool_call["args"])
-                tool_input = llm.ToolInput(tool_name=tool_name, tool_args=tool_args)
-                function_response = await session.async_call_tool(tool_input)
-                tool_responses.append(
-                    protos.Part(
-                        function_response=protos.FunctionResponse(
-                            name=tool_name, response=function_response
-                        )
-                    )
+                tool_calls.append(
+                    llm.ToolInput(tool_name=tool_name, tool_args=tool_args)
                 )
-            chat_request = protos.Content(parts=tool_responses)
-            session.async_add_message(
-                conversation.NativeContent(
-                    agent_id=user_input.agent_id,
-                    content=chat_request,
-                )
+
+            chat_request = _create_google_tool_response_content(
+                [
+                    tool_response
+                    async for tool_response in chat_log.async_add_assistant_content(
+                        conversation.AssistantContent(
+                            agent_id=user_input.agent_id,
+                            content=content,
+                            tool_calls=tool_calls or None,
+                        )
+                    )
+                ]
             )

+            if not tool_calls:
+                break
+
         response = intent.IntentResponse(language=user_input.language)
         response.async_set_speech(
             " ".join([part.text.strip() for part in chat_response.parts if part.text])
         )
         return conversation.ConversationResult(
-            response=response, conversation_id=session.conversation_id
+            response=response, conversation_id=chat_log.conversation_id
         )

     async def _async_entry_update_listener(
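
The rewritten message loop buffers consecutive tool_result items and flushes them as a single grouped message, since the Google API expects adjacent function responses bundled into one Content. The grouping logic in isolation, with plain tuples standing in for the Home Assistant content classes:

    messages: list = []
    tool_results: list = []

    chat_content = [
        ("user", "turn on the light"),
        ("assistant", "calling tool"),
        ("tool_result", "light.turn_on ok"),
        ("tool_result", "light.turn_on confirmed"),  # adjacent results get grouped
        ("assistant", "done"),
    ]

    for role, payload in chat_content:
        if role == "tool_result":
            tool_results.append(payload)
            continue
        if tool_results:
            # Flush the buffered results as one grouped message.
            messages.append(("tool_response_group", tuple(tool_results)))
            tool_results.clear()
        messages.append((role, payload))

    if tool_results:  # trailing results still need a final flush
        messages.append(("tool_response_group", tuple(tool_results)))

    print(messages)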

homeassistant/components/govee_ble/manifest.json
@@ -131,5 +131,5 @@
   "dependencies": ["bluetooth_adapters"],
   "documentation": "https://www.home-assistant.io/integrations/govee_ble",
   "iot_class": "local_push",
-  "requirements": ["govee-ble==0.42.0"]
+  "requirements": ["govee-ble==0.42.1"]
 }

homeassistant/components/govee_light_local/light.py
@@ -5,7 +5,7 @@ from __future__ import annotations

 import logging
 from typing import Any

-from govee_local_api import GoveeDevice, GoveeLightCapability
+from govee_local_api import GoveeDevice, GoveeLightFeatures

 from homeassistant.components.light import (
     ATTR_BRIGHTNESS,
@@ -71,13 +71,13 @@ class GoveeLight(CoordinatorEntity[GoveeLocalApiCoordinator], LightEntity):
         capabilities = device.capabilities
         color_modes = {ColorMode.ONOFF}
         if capabilities:
-            if GoveeLightCapability.COLOR_RGB in capabilities:
+            if GoveeLightFeatures.COLOR_RGB & capabilities.features:
                 color_modes.add(ColorMode.RGB)
-            if GoveeLightCapability.COLOR_KELVIN_TEMPERATURE in capabilities:
+            if GoveeLightFeatures.COLOR_KELVIN_TEMPERATURE & capabilities.features:
                 color_modes.add(ColorMode.COLOR_TEMP)
                 self._attr_max_color_temp_kelvin = 9000
                 self._attr_min_color_temp_kelvin = 2000
-            if GoveeLightCapability.BRIGHTNESS in capabilities:
+            if GoveeLightFeatures.BRIGHTNESS & capabilities.features:
                 color_modes.add(ColorMode.BRIGHTNESS)

         self._attr_supported_color_modes = filter_supported_color_modes(color_modes)
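
govee-local-api 2.0 turns the capability checks from enum membership into bit-flag tests, which is why the conditions above switch from `in` to `&`. A self-contained sketch of the difference; the flag names are copied from the diff, but the enum values below are invented for illustration:

    from enum import IntFlag


    class GoveeLightFeatures(IntFlag):  # re-declared here only for the sketch
        COLOR_RGB = 0x01
        COLOR_KELVIN_TEMPERATURE = 0x02
        BRIGHTNESS = 0x04


    features = GoveeLightFeatures.COLOR_RGB | GoveeLightFeatures.BRIGHTNESS

    # The old style checked membership in a collection; the new style tests bits:
    if GoveeLightFeatures.COLOR_RGB & features:
        print("supports RGB")
    if not GoveeLightFeatures.COLOR_KELVIN_TEMPERATURE & features:
        print("no color temperature")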

homeassistant/components/govee_light_local/manifest.json
@@ -6,5 +6,5 @@
   "dependencies": ["network"],
   "documentation": "https://www.home-assistant.io/integrations/govee_light_local",
   "iot_class": "local_push",
-  "requirements": ["govee-local-api==1.5.3"]
+  "requirements": ["govee-local-api==2.0.0"]
 }

homeassistant/components/gpsd/icons.json
@@ -16,6 +16,12 @@
     },
     "elevation": {
       "default": "mdi:arrow-up-down"
-    }
+    },
+    "total_satellites": {
+      "default": "mdi:satellite-variant"
+    },
+    "used_satellites": {
+      "default": "mdi:satellite-variant"
+    }
   }
 }

homeassistant/components/gpsd/sensor.py
@@ -14,6 +14,7 @@ from homeassistant.components.sensor import (
     SensorDeviceClass,
     SensorEntity,
     SensorEntityDescription,
+    SensorStateClass,
 )
 from homeassistant.const import (
     ATTR_LATITUDE,
@@ -39,12 +40,31 @@ ATTR_CLIMB = "climb"
 ATTR_ELEVATION = "elevation"
 ATTR_GPS_TIME = "gps_time"
 ATTR_SPEED = "speed"
+ATTR_TOTAL_SATELLITES = "total_satellites"
+ATTR_USED_SATELLITES = "used_satellites"

 DEFAULT_NAME = "GPS"

 _MODE_VALUES = {2: "2d_fix", 3: "3d_fix"}


+def count_total_satellites_fn(agps_thread: AGPS3mechanism) -> int | None:
+    """Count the number of total satellites."""
+    satellites = agps_thread.data_stream.satellites
+    return None if satellites == "n/a" else len(satellites)
+
+
+def count_used_satellites_fn(agps_thread: AGPS3mechanism) -> int | None:
+    """Count the number of used satellites."""
+    satellites = agps_thread.data_stream.satellites
+    if satellites == "n/a":
+        return None
+
+    return sum(
+        1 for sat in satellites if isinstance(sat, dict) and sat.get("used", False)
+    )
+
+
 @dataclass(frozen=True, kw_only=True)
 class GpsdSensorDescription(SensorEntityDescription):
     """Class describing GPSD sensor entities."""
@@ -116,6 +136,22 @@ SENSOR_TYPES: tuple[GpsdSensorDescription, ...] = (
         suggested_display_precision=2,
         entity_registry_enabled_default=False,
     ),
+    GpsdSensorDescription(
+        key=ATTR_TOTAL_SATELLITES,
+        translation_key=ATTR_TOTAL_SATELLITES,
+        entity_category=EntityCategory.DIAGNOSTIC,
+        state_class=SensorStateClass.MEASUREMENT,
+        value_fn=count_total_satellites_fn,
+        entity_registry_enabled_default=False,
+    ),
+    GpsdSensorDescription(
+        key=ATTR_USED_SATELLITES,
+        translation_key=ATTR_USED_SATELLITES,
+        entity_category=EntityCategory.DIAGNOSTIC,
+        state_class=SensorStateClass.MEASUREMENT,
+        value_fn=count_used_satellites_fn,
+        entity_registry_enabled_default=False,
+    ),
 )
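
Both satellite counters treat gpsd's "n/a" sentinel (reported until a fix exists) as unknown rather than zero, and only dict entries flagged as used count toward the used total. The same logic run against a hand-written sample (the sample values are invented; real data comes from the AGPS3mechanism stream):

    def count_used(satellites) -> int | None:
        if satellites == "n/a":  # gpsd sentinel before any fix is available
            return None
        return sum(
            1 for sat in satellites if isinstance(sat, dict) and sat.get("used", False)
        )


    sample = [
        {"PRN": 5, "used": True},
        {"PRN": 12, "used": False},
        {"PRN": 25, "used": True},
    ]

    print(count_used("n/a"))   # None
    print(count_used(sample))  # 2
    print(len(sample))         # total satellites: 3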

homeassistant/components/gpsd/strings.json
@@ -50,6 +50,14 @@
       },
       "mode": { "name": "[%key:common::config_flow::data::mode%]" }
-      }
+      },
+      "total_satellites": {
+        "name": "Total satellites",
+        "unit_of_measurement": "satellites"
+      },
+      "used_satellites": {
+        "name": "Used satellites",
+        "unit_of_measurement": "satellites"
+      }
     }
   }

homeassistant/components/hassio/backup.py
@@ -20,6 +20,7 @@ from aiohasupervisor.models import (
     backups as supervisor_backups,
     mounts as supervisor_mounts,
 )
+from aiohasupervisor.models.backups import LOCATION_CLOUD_BACKUP, LOCATION_LOCAL_STORAGE

 from homeassistant.components.backup import (
     DATA_MANAGER,
@@ -27,15 +28,19 @@ from homeassistant.components.backup import (
     AgentBackup,
     BackupAgent,
     BackupManagerError,
+    BackupNotFound,
     BackupReaderWriter,
     BackupReaderWriterError,
     CreateBackupEvent,
+    CreateBackupStage,
+    CreateBackupState,
     Folder,
     IdleEvent,
     IncorrectPasswordError,
     ManagerBackup,
     NewBackup,
     RestoreBackupEvent,
+    RestoreBackupStage,
     RestoreBackupState,
     WrittenBackup,
     async_get_manager as async_get_backup_manager,
@@ -47,12 +52,11 @@ from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.dispatcher import async_dispatcher_connect
 from homeassistant.util import dt as dt_util
+from homeassistant.util.enum import try_parse_enum

 from .const import DOMAIN, EVENT_SUPERVISOR_EVENT
 from .handler import get_supervisor_client

-LOCATION_CLOUD_BACKUP = ".cloud_backup"
-LOCATION_LOCAL = ".local"
 MOUNT_JOBS = ("mount_manager_create_mount", "mount_manager_remove_mount")
 RESTORE_JOB_ID_ENV = "SUPERVISOR_RESTORE_JOB_ID"
 # Set on backups automatically created when updating an addon
@@ -67,7 +71,9 @@ async def async_get_backup_agents(
     """Return the hassio backup agents."""
     client = get_supervisor_client(hass)
     mounts = await client.mounts.info()
-    agents: list[BackupAgent] = [SupervisorBackupAgent(hass, "local", None)]
+    agents: list[BackupAgent] = [
+        SupervisorBackupAgent(hass, "local", LOCATION_LOCAL_STORAGE)
+    ]
     for mount in mounts.mounts:
         if mount.usage is not supervisor_mounts.MountUsage.BACKUP:
             continue
@@ -107,7 +113,7 @@ def async_register_backup_agents_listener(


 def _backup_details_to_agent_backup(
-    details: supervisor_backups.BackupComplete, location: str | None
+    details: supervisor_backups.BackupComplete, location: str
 ) -> AgentBackup:
     """Convert a supervisor backup details object to an agent backup."""
     homeassistant_included = details.homeassistant is not None
@@ -120,7 +126,6 @@ def _backup_details_to_agent_backup(
         for addon in details.addons
     ]
     extra_metadata = details.extra or {}
-    location = location or LOCATION_LOCAL
     return AgentBackup(
         addons=addons,
         backup_id=details.slug,
@@ -143,7 +148,7 @@ class SupervisorBackupAgent(BackupAgent):

     domain = DOMAIN

-    def __init__(self, hass: HomeAssistant, name: str, location: str | None) -> None:
+    def __init__(self, hass: HomeAssistant, name: str, location: str) -> None:
         """Initialize the backup agent."""
         super().__init__()
         self._hass = hass
@@ -158,10 +163,15 @@ class SupervisorBackupAgent(BackupAgent):
         **kwargs: Any,
     ) -> AsyncIterator[bytes]:
         """Download a backup file."""
-        return await self._client.backups.download_backup(
-            backup_id,
-            options=supervisor_backups.DownloadBackupOptions(location=self.location),
-        )
+        try:
+            return await self._client.backups.download_backup(
+                backup_id,
+                options=supervisor_backups.DownloadBackupOptions(
+                    location=self.location
+                ),
+            )
+        except SupervisorNotFoundError as err:
+            raise BackupNotFound from err

     async def async_upload_backup(
         self,
@@ -196,7 +206,7 @@ class SupervisorBackupAgent(BackupAgent):
         backup_list = await self._client.backups.list()
         result = []
         for backup in backup_list:
-            if not backup.locations or self.location not in backup.locations:
+            if self.location not in backup.location_attributes:
                 continue
             details = await self._client.backups.backup_info(backup.slug)
             result.append(_backup_details_to_agent_backup(details, self.location))
@@ -212,7 +222,7 @@ class SupervisorBackupAgent(BackupAgent):
             details = await self._client.backups.backup_info(backup_id)
         except SupervisorNotFoundError:
             return None
-        if self.location not in details.locations:
+        if self.location not in details.location_attributes:
             return None
         return _backup_details_to_agent_backup(details, self.location)

@@ -285,8 +295,8 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
         # will be handled by async_upload_backup.
         # If the lists are the same length, it does not matter which one we send,
         # we send the encrypted list to have a well defined behavior.
-        encrypted_locations: list[str | None] = []
-        decrypted_locations: list[str | None] = []
+        encrypted_locations: list[str] = []
+        decrypted_locations: list[str] = []
         agents_settings = manager.config.data.agents
         for hassio_agent in hassio_agents:
             if password is not None:
@@ -336,31 +346,43 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
             self._async_wait_for_backup(
                 backup,
                 locations,
                 on_progress=on_progress,
                 remove_after_upload=locations == [LOCATION_CLOUD_BACKUP],
             ),
             name="backup_manager_create_backup",
             eager_start=False,  # To ensure the task is not started before we return
         )

-        return (NewBackup(backup_job_id=backup.job_id), backup_task)
+        return (NewBackup(backup_job_id=backup.job_id.hex), backup_task)

     async def _async_wait_for_backup(
         self,
         backup: supervisor_backups.NewBackup,
-        locations: list[str | None],
+        locations: list[str],
         *,
         on_progress: Callable[[CreateBackupEvent], None],
         remove_after_upload: bool,
     ) -> WrittenBackup:
         """Wait for a backup to complete."""
         backup_complete = asyncio.Event()
         backup_id: str | None = None
+        create_errors: list[dict[str, str]] = []

         @callback
         def on_job_progress(data: Mapping[str, Any]) -> None:
             """Handle backup progress."""
             nonlocal backup_id
+            if not (stage := try_parse_enum(CreateBackupStage, data.get("stage"))):
+                _LOGGER.debug("Unknown create stage: %s", data.get("stage"))
+            else:
+                on_progress(
+                    CreateBackupEvent(
+                        reason=None, stage=stage, state=CreateBackupState.IN_PROGRESS
+                    )
+                )
             if data.get("done") is True:
                 backup_id = data.get("reference")
+                create_errors.extend(data.get("errors", []))
                 backup_complete.set()

         unsub = self._async_listen_job_events(backup.job_id, on_job_progress)
@@ -369,8 +391,11 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
             await backup_complete.wait()
         finally:
             unsub()
-        if not backup_id:
-            raise BackupReaderWriterError("Backup failed")
+        if not backup_id or create_errors:
+            # We should add more specific error handling here in the future
+            raise BackupReaderWriterError(
+                f"Backup failed: {create_errors or 'no backup_id'}"
+            )

         async def open_backup() -> AsyncIterator[bytes]:
             try:
@@ -483,7 +508,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
             else None
         )

-        restore_location: str | None
+        restore_location: str
         if manager.backup_agents[agent_id].domain != DOMAIN:
             # Download the backup to the supervisor. Supervisor will clean up the backup
             # two days after the restore is done.
@@ -509,6 +534,8 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
                     location=restore_location,
                 ),
             )
+        except SupervisorNotFoundError as err:
+            raise BackupNotFound from err
         except SupervisorBadRequestError as err:
             # Supervisor currently does not transmit machine parsable error types
             message = err.args[0]
@@ -517,17 +544,30 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
             raise HomeAssistantError(message) from err

         restore_complete = asyncio.Event()
+        restore_errors: list[dict[str, str]] = []

         @callback
         def on_job_progress(data: Mapping[str, Any]) -> None:
             """Handle backup restore progress."""
+            if not (stage := try_parse_enum(RestoreBackupStage, data.get("stage"))):
+                _LOGGER.debug("Unknown restore stage: %s", data.get("stage"))
+            else:
+                on_progress(
+                    RestoreBackupEvent(
+                        reason=None, stage=stage, state=RestoreBackupState.IN_PROGRESS
+                    )
+                )
             if data.get("done") is True:
                 restore_complete.set()
+                restore_errors.extend(data.get("errors", []))

         unsub = self._async_listen_job_events(job.job_id, on_job_progress)
         try:
             await self._get_job_state(job.job_id, on_job_progress)
             await restore_complete.wait()
+            if restore_errors:
+                # We should add more specific error handling here in the future
+                raise BackupReaderWriterError(f"Restore failed: {restore_errors}")
         finally:
             unsub()

@@ -537,28 +577,52 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
         on_progress: Callable[[RestoreBackupEvent | IdleEvent], None],
     ) -> None:
         """Check restore status after core restart."""
-        if not (restore_job_id := os.environ.get(RESTORE_JOB_ID_ENV)):
+        if not (restore_job_str := os.environ.get(RESTORE_JOB_ID_ENV)):
             _LOGGER.debug("No restore job ID found in environment")
             return

+        restore_job_id = UUID(restore_job_str)
         _LOGGER.debug("Found restore job ID %s in environment", restore_job_id)

+        sent_event = False
+
         @callback
         def on_job_progress(data: Mapping[str, Any]) -> None:
             """Handle backup restore progress."""
+            nonlocal sent_event
+
+            if not (stage := try_parse_enum(RestoreBackupStage, data.get("stage"))):
+                _LOGGER.debug("Unknown restore stage: %s", data.get("stage"))
+
             if data.get("done") is not True:
-                on_progress(
-                    RestoreBackupEvent(
-                        reason="", stage=None, state=RestoreBackupState.IN_PROGRESS
+                if stage or not sent_event:
+                    sent_event = True
+                    on_progress(
+                        RestoreBackupEvent(
+                            reason=None,
+                            stage=stage,
+                            state=RestoreBackupState.IN_PROGRESS,
+                        )
                     )
-                )
                 return

-            on_progress(
-                RestoreBackupEvent(
-                    reason="", stage=None, state=RestoreBackupState.COMPLETED
+            restore_errors = data.get("errors", [])
+            if restore_errors:
+                _LOGGER.warning("Restore backup failed: %s", restore_errors)
+                # We should add more specific error handling here in the future
+                on_progress(
+                    RestoreBackupEvent(
+                        reason="unknown_error",
+                        stage=stage,
+                        state=RestoreBackupState.FAILED,
+                    )
                 )
-            )
+            else:
+                on_progress(
+                    RestoreBackupEvent(
+                        reason=None, stage=stage, state=RestoreBackupState.COMPLETED
+                    )
+                )
             on_progress(IdleEvent())
             unsub()

@@ -571,7 +635,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):

     @callback
     def _async_listen_job_events(
-        self, job_id: str, on_event: Callable[[Mapping[str, Any]], None]
+        self, job_id: UUID, on_event: Callable[[Mapping[str, Any]], None]
     ) -> Callable[[], None]:
         """Listen for job events."""

@@ -586,7 +650,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
             if (
                 data.get("event") != "job"
                 or not (event_data := data.get("data"))
-                or event_data.get("uuid") != job_id
+                or event_data.get("uuid") != job_id.hex
             ):
                 return
             on_event(event_data)
@@ -597,10 +661,10 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
         return unsub

     async def _get_job_state(
-        self, job_id: str, on_event: Callable[[Mapping[str, Any]], None]
+        self, job_id: UUID, on_event: Callable[[Mapping[str, Any]], None]
     ) -> None:
         """Poll a job for its state."""
-        job = await self._client.jobs.get_job(UUID(job_id))
+        job = await self._client.jobs.get_job(job_id)
         _LOGGER.debug("Job state: %s", job)
         on_event(job.to_dict())
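
A recurring detail in the backup changes: job IDs become UUID objects, while the websocket event payload appears to carry the ID as a bare hex string, hence the job_id.hex comparisons above. A small sketch of that matching; the payload shape is assumed from the diff:

    from uuid import UUID

    job_id = UUID("8f7c2b9a6d334e0f9a1b2c3d4e5f6a7b")

    event_data = {"uuid": "8f7c2b9a6d334e0f9a1b2c3d4e5f6a7b", "done": True}

    # str(job_id) would be the dashed form ("8f7c2b9a-6d33-..."), so the
    # comparison has to use .hex, which is undashed lowercase:
    if event_data.get("uuid") == job_id.hex:
        print("event belongs to this job")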

homeassistant/components/hassio/coordinator.py
@@ -295,6 +295,8 @@ def async_remove_addons_from_dev_reg(
 class HassioDataUpdateCoordinator(DataUpdateCoordinator):
     """Class to retrieve Hass.io status."""

+    config_entry: ConfigEntry
+
     def __init__(
         self, hass: HomeAssistant, config_entry: ConfigEntry, dev_reg: dr.DeviceRegistry
     ) -> None:
@@ -302,6 +304,7 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
         super().__init__(
             hass,
             _LOGGER,
+            config_entry=config_entry,
             name=DOMAIN,
             update_interval=HASSIO_UPDATE_INTERVAL,
             # We don't want an immediate refresh since we want to avoid

homeassistant/components/hassio/manifest.json
@@ -6,6 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/hassio",
   "iot_class": "local_polling",
   "quality_scale": "internal",
-  "requirements": ["aiohasupervisor==0.2.2b6"],
+  "requirements": ["aiohasupervisor==0.3.0"],
   "single_config_entry": true
 }

homeassistant/components/heicko/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
+"""Virtual integration: Heicko."""

homeassistant/components/heicko/manifest.json (new file, 6 lines)
@@ -0,0 +1,6 @@
+{
+  "domain": "heicko",
+  "name": "Heicko",
+  "integration_type": "virtual",
+  "supported_by": "motion_blinds"
+}

homeassistant/components/holiday/manifest.json
@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/holiday",
   "iot_class": "local_polling",
-  "requirements": ["holidays==0.65", "babel==2.15.0"]
+  "requirements": ["holidays==0.66", "babel==2.15.0"]
 }

homeassistant/components/homee/const.py
@@ -1,6 +1,7 @@
 """Constants for the homee integration."""

 from homeassistant.const import (
+    DEGREE,
     LIGHT_LUX,
     PERCENTAGE,
     REVOLUTIONS_PER_MINUTE,
@@ -32,6 +33,7 @@ HOMEE_UNIT_TO_HA_UNIT = {
     "W": UnitOfPower.WATT,
     "m/s": UnitOfSpeed.METERS_PER_SECOND,
     "km/h": UnitOfSpeed.KILOMETERS_PER_HOUR,
+    "°": DEGREE,
     "°F": UnitOfTemperature.FAHRENHEIT,
     "°C": UnitOfTemperature.CELSIUS,
     "K": UnitOfTemperature.KELVIN,
@@ -51,7 +53,7 @@ OPEN_CLOSE_MAP_REVERSED = {
     0.0: "closed",
     1.0: "open",
     2.0: "partial",
-    3.0: "cosing",
+    3.0: "closing",
     4.0: "opening",
 }
 WINDOW_MAP = {

homeassistant/components/homewizard/__init__.py
@@ -25,7 +25,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: HomeWizardConfigEntry) -

     api: HomeWizardEnergy

-    if token := entry.data.get(CONF_TOKEN):
+    is_battery = entry.unique_id.startswith("HWE-BAT") if entry.unique_id else False
+
+    if (token := entry.data.get(CONF_TOKEN)) and is_battery:
         api = HomeWizardEnergyV2(
             entry.data[CONF_IP_ADDRESS],
             token=token,
@@ -37,7 +39,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: HomeWizardConfigEntry) -
             clientsession=async_get_clientsession(hass),
         )

-    await async_check_v2_support_and_create_issue(hass, entry)
+    if is_battery:
+        await async_check_v2_support_and_create_issue(hass, entry)

     coordinator = HWEnergyDeviceUpdateCoordinator(hass, api)
     try:

homeassistant/components/homewizard/config_flow.py
@@ -272,9 +272,14 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN):
     ) -> ConfigFlowResult:
         """Handle reconfiguration of the integration."""
         errors: dict[str, str] = {}
+        reconfigure_entry = self._get_reconfigure_entry()

         if user_input:
             try:
-                device_info = await async_try_connect(user_input[CONF_IP_ADDRESS])
+                device_info = await async_try_connect(
+                    user_input[CONF_IP_ADDRESS],
+                    token=reconfigure_entry.data.get(CONF_TOKEN),
+                )

             except RecoverableError as ex:
                 LOGGER.error(ex)
@@ -288,7 +293,6 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN):
                     self._get_reconfigure_entry(),
                     data_updates=user_input,
                 )
-        reconfigure_entry = self._get_reconfigure_entry()
         return self.async_show_form(
             step_id="reconfigure",
             data_schema=vol.Schema(
@@ -306,7 +310,7 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN):
     )


-async def async_try_connect(ip_address: str) -> Device:
+async def async_try_connect(ip_address: str, token: str | None = None) -> Device:
     """Try to connect.

     Make connection with device to test the connection
@@ -317,7 +321,7 @@ async def async_try_connect(ip_address: str) -> Device:

     # Determine if device is v1 or v2 capable
     if await has_v2_api(ip_address):
-        energy_api = HomeWizardEnergyV2(ip_address)
+        energy_api = HomeWizardEnergyV2(ip_address, token=token)
     else:
         energy_api = HomeWizardEnergyV1(ip_address)
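
async_try_connect now accepts an optional token and passes it only to the v2 client, so a reconfigure flow can reuse stored credentials. The selection logic reduced to its shape; the client classes and probe below are invented stand-ins, not the homewizard_energy API:

    class EnergyV1:  # invented stand-in
        def __init__(self, ip: str) -> None:
            self.ip = ip


    class EnergyV2:  # invented stand-in
        def __init__(self, ip: str, token: str | None = None) -> None:
            self.ip, self.token = ip, token


    def has_v2_api(ip: str) -> bool:
        return ip.endswith(".2")  # placeholder probe, just for the sketch


    def try_connect(ip: str, token: str | None = None):
        # v2-capable devices get the (possibly None) token; v1 devices never do.
        return EnergyV2(ip, token=token) if has_v2_api(ip) else EnergyV1(ip)


    print(type(try_connect("192.168.1.2", token="abc")).__name__)  # EnergyV2
    print(type(try_connect("192.168.1.5")).__name__)               # EnergyV1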

homeassistant/components/hue/v1/light.py
@@ -408,7 +408,7 @@ class HueLight(CoordinatorEntity, LightEntity):
         if self._fixed_color_mode:
             return self._fixed_color_mode

-        # The light supports both hs/xy and white with adjustabe color_temperature
+        # The light supports both hs/xy and white with adjustable color_temperature
         mode = self._color_mode
         if mode in ("xy", "hs"):
             return ColorMode.HS

homeassistant/components/hunterdouglas_powerview/strings.json
@@ -5,7 +5,7 @@
       "title": "Connect to the PowerView Hub",
       "data": {
         "host": "[%key:common::config_flow::data::ip%]",
-        "api_version": "Hub Generation"
+        "api_version": "Hub generation"
       },
       "data_description": {
         "api_version": "API version is detectable, but you can override and force a specific version"
@@ -19,7 +19,7 @@
     "flow_title": "{name} ({host})",
     "error": {
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
-      "unsupported_device": "Only the primary powerview hub can be added",
+      "unsupported_device": "Only the primary PowerView Hub can be added",
       "unknown": "[%key:common::config_flow::error::unknown%]"
     },
     "abort": {

homeassistant/components/idasen_desk/config_flow.py
@@ -87,7 +87,7 @@ class IdasenDeskConfigFlow(ConfigFlow, domain=DOMAIN):
         if discovery := self._discovery_info:
             self._discovered_devices[discovery.address] = discovery
         else:
-            current_addresses = self._async_current_ids()
+            current_addresses = self._async_current_ids(include_ignore=False)
             for discovery in async_discovered_service_info(self.hass):
                 if (
                     discovery.address in current_addresses

homeassistant/components/iometer/__init__.py (new file, 39 lines)
@@ -0,0 +1,39 @@
+"""The IOmeter integration."""
+
+from __future__ import annotations
+
+from iometer import IOmeterClient, IOmeterConnectionError
+
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.const import CONF_HOST, Platform
+from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import ConfigEntryNotReady
+from homeassistant.helpers.aiohttp_client import async_get_clientsession
+
+from .coordinator import IOmeterConfigEntry, IOMeterCoordinator
+
+PLATFORMS: list[Platform] = [Platform.SENSOR]
+
+
+async def async_setup_entry(hass: HomeAssistant, entry: IOmeterConfigEntry) -> bool:
+    """Set up IOmeter from a config entry."""
+
+    host = entry.data[CONF_HOST]
+    session = async_get_clientsession(hass)
+    client = IOmeterClient(host=host, session=session)
+    try:
+        await client.get_current_status()
+    except IOmeterConnectionError as err:
+        raise ConfigEntryNotReady from err
+
+    coordinator = IOMeterCoordinator(hass, client)
+    await coordinator.async_config_entry_first_refresh()
+    entry.runtime_data = coordinator
+
+    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
+
+    return True
+
+
+async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+    """Unload a config entry."""
+    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

homeassistant/components/iometer/config_flow.py (new file, 91 lines)
@@ -0,0 +1,91 @@
+"""Config flow for the IOmeter integration."""
+
+from typing import Any, Final
+
+from iometer import IOmeterClient, IOmeterConnectionError
+import voluptuous as vol
+
+from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
+from homeassistant.const import CONF_HOST
+from homeassistant.helpers.aiohttp_client import async_get_clientsession
+from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
+
+from .const import DOMAIN
+
+CONFIG_SCHEMA: Final = vol.Schema({vol.Required(CONF_HOST): str})
+
+
+class IOMeterConfigFlow(ConfigFlow, domain=DOMAIN):
+    """Handles the config flow for a IOmeter bridge and core."""
+
+    def __init__(self) -> None:
+        """Initialize the config flow."""
+        self._host: str
+        self._meter_number: str
+
+    async def async_step_zeroconf(
+        self, discovery_info: ZeroconfServiceInfo
+    ) -> ConfigFlowResult:
+        """Handle zeroconf discovery."""
+        self._host = host = discovery_info.host
+        self._async_abort_entries_match({CONF_HOST: host})
+
+        session = async_get_clientsession(self.hass)
+        client = IOmeterClient(host=host, session=session)
+        try:
+            status = await client.get_current_status()
+        except IOmeterConnectionError:
+            return self.async_abort(reason="cannot_connect")
+
+        self._meter_number = status.meter.number
+
+        await self.async_set_unique_id(status.device.id)
+        self._abort_if_unique_id_configured()
+
+        self.context["title_placeholders"] = {"name": f"IOmeter {self._meter_number}"}
+        return await self.async_step_zeroconf_confirm()
+
+    async def async_step_zeroconf_confirm(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Confirm discovery."""
+        if user_input is not None:
+            return await self._async_create_entry()
+
+        self._set_confirm_only()
+        return self.async_show_form(
+            step_id="zeroconf_confirm",
+            description_placeholders={"meter_number": self._meter_number},
+        )
+
+    async def async_step_user(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Handle the initial configuration."""
+        errors: dict[str, str] = {}
+
+        if user_input is not None:
+            self._host = user_input[CONF_HOST]
+            session = async_get_clientsession(self.hass)
+            client = IOmeterClient(host=self._host, session=session)
+            try:
+                status = await client.get_current_status()
+            except IOmeterConnectionError:
+                errors["base"] = "cannot_connect"
+            else:
+                self._meter_number = status.meter.number
+                await self.async_set_unique_id(status.device.id)
+                self._abort_if_unique_id_configured()
+                return await self._async_create_entry()
+
+        return self.async_show_form(
+            step_id="user",
+            data_schema=CONFIG_SCHEMA,
+            errors=errors,
+        )
+
+    async def _async_create_entry(self) -> ConfigFlowResult:
+        """Create entry."""
+        return self.async_create_entry(
+            title=f"IOmeter {self._meter_number}",
+            data={CONF_HOST: self._host},
+        )

homeassistant/components/iometer/const.py (new file, 5 lines)
@@ -0,0 +1,5 @@
+"""Constants for the IOmeter integration."""
+
+from typing import Final
+
+DOMAIN: Final = "iometer"

homeassistant/components/iometer/coordinator.py (new file, 55 lines)
@@ -0,0 +1,55 @@
+"""DataUpdateCoordinator for IOmeter."""
+
+from dataclasses import dataclass
+from datetime import timedelta
+import logging
+
+from iometer import IOmeterClient, IOmeterConnectionError, Reading, Status
+
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
+
+from .const import DOMAIN
+
+_LOGGER = logging.getLogger(__name__)
+DEFAULT_SCAN_INTERVAL = timedelta(seconds=10)
+
+type IOmeterConfigEntry = ConfigEntry[IOMeterCoordinator]
+
+
+@dataclass
+class IOmeterData:
+    """Class for data update."""
+
+    reading: Reading
+    status: Status
+
+
+class IOMeterCoordinator(DataUpdateCoordinator[IOmeterData]):
+    """Class to manage fetching IOmeter data."""
+
+    config_entry: IOmeterConfigEntry
+    client: IOmeterClient
+
+    def __init__(self, hass: HomeAssistant, client: IOmeterClient) -> None:
+        """Initialize coordinator."""
+
+        super().__init__(
+            hass,
+            _LOGGER,
+            name=DOMAIN,
+            update_interval=DEFAULT_SCAN_INTERVAL,
+        )
+        self.client = client
+        self.identifier = self.config_entry.entry_id
+
+    async def _async_update_data(self) -> IOmeterData:
+        """Update data async."""
+        try:
+            reading = await self.client.get_current_reading()
+            status = await self.client.get_current_status()
+        except IOmeterConnectionError as error:
+            raise UpdateFailed(f"Error communicating with IOmeter: {error}") from error
+
+        return IOmeterData(reading=reading, status=status)
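
The coordinator returns both polled endpoints as one IOmeterData snapshot, so entities always see a reading and a status fetched in the same cycle. The same pattern outside Home Assistant, with an invented FakeClient standing in for IOmeterClient:

    import asyncio
    from dataclasses import dataclass


    @dataclass
    class Snapshot:
        reading: float
        status: str


    class FakeClient:  # invented stand-in for IOmeterClient
        async def get_current_reading(self) -> float:
            return 42.5

        async def get_current_status(self) -> str:
            return "attached"


    async def update(client: FakeClient) -> Snapshot:
        # Fetch both endpoints in one update so consumers get a consistent pair.
        reading = await client.get_current_reading()
        status = await client.get_current_status()
        return Snapshot(reading=reading, status=status)


    print(asyncio.run(update(FakeClient())))  # Snapshot(reading=42.5, status='attached')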

homeassistant/components/iometer/entity.py (new file, 24 lines)
@@ -0,0 +1,24 @@
+"""Base class for IOmeter entities."""
+
+from homeassistant.helpers.device_registry import DeviceInfo
+from homeassistant.helpers.update_coordinator import CoordinatorEntity
+
+from .const import DOMAIN
+from .coordinator import IOMeterCoordinator
+
+
+class IOmeterEntity(CoordinatorEntity[IOMeterCoordinator]):
+    """Defines a base IOmeter entity."""
+
+    _attr_has_entity_name = True
+
+    def __init__(self, coordinator: IOMeterCoordinator) -> None:
+        """Initialize IOmeter entity."""
+        super().__init__(coordinator)
+        status = coordinator.data.status
+        self._attr_device_info = DeviceInfo(
+            identifiers={(DOMAIN, status.device.id)},
+            manufacturer="IOmeter GmbH",
+            model="IOmeter",
+            sw_version=f"{status.device.core.version}/{status.device.bridge.version}",
+        )

homeassistant/components/iometer/icons.json (new file, 38 lines)
@@ -0,0 +1,38 @@
+{
+  "entity": {
+    "sensor": {
+      "attachment_status": {
+        "default": "mdi:eye",
+        "state": {
+          "attached": "mdi:check-bold",
+          "detached": "mdi:close",
+          "unknown": "mdi:help"
+        }
+      },
+      "connection_status": {
+        "default": "mdi:eye",
+        "state": {
+          "connected": "mdi:check-bold",
+          "disconnected": "mdi:close",
+          "unknown": "mdi:help"
+        }
+      },
+      "pin_status": {
+        "default": "mdi:eye",
+        "state": {
+          "entered": "mdi:lock-open",
+          "pending": "mdi:lock-clock",
+          "missing": "mdi:lock",
+          "unknown": "mdi:help"
+        }
+      },
+      "power_status": {
+        "default": "mdi:eye",
+        "state": {
+          "battery": "mdi:battery",
+          "wired": "mdi:power-plug"
+        }
+      }
+    }
+  }
+}