Mirror of https://github.com/home-assistant/core.git (synced 2025-07-30 16:57:19 +00:00)

Commit e6bb99fe40: Merge branch 'dev' into dev

.pre-commit-config.yaml:
@@ -8,7 +8,7 @@ repos:
   - id: ruff-format
     files: ^((homeassistant|pylint|script|tests)/.+)?[^/]+\.(py|pyi)$
   - repo: https://github.com/codespell-project/codespell
-    rev: v2.3.0
+    rev: v2.4.1
     hooks:
       - id: codespell
         args:

CODEOWNERS (generated, 4 changes):
@@ -765,8 +765,8 @@ build.json @home-assistant/supervisor
 /tests/components/ituran/ @shmuelzon
 /homeassistant/components/izone/ @Swamp-Ig
 /tests/components/izone/ @Swamp-Ig
-/homeassistant/components/jellyfin/ @j-stienstra @ctalkington
-/tests/components/jellyfin/ @j-stienstra @ctalkington
+/homeassistant/components/jellyfin/ @RunC0deRun @ctalkington
+/tests/components/jellyfin/ @RunC0deRun @ctalkington
 /homeassistant/components/jewish_calendar/ @tsvi
 /tests/components/jewish_calendar/ @tsvi
 /homeassistant/components/juicenet/ @jesserockz

Dockerfile (generated, 2 changes):
@@ -13,7 +13,7 @@ ENV \
 ARG QEMU_CPU

 # Install uv
-RUN pip3 install uv==0.5.21
+RUN pip3 install uv==0.5.27

 WORKDIR /usr/src

homeassistant/components/apple_tv/config_flow.py:
@@ -134,7 +134,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
         unique_id for said entry. When a new (zeroconf) service or device is
         discovered, the identifier is first used to look up if it belongs to an
         existing config entry. If that's the case, the unique_id from that entry is
-        re-used, otherwise the newly discovered identifier is used instead.
+        reused, otherwise the newly discovered identifier is used instead.
         """
         assert self.atv
         all_identifiers = set(self.atv.all_identifiers)

homeassistant/components/backup/__init__.py:
@@ -26,11 +26,14 @@ from .manager import (
     BackupReaderWriterError,
     CoreBackupReaderWriter,
     CreateBackupEvent,
+    CreateBackupStage,
+    CreateBackupState,
     IdleEvent,
     IncorrectPasswordError,
     ManagerBackup,
     NewBackup,
     RestoreBackupEvent,
+    RestoreBackupStage,
     RestoreBackupState,
     WrittenBackup,
 )
@@ -49,6 +52,8 @@ __all__ = [
     "BackupReaderWriter",
     "BackupReaderWriterError",
     "CreateBackupEvent",
+    "CreateBackupStage",
+    "CreateBackupState",
     "Folder",
     "IdleEvent",
     "IncorrectPasswordError",
@@ -56,6 +61,7 @@ __all__ = [
     "ManagerBackup",
     "NewBackup",
     "RestoreBackupEvent",
+    "RestoreBackupStage",
     "RestoreBackupState",
     "WrittenBackup",
     "async_get_manager",

homeassistant/components/backup/models.py:
@@ -41,12 +41,6 @@ class BaseBackup:
     homeassistant_version: str | None  # None if homeassistant_included is False
     name: str

-    def as_frontend_json(self) -> dict:
-        """Return a dict representation of this backup for sending to frontend."""
-        return {
-            key: val for key, val in asdict(self).items() if key != "extra_metadata"
-        }
-

 @dataclass(frozen=True, kw_only=True)
 class AgentBackup(BaseBackup):

homeassistant/components/backup/util.py:
@@ -4,6 +4,7 @@ from __future__ import annotations

 import asyncio
 from collections.abc import AsyncIterator, Callable, Coroutine
+from concurrent.futures import CancelledError, Future
 import copy
 from dataclasses import dataclass, replace
 from io import BytesIO
@@ -12,6 +13,7 @@ import os
 from pathlib import Path, PurePath
 from queue import SimpleQueue
 import tarfile
+import threading
 from typing import IO, Any, Self, cast

 import aiohttp
@@ -22,7 +24,6 @@ from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.util import dt as dt_util
 from homeassistant.util.json import JsonObjectType, json_loads_object
-from homeassistant.util.thread import ThreadWithException

 from .const import BUF_SIZE, LOGGER
 from .models import AddonInfo, AgentBackup, Folder
@@ -167,23 +168,38 @@ class AsyncIteratorReader:

     def __init__(self, hass: HomeAssistant, stream: AsyncIterator[bytes]) -> None:
         """Initialize the wrapper."""
+        self._aborted = False
         self._hass = hass
         self._stream = stream
         self._buffer: bytes | None = None
+        self._next_future: Future[bytes | None] | None = None
         self._pos: int = 0

     async def _next(self) -> bytes | None:
         """Get the next chunk from the iterator."""
         return await anext(self._stream, None)

+    def abort(self) -> None:
+        """Abort the reader."""
+        self._aborted = True
+        if self._next_future is not None:
+            self._next_future.cancel()
+
     def read(self, n: int = -1, /) -> bytes:
         """Read data from the iterator."""
         result = bytearray()
         while n < 0 or len(result) < n:
             if not self._buffer:
-                self._buffer = asyncio.run_coroutine_threadsafe(
+                self._next_future = asyncio.run_coroutine_threadsafe(
                     self._next(), self._hass.loop
-                ).result()
+                )
+                if self._aborted:
+                    self._next_future.cancel()
+                    raise AbortCipher
+                try:
+                    self._buffer = self._next_future.result()
+                except CancelledError as err:
+                    raise AbortCipher from err
             self._pos = 0
             if not self._buffer:
                 # The stream is exhausted
@@ -205,9 +221,11 @@ class AsyncIteratorWriter:

     def __init__(self, hass: HomeAssistant) -> None:
         """Initialize the wrapper."""
+        self._aborted = False
         self._hass = hass
         self._pos: int = 0
         self._queue: asyncio.Queue[bytes | None] = asyncio.Queue(maxsize=1)
+        self._write_future: Future[bytes | None] | None = None

     def __aiter__(self) -> Self:
         """Return the iterator."""
@@ -219,13 +237,28 @@ class AsyncIteratorWriter:
             return data
         raise StopAsyncIteration

+    def abort(self) -> None:
+        """Abort the writer."""
+        self._aborted = True
+        if self._write_future is not None:
+            self._write_future.cancel()
+
     def tell(self) -> int:
         """Return the current position in the iterator."""
         return self._pos

     def write(self, s: bytes, /) -> int:
         """Write data to the iterator."""
-        asyncio.run_coroutine_threadsafe(self._queue.put(s), self._hass.loop).result()
+        self._write_future = asyncio.run_coroutine_threadsafe(
+            self._queue.put(s), self._hass.loop
+        )
+        if self._aborted:
+            self._write_future.cancel()
+            raise AbortCipher
+        try:
+            self._write_future.result()
+        except CancelledError as err:
+            raise AbortCipher from err
         self._pos += len(s)
         return len(s)

@@ -415,7 +448,9 @@ def _encrypt_backup(
 class _CipherWorkerStatus:
     done: asyncio.Event
     error: Exception | None = None
-    thread: ThreadWithException
+    reader: AsyncIteratorReader
+    thread: threading.Thread
+    writer: AsyncIteratorWriter


 class _CipherBackupStreamer:
@@ -468,11 +503,13 @@ class _CipherBackupStreamer:
         stream = await self._open_stream()
         reader = AsyncIteratorReader(self._hass, stream)
         writer = AsyncIteratorWriter(self._hass)
-        worker = ThreadWithException(
+        worker = threading.Thread(
             target=self._cipher_func,
             args=[reader, writer, self._password, on_done, self.size(), self._nonces],
         )
-        worker_status = _CipherWorkerStatus(done=asyncio.Event(), thread=worker)
+        worker_status = _CipherWorkerStatus(
+            done=asyncio.Event(), reader=reader, thread=worker, writer=writer
+        )
         self._workers.append(worker_status)
         worker.start()
         return writer
@@ -480,9 +517,8 @@ class _CipherBackupStreamer:
     async def wait(self) -> None:
         """Wait for the worker threads to finish."""
         for worker in self._workers:
-            if not worker.thread.is_alive():
-                continue
-            worker.thread.raise_exc(AbortCipher)
+            worker.reader.abort()
+            worker.writer.abort()
         await asyncio.gather(*(worker.done.wait() for worker in self._workers))

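The util.py change replaces ThreadWithException, which aborted the cipher workers by injecting AbortCipher into them via the CPython C API, with plain threading.Thread plus cooperative cancellation: the worker thread now only ever blocks on the concurrent.futures.Future returned by asyncio.run_coroutine_threadsafe, so the event loop can abort it deterministically by cancelling that future. Injected exceptions only land between Python bytecode instructions, so a thread parked inside a blocking .result() call might never be interrupted; a cancelled future, by contrast, always wakes its waiter. A minimal standalone sketch of the bridge (LoopBridge and Abort are illustrative names, not the integration's API):

import asyncio
from collections.abc import AsyncIterator
from concurrent.futures import CancelledError, Future


class Abort(Exception):
    """Raised inside the worker thread to unwind it cooperatively."""


class LoopBridge:
    """Let a worker thread consume an async iterator, abortably."""

    def __init__(self, loop: asyncio.AbstractEventLoop, stream: AsyncIterator[bytes]) -> None:
        self._loop = loop
        self._stream = stream
        self._future: Future[bytes | None] | None = None
        self._aborted = False

    async def _next(self) -> bytes | None:
        # anext() on an async generator is an awaitable but not a coroutine,
        # so wrap it before handing it to run_coroutine_threadsafe.
        return await anext(self._stream, None)

    def abort(self) -> None:
        # Event-loop side: wake a worker blocked in next_chunk().
        self._aborted = True
        if self._future is not None:
            self._future.cancel()

    def next_chunk(self) -> bytes | None:
        # Worker-thread side: block until the loop yields a chunk.
        self._future = asyncio.run_coroutine_threadsafe(self._next(), self._loop)
        if self._aborted:  # abort() may have run before _future was assigned
            self._future.cancel()
            raise Abort
        try:
            return self._future.result()
        except CancelledError as err:
            raise Abort from err
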
homeassistant/components/backup/websocket.py:
@@ -57,7 +57,7 @@ async def handle_info(
             "agent_errors": {
                 agent_id: str(err) for agent_id, err in agent_errors.items()
             },
-            "backups": [backup.as_frontend_json() for backup in backups.values()],
+            "backups": list(backups.values()),
             "last_attempted_automatic_backup": manager.config.data.last_attempted_automatic_backup,
             "last_completed_automatic_backup": manager.config.data.last_completed_automatic_backup,
             "last_non_idle_event": manager.last_non_idle_event,
@@ -91,7 +91,7 @@ async def handle_details(
             "agent_errors": {
                 agent_id: str(err) for agent_id, err in agent_errors.items()
             },
-            "backup": backup.as_frontend_json() if backup else None,
+            "backup": backup,
         },
     )

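With as_frontend_json gone, both websocket handlers hand the backup objects to the response layer as-is. This relies on Home Assistant's orjson-based websocket serializer encoding dataclasses natively; the extra_metadata filtering the removed helper performed now has to be handled by the model itself rather than at the call site. The manual standard-library equivalent, for comparison:

from dataclasses import asdict, dataclass
import json


@dataclass(frozen=True, kw_only=True)
class DemoBackup:
    backup_id: str
    name: str


# json.dumps cannot encode dataclasses directly; asdict() does by hand what
# the orjson-based encoder does automatically for the handlers above.
print(json.dumps(asdict(DemoBackup(backup_id="abc123", name="Core backup"))))
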
homeassistant/components/bluesound/strings.json:
@@ -28,7 +28,7 @@
   "services": {
     "join": {
       "name": "Join",
-      "description": "Group player together.",
+      "description": "Groups players together under a single master speaker.",
       "fields": {
         "master": {
           "name": "Master",
@@ -36,23 +36,23 @@
         },
         "entity_id": {
           "name": "Entity",
-          "description": "Name of entity that will coordinate the grouping. Platform dependent."
+          "description": "Name of entity that will group to master speaker. Platform dependent."
         }
       }
     },
     "unjoin": {
       "name": "Unjoin",
-      "description": "Unjoin the player from a group.",
+      "description": "Separates a player from a group.",
       "fields": {
         "entity_id": {
           "name": "Entity",
-          "description": "Name of entity that will be unjoined from their group. Platform dependent."
+          "description": "Name of entity that will be separated from their group. Platform dependent."
        }
      }
    },
    "set_sleep_timer": {
      "name": "Set sleep timer",
-      "description": "Set a Bluesound timer. It will increase timer in steps: 15, 30, 45, 60, 90, 0.",
+      "description": "Sets a Bluesound timer that will turn off the speaker. It will increase in steps: 15, 30, 45, 60, 90, 0.",
      "fields": {
        "entity_id": {
          "name": "Entity",
@@ -62,7 +62,7 @@
    },
    "clear_sleep_timer": {
      "name": "Clear sleep timer",
-      "description": "Clear a Bluesound timer.",
+      "description": "Clears a Bluesound timer.",
      "fields": {
        "entity_id": {
          "name": "Entity",

homeassistant/components/bthome/sensor.py:
@@ -67,6 +67,11 @@ SENSOR_DESCRIPTIONS = {
         state_class=SensorStateClass.MEASUREMENT,
         entity_category=EntityCategory.DIAGNOSTIC,
     ),
+    # Channel (-)
+    (BTHomeExtendedSensorDeviceClass.CHANNEL, None): SensorEntityDescription(
+        key=str(BTHomeExtendedSensorDeviceClass.CHANNEL),
+        state_class=SensorStateClass.MEASUREMENT,
+    ),
     # Conductivity (µS/cm)
     (
         BTHomeSensorDeviceClass.CONDUCTIVITY,

homeassistant/components/cloud/manifest.json:
@@ -13,6 +13,6 @@
   "integration_type": "system",
   "iot_class": "cloud_push",
   "loggers": ["hass_nabucasa"],
-  "requirements": ["hass-nabucasa==0.88.1"],
+  "requirements": ["hass-nabucasa==0.89.0"],
   "single_config_entry": true
 }

homeassistant/components/cloud/tts.py:
@@ -38,6 +38,156 @@ ATTR_GENDER = "gender"
 DEPRECATED_VOICES = {"XiaoxuanNeural": "XiaozhenNeural"}
 SUPPORT_LANGUAGES = list(TTS_VOICES)

+DEFAULT_VOICES = {
+    "af-ZA": "AdriNeural",
+    "am-ET": "MekdesNeural",
+    "ar-AE": "FatimaNeural",
+    "ar-BH": "LailaNeural",
+    "ar-DZ": "AminaNeural",
+    "ar-EG": "SalmaNeural",
+    "ar-IQ": "RanaNeural",
+    "ar-JO": "SanaNeural",
+    "ar-KW": "NouraNeural",
+    "ar-LB": "LaylaNeural",
+    "ar-LY": "ImanNeural",
+    "ar-MA": "MounaNeural",
+    "ar-OM": "AbdullahNeural",
+    "ar-QA": "AmalNeural",
+    "ar-SA": "ZariyahNeural",
+    "ar-SY": "AmanyNeural",
+    "ar-TN": "ReemNeural",
+    "ar-YE": "MaryamNeural",
+    "az-AZ": "BabekNeural",
+    "bg-BG": "KalinaNeural",
+    "bn-BD": "NabanitaNeural",
+    "bn-IN": "TanishaaNeural",
+    "bs-BA": "GoranNeural",
+    "ca-ES": "JoanaNeural",
+    "cs-CZ": "VlastaNeural",
+    "cy-GB": "NiaNeural",
+    "da-DK": "ChristelNeural",
+    "de-AT": "IngridNeural",
+    "de-CH": "LeniNeural",
+    "de-DE": "KatjaNeural",
+    "el-GR": "AthinaNeural",
+    "en-AU": "NatashaNeural",
+    "en-CA": "ClaraNeural",
+    "en-GB": "LibbyNeural",
+    "en-HK": "YanNeural",
+    "en-IE": "EmilyNeural",
+    "en-IN": "NeerjaNeural",
+    "en-KE": "AsiliaNeural",
+    "en-NG": "EzinneNeural",
+    "en-NZ": "MollyNeural",
+    "en-PH": "RosaNeural",
+    "en-SG": "LunaNeural",
+    "en-TZ": "ImaniNeural",
+    "en-US": "JennyNeural",
+    "en-ZA": "LeahNeural",
+    "es-AR": "ElenaNeural",
+    "es-BO": "SofiaNeural",
+    "es-CL": "CatalinaNeural",
+    "es-CO": "SalomeNeural",
+    "es-CR": "MariaNeural",
+    "es-CU": "BelkysNeural",
+    "es-DO": "RamonaNeural",
+    "es-EC": "AndreaNeural",
+    "es-ES": "ElviraNeural",
+    "es-GQ": "TeresaNeural",
+    "es-GT": "MartaNeural",
+    "es-HN": "KarlaNeural",
+    "es-MX": "DaliaNeural",
+    "es-NI": "YolandaNeural",
+    "es-PA": "MargaritaNeural",
+    "es-PE": "CamilaNeural",
+    "es-PR": "KarinaNeural",
+    "es-PY": "TaniaNeural",
+    "es-SV": "LorenaNeural",
+    "es-US": "PalomaNeural",
+    "es-UY": "ValentinaNeural",
+    "es-VE": "PaolaNeural",
+    "et-EE": "AnuNeural",
+    "eu-ES": "AinhoaNeural",
+    "fa-IR": "DilaraNeural",
+    "fi-FI": "SelmaNeural",
+    "fil-PH": "BlessicaNeural",
+    "fr-BE": "CharlineNeural",
+    "fr-CA": "SylvieNeural",
+    "fr-CH": "ArianeNeural",
+    "fr-FR": "DeniseNeural",
+    "ga-IE": "OrlaNeural",
+    "gl-ES": "SabelaNeural",
+    "gu-IN": "DhwaniNeural",
+    "he-IL": "HilaNeural",
+    "hi-IN": "SwaraNeural",
+    "hr-HR": "GabrijelaNeural",
+    "hu-HU": "NoemiNeural",
+    "hy-AM": "AnahitNeural",
+    "id-ID": "GadisNeural",
+    "is-IS": "GudrunNeural",
+    "it-IT": "ElsaNeural",
+    "ja-JP": "NanamiNeural",
+    "jv-ID": "SitiNeural",
+    "ka-GE": "EkaNeural",
+    "kk-KZ": "AigulNeural",
+    "km-KH": "SreymomNeural",
+    "kn-IN": "SapnaNeural",
+    "ko-KR": "SunHiNeural",
+    "lo-LA": "KeomanyNeural",
+    "lt-LT": "OnaNeural",
+    "lv-LV": "EveritaNeural",
+    "mk-MK": "MarijaNeural",
+    "ml-IN": "SobhanaNeural",
+    "mn-MN": "BataaNeural",
+    "mr-IN": "AarohiNeural",
+    "ms-MY": "YasminNeural",
+    "mt-MT": "GraceNeural",
+    "my-MM": "NilarNeural",
+    "nb-NO": "IselinNeural",
+    "ne-NP": "HemkalaNeural",
+    "nl-BE": "DenaNeural",
+    "nl-NL": "ColetteNeural",
+    "pl-PL": "AgnieszkaNeural",
+    "ps-AF": "LatifaNeural",
+    "pt-BR": "FranciscaNeural",
+    "pt-PT": "RaquelNeural",
+    "ro-RO": "AlinaNeural",
+    "ru-RU": "SvetlanaNeural",
+    "si-LK": "ThiliniNeural",
+    "sk-SK": "ViktoriaNeural",
+    "sl-SI": "PetraNeural",
+    "so-SO": "UbaxNeural",
+    "sq-AL": "AnilaNeural",
+    "sr-RS": "SophieNeural",
+    "su-ID": "TutiNeural",
+    "sv-SE": "SofieNeural",
+    "sw-KE": "ZuriNeural",
+    "sw-TZ": "RehemaNeural",
+    "ta-IN": "PallaviNeural",
+    "ta-LK": "SaranyaNeural",
+    "ta-MY": "KaniNeural",
+    "ta-SG": "VenbaNeural",
+    "te-IN": "ShrutiNeural",
+    "th-TH": "AcharaNeural",
+    "tr-TR": "EmelNeural",
+    "uk-UA": "PolinaNeural",
+    "ur-IN": "GulNeural",
+    "ur-PK": "UzmaNeural",
+    "uz-UZ": "MadinaNeural",
+    "vi-VN": "HoaiMyNeural",
+    "wuu-CN": "XiaotongNeural",
+    "yue-CN": "XiaoMinNeural",
+    "zh-CN": "XiaoxiaoNeural",
+    "zh-CN-henan": "YundengNeural",
+    "zh-CN-liaoning": "XiaobeiNeural",
+    "zh-CN-shaanxi": "XiaoniNeural",
+    "zh-CN-shandong": "YunxiangNeural",
+    "zh-CN-sichuan": "YunxiNeural",
+    "zh-HK": "HiuMaanNeural",
+    "zh-TW": "HsiaoChenNeural",
+    "zu-ZA": "ThandoNeural",
+}
+
 _LOGGER = logging.getLogger(__name__)

@@ -186,12 +336,13 @@ class CloudTTSEntity(TextToSpeechEntity):
         """Load TTS from Home Assistant Cloud."""
         gender: Gender | str | None = options.get(ATTR_GENDER)
         gender = handle_deprecated_gender(self.hass, gender)
-        original_voice: str | None = options.get(ATTR_VOICE)
-        if original_voice is None and language == self._language:
-            original_voice = self._voice
+        original_voice: str = options.get(
+            ATTR_VOICE,
+            self._voice if language == self._language else DEFAULT_VOICES[language],
+        )
         voice = handle_deprecated_voice(self.hass, original_voice)
         if voice not in TTS_VOICES[language]:
-            default_voice = TTS_VOICES[language][0]
+            default_voice = DEFAULT_VOICES[language]
             _LOGGER.debug(
                 "Unsupported voice %s detected, falling back to default %s for %s",
                 voice,
@@ -266,12 +417,13 @@ class CloudProvider(Provider):
         assert self.hass is not None
         gender: Gender | str | None = options.get(ATTR_GENDER)
         gender = handle_deprecated_gender(self.hass, gender)
-        original_voice: str | None = options.get(ATTR_VOICE)
-        if original_voice is None and language == self._language:
-            original_voice = self._voice
+        original_voice: str = options.get(
+            ATTR_VOICE,
+            self._voice if language == self._language else DEFAULT_VOICES[language],
+        )
         voice = handle_deprecated_voice(self.hass, original_voice)
         if voice not in TTS_VOICES[language]:
-            default_voice = TTS_VOICES[language][0]
+            default_voice = DEFAULT_VOICES[language]
             _LOGGER.debug(
                 "Unsupported voice %s detected, falling back to default %s for %s",
                 voice,

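Both tts.py hunks collapse a None-check dance into one dict.get(key, default) call, and switch the unsupported-voice fallback from TTS_VOICES[language][0] to the curated DEFAULT_VOICES table added above. Note that the two-argument get() evaluates its default eagerly, so DEFAULT_VOICES[language] is computed even when the caller supplied a voice; every supported language therefore needs a table entry. A condensed sketch with hypothetical values:

# Hypothetical stand-ins for the entity's configured language and voice.
DEFAULT_VOICES = {"en-US": "JennyNeural", "de-DE": "KatjaNeural"}
ENTITY_LANGUAGE, ENTITY_VOICE = "en-US", "JennyNeural"


def pick_voice(options: dict[str, str], language: str) -> str:
    # The default argument is evaluated eagerly, so DEFAULT_VOICES must
    # cover every language this can be called with.
    return options.get(
        "voice",
        ENTITY_VOICE if language == ENTITY_LANGUAGE else DEFAULT_VOICES[language],
    )


print(pick_voice({"voice": "ConradNeural"}, "de-DE"))  # explicit option wins
print(pick_voice({}, "de-DE"))  # KatjaNeural: the curated default, not the first list entry
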
dwd_weather_warnings (module docstring):
@@ -3,7 +3,7 @@
 Data is fetched from DWD:
 https://rcccm.dwd.de/DE/wetter/warnungen_aktuell/objekt_einbindung/objekteinbindung.html

-Warnungen vor extremem Unwetter (Stufe 4)  # codespell:ignore vor
+Warnungen vor extremem Unwetter (Stufe 4)  # codespell:ignore vor,extremem
 Unwetterwarnungen (Stufe 3)
 Warnungen vor markantem Wetter (Stufe 2)  # codespell:ignore vor
 Wetterwarnungen (Stufe 1)

homeassistant/components/faa_delays/__init__.py:
@@ -1,33 +1,27 @@
 """The FAA Delays integration."""

-from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_ID, Platform
 from homeassistant.core import HomeAssistant

-from .const import DOMAIN
-from .coordinator import FAADataUpdateCoordinator
+from .coordinator import FAAConfigEntry, FAADataUpdateCoordinator

 PLATFORMS = [Platform.BINARY_SENSOR]


-async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_setup_entry(hass: HomeAssistant, entry: FAAConfigEntry) -> bool:
     """Set up FAA Delays from a config entry."""
     code = entry.data[CONF_ID]

-    coordinator = FAADataUpdateCoordinator(hass, code)
+    coordinator = FAADataUpdateCoordinator(hass, entry, code)
     await coordinator.async_config_entry_first_refresh()

-    hass.data.setdefault(DOMAIN, {})
-    hass.data[DOMAIN][entry.entry_id] = coordinator
+    entry.runtime_data = coordinator

     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

     return True


-async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_unload_entry(hass: HomeAssistant, entry: FAAConfigEntry) -> bool:
     """Unload a config entry."""
-    unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
-    if unload_ok:
-        hass.data[DOMAIN].pop(entry.entry_id)
-    return unload_ok
+    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

homeassistant/components/faa_delays/binary_sensor.py:
@@ -12,13 +12,12 @@ from homeassistant.components.binary_sensor import (
     BinarySensorEntity,
     BinarySensorEntityDescription,
 )
-from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.update_coordinator import CoordinatorEntity

-from . import FAADataUpdateCoordinator
+from . import FAAConfigEntry, FAADataUpdateCoordinator
 from .const import DOMAIN


@@ -84,10 +83,10 @@ FAA_BINARY_SENSORS: tuple[FaaDelaysBinarySensorEntityDescription, ...] = (


 async def async_setup_entry(
-    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
+    hass: HomeAssistant, entry: FAAConfigEntry, async_add_entities: AddEntitiesCallback
 ) -> None:
     """Set up a FAA sensor based on a config entry."""
-    coordinator = hass.data[DOMAIN][entry.entry_id]
+    coordinator = entry.runtime_data

     entities = [
         FAABinarySensor(coordinator, entry.entry_id, description)

homeassistant/components/faa_delays/coordinator.py:
@@ -7,6 +7,7 @@ import logging
 from aiohttp import ClientConnectionError
 from faadelays import Airport

+from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import aiohttp_client
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -15,14 +16,20 @@ from .const import DOMAIN

 _LOGGER = logging.getLogger(__name__)

+type FAAConfigEntry = ConfigEntry[FAADataUpdateCoordinator]
+

 class FAADataUpdateCoordinator(DataUpdateCoordinator[Airport]):
     """Class to manage fetching FAA API data from a single endpoint."""

-    def __init__(self, hass: HomeAssistant, code: str) -> None:
+    def __init__(self, hass: HomeAssistant, entry: FAAConfigEntry, code: str) -> None:
         """Initialize the coordinator."""
         super().__init__(
-            hass, _LOGGER, name=DOMAIN, update_interval=timedelta(minutes=1)
+            hass,
+            _LOGGER,
+            config_entry=entry,
+            name=DOMAIN,
+            update_interval=timedelta(minutes=1),
         )
         self.session = aiohttp_client.async_get_clientsession(hass)
         self.data = Airport(code, self.session)

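The three FAA Delays files are one mechanical migration to typed config entries: a PEP 695 type alias parametrizes ConfigEntry with the coordinator it will carry, entry.runtime_data replaces the hass.data[DOMAIN][entry.entry_id] bookkeeping, and the new config_entry= argument ties the DataUpdateCoordinator to its entry. A toy, framework-free sketch of why the alias buys type safety (all names here are stand-ins, not Home Assistant's classes):

from dataclasses import dataclass


@dataclass
class ConfigEntry[T]:
    """Toy stand-in for homeassistant.config_entries.ConfigEntry."""

    entry_id: str
    runtime_data: T | None = None


@dataclass
class Coordinator:
    data: float


type MyConfigEntry = ConfigEntry[Coordinator]


def setup_entry(entry: MyConfigEntry) -> bool:
    # Per-entry state lives on the entry itself; a type checker now knows
    # entry.runtime_data is a Coordinator, and async_unload_entry needs no
    # manual cleanup because the reference dies with the entry.
    entry.runtime_data = Coordinator(data=123.45)
    return True


entry: MyConfigEntry = ConfigEntry(entry_id="abc")
setup_entry(entry)
assert entry.runtime_data is not None and entry.runtime_data.data == 123.45
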
homeassistant/components/fastdotcom/__init__.py:
@@ -4,20 +4,20 @@ from __future__ import annotations

 import logging

-from homeassistant.config_entries import ConfigEntry, ConfigEntryState
+from homeassistant.config_entries import ConfigEntryState
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.start import async_at_started

-from .const import DOMAIN, PLATFORMS
-from .coordinator import FastdotcomDataUpdateCoordinator
+from .const import PLATFORMS
+from .coordinator import FastdotcomConfigEntry, FastdotcomDataUpdateCoordinator

 _LOGGER = logging.getLogger(__name__)


-async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_setup_entry(hass: HomeAssistant, entry: FastdotcomConfigEntry) -> bool:
     """Set up Fast.com from a config entry."""
-    coordinator = FastdotcomDataUpdateCoordinator(hass)
-    hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
+    coordinator = FastdotcomDataUpdateCoordinator(hass, entry)
+    entry.runtime_data = coordinator

     await hass.config_entries.async_forward_entry_setups(
         entry,
@@ -36,8 +36,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     return True


-async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_unload_entry(hass: HomeAssistant, entry: FastdotcomConfigEntry) -> bool:
     """Unload Fast.com config entry."""
-    if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
-        hass.data[DOMAIN].pop(entry.entry_id)
-    return unload_ok
+    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

homeassistant/components/fastdotcom/coordinator.py:
@@ -6,20 +6,24 @@ from datetime import timedelta

 from fastdotcom import fast_com

+from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

 from .const import DEFAULT_INTERVAL, DOMAIN, LOGGER

+type FastdotcomConfigEntry = ConfigEntry[FastdotcomDataUpdateCoordinator]
+

 class FastdotcomDataUpdateCoordinator(DataUpdateCoordinator[float]):
     """Class to manage fetching Fast.com data API."""

-    def __init__(self, hass: HomeAssistant) -> None:
+    def __init__(self, hass: HomeAssistant, entry: FastdotcomConfigEntry) -> None:
         """Initialize the coordinator for Fast.com."""
         super().__init__(
             hass,
             LOGGER,
+            config_entry=entry,
             name=DOMAIN,
             update_interval=timedelta(hours=DEFAULT_INTERVAL),
         )

homeassistant/components/fastdotcom/diagnostics.py:
@@ -4,21 +4,13 @@ from __future__ import annotations

 from typing import Any

-from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant

-from .const import DOMAIN
-from .coordinator import FastdotcomDataUpdateCoordinator
+from .coordinator import FastdotcomConfigEntry


 async def async_get_config_entry_diagnostics(
-    hass: HomeAssistant, config_entry: ConfigEntry
+    hass: HomeAssistant, config_entry: FastdotcomConfigEntry
 ) -> dict[str, Any]:
     """Return diagnostics for the config entry."""
-    coordinator: FastdotcomDataUpdateCoordinator = hass.data[DOMAIN][
-        config_entry.entry_id
-    ]
-
-    return {
-        "coordinator_data": coordinator.data,
-    }
+    return {"coordinator_data": config_entry.runtime_data.data}

homeassistant/components/fastdotcom/sensor.py:
@@ -7,7 +7,6 @@ from homeassistant.components.sensor import (
     SensorEntity,
     SensorStateClass,
 )
-from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import UnitOfDataRate
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
@@ -15,17 +14,16 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.update_coordinator import CoordinatorEntity

 from .const import DOMAIN
-from .coordinator import FastdotcomDataUpdateCoordinator
+from .coordinator import FastdotcomConfigEntry, FastdotcomDataUpdateCoordinator


 async def async_setup_entry(
     hass: HomeAssistant,
-    entry: ConfigEntry,
+    entry: FastdotcomConfigEntry,
     async_add_entities: AddEntitiesCallback,
 ) -> None:
     """Set up the Fast.com sensor."""
-    coordinator: FastdotcomDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
-    async_add_entities([SpeedtestSensor(entry.entry_id, coordinator)])
+    async_add_entities([SpeedtestSensor(entry.entry_id, entry.runtime_data)])


 class SpeedtestSensor(CoordinatorEntity[FastdotcomDataUpdateCoordinator], SensorEntity):

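The four Fast.com files repeat the same runtime_data migration and show its payoff: async_unload_entry and the diagnostics handler each collapse to a single line because there is no hass.data dictionary left to maintain. In terms of the toy ConfigEntry sketched above:

def get_diagnostics(entry: MyConfigEntry) -> dict[str, object]:
    # No hass.data[DOMAIN][entry.entry_id] lookup and no local annotation;
    # the alias already tells the type checker what runtime_data holds.
    return {"coordinator_data": entry.runtime_data.data if entry.runtime_data else None}
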
homeassistant/components/file_upload/__init__.py:
@@ -21,9 +21,11 @@ from homeassistant.core import Event, HomeAssistant, callback
 from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.typing import ConfigType
 from homeassistant.util import raise_if_invalid_filename
+from homeassistant.util.hass_dict import HassKey
 from homeassistant.util.ulid import ulid_hex

 DOMAIN = "file_upload"
+_DATA: HassKey[FileUploadData] = HassKey(DOMAIN)

 ONE_MEGABYTE = 1024 * 1024
 MAX_SIZE = 100 * ONE_MEGABYTE
@@ -41,7 +43,7 @@ def process_uploaded_file(hass: HomeAssistant, file_id: str) -> Iterator[Path]:
     if DOMAIN not in hass.data:
         raise ValueError("File does not exist")

-    file_upload_data: FileUploadData = hass.data[DOMAIN]
+    file_upload_data = hass.data[_DATA]

     if not file_upload_data.has_file(file_id):
         raise ValueError("File does not exist")
@@ -149,10 +151,10 @@ class FileUploadView(HomeAssistantView):
         hass = request.app[KEY_HASS]
         file_id = ulid_hex()

-        if DOMAIN not in hass.data:
-            hass.data[DOMAIN] = await FileUploadData.create(hass)
+        if _DATA not in hass.data:
+            hass.data[_DATA] = await FileUploadData.create(hass)

-        file_upload_data: FileUploadData = hass.data[DOMAIN]
+        file_upload_data = hass.data[_DATA]
         file_dir = file_upload_data.file_dir(file_id)
         queue: SimpleQueue[tuple[bytes, asyncio.Future[None] | None] | None] = (
             SimpleQueue()
@@ -206,7 +208,7 @@ class FileUploadView(HomeAssistantView):
             raise web.HTTPNotFound

         file_id = data["file_id"]
-        file_upload_data: FileUploadData = hass.data[DOMAIN]
+        file_upload_data = hass.data[_DATA]

         if file_upload_data.files.pop(file_id, None) is None:
             raise web.HTTPNotFound

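HassKey is a typing device: a str subclass that is generic over the stored value's type, so hass.data[_DATA] comes back typed as FileUploadData without the repeated file_upload_data: FileUploadData = ... annotations the old code needed. A toy version of the idea (Home Assistant's real HassDict additionally overloads __getitem__/__setitem__ on the key type, so no cast appears at call sites):

from typing import Any, cast


class Key[T](str):
    """Toy HassKey: a plain str at runtime that carries the value type."""

    __slots__ = ()


DATA: Key[list[int]] = Key("file_upload")
store: dict[str, Any] = {}  # stand-in for hass.data

store[DATA] = [1, 2, 3]  # a Key works anywhere a str key does


def get_typed[T](store: dict[str, Any], key: Key[T]) -> T:
    # One generic accessor replaces scattered annotations; HassDict bakes
    # this into __getitem__ itself via @overload.
    return cast(T, store[key])


print(get_typed(store, DATA))  # [1, 2, 3], inferred as list[int]
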
homeassistant/components/frontier_silicon/media_player.py:
@@ -244,7 +244,7 @@ class AFSAPIDevice(MediaPlayerEntity):
         """Send volume up command."""
         volume = await self.fs_device.get_volume()
         volume = int(volume or 0) + 1
-        await self.fs_device.set_volume(min(volume, self._max_volume))
+        await self.fs_device.set_volume(min(volume, self._max_volume or 1))

     async def async_volume_down(self) -> None:
         """Send volume down command."""

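A small crash fix: _max_volume starts as None until the device has reported its limit, and min(volume, None) raises TypeError ('<' not supported between int and NoneType). The `or 1` clamps to 1 during that window instead of crashing; note it also maps an explicit 0 to 1, the safer direction for a volume ceiling. The behavior in two lines:

for max_volume in (None, 0, 32):
    print(min(5, max_volume or 1))  # 1, 1, 5 -- no TypeError while the limit is unknown
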
homeassistant/components/hassio/backup.py:
@@ -30,12 +30,15 @@ from homeassistant.components.backup import (
     BackupReaderWriter,
     BackupReaderWriterError,
     CreateBackupEvent,
+    CreateBackupStage,
+    CreateBackupState,
     Folder,
     IdleEvent,
     IncorrectPasswordError,
     ManagerBackup,
     NewBackup,
     RestoreBackupEvent,
+    RestoreBackupStage,
     RestoreBackupState,
     WrittenBackup,
     async_get_manager as async_get_backup_manager,
@@ -47,6 +50,7 @@ from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.dispatcher import async_dispatcher_connect
 from homeassistant.util import dt as dt_util
+from homeassistant.util.enum import try_parse_enum

 from .const import DOMAIN, EVENT_SUPERVISOR_EVENT
 from .handler import get_supervisor_client
@@ -336,6 +340,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
             self._async_wait_for_backup(
                 backup,
                 locations,
+                on_progress=on_progress,
                 remove_after_upload=locations == [LOCATION_CLOUD_BACKUP],
             ),
             name="backup_manager_create_backup",
@@ -349,6 +354,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
         backup: supervisor_backups.NewBackup,
         locations: list[str | None],
         *,
+        on_progress: Callable[[CreateBackupEvent], None],
         remove_after_upload: bool,
     ) -> WrittenBackup:
         """Wait for a backup to complete."""
@@ -360,6 +366,14 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
         def on_job_progress(data: Mapping[str, Any]) -> None:
             """Handle backup progress."""
             nonlocal backup_id
+            if not (stage := try_parse_enum(CreateBackupStage, data.get("stage"))):
+                _LOGGER.debug("Unknown create stage: %s", data.get("stage"))
+            else:
+                on_progress(
+                    CreateBackupEvent(
+                        reason=None, stage=stage, state=CreateBackupState.IN_PROGRESS
+                    )
+                )
             if data.get("done") is True:
                 backup_id = data.get("reference")
                 create_errors.extend(data.get("errors", []))
@@ -527,6 +541,14 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
         @callback
         def on_job_progress(data: Mapping[str, Any]) -> None:
             """Handle backup restore progress."""
+            if not (stage := try_parse_enum(RestoreBackupStage, data.get("stage"))):
+                _LOGGER.debug("Unknown restore stage: %s", data.get("stage"))
+            else:
+                on_progress(
+                    RestoreBackupEvent(
+                        reason=None, stage=stage, state=RestoreBackupState.IN_PROGRESS
+                    )
+                )
             if data.get("done") is True:
                 restore_complete.set()
                 restore_errors.extend(data.get("errors", []))
@@ -553,15 +575,26 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):

         _LOGGER.debug("Found restore job ID %s in environment", restore_job_id)

+        sent_event = False
+
         @callback
         def on_job_progress(data: Mapping[str, Any]) -> None:
             """Handle backup restore progress."""
+            nonlocal sent_event
+
+            if not (stage := try_parse_enum(RestoreBackupStage, data.get("stage"))):
+                _LOGGER.debug("Unknown restore stage: %s", data.get("stage"))
+
             if data.get("done") is not True:
-                on_progress(
-                    RestoreBackupEvent(
-                        reason="", stage=None, state=RestoreBackupState.IN_PROGRESS
+                if stage or not sent_event:
+                    sent_event = True
+                    on_progress(
+                        RestoreBackupEvent(
+                            reason=None,
+                            stage=stage,
+                            state=RestoreBackupState.IN_PROGRESS,
+                        )
                     )
-                )
                 return

             restore_errors = data.get("errors", [])
@@ -571,14 +604,14 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
                 on_progress(
                     RestoreBackupEvent(
                         reason="unknown_error",
-                        stage=None,
+                        stage=stage,
                         state=RestoreBackupState.FAILED,
                     )
                 )
             else:
                 on_progress(
                     RestoreBackupEvent(
-                        reason="", stage=None, state=RestoreBackupState.COMPLETED
+                        reason=None, stage=stage, state=RestoreBackupState.COMPLETED
                     )
                 )
             on_progress(IdleEvent())

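All three progress callbacks now funnel the Supervisor's stage string through homeassistant.util.enum.try_parse_enum, which yields the enum member for a known value and None otherwise, so an unrecognized stage degrades to a debug log instead of raising inside the callback; the restore path additionally tracks sent_event so stage-less updates emit at most one synthetic IN_PROGRESS event. A self-contained sketch of the guard (stage values abbreviated; the real enums live in the backup integration):

from enum import StrEnum


class CreateBackupStage(StrEnum):
    ADDONS = "addons"
    HOME_ASSISTANT = "home_assistant"


def try_parse_enum[E: StrEnum](cls: type[E], value: object) -> E | None:
    """Mirror HA's helper: member on success, None for unknown values."""
    try:
        return cls(value)
    except ValueError:
        return None


print(try_parse_enum(CreateBackupStage, "addons"))   # CreateBackupStage.ADDONS
print(try_parse_enum(CreateBackupStage, "mystery"))  # None -> debug log only
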
homeassistant/components/homewizard/__init__.py:
@@ -25,7 +25,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: HomeWizardConfigEntry) -> bool:

     api: HomeWizardEnergy

-    if token := entry.data.get(CONF_TOKEN):
+    is_battery = entry.unique_id.startswith("HWE-BAT") if entry.unique_id else False
+
+    if (token := entry.data.get(CONF_TOKEN)) and is_battery:
         api = HomeWizardEnergyV2(
             entry.data[CONF_IP_ADDRESS],
             token=token,
@@ -37,7 +39,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: HomeWizardConfigEntry) -> bool:
             clientsession=async_get_clientsession(hass),
         )

-    await async_check_v2_support_and_create_issue(hass, entry)
+    if is_battery:
+        await async_check_v2_support_and_create_issue(hass, entry)

     coordinator = HWEnergyDeviceUpdateCoordinator(hass, api)
     try:

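The HomeWizard gate means a stored token alone no longer selects the v2 API: the entry's unique_id must also identify a battery (HWE-BAT) device, and the v2-support repair issue is likewise only raised for batteries. The predicate, pulled out for clarity (function name is hypothetical):

def use_v2_api(unique_id: str | None, token: str | None) -> bool:
    # Only HWE-BAT devices use the token-authenticated v2 API here; a stale
    # token on another model must not switch it away from the v1 client.
    is_battery = unique_id.startswith("HWE-BAT") if unique_id else False
    return bool(token) and is_battery


assert use_v2_api("HWE-BAT-aabbcc", "token123") is True
assert use_v2_api("HWE-P1-aabbcc", "token123") is False
assert use_v2_api(None, "token123") is False
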
hue (light platform):
@@ -408,7 +408,7 @@ class HueLight(CoordinatorEntity, LightEntity):
         if self._fixed_color_mode:
             return self._fixed_color_mode

-        # The light supports both hs/xy and white with adjustabe color_temperature
+        # The light supports both hs/xy and white with adjustable color_temperature
         mode = self._color_mode
         if mode in ("xy", "hs"):
             return ColorMode.HS

isy994 (sensor device class map):
@@ -73,7 +73,7 @@ ISY_CONTROL_TO_DEVICE_CLASS = {
     "CV": SensorDeviceClass.VOLTAGE,
     "DEWPT": SensorDeviceClass.TEMPERATURE,
     "DISTANC": SensorDeviceClass.DISTANCE,
-    "ETO": SensorDeviceClass.PRECIPITATION_INTENSITY,
+    "ETO": SensorDeviceClass.PRECIPITATION_INTENSITY,  # codespell:ignore eto
     "FATM": SensorDeviceClass.WEIGHT,
     "FREQ": SensorDeviceClass.FREQUENCY,
     "MUSCLEM": SensorDeviceClass.WEIGHT,

homeassistant/components/jellyfin/manifest.json:
@@ -1,7 +1,7 @@
 {
   "domain": "jellyfin",
   "name": "Jellyfin",
-  "codeowners": ["@j-stienstra", "@ctalkington"],
+  "codeowners": ["@RunC0deRun", "@ctalkington"],
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/jellyfin",
   "integration_type": "service",

|
|||||||
"documentation": "https://www.home-assistant.io/integrations/mill",
|
"documentation": "https://www.home-assistant.io/integrations/mill",
|
||||||
"iot_class": "local_polling",
|
"iot_class": "local_polling",
|
||||||
"loggers": ["mill", "mill_local"],
|
"loggers": ["mill", "mill_local"],
|
||||||
"requirements": ["millheater==0.12.2", "mill-local==0.3.0"]
|
"requirements": ["millheater==0.12.3", "mill-local==0.3.0"]
|
||||||
}
|
}
|
||||||
|
homeassistant/components/mqtt/light/schema_json.py:
@@ -2,7 +2,6 @@

 from __future__ import annotations

-from collections.abc import Callable
 from contextlib import suppress
 import logging
 from typing import TYPE_CHECKING, Any, cast
@@ -24,7 +23,6 @@ from homeassistant.components.light import (
     ATTR_XY_COLOR,
     DEFAULT_MAX_KELVIN,
     DEFAULT_MIN_KELVIN,
-    DOMAIN as LIGHT_DOMAIN,
     ENTITY_ID_FORMAT,
     FLASH_LONG,
     FLASH_SHORT,
@@ -34,7 +32,6 @@ from homeassistant.components.light import (
     LightEntityFeature,
     brightness_supported,
     color_supported,
-    filter_supported_color_modes,
     valid_supported_color_modes,
 )
 from homeassistant.const import (
@@ -48,15 +45,13 @@ from homeassistant.const import (
     CONF_XY,
     STATE_ON,
 )
-from homeassistant.core import async_get_hass, callback
+from homeassistant.core import callback
 from homeassistant.helpers import config_validation as cv
-from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
 from homeassistant.helpers.json import json_dumps
 from homeassistant.helpers.restore_state import RestoreEntity
 from homeassistant.helpers.typing import ConfigType, VolSchemaType
 from homeassistant.util import color as color_util
 from homeassistant.util.json import json_loads_object
-from homeassistant.util.yaml import dump as yaml_dump

 from .. import subscription
 from ..config import DEFAULT_QOS, DEFAULT_RETAIN, MQTT_RW_SCHEMA
@@ -68,7 +63,6 @@ from ..const import (
     CONF_QOS,
     CONF_RETAIN,
     CONF_STATE_TOPIC,
-    DOMAIN as MQTT_DOMAIN,
 )
 from ..entity import MqttEntity
 from ..models import ReceiveMessage
@@ -86,15 +80,10 @@ _LOGGER = logging.getLogger(__name__)
 DOMAIN = "mqtt_json"

 DEFAULT_BRIGHTNESS = False
-DEFAULT_COLOR_MODE = False
-DEFAULT_COLOR_TEMP = False
 DEFAULT_EFFECT = False
 DEFAULT_FLASH_TIME_LONG = 10
 DEFAULT_FLASH_TIME_SHORT = 2
 DEFAULT_NAME = "MQTT JSON Light"
-DEFAULT_RGB = False
-DEFAULT_XY = False
-DEFAULT_HS = False
 DEFAULT_BRIGHTNESS_SCALE = 255
 DEFAULT_WHITE_SCALE = 255

@@ -110,89 +99,6 @@ CONF_MAX_MIREDS = "max_mireds"
 CONF_MIN_MIREDS = "min_mireds"


-def valid_color_configuration(
-    setup_from_yaml: bool,
-) -> Callable[[dict[str, Any]], dict[str, Any]]:
-    """Test color_mode is not combined with deprecated config."""
-
-    def _valid_color_configuration(config: ConfigType) -> ConfigType:
-        deprecated = {CONF_COLOR_TEMP, CONF_HS, CONF_RGB, CONF_XY}
-        deprecated_flags_used = any(config.get(key) for key in deprecated)
-        if config.get(CONF_SUPPORTED_COLOR_MODES):
-            if deprecated_flags_used:
-                raise vol.Invalid(
-                    "supported_color_modes must not "
-                    f"be combined with any of {deprecated}"
-                )
-        elif deprecated_flags_used:
-            deprecated_flags = ", ".join(key for key in deprecated if key in config)
-            _LOGGER.warning(
-                "Deprecated flags [%s] used in MQTT JSON light config "
-                "for handling color mode, please use `supported_color_modes` instead. "
-                "Got: %s. This will stop working in Home Assistant Core 2025.3",
-                deprecated_flags,
-                config,
-            )
-            if not setup_from_yaml:
-                return config
-            issue_id = hex(hash(frozenset(config)))
-            yaml_config_str = yaml_dump(config)
-            learn_more_url = (
-                "https://www.home-assistant.io/integrations/"
-                f"{LIGHT_DOMAIN}.mqtt/#json-schema"
-            )
-            hass = async_get_hass()
-            async_create_issue(
-                hass,
-                MQTT_DOMAIN,
-                issue_id,
-                issue_domain=LIGHT_DOMAIN,
-                is_fixable=False,
-                severity=IssueSeverity.WARNING,
-                learn_more_url=learn_more_url,
-                translation_placeholders={
-                    "deprecated_flags": deprecated_flags,
-                    "config": yaml_config_str,
-                },
-                translation_key="deprecated_color_handling",
-            )
-
-        if CONF_COLOR_MODE in config:
-            _LOGGER.warning(
-                "Deprecated flag `color_mode` used in MQTT JSON light config "
-                ", the `color_mode` flag is not used anymore and should be removed. "
-                "Got: %s. This will stop working in Home Assistant Core 2025.3",
-                config,
-            )
-            if not setup_from_yaml:
-                return config
-            issue_id = hex(hash(frozenset(config)))
-            yaml_config_str = yaml_dump(config)
-            learn_more_url = (
-                "https://www.home-assistant.io/integrations/"
-                f"{LIGHT_DOMAIN}.mqtt/#json-schema"
-            )
-            hass = async_get_hass()
-            async_create_issue(
-                hass,
-                MQTT_DOMAIN,
-                issue_id,
-                breaks_in_ha_version="2025.3.0",
-                issue_domain=LIGHT_DOMAIN,
-                is_fixable=False,
-                severity=IssueSeverity.WARNING,
-                learn_more_url=learn_more_url,
-                translation_placeholders={
-                    "config": yaml_config_str,
-                },
-                translation_key="deprecated_color_mode_flag",
-            )
-
-        return config
-
-    return _valid_color_configuration
-
-
 _PLATFORM_SCHEMA_BASE = (
     MQTT_RW_SCHEMA.extend(
         {
@@ -200,12 +106,6 @@ _PLATFORM_SCHEMA_BASE = (
             vol.Optional(
                 CONF_BRIGHTNESS_SCALE, default=DEFAULT_BRIGHTNESS_SCALE
             ): vol.All(vol.Coerce(int), vol.Range(min=1)),
-            # CONF_COLOR_MODE was deprecated with HA Core 2024.4 and will be
-            # removed with HA Core 2025.3
-            vol.Optional(CONF_COLOR_MODE): cv.boolean,
-            # CONF_COLOR_TEMP was deprecated with HA Core 2024.4 and will be
-            # removed with HA Core 2025.3
-            vol.Optional(CONF_COLOR_TEMP, default=DEFAULT_COLOR_TEMP): cv.boolean,
             vol.Optional(CONF_COLOR_TEMP_KELVIN, default=False): cv.boolean,
             vol.Optional(CONF_EFFECT, default=DEFAULT_EFFECT): cv.boolean,
             vol.Optional(CONF_EFFECT_LIST): vol.All(cv.ensure_list, [cv.string]),
@@ -215,9 +115,6 @@ _PLATFORM_SCHEMA_BASE = (
             vol.Optional(
                 CONF_FLASH_TIME_SHORT, default=DEFAULT_FLASH_TIME_SHORT
            ): cv.positive_int,
|
): cv.positive_int,
|
||||||
# CONF_HS was deprecated with HA Core 2024.4 and will be
|
|
||||||
# removed with HA Core 2025.3
|
|
||||||
vol.Optional(CONF_HS, default=DEFAULT_HS): cv.boolean,
|
|
||||||
vol.Optional(CONF_MAX_MIREDS): cv.positive_int,
|
vol.Optional(CONF_MAX_MIREDS): cv.positive_int,
|
||||||
vol.Optional(CONF_MIN_MIREDS): cv.positive_int,
|
vol.Optional(CONF_MIN_MIREDS): cv.positive_int,
|
||||||
vol.Optional(CONF_MAX_KELVIN): cv.positive_int,
|
vol.Optional(CONF_MAX_KELVIN): cv.positive_int,
|
||||||
@ -227,9 +124,6 @@ _PLATFORM_SCHEMA_BASE = (
|
|||||||
vol.Coerce(int), vol.In([0, 1, 2])
|
vol.Coerce(int), vol.In([0, 1, 2])
|
||||||
),
|
),
|
||||||
vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
|
vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
|
||||||
# CONF_RGB was deprecated with HA Core 2024.4 and will be
|
|
||||||
# removed with HA Core 2025.3
|
|
||||||
vol.Optional(CONF_RGB, default=DEFAULT_RGB): cv.boolean,
|
|
||||||
vol.Optional(CONF_STATE_TOPIC): valid_subscribe_topic,
|
vol.Optional(CONF_STATE_TOPIC): valid_subscribe_topic,
|
||||||
vol.Optional(CONF_SUPPORTED_COLOR_MODES): vol.All(
|
vol.Optional(CONF_SUPPORTED_COLOR_MODES): vol.All(
|
||||||
cv.ensure_list,
|
cv.ensure_list,
|
||||||
@ -240,22 +134,29 @@ _PLATFORM_SCHEMA_BASE = (
|
|||||||
vol.Optional(CONF_WHITE_SCALE, default=DEFAULT_WHITE_SCALE): vol.All(
|
vol.Optional(CONF_WHITE_SCALE, default=DEFAULT_WHITE_SCALE): vol.All(
|
||||||
vol.Coerce(int), vol.Range(min=1)
|
vol.Coerce(int), vol.Range(min=1)
|
||||||
),
|
),
|
||||||
# CONF_XY was deprecated with HA Core 2024.4 and will be
|
|
||||||
# removed with HA Core 2025.3
|
|
||||||
vol.Optional(CONF_XY, default=DEFAULT_XY): cv.boolean,
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
.extend(MQTT_ENTITY_COMMON_SCHEMA.schema)
|
.extend(MQTT_ENTITY_COMMON_SCHEMA.schema)
|
||||||
.extend(MQTT_LIGHT_SCHEMA_SCHEMA.schema)
|
.extend(MQTT_LIGHT_SCHEMA_SCHEMA.schema)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Support for legacy color_mode handling was removed with HA Core 2025.3
|
||||||
|
# The removed attributes can be removed from the schema's from HA Core 2026.3
|
||||||
DISCOVERY_SCHEMA_JSON = vol.All(
|
DISCOVERY_SCHEMA_JSON = vol.All(
|
||||||
valid_color_configuration(False),
|
cv.removed(CONF_COLOR_MODE, raise_if_present=False),
|
||||||
|
cv.removed(CONF_COLOR_TEMP, raise_if_present=False),
|
||||||
|
cv.removed(CONF_HS, raise_if_present=False),
|
||||||
|
cv.removed(CONF_RGB, raise_if_present=False),
|
||||||
|
cv.removed(CONF_XY, raise_if_present=False),
|
||||||
_PLATFORM_SCHEMA_BASE.extend({}, extra=vol.REMOVE_EXTRA),
|
_PLATFORM_SCHEMA_BASE.extend({}, extra=vol.REMOVE_EXTRA),
|
||||||
)
|
)
|
||||||
|
|
||||||
PLATFORM_SCHEMA_MODERN_JSON = vol.All(
|
PLATFORM_SCHEMA_MODERN_JSON = vol.All(
|
||||||
valid_color_configuration(True),
|
cv.removed(CONF_COLOR_MODE),
|
||||||
|
cv.removed(CONF_COLOR_TEMP),
|
||||||
|
cv.removed(CONF_HS),
|
||||||
|
cv.removed(CONF_RGB),
|
||||||
|
cv.removed(CONF_XY),
|
||||||
_PLATFORM_SCHEMA_BASE,
|
_PLATFORM_SCHEMA_BASE,
|
||||||
)
|
)
|
||||||
|
|
||||||
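Editor's note: the schemas above chain cv.removed(...) validators in front of the base schema. As a rough illustration of the idea — a hedged sketch in plain voluptuous, not Home Assistant's actual implementation — such a validator can look like this:

import voluptuous as vol

def removed(key, raise_if_present=True):
    """Reject, or silently drop, a config key that is no longer supported."""
    def validator(config):
        if key in config:
            if raise_if_present:
                raise vol.Invalid(f"The '{key}' option has been removed")
            config = {k: v for k, v in config.items() if k != key}  # drop it
        return config
    return validator

SCHEMA = vol.All(
    removed("color_mode", raise_if_present=False),
    vol.Schema({vol.Required("name"): str}, extra=vol.ALLOW_EXTRA),
)
print(SCHEMA({"color_mode": True, "name": "kitchen"}))  # {'name': 'kitchen'}
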
@ -272,8 +173,6 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
     _topic: dict[str, str | None]
     _optimistic: bool
 
-    _deprecated_color_handling: bool = False
-
     @staticmethod
     def config_schema() -> VolSchemaType:
         """Return the config schema."""
@ -318,122 +217,65 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
             self._attr_color_mode = next(iter(self.supported_color_modes))
         else:
             self._attr_color_mode = ColorMode.UNKNOWN
-        else:
-            self._deprecated_color_handling = True
-            color_modes = {ColorMode.ONOFF}
-            if config[CONF_BRIGHTNESS]:
-                color_modes.add(ColorMode.BRIGHTNESS)
-            if config[CONF_COLOR_TEMP]:
-                color_modes.add(ColorMode.COLOR_TEMP)
-            if config[CONF_HS] or config[CONF_RGB] or config[CONF_XY]:
-                color_modes.add(ColorMode.HS)
-            self._attr_supported_color_modes = filter_supported_color_modes(color_modes)
-            if self.supported_color_modes and len(self.supported_color_modes) == 1:
-                self._fixed_color_mode = next(iter(self.supported_color_modes))
 
     def _update_color(self, values: dict[str, Any]) -> None:
-        if self._deprecated_color_handling:
-            # Deprecated color handling
-            try:
-                red = int(values["color"]["r"])
-                green = int(values["color"]["g"])
-                blue = int(values["color"]["b"])
-                self._attr_hs_color = color_util.color_RGB_to_hs(red, green, blue)
-            except KeyError:
-                pass
-            except ValueError:
-                _LOGGER.warning(
-                    "Invalid RGB color value '%s' received for entity %s",
-                    values,
-                    self.entity_id,
-                )
-                return
-
-            try:
-                x_color = float(values["color"]["x"])
-                y_color = float(values["color"]["y"])
-                self._attr_hs_color = color_util.color_xy_to_hs(x_color, y_color)
-            except KeyError:
-                pass
-            except ValueError:
-                _LOGGER.warning(
-                    "Invalid XY color value '%s' received for entity %s",
-                    values,
-                    self.entity_id,
-                )
-                return
-
-            try:
-                hue = float(values["color"]["h"])
-                saturation = float(values["color"]["s"])
-                self._attr_hs_color = (hue, saturation)
-            except KeyError:
-                pass
-            except ValueError:
-                _LOGGER.warning(
-                    "Invalid HS color value '%s' received for entity %s",
-                    values,
-                    self.entity_id,
-                )
-                return
-        else:
-            color_mode: str = values["color_mode"]
-            if not self._supports_color_mode(color_mode):
-                _LOGGER.warning(
-                    "Invalid color mode '%s' received for entity %s",
-                    color_mode,
-                    self.entity_id,
-                )
-                return
-            try:
-                if color_mode == ColorMode.COLOR_TEMP:
-                    self._attr_color_temp_kelvin = (
-                        values["color_temp"]
-                        if self._color_temp_kelvin
-                        else color_util.color_temperature_mired_to_kelvin(
-                            values["color_temp"]
-                        )
-                    )
-                    self._attr_color_mode = ColorMode.COLOR_TEMP
-                elif color_mode == ColorMode.HS:
-                    hue = float(values["color"]["h"])
-                    saturation = float(values["color"]["s"])
-                    self._attr_color_mode = ColorMode.HS
-                    self._attr_hs_color = (hue, saturation)
-                elif color_mode == ColorMode.RGB:
-                    r = int(values["color"]["r"])
-                    g = int(values["color"]["g"])
-                    b = int(values["color"]["b"])
-                    self._attr_color_mode = ColorMode.RGB
-                    self._attr_rgb_color = (r, g, b)
-                elif color_mode == ColorMode.RGBW:
-                    r = int(values["color"]["r"])
-                    g = int(values["color"]["g"])
-                    b = int(values["color"]["b"])
-                    w = int(values["color"]["w"])
-                    self._attr_color_mode = ColorMode.RGBW
-                    self._attr_rgbw_color = (r, g, b, w)
-                elif color_mode == ColorMode.RGBWW:
-                    r = int(values["color"]["r"])
-                    g = int(values["color"]["g"])
-                    b = int(values["color"]["b"])
-                    c = int(values["color"]["c"])
-                    w = int(values["color"]["w"])
-                    self._attr_color_mode = ColorMode.RGBWW
-                    self._attr_rgbww_color = (r, g, b, c, w)
-                elif color_mode == ColorMode.WHITE:
-                    self._attr_color_mode = ColorMode.WHITE
-                elif color_mode == ColorMode.XY:
-                    x = float(values["color"]["x"])
-                    y = float(values["color"]["y"])
-                    self._attr_color_mode = ColorMode.XY
-                    self._attr_xy_color = (x, y)
-            except (KeyError, ValueError):
-                _LOGGER.warning(
-                    "Invalid or incomplete color value '%s' received for entity %s",
-                    values,
-                    self.entity_id,
-                )
+        color_mode: str = values["color_mode"]
+        if not self._supports_color_mode(color_mode):
+            _LOGGER.warning(
+                "Invalid color mode '%s' received for entity %s",
+                color_mode,
+                self.entity_id,
+            )
+            return
+        try:
+            if color_mode == ColorMode.COLOR_TEMP:
+                self._attr_color_temp_kelvin = (
+                    values["color_temp"]
+                    if self._color_temp_kelvin
+                    else color_util.color_temperature_mired_to_kelvin(
+                        values["color_temp"]
+                    )
+                )
+                self._attr_color_mode = ColorMode.COLOR_TEMP
+            elif color_mode == ColorMode.HS:
+                hue = float(values["color"]["h"])
+                saturation = float(values["color"]["s"])
+                self._attr_color_mode = ColorMode.HS
+                self._attr_hs_color = (hue, saturation)
+            elif color_mode == ColorMode.RGB:
+                r = int(values["color"]["r"])
+                g = int(values["color"]["g"])
+                b = int(values["color"]["b"])
+                self._attr_color_mode = ColorMode.RGB
+                self._attr_rgb_color = (r, g, b)
+            elif color_mode == ColorMode.RGBW:
+                r = int(values["color"]["r"])
+                g = int(values["color"]["g"])
+                b = int(values["color"]["b"])
+                w = int(values["color"]["w"])
+                self._attr_color_mode = ColorMode.RGBW
+                self._attr_rgbw_color = (r, g, b, w)
+            elif color_mode == ColorMode.RGBWW:
+                r = int(values["color"]["r"])
+                g = int(values["color"]["g"])
+                b = int(values["color"]["b"])
+                c = int(values["color"]["c"])
+                w = int(values["color"]["w"])
+                self._attr_color_mode = ColorMode.RGBWW
+                self._attr_rgbww_color = (r, g, b, c, w)
+            elif color_mode == ColorMode.WHITE:
+                self._attr_color_mode = ColorMode.WHITE
+            elif color_mode == ColorMode.XY:
+                x = float(values["color"]["x"])
+                y = float(values["color"]["y"])
+                self._attr_color_mode = ColorMode.XY
+                self._attr_xy_color = (x, y)
+        except (KeyError, TypeError, ValueError):
+            _LOGGER.warning(
+                "Invalid or incomplete color value '%s' received for entity %s",
+                values,
+                self.entity_id,
+            )
 
     @callback
     def _state_received(self, msg: ReceiveMessage) -> None:
@ -447,18 +289,7 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
         elif values["state"] is None:
             self._attr_is_on = None
 
-        if (
-            self._deprecated_color_handling
-            and color_supported(self.supported_color_modes)
-            and "color" in values
-        ):
-            # Deprecated color handling
-            if values["color"] is None:
-                self._attr_hs_color = None
-            else:
-                self._update_color(values)
-
-        if not self._deprecated_color_handling and "color_mode" in values:
+        if color_supported(self.supported_color_modes) and "color_mode" in values:
             self._update_color(values)
 
         if brightness_supported(self.supported_color_modes):
@ -484,35 +315,6 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
                     self.entity_id,
                 )
 
-        if (
-            self._deprecated_color_handling
-            and self.supported_color_modes
-            and ColorMode.COLOR_TEMP in self.supported_color_modes
-        ):
-            # Deprecated color handling
-            try:
-                if values["color_temp"] is None:
-                    self._attr_color_temp_kelvin = None
-                else:
-                    self._attr_color_temp_kelvin = (
-                        values["color_temp"]  # type: ignore[assignment]
-                        if self._color_temp_kelvin
-                        else color_util.color_temperature_mired_to_kelvin(
-                            values["color_temp"]  # type: ignore[arg-type]
-                        )
-                    )
-            except KeyError:
-                pass
-            except (TypeError, ValueError):
-                _LOGGER.warning(
-                    "Invalid color temp value '%s' received for entity %s",
-                    values["color_temp"],
-                    self.entity_id,
-                )
-            # Allow to switch back to color_temp
-            if "color" not in values:
-                self._attr_hs_color = None
-
         if self.supported_features and LightEntityFeature.EFFECT:
             with suppress(KeyError):
                 self._attr_effect = cast(str, values["effect"])
@ -565,19 +367,6 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
             )
             self._attr_xy_color = last_attributes.get(ATTR_XY_COLOR, self.xy_color)
 
-    @property
-    def color_mode(self) -> ColorMode | str | None:
-        """Return current color mode."""
-        if not self._deprecated_color_handling:
-            return self._attr_color_mode
-        if self._fixed_color_mode:
-            # Legacy light with support for a single color mode
-            return self._fixed_color_mode
-        # Legacy light with support for ct + hs, prioritize hs
-        if self.hs_color is not None:
-            return ColorMode.HS
-        return ColorMode.COLOR_TEMP
-
     def _set_flash_and_transition(self, message: dict[str, Any], **kwargs: Any) -> None:
         if ATTR_TRANSITION in kwargs:
             message["transition"] = kwargs[ATTR_TRANSITION]
@ -604,17 +393,15 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
     def _supports_color_mode(self, color_mode: ColorMode | str) -> bool:
         """Return True if the light natively supports a color mode."""
         return (
-            not self._deprecated_color_handling
-            and self.supported_color_modes is not None
+            self.supported_color_modes is not None
             and color_mode in self.supported_color_modes
         )
 
-    async def async_turn_on(self, **kwargs: Any) -> None:  # noqa: C901
+    async def async_turn_on(self, **kwargs: Any) -> None:
         """Turn the device on.
 
         This method is a coroutine.
         """
-        brightness: int
         should_update = False
         hs_color: tuple[float, float]
         message: dict[str, Any] = {"state": "ON"}
@ -623,39 +410,6 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
         rgbcw: tuple[int, ...]
         xy_color: tuple[float, float]
 
-        if ATTR_HS_COLOR in kwargs and (
-            self._config[CONF_HS] or self._config[CONF_RGB] or self._config[CONF_XY]
-        ):
-            # Legacy color handling
-            hs_color = kwargs[ATTR_HS_COLOR]
-            message["color"] = {}
-            if self._config[CONF_RGB]:
-                # If brightness is supported, we don't want to scale the
-                # RGB values given using the brightness.
-                if self._config[CONF_BRIGHTNESS]:
-                    brightness = 255
-                else:
-                    # We pop the brightness, to omit it from the payload
-                    brightness = kwargs.pop(ATTR_BRIGHTNESS, 255)
-                rgb = color_util.color_hsv_to_RGB(
-                    hs_color[0], hs_color[1], brightness / 255 * 100
-                )
-                message["color"]["r"] = rgb[0]
-                message["color"]["g"] = rgb[1]
-                message["color"]["b"] = rgb[2]
-            if self._config[CONF_XY]:
-                xy_color = color_util.color_hs_to_xy(*kwargs[ATTR_HS_COLOR])
-                message["color"]["x"] = xy_color[0]
-                message["color"]["y"] = xy_color[1]
-            if self._config[CONF_HS]:
-                message["color"]["h"] = hs_color[0]
-                message["color"]["s"] = hs_color[1]
-
-            if self._optimistic:
-                self._attr_color_temp_kelvin = None
-                self._attr_hs_color = kwargs[ATTR_HS_COLOR]
-                should_update = True
-
         if ATTR_HS_COLOR in kwargs and self._supports_color_mode(ColorMode.HS):
             hs_color = kwargs[ATTR_HS_COLOR]
             message["color"] = {"h": hs_color[0], "s": hs_color[1]}
 
@ -1,13 +1,5 @@
 {
   "issues": {
-    "deprecated_color_handling": {
-      "title": "Deprecated color handling used for MQTT light",
-      "description": "An MQTT light config (with `json` schema) found in `configuration.yaml` uses deprecated color handling flags.\n\nConfiguration found:\n```yaml\n{config}\n```\nDeprecated flags: **{deprecated_flags}**.\n\nUse the `supported_color_modes` option instead and [reload](/developer-tools/yaml) the manually configured MQTT items or restart Home Assistant to fix this issue."
-    },
-    "deprecated_color_mode_flag": {
-      "title": "Deprecated color_mode option flag used for MQTT light",
-      "description": "An MQTT light config (with `json` schema) found in `configuration.yaml` uses a deprecated `color_mode` flag.\n\nConfiguration found:\n```yaml\n{config}\n```\n\nRemove the option from your config and [reload](/developer-tools/yaml) the manually configured MQTT items or restart Home Assistant to fix this issue."
-    },
     "invalid_platform_config": {
       "title": "Invalid config found for mqtt {domain} item",
       "description": "Home Assistant detected an invalid config for a manually configured item.\n\nPlatform domain: **{domain}**\nConfiguration file: **{config_file}**\nNear line: **{line}**\nConfiguration found:\n```yaml\n{config}\n```\nError: **{error}**.\n\nMake sure the configuration is valid and [reload](/developer-tools/yaml) the manually configured MQTT items or restart Home Assistant to fix this issue."
 
@ -2,7 +2,6 @@
 
 from __future__ import annotations
 
-from nclib.errors import NetcatError
 from nhc.controller import NHCController
 
 from homeassistant.config_entries import ConfigEntry
@ -25,12 +24,8 @@ async def async_setup_entry(
     controller = NHCController(entry.data[CONF_HOST])
     try:
         await controller.connect()
-    except NetcatError as err:
+    except (TimeoutError, OSError) as err:
         raise ConfigEntryNotReady("cannot connect to controller.") from err
-    except OSError as err:
-        raise ConfigEntryNotReady(
-            "unknown error while connecting to controller."
-        ) from err
 
     entry.runtime_data = controller
     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
 
@ -37,17 +37,17 @@ class NikoHomeControlCover(NikoHomeControlEntity, CoverEntity):
     )
     _action: NHCCover
 
-    def open_cover(self, **kwargs: Any) -> None:
+    async def async_open_cover(self, **kwargs: Any) -> None:
         """Open the cover."""
-        self._action.open()
+        await self._action.open()
 
-    def close_cover(self, **kwargs: Any) -> None:
+    async def async_close_cover(self, **kwargs: Any) -> None:
         """Close the cover."""
-        self._action.close()
+        await self._action.close()
 
-    def stop_cover(self, **kwargs: Any) -> None:
+    async def async_stop_cover(self, **kwargs: Any) -> None:
         """Stop the cover."""
-        self._action.stop()
+        await self._action.stop()
 
     def update_state(self):
         """Update HA state."""
 
@ -109,13 +109,13 @@ class NikoHomeControlLight(NikoHomeControlEntity, LightEntity):
             self._attr_supported_color_modes = {ColorMode.BRIGHTNESS}
             self._attr_brightness = round(action.state * 2.55)
 
-    def turn_on(self, **kwargs: Any) -> None:
+    async def async_turn_on(self, **kwargs: Any) -> None:
         """Instruct the light to turn on."""
-        self._action.turn_on(round(kwargs.get(ATTR_BRIGHTNESS, 255) / 2.55))
+        await self._action.turn_on(round(kwargs.get(ATTR_BRIGHTNESS, 255) / 2.55))
 
-    def turn_off(self, **kwargs: Any) -> None:
+    async def async_turn_off(self, **kwargs: Any) -> None:
         """Instruct the light to turn off."""
-        self._action.turn_off()
+        await self._action.turn_off()
 
     def update_state(self) -> None:
         """Handle updates from the controller."""
 
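Editor's note on the cover and light changes above: the bumped nhc library exposes awaitable actions, so the entities move from the sync open_cover/turn_on methods (which Home Assistant runs in an executor thread) to the async_* variants that await the library call directly on the event loop. A minimal runnable sketch of the new shape, with a stand-in Action class in place of the real nhc objects:

import asyncio

class Action:
    """Stand-in for an awaitable nhc >= 0.4 action (assumption for illustration)."""
    async def open(self) -> None:
        await asyncio.sleep(0)  # placeholder for the actual network call

class Cover:
    def __init__(self) -> None:
        self._action = Action()

    async def async_open_cover(self) -> None:
        """Open the cover by awaiting the library call directly."""
        await self._action.open()

asyncio.run(Cover().async_open_cover())
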
@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/niko_home_control",
   "iot_class": "local_push",
   "loggers": ["nikohomecontrol"],
-  "requirements": ["nhc==0.3.9"]
+  "requirements": ["nhc==0.4.4"]
 }
 
@ -378,7 +378,7 @@ class BackupInfoView(BackupOnboardingView):
         backups, _ = await manager.async_get_backups()
         return self.json(
             {
-                "backups": [backup.as_frontend_json() for backup in backups.values()],
+                "backups": list(backups.values()),
                 "state": manager.state,
                 "last_non_idle_event": manager.last_non_idle_event,
             }
 
@ -9,5 +9,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["onedrive_personal_sdk"],
   "quality_scale": "bronze",
-  "requirements": ["onedrive-personal-sdk==0.0.2"]
+  "requirements": ["onedrive-personal-sdk==0.0.3"]
 }
 
@ -13,7 +13,7 @@
   "integration_type": "hub",
   "iot_class": "local_polling",
   "loggers": ["boto3", "botocore", "pyhumps", "pyoverkiz", "s3transfer"],
-  "requirements": ["pyoverkiz==1.15.5"],
+  "requirements": ["pyoverkiz==1.16.0"],
   "zeroconf": [
     {
       "type": "_kizbox._tcp.local.",
 
@ -101,7 +101,7 @@ def entity_class_for_type(entity_type):
     entity_device_mapping = {
         # sends only 'dim' commands not compatible with on/off switches
         TYPE_DIMMABLE: DimmableRflinkLight,
-        # sends only 'on/off' commands not advices with dimmers and signal
+        # sends only 'on/off' commands not advised with dimmers and signal
         # repetition
         TYPE_SWITCHABLE: RflinkLight,
         # sends 'dim' and 'on' command to support both dimmers and on/off
 
@ -31,6 +31,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.util import dt as dt_util
 
 from . import RingConfigEntry
+from .const import DOMAIN
 from .coordinator import RingDataCoordinator
 from .entity import RingDeviceT, RingEntity, exception_wrap
 
@ -218,8 +219,13 @@ class RingCam(RingEntity[RingDoorBell], Camera):
     ) -> None:
         """Handle a WebRTC candidate."""
         if candidate.sdp_m_line_index is None:
-            msg = "The sdp_m_line_index is required for ring webrtc streaming"
-            raise HomeAssistantError(msg)
+            raise HomeAssistantError(
+                translation_domain=DOMAIN,
+                translation_key="sdp_m_line_index_required",
+                translation_placeholders={
+                    "device": self._device.name,
+                },
+            )
         await self._device.on_webrtc_candidate(
             session_id, candidate.candidate, candidate.sdp_m_line_index
         )
 
@ -27,7 +27,7 @@ from homeassistant.helpers.update_coordinator import (
     UpdateFailed,
 )
 
-from .const import SCAN_INTERVAL
+from .const import DOMAIN, SCAN_INTERVAL
 
 _LOGGER = logging.getLogger(__name__)
 
@ -45,26 +45,6 @@ class RingData:
 type RingConfigEntry = ConfigEntry[RingData]
 
 
-async def _call_api[*_Ts, _R](
-    hass: HomeAssistant,
-    target: Callable[[*_Ts], Coroutine[Any, Any, _R]],
-    *args: *_Ts,
-    msg_suffix: str = "",
-) -> _R:
-    try:
-        return await target(*args)
-    except AuthenticationError as err:
-        # Raising ConfigEntryAuthFailed will cancel future updates
-        # and start a config flow with SOURCE_REAUTH (async_step_reauth)
-        raise ConfigEntryAuthFailed from err
-    except RingTimeout as err:
-        raise UpdateFailed(
-            f"Timeout communicating with API{msg_suffix}: {err}"
-        ) from err
-    except RingError as err:
-        raise UpdateFailed(f"Error communicating with API{msg_suffix}: {err}") from err
-
-
 class RingDataCoordinator(DataUpdateCoordinator[RingDevices]):
     """Base class for device coordinators."""
 
@ -87,12 +67,37 @@ class RingDataCoordinator(DataUpdateCoordinator[RingDevices]):
         self.ring_api: Ring = ring_api
         self.first_call: bool = True
 
+    async def _call_api[*_Ts, _R](
+        self,
+        target: Callable[[*_Ts], Coroutine[Any, Any, _R]],
+        *args: *_Ts,
+    ) -> _R:
+        try:
+            return await target(*args)
+        except AuthenticationError as err:
+            # Raising ConfigEntryAuthFailed will cancel future updates
+            # and start a config flow with SOURCE_REAUTH (async_step_reauth)
+            raise ConfigEntryAuthFailed(
+                translation_domain=DOMAIN,
+                translation_key="api_authentication",
+            ) from err
+        except RingTimeout as err:
+            raise UpdateFailed(
+                translation_domain=DOMAIN,
+                translation_key="api_timeout",
+            ) from err
+        except RingError as err:
+            raise UpdateFailed(
+                translation_domain=DOMAIN,
+                translation_key="api_error",
+            ) from err
+
     async def _async_update_data(self) -> RingDevices:
         """Fetch data from API endpoint."""
         update_method: str = (
             "async_update_data" if self.first_call else "async_update_devices"
         )
-        await _call_api(self.hass, getattr(self.ring_api, update_method))
+        await self._call_api(getattr(self.ring_api, update_method))
         self.first_call = False
         devices: RingDevices = self.ring_api.devices()
         subscribed_device_ids = set(self.async_contexts())
@ -104,18 +109,14 @@ class RingDataCoordinator(DataUpdateCoordinator[RingDevices]):
             async with TaskGroup() as tg:
                 if device.has_capability("history"):
                     tg.create_task(
-                        _call_api(
-                            self.hass,
+                        self._call_api(
                             lambda device: device.async_history(limit=10),
                             device,
-                            msg_suffix=f" for device {device.name}",  # device_id is the mac
                         )
                     )
                 tg.create_task(
-                    _call_api(
-                        self.hass,
+                    self._call_api(
                         device.async_update_health_data,
-                        msg_suffix=f" for device {device.name}",
                     )
                 )
         except ExceptionGroup as eg:
 
@ -2,6 +2,7 @@
 
 from collections.abc import Awaitable, Callable, Coroutine
 from dataclasses import dataclass
+import logging
 from typing import Any, Concatenate, Generic, TypeVar, cast
 
 from ring_doorbell import (
@ -36,6 +37,8 @@ _RingCoordinatorT = TypeVar(
     bound=(RingDataCoordinator | RingListenCoordinator),
 )
 
+_LOGGER = logging.getLogger(__name__)
+
 
 @dataclass(slots=True)
 class DeprecatedInfo:
@ -62,14 +65,22 @@ def exception_wrap[_RingBaseEntityT: RingBaseEntity[Any, Any], **_P, _R](
             return await async_func(self, *args, **kwargs)
         except AuthenticationError as err:
             self.coordinator.config_entry.async_start_reauth(self.hass)
-            raise HomeAssistantError(err) from err
+            raise HomeAssistantError(
+                translation_domain=DOMAIN,
+                translation_key="api_authentication",
+            ) from err
         except RingTimeout as err:
             raise HomeAssistantError(
-                f"Timeout communicating with API {async_func}: {err}"
+                translation_domain=DOMAIN,
+                translation_key="api_timeout",
             ) from err
         except RingError as err:
+            _LOGGER.debug(
+                "Error calling %s in platform %s: ", async_func.__name__, self.platform
+            )
             raise HomeAssistantError(
-                f"Error communicating with API{async_func}: {err}"
+                translation_domain=DOMAIN,
+                translation_key="api_error",
            ) from err
 
     return _wrap
 
@ -141,6 +141,20 @@
       }
     }
   },
+  "exceptions": {
+    "api_authentication": {
+      "message": "Authentication error communicating with Ring API"
+    },
+    "api_timeout": {
+      "message": "Timeout communicating with Ring API"
+    },
+    "api_error": {
+      "message": "Error communicating with Ring API"
+    },
+    "sdp_m_line_index_required": {
+      "message": "Error negotiating stream for {device}"
+    }
+  },
   "issues": {
     "deprecated_entity": {
       "title": "Detected deprecated {platform} entity usage",
 
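Editor's note on the Ring changes above: hard-coded exception strings are replaced by translation_domain/translation_key arguments that resolve against the new "exceptions" section of strings.json. A hedged sketch of the lookup idea — the real resolution happens inside Home Assistant's translation helpers, not like this:

# Messages as defined in the "exceptions" section above.
EXCEPTIONS = {
    "api_authentication": "Authentication error communicating with Ring API",
    "api_timeout": "Timeout communicating with Ring API",
    "api_error": "Error communicating with Ring API",
    "sdp_m_line_index_required": "Error negotiating stream for {device}",
}

def render(translation_key: str, placeholders: dict[str, str] | None = None) -> str:
    """Resolve a translation key to its user-facing message."""
    return EXCEPTIONS[translation_key].format(**(placeholders or {}))

print(render("sdp_m_line_index_required", {"device": "Front Door"}))
# -> Error negotiating stream for Front Door
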
@ -161,15 +161,20 @@ class SynologyDSMBackupAgent(BackupAgent):
 
         :param backup_id: The ID of the backup that was returned in async_list_backups.
         """
-        try:
-            await self._file_station.delete_file(
-                path=self.path, filename=f"{backup_id}.tar"
-            )
-            await self._file_station.delete_file(
-                path=self.path, filename=f"{backup_id}_meta.json"
-            )
-        except SynologyDSMAPIErrorException as err:
-            raise BackupAgentError("Failed to delete the backup") from err
+        for filename in (f"{backup_id}.tar", f"{backup_id}_meta.json"):
+            try:
+                await self._file_station.delete_file(path=self.path, filename=filename)
+            except SynologyDSMAPIErrorException as err:
+                err_args: dict = err.args[0]
+                if int(err_args.get("code", 0)) != 900 or (
+                    (err_details := err_args.get("details")) is not None
+                    and isinstance(err_details, list)
+                    and isinstance(err_details[0], dict)
+                    and int(err_details[0].get("code", 0))
+                    != 408  # No such file or directory
+                ):
+                    LOGGER.error("Failed to delete backup: %s", err)
+                    raise BackupAgentError("Failed to delete backup") from err
 
     async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
         """List backups."""
 
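Editor's note: the rewritten delete loop only swallows the API error that means the file is already gone. Assuming the payload shape the check implies (an outer code 900 wrapping a detail code 408, "No such file or directory" — inferred from the code, not from API documentation), the condition reduces to:

# Hypothetical error payload, shaped as the check in backup.py expects.
err_args = {"code": 900, "details": [{"code": 408, "path": "/backup/abc123_meta.json"}]}

ignorable = int(err_args.get("code", 0)) == 900 and (
    (details := err_args.get("details")) is not None
    and isinstance(details, list)
    and isinstance(details[0], dict)
    and int(details[0].get("code", 0)) == 408  # No such file or directory
)
print(ignorable)  # True: a missing file is not treated as a failed delete
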
@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/tesla_fleet",
   "iot_class": "cloud_polling",
   "loggers": ["tesla-fleet-api"],
-  "requirements": ["tesla-fleet-api==0.9.2"]
+  "requirements": ["tesla-fleet-api==0.9.6"]
 }
 
@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/teslemetry",
   "iot_class": "cloud_polling",
   "loggers": ["tesla-fleet-api"],
-  "requirements": ["tesla-fleet-api==0.9.2", "teslemetry-stream==0.6.10"]
+  "requirements": ["tesla-fleet-api==0.9.6", "teslemetry-stream==0.6.10"]
 }
 
@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/tessie",
   "iot_class": "cloud_polling",
   "loggers": ["tessie", "tesla-fleet-api"],
-  "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.9.2"]
+  "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.9.6"]
 }
 
@ -45,7 +45,7 @@ class TuyaSensorEntityDescription(SensorEntityDescription):
     subkey: str | None = None
 
 
-# Commonly used battery sensors, that are re-used in the sensors down below.
+# Commonly used battery sensors, that are reused in the sensors down below.
 BATTERY_SENSORS: tuple[TuyaSensorEntityDescription, ...] = (
     TuyaSensorEntityDescription(
         key=DPCode.BATTERY_PERCENTAGE,
 
|
|||||||
"""Set the mode of the device."""
|
"""Set the mode of the device."""
|
||||||
if mode not in self.available_modes:
|
if mode not in self.available_modes:
|
||||||
raise HomeAssistantError(
|
raise HomeAssistantError(
|
||||||
"{mode} is not one of the valid available modes: {self.available_modes}"
|
f"{mode} is not one of the valid available modes: {self.available_modes}"
|
||||||
)
|
)
|
||||||
if not self.device.set_humidity_mode(self._get_vs_mode(mode)):
|
if not self.device.set_humidity_mode(self._get_vs_mode(mode)):
|
||||||
raise HomeAssistantError(f"An error occurred while setting mode {mode}.")
|
raise HomeAssistantError(f"An error occurred while setting mode {mode}.")
|
||||||
|
|
||||||
|
if mode == MODE_SLEEP:
|
||||||
|
# We successfully changed the mode. Consider it a success even if display operation fails.
|
||||||
|
self.device.set_display(False)
|
||||||
|
|
||||||
# Changing mode while humidifier is off actually turns it on, as per the app. But
|
# Changing mode while humidifier is off actually turns it on, as per the app. But
|
||||||
# the library does not seem to update the device_status. It is also possible that
|
# the library does not seem to update the device_status. It is also possible that
|
||||||
# other attributes get updated. Scheduling a forced refresh to get device status.
|
# other attributes get updated. Scheduling a forced refresh to get device status.
|
||||||
|
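Editor's note: the VeSync change is a one-character bug fix — the original string lacked the f prefix, so the braces were emitted literally instead of being interpolated. For example:

mode = "auto"
print("{mode} is not one of the valid available modes")   # -> {mode} is not one of the valid available modes
print(f"{mode} is not one of the valid available modes")  # -> auto is not one of the valid available modes
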
@ -17,7 +17,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         entry.data[CONF_HOST],
         entry.data[CONF_USERNAME],
         entry.data[CONF_PASSWORD],
-        entry.unique_id,
+        entry,
     )
 
     await coordinator.async_config_entry_first_refresh()
 
@ -50,7 +50,7 @@ class VodafoneStationRouter(DataUpdateCoordinator[UpdateCoordinatorDataType]):
         host: str,
         username: str,
         password: str,
-        config_entry_unique_id: str | None,
+        config_entry: ConfigEntry,
     ) -> None:
         """Initialize the scanner."""
 
@ -58,13 +58,14 @@ class VodafoneStationRouter(DataUpdateCoordinator[UpdateCoordinatorDataType]):
         self.api = VodafoneStationSercommApi(host, username, password)
 
         # Last resort as no MAC or S/N can be retrieved via API
-        self._id = config_entry_unique_id
+        self._id = config_entry.unique_id
 
         super().__init__(
             hass=hass,
             logger=_LOGGER,
             name=f"{DOMAIN}-{host}-coordinator",
             update_interval=timedelta(seconds=SCAN_INTERVAL),
+            config_entry=config_entry,
         )
         device_reg = dr.async_get(self.hass)
         device_list = dr.async_entries_for_config_entry(
 
|
@ -146,6 +146,10 @@ class Debouncer[_R_co]:
|
|||||||
"""Cancel any scheduled call, and prevent new runs."""
|
"""Cancel any scheduled call, and prevent new runs."""
|
||||||
self._shutdown_requested = True
|
self._shutdown_requested = True
|
||||||
self.async_cancel()
|
self.async_cancel()
|
||||||
|
# Release hard references to parent function
|
||||||
|
# https://github.com/home-assistant/core/issues/137237
|
||||||
|
self._function = None
|
||||||
|
self._job = None
|
||||||
|
|
||||||
@callback
|
@callback
|
||||||
def async_cancel(self) -> None:
|
def async_cancel(self) -> None:
|
||||||
|
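Editor's note: the Debouncer change drops the stored callback on shutdown because a bound method holds a hard reference to its instance — as long as the debouncer is referenced anywhere, the owning object cannot be freed. A small runnable sketch of the effect (heavily simplified from the real class):

import gc
import weakref

class Debouncer:
    def __init__(self, function) -> None:
        self._function = function
    def async_shutdown(self) -> None:
        self._function = None  # release the hard reference to the parent

class Owner:
    def refresh(self) -> None: ...

owner = Owner()
deb = Debouncer(owner.refresh)  # bound method -> hard reference to owner
ref = weakref.ref(owner)
del owner
gc.collect()
print(ref() is None)  # False: `deb` still keeps the owner alive

deb.async_shutdown()
gc.collect()
print(ref() is None)  # True: the owner can now be collected
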
@ -14,7 +14,7 @@ astral==2.2
 async-interrupt==1.2.0
 async-upnp-client==0.43.0
 atomicwrites-homeassistant==1.4.1
-attrs==24.2.0
+attrs==25.1.0
 audioop-lts==0.2.1;python_version>='3.13'
 av==13.1.0
 awesomeversion==24.6.0
@ -34,7 +34,7 @@ fnv-hash-fast==1.2.2
 go2rtc-client==0.1.2
 ha-ffmpeg==3.2.2
 habluetooth==3.21.0
-hass-nabucasa==0.88.1
+hass-nabucasa==0.89.0
 hassil==2.2.0
 home-assistant-bluetooth==1.13.0
 home-assistant-frontend==20250203.0
@ -67,7 +67,7 @@ standard-telnetlib==3.13.0;python_version>='3.13'
 typing-extensions>=4.12.2,<5.0
 ulid-transform==1.2.0
 urllib3>=1.26.5,<2
-uv==0.5.21
+uv==0.5.27
 voluptuous-openapi==0.0.6
 voluptuous-serialize==2.6.0
 voluptuous==0.15.2
 
@ -140,7 +140,7 @@ class HassEnforceClassModule(BaseChecker):
 
         for ancestor in top_level_ancestors:
             if ancestor.name in _BASE_ENTITY_MODULES and not any(
-                anc.name in _MODULE_CLASSES for anc in ancestors
+                parent.name in _MODULE_CLASSES for parent in ancestors
             ):
                 self.add_message(
                     "hass-enforce-class-module",
 
@ -35,7 +35,7 @@ dependencies = [
     "aiozoneinfo==0.2.1",
     "astral==2.2",
     "async-interrupt==1.2.0",
-    "attrs==24.2.0",
+    "attrs==25.1.0",
     "atomicwrites-homeassistant==1.4.1",
     "audioop-lts==0.2.1;python_version>='3.13'",
     "awesomeversion==24.6.0",
@ -46,7 +46,7 @@ dependencies = [
     "fnv-hash-fast==1.2.2",
     # hass-nabucasa is imported by helpers which don't depend on the cloud
     # integration
-    "hass-nabucasa==0.88.1",
+    "hass-nabucasa==0.89.0",
     # When bumping httpx, please check the version pins of
     # httpcore, anyio, and h11 in gen_requirements_all
     "httpx==0.28.1",
@ -76,7 +76,7 @@ dependencies = [
     # Temporary setting an upper bound, to prevent compat issues with urllib3>=2
     # https://github.com/home-assistant/core/issues/97248
     "urllib3>=1.26.5,<2",
-    "uv==0.5.21",
+    "uv==0.5.27",
     "voluptuous==0.15.2",
     "voluptuous-serialize==2.6.0",
     "voluptuous-openapi==0.0.6",
 
requirements.txt (generated, 6 changed lines)
@ -12,7 +12,7 @@ aiohttp-asyncmdnsresolver==0.0.3
 aiozoneinfo==0.2.1
 astral==2.2
 async-interrupt==1.2.0
-attrs==24.2.0
+attrs==25.1.0
 atomicwrites-homeassistant==1.4.1
 audioop-lts==0.2.1;python_version>='3.13'
 awesomeversion==24.6.0
@ -21,7 +21,7 @@ certifi>=2021.5.30
 ciso8601==2.3.2
 cronsim==2.6
 fnv-hash-fast==1.2.2
-hass-nabucasa==0.88.1
+hass-nabucasa==0.89.0
 httpx==0.28.1
 home-assistant-bluetooth==1.13.0
 ifaddr==0.2.0
@ -45,7 +45,7 @@ standard-telnetlib==3.13.0;python_version>='3.13'
 typing-extensions>=4.12.2,<5.0
 ulid-transform==1.2.0
 urllib3>=1.26.5,<2
-uv==0.5.21
+uv==0.5.27
 voluptuous==0.15.2
 voluptuous-serialize==2.6.0
 voluptuous-openapi==0.0.6
 
requirements_all.txt (generated, 12 changed lines)
@ -1109,7 +1109,7 @@ habiticalib==0.3.4
 habluetooth==3.21.0
 
 # homeassistant.components.cloud
-hass-nabucasa==0.88.1
+hass-nabucasa==0.89.0
 
 # homeassistant.components.splunk
 hass-splunk==0.1.1
@ -1408,7 +1408,7 @@ microBeesPy==0.3.5
 mill-local==0.3.0
 
 # homeassistant.components.mill
-millheater==0.12.2
+millheater==0.12.3
 
 # homeassistant.components.minio
 minio==7.1.12
@ -1489,7 +1489,7 @@ nextcord==2.6.0
 nextdns==4.0.0
 
 # homeassistant.components.niko_home_control
-nhc==0.3.9
+nhc==0.4.4
 
 # homeassistant.components.nibe_heatpump
 nibe==2.14.0
@ -1559,7 +1559,7 @@ omnilogic==0.4.5
 ondilo==0.5.0
 
 # homeassistant.components.onedrive
-onedrive-personal-sdk==0.0.2
+onedrive-personal-sdk==0.0.3
 
 # homeassistant.components.onvif
 onvif-zeep-async==3.2.5
@ -2193,7 +2193,7 @@ pyotgw==2.2.2
 pyotp==2.8.0
 
 # homeassistant.components.overkiz
-pyoverkiz==1.15.5
+pyoverkiz==1.16.0
 
 # homeassistant.components.onewire
 pyownet==0.10.0.post1
@ -2860,7 +2860,7 @@ temperusb==1.6.1
 # homeassistant.components.tesla_fleet
 # homeassistant.components.teslemetry
 # homeassistant.components.tessie
-tesla-fleet-api==0.9.2
+tesla-fleet-api==0.9.6
 
 # homeassistant.components.powerwall
 tesla-powerwall==0.5.2
 
@ -19,7 +19,7 @@ pylint==3.3.4
 pylint-per-file-ignores==1.4.0
 pipdeptree==2.25.0
 pytest-asyncio==0.25.3
-pytest-aiohttp==1.0.5
+pytest-aiohttp==1.1.0
 pytest-cov==6.0.0
 pytest-freezer==0.4.9
 pytest-github-actions-annotate-failures==0.3.0
@ -37,19 +37,19 @@ tqdm==4.67.1
 types-aiofiles==24.1.0.20241221
 types-atomicwrites==1.4.5.1
 types-croniter==5.0.1.20241205
-types-beautifulsoup4==4.12.0.20241020
+types-beautifulsoup4==4.12.0.20250204
 types-caldav==1.3.0.20241107
 types-chardet==0.1.5
-types-decorator==5.1.8.20240310
+types-decorator==5.1.8.20250121
 types-paho-mqtt==1.6.0.20240321
 types-pexpect==4.9.0.20241208
 types-pillow==10.2.0.20240822
 types-protobuf==5.29.1.20241207
 types-psutil==6.1.0.20241221
-types-pyserial==3.5.0.20241221
+types-pyserial==3.5.0.20250130
 types-python-dateutil==2.9.0.20241206
 types-python-slugify==8.0.2.20240310
-types-pytz==2024.2.0.20241221
+types-pytz==2025.1.0.20250204
 types-PyYAML==6.0.12.20241230
 types-requests==2.31.0.3
 types-xmltodict==0.13.0.3
 
requirements_test_all.txt (generated, 12 changed lines)
@ -947,7 +947,7 @@ habiticalib==0.3.4
 habluetooth==3.21.0
 
 # homeassistant.components.cloud
-hass-nabucasa==0.88.1
+hass-nabucasa==0.89.0
 
 # homeassistant.components.conversation
 hassil==2.2.0
@ -1177,7 +1177,7 @@ microBeesPy==0.3.5
 mill-local==0.3.0
 
 # homeassistant.components.mill
-millheater==0.12.2
+millheater==0.12.3
 
 # homeassistant.components.minio
 minio==7.1.12
@ -1249,7 +1249,7 @@ nextcord==2.6.0
 nextdns==4.0.0
 
 # homeassistant.components.niko_home_control
-nhc==0.3.9
+nhc==0.4.4
 
 # homeassistant.components.nibe_heatpump
 nibe==2.14.0
@ -1304,7 +1304,7 @@ omnilogic==0.4.5
 ondilo==0.5.0
 
 # homeassistant.components.onedrive
-onedrive-personal-sdk==0.0.2
+onedrive-personal-sdk==0.0.3
 
 # homeassistant.components.onvif
 onvif-zeep-async==3.2.5
@ -1786,7 +1786,7 @@ pyotgw==2.2.2
 pyotp==2.8.0
 
 # homeassistant.components.overkiz
-pyoverkiz==1.15.5
+pyoverkiz==1.16.0
 
 # homeassistant.components.onewire
 pyownet==0.10.0.post1
@ -2294,7 +2294,7 @@ temperusb==1.6.1
 # homeassistant.components.tesla_fleet
 # homeassistant.components.teslemetry
 # homeassistant.components.tessie
-tesla-fleet-api==0.9.2
+tesla-fleet-api==0.9.6
 
 # homeassistant.components.powerwall
 tesla-powerwall==0.5.2
 
2
requirements_test_pre_commit.txt
generated
@@ -1,5 +1,5 @@
 # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit
 
-codespell==2.3.0
+codespell==2.4.1
 ruff==0.9.1
 yamllint==1.35.1
2
script/hassfest/docker/Dockerfile
generated
@@ -14,7 +14,7 @@ WORKDIR "/github/workspace"
 COPY . /usr/src/homeassistant
 
 # Uv is only needed during build
-RUN --mount=from=ghcr.io/astral-sh/uv:0.5.21,source=/uv,target=/bin/uv \
+RUN --mount=from=ghcr.io/astral-sh/uv:0.5.27,source=/uv,target=/bin/uv \
     # Uv creates a lock file in /tmp
     --mount=type=tmpfs,target=/tmp \
     # Required for PyTurboJPEG
@@ -10,7 +10,7 @@ from homeassistant.const import Platform
 from script.hassfest import ast_parse_module
 from script.hassfest.model import Config, Integration
 
-_ANNOTATION_MATCH = re.compile(r"^[A-Za-z]+ConfigEntry$")
+_ANNOTATION_MATCH = re.compile(r"^[A-Za-z][A-Za-z0-9]+ConfigEntry$")
 _FUNCTIONS: dict[str, dict[str, int]] = {
     "__init__": {  # based on ComponentProtocol
         "async_migrate_entry": 2,
@@ -5,7 +5,7 @@ from __future__ import annotations
 from collections.abc import AsyncIterator, Callable, Coroutine, Iterable
 from pathlib import Path
 from typing import Any
-from unittest.mock import ANY, AsyncMock, Mock, patch
+from unittest.mock import AsyncMock, Mock, patch
 
 from homeassistant.components.backup import (
     DOMAIN,
@@ -29,7 +29,7 @@ TEST_BACKUP_ABC123 = AgentBackup(
     backup_id="abc123",
     database_included=True,
     date="1970-01-01T00:00:00.000Z",
-    extra_metadata={"instance_id": ANY, "with_automatic_settings": True},
+    extra_metadata={"instance_id": "our_uuid", "with_automatic_settings": True},
     folders=[Folder.MEDIA, Folder.SHARE],
     homeassistant_included=True,
     homeassistant_version="2024.12.0",
@@ -18,6 +18,16 @@ from .common import TEST_BACKUP_PATH_ABC123, TEST_BACKUP_PATH_DEF456
 from tests.common import get_fixture_path
 
 
+@pytest.fixture(name="instance_id", autouse=True)
+def instance_id_fixture(hass: HomeAssistant) -> Generator[None]:
+    """Mock instance ID."""
+    with patch(
+        "homeassistant.components.backup.manager.instance_id.async_get",
+        return_value="our_uuid",
+    ):
+        yield
+
+
 @pytest.fixture(name="mocked_json_bytes")
 def mocked_json_bytes_fixture() -> Generator[Mock]:
     """Mock json_bytes."""
@@ -71,6 +71,10 @@
     'backup_id': 'abc123',
     'database_included': True,
     'date': '1970-01-01T00:00:00.000Z',
+    'extra_metadata': dict({
+      'instance_id': 'our_uuid',
+      'with_automatic_settings': True,
+    }),
     'failed_agent_ids': list([
     ]),
     'folders': list([
@@ -94,6 +98,10 @@
     'backup_id': 'def456',
     'database_included': False,
     'date': '1980-01-01T00:00:00.000Z',
+    'extra_metadata': dict({
+      'instance_id': 'unknown_uuid',
+      'with_automatic_settings': True,
+    }),
     'failed_agent_ids': list([
     ]),
     'folders': list([
@@ -3040,6 +3040,10 @@
     'backup_id': 'abc123',
     'database_included': True,
     'date': '1970-01-01T00:00:00.000Z',
+    'extra_metadata': dict({
+      'instance_id': 'our_uuid',
+      'with_automatic_settings': True,
+    }),
     'failed_agent_ids': list([
     ]),
     'folders': list([
@@ -3117,6 +3121,10 @@
     'backup_id': 'abc123',
     'database_included': True,
     'date': '1970-01-01T00:00:00.000Z',
+    'extra_metadata': dict({
+      'instance_id': 'our_uuid',
+      'with_automatic_settings': True,
+    }),
     'failed_agent_ids': list([
     ]),
     'folders': list([
@@ -3175,6 +3183,10 @@
     'backup_id': 'abc123',
     'database_included': True,
     'date': '1970-01-01T00:00:00.000Z',
+    'extra_metadata': dict({
+      'instance_id': 'our_uuid',
+      'with_automatic_settings': True,
+    }),
     'failed_agent_ids': list([
     ]),
     'folders': list([
@@ -3217,6 +3229,10 @@
     'backup_id': 'def456',
     'database_included': False,
     'date': '1980-01-01T00:00:00.000Z',
+    'extra_metadata': dict({
+      'instance_id': 'unknown_uuid',
+      'with_automatic_settings': True,
+    }),
     'failed_agent_ids': list([
     ]),
     'folders': list([
@@ -3270,6 +3286,10 @@
     'backup_id': 'def456',
     'database_included': False,
     'date': '1980-01-01T00:00:00.000Z',
+    'extra_metadata': dict({
+      'instance_id': 'unknown_uuid',
+      'with_automatic_settings': True,
+    }),
     'failed_agent_ids': list([
     ]),
     'folders': list([
@@ -3321,6 +3341,10 @@
     'backup_id': 'abc123',
     'database_included': True,
     'date': '1970-01-01T00:00:00.000Z',
+    'extra_metadata': dict({
+      'instance_id': 'our_uuid',
+      'with_automatic_settings': True,
+    }),
     'failed_agent_ids': list([
     ]),
     'folders': list([
@@ -3379,6 +3403,10 @@
     'backup_id': 'abc123',
     'database_included': True,
     'date': '1970-01-01T00:00:00.000Z',
+    'extra_metadata': dict({
+      'instance_id': 'our_uuid',
+      'with_automatic_settings': True,
+    }),
     'failed_agent_ids': list([
     ]),
     'folders': list([
@@ -3438,6 +3466,8 @@
     'backup_id': 'abc123',
     'database_included': True,
     'date': '1970-01-01T00:00:00Z',
+    'extra_metadata': dict({
+    }),
     'failed_agent_ids': list([
     ]),
     'folders': list([
@@ -3497,6 +3527,8 @@
     'backup_id': 'abc123',
     'database_included': True,
     'date': '1970-01-01T00:00:00Z',
+    'extra_metadata': dict({
+    }),
     'failed_agent_ids': list([
       'test.remote',
     ]),
@@ -3556,6 +3588,8 @@
     'backup_id': 'abc123',
     'database_included': True,
     'date': '1970-01-01T00:00:00Z',
+    'extra_metadata': dict({
+    }),
     'failed_agent_ids': list([
     ]),
     'folders': list([
@@ -3614,6 +3648,8 @@
     'backup_id': 'abc123',
     'database_included': True,
     'date': '1970-01-01T00:00:00Z',
+    'extra_metadata': dict({
+    }),
     'failed_agent_ids': list([
     ]),
     'folders': list([
@@ -3672,6 +3708,8 @@
     'backup_id': 'abc123',
     'database_included': True,
     'date': '1970-01-01T00:00:00Z',
+    'extra_metadata': dict({
+    }),
     'failed_agent_ids': list([
     ]),
     'folders': list([
@@ -3730,6 +3768,8 @@
     'backup_id': 'abc123',
     'database_included': True,
     'date': '1970-01-01T00:00:00Z',
+    'extra_metadata': dict({
+    }),
     'failed_agent_ids': list([
       'test.remote',
     ]),
@@ -3789,6 +3829,10 @@
     'backup_id': 'abc123',
     'database_included': True,
     'date': '1970-01-01T00:00:00.000Z',
+    'extra_metadata': dict({
+      'instance_id': 'our_uuid',
+      'with_automatic_settings': True,
+    }),
     'failed_agent_ids': list([
     ]),
     'folders': list([
@@ -3828,6 +3872,10 @@
     'backup_id': 'abc123',
     'database_included': True,
     'date': '1970-01-01T00:00:00.000Z',
+    'extra_metadata': dict({
+      'instance_id': 'our_uuid',
+      'with_automatic_settings': True,
+    }),
     'failed_agent_ids': list([
     ]),
     'folders': list([
@@ -3883,6 +3931,10 @@
     'backup_id': 'abc123',
     'database_included': True,
     'date': '1970-01-01T00:00:00.000Z',
+    'extra_metadata': dict({
+      'instance_id': 'our_uuid',
+      'with_automatic_settings': True,
+    }),
     'failed_agent_ids': list([
     ]),
     'folders': list([
@@ -3923,6 +3975,10 @@
     'backup_id': 'abc123',
     'database_included': True,
     'date': '1970-01-01T00:00:00.000Z',
+    'extra_metadata': dict({
+      'instance_id': 'our_uuid',
+      'with_automatic_settings': True,
+    }),
     'failed_agent_ids': list([
     ]),
     'folders': list([
@@ -4199,6 +4255,10 @@
     'backup_id': 'abc123',
     'database_included': True,
     'date': '1970-01-01T00:00:00.000Z',
+    'extra_metadata': dict({
+      'instance_id': 'our_uuid',
+      'with_automatic_settings': True,
+    }),
     'failed_agent_ids': list([
     ]),
     'folders': list([
@@ -4246,6 +4306,10 @@
     'backup_id': 'abc123',
     'database_included': True,
     'date': '1970-01-01T00:00:00.000Z',
+    'extra_metadata': dict({
+      'instance_id': 'our_uuid',
+      'with_automatic_settings': True,
+    }),
     'failed_agent_ids': list([
     ]),
     'folders': list([
@@ -4297,6 +4361,10 @@
     'backup_id': 'abc123',
     'database_included': True,
     'date': '1970-01-01T00:00:00.000Z',
+    'extra_metadata': dict({
+      'instance_id': 'our_uuid',
+      'with_automatic_settings': True,
+    }),
     'failed_agent_ids': list([
     ]),
     'folders': list([
@@ -4339,6 +4407,10 @@
     'backup_id': 'def456',
     'database_included': False,
     'date': '1980-01-01T00:00:00.000Z',
+    'extra_metadata': dict({
+      'instance_id': 'unknown_uuid',
+      'with_automatic_settings': True,
+    }),
     'failed_agent_ids': list([
     ]),
     'folders': list([
@@ -4367,6 +4439,10 @@
     'backup_id': 'abc123',
     'database_included': True,
     'date': '1970-01-01T00:00:00.000Z',
+    'extra_metadata': dict({
+      'instance_id': 'our_uuid',
+      'with_automatic_settings': True,
+    }),
     'failed_agent_ids': list([
     ]),
     'folders': list([
@@ -4415,6 +4491,10 @@
     'backup_id': 'abc123',
     'database_included': True,
     'date': '1970-01-01T00:00:00.000Z',
+    'extra_metadata': dict({
+      'instance_id': 'our_uuid',
+      'with_automatic_settings': True,
+    }),
     'failed_agent_ids': list([
     ]),
     'folders': list([
@@ -136,7 +136,7 @@ async def test_create_backup_service(
         agent_ids=["backup.local"],
         backup_name="Custom backup 2025.1.0",
         extra_metadata={
-            "instance_id": hass.data["core.uuid"],
+            "instance_id": "our_uuid",
             "with_automatic_settings": False,
         },
         include_addons=None,
@@ -595,7 +595,7 @@ async def test_initiate_backup(
         "compressed": True,
         "date": ANY,
         "extra": {
-            "instance_id": hass.data["core.uuid"],
+            "instance_id": "our_uuid",
             "with_automatic_settings": False,
         },
         "homeassistant": {
@@ -625,6 +625,7 @@ async def test_initiate_backup(
         "backup_id": backup_id,
         "database_included": include_database,
         "date": ANY,
+        "extra_metadata": {"instance_id": "our_uuid", "with_automatic_settings": False},
         "failed_agent_ids": [],
         "folders": [],
         "homeassistant_included": True,
@@ -675,6 +676,10 @@ async def test_initiate_backup_with_agent_error(
         "backup_id": "backup1",
         "database_included": True,
         "date": "1970-01-01T00:00:00.000Z",
+        "extra_metadata": {
+            "instance_id": "our_uuid",
+            "with_automatic_settings": True,
+        },
         "failed_agent_ids": [],
         "folders": [
             "media",
@@ -691,6 +696,10 @@ async def test_initiate_backup_with_agent_error(
         "backup_id": "backup2",
         "database_included": False,
         "date": "1980-01-01T00:00:00.000Z",
+        "extra_metadata": {
+            "instance_id": "unknown_uuid",
+            "with_automatic_settings": True,
+        },
         "failed_agent_ids": [],
         "folders": [
             "media",
@@ -713,6 +722,10 @@ async def test_initiate_backup_with_agent_error(
         "backup_id": "backup3",
         "database_included": True,
         "date": "1970-01-01T00:00:00.000Z",
+        "extra_metadata": {
+            "instance_id": "our_uuid",
+            "with_automatic_settings": True,
+        },
         "failed_agent_ids": [],
         "folders": [
             "media",
@@ -836,6 +849,7 @@ async def test_initiate_backup_with_agent_error(
         "backup_id": "abc123",
         "database_included": True,
         "date": ANY,
+        "extra_metadata": {"instance_id": "our_uuid", "with_automatic_settings": False},
         "failed_agent_ids": ["test.remote"],
         "folders": [],
         "homeassistant_included": True,
@@ -1770,6 +1784,10 @@ async def test_receive_backup_agent_error(
         "backup_id": "backup1",
         "database_included": True,
         "date": "1970-01-01T00:00:00.000Z",
+        "extra_metadata": {
+            "instance_id": "our_uuid",
+            "with_automatic_settings": True,
+        },
         "failed_agent_ids": [],
         "folders": [
             "media",
@@ -1786,6 +1804,10 @@ async def test_receive_backup_agent_error(
         "backup_id": "backup2",
         "database_included": False,
         "date": "1980-01-01T00:00:00.000Z",
+        "extra_metadata": {
+            "instance_id": "unknown_uuid",
+            "with_automatic_settings": True,
+        },
         "failed_agent_ids": [],
         "folders": [
             "media",
@@ -1808,6 +1830,10 @@ async def test_receive_backup_agent_error(
         "backup_id": "backup3",
         "database_included": True,
         "date": "1970-01-01T00:00:00.000Z",
+        "extra_metadata": {
+            "instance_id": "our_uuid",
+            "with_automatic_settings": True,
+        },
         "failed_agent_ids": [],
         "folders": [
             "media",
@@ -3325,6 +3351,7 @@ async def test_initiate_backup_per_agent_encryption(
         "backup_id": backup_id,
         "database_included": True,
         "date": ANY,
+        "extra_metadata": {"instance_id": "our_uuid", "with_automatic_settings": False},
         "failed_agent_ids": [],
         "folders": [],
         "homeassistant_included": True,
@@ -2,6 +2,7 @@
 
 from __future__ import annotations
 
+import asyncio
 from collections.abc import AsyncIterator
 import dataclasses
 import tarfile
@@ -189,6 +190,73 @@ async def test_decrypted_backup_streamer(hass: HomeAssistant) -> None:
     assert decrypted_output == decrypted_backup_data + expected_padding
 
 
+async def test_decrypted_backup_streamer_interrupt_stuck_reader(
+    hass: HomeAssistant,
+) -> None:
+    """Test the decrypted backup streamer."""
+    encrypted_backup_path = get_fixture_path("test_backups/c0cb53bd.tar", DOMAIN)
+    backup = AgentBackup(
+        addons=["addon_1", "addon_2"],
+        backup_id="1234",
+        date="2024-12-02T07:23:58.261875-05:00",
+        database_included=False,
+        extra_metadata={},
+        folders=[],
+        homeassistant_included=True,
+        homeassistant_version="2024.12.0.dev0",
+        name="test",
+        protected=True,
+        size=encrypted_backup_path.stat().st_size,
+    )
+
+    stuck = asyncio.Event()
+
+    async def send_backup() -> AsyncIterator[bytes]:
+        f = encrypted_backup_path.open("rb")
+        while chunk := f.read(1024):
+            await stuck.wait()
+            yield chunk
+
+    async def open_backup() -> AsyncIterator[bytes]:
+        return send_backup()
+
+    decryptor = DecryptedBackupStreamer(hass, backup, open_backup, "hunter2")
+    await decryptor.open_stream()
+    await decryptor.wait()
+
+
+async def test_decrypted_backup_streamer_interrupt_stuck_writer(
+    hass: HomeAssistant,
+) -> None:
+    """Test the decrypted backup streamer."""
+    encrypted_backup_path = get_fixture_path("test_backups/c0cb53bd.tar", DOMAIN)
+    backup = AgentBackup(
+        addons=["addon_1", "addon_2"],
+        backup_id="1234",
+        date="2024-12-02T07:23:58.261875-05:00",
+        database_included=False,
+        extra_metadata={},
+        folders=[],
+        homeassistant_included=True,
+        homeassistant_version="2024.12.0.dev0",
+        name="test",
+        protected=True,
+        size=encrypted_backup_path.stat().st_size,
+    )
+
+    async def send_backup() -> AsyncIterator[bytes]:
+        f = encrypted_backup_path.open("rb")
+        while chunk := f.read(1024):
+            yield chunk
+
+    async def open_backup() -> AsyncIterator[bytes]:
+        return send_backup()
+
+    decryptor = DecryptedBackupStreamer(hass, backup, open_backup, "hunter2")
+    await decryptor.open_stream()
+    await decryptor.wait()
+
+
 async def test_decrypted_backup_streamer_wrong_password(hass: HomeAssistant) -> None:
     """Test the decrypted backup streamer with wrong password."""
     encrypted_backup_path = get_fixture_path("test_backups/c0cb53bd.tar", DOMAIN)
@@ -279,6 +347,77 @@ async def test_encrypted_backup_streamer(hass: HomeAssistant) -> None:
     assert encrypted_output == encrypted_backup_data + expected_padding
 
 
+async def test_encrypted_backup_streamer_interrupt_stuck_reader(
+    hass: HomeAssistant,
+) -> None:
+    """Test the encrypted backup streamer."""
+    decrypted_backup_path = get_fixture_path(
+        "test_backups/c0cb53bd.tar.decrypted", DOMAIN
+    )
+    backup = AgentBackup(
+        addons=["addon_1", "addon_2"],
+        backup_id="1234",
+        date="2024-12-02T07:23:58.261875-05:00",
+        database_included=False,
+        extra_metadata={},
+        folders=[],
+        homeassistant_included=True,
+        homeassistant_version="2024.12.0.dev0",
+        name="test",
+        protected=False,
+        size=decrypted_backup_path.stat().st_size,
+    )
+
+    stuck = asyncio.Event()
+
+    async def send_backup() -> AsyncIterator[bytes]:
+        f = decrypted_backup_path.open("rb")
+        while chunk := f.read(1024):
+            await stuck.wait()
+            yield chunk
+
+    async def open_backup() -> AsyncIterator[bytes]:
+        return send_backup()
+
+    decryptor = EncryptedBackupStreamer(hass, backup, open_backup, "hunter2")
+    await decryptor.open_stream()
+    await decryptor.wait()
+
+
+async def test_encrypted_backup_streamer_interrupt_stuck_writer(
+    hass: HomeAssistant,
+) -> None:
+    """Test the encrypted backup streamer."""
+    decrypted_backup_path = get_fixture_path(
+        "test_backups/c0cb53bd.tar.decrypted", DOMAIN
+    )
+    backup = AgentBackup(
+        addons=["addon_1", "addon_2"],
+        backup_id="1234",
+        date="2024-12-02T07:23:58.261875-05:00",
+        database_included=False,
+        extra_metadata={},
+        folders=[],
+        homeassistant_included=True,
+        homeassistant_version="2024.12.0.dev0",
+        name="test",
+        protected=True,
+        size=decrypted_backup_path.stat().st_size,
+    )
+
+    async def send_backup() -> AsyncIterator[bytes]:
+        f = decrypted_backup_path.open("rb")
+        while chunk := f.read(1024):
+            yield chunk
+
+    async def open_backup() -> AsyncIterator[bytes]:
+        return send_backup()
+
+    decryptor = EncryptedBackupStreamer(hass, backup, open_backup, "hunter2")
+    await decryptor.open_stream()
+    await decryptor.wait()
+
+
 async def test_encrypted_backup_streamer_random_nonce(hass: HomeAssistant) -> None:
     """Test the encrypted backup streamer."""
     decrypted_backup_path = get_fixture_path(
@@ -215,7 +215,7 @@ async def test_see_device_if_time_updated(hass: HomeAssistant) -> None:
 
 @pytest.mark.usefixtures("mock_bluetooth", "mock_device_tracker_conf")
 async def test_preserve_new_tracked_device_name(hass: HomeAssistant) -> None:
-    """Test preserving tracked device name across new seens."""
+    """Test preserving tracked device name across new seens."""  # codespell:ignore seens
 
     address = "DE:AD:BE:EF:13:37"
     name = "Mock device name"
@@ -174,6 +174,7 @@ async def test_agents_list_backups(
         "backup_id": "23e64aec",
         "date": "2024-11-22T11:48:48.727189+01:00",
         "database_included": True,
+        "extra_metadata": {},
         "folders": [],
         "homeassistant_included": True,
         "homeassistant_version": "2024.12.0.dev0",
@@ -223,6 +224,7 @@ async def test_agents_list_backups_fail_cloud(
         "backup_id": "23e64aec",
         "date": "2024-11-22T11:48:48.727189+01:00",
         "database_included": True,
+        "extra_metadata": {},
         "folders": [],
         "homeassistant_included": True,
         "homeassistant_version": "2024.12.0.dev0",
@@ -12,7 +12,12 @@ import voluptuous as vol
 
 from homeassistant.components.assist_pipeline.pipeline import STORAGE_KEY
 from homeassistant.components.cloud.const import DEFAULT_TTS_DEFAULT_VOICE, DOMAIN
-from homeassistant.components.cloud.tts import PLATFORM_SCHEMA, SUPPORT_LANGUAGES, Voice
+from homeassistant.components.cloud.tts import (
+    DEFAULT_VOICES,
+    PLATFORM_SCHEMA,
+    SUPPORT_LANGUAGES,
+    Voice,
+)
 from homeassistant.components.media_player import (
     ATTR_MEDIA_CONTENT_ID,
     DOMAIN as DOMAIN_MP,
@@ -61,6 +66,19 @@ def test_default_exists() -> None:
     assert DEFAULT_TTS_DEFAULT_VOICE[1] in TTS_VOICES[DEFAULT_TTS_DEFAULT_VOICE[0]]
 
 
+def test_all_languages_have_default() -> None:
+    """Test all languages have a default voice."""
+    assert set(SUPPORT_LANGUAGES).difference(DEFAULT_VOICES) == set()
+    assert set(DEFAULT_VOICES).difference(SUPPORT_LANGUAGES) == set()
+
+
+@pytest.mark.parametrize(("language", "voice"), DEFAULT_VOICES.items())
+def test_default_voice_is_valid(language: str, voice: str) -> None:
+    """Test that the default voice is valid."""
+    assert language in TTS_VOICES
+    assert voice in TTS_VOICES[language]
+
+
 def test_schema() -> None:
     """Test schema."""
     assert "nl-NL" in SUPPORT_LANGUAGES
@@ -439,7 +439,7 @@ async def test_zero_conf_old_blank_entry(
     mock_setup_entry: AsyncMock,
     mock_envoy: AsyncMock,
 ) -> None:
-    """Test re-using old blank entry."""
+    """Test reusing old blank entry."""
     entry = MockConfigEntry(
         domain=DOMAIN,
         data={
@@ -478,7 +478,7 @@ async def test_zero_conf_old_blank_entry_standard_title(
     mock_setup_entry: AsyncMock,
     mock_envoy: AsyncMock,
 ) -> None:
-    """Test re-using old blank entry was Envoy as title."""
+    """Test reusing old blank entry was Envoy as title."""
     entry = MockConfigEntry(
         domain=DOMAIN,
         data={
@@ -519,7 +519,7 @@ async def test_zero_conf_old_blank_entry_user_title(
     mock_setup_entry: AsyncMock,
     mock_envoy: AsyncMock,
 ) -> None:
-    """Test re-using old blank entry with user title."""
+    """Test reusing old blank entry with user title."""
     entry = MockConfigEntry(
         domain=DOMAIN,
         data={
@@ -47,6 +47,7 @@ TEST_AGENT_BACKUP_RESULT = {
     "backup_id": "test-backup",
     "database_included": True,
     "date": "2025-01-01T01:23:45.678Z",
+    "extra_metadata": {"with_automatic_settings": False},
    "folders": [],
     "homeassistant_included": True,
     "homeassistant_version": "2024.12.0",
@@ -106,7 +106,7 @@ TEST_BACKUP_2 = supervisor_backups.Backup(
     compressed=False,
     content=supervisor_backups.BackupContent(
         addons=["ssl"],
-        folders=["share"],
+        folders=[supervisor_backups.Folder.SHARE],
         homeassistant=False,
     ),
     date=datetime.fromisoformat("1970-01-01T00:00:00Z"),
@@ -136,7 +136,7 @@ TEST_BACKUP_DETAILS_2 = supervisor_backups.BackupComplete(
     compressed=TEST_BACKUP_2.compressed,
     date=TEST_BACKUP_2.date,
     extra=None,
-    folders=["share"],
+    folders=[supervisor_backups.Folder.SHARE],
     homeassistant_exclude_database=False,
     homeassistant=None,
     location=TEST_BACKUP_2.location,
@@ -156,7 +156,7 @@ TEST_BACKUP_3 = supervisor_backups.Backup(
     compressed=False,
     content=supervisor_backups.BackupContent(
         addons=["ssl"],
-        folders=["share"],
+        folders=[supervisor_backups.Folder.SHARE],
         homeassistant=True,
     ),
     date=datetime.fromisoformat("1970-01-01T00:00:00Z"),
@@ -186,7 +186,7 @@ TEST_BACKUP_DETAILS_3 = supervisor_backups.BackupComplete(
     compressed=TEST_BACKUP_3.compressed,
     date=TEST_BACKUP_3.date,
     extra=None,
-    folders=["share"],
+    folders=[supervisor_backups.Folder.SHARE],
     homeassistant_exclude_database=False,
     homeassistant=None,
     location=TEST_BACKUP_3.location,
@@ -207,7 +207,7 @@ TEST_BACKUP_4 = supervisor_backups.Backup(
     compressed=False,
     content=supervisor_backups.BackupContent(
         addons=["ssl"],
-        folders=["share"],
+        folders=[supervisor_backups.Folder.SHARE],
         homeassistant=True,
     ),
     date=datetime.fromisoformat("1970-01-01T00:00:00Z"),
@@ -234,23 +234,23 @@ TEST_BACKUP_DETAILS_4 = supervisor_backups.BackupComplete(
             version="9.14.0",
         )
     ],
-    compressed=TEST_BACKUP.compressed,
-    date=TEST_BACKUP.date,
+    compressed=TEST_BACKUP_4.compressed,
+    date=TEST_BACKUP_4.date,
     extra=None,
-    folders=["share"],
+    folders=[supervisor_backups.Folder.SHARE],
     homeassistant_exclude_database=True,
     homeassistant="2024.12.0",
-    location=TEST_BACKUP.location,
-    location_attributes=TEST_BACKUP.location_attributes,
-    locations=TEST_BACKUP.locations,
-    name=TEST_BACKUP.name,
-    protected=TEST_BACKUP.protected,
+    location=TEST_BACKUP_4.location,
+    location_attributes=TEST_BACKUP_4.location_attributes,
+    locations=TEST_BACKUP_4.locations,
+    name=TEST_BACKUP_4.name,
+    protected=TEST_BACKUP_4.protected,
     repositories=[],
-    size=TEST_BACKUP.size,
-    size_bytes=TEST_BACKUP.size_bytes,
-    slug=TEST_BACKUP.slug,
+    size=TEST_BACKUP_4.size,
+    size_bytes=TEST_BACKUP_4.size_bytes,
+    slug=TEST_BACKUP_4.slug,
     supervisor_version="2024.11.2",
-    type=TEST_BACKUP.type,
+    type=TEST_BACKUP_4.type,
 )
 
 TEST_BACKUP_5 = supervisor_backups.Backup(
@ -364,7 +364,7 @@ async def hassio_enabled(
|
|||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
async def setup_integration(
|
async def setup_backup_integration(
|
||||||
hass: HomeAssistant, hassio_enabled: None, supervisor_client: AsyncMock
|
hass: HomeAssistant, hassio_enabled: None, supervisor_client: AsyncMock
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Set up Backup integration."""
|
"""Set up Backup integration."""
|
||||||
@ -494,7 +494,7 @@ async def test_agent_info(
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.usefixtures("hassio_client", "setup_integration")
|
@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
("backup", "backup_details", "expected_response"),
|
("backup", "backup_details", "expected_response"),
|
||||||
[
|
[
|
||||||
@ -509,6 +509,7 @@ async def test_agent_info(
|
|||||||
"backup_id": "abc123",
|
"backup_id": "abc123",
|
||||||
"database_included": True,
|
"database_included": True,
|
||||||
"date": "1970-01-01T00:00:00+00:00",
|
"date": "1970-01-01T00:00:00+00:00",
|
||||||
|
"extra_metadata": {},
|
||||||
"failed_agent_ids": [],
|
"failed_agent_ids": [],
|
||||||
"folders": ["share"],
|
"folders": ["share"],
|
||||||
"homeassistant_included": True,
|
"homeassistant_included": True,
|
||||||
@ -528,6 +529,7 @@ async def test_agent_info(
|
|||||||
"backup_id": "abc123",
|
"backup_id": "abc123",
|
||||||
"database_included": False,
|
"database_included": False,
|
||||||
"date": "1970-01-01T00:00:00+00:00",
|
"date": "1970-01-01T00:00:00+00:00",
|
||||||
|
"extra_metadata": {},
|
||||||
"failed_agent_ids": [],
|
"failed_agent_ids": [],
|
||||||
"folders": ["share"],
|
"folders": ["share"],
|
||||||
"homeassistant_included": False,
|
"homeassistant_included": False,
|
||||||
@ -558,7 +560,7 @@ async def test_agent_list_backups(
|
|||||||
assert response["result"]["backups"] == [expected_response]
|
assert response["result"]["backups"] == [expected_response]
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.usefixtures("hassio_client", "setup_integration")
|
@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
|
||||||
async def test_agent_download(
|
async def test_agent_download(
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
hass_client: ClientSessionGenerator,
|
hass_client: ClientSessionGenerator,
|
||||||
@ -582,7 +584,7 @@ async def test_agent_download(
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.usefixtures("hassio_client", "setup_integration")
|
@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
|
||||||
async def test_agent_download_unavailable_backup(
|
async def test_agent_download_unavailable_backup(
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
hass_client: ClientSessionGenerator,
|
hass_client: ClientSessionGenerator,
|
||||||
@ -601,7 +603,7 @@ async def test_agent_download_unavailable_backup(
|
|||||||
assert resp.status == 404
|
assert resp.status == 404
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.usefixtures("hassio_client", "setup_integration")
|
@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
|
||||||
async def test_agent_upload(
|
async def test_agent_upload(
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
hass_client: ClientSessionGenerator,
|
hass_client: ClientSessionGenerator,
|
||||||
@ -650,7 +652,7 @@ async def test_agent_upload(
|
|||||||
supervisor_client.backups.remove_backup.assert_not_called()
|
supervisor_client.backups.remove_backup.assert_not_called()
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.usefixtures("hassio_client", "setup_integration")
|
@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
|
||||||
async def test_agent_get_backup(
|
async def test_agent_get_backup(
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
hass_ws_client: WebSocketGenerator,
|
hass_ws_client: WebSocketGenerator,
|
||||||
@ -680,6 +682,7 @@ async def test_agent_get_backup(
|
|||||||
"backup_id": "abc123",
|
"backup_id": "abc123",
|
||||||
"database_included": True,
|
"database_included": True,
|
||||||
"date": "1970-01-01T00:00:00+00:00",
|
"date": "1970-01-01T00:00:00+00:00",
|
||||||
|
"extra_metadata": {},
|
||||||
"failed_agent_ids": [],
|
"failed_agent_ids": [],
|
||||||
"folders": ["share"],
|
"folders": ["share"],
|
||||||
"homeassistant_included": True,
|
"homeassistant_included": True,
|
||||||
@ -691,7 +694,7 @@ async def test_agent_get_backup(
|
|||||||
supervisor_client.backups.backup_info.assert_called_once_with(backup_id)
|
supervisor_client.backups.backup_info.assert_called_once_with(backup_id)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.usefixtures("hassio_client", "setup_integration")
|
@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
("backup_info_side_effect", "expected_response"),
|
("backup_info_side_effect", "expected_response"),
|
||||||
[
|
[
|
||||||
@ -735,7 +738,7 @@ async def test_agent_get_backup_with_error(
|
|||||||
supervisor_client.backups.backup_info.assert_called_once_with(backup_id)
|
supervisor_client.backups.backup_info.assert_called_once_with(backup_id)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.usefixtures("hassio_client", "setup_integration")
|
@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
|
||||||
async def test_agent_delete_backup(
|
async def test_agent_delete_backup(
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
hass_ws_client: WebSocketGenerator,
|
hass_ws_client: WebSocketGenerator,
|
||||||
@ -760,7 +763,7 @@ async def test_agent_delete_backup(
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.usefixtures("hassio_client", "setup_integration")
|
@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
("remove_side_effect", "expected_response"),
|
("remove_side_effect", "expected_response"),
|
||||||
[
|
[
|
||||||
@ -806,7 +809,7 @@ async def test_agent_delete_with_error(
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.usefixtures("hassio_client", "setup_integration")
|
@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
("event_data", "mount_info_calls"),
|
("event_data", "mount_info_calls"),
|
||||||
[
|
[
|
||||||
@ -887,7 +890,7 @@ DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions(
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.usefixtures("hassio_client", "setup_integration")
|
@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
("extra_generate_options", "expected_supervisor_options"),
|
("extra_generate_options", "expected_supervisor_options"),
|
||||||
[
|
[
|
||||||
@ -1002,7 +1005,114 @@ async def test_reader_writer_create(
|
|||||||
assert response["event"] == {"manager_state": "idle"}
|
assert response["event"] == {"manager_state": "idle"}
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.usefixtures("hassio_client", "setup_integration")
|
@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
|
||||||
|
async def test_reader_writer_create_report_progress(
|
||||||
|
hass: HomeAssistant,
|
||||||
|
hass_ws_client: WebSocketGenerator,
|
||||||
|
freezer: FrozenDateTimeFactory,
|
||||||
|
supervisor_client: AsyncMock,
|
||||||
|
) -> None:
|
||||||
|
"""Test generating a backup."""
|
||||||
|
client = await hass_ws_client(hass)
|
||||||
|
freezer.move_to("2025-01-30 13:42:12.345678")
|
||||||
|
supervisor_client.backups.partial_backup.return_value.job_id = TEST_JOB_ID
|
||||||
|
supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
|
||||||
|
supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE
|
||||||
|
|
||||||
|
await client.send_json_auto_id({"type": "backup/subscribe_events"})
|
||||||
|
response = await client.receive_json()
|
||||||
|
assert response["event"] == {"manager_state": "idle"}
|
||||||
|
response = await client.receive_json()
|
||||||
|
assert response["success"]
|
||||||
|
|
||||||
|
await client.send_json_auto_id(
|
||||||
|
{"type": "backup/generate", "agent_ids": ["hassio.local"], "name": "Test"}
|
||||||
|
)
|
||||||
|
response = await client.receive_json()
|
||||||
|
assert response["event"] == {
|
||||||
|
"manager_state": "create_backup",
|
||||||
|
"reason": None,
|
||||||
|
"stage": None,
|
||||||
|
"state": "in_progress",
|
||||||
|
}
|
||||||
|
|
||||||
|
response = await client.receive_json()
|
||||||
|
assert response["success"]
|
||||||
|
assert response["result"] == {"backup_job_id": TEST_JOB_ID}
|
||||||
|
|
||||||
|
supervisor_client.backups.partial_backup.assert_called_once_with(
|
||||||
|
DEFAULT_BACKUP_OPTIONS
|
||||||
|
)
|
||||||
|
|
||||||
|
supervisor_event_base = {"uuid": TEST_JOB_ID, "reference": "test_slug"}
|
||||||
|
supervisor_events = [
|
||||||
|
supervisor_event_base | {"done": False, "stage": "addon_repositories"},
|
||||||
|
supervisor_event_base | {"done": False, "stage": None}, # Will be skipped
|
||||||
|
supervisor_event_base | {"done": False, "stage": "unknown"}, # Will be skipped
|
||||||
|
supervisor_event_base | {"done": False, "stage": "home_assistant"},
|
||||||
|
supervisor_event_base | {"done": False, "stage": "addons"},
|
||||||
|
supervisor_event_base | {"done": True, "stage": "finishing_file"},
|
||||||
|
]
|
||||||
|
expected_manager_events = [
|
||||||
|
"addon_repositories",
|
||||||
|
"home_assistant",
|
||||||
|
"addons",
|
||||||
|
"finishing_file",
|
||||||
|
]
|
||||||
|
|
||||||
|
for supervisor_event in supervisor_events:
|
||||||
|
await client.send_json_auto_id(
|
||||||
|
{
|
||||||
|
"type": "supervisor/event",
|
||||||
|
"data": {"event": "job", "data": supervisor_event},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
acks = 0
|
||||||
|
events = []
|
||||||
|
for _ in range(len(supervisor_events) + len(expected_manager_events)):
|
||||||
|
response = await client.receive_json()
|
||||||
|
if "event" in response:
|
||||||
|
events.append(response)
|
||||||
|
continue
|
||||||
|
assert response["success"]
|
||||||
|
acks += 1
|
||||||
|
|
||||||
|
assert acks == len(supervisor_events)
|
||||||
|
assert len(events) == len(expected_manager_events)
|
||||||
|
|
||||||
|
for i, event in enumerate(events):
|
||||||
|
assert event["event"] == {
|
||||||
|
"manager_state": "create_backup",
|
||||||
|
"reason": None,
|
||||||
|
"stage": expected_manager_events[i],
|
||||||
|
"state": "in_progress",
|
||||||
|
}
|
||||||
|
|
||||||
|
response = await client.receive_json()
|
||||||
|
assert response["event"] == {
|
||||||
|
"manager_state": "create_backup",
|
||||||
|
"reason": None,
|
||||||
|
"stage": "upload_to_agents",
|
||||||
|
"state": "in_progress",
|
||||||
|
}
|
||||||
|
|
||||||
|
response = await client.receive_json()
|
||||||
|
assert response["event"] == {
|
||||||
|
"manager_state": "create_backup",
|
||||||
|
"reason": None,
|
||||||
|
"stage": None,
|
||||||
|
"state": "completed",
|
||||||
|
}
|
||||||
|
|
||||||
|
supervisor_client.backups.download_backup.assert_not_called()
|
||||||
|
supervisor_client.backups.remove_backup.assert_not_called()
|
||||||
|
|
||||||
|
response = await client.receive_json()
|
||||||
|
assert response["event"] == {"manager_state": "idle"}
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
|
||||||
async def test_reader_writer_create_job_done(
|
async def test_reader_writer_create_job_done(
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
hass_ws_client: WebSocketGenerator,
|
hass_ws_client: WebSocketGenerator,
|
||||||
@ -1293,7 +1403,7 @@ async def test_reader_writer_create_per_agent_encryption(
|
|||||||
assert response["event"] == {"manager_state": "idle"}
|
assert response["event"] == {"manager_state": "idle"}
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.usefixtures("hassio_client", "setup_integration")
|
@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
("side_effect", "error_code", "error_message", "expected_reason"),
|
("side_effect", "error_code", "error_message", "expected_reason"),
|
||||||
[
|
[
|
||||||
@ -1388,7 +1498,7 @@ async def test_reader_writer_create_partial_backup_error(
|
|||||||
},
|
},
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
@pytest.mark.usefixtures("hassio_client", "setup_integration")
|
@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
|
||||||
async def test_reader_writer_create_missing_reference_error(
|
async def test_reader_writer_create_missing_reference_error(
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
hass_ws_client: WebSocketGenerator,
|
hass_ws_client: WebSocketGenerator,
|
||||||
@ -1447,7 +1557,7 @@ async def test_reader_writer_create_missing_reference_error(
|
|||||||
assert response["event"] == {"manager_state": "idle"}
|
assert response["event"] == {"manager_state": "idle"}
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.usefixtures("hassio_client", "setup_integration")
|
@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
|
||||||
@pytest.mark.parametrize("exception", [SupervisorError("Boom!"), Exception("Boom!")])
|
@pytest.mark.parametrize("exception", [SupervisorError("Boom!"), Exception("Boom!")])
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
("method", "download_call_count", "remove_call_count"),
|
("method", "download_call_count", "remove_call_count"),
|
||||||
@ -1541,7 +1651,7 @@ async def test_reader_writer_create_download_remove_error(
|
|||||||
assert response["event"] == {"manager_state": "idle"}
|
assert response["event"] == {"manager_state": "idle"}
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.usefixtures("hassio_client", "setup_integration")
|
@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
|
||||||
@pytest.mark.parametrize("exception", [SupervisorError("Boom!"), Exception("Boom!")])
|
@pytest.mark.parametrize("exception", [SupervisorError("Boom!"), Exception("Boom!")])
|
||||||
async def test_reader_writer_create_info_error(
|
async def test_reader_writer_create_info_error(
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
@ -1618,7 +1728,7 @@ async def test_reader_writer_create_info_error(
|
|||||||
assert response["event"] == {"manager_state": "idle"}
|
assert response["event"] == {"manager_state": "idle"}
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.usefixtures("hassio_client", "setup_integration")
|
@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
|
||||||
async def test_reader_writer_create_remote_backup(
|
async def test_reader_writer_create_remote_backup(
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
hass_ws_client: WebSocketGenerator,
|
hass_ws_client: WebSocketGenerator,
|
||||||
@ -1702,7 +1812,7 @@ async def test_reader_writer_create_remote_backup(
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.usefixtures("hassio_client", "setup_integration")
|
@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
("extra_generate_options", "expected_error"),
|
("extra_generate_options", "expected_error"),
|
||||||
[
|
[
|
||||||
@ -1772,7 +1882,7 @@ async def test_reader_writer_create_wrong_parameters(
|
|||||||
supervisor_client.backups.partial_backup.assert_not_called()
|
supervisor_client.backups.partial_backup.assert_not_called()
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.usefixtures("hassio_client", "setup_integration")
|
@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
|
||||||
async def test_agent_receive_remote_backup(
|
async def test_agent_receive_remote_backup(
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
hass_client: ClientSessionGenerator,
|
hass_client: ClientSessionGenerator,
|
||||||
@ -1848,7 +1958,7 @@ async def test_agent_receive_remote_backup(
|
|||||||
),
|
),
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
@pytest.mark.usefixtures("hassio_client", "setup_integration")
|
@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
|
||||||
async def test_reader_writer_restore(
|
async def test_reader_writer_restore(
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
hass_ws_client: WebSocketGenerator,
|
hass_ws_client: WebSocketGenerator,
|
||||||
@@ -1915,6 +2025,109 @@ async def test_reader_writer_restore(
     assert response["result"] is None


+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
+async def test_reader_writer_restore_report_progress(
+    hass: HomeAssistant,
+    hass_ws_client: WebSocketGenerator,
+    supervisor_client: AsyncMock,
+) -> None:
+    """Test restoring a backup."""
+    client = await hass_ws_client(hass)
+    supervisor_client.backups.partial_restore.return_value.job_id = TEST_JOB_ID
+    supervisor_client.backups.list.return_value = [TEST_BACKUP]
+    supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
+    supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE
+
+    await client.send_json_auto_id({"type": "backup/subscribe_events"})
+    response = await client.receive_json()
+    assert response["event"] == {
+        "manager_state": "idle",
+    }
+    response = await client.receive_json()
+    assert response["success"]
+
+    await client.send_json_auto_id(
+        {"type": "backup/restore", "agent_id": "hassio.local", "backup_id": "abc123"}
+    )
+    response = await client.receive_json()
+    assert response["event"] == {
+        "manager_state": "restore_backup",
+        "reason": None,
+        "stage": None,
+        "state": "in_progress",
+    }
+
+    supervisor_client.backups.partial_restore.assert_called_once_with(
+        "abc123",
+        supervisor_backups.PartialRestoreOptions(
+            addons=None,
+            background=True,
+            folders=None,
+            homeassistant=True,
+            location=None,
+            password=None,
+        ),
+    )
+
+    supervisor_event_base = {"uuid": TEST_JOB_ID, "reference": "test_slug"}
+    supervisor_events = [
+        supervisor_event_base | {"done": False, "stage": "addon_repositories"},
+        supervisor_event_base | {"done": False, "stage": None},  # Will be skipped
+        supervisor_event_base | {"done": False, "stage": "unknown"},  # Will be skipped
+        supervisor_event_base | {"done": False, "stage": "home_assistant"},
+        supervisor_event_base | {"done": True, "stage": "addons"},
+    ]
+    expected_manager_events = [
+        "addon_repositories",
+        "home_assistant",
+        "addons",
+    ]
+
+    for supervisor_event in supervisor_events:
+        await client.send_json_auto_id(
+            {
+                "type": "supervisor/event",
+                "data": {"event": "job", "data": supervisor_event},
+            }
+        )
+
+    acks = 0
+    events = []
+    for _ in range(len(supervisor_events) + len(expected_manager_events)):
+        response = await client.receive_json()
+        if "event" in response:
+            events.append(response)
+            continue
+        assert response["success"]
+        acks += 1
+
+    assert acks == len(supervisor_events)
+    assert len(events) == len(expected_manager_events)
+
+    for i, event in enumerate(events):
+        assert event["event"] == {
+            "manager_state": "restore_backup",
+            "reason": None,
+            "stage": expected_manager_events[i],
+            "state": "in_progress",
+        }
+
+    response = await client.receive_json()
+    assert response["event"] == {
+        "manager_state": "restore_backup",
+        "reason": None,
+        "stage": None,
+        "state": "completed",
+    }
+
+    response = await client.receive_json()
+    assert response["event"] == {"manager_state": "idle"}
+
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] is None
+
+
 @pytest.mark.parametrize(
     ("supervisor_error_string", "expected_error_code", "expected_reason"),
     [
@@ -1926,7 +2139,7 @@ async def test_reader_writer_restore(
         ),
     ],
 )
-@pytest.mark.usefixtures("hassio_client", "setup_integration")
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
 async def test_reader_writer_restore_error(
     hass: HomeAssistant,
     hass_ws_client: WebSocketGenerator,
@@ -1987,7 +2200,7 @@ async def test_reader_writer_restore_error(
     assert response["error"]["code"] == expected_error_code


-@pytest.mark.usefixtures("hassio_client", "setup_integration")
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
 async def test_reader_writer_restore_late_error(
     hass: HomeAssistant,
     hass_ws_client: WebSocketGenerator,
@@ -2101,7 +2314,7 @@ async def test_reader_writer_restore_late_error(
         ),
     ],
 )
-@pytest.mark.usefixtures("hassio_client", "setup_integration")
+@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
 async def test_reader_writer_restore_wrong_parameters(
     hass: HomeAssistant,
     hass_ws_client: WebSocketGenerator,
@@ -2138,7 +2351,7 @@ async def test_reader_writer_restore_wrong_parameters(
             TEST_JOB_DONE,
             {
                 "manager_state": "restore_backup",
-                "reason": "",
+                "reason": None,
                 "stage": None,
                 "state": "completed",
             },
@@ -2179,6 +2392,88 @@ async def test_restore_progress_after_restart(
     assert response["result"]["state"] == "idle"


+@pytest.mark.usefixtures("hassio_client")
+async def test_restore_progress_after_restart_report_progress(
+    hass: HomeAssistant,
+    hass_ws_client: WebSocketGenerator,
+    supervisor_client: AsyncMock,
+) -> None:
+    """Test restore backup progress after restart."""
+
+    supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE
+
+    with patch.dict(os.environ, MOCK_ENVIRON | {RESTORE_JOB_ID_ENV: TEST_JOB_ID}):
+        assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}})
+
+    client = await hass_ws_client(hass)
+
+    await client.send_json_auto_id({"type": "backup/subscribe_events"})
+    response = await client.receive_json()
+    assert response["event"] == {
+        "manager_state": "restore_backup",
+        "reason": None,
+        "stage": None,
+        "state": "in_progress",
+    }
+    response = await client.receive_json()
+    assert response["success"]
+
+    supervisor_event_base = {"uuid": TEST_JOB_ID, "reference": "test_slug"}
+    supervisor_events = [
+        supervisor_event_base | {"done": False, "stage": "addon_repositories"},
+        supervisor_event_base | {"done": False, "stage": None},  # Will be skipped
+        supervisor_event_base | {"done": False, "stage": "unknown"},  # Will be skipped
+        supervisor_event_base | {"done": False, "stage": "home_assistant"},
+        supervisor_event_base | {"done": True, "stage": "addons"},
+    ]
+    expected_manager_events = ["addon_repositories", "home_assistant", "addons"]
+    expected_manager_states = ["in_progress", "in_progress", "completed"]
+
+    for supervisor_event in supervisor_events:
+        await client.send_json_auto_id(
+            {
+                "type": "supervisor/event",
+                "data": {"event": "job", "data": supervisor_event},
+            }
+        )
+
+    acks = 0
+    events = []
+    for _ in range(len(supervisor_events) + len(expected_manager_events)):
+        response = await client.receive_json()
+        if "event" in response:
+            events.append(response)
+            continue
+        assert response["success"]
+        acks += 1
+
+    assert acks == len(supervisor_events)
+    assert len(events) == len(expected_manager_events)
+
+    for i, event in enumerate(events):
+        assert event["event"] == {
+            "manager_state": "restore_backup",
+            "reason": None,
+            "stage": expected_manager_events[i],
+            "state": expected_manager_states[i],
+        }
+
+    response = await client.receive_json()
+    assert response["event"] == {"manager_state": "idle"}
+
+    await client.send_json_auto_id({"type": "backup/info"})
+    response = await client.receive_json()
+
+    assert response["success"]
+    assert response["result"]["last_non_idle_event"] == {
+        "manager_state": "restore_backup",
+        "reason": None,
+        "stage": "addons",
+        "state": "completed",
+    }
+    assert response["result"]["state"] == "idle"
+
+
 @pytest.mark.usefixtures("hassio_client")
 async def test_restore_progress_after_restart_unknown_job(
     hass: HomeAssistant,
@@ -160,7 +160,7 @@ def mock_config_entry_v2() -> MockConfigEntry:
             CONF_IP_ADDRESS: "127.0.0.1",
             CONF_TOKEN: "00112233445566778899ABCDEFABCDEF",
         },
-        unique_id="HWE-P1_5c2fafabcdef",
+        unique_id="HWE-BAT_5c2fafabcdef",
     )


@@ -9,6 +9,7 @@ import pytest

 from homeassistant.components.homewizard.const import DOMAIN
 from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState
+from homeassistant.const import CONF_IP_ADDRESS, CONF_TOKEN
 from homeassistant.core import HomeAssistant

 from tests.common import MockConfigEntry, async_fire_time_changed
@@ -52,6 +53,36 @@ async def test_load_unload_v2(
     assert mock_config_entry_v2.state is ConfigEntryState.NOT_LOADED


+async def test_load_unload_v2_as_v1(
+    hass: HomeAssistant,
+    mock_homewizardenergy: MagicMock,
+) -> None:
+    """Test loading and unloading of integration with v2 config, but without using it."""
+
+    # Simulate v2 config but as a P1 Meter
+    mock_config_entry = MockConfigEntry(
+        title="Device",
+        domain=DOMAIN,
+        data={
+            CONF_IP_ADDRESS: "127.0.0.1",
+            CONF_TOKEN: "00112233445566778899ABCDEFABCDEF",
+        },
+        unique_id="HWE-P1_5c2fafabcdef",
+    )
+
+    mock_config_entry.add_to_hass(hass)
+    await hass.config_entries.async_setup(mock_config_entry.entry_id)
+    await hass.async_block_till_done()
+
+    assert mock_config_entry.state is ConfigEntryState.LOADED
+    assert len(mock_homewizardenergy.combined.mock_calls) == 1
+
+    await hass.config_entries.async_unload(mock_config_entry.entry_id)
+    await hass.async_block_till_done()
+
+    assert mock_config_entry.state is ConfigEntryState.NOT_LOADED
+
+
 async def test_load_failed_host_unavailable(
     hass: HomeAssistant,
     mock_config_entry: MockConfigEntry,
@@ -36,6 +36,10 @@ async def test_repair_acquires_token(
     client = await hass_client()

     mock_config_entry.add_to_hass(hass)
+    hass.config_entries.async_update_entry(
+        mock_config_entry, unique_id="HWE-BAT_5c2fafabcdef"
+    )
+    await hass.async_block_till_done()

     with patch("homeassistant.components.homewizard.has_v2_api", return_value=True):
         await hass.config_entries.async_setup(mock_config_entry.entry_id)
@@ -392,7 +392,7 @@ async def test_light_availability(
     assert test_light is not None
     assert test_light.state == "on"

-    # Change availability by modififying the zigbee_connectivity status
+    # Change availability by modifying the zigbee_connectivity status
     for status in ("connectivity_issue", "disconnected", "connected"):
         mock_bridge_v2.api.emit_event(
             "update",
@@ -2,7 +2,7 @@

 from collections.abc import AsyncGenerator
 from io import StringIO
-from unittest.mock import patch
+from unittest.mock import ANY, patch

 import pytest

@@ -106,6 +106,7 @@ async def test_agents_list_backups(
         "backup_id": "abc123",
         "database_included": False,
         "date": "1970-01-01T00:00:00Z",
+        "extra_metadata": {},
         "failed_agent_ids": [],
         "folders": ["media", "share"],
         "homeassistant_included": True,
@@ -187,6 +188,7 @@ async def test_agents_upload(
         "backup_id": "test-backup",
         "database_included": True,
         "date": "1970-01-01T00:00:00.000Z",
+        "extra_metadata": {"instance_id": ANY, "with_automatic_settings": False},
         "failed_agent_ids": [],
         "folders": ["media", "share"],
         "homeassistant_included": True,
File diff suppressed because it is too large
@@ -52,7 +52,7 @@ async def test_canceling_debouncer_on_shutdown(
     assert not mock_debouncer.is_set()
     mqtt_client_mock.subscribe.assert_not_called()

-    # Note thet the broker connection will not be disconnected gracefully
+    # Note that the broker connection will not be disconnected gracefully
     await hass.async_block_till_done()
     async_fire_time_changed(hass, utcnow() + timedelta(seconds=5))
     await asyncio.sleep(0)
@@ -19,6 +19,10 @@
         'backup_id': 'abc123',
         'database_included': True,
         'date': '1970-01-01T00:00:00.000Z',
+        'extra_metadata': dict({
+          'instance_id': 'abc123',
+          'with_automatic_settings': True,
+        }),
         'failed_agent_ids': list([
         ]),
         'folders': list([
@@ -42,6 +46,10 @@
         'backup_id': 'def456',
         'database_included': False,
         'date': '1980-01-01T00:00:00.000Z',
+        'extra_metadata': dict({
+          'instance_id': 'unknown_uuid',
+          'with_automatic_settings': True,
+        }),
         'failed_agent_ids': list([
         ]),
         'folders': list([
@@ -88,6 +88,7 @@ async def test_agents_list_backups(
         "backup_id": "23e64aec",
         "date": "2024-11-22T11:48:48.727189+01:00",
         "database_included": True,
+        "extra_metadata": {},
         "folders": [],
         "homeassistant_included": True,
         "homeassistant_version": "2024.12.0.dev0",
@@ -123,6 +124,7 @@ async def test_agents_get_backup(
         "backup_id": "23e64aec",
         "date": "2024-11-22T11:48:48.727189+01:00",
         "database_included": True,
+        "extra_metadata": {},
         "folders": [],
         "homeassistant_included": True,
         "homeassistant_version": "2024.12.0.dev0",
@@ -436,9 +436,9 @@ async def test_camera_webrtc(
     assert response
     assert response.get("success") is False
     assert response["error"]["code"] == "home_assistant_error"
-    msg = "The sdp_m_line_index is required for ring webrtc streaming"
-    assert msg in response["error"].get("message")
-    assert msg in caplog.text
+    error_msg = f"Error negotiating stream for {front_camera_mock.name}"
+    assert error_msg in response["error"].get("message")
+    assert error_msg in caplog.text
     front_camera_mock.on_webrtc_candidate.assert_called_once()

     # Answer message
@@ -16,7 +16,7 @@ from homeassistant.components.ring.const import (
     CONF_LISTEN_CREDENTIALS,
     SCAN_INTERVAL,
 )
-from homeassistant.components.ring.coordinator import RingEventListener
+from homeassistant.components.ring.coordinator import RingConfigEntry, RingEventListener
 from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState
 from homeassistant.const import CONF_DEVICE_ID, CONF_TOKEN, CONF_USERNAME
 from homeassistant.core import HomeAssistant
@@ -80,12 +80,12 @@ async def test_auth_failed_on_setup(
     ("error_type", "log_msg"),
     [
         (
-            RingTimeout,
-            "Timeout communicating with API: ",
+            RingTimeout("Some internal error info"),
+            "Timeout communicating with Ring API",
         ),
         (
-            RingError,
-            "Error communicating with API: ",
+            RingError("Some internal error info"),
+            "Error communicating with Ring API",
         ),
     ],
     ids=["timeout-error", "other-error"],
@@ -95,6 +95,7 @@ async def test_error_on_setup(
     mock_ring_client,
     mock_config_entry: MockConfigEntry,
     caplog: pytest.LogCaptureFixture,
+    freezer: FrozenDateTimeFactory,
     error_type,
     log_msg,
 ) -> None:
@@ -166,11 +167,11 @@ async def test_auth_failure_on_device_update(
     [
         (
             RingTimeout,
-            "Error fetching devices data: Timeout communicating with API: ",
+            "Error fetching devices data: Timeout communicating with Ring API",
         ),
         (
             RingError,
-            "Error fetching devices data: Error communicating with API: ",
+            "Error fetching devices data: Error communicating with Ring API",
         ),
     ],
     ids=["timeout-error", "other-error"],
@@ -178,7 +179,7 @@ async def test_auth_failure_on_device_update(
 async def test_error_on_global_update(
     hass: HomeAssistant,
     mock_ring_client,
-    mock_config_entry: MockConfigEntry,
+    mock_config_entry: RingConfigEntry,
     freezer: FrozenDateTimeFactory,
     caplog: pytest.LogCaptureFixture,
     error_type,
@@ -189,15 +190,35 @@ async def test_error_on_global_update(
     await hass.config_entries.async_setup(mock_config_entry.entry_id)
     await hass.async_block_till_done()

-    mock_ring_client.async_update_devices.side_effect = error_type
+    coordinator = mock_config_entry.runtime_data.devices_coordinator
+    assert coordinator

-    freezer.tick(SCAN_INTERVAL)
-    async_fire_time_changed(hass)
-    await hass.async_block_till_done(wait_background_tasks=True)
+    with patch.object(
+        coordinator, "_async_update_data", wraps=coordinator._async_update_data
+    ) as refresh_spy:
+        error = error_type("Some internal error info 1")
+        mock_ring_client.async_update_devices.side_effect = error

-    assert log_msg in caplog.text
+        freezer.tick(SCAN_INTERVAL * 2)
+        async_fire_time_changed(hass)
+        await hass.async_block_till_done(wait_background_tasks=True)

-    assert hass.config_entries.async_get_entry(mock_config_entry.entry_id)
+        refresh_spy.assert_called()
+        assert coordinator.last_exception.__cause__ == error
+        assert log_msg in caplog.text
+
+        # Check log is not being spammed.
+        refresh_spy.reset_mock()
+        error2 = error_type("Some internal error info 2")
+        caplog.clear()
+        mock_ring_client.async_update_devices.side_effect = error2
+        freezer.tick(SCAN_INTERVAL * 2)
+        async_fire_time_changed(hass)
+        await hass.async_block_till_done(wait_background_tasks=True)
+
+        refresh_spy.assert_called()
+        assert coordinator.last_exception.__cause__ == error2
+        assert log_msg not in caplog.text


 @pytest.mark.parametrize(
@@ -205,11 +226,11 @@ async def test_error_on_global_update(
     [
         (
             RingTimeout,
-            "Error fetching devices data: Timeout communicating with API for device Front: ",
+            "Error fetching devices data: Timeout communicating with Ring API",
         ),
         (
             RingError,
-            "Error fetching devices data: Error communicating with API for device Front: ",
+            "Error fetching devices data: Error communicating with Ring API",
         ),
     ],
     ids=["timeout-error", "other-error"],
@@ -218,7 +239,7 @@ async def test_error_on_device_update(
     hass: HomeAssistant,
     mock_ring_client,
     mock_ring_devices,
-    mock_config_entry: MockConfigEntry,
+    mock_config_entry: RingConfigEntry,
     freezer: FrozenDateTimeFactory,
     caplog: pytest.LogCaptureFixture,
     error_type,
@@ -229,15 +250,36 @@ async def test_error_on_device_update(
     await hass.config_entries.async_setup(mock_config_entry.entry_id)
     await hass.async_block_till_done()

-    front_door_doorbell = mock_ring_devices.get_device(765432)
-    front_door_doorbell.async_history.side_effect = error_type
+    coordinator = mock_config_entry.runtime_data.devices_coordinator
+    assert coordinator

-    freezer.tick(SCAN_INTERVAL)
-    async_fire_time_changed(hass)
-    await hass.async_block_till_done(wait_background_tasks=True)
+    with patch.object(
+        coordinator, "_async_update_data", wraps=coordinator._async_update_data
+    ) as refresh_spy:
+        error = error_type("Some internal error info 1")
+        front_door_doorbell = mock_ring_devices.get_device(765432)
+        front_door_doorbell.async_history.side_effect = error

-    assert log_msg in caplog.text
-    assert hass.config_entries.async_get_entry(mock_config_entry.entry_id)
+        freezer.tick(SCAN_INTERVAL * 2)
+        async_fire_time_changed(hass)
+        await hass.async_block_till_done(wait_background_tasks=True)
+
+        refresh_spy.assert_called()
+        assert coordinator.last_exception.__cause__ == error
+        assert log_msg in caplog.text
+
+        # Check log is not being spammed.
+        error2 = error_type("Some internal error info 2")
+        front_door_doorbell.async_history.side_effect = error2
+        refresh_spy.reset_mock()
+        caplog.clear()
+        freezer.tick(SCAN_INTERVAL * 2)
+        async_fire_time_changed(hass)
+        await hass.async_block_till_done(wait_background_tasks=True)
+
+        refresh_spy.assert_called()
+        assert coordinator.last_exception.__cause__ == error2
+        assert log_msg not in caplog.text


 @pytest.mark.parametrize(
@@ -2,7 +2,7 @@

 from io import StringIO
 from typing import Any
-from unittest.mock import AsyncMock, MagicMock, Mock, patch
+from unittest.mock import ANY, AsyncMock, MagicMock, Mock, patch

 import pytest
 from synology_dsm.api.file_station.models import SynoFileFile, SynoFileSharedFolder
@@ -299,6 +299,7 @@ async def test_agents_list_backups(
         "backup_id": "abcd12ef",
         "date": "2025-01-09T20:14:35.457323+01:00",
         "database_included": True,
+        "extra_metadata": {"instance_id": ANY, "with_automatic_settings": True},
         "folders": [],
         "homeassistant_included": True,
         "homeassistant_version": "2025.2.0.dev0",
@@ -369,6 +370,7 @@ async def test_agents_list_backups_disabled_filestation(
         "backup_id": "abcd12ef",
         "date": "2025-01-09T20:14:35.457323+01:00",
         "database_included": True,
+        "extra_metadata": {"instance_id": ANY, "with_automatic_settings": True},
         "folders": [],
         "homeassistant_included": True,
         "homeassistant_version": "2025.2.0.dev0",
@@ -673,7 +675,11 @@ async def test_agents_delete_not_existing(
     backup_id = "ef34ab12"

     setup_dsm_with_filestation.file.delete_file = AsyncMock(
-        side_effect=SynologyDSMAPIErrorException("api", "404", "not found")
+        side_effect=SynologyDSMAPIErrorException(
+            "api",
+            "900",
+            [{"code": 408, "path": f"/ha_backup/my_backup_path/{backup_id}.tar"}],
+        )
     )

     await client.send_json_auto_id(
@@ -685,26 +691,40 @@ async def test_agents_delete_not_existing(
     response = await client.receive_json()

     assert response["success"]
-    assert response["result"] == {
-        "agent_errors": {
-            "synology_dsm.mocked_syno_dsm_entry": "Failed to delete the backup"
-        }
-    }
+    assert response["result"] == {"agent_errors": {}}


+@pytest.mark.parametrize(
+    ("error", "expected_log"),
+    [
+        (
+            SynologyDSMAPIErrorException("api", "100", "Unknown error"),
+            "{'api': 'api', 'code': '100', 'reason': 'Unknown', 'details': 'Unknown error'}",
+        ),
+        (
+            SynologyDSMAPIErrorException("api", "900", [{"code": 407}]),
+            "{'api': 'api', 'code': '900', 'reason': 'Unknown', 'details': [{'code': 407}]",
+        ),
+        (
+            SynologyDSMAPIErrorException("api", "900", [{"code": 417}]),
+            "{'api': 'api', 'code': '900', 'reason': 'Unknown', 'details': [{'code': 417}]",
+        ),
+    ],
+)
 async def test_agents_delete_error(
     hass: HomeAssistant,
     hass_ws_client: WebSocketGenerator,
+    caplog: pytest.LogCaptureFixture,
     setup_dsm_with_filestation: MagicMock,
+    error: SynologyDSMAPIErrorException,
+    expected_log: str,
 ) -> None:
     """Test error while delete backup."""
     client = await hass_ws_client(hass)

     # error while delete
     backup_id = "abcd12ef"
-    setup_dsm_with_filestation.file.delete_file.side_effect = (
-        SynologyDSMAPIErrorException("api", "404", "not found")
-    )
+    setup_dsm_with_filestation.file.delete_file.side_effect = error
     await client.send_json_auto_id(
         {
             "type": "backup/delete",
@@ -716,9 +736,10 @@ async def test_agents_delete_error(
     assert response["success"]
     assert response["result"] == {
         "agent_errors": {
-            "synology_dsm.mocked_syno_dsm_entry": "Failed to delete the backup"
+            "synology_dsm.mocked_syno_dsm_entry": "Failed to delete backup"
         }
     }
+    assert f"Failed to delete backup: {expected_log}" in caplog.text
     mock: AsyncMock = setup_dsm_with_filestation.file.delete_file
     assert len(mock.mock_calls) == 1
     assert mock.call_args_list[0].kwargs["filename"] == "abcd12ef.tar"
@@ -174,7 +174,7 @@ def validate_common_camera_state(
     entity_id: str,
     features: int = CameraEntityFeature.STREAM,
 ):
-    """Validate state that is common to all camera entity, regradless of type."""
+    """Validate state that is common to all camera entity, regardless of type."""
     entity_state = hass.states.get(entity_id)
     assert entity_state
     assert entity_state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION
@@ -10,9 +10,16 @@ from homeassistant.components.humidifier import (
     ATTR_HUMIDITY,
     ATTR_MODE,
     DOMAIN as HUMIDIFIER_DOMAIN,
+    MODE_AUTO,
+    MODE_SLEEP,
     SERVICE_SET_HUMIDITY,
     SERVICE_SET_MODE,
 )
+from homeassistant.components.vesync.const import (
+    VS_HUMIDIFIER_MODE_AUTO,
+    VS_HUMIDIFIER_MODE_MANUAL,
+    VS_HUMIDIFIER_MODE_SLEEP,
+)
 from homeassistant.config_entries import ConfigEntry, ConfigEntryState
 from homeassistant.const import (
     ATTR_ENTITY_ID,
@@ -222,7 +229,7 @@ async def test_set_mode(
     await hass.services.async_call(
         HUMIDIFIER_DOMAIN,
         SERVICE_SET_MODE,
-        {ATTR_ENTITY_ID: ENTITY_HUMIDIFIER, ATTR_MODE: "auto"},
+        {ATTR_ENTITY_ID: ENTITY_HUMIDIFIER, ATTR_MODE: MODE_AUTO},
         blocking=True,
     )
     await hass.async_block_till_done()
@@ -285,3 +292,38 @@ async def test_valid_mist_modes(
     await hass.async_block_till_done()
     assert "Unknown mode 'auto'" not in caplog.text
     assert "Unknown mode 'manual'" not in caplog.text
+
+
+async def test_set_mode_sleep_turns_display_off(
+    hass: HomeAssistant,
+    config_entry: ConfigEntry,
+    humidifier,
+    manager,
+) -> None:
+    """Test update of display for sleep mode."""
+
+    # First define valid mist modes
+    humidifier.mist_modes = [
+        VS_HUMIDIFIER_MODE_AUTO,
+        VS_HUMIDIFIER_MODE_MANUAL,
+        VS_HUMIDIFIER_MODE_SLEEP,
+    ]
+
+    with patch(
+        "homeassistant.components.vesync.async_generate_device_list",
+        return_value=[humidifier],
+    ):
+        await hass.config_entries.async_setup(config_entry.entry_id)
+        await hass.async_block_till_done()
+
+    with (
+        patch.object(humidifier, "set_humidity_mode", return_value=True),
+        patch.object(humidifier, "set_display") as display_mock,
+    ):
+        await hass.services.async_call(
+            HUMIDIFIER_DOMAIN,
+            SERVICE_SET_MODE,
+            {ATTR_ENTITY_ID: ENTITY_HUMIDIFIER, ATTR_MODE: MODE_SLEEP},
+            blocking=True,
+        )
+        display_mock.assert_called_once_with(False)
@@ -4,6 +4,7 @@ import asyncio
 from datetime import timedelta
 import logging
 from unittest.mock import AsyncMock, Mock
+import weakref

 import pytest

@@ -529,3 +530,37 @@ async def test_background(
     async_fire_time_changed(hass, utcnow() + timedelta(seconds=1))
     await hass.async_block_till_done(wait_background_tasks=False)
     assert len(calls) == 2
+
+
+async def test_shutdown_releases_parent_class(hass: HomeAssistant) -> None:
+    """Test shutdown releases parent class.
+
+    See https://github.com/home-assistant/core/issues/137237
+    """
+    calls = []
+
+    class SomeClass:
+        def run_func(self) -> None:
+            calls.append(None)
+
+    my_class = SomeClass()
+    my_class_weak_ref = weakref.ref(my_class)
+
+    debouncer = debounce.Debouncer(
+        hass,
+        _LOGGER,
+        cooldown=0.01,
+        immediate=True,
+        function=my_class.run_func,
+    )
+
+    # Debouncer keeps a reference to the function, preventing GC
+    del my_class
+    await debouncer.async_call()
+    await hass.async_block_till_done()
+    assert len(calls) == 1
+    assert my_class_weak_ref() is not None
+
+    # Debouncer shutdown releases the class
+    debouncer.async_shutdown()
+    assert my_class_weak_ref() is None
|
|||||||
async def test_create_eager_task_from_thread(hass: HomeAssistant) -> None:
|
async def test_create_eager_task_from_thread(hass: HomeAssistant) -> None:
|
||||||
"""Test we report trying to create an eager task from a thread."""
|
"""Test we report trying to create an eager task from a thread."""
|
||||||
|
|
||||||
|
coro = asyncio.sleep(0)
|
||||||
|
|
||||||
def create_task():
|
def create_task():
|
||||||
hasync.create_eager_task(asyncio.sleep(0))
|
hasync.create_eager_task(coro)
|
||||||
|
|
||||||
with pytest.raises(
|
with pytest.raises(
|
||||||
RuntimeError,
|
RuntimeError,
|
||||||
@@ -145,14 +147,19 @@ async def test_create_eager_task_from_thread(hass: HomeAssistant) -> None:
 ):
     await hass.async_add_executor_job(create_task)

+    # Avoid `RuntimeWarning: coroutine 'sleep' was never awaited`
+    await coro
+

 async def test_create_eager_task_from_thread_in_integration(
     hass: HomeAssistant, caplog: pytest.LogCaptureFixture
 ) -> None:
     """Test we report trying to create an eager task from a thread."""

+    coro = asyncio.sleep(0)
+
     def create_task():
-        hasync.create_eager_task(asyncio.sleep(0))
+        hasync.create_eager_task(coro)

     frames = extract_stack_to_frame(
         [
@@ -200,6 +207,9 @@ async def test_create_eager_task_from_thread_in_integration(
         "self.light.is_on"
     ) in caplog.text

+    # Avoid `RuntimeWarning: coroutine 'sleep' was never awaited`
+    await coro
+

 async def test_get_scheduled_timer_handles(hass: HomeAssistant) -> None:
     """Test get_scheduled_timer_handles returns all scheduled timer handles."""