mirror of
https://github.com/home-assistant/core.git
synced 2025-09-23 20:09:35 +00:00
Compare commits
225 Commits
add-num-op
...
aioesphome
Author | SHA1 | Date | |
---|---|---|---|
![]() |
18f8b28904 | ||
![]() |
3bac6b86df | ||
![]() |
20293e2a11 | ||
![]() |
15cc28e6c1 | ||
![]() |
874ca1323b | ||
![]() |
ca186925af | ||
![]() |
2ab051b716 | ||
![]() |
a2a726de34 | ||
![]() |
5d543d2185 | ||
![]() |
a78c909b34 | ||
![]() |
f00ab80d17 | ||
![]() |
014881d985 | ||
![]() |
29a42a8e58 | ||
![]() |
3f70084d7f | ||
![]() |
b1ae9c95c9 | ||
![]() |
8be79ecdb0 | ||
![]() |
f6b8aa893b | ||
![]() |
c867026bdd | ||
![]() |
da3a164e66 | ||
![]() |
32688e1108 | ||
![]() |
4305ea9b4c | ||
![]() |
61153ec456 | ||
![]() |
9e4a2d5fa9 | ||
![]() |
72e608918b | ||
![]() |
86db60c442 | ||
![]() |
25806615a9 | ||
![]() |
a0f67381e5 | ||
![]() |
90bfadda9b | ||
![]() |
0f8e700965 | ||
![]() |
21d4ed2837 | ||
![]() |
ce363b3835 | ||
![]() |
dd3e6b8df5 | ||
![]() |
abbf8390ac | ||
![]() |
689039959c | ||
![]() |
52c25cfc88 | ||
![]() |
00b2017767 | ||
![]() |
dd7f7be6ad | ||
![]() |
22709506c6 | ||
![]() |
f0c0492375 | ||
![]() |
58459cb80f | ||
![]() |
a19e378447 | ||
![]() |
38a5a3ed4b | ||
![]() |
e76bed4a83 | ||
![]() |
d73309ba60 | ||
![]() |
19fdea024c | ||
![]() |
a3cfd7f707 | ||
![]() |
3dd941eff7 | ||
![]() |
d389141aee | ||
![]() |
3c542b8d43 | ||
![]() |
2367df89d9 | ||
![]() |
7bfdfb3fc7 | ||
![]() |
485916265a | ||
![]() |
1bb3c96fc1 | ||
![]() |
4eaf6784af | ||
![]() |
7b7265a6b0 | ||
![]() |
9059e3dadc | ||
![]() |
d9d42b3ad5 | ||
![]() |
d565fb3cb4 | ||
![]() |
6e93e480d1 | ||
![]() |
5a3570702d | ||
![]() |
b26b1df143 | ||
![]() |
fdbff76733 | ||
![]() |
018d59a892 | ||
![]() |
4b6dd0eb8f | ||
![]() |
b7db87bd3d | ||
![]() |
86dc453c55 | ||
![]() |
a4f2c88c7f | ||
![]() |
3cdb894e61 | ||
![]() |
cb837aaae5 | ||
![]() |
82443ded34 | ||
![]() |
71cc3b7fcd | ||
![]() |
e5658f9747 | ||
![]() |
868ded141f | ||
![]() |
1151fa698d | ||
![]() |
2796d6110a | ||
![]() |
844b97bd32 | ||
![]() |
286b2500bd | ||
![]() |
4b7746ab51 | ||
![]() |
ca1c366f4f | ||
![]() |
de42ac14ac | ||
![]() |
7f7bd5a97f | ||
![]() |
8a70a1badb | ||
![]() |
181741cab6 | ||
![]() |
1e14fb6dab | ||
![]() |
2b6a125927 | ||
![]() |
e61ad10708 | ||
![]() |
5177f9e8c2 | ||
![]() |
850aeeb5eb | ||
![]() |
a1b9061060 | ||
![]() |
0ec1f27489 | ||
![]() |
befc93bc73 | ||
![]() |
1526d953bf | ||
![]() |
d38082a5c8 | ||
![]() |
42850421d2 | ||
![]() |
21a835c4b4 | ||
![]() |
e9294dbf72 | ||
![]() |
5c4dfbff1b | ||
![]() |
abe628506d | ||
![]() |
12cc0ed18d | ||
![]() |
8ca7562390 | ||
![]() |
942f7eebb1 | ||
![]() |
1a167e6aee | ||
![]() |
9531ae10f2 | ||
![]() |
bfc9616abf | ||
![]() |
054a5d751a | ||
![]() |
a43ba4f966 | ||
![]() |
1a5cae125f | ||
![]() |
f3b9bda876 | ||
![]() |
3f3aaa2815 | ||
![]() |
6dc7870779 | ||
![]() |
be83416c72 | ||
![]() |
c745ee18eb | ||
![]() |
cf907ae196 | ||
![]() |
8eee53036a | ||
![]() |
b37237d24b | ||
![]() |
950e758b62 | ||
![]() |
9cd940b7df | ||
![]() |
10b186a20d | ||
![]() |
757aec1c6b | ||
![]() |
0b159bdb9c | ||
![]() |
8728312e87 | ||
![]() |
bbb67db354 | ||
![]() |
265f5da21a | ||
![]() |
54859e8a83 | ||
![]() |
c87dba878d | ||
![]() |
8d8e008123 | ||
![]() |
b30667a469 | ||
![]() |
8920c548d5 | ||
![]() |
eac719f9af | ||
![]() |
71c274cb91 | ||
![]() |
d4902361e6 | ||
![]() |
f63eee3889 | ||
![]() |
21bfe610d1 | ||
![]() |
21c174e895 | ||
![]() |
ec148e0459 | ||
![]() |
286763b998 | ||
![]() |
5f88122a2b | ||
![]() |
31968d16ab | ||
![]() |
c125554817 | ||
![]() |
10f2955d34 | ||
![]() |
55712b784c | ||
![]() |
fe3a929556 | ||
![]() |
534801e80d | ||
![]() |
8aeda5a0c0 | ||
![]() |
eb1cbbc75c | ||
![]() |
fa8a4d7098 | ||
![]() |
2623ebac4d | ||
![]() |
1746c51ce4 | ||
![]() |
8b984a2105 | ||
![]() |
ebee370a56 | ||
![]() |
dabd096587 | ||
![]() |
21399818af | ||
![]() |
4354214fbf | ||
![]() |
5bd39804f1 | ||
![]() |
6d3ad3ab9c | ||
![]() |
4c212bdcd4 | ||
![]() |
b91b39580f | ||
![]() |
472d70b6c9 | ||
![]() |
3499ed7a98 | ||
![]() |
2c809d5903 | ||
![]() |
40988198f3 | ||
![]() |
ab5d1d27f1 | ||
![]() |
1c10b85fed | ||
![]() |
91a7db08ff | ||
![]() |
a764d54123 | ||
![]() |
dc09e33556 | ||
![]() |
14173bd9ec | ||
![]() |
d2e7537629 | ||
![]() |
9a165a64fe | ||
![]() |
9c749a6abc | ||
![]() |
2e33222c71 | ||
![]() |
ab1c2c4f70 | ||
![]() |
529219ae69 | ||
![]() |
d6ce71fa61 | ||
![]() |
e5b67d513a | ||
![]() |
a547179f66 | ||
![]() |
8c61788a7d | ||
![]() |
6b934d94db | ||
![]() |
d30ad82774 | ||
![]() |
4618b33e93 | ||
![]() |
d6299094db | ||
![]() |
087d9d30c0 | ||
![]() |
f07890cf5c | ||
![]() |
e5b78cc481 | ||
![]() |
12b409d8e1 | ||
![]() |
def5408db8 | ||
![]() |
f105b45ee2 | ||
![]() |
9d904c30a7 | ||
![]() |
99b047939f | ||
![]() |
3a615908ee | ||
![]() |
baff541f46 | ||
![]() |
6d8c35cfe9 | ||
![]() |
b8d9883e74 | ||
![]() |
c3c65af450 | ||
![]() |
3af8616764 | ||
![]() |
64ec4609c5 | ||
![]() |
c78bc26b83 | ||
![]() |
0c093646c9 | ||
![]() |
1b27acdde0 | ||
![]() |
9dafc0e02f | ||
![]() |
0091dafcb0 | ||
![]() |
b387acffb7 | ||
![]() |
36b3133fa2 | ||
![]() |
fe01e96012 | ||
![]() |
0b56ec16ed | ||
![]() |
ca79f4c963 | ||
![]() |
9a43f2776d | ||
![]() |
0cda883b56 | ||
![]() |
ae58e633f0 | ||
![]() |
06480bfd9d | ||
![]() |
625f586945 | ||
![]() |
7dbeaa475d | ||
![]() |
dff3d5f8af | ||
![]() |
89c335919a | ||
![]() |
2bb4573357 | ||
![]() |
7037ce989c | ||
![]() |
bfdd2053ba | ||
![]() |
fcc3f92f8c | ||
![]() |
8710267d53 | ||
![]() |
85b6adcc9a | ||
![]() |
beec6e86e0 | ||
![]() |
3dacffaaf9 | ||
![]() |
d90f2a1de1 | ||
![]() |
b6c9217429 | ||
![]() |
7fc8da6769 |
@@ -142,6 +142,7 @@ homeassistant.components.cloud.*
|
||||
homeassistant.components.co2signal.*
|
||||
homeassistant.components.comelit.*
|
||||
homeassistant.components.command_line.*
|
||||
homeassistant.components.compit.*
|
||||
homeassistant.components.config.*
|
||||
homeassistant.components.configurator.*
|
||||
homeassistant.components.cookidoo.*
|
||||
|
12
CODEOWNERS
generated
12
CODEOWNERS
generated
@@ -107,8 +107,8 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/ambient_station/ @bachya
|
||||
/tests/components/ambient_station/ @bachya
|
||||
/homeassistant/components/amcrest/ @flacjacket
|
||||
/homeassistant/components/analytics/ @home-assistant/core @ludeeus
|
||||
/tests/components/analytics/ @home-assistant/core @ludeeus
|
||||
/homeassistant/components/analytics/ @home-assistant/core
|
||||
/tests/components/analytics/ @home-assistant/core
|
||||
/homeassistant/components/analytics_insights/ @joostlek
|
||||
/tests/components/analytics_insights/ @joostlek
|
||||
/homeassistant/components/android_ip_webcam/ @engrbm87
|
||||
@@ -292,6 +292,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/command_line/ @gjohansson-ST
|
||||
/homeassistant/components/compensation/ @Petro31
|
||||
/tests/components/compensation/ @Petro31
|
||||
/homeassistant/components/compit/ @Przemko92
|
||||
/tests/components/compit/ @Przemko92
|
||||
/homeassistant/components/config/ @home-assistant/core
|
||||
/tests/components/config/ @home-assistant/core
|
||||
/homeassistant/components/configurator/ @home-assistant/core
|
||||
@@ -770,6 +772,8 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/iqvia/ @bachya
|
||||
/tests/components/iqvia/ @bachya
|
||||
/homeassistant/components/irish_rail_transport/ @ttroy50
|
||||
/homeassistant/components/irm_kmi/ @jdejaegh
|
||||
/tests/components/irm_kmi/ @jdejaegh
|
||||
/homeassistant/components/iron_os/ @tr4nt0r
|
||||
/tests/components/iron_os/ @tr4nt0r
|
||||
/homeassistant/components/isal/ @bdraco
|
||||
@@ -1727,8 +1731,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/volumio/ @OnFreund
|
||||
/homeassistant/components/volvo/ @thomasddn
|
||||
/tests/components/volvo/ @thomasddn
|
||||
/homeassistant/components/volvooncall/ @molobrakos
|
||||
/tests/components/volvooncall/ @molobrakos
|
||||
/homeassistant/components/volvooncall/ @molobrakos @svrooij
|
||||
/tests/components/volvooncall/ @molobrakos @svrooij
|
||||
/homeassistant/components/wake_on_lan/ @ntilley905
|
||||
/tests/components/wake_on_lan/ @ntilley905
|
||||
/homeassistant/components/wake_word/ @home-assistant/core @synesthesiam
|
||||
|
@@ -4,10 +4,13 @@ from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import cast
|
||||
|
||||
from aioacaia.acaiascale import AcaiaScale
|
||||
from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError
|
||||
from bleak import BleakScanner
|
||||
|
||||
from homeassistant.components.bluetooth import async_get_scanner
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_ADDRESS
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -42,6 +45,7 @@ class AcaiaCoordinator(DataUpdateCoordinator[None]):
|
||||
name=entry.title,
|
||||
is_new_style_scale=entry.data[CONF_IS_NEW_STYLE_SCALE],
|
||||
notify_callback=self.async_update_listeners,
|
||||
scanner=cast(BleakScanner, async_get_scanner(hass)),
|
||||
)
|
||||
|
||||
@property
|
||||
|
@@ -26,5 +26,5 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["aioacaia"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aioacaia==0.1.14"]
|
||||
"requirements": ["aioacaia==0.1.17"]
|
||||
}
|
||||
|
@@ -3,6 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from asyncio import timeout
|
||||
from collections.abc import Mapping
|
||||
from typing import Any
|
||||
|
||||
from accuweather import AccuWeather, ApiError, InvalidApiKeyError, RequestsExceededError
|
||||
@@ -22,6 +23,8 @@ class AccuWeatherFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
"""Config flow for AccuWeather."""
|
||||
|
||||
VERSION = 1
|
||||
_latitude: float | None = None
|
||||
_longitude: float | None = None
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -74,3 +77,46 @@ class AccuWeatherFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle configuration by re-auth."""
|
||||
self._latitude = entry_data[CONF_LATITUDE]
|
||||
self._longitude = entry_data[CONF_LONGITUDE]
|
||||
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Dialog that informs the user that reauth is required."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
websession = async_get_clientsession(self.hass)
|
||||
try:
|
||||
async with timeout(10):
|
||||
accuweather = AccuWeather(
|
||||
user_input[CONF_API_KEY],
|
||||
websession,
|
||||
latitude=self._latitude,
|
||||
longitude=self._longitude,
|
||||
)
|
||||
await accuweather.async_get_location()
|
||||
except (ApiError, ClientConnectorError, TimeoutError, ClientError):
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidApiKeyError:
|
||||
errors["base"] = "invalid_api_key"
|
||||
except RequestsExceededError:
|
||||
errors["base"] = "requests_exceeded"
|
||||
else:
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reauth_entry(), data_updates=user_input
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}),
|
||||
errors=errors,
|
||||
)
|
||||
|
@@ -15,6 +15,7 @@ from aiohttp.client_exceptions import ClientConnectorError
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_NAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import (
|
||||
DataUpdateCoordinator,
|
||||
@@ -30,7 +31,7 @@ from .const import (
|
||||
UPDATE_INTERVAL_OBSERVATION,
|
||||
)
|
||||
|
||||
EXCEPTIONS = (ApiError, ClientConnectorError, InvalidApiKeyError, RequestsExceededError)
|
||||
EXCEPTIONS = (ApiError, ClientConnectorError, RequestsExceededError)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -52,6 +53,8 @@ class AccuWeatherObservationDataUpdateCoordinator(
|
||||
):
|
||||
"""Class to manage fetching AccuWeather data API."""
|
||||
|
||||
config_entry: AccuWeatherConfigEntry
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
@@ -87,6 +90,12 @@ class AccuWeatherObservationDataUpdateCoordinator(
|
||||
translation_key="current_conditions_update_error",
|
||||
translation_placeholders={"error": repr(error)},
|
||||
) from error
|
||||
except InvalidApiKeyError as err:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="auth_error",
|
||||
translation_placeholders={"entry": self.config_entry.title},
|
||||
) from err
|
||||
|
||||
_LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)
|
||||
|
||||
@@ -98,6 +107,8 @@ class AccuWeatherForecastDataUpdateCoordinator(
|
||||
):
|
||||
"""Base class for AccuWeather forecast."""
|
||||
|
||||
config_entry: AccuWeatherConfigEntry
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
@@ -137,6 +148,12 @@ class AccuWeatherForecastDataUpdateCoordinator(
|
||||
translation_key="forecast_update_error",
|
||||
translation_placeholders={"error": repr(error)},
|
||||
) from error
|
||||
except InvalidApiKeyError as err:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="auth_error",
|
||||
translation_placeholders={"entry": self.config_entry.title},
|
||||
) from err
|
||||
|
||||
_LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)
|
||||
return result
|
||||
|
@@ -7,6 +7,17 @@
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]",
|
||||
"latitude": "[%key:common::config_flow::data::latitude%]",
|
||||
"longitude": "[%key:common::config_flow::data::longitude%]"
|
||||
},
|
||||
"data_description": {
|
||||
"api_key": "API key generated in the AccuWeather APIs portal."
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"data": {
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]"
|
||||
},
|
||||
"data_description": {
|
||||
"api_key": "[%key:component::accuweather::config::step::user::data_description::api_key%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -19,7 +30,8 @@
|
||||
"requests_exceeded": "The allowed number of requests to the AccuWeather API has been exceeded. You have to wait or change the API key."
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_location%]"
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_location%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
@@ -239,6 +251,9 @@
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"auth_error": {
|
||||
"message": "Authentication failed for {entry}, please update your API key"
|
||||
},
|
||||
"current_conditions_update_error": {
|
||||
"message": "An error occurred while retrieving weather current conditions data from the AccuWeather API: {error}"
|
||||
},
|
||||
|
@@ -41,7 +41,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
if call.data.get(ATTR_ENTITY_ID) == ENTITY_MATCH_NONE:
|
||||
return []
|
||||
|
||||
call_ids = await async_extract_entity_ids(hass, call)
|
||||
call_ids = await async_extract_entity_ids(call)
|
||||
entity_ids = []
|
||||
for entity_id in hass.data[DATA_AMCREST][CAMERAS]:
|
||||
if entity_id not in call_ids:
|
||||
|
@@ -12,10 +12,25 @@ from homeassistant.helpers.event import async_call_later, async_track_time_inter
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
from .analytics import Analytics
|
||||
from .analytics import (
|
||||
Analytics,
|
||||
AnalyticsInput,
|
||||
AnalyticsModifications,
|
||||
DeviceAnalyticsModifications,
|
||||
EntityAnalyticsModifications,
|
||||
async_devices_payload,
|
||||
)
|
||||
from .const import ATTR_ONBOARDED, ATTR_PREFERENCES, DOMAIN, INTERVAL, PREFERENCE_SCHEMA
|
||||
from .http import AnalyticsDevicesView
|
||||
|
||||
__all__ = [
|
||||
"AnalyticsInput",
|
||||
"AnalyticsModifications",
|
||||
"DeviceAnalyticsModifications",
|
||||
"EntityAnalyticsModifications",
|
||||
"async_devices_payload",
|
||||
]
|
||||
|
||||
CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
|
||||
|
||||
DATA_COMPONENT: HassKey[Analytics] = HassKey(DOMAIN)
|
||||
|
@@ -4,9 +4,10 @@ from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from asyncio import timeout
|
||||
from dataclasses import asdict as dataclass_asdict, dataclass
|
||||
from collections.abc import Awaitable, Callable, Iterable, Mapping
|
||||
from dataclasses import asdict as dataclass_asdict, dataclass, field
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
from typing import Any, Protocol
|
||||
import uuid
|
||||
|
||||
import aiohttp
|
||||
@@ -35,11 +36,14 @@ from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.hassio import is_hassio
|
||||
from homeassistant.helpers.singleton import singleton
|
||||
from homeassistant.helpers.storage import Store
|
||||
from homeassistant.helpers.system_info import async_get_system_info
|
||||
from homeassistant.helpers.typing import UNDEFINED, UndefinedType
|
||||
from homeassistant.loader import (
|
||||
Integration,
|
||||
IntegrationNotFound,
|
||||
async_get_integration,
|
||||
async_get_integrations,
|
||||
)
|
||||
from homeassistant.setup import async_get_loaded_integrations
|
||||
@@ -75,12 +79,116 @@ from .const import (
|
||||
ATTR_USER_COUNT,
|
||||
ATTR_UUID,
|
||||
ATTR_VERSION,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
PREFERENCE_SCHEMA,
|
||||
STORAGE_KEY,
|
||||
STORAGE_VERSION,
|
||||
)
|
||||
|
||||
DATA_ANALYTICS_MODIFIERS = "analytics_modifiers"
|
||||
|
||||
type AnalyticsModifier = Callable[
|
||||
[HomeAssistant, AnalyticsInput], Awaitable[AnalyticsModifications]
|
||||
]
|
||||
|
||||
|
||||
@singleton(DATA_ANALYTICS_MODIFIERS)
|
||||
def _async_get_modifiers(
|
||||
hass: HomeAssistant,
|
||||
) -> dict[str, AnalyticsModifier | None]:
|
||||
"""Return the analytics modifiers."""
|
||||
return {}
|
||||
|
||||
|
||||
@dataclass
|
||||
class AnalyticsInput:
|
||||
"""Analytics input for a single integration.
|
||||
|
||||
This is sent to integrations that implement the platform.
|
||||
"""
|
||||
|
||||
device_ids: Iterable[str] = field(default_factory=list)
|
||||
entity_ids: Iterable[str] = field(default_factory=list)
|
||||
|
||||
|
||||
@dataclass
|
||||
class AnalyticsModifications:
|
||||
"""Analytics config for a single integration.
|
||||
|
||||
This is used by integrations that implement the platform.
|
||||
"""
|
||||
|
||||
remove: bool = False
|
||||
devices: Mapping[str, DeviceAnalyticsModifications] | None = None
|
||||
entities: Mapping[str, EntityAnalyticsModifications] | None = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class DeviceAnalyticsModifications:
|
||||
"""Analytics config for a single device.
|
||||
|
||||
This is used by integrations that implement the platform.
|
||||
"""
|
||||
|
||||
remove: bool = False
|
||||
|
||||
|
||||
@dataclass
|
||||
class EntityAnalyticsModifications:
|
||||
"""Analytics config for a single entity.
|
||||
|
||||
This is used by integrations that implement the platform.
|
||||
"""
|
||||
|
||||
remove: bool = False
|
||||
capabilities: dict[str, Any] | None | UndefinedType = UNDEFINED
|
||||
|
||||
|
||||
class AnalyticsPlatformProtocol(Protocol):
|
||||
"""Define the format of analytics platforms."""
|
||||
|
||||
async def async_modify_analytics(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
analytics_input: AnalyticsInput,
|
||||
) -> AnalyticsModifications:
|
||||
"""Modify the analytics."""
|
||||
|
||||
|
||||
async def _async_get_analytics_platform(
|
||||
hass: HomeAssistant, domain: str
|
||||
) -> AnalyticsPlatformProtocol | None:
|
||||
"""Get analytics platform."""
|
||||
try:
|
||||
integration = await async_get_integration(hass, domain)
|
||||
except IntegrationNotFound:
|
||||
return None
|
||||
try:
|
||||
return await integration.async_get_platform(DOMAIN)
|
||||
except ImportError:
|
||||
return None
|
||||
|
||||
|
||||
async def _async_get_modifier(
|
||||
hass: HomeAssistant, domain: str
|
||||
) -> AnalyticsModifier | None:
|
||||
"""Get analytics modifier."""
|
||||
modifiers = _async_get_modifiers(hass)
|
||||
modifier = modifiers.get(domain, UNDEFINED)
|
||||
|
||||
if modifier is not UNDEFINED:
|
||||
return modifier
|
||||
|
||||
platform = await _async_get_analytics_platform(hass, domain)
|
||||
if platform is None:
|
||||
modifiers[domain] = None
|
||||
return None
|
||||
|
||||
modifier = getattr(platform, "async_modify_analytics", None)
|
||||
modifiers[domain] = modifier
|
||||
return modifier
|
||||
|
||||
|
||||
def gen_uuid() -> str:
|
||||
"""Generate a new UUID."""
|
||||
@@ -393,17 +501,20 @@ def _domains_from_yaml_config(yaml_configuration: dict[str, Any]) -> set[str]:
|
||||
return domains
|
||||
|
||||
|
||||
async def async_devices_payload(hass: HomeAssistant) -> dict:
|
||||
DEFAULT_ANALYTICS_CONFIG = AnalyticsModifications()
|
||||
DEFAULT_DEVICE_ANALYTICS_CONFIG = DeviceAnalyticsModifications()
|
||||
DEFAULT_ENTITY_ANALYTICS_CONFIG = EntityAnalyticsModifications()
|
||||
|
||||
|
||||
async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
|
||||
"""Return detailed information about entities and devices."""
|
||||
integrations_info: dict[str, dict[str, Any]] = {}
|
||||
|
||||
dev_reg = dr.async_get(hass)
|
||||
ent_reg = er.async_get(hass)
|
||||
|
||||
# We need to refer to other devices, for example in `via_device` field.
|
||||
# We don't however send the original device ids outside of Home Assistant,
|
||||
# instead we refer to devices by (integration_domain, index_in_integration_device_list).
|
||||
device_id_mapping: dict[str, tuple[str, int]] = {}
|
||||
integration_inputs: dict[str, tuple[list[str], list[str]]] = {}
|
||||
integration_configs: dict[str, AnalyticsModifications] = {}
|
||||
|
||||
# Get device list
|
||||
for device_entry in dev_reg.devices.values():
|
||||
if not device_entry.primary_config_entry:
|
||||
continue
|
||||
@@ -416,27 +527,96 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
|
||||
continue
|
||||
|
||||
integration_domain = config_entry.domain
|
||||
|
||||
integration_input = integration_inputs.setdefault(integration_domain, ([], []))
|
||||
integration_input[0].append(device_entry.id)
|
||||
|
||||
# Get entity list
|
||||
for entity_entry in ent_reg.entities.values():
|
||||
integration_domain = entity_entry.platform
|
||||
|
||||
integration_input = integration_inputs.setdefault(integration_domain, ([], []))
|
||||
integration_input[1].append(entity_entry.entity_id)
|
||||
|
||||
# Call integrations that implement the analytics platform
|
||||
for integration_domain, integration_input in integration_inputs.items():
|
||||
if (
|
||||
modifier := await _async_get_modifier(hass, integration_domain)
|
||||
) is not None:
|
||||
try:
|
||||
integration_config = await modifier(
|
||||
hass, AnalyticsInput(*integration_input)
|
||||
)
|
||||
except Exception as err: # noqa: BLE001
|
||||
LOGGER.exception(
|
||||
"Calling async_modify_analytics for integration '%s' failed: %s",
|
||||
integration_domain,
|
||||
err,
|
||||
)
|
||||
integration_configs[integration_domain] = AnalyticsModifications(
|
||||
remove=True
|
||||
)
|
||||
continue
|
||||
|
||||
if not isinstance(integration_config, AnalyticsModifications):
|
||||
LOGGER.error( # type: ignore[unreachable]
|
||||
"Calling async_modify_analytics for integration '%s' did not return an AnalyticsConfig",
|
||||
integration_domain,
|
||||
)
|
||||
integration_configs[integration_domain] = AnalyticsModifications(
|
||||
remove=True
|
||||
)
|
||||
continue
|
||||
|
||||
integration_configs[integration_domain] = integration_config
|
||||
|
||||
integrations_info: dict[str, dict[str, Any]] = {}
|
||||
|
||||
# We need to refer to other devices, for example in `via_device` field.
|
||||
# We don't however send the original device ids outside of Home Assistant,
|
||||
# instead we refer to devices by (integration_domain, index_in_integration_device_list).
|
||||
device_id_mapping: dict[str, tuple[str, int]] = {}
|
||||
|
||||
# Fill out information about devices
|
||||
for integration_domain, integration_input in integration_inputs.items():
|
||||
integration_config = integration_configs.get(
|
||||
integration_domain, DEFAULT_ANALYTICS_CONFIG
|
||||
)
|
||||
|
||||
if integration_config.remove:
|
||||
continue
|
||||
|
||||
integration_info = integrations_info.setdefault(
|
||||
integration_domain, {"devices": [], "entities": []}
|
||||
)
|
||||
|
||||
devices_info = integration_info["devices"]
|
||||
|
||||
device_id_mapping[device_entry.id] = (integration_domain, len(devices_info))
|
||||
for device_id in integration_input[0]:
|
||||
device_config = DEFAULT_DEVICE_ANALYTICS_CONFIG
|
||||
if integration_config.devices is not None:
|
||||
device_config = integration_config.devices.get(device_id, device_config)
|
||||
|
||||
devices_info.append(
|
||||
{
|
||||
"entities": [],
|
||||
"entry_type": device_entry.entry_type,
|
||||
"has_configuration_url": device_entry.configuration_url is not None,
|
||||
"hw_version": device_entry.hw_version,
|
||||
"manufacturer": device_entry.manufacturer,
|
||||
"model": device_entry.model,
|
||||
"model_id": device_entry.model_id,
|
||||
"sw_version": device_entry.sw_version,
|
||||
"via_device": device_entry.via_device_id,
|
||||
}
|
||||
)
|
||||
if device_config.remove:
|
||||
continue
|
||||
|
||||
device_entry = dev_reg.devices[device_id]
|
||||
|
||||
device_id_mapping[device_entry.id] = (integration_domain, len(devices_info))
|
||||
|
||||
devices_info.append(
|
||||
{
|
||||
"entities": [],
|
||||
"entry_type": device_entry.entry_type,
|
||||
"has_configuration_url": device_entry.configuration_url is not None,
|
||||
"hw_version": device_entry.hw_version,
|
||||
"manufacturer": device_entry.manufacturer,
|
||||
"model": device_entry.model,
|
||||
"model_id": device_entry.model_id,
|
||||
"sw_version": device_entry.sw_version,
|
||||
"via_device": device_entry.via_device_id,
|
||||
}
|
||||
)
|
||||
|
||||
# Fill out via_device with new device ids
|
||||
for integration_info in integrations_info.values():
|
||||
@@ -445,10 +625,15 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
|
||||
continue
|
||||
device_info["via_device"] = device_id_mapping.get(device_info["via_device"])
|
||||
|
||||
ent_reg = er.async_get(hass)
|
||||
# Fill out information about entities
|
||||
for integration_domain, integration_input in integration_inputs.items():
|
||||
integration_config = integration_configs.get(
|
||||
integration_domain, DEFAULT_ANALYTICS_CONFIG
|
||||
)
|
||||
|
||||
if integration_config.remove:
|
||||
continue
|
||||
|
||||
for entity_entry in ent_reg.entities.values():
|
||||
integration_domain = entity_entry.platform
|
||||
integration_info = integrations_info.setdefault(
|
||||
integration_domain, {"devices": [], "entities": []}
|
||||
)
|
||||
@@ -456,35 +641,52 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
|
||||
devices_info = integration_info["devices"]
|
||||
entities_info = integration_info["entities"]
|
||||
|
||||
entity_state = hass.states.get(entity_entry.entity_id)
|
||||
for entity_id in integration_input[1]:
|
||||
entity_config = DEFAULT_ENTITY_ANALYTICS_CONFIG
|
||||
if integration_config.entities is not None:
|
||||
entity_config = integration_config.entities.get(
|
||||
entity_id, entity_config
|
||||
)
|
||||
|
||||
entity_info = {
|
||||
# LIMITATION: `assumed_state` can be overridden by users;
|
||||
# we should replace it with the original value in the future.
|
||||
# It is also not present, if entity is not in the state machine,
|
||||
# which can happen for disabled entities.
|
||||
"assumed_state": entity_state.attributes.get(ATTR_ASSUMED_STATE, False)
|
||||
if entity_state is not None
|
||||
else None,
|
||||
"capabilities": entity_entry.capabilities,
|
||||
"domain": entity_entry.domain,
|
||||
"entity_category": entity_entry.entity_category,
|
||||
"has_entity_name": entity_entry.has_entity_name,
|
||||
"original_device_class": entity_entry.original_device_class,
|
||||
# LIMITATION: `unit_of_measurement` can be overridden by users;
|
||||
# we should replace it with the original value in the future.
|
||||
"unit_of_measurement": entity_entry.unit_of_measurement,
|
||||
}
|
||||
if entity_config.remove:
|
||||
continue
|
||||
|
||||
if (
|
||||
((device_id := entity_entry.device_id) is not None)
|
||||
and ((new_device_id := device_id_mapping.get(device_id)) is not None)
|
||||
and (new_device_id[0] == integration_domain)
|
||||
):
|
||||
device_info = devices_info[new_device_id[1]]
|
||||
device_info["entities"].append(entity_info)
|
||||
else:
|
||||
entities_info.append(entity_info)
|
||||
entity_entry = ent_reg.entities[entity_id]
|
||||
|
||||
entity_state = hass.states.get(entity_entry.entity_id)
|
||||
|
||||
entity_info = {
|
||||
# LIMITATION: `assumed_state` can be overridden by users;
|
||||
# we should replace it with the original value in the future.
|
||||
# It is also not present, if entity is not in the state machine,
|
||||
# which can happen for disabled entities.
|
||||
"assumed_state": entity_state.attributes.get(ATTR_ASSUMED_STATE, False)
|
||||
if entity_state is not None
|
||||
else None,
|
||||
"capabilities": entity_config.capabilities
|
||||
if entity_config.capabilities is not UNDEFINED
|
||||
else entity_entry.capabilities,
|
||||
"domain": entity_entry.domain,
|
||||
"entity_category": entity_entry.entity_category,
|
||||
"has_entity_name": entity_entry.has_entity_name,
|
||||
"modified_by_integration": ["capabilities"]
|
||||
if entity_config.capabilities is not UNDEFINED
|
||||
else None,
|
||||
"original_device_class": entity_entry.original_device_class,
|
||||
# LIMITATION: `unit_of_measurement` can be overridden by users;
|
||||
# we should replace it with the original value in the future.
|
||||
"unit_of_measurement": entity_entry.unit_of_measurement,
|
||||
}
|
||||
|
||||
if (
|
||||
((device_id_ := entity_entry.device_id) is not None)
|
||||
and ((new_device_id := device_id_mapping.get(device_id_)) is not None)
|
||||
and (new_device_id[0] == integration_domain)
|
||||
):
|
||||
device_info = devices_info[new_device_id[1]]
|
||||
device_info["entities"].append(entity_info)
|
||||
else:
|
||||
entities_info.append(entity_info)
|
||||
|
||||
integrations = {
|
||||
domain: integration
|
||||
|
@@ -2,7 +2,7 @@
|
||||
"domain": "analytics",
|
||||
"name": "Analytics",
|
||||
"after_dependencies": ["energy", "hassio", "recorder"],
|
||||
"codeowners": ["@home-assistant/core", "@ludeeus"],
|
||||
"codeowners": ["@home-assistant/core"],
|
||||
"dependencies": ["api", "websocket_api", "http"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/analytics",
|
||||
"integration_type": "system",
|
||||
|
@@ -109,7 +109,7 @@ class AssistPipelineSelect(SelectEntity, restore_state.RestoreEntity):
|
||||
)
|
||||
|
||||
state = await self.async_get_last_state()
|
||||
if state is not None and state.state in self.options:
|
||||
if (state is not None) and (state.state in self.options):
|
||||
self._attr_current_option = state.state
|
||||
|
||||
if self.registry_entry and (device_id := self.registry_entry.device_id):
|
||||
@@ -119,7 +119,7 @@ class AssistPipelineSelect(SelectEntity, restore_state.RestoreEntity):
|
||||
|
||||
def cleanup() -> None:
|
||||
"""Clean up registered device."""
|
||||
pipeline_data.pipeline_devices.pop(device_id)
|
||||
pipeline_data.pipeline_devices.pop(device_id, None)
|
||||
|
||||
self.async_on_remove(cleanup)
|
||||
|
||||
|
@@ -2,13 +2,12 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable, Coroutine
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import ClientResponseError
|
||||
from yalexs.activity import ActivityType, ActivityTypes
|
||||
from yalexs.lock import Lock, LockStatus
|
||||
from yalexs.activity import ActivityType
|
||||
from yalexs.lock import Lock, LockOperation, LockStatus
|
||||
from yalexs.util import get_latest_activity, update_lock_detail_from_activity
|
||||
|
||||
from homeassistant.components.lock import ATTR_CHANGED_BY, LockEntity, LockEntityFeature
|
||||
@@ -50,30 +49,25 @@ class AugustLock(AugustEntity, RestoreEntity, LockEntity):
|
||||
|
||||
async def async_lock(self, **kwargs: Any) -> None:
|
||||
"""Lock the device."""
|
||||
if self._data.push_updates_connected:
|
||||
await self._data.async_lock_async(self._device_id, self._hyper_bridge)
|
||||
return
|
||||
await self._call_lock_operation(self._data.async_lock)
|
||||
await self._perform_lock_operation(LockOperation.LOCK)
|
||||
|
||||
async def async_open(self, **kwargs: Any) -> None:
|
||||
"""Open/unlatch the device."""
|
||||
if self._data.push_updates_connected:
|
||||
await self._data.async_unlatch_async(self._device_id, self._hyper_bridge)
|
||||
return
|
||||
await self._call_lock_operation(self._data.async_unlatch)
|
||||
await self._perform_lock_operation(LockOperation.OPEN)
|
||||
|
||||
async def async_unlock(self, **kwargs: Any) -> None:
|
||||
"""Unlock the device."""
|
||||
if self._data.push_updates_connected:
|
||||
await self._data.async_unlock_async(self._device_id, self._hyper_bridge)
|
||||
return
|
||||
await self._call_lock_operation(self._data.async_unlock)
|
||||
await self._perform_lock_operation(LockOperation.UNLOCK)
|
||||
|
||||
async def _call_lock_operation(
|
||||
self, lock_operation: Callable[[str], Coroutine[Any, Any, list[ActivityTypes]]]
|
||||
) -> None:
|
||||
async def _perform_lock_operation(self, operation: LockOperation) -> None:
|
||||
"""Perform a lock operation."""
|
||||
try:
|
||||
activities = await lock_operation(self._device_id)
|
||||
activities = await self._data.async_operate_lock(
|
||||
self._device_id,
|
||||
operation,
|
||||
self._data.push_updates_connected,
|
||||
self._hyper_bridge,
|
||||
)
|
||||
except ClientResponseError as err:
|
||||
if err.status == LOCK_JAMMED_ERR:
|
||||
self._detail.lock_status = LockStatus.JAMMED
|
||||
|
24
homeassistant/components/automation/analytics.py
Normal file
24
homeassistant/components/automation/analytics.py
Normal file
@@ -0,0 +1,24 @@
|
||||
"""Analytics platform."""
|
||||
|
||||
from homeassistant.components.analytics import (
|
||||
AnalyticsInput,
|
||||
AnalyticsModifications,
|
||||
EntityAnalyticsModifications,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
|
||||
async def async_modify_analytics(
|
||||
hass: HomeAssistant, analytics_input: AnalyticsInput
|
||||
) -> AnalyticsModifications:
|
||||
"""Modify the analytics."""
|
||||
ent_reg = er.async_get(hass)
|
||||
|
||||
entities: dict[str, EntityAnalyticsModifications] = {}
|
||||
for entity_id in analytics_input.entity_ids:
|
||||
entity_entry = ent_reg.entities[entity_id]
|
||||
if entity_entry.capabilities is not None:
|
||||
entities[entity_id] = EntityAnalyticsModifications(capabilities=None)
|
||||
|
||||
return AnalyticsModifications(entities=entities)
|
@@ -205,6 +205,7 @@ class BringActivityCoordinator(BringBaseCoordinator[dict[str, BringActivityData]
|
||||
|
||||
async def _async_update_data(self) -> dict[str, BringActivityData]:
|
||||
"""Fetch activity data from bring."""
|
||||
self.lists = self.coordinator.lists
|
||||
|
||||
list_dict: dict[str, BringActivityData] = {}
|
||||
for lst in self.lists:
|
||||
|
@@ -43,7 +43,7 @@ async def async_setup_entry(
|
||||
)
|
||||
lists_added |= new_lists
|
||||
|
||||
coordinator.activity.async_add_listener(add_entities)
|
||||
coordinator.data.async_add_listener(add_entities)
|
||||
add_entities()
|
||||
|
||||
|
||||
@@ -67,7 +67,8 @@ class BringEventEntity(BringBaseEntity, EventEntity):
|
||||
|
||||
def _async_handle_event(self) -> None:
|
||||
"""Handle the activity event."""
|
||||
bring_list = self.coordinator.data[self._list_uuid]
|
||||
if (bring_list := self.coordinator.data.get(self._list_uuid)) is None:
|
||||
return
|
||||
last_event_triggered = self.state
|
||||
if bring_list.activity.timeline and (
|
||||
last_event_triggered is None
|
||||
|
@@ -25,7 +25,11 @@ async def async_subscription_info(cloud: Cloud[CloudClient]) -> SubscriptionInfo
|
||||
return await cloud.payments.subscription_info()
|
||||
except PaymentsApiError as exception:
|
||||
_LOGGER.error("Failed to fetch subscription information - %s", exception)
|
||||
|
||||
except TimeoutError:
|
||||
_LOGGER.error(
|
||||
"A timeout of %s was reached while trying to fetch subscription information",
|
||||
REQUEST_TIMEOUT,
|
||||
)
|
||||
return None
|
||||
|
||||
|
||||
|
@@ -2,7 +2,7 @@
|
||||
|
||||
from abc import abstractmethod
|
||||
from datetime import timedelta
|
||||
from typing import TypeVar
|
||||
from typing import Any, TypeVar
|
||||
|
||||
from aiocomelit.api import (
|
||||
AlarmDataObject,
|
||||
@@ -13,7 +13,16 @@ from aiocomelit.api import (
|
||||
ComelitVedoAreaObject,
|
||||
ComelitVedoZoneObject,
|
||||
)
|
||||
from aiocomelit.const import BRIDGE, VEDO
|
||||
from aiocomelit.const import (
|
||||
BRIDGE,
|
||||
CLIMATE,
|
||||
COVER,
|
||||
IRRIGATION,
|
||||
LIGHT,
|
||||
OTHER,
|
||||
SCENARIO,
|
||||
VEDO,
|
||||
)
|
||||
from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData
|
||||
from aiohttp import ClientSession
|
||||
|
||||
@@ -111,6 +120,32 @@ class ComelitBaseCoordinator(DataUpdateCoordinator[T]):
|
||||
async def _async_update_system_data(self) -> T:
|
||||
"""Class method for updating data."""
|
||||
|
||||
async def _async_remove_stale_devices(
|
||||
self,
|
||||
previous_list: dict[int, Any],
|
||||
current_list: dict[int, Any],
|
||||
dev_type: str,
|
||||
) -> None:
|
||||
"""Remove stale devices."""
|
||||
device_registry = dr.async_get(self.hass)
|
||||
|
||||
for i in previous_list:
|
||||
if i not in current_list:
|
||||
_LOGGER.debug(
|
||||
"Detected change in %s devices: index %s removed",
|
||||
dev_type,
|
||||
i,
|
||||
)
|
||||
identifier = f"{self.config_entry.entry_id}-{dev_type}-{i}"
|
||||
device = device_registry.async_get_device(
|
||||
identifiers={(DOMAIN, identifier)}
|
||||
)
|
||||
if device:
|
||||
device_registry.async_update_device(
|
||||
device_id=device.id,
|
||||
remove_config_entry_id=self.config_entry.entry_id,
|
||||
)
|
||||
|
||||
|
||||
class ComelitSerialBridge(
|
||||
ComelitBaseCoordinator[dict[str, dict[int, ComelitSerialBridgeObject]]]
|
||||
@@ -137,7 +172,15 @@ class ComelitSerialBridge(
|
||||
self,
|
||||
) -> dict[str, dict[int, ComelitSerialBridgeObject]]:
|
||||
"""Specific method for updating data."""
|
||||
return await self.api.get_all_devices()
|
||||
data = await self.api.get_all_devices()
|
||||
|
||||
if self.data:
|
||||
for dev_type in (CLIMATE, COVER, LIGHT, IRRIGATION, OTHER, SCENARIO):
|
||||
await self._async_remove_stale_devices(
|
||||
self.data[dev_type], data[dev_type], dev_type
|
||||
)
|
||||
|
||||
return data
|
||||
|
||||
|
||||
class ComelitVedoSystem(ComelitBaseCoordinator[AlarmDataObject]):
|
||||
@@ -163,4 +206,14 @@ class ComelitVedoSystem(ComelitBaseCoordinator[AlarmDataObject]):
|
||||
self,
|
||||
) -> AlarmDataObject:
|
||||
"""Specific method for updating data."""
|
||||
return await self.api.get_all_areas_and_zones()
|
||||
data = await self.api.get_all_areas_and_zones()
|
||||
|
||||
if self.data:
|
||||
for obj_type in ("alarm_areas", "alarm_zones"):
|
||||
await self._async_remove_stale_devices(
|
||||
self.data[obj_type],
|
||||
data[obj_type],
|
||||
"area" if obj_type == "alarm_areas" else "zone",
|
||||
)
|
||||
|
||||
return data
|
||||
|
@@ -72,9 +72,7 @@ rules:
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: no known use cases for repair issues or flows, yet
|
||||
stale-devices:
|
||||
status: todo
|
||||
comment: missing implementation
|
||||
stale-devices: done
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
|
45
homeassistant/components/compit/__init__.py
Normal file
45
homeassistant/components/compit/__init__.py
Normal file
@@ -0,0 +1,45 @@
|
||||
"""The Compit integration."""
|
||||
|
||||
from compit_inext_api import CannotConnect, CompitApiConnector, InvalidAuth
|
||||
|
||||
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .coordinator import CompitConfigEntry, CompitDataUpdateCoordinator
|
||||
|
||||
PLATFORMS = [
|
||||
Platform.CLIMATE,
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: CompitConfigEntry) -> bool:
|
||||
"""Set up Compit from a config entry."""
|
||||
|
||||
session = async_get_clientsession(hass)
|
||||
connector = CompitApiConnector(session)
|
||||
try:
|
||||
connected = await connector.init(
|
||||
entry.data[CONF_EMAIL], entry.data[CONF_PASSWORD], hass.config.language
|
||||
)
|
||||
except CannotConnect as e:
|
||||
raise ConfigEntryNotReady(f"Error while connecting to Compit: {e}") from e
|
||||
except InvalidAuth as e:
|
||||
raise ConfigEntryAuthFailed(
|
||||
f"Invalid credentials for {entry.data[CONF_EMAIL]}"
|
||||
) from e
|
||||
|
||||
if not connected:
|
||||
raise ConfigEntryAuthFailed("Authentication API error")
|
||||
|
||||
coordinator = CompitDataUpdateCoordinator(hass, entry, connector)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
entry.runtime_data = coordinator
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: CompitConfigEntry) -> bool:
|
||||
"""Unload an entry for the Compit integration."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
265
homeassistant/components/compit/climate.py
Normal file
265
homeassistant/components/compit/climate.py
Normal file
@@ -0,0 +1,265 @@
|
||||
"""Module contains the CompitClimate class for controlling climate entities."""
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from compit_inext_api import Param, Parameter
|
||||
from compit_inext_api.consts import (
|
||||
CompitFanMode,
|
||||
CompitHVACMode,
|
||||
CompitParameter,
|
||||
CompitPresetMode,
|
||||
)
|
||||
from propcache.api import cached_property
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
FAN_AUTO,
|
||||
FAN_HIGH,
|
||||
FAN_LOW,
|
||||
FAN_MEDIUM,
|
||||
FAN_OFF,
|
||||
PRESET_AWAY,
|
||||
PRESET_ECO,
|
||||
PRESET_HOME,
|
||||
PRESET_NONE,
|
||||
ClimateEntity,
|
||||
ClimateEntityFeature,
|
||||
HVACMode,
|
||||
)
|
||||
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN, MANUFACTURER_NAME
|
||||
from .coordinator import CompitConfigEntry, CompitDataUpdateCoordinator
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
# Device class for climate devices in Compit system
|
||||
CLIMATE_DEVICE_CLASS = 10
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
COMPIT_MODE_MAP = {
|
||||
CompitHVACMode.COOL: HVACMode.COOL,
|
||||
CompitHVACMode.HEAT: HVACMode.HEAT,
|
||||
CompitHVACMode.OFF: HVACMode.OFF,
|
||||
}
|
||||
|
||||
COMPIT_FANSPEED_MAP = {
|
||||
CompitFanMode.OFF: FAN_OFF,
|
||||
CompitFanMode.AUTO: FAN_AUTO,
|
||||
CompitFanMode.LOW: FAN_LOW,
|
||||
CompitFanMode.MEDIUM: FAN_MEDIUM,
|
||||
CompitFanMode.HIGH: FAN_HIGH,
|
||||
CompitFanMode.HOLIDAY: FAN_AUTO,
|
||||
}
|
||||
|
||||
COMPIT_PRESET_MAP = {
|
||||
CompitPresetMode.AUTO: PRESET_HOME,
|
||||
CompitPresetMode.HOLIDAY: PRESET_ECO,
|
||||
CompitPresetMode.MANUAL: PRESET_NONE,
|
||||
CompitPresetMode.AWAY: PRESET_AWAY,
|
||||
}
|
||||
|
||||
HVAC_MODE_TO_COMPIT_MODE = {v: k for k, v in COMPIT_MODE_MAP.items()}
|
||||
FAN_MODE_TO_COMPIT_FAN_MODE = {v: k for k, v in COMPIT_FANSPEED_MAP.items()}
|
||||
PRESET_MODE_TO_COMPIT_PRESET_MODE = {v: k for k, v in COMPIT_PRESET_MAP.items()}
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: CompitConfigEntry,
|
||||
async_add_devices: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the CompitClimate platform from a config entry."""
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
climate_entities = []
|
||||
for device_id in coordinator.connector.all_devices:
|
||||
device = coordinator.connector.all_devices[device_id]
|
||||
|
||||
if device.definition.device_class == CLIMATE_DEVICE_CLASS:
|
||||
climate_entities.append(
|
||||
CompitClimate(
|
||||
coordinator,
|
||||
device_id,
|
||||
{
|
||||
parameter.parameter_code: parameter
|
||||
for parameter in device.definition.parameters
|
||||
},
|
||||
device.definition.name,
|
||||
)
|
||||
)
|
||||
|
||||
async_add_devices(climate_entities)
|
||||
|
||||
|
||||
class CompitClimate(CoordinatorEntity[CompitDataUpdateCoordinator], ClimateEntity):
|
||||
"""Representation of a Compit climate device."""
|
||||
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
_attr_hvac_modes = [*COMPIT_MODE_MAP.values()]
|
||||
_attr_name = None
|
||||
_attr_has_entity_name = True
|
||||
_attr_supported_features = (
|
||||
ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
| ClimateEntityFeature.FAN_MODE
|
||||
| ClimateEntityFeature.PRESET_MODE
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: CompitDataUpdateCoordinator,
|
||||
device_id: int,
|
||||
parameters: dict[str, Parameter],
|
||||
device_name: str,
|
||||
) -> None:
|
||||
"""Initialize the climate device."""
|
||||
super().__init__(coordinator)
|
||||
self._attr_unique_id = f"{device_name}_{device_id}"
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, str(device_id))},
|
||||
name=device_name,
|
||||
manufacturer=MANUFACTURER_NAME,
|
||||
model=device_name,
|
||||
)
|
||||
|
||||
self.parameters = parameters
|
||||
self.device_id = device_id
|
||||
self.available_presets: Parameter | None = self.parameters.get(
|
||||
CompitParameter.PRESET_MODE.value
|
||||
)
|
||||
self.available_fan_modes: Parameter | None = self.parameters.get(
|
||||
CompitParameter.FAN_MODE.value
|
||||
)
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if entity is available."""
|
||||
return (
|
||||
super().available
|
||||
and self.device_id in self.coordinator.connector.all_devices
|
||||
)
|
||||
|
||||
@property
|
||||
def current_temperature(self) -> float | None:
|
||||
"""Return the current temperature."""
|
||||
value = self.get_parameter_value(CompitParameter.CURRENT_TEMPERATURE)
|
||||
if value is None:
|
||||
return None
|
||||
return float(value.value)
|
||||
|
||||
@property
|
||||
def target_temperature(self) -> float | None:
|
||||
"""Return the temperature we try to reach."""
|
||||
value = self.get_parameter_value(CompitParameter.SET_TARGET_TEMPERATURE)
|
||||
if value is None:
|
||||
return None
|
||||
return float(value.value)
|
||||
|
||||
@cached_property
|
||||
def preset_modes(self) -> list[str] | None:
|
||||
"""Return the available preset modes."""
|
||||
if self.available_presets is None or self.available_presets.details is None:
|
||||
return []
|
||||
|
||||
preset_modes = []
|
||||
for item in self.available_presets.details:
|
||||
if item is not None:
|
||||
ha_preset = COMPIT_PRESET_MAP.get(CompitPresetMode(item.state))
|
||||
if ha_preset and ha_preset not in preset_modes:
|
||||
preset_modes.append(ha_preset)
|
||||
|
||||
return preset_modes
|
||||
|
||||
@cached_property
|
||||
def fan_modes(self) -> list[str] | None:
|
||||
"""Return the available fan modes."""
|
||||
if self.available_fan_modes is None or self.available_fan_modes.details is None:
|
||||
return []
|
||||
|
||||
fan_modes = []
|
||||
for item in self.available_fan_modes.details:
|
||||
if item is not None:
|
||||
ha_fan_mode = COMPIT_FANSPEED_MAP.get(CompitFanMode(item.state))
|
||||
if ha_fan_mode and ha_fan_mode not in fan_modes:
|
||||
fan_modes.append(ha_fan_mode)
|
||||
|
||||
return fan_modes
|
||||
|
||||
@property
|
||||
def preset_mode(self) -> str | None:
|
||||
"""Return the current preset mode."""
|
||||
preset_mode = self.get_parameter_value(CompitParameter.PRESET_MODE)
|
||||
|
||||
if preset_mode:
|
||||
compit_preset_mode = CompitPresetMode(preset_mode.value)
|
||||
return COMPIT_PRESET_MAP.get(compit_preset_mode)
|
||||
return None
|
||||
|
||||
@property
|
||||
def fan_mode(self) -> str | None:
|
||||
"""Return the current fan mode."""
|
||||
fan_mode = self.get_parameter_value(CompitParameter.FAN_MODE)
|
||||
if fan_mode:
|
||||
compit_fan_mode = CompitFanMode(fan_mode.value)
|
||||
return COMPIT_FANSPEED_MAP.get(compit_fan_mode)
|
||||
return None
|
||||
|
||||
@property
|
||||
def hvac_mode(self) -> HVACMode | None:
|
||||
"""Return the current HVAC mode."""
|
||||
hvac_mode = self.get_parameter_value(CompitParameter.HVAC_MODE)
|
||||
if hvac_mode:
|
||||
compit_hvac_mode = CompitHVACMode(hvac_mode.value)
|
||||
return COMPIT_MODE_MAP.get(compit_hvac_mode)
|
||||
return None
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set new target temperature."""
|
||||
temp = kwargs.get(ATTR_TEMPERATURE)
|
||||
if temp is None:
|
||||
raise ServiceValidationError("Temperature argument missing")
|
||||
await self.set_parameter_value(CompitParameter.SET_TARGET_TEMPERATURE, temp)
|
||||
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Set new target HVAC mode."""
|
||||
|
||||
if not (mode := HVAC_MODE_TO_COMPIT_MODE.get(hvac_mode)):
|
||||
raise ServiceValidationError(f"Invalid hvac mode {hvac_mode}")
|
||||
|
||||
await self.set_parameter_value(CompitParameter.HVAC_MODE, mode.value)
|
||||
|
||||
async def async_set_preset_mode(self, preset_mode: str) -> None:
|
||||
"""Set new target preset mode."""
|
||||
|
||||
compit_preset = PRESET_MODE_TO_COMPIT_PRESET_MODE.get(preset_mode)
|
||||
if compit_preset is None:
|
||||
raise ServiceValidationError(f"Invalid preset mode: {preset_mode}")
|
||||
|
||||
await self.set_parameter_value(CompitParameter.PRESET_MODE, compit_preset.value)
|
||||
|
||||
async def async_set_fan_mode(self, fan_mode: str) -> None:
|
||||
"""Set new target fan mode."""
|
||||
|
||||
compit_fan_mode = FAN_MODE_TO_COMPIT_FAN_MODE.get(fan_mode)
|
||||
if compit_fan_mode is None:
|
||||
raise ServiceValidationError(f"Invalid fan mode: {fan_mode}")
|
||||
|
||||
await self.set_parameter_value(CompitParameter.FAN_MODE, compit_fan_mode.value)
|
||||
|
||||
async def set_parameter_value(self, parameter: CompitParameter, value: int) -> None:
|
||||
"""Call the API to set a parameter to a new value."""
|
||||
await self.coordinator.connector.set_device_parameter(
|
||||
self.device_id, parameter, value
|
||||
)
|
||||
self.async_write_ha_state()
|
||||
|
||||
def get_parameter_value(self, parameter: CompitParameter) -> Param | None:
|
||||
"""Get the parameter value from the device state."""
|
||||
return self.coordinator.connector.get_device_parameter(
|
||||
self.device_id, parameter
|
||||
)
|
110
homeassistant/components/compit/config_flow.py
Normal file
110
homeassistant/components/compit/config_flow.py
Normal file
@@ -0,0 +1,110 @@
|
||||
"""Config flow for Compit integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from compit_inext_api import CannotConnect, CompitApiConnector, InvalidAuth
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
|
||||
from homeassistant.helpers.aiohttp_client import async_create_clientsession
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_EMAIL): str,
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
}
|
||||
)
|
||||
|
||||
STEP_REAUTH_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class CompitConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Compit."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
async def async_step_user(
|
||||
self,
|
||||
user_input: dict[str, Any] | None = None,
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
session = async_create_clientsession(self.hass)
|
||||
api = CompitApiConnector(session)
|
||||
success = False
|
||||
try:
|
||||
success = await api.init(
|
||||
user_input[CONF_EMAIL],
|
||||
user_input[CONF_PASSWORD],
|
||||
self.hass.config.language,
|
||||
)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
if not success:
|
||||
# Api returned unexpected result but no exception
|
||||
_LOGGER.error("Compit api returned unexpected result")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
await self.async_set_unique_id(user_input[CONF_EMAIL])
|
||||
|
||||
if self.source == SOURCE_REAUTH:
|
||||
self._abort_if_unique_id_mismatch()
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reauth_entry(), data_updates=user_input
|
||||
)
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
title=user_input[CONF_EMAIL], data=user_input
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
async def async_step_reauth(self, data: Mapping[str, Any]) -> ConfigFlowResult:
|
||||
"""Handle re-auth."""
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Confirm re-authentication."""
|
||||
errors: dict[str, str] = {}
|
||||
reauth_entry = self._get_reauth_entry()
|
||||
reauth_entry_data = reauth_entry.data
|
||||
|
||||
if user_input:
|
||||
# Reuse async_step_user with combined credentials
|
||||
return await self.async_step_user(
|
||||
{
|
||||
CONF_EMAIL: reauth_entry_data[CONF_EMAIL],
|
||||
CONF_PASSWORD: user_input[CONF_PASSWORD],
|
||||
}
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
data_schema=STEP_REAUTH_SCHEMA,
|
||||
description_placeholders={CONF_EMAIL: reauth_entry_data[CONF_EMAIL]},
|
||||
errors=errors,
|
||||
)
|
4
homeassistant/components/compit/const.py
Normal file
4
homeassistant/components/compit/const.py
Normal file
@@ -0,0 +1,4 @@
|
||||
"""Constants for the Compit integration."""
|
||||
|
||||
DOMAIN = "compit"
|
||||
MANUFACTURER_NAME = "Compit"
|

homeassistant/components/compit/coordinator.py (new file)
@@ -0,0 +1,43 @@
"""Define an object to manage fetching Compit data."""

from datetime import timedelta
import logging

from compit_inext_api import CompitApiConnector, DeviceInstance

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

from .const import DOMAIN

SCAN_INTERVAL = timedelta(seconds=30)
_LOGGER: logging.Logger = logging.getLogger(__name__)

type CompitConfigEntry = ConfigEntry[CompitDataUpdateCoordinator]


class CompitDataUpdateCoordinator(DataUpdateCoordinator[dict[int, DeviceInstance]]):
    """Class to manage fetching data from the API."""

    def __init__(
        self,
        hass: HomeAssistant,
        config_entry: ConfigEntry,
        connector: CompitApiConnector,
    ) -> None:
        """Initialize."""
        self.connector = connector

        super().__init__(
            hass,
            _LOGGER,
            name=DOMAIN,
            update_interval=SCAN_INTERVAL,
            config_entry=config_entry,
        )

    async def _async_update_data(self) -> dict[int, DeviceInstance]:
        """Update data via library."""
        await self.connector.update_state(device_id=None)  # Update all devices
        return self.connector.all_devices
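
Illustrative only, not part of this changeset: the integration's __init__.py is not included in this excerpt, but a coordinator of this shape is typically wired up along the following lines (the platform list and the exact error handling are assumptions):

    # Hypothetical sketch of homeassistant/components/compit/__init__.py;
    # the real file is not shown in this excerpt.
    from compit_inext_api import CompitApiConnector

    from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform
    from homeassistant.core import HomeAssistant
    from homeassistant.exceptions import ConfigEntryAuthFailed
    from homeassistant.helpers.aiohttp_client import async_create_clientsession

    from .coordinator import CompitConfigEntry, CompitDataUpdateCoordinator

    PLATFORMS = [Platform.CLIMATE]  # assumed platform list


    async def async_setup_entry(hass: HomeAssistant, entry: CompitConfigEntry) -> bool:
        """Set up Compit from a config entry (sketch)."""
        connector = CompitApiConnector(async_create_clientsession(hass))
        if not await connector.init(
            entry.data[CONF_EMAIL], entry.data[CONF_PASSWORD], hass.config.language
        ):
            raise ConfigEntryAuthFailed("Invalid credentials")

        # First refresh raises ConfigEntryNotReady on failure, aborting setup cleanly
        coordinator = CompitDataUpdateCoordinator(hass, entry, connector)
        await coordinator.async_config_entry_first_refresh()
        entry.runtime_data = coordinator

        await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
        return True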

homeassistant/components/compit/manifest.json (new file)
@@ -0,0 +1,12 @@
{
  "domain": "compit",
  "name": "Compit",
  "codeowners": ["@Przemko92"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/compit",
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "loggers": ["compit"],
  "quality_scale": "bronze",
  "requirements": ["compit-inext-api==0.3.1"]
}

homeassistant/components/compit/quality_scale.yaml (new file)
@@ -0,0 +1,86 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: |
      This integration does not provide additional actions.
  appropriate-polling: done
  brands: done
  common-modules:
    status: exempt
    comment: |
      This integration does not use any common modules.
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: |
      This integration does not provide additional actions.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: |
      Entities of this integration do not explicitly subscribe to events.
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions:
    status: exempt
    comment: |
      This integration does not provide additional actions.
  config-entry-unloading: done
  docs-configuration-parameters:
    status: exempt
    comment: |
      This integration does not have an options flow.
  docs-installation-parameters: done
  entity-unavailable: todo
  integration-owner: done
  log-when-unavailable: todo
  parallel-updates: done
  reauthentication-flow: done
  test-coverage: todo

  # Gold
  devices: done
  diagnostics: todo
  discovery-update-info:
    status: exempt
    comment: |
      This integration is a cloud service and does not support discovery.
  discovery: todo
  docs-data-update: todo
  docs-examples: todo
  docs-known-limitations: todo
  docs-supported-devices: done
  docs-supported-functions: todo
  docs-troubleshooting: todo
  docs-use-cases: todo
  dynamic-devices: todo
  entity-category: done
  entity-device-class: done
  entity-disabled-by-default:
    status: exempt
    comment: |
      This integration does not have any entities that should be disabled by default.
  entity-translations: done
  exception-translations: todo
  icon-translations:
    status: exempt
    comment: |
      There is no need for icon translations.
  reconfiguration-flow: todo
  repair-issues: todo
  stale-devices: todo
  # Platinum
  async-dependency: done
  inject-websession: todo
  strict-typing: done

homeassistant/components/compit/strings.json (new file)
@@ -0,0 +1,35 @@
{
  "config": {
    "step": {
      "user": {
        "description": "Please enter your https://inext.compit.pl/ credentials.",
        "title": "Connect to Compit iNext",
        "data": {
          "email": "[%key:common::config_flow::data::email%]",
          "password": "[%key:common::config_flow::data::password%]"
        },
        "data_description": {
          "email": "The email address of your inext.compit.pl account",
          "password": "The password of your inext.compit.pl account"
        }
      },
      "reauth_confirm": {
        "description": "Please update your password for {email}",
        "data": {
          "password": "[%key:common::config_flow::data::password%]"
        },
        "data_description": {
          "password": "[%key:component::compit::config::step::user::data_description::password%]"
        }
      }
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "abort": {
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
    }
  }
}
@@ -8,7 +8,13 @@ from typing import TYPE_CHECKING, Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.core import Context, HomeAssistant, async_get_hass, callback
|
||||
from homeassistant.core import (
|
||||
CALLBACK_TYPE,
|
||||
Context,
|
||||
HomeAssistant,
|
||||
async_get_hass,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv, intent, singleton
|
||||
|
||||
@@ -30,6 +36,7 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .default_agent import DefaultAgent
|
||||
from .trigger import TriggerDetails
|
||||
|
||||
|
||||
@singleton.singleton("conversation_agent")
|
||||
@@ -140,6 +147,7 @@ class AgentManager:
|
||||
self.hass = hass
|
||||
self._agents: dict[str, AbstractConversationAgent] = {}
|
||||
self.default_agent: DefaultAgent | None = None
|
||||
self.triggers_details: list[TriggerDetails] = []
|
||||
|
||||
@callback
|
||||
def async_get_agent(self, agent_id: str) -> AbstractConversationAgent | None:
|
||||
@@ -191,4 +199,20 @@ class AgentManager:
|
||||
|
||||
async def async_setup_default_agent(self, agent: DefaultAgent) -> None:
|
||||
"""Set up the default agent."""
|
||||
agent.update_triggers(self.triggers_details)
|
||||
self.default_agent = agent
|
||||
|
||||
def register_trigger(self, trigger_details: TriggerDetails) -> CALLBACK_TYPE:
|
||||
"""Register a trigger."""
|
||||
self.triggers_details.append(trigger_details)
|
||||
if self.default_agent is not None:
|
||||
self.default_agent.update_triggers(self.triggers_details)
|
||||
|
||||
@callback
|
||||
def unregister_trigger() -> None:
|
||||
"""Unregister the trigger."""
|
||||
self.triggers_details.remove(trigger_details)
|
||||
if self.default_agent is not None:
|
||||
self.default_agent.update_triggers(self.triggers_details)
|
||||
|
||||
return unregister_trigger
|
||||
|
@@ -4,13 +4,11 @@ from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections import OrderedDict
|
||||
from collections.abc import Awaitable, Callable, Iterable
|
||||
from collections.abc import Callable, Iterable
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum, auto
|
||||
import functools
|
||||
import logging
|
||||
from pathlib import Path
|
||||
import re
|
||||
import time
|
||||
from typing import IO, Any, cast
|
||||
|
||||
@@ -53,6 +51,7 @@ from homeassistant.components.homeassistant.exposed_entities import (
|
||||
async_should_expose,
|
||||
)
|
||||
from homeassistant.const import EVENT_STATE_CHANGED, MATCH_ALL
|
||||
from homeassistant.core import Event, callback
|
||||
from homeassistant.helpers import (
|
||||
area_registry as ar,
|
||||
device_registry as dr,
|
||||
@@ -74,17 +73,16 @@ from .const import DOMAIN, ConversationEntityFeature
|
||||
from .entity import ConversationEntity
|
||||
from .models import ConversationInput, ConversationResult
|
||||
from .trace import ConversationTraceEventType, async_conversation_trace_append
|
||||
from .trigger import TriggerDetails
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
_DEFAULT_ERROR_TEXT = "Sorry, I couldn't understand that"
|
||||
_ENTITY_REGISTRY_UPDATE_FIELDS = ["aliases", "name", "original_name"]
|
||||
|
||||
_DEFAULT_EXPOSED_ATTRIBUTES = {"device_class"}
|
||||
|
||||
REGEX_TYPE = type(re.compile(""))
|
||||
TRIGGER_CALLBACK_TYPE = Callable[
|
||||
[ConversationInput, RecognizeResult], Awaitable[str | None]
|
||||
]
|
||||
METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
|
||||
METADATA_CUSTOM_FILE = "hass_custom_file"
|
||||
METADATA_FUZZY_MATCH = "hass_fuzzy_match"
|
||||
@@ -110,14 +108,6 @@ class LanguageIntents:
|
||||
fuzzy_responses: FuzzyLanguageResponses | None = None
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class TriggerData:
|
||||
"""List of sentences and the callback for a trigger."""
|
||||
|
||||
sentences: list[str]
|
||||
callback: TRIGGER_CALLBACK_TYPE
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class SentenceTriggerResult:
|
||||
"""Result when matching a sentence trigger in an automation."""
|
||||
@@ -153,8 +143,8 @@ class IntentCacheKey:
|
||||
language: str
|
||||
"""Language of text."""
|
||||
|
||||
device_id: str | None
|
||||
"""Device id from user input."""
|
||||
satellite_id: str | None
|
||||
"""Satellite id from user input."""
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@@ -240,21 +230,23 @@ class DefaultAgent(ConversationEntity):
|
||||
"""Initialize the default agent."""
|
||||
self.hass = hass
|
||||
self._lang_intents: dict[str, LanguageIntents | object] = {}
|
||||
self._load_intents_lock = asyncio.Lock()
|
||||
|
||||
# intent -> [sentences]
|
||||
self._config_intents: dict[str, Any] = config_intents
|
||||
|
||||
# Sentences that will trigger a callback (skipping intent recognition)
|
||||
self._triggers_details: list[TriggerDetails] = []
|
||||
self._trigger_intents: Intents | None = None
|
||||
|
||||
# Slot lists for entities, areas, etc.
|
||||
self._slot_lists: dict[str, SlotList] | None = None
|
||||
self._unsub_clear_slot_list: list[Callable[[], None]] | None = None
|
||||
|
||||
# Used to filter slot lists before intent matching
|
||||
self._exposed_names_trie: Trie | None = None
|
||||
self._unexposed_names_trie: Trie | None = None
|
||||
|
||||
# Sentences that will trigger a callback (skipping intent recognition)
|
||||
self.trigger_sentences: list[TriggerData] = []
|
||||
self._trigger_intents: Intents | None = None
|
||||
self._unsub_clear_slot_list: list[Callable[[], None]] | None = None
|
||||
self._load_intents_lock = asyncio.Lock()
|
||||
|
||||
# LRU cache to avoid unnecessary intent matching
|
||||
self._intent_cache = IntentCache(capacity=128)
|
||||
|
||||
@@ -443,9 +435,15 @@ class DefaultAgent(ConversationEntity):
|
||||
}
|
||||
for entity in result.entities_list
|
||||
}
|
||||
device_area = self._get_device_area(user_input.device_id)
|
||||
if device_area:
|
||||
slots["preferred_area_id"] = {"value": device_area.id}
|
||||
|
||||
satellite_id = user_input.satellite_id
|
||||
device_id = user_input.device_id
|
||||
satellite_area, device_id = self._get_satellite_area_and_device(
|
||||
satellite_id, device_id
|
||||
)
|
||||
if satellite_area is not None:
|
||||
slots["preferred_area_id"] = {"value": satellite_area.id}
|
||||
|
||||
async_conversation_trace_append(
|
||||
ConversationTraceEventType.TOOL_CALL,
|
||||
{
|
||||
@@ -467,8 +465,8 @@ class DefaultAgent(ConversationEntity):
|
||||
user_input.context,
|
||||
language,
|
||||
assistant=DOMAIN,
|
||||
device_id=user_input.device_id,
|
||||
satellite_id=user_input.satellite_id,
|
||||
device_id=device_id,
|
||||
satellite_id=satellite_id,
|
||||
conversation_agent_id=user_input.agent_id,
|
||||
)
|
||||
except intent.MatchFailedError as match_error:
|
||||
@@ -534,7 +532,9 @@ class DefaultAgent(ConversationEntity):
|
||||
|
||||
# Try cache first
|
||||
cache_key = IntentCacheKey(
|
||||
text=user_input.text, language=language, device_id=user_input.device_id
|
||||
text=user_input.text,
|
||||
language=language,
|
||||
satellite_id=user_input.satellite_id,
|
||||
)
|
||||
cache_value = self._intent_cache.get(cache_key)
|
||||
if cache_value is not None:
|
||||
@@ -1190,8 +1190,8 @@ class DefaultAgent(ConversationEntity):
|
||||
fuzzy_responses=fuzzy_responses,
|
||||
)
|
||||
|
||||
@core.callback
|
||||
def _async_clear_slot_list(self, event: core.Event[Any] | None = None) -> None:
|
||||
@callback
|
||||
def _async_clear_slot_list(self, event: Event[Any] | None = None) -> None:
|
||||
"""Clear slot lists when a registry has changed."""
|
||||
# Two subscribers can be scheduled at same time
|
||||
_LOGGER.debug("Clearing slot lists")
|
||||
@@ -1304,28 +1304,40 @@ class DefaultAgent(ConversationEntity):
|
||||
self, user_input: ConversationInput
|
||||
) -> dict[str, Any] | None:
|
||||
"""Return intent recognition context for user input."""
|
||||
if not user_input.device_id:
|
||||
satellite_area, _ = self._get_satellite_area_and_device(
|
||||
user_input.satellite_id, user_input.device_id
|
||||
)
|
||||
if satellite_area is None:
|
||||
return None
|
||||
|
||||
device_area = self._get_device_area(user_input.device_id)
|
||||
if device_area is None:
|
||||
return None
|
||||
return {"area": {"value": satellite_area.name, "text": satellite_area.name}}
|
||||
|
||||
return {"area": {"value": device_area.name, "text": device_area.name}}
|
||||
def _get_satellite_area_and_device(
|
||||
self, satellite_id: str | None, device_id: str | None = None
|
||||
) -> tuple[ar.AreaEntry | None, str | None]:
|
||||
"""Return area entry and device id."""
|
||||
hass = self.hass
|
||||
|
||||
def _get_device_area(self, device_id: str | None) -> ar.AreaEntry | None:
|
||||
"""Return area object for given device identifier."""
|
||||
if device_id is None:
|
||||
return None
|
||||
area_id: str | None = None
|
||||
|
||||
devices = dr.async_get(self.hass)
|
||||
device = devices.async_get(device_id)
|
||||
if (device is None) or (device.area_id is None):
|
||||
return None
|
||||
if (
|
||||
satellite_id is not None
|
||||
and (entity_entry := er.async_get(hass).async_get(satellite_id)) is not None
|
||||
):
|
||||
area_id = entity_entry.area_id
|
||||
device_id = entity_entry.device_id
|
||||
|
||||
areas = ar.async_get(self.hass)
|
||||
if (
|
||||
area_id is None
|
||||
and device_id is not None
|
||||
and (device_entry := dr.async_get(hass).async_get(device_id)) is not None
|
||||
):
|
||||
area_id = device_entry.area_id
|
||||
|
||||
return areas.async_get_area(device.area_id)
|
||||
if area_id is None:
|
||||
return None, device_id
|
||||
|
||||
return ar.async_get(hass).async_get_area(area_id), device_id
|
||||
|
||||
def _get_error_text(
|
||||
self,
|
||||
@@ -1349,22 +1361,14 @@ class DefaultAgent(ConversationEntity):
|
||||
|
||||
return response_template.async_render(response_args)
|
||||
|
||||
@core.callback
|
||||
def register_trigger(
|
||||
self,
|
||||
sentences: list[str],
|
||||
callback: TRIGGER_CALLBACK_TYPE,
|
||||
) -> core.CALLBACK_TYPE:
|
||||
"""Register a list of sentences that will trigger a callback when recognized."""
|
||||
trigger_data = TriggerData(sentences=sentences, callback=callback)
|
||||
self.trigger_sentences.append(trigger_data)
|
||||
@callback
|
||||
def update_triggers(self, triggers_details: list[TriggerDetails]) -> None:
|
||||
"""Update triggers."""
|
||||
self._triggers_details = triggers_details
|
||||
|
||||
# Force rebuild on next use
|
||||
self._trigger_intents = None
|
||||
|
||||
return functools.partial(self._unregister_trigger, trigger_data)
|
||||
|
||||
@core.callback
|
||||
def _rebuild_trigger_intents(self) -> None:
|
||||
"""Rebuild the HassIL intents object from the current trigger sentences."""
|
||||
intents_dict = {
|
||||
@@ -1373,8 +1377,8 @@ class DefaultAgent(ConversationEntity):
|
||||
# Use trigger data index as a virtual intent name for HassIL.
|
||||
# This works because the intents are rebuilt on every
|
||||
# register/unregister.
|
||||
str(trigger_id): {"data": [{"sentences": trigger_data.sentences}]}
|
||||
for trigger_id, trigger_data in enumerate(self.trigger_sentences)
|
||||
str(trigger_id): {"data": [{"sentences": trigger_details.sentences}]}
|
||||
for trigger_id, trigger_details in enumerate(self._triggers_details)
|
||||
},
|
||||
}
|
||||
|
||||
@@ -1394,14 +1398,6 @@ class DefaultAgent(ConversationEntity):
|
||||
|
||||
_LOGGER.debug("Rebuilt trigger intents: %s", intents_dict)
|
||||
|
||||
@core.callback
|
||||
def _unregister_trigger(self, trigger_data: TriggerData) -> None:
|
||||
"""Unregister a set of trigger sentences."""
|
||||
self.trigger_sentences.remove(trigger_data)
|
||||
|
||||
# Force rebuild on next use
|
||||
self._trigger_intents = None
|
||||
|
||||
async def async_recognize_sentence_trigger(
|
||||
self, user_input: ConversationInput
|
||||
) -> SentenceTriggerResult | None:
|
||||
@@ -1410,7 +1406,7 @@ class DefaultAgent(ConversationEntity):
|
||||
Calls the registered callbacks if there's a match and returns a sentence
|
||||
trigger result.
|
||||
"""
|
||||
if not self.trigger_sentences:
|
||||
if not self._triggers_details:
|
||||
# No triggers registered
|
||||
return None
|
||||
|
||||
@@ -1455,7 +1451,7 @@ class DefaultAgent(ConversationEntity):
|
||||
|
||||
# Gather callback responses in parallel
|
||||
trigger_callbacks = [
|
||||
self.trigger_sentences[trigger_id].callback(user_input, trigger_result)
|
||||
self._triggers_details[trigger_id].callback(user_input, trigger_result)
|
||||
for trigger_id, trigger_result in result.matched_triggers.items()
|
||||
]
|
||||
|
||||
|
@@ -169,12 +169,11 @@ async def websocket_list_sentences(
|
||||
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
|
||||
) -> None:
|
||||
"""List custom registered sentences."""
|
||||
agent = get_agent_manager(hass).default_agent
|
||||
assert agent is not None
|
||||
manager = get_agent_manager(hass)
|
||||
|
||||
sentences = []
|
||||
for trigger_data in agent.trigger_sentences:
|
||||
sentences.extend(trigger_data.sentences)
|
||||
for trigger_details in manager.triggers_details:
|
||||
sentences.extend(trigger_details.sentences)
|
||||
|
||||
connection.send_result(msg["id"], {"trigger_sentences": sentences})
|
||||
|
||||
|
@@ -2,6 +2,8 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
|
||||
from hassil.recognize import RecognizeResult
|
||||
@@ -15,7 +17,7 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_COMMAND, CONF_PLATFORM
|
||||
from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry as er
|
||||
from homeassistant.helpers.script import ScriptRunResult
|
||||
from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo
|
||||
from homeassistant.helpers.typing import UNDEFINED, ConfigType
|
||||
@@ -24,6 +26,18 @@ from .agent_manager import get_agent_manager
|
||||
from .const import DOMAIN
|
||||
from .models import ConversationInput
|
||||
|
||||
TRIGGER_CALLBACK_TYPE = Callable[
|
||||
[ConversationInput, RecognizeResult], Awaitable[str | None]
|
||||
]
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class TriggerDetails:
|
||||
"""List of sentences and the callback for a trigger."""
|
||||
|
||||
sentences: list[str]
|
||||
callback: TRIGGER_CALLBACK_TYPE
|
||||
|
||||
|
||||
def has_no_punctuation(value: list[str]) -> list[str]:
|
||||
"""Validate result does not contain punctuation."""
|
||||
@@ -71,6 +85,8 @@ async def async_attach_trigger(
|
||||
trigger_data = trigger_info["trigger_data"]
|
||||
sentences = config.get(CONF_COMMAND, [])
|
||||
|
||||
ent_reg = er.async_get(hass)
|
||||
|
||||
job = HassJob(action)
|
||||
|
||||
async def call_action(
|
||||
@@ -92,6 +108,14 @@ async def async_attach_trigger(
|
||||
for entity_name, entity in result.entities.items()
|
||||
}
|
||||
|
||||
satellite_id = user_input.satellite_id
|
||||
device_id = user_input.device_id
|
||||
if (
|
||||
satellite_id is not None
|
||||
and (satellite_entry := ent_reg.async_get(satellite_id)) is not None
|
||||
):
|
||||
device_id = satellite_entry.device_id
|
||||
|
||||
trigger_input: dict[str, Any] = { # Satisfy type checker
|
||||
**trigger_data,
|
||||
"platform": DOMAIN,
|
||||
@@ -100,8 +124,8 @@ async def async_attach_trigger(
|
||||
"slots": { # direct access to values
|
||||
entity_name: entity["value"] for entity_name, entity in details.items()
|
||||
},
|
||||
"device_id": user_input.device_id,
|
||||
"satellite_id": user_input.satellite_id,
|
||||
"device_id": device_id,
|
||||
"satellite_id": satellite_id,
|
||||
"user_input": user_input.as_dict(),
|
||||
}
|
||||
|
||||
@@ -124,6 +148,6 @@ async def async_attach_trigger(
|
||||
# two trigger copies for who will provide a response.
|
||||
return None
|
||||
|
||||
agent = get_agent_manager(hass).default_agent
|
||||
assert agent is not None
|
||||
return agent.register_trigger(sentences, call_action)
|
||||
return get_agent_manager(hass).register_trigger(
|
||||
TriggerDetails(sentences=sentences, callback=call_action)
|
||||
)
|
||||
|
@@ -6,6 +6,6 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/droplet",
|
||||
"iot_class": "local_push",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pydroplet==2.3.2"],
|
||||
"requirements": ["pydroplet==2.3.3"],
|
||||
"zeroconf": ["_droplet._tcp.local."]
|
||||
}
|
||||
|
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
|
||||
"requirements": ["py-sucks==0.9.11", "deebot-client==13.7.0"]
|
||||
"requirements": ["py-sucks==0.9.11", "deebot-client==14.0.0"]
|
||||
}
|
||||
|
@@ -5,9 +5,11 @@ from __future__ import annotations
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
|
||||
from deebot_client.capabilities import CapabilitySet
|
||||
from deebot_client.capabilities import CapabilityNumber, CapabilitySet
|
||||
from deebot_client.device import Device
|
||||
from deebot_client.events import CleanCountEvent, CutDirectionEvent, VolumeEvent
|
||||
from deebot_client.events.base import Event
|
||||
from deebot_client.events.water_info import WaterCustomAmountEvent
|
||||
|
||||
from homeassistant.components.number import (
|
||||
NumberEntity,
|
||||
@@ -75,6 +77,19 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsNumberEntityDescription, ...] = (
|
||||
native_step=1.0,
|
||||
mode=NumberMode.BOX,
|
||||
),
|
||||
EcovacsNumberEntityDescription[WaterCustomAmountEvent](
|
||||
capability_fn=lambda caps: (
|
||||
caps.water.amount
|
||||
if caps.water and isinstance(caps.water.amount, CapabilityNumber)
|
||||
else None
|
||||
),
|
||||
value_fn=lambda e: e.value,
|
||||
key="water_amount",
|
||||
translation_key="water_amount",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
native_step=1.0,
|
||||
mode=NumberMode.BOX,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -100,6 +115,18 @@ class EcovacsNumberEntity[EventT: Event](
|
||||
|
||||
entity_description: EcovacsNumberEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
device: Device,
|
||||
capability: CapabilitySet[EventT, [int]],
|
||||
entity_description: EcovacsNumberEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize entity."""
|
||||
super().__init__(device, capability, entity_description)
|
||||
if isinstance(capability, CapabilityNumber):
|
||||
self._attr_native_min_value = capability.min
|
||||
self._attr_native_max_value = capability.max
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Set up the event listeners now that hass is ready."""
|
||||
await super().async_added_to_hass()
|
||||
|
@@ -33,7 +33,11 @@ class EcovacsSelectEntityDescription[EventT: Event](
|
||||
|
||||
ENTITY_DESCRIPTIONS: tuple[EcovacsSelectEntityDescription, ...] = (
|
||||
EcovacsSelectEntityDescription[WaterAmountEvent](
|
||||
capability_fn=lambda caps: caps.water.amount if caps.water else None,
|
||||
capability_fn=lambda caps: (
|
||||
caps.water.amount
|
||||
if caps.water and isinstance(caps.water.amount, CapabilitySetTypes)
|
||||
else None
|
||||
),
|
||||
current_option_fn=lambda e: get_name_key(e.value),
|
||||
options_fn=lambda water: [get_name_key(amount) for amount in water.types],
|
||||
key="water_amount",
|
||||
|
@@ -102,6 +102,9 @@
|
||||
},
|
||||
"volume": {
|
||||
"name": "Volume"
|
||||
},
|
||||
"water_amount": {
|
||||
"name": "Water flow level"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
@@ -152,8 +155,10 @@
|
||||
"station_state": {
|
||||
"name": "Station state",
|
||||
"state": {
|
||||
"drying_mop": "Drying mop",
|
||||
"idle": "[%key:common::state::idle%]",
|
||||
"emptying_dustbin": "Emptying dustbin"
|
||||
"emptying_dustbin": "Emptying dustbin",
|
||||
"washing_mop": "Washing mop"
|
||||
}
|
||||
},
|
||||
"stats_area": {
|
||||
@@ -174,7 +179,7 @@
|
||||
},
|
||||
"select": {
|
||||
"water_amount": {
|
||||
"name": "Water flow level",
|
||||
"name": "[%key:component::ecovacs::entity::number::water_amount::name%]",
|
||||
"state": {
|
||||
"high": "[%key:common::state::high%]",
|
||||
"low": "[%key:common::state::low%]",
|
||||
|
@@ -7,8 +7,6 @@ import random
|
||||
import string
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from deebot_client.events.station import State
|
||||
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.util import slugify
|
||||
|
||||
@@ -49,9 +47,6 @@ def get_supported_entities(
|
||||
@callback
|
||||
def get_name_key(enum: Enum) -> str:
|
||||
"""Return the lower case name of the enum."""
|
||||
if enum is State.EMPTYING:
|
||||
# Will be fixed in the next major release of deebot-client
|
||||
return "emptying_dustbin"
|
||||
return enum.name.lower()
|
||||
|
||||
|
||||
|
@@ -253,6 +253,7 @@ ECOWITT_SENSORS_MAPPING: Final = {
|
||||
),
|
||||
EcoWittSensorTypes.PM4: SensorEntityDescription(
|
||||
key="PM4",
|
||||
device_class=SensorDeviceClass.PM4,
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
|
@@ -51,6 +51,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) -> b
|
||||
client_info=CLIENT_INFO,
|
||||
zeroconf_instance=zeroconf_instance,
|
||||
noise_psk=noise_psk,
|
||||
timezone=hass.config.time_zone,
|
||||
)
|
||||
|
||||
domain_data = DomainData.get(hass)
|
||||
|
@@ -138,6 +138,16 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
return await self._async_authenticate_or_add()
|
||||
|
||||
if error is None and entry_data.get(CONF_NOISE_PSK):
|
||||
# Device was configured with encryption but now connects without it.
|
||||
# Check if it's the same device before offering to remove encryption.
|
||||
if self._reauth_entry.unique_id and self._device_mac:
|
||||
expected_mac = format_mac(self._reauth_entry.unique_id)
|
||||
actual_mac = format_mac(self._device_mac)
|
||||
if expected_mac != actual_mac:
|
||||
# Different device at the same IP - do not offer to remove encryption
|
||||
return self._async_abort_wrong_device(
|
||||
self._reauth_entry, expected_mac, actual_mac
|
||||
)
|
||||
return await self.async_step_reauth_encryption_removed_confirm()
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
@@ -508,6 +518,28 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
CONF_DEVICE_NAME: self._device_name,
|
||||
}
|
||||
|
||||
@callback
|
||||
def _async_abort_wrong_device(
|
||||
self, entry: ConfigEntry, expected_mac: str, actual_mac: str
|
||||
) -> ConfigFlowResult:
|
||||
"""Abort flow because a different device was found at the IP address."""
|
||||
assert self._host is not None
|
||||
assert self._device_name is not None
|
||||
if self.source == SOURCE_RECONFIGURE:
|
||||
reason = "reconfigure_unique_id_changed"
|
||||
else:
|
||||
reason = "reauth_unique_id_changed"
|
||||
return self.async_abort(
|
||||
reason=reason,
|
||||
description_placeholders={
|
||||
"name": entry.data.get(CONF_DEVICE_NAME, entry.title),
|
||||
"host": self._host,
|
||||
"expected_mac": expected_mac,
|
||||
"unexpected_mac": actual_mac,
|
||||
"unexpected_device_name": self._device_name,
|
||||
},
|
||||
)
|
||||
|
||||
async def _async_validated_connection(self) -> ConfigFlowResult:
|
||||
"""Handle validated connection."""
|
||||
if self.source == SOURCE_RECONFIGURE:
|
||||
@@ -539,17 +571,10 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
# Reauth was triggered a while ago, and since than
|
||||
# a new device resides at the same IP address.
|
||||
assert self._device_name is not None
|
||||
return self.async_abort(
|
||||
reason="reauth_unique_id_changed",
|
||||
description_placeholders={
|
||||
"name": self._reauth_entry.data.get(
|
||||
CONF_DEVICE_NAME, self._reauth_entry.title
|
||||
),
|
||||
"host": self._host,
|
||||
"expected_mac": format_mac(self._reauth_entry.unique_id),
|
||||
"unexpected_mac": format_mac(self.unique_id),
|
||||
"unexpected_device_name": self._device_name,
|
||||
},
|
||||
return self._async_abort_wrong_device(
|
||||
self._reauth_entry,
|
||||
format_mac(self._reauth_entry.unique_id),
|
||||
format_mac(self.unique_id),
|
||||
)
|
||||
|
||||
async def _async_reconfig_validated_connection(self) -> ConfigFlowResult:
|
||||
@@ -589,17 +614,10 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
if self._reconfig_entry.data.get(CONF_DEVICE_NAME) == self._device_name:
|
||||
self._entry_with_name_conflict = self._reconfig_entry
|
||||
return await self.async_step_name_conflict()
|
||||
return self.async_abort(
|
||||
reason="reconfigure_unique_id_changed",
|
||||
description_placeholders={
|
||||
"name": self._reconfig_entry.data.get(
|
||||
CONF_DEVICE_NAME, self._reconfig_entry.title
|
||||
),
|
||||
"host": self._host,
|
||||
"expected_mac": format_mac(self._reconfig_entry.unique_id),
|
||||
"unexpected_mac": format_mac(self.unique_id),
|
||||
"unexpected_device_name": self._device_name,
|
||||
},
|
||||
return self._async_abort_wrong_device(
|
||||
self._reconfig_entry,
|
||||
format_mac(self._reconfig_entry.unique_id),
|
||||
format_mac(self.unique_id),
|
||||
)
|
||||
|
||||
async def async_step_encryption_key(
|
||||
|
@@ -49,11 +49,13 @@ from aioesphomeapi import (
|
||||
from aioesphomeapi.model import ButtonInfo
|
||||
from bleak_esphome.backend.device import ESPHomeBluetoothDevice
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.components.assist_satellite import AssistSatelliteConfiguration
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers import discovery_flow, entity_registry as er
|
||||
from homeassistant.helpers.service_info.esphome import ESPHomeServiceInfo
|
||||
from homeassistant.helpers.storage import Store
|
||||
|
||||
from .const import DOMAIN
|
||||
@@ -468,7 +470,7 @@ class RuntimeEntryData:
|
||||
|
||||
@callback
|
||||
def async_on_connect(
|
||||
self, device_info: DeviceInfo, api_version: APIVersion
|
||||
self, hass: HomeAssistant, device_info: DeviceInfo, api_version: APIVersion
|
||||
) -> None:
|
||||
"""Call when the entry has been connected."""
|
||||
self.available = True
|
||||
@@ -484,6 +486,29 @@ class RuntimeEntryData:
|
||||
# be marked as unavailable or not.
|
||||
self.expected_disconnect = True
|
||||
|
||||
if not device_info.zwave_proxy_feature_flags:
|
||||
return
|
||||
|
||||
assert self.client.connected_address
|
||||
|
||||
discovery_flow.async_create_flow(
|
||||
hass,
|
||||
"zwave_js",
|
||||
{"source": config_entries.SOURCE_ESPHOME},
|
||||
ESPHomeServiceInfo(
|
||||
name=device_info.name,
|
||||
zwave_home_id=device_info.zwave_home_id or None,
|
||||
ip_address=self.client.connected_address,
|
||||
port=self.client.port,
|
||||
noise_psk=self.client.noise_psk,
|
||||
),
|
||||
discovery_key=discovery_flow.DiscoveryKey(
|
||||
domain=DOMAIN,
|
||||
key=device_info.mac_address,
|
||||
version=1,
|
||||
),
|
||||
)
|
||||
|
||||
@callback
|
||||
def async_register_assist_satellite_config_updated_callback(
|
||||
self,
|
||||
|
@@ -505,7 +505,7 @@ class ESPHomeManager:
|
||||
|
||||
api_version = cli.api_version
|
||||
assert api_version is not None, "API version must be set"
|
||||
entry_data.async_on_connect(device_info, api_version)
|
||||
entry_data.async_on_connect(hass, device_info, api_version)
|
||||
|
||||
await self._handle_dynamic_encryption_key(device_info)
|
||||
|
||||
|
@@ -17,7 +17,7 @@
|
||||
"mqtt": ["esphome/discover/#"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": [
|
||||
"aioesphomeapi==41.1.0",
|
||||
"aioesphomeapi==41.9.0",
|
||||
"esphome-dashboard-api==1.3.0",
|
||||
"bleak-esphome==3.3.0"
|
||||
],
|
||||
|
@@ -162,12 +162,12 @@ def setup_service_functions(
|
||||
It appears that all TCC-compatible systems support the same three zones modes.
|
||||
"""
|
||||
|
||||
@verify_domain_control(hass, DOMAIN)
|
||||
@verify_domain_control(DOMAIN)
|
||||
async def force_refresh(call: ServiceCall) -> None:
|
||||
"""Obtain the latest state data via the vendor's RESTful API."""
|
||||
await coordinator.async_refresh()
|
||||
|
||||
@verify_domain_control(hass, DOMAIN)
|
||||
@verify_domain_control(DOMAIN)
|
||||
async def set_system_mode(call: ServiceCall) -> None:
|
||||
"""Set the system mode."""
|
||||
assert coordinator.tcs is not None # mypy
|
||||
@@ -179,7 +179,7 @@ def setup_service_functions(
|
||||
}
|
||||
async_dispatcher_send(hass, DOMAIN, payload)
|
||||
|
||||
@verify_domain_control(hass, DOMAIN)
|
||||
@verify_domain_control(DOMAIN)
|
||||
async def set_zone_override(call: ServiceCall) -> None:
|
||||
"""Set the zone override (setpoint)."""
|
||||
entity_id = call.data[ATTR_ENTITY_ID]
|
||||
|
@@ -66,26 +66,6 @@ SENSOR_TYPES: dict[str, SensorEntityDescription] = {
|
||||
key="last_alarm_type_name",
|
||||
translation_key="last_alarm_type_name",
|
||||
),
|
||||
"Record_Mode": SensorEntityDescription(
|
||||
key="Record_Mode",
|
||||
translation_key="record_mode",
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
"battery_camera_work_mode": SensorEntityDescription(
|
||||
key="battery_camera_work_mode",
|
||||
translation_key="battery_camera_work_mode",
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
"powerStatus": SensorEntityDescription(
|
||||
key="powerStatus",
|
||||
translation_key="power_status",
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
"OnlineStatus": SensorEntityDescription(
|
||||
key="OnlineStatus",
|
||||
translation_key="online_status",
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
@@ -96,26 +76,16 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up EZVIZ sensors based on a config entry."""
|
||||
coordinator = entry.runtime_data
|
||||
entities: list[EzvizSensor] = []
|
||||
|
||||
for camera, sensors in coordinator.data.items():
|
||||
entities.extend(
|
||||
async_add_entities(
|
||||
[
|
||||
EzvizSensor(coordinator, camera, sensor)
|
||||
for sensor, value in sensors.items()
|
||||
if sensor in SENSOR_TYPES and value is not None
|
||||
)
|
||||
|
||||
optionals = sensors.get("optionals", {})
|
||||
entities.extend(
|
||||
EzvizSensor(coordinator, camera, optional_key)
|
||||
for optional_key in ("powerStatus", "OnlineStatus")
|
||||
if optional_key in optionals
|
||||
)
|
||||
|
||||
if "mode" in optionals.get("Record_Mode", {}):
|
||||
entities.append(EzvizSensor(coordinator, camera, "mode"))
|
||||
|
||||
async_add_entities(entities)
|
||||
for camera in coordinator.data
|
||||
for sensor, value in coordinator.data[camera].items()
|
||||
if sensor in SENSOR_TYPES
|
||||
if value is not None
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class EzvizSensor(EzvizEntity, SensorEntity):
|
||||
|
@@ -147,18 +147,6 @@
|
||||
},
|
||||
"last_alarm_type_name": {
|
||||
"name": "Last alarm type name"
|
||||
},
|
||||
"record_mode": {
|
||||
"name": "Record mode"
|
||||
},
|
||||
"battery_camera_work_mode": {
|
||||
"name": "Battery work mode"
|
||||
},
|
||||
"power_status": {
|
||||
"name": "Power status"
|
||||
},
|
||||
"online_status": {
|
||||
"name": "Online status"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
|
@@ -31,11 +31,12 @@ SERVICE_SCHEMA_SET_GUEST_WIFI_PW = vol.Schema(
|
||||
|
||||
async def _async_set_guest_wifi_password(service_call: ServiceCall) -> None:
|
||||
"""Call Fritz set guest wifi password service."""
|
||||
hass = service_call.hass
|
||||
target_entry_ids = await async_extract_config_entry_ids(hass, service_call)
|
||||
target_entry_ids = await async_extract_config_entry_ids(service_call)
|
||||
target_entries: list[FritzConfigEntry] = [
|
||||
loaded_entry
|
||||
for loaded_entry in hass.config_entries.async_loaded_entries(DOMAIN)
|
||||
for loaded_entry in service_call.hass.config_entries.async_loaded_entries(
|
||||
DOMAIN
|
||||
)
|
||||
if loaded_entry.entry_id in target_entry_ids
|
||||
]
|
||||
|
||||
|
@@ -124,7 +124,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: GeniusHubConfigEntry) ->
|
||||
def setup_service_functions(hass: HomeAssistant, broker):
|
||||
"""Set up the service functions."""
|
||||
|
||||
@verify_domain_control(hass, DOMAIN)
|
||||
@verify_domain_control(DOMAIN)
|
||||
async def set_zone_mode(call: ServiceCall) -> None:
|
||||
"""Set the system mode."""
|
||||
entity_id = call.data[ATTR_ENTITY_ID]
|
||||
|
@@ -29,6 +29,7 @@ from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
device_registry as dr,
|
||||
entity_registry as er,
|
||||
issue_registry as ir,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
@@ -70,6 +71,21 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
|
||||
async def generate_content(call: ServiceCall) -> ServiceResponse:
|
||||
"""Generate content from text and optionally images."""
|
||||
LOGGER.warning(
|
||||
"Action '%s.%s' is deprecated and will be removed in the 2026.4.0 release. "
|
||||
"Please use the 'ai_task.generate_data' action instead",
|
||||
DOMAIN,
|
||||
SERVICE_GENERATE_CONTENT,
|
||||
)
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"deprecated_generate_content",
|
||||
breaks_in_ha_version="2026.4.0",
|
||||
is_fixable=False,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="deprecated_generate_content",
|
||||
)
|
||||
|
||||
prompt_parts = [call.data[CONF_PROMPT]]
|
||||
|
||||
|
@@ -150,10 +150,16 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"deprecated_generate_content": {
|
||||
"title": "Deprecated 'generate_content' action",
|
||||
"description": "Action 'google_generative_ai_conversation.generate_content' is deprecated and will be removed in the 2026.4.0 release. Please use the 'ai_task.generate_data' action instead"
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"generate_content": {
|
||||
"name": "Generate content",
|
||||
"description": "Generate content from a prompt consisting of text and optionally images",
|
||||
"name": "Generate content (deprecated)",
|
||||
"description": "Generate content from a prompt consisting of text and optionally images (deprecated)",
|
||||
"fields": {
|
||||
"prompt": {
|
||||
"name": "Prompt",
|
||||
|
@@ -51,7 +51,7 @@ async def _extract_gmail_config_entries(
|
||||
) -> list[GoogleMailConfigEntry]:
|
||||
return [
|
||||
entry
|
||||
for entry_id in await async_extract_config_entry_ids(call.hass, call)
|
||||
for entry_id in await async_extract_config_entry_ids(call)
|
||||
if (entry := call.hass.config_entries.async_get_entry(entry_id))
|
||||
and entry.domain == DOMAIN
|
||||
]
|
||||
|
@@ -10,9 +10,8 @@ from typing import Self, cast
|
||||
from google_photos_library_api.exceptions import GooglePhotosApiError
|
||||
from google_photos_library_api.model import Album, MediaItem
|
||||
|
||||
from homeassistant.components.media_player import MediaClass, MediaType
|
||||
from homeassistant.components.media_player import BrowseError, MediaClass, MediaType
|
||||
from homeassistant.components.media_source import (
|
||||
BrowseError,
|
||||
BrowseMediaSource,
|
||||
MediaSource,
|
||||
MediaSourceItem,
|
||||
|
@@ -39,6 +39,7 @@ ATTR_ADD_CHECKLIST_ITEM = "add_checklist_item"
|
||||
ATTR_REMOVE_CHECKLIST_ITEM = "remove_checklist_item"
|
||||
ATTR_SCORE_CHECKLIST_ITEM = "score_checklist_item"
|
||||
ATTR_UNSCORE_CHECKLIST_ITEM = "unscore_checklist_item"
|
||||
ATTR_COLLAPSE_CHECKLIST = "collapse_checklist"
|
||||
ATTR_REMINDER = "reminder"
|
||||
ATTR_REMOVE_REMINDER = "remove_reminder"
|
||||
ATTR_CLEAR_REMINDER = "clear_reminder"
|
||||
|
@@ -47,6 +47,7 @@ from .const import (
|
||||
ATTR_ALIAS,
|
||||
ATTR_CLEAR_DATE,
|
||||
ATTR_CLEAR_REMINDER,
|
||||
ATTR_COLLAPSE_CHECKLIST,
|
||||
ATTR_CONFIG_ENTRY,
|
||||
ATTR_COST,
|
||||
ATTR_COUNTER_DOWN,
|
||||
@@ -130,6 +131,11 @@ SERVICE_TRANSFORMATION_SCHEMA = vol.Schema(
|
||||
}
|
||||
)
|
||||
|
||||
COLLAPSE_CHECKLIST_MAP = {
|
||||
"collapsed": True,
|
||||
"expanded": False,
|
||||
}
|
||||
|
||||
BASE_TASK_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
|
||||
@@ -160,6 +166,7 @@ BASE_TASK_SCHEMA = vol.Schema(
|
||||
vol.Optional(ATTR_REMOVE_CHECKLIST_ITEM): vol.All(cv.ensure_list, [str]),
|
||||
vol.Optional(ATTR_SCORE_CHECKLIST_ITEM): vol.All(cv.ensure_list, [str]),
|
||||
vol.Optional(ATTR_UNSCORE_CHECKLIST_ITEM): vol.All(cv.ensure_list, [str]),
|
||||
vol.Optional(ATTR_COLLAPSE_CHECKLIST): vol.In(COLLAPSE_CHECKLIST_MAP),
|
||||
vol.Optional(ATTR_START_DATE): cv.date,
|
||||
vol.Optional(ATTR_INTERVAL): vol.All(int, vol.Range(0)),
|
||||
vol.Optional(ATTR_REPEAT): vol.All(cv.ensure_list, [vol.In(WEEK_DAYS)]),
|
||||
@@ -223,6 +230,7 @@ ITEMID_MAP = {
|
||||
"shiny_seed": Skill.SHINY_SEED,
|
||||
}
|
||||
|
||||
|
||||
SERVICE_TASK_TYPE_MAP = {
|
||||
SERVICE_UPDATE_REWARD: TaskType.REWARD,
|
||||
SERVICE_CREATE_REWARD: TaskType.REWARD,
|
||||
@@ -714,6 +722,9 @@ async def _create_or_update_task(call: ServiceCall) -> ServiceResponse: # noqa:
|
||||
):
|
||||
data["checklist"] = checklist
|
||||
|
||||
if collapse_checklist := call.data.get(ATTR_COLLAPSE_CHECKLIST):
|
||||
data["collapseChecklist"] = COLLAPSE_CHECKLIST_MAP[collapse_checklist]
|
||||
|
||||
reminders = current_task.reminders if current_task else []
|
||||
|
||||
if add_reminders := call.data.get(ATTR_REMINDER):
|
||||
|
@@ -275,6 +275,15 @@ update_todo:
|
||||
selector:
|
||||
text:
|
||||
multiple: true
|
||||
collapse_checklist: &collapse_checklist
|
||||
required: false
|
||||
selector:
|
||||
select:
|
||||
options:
|
||||
- collapsed
|
||||
- expanded
|
||||
mode: list
|
||||
translation_key: collapse_checklist
|
||||
priority: *priority
|
||||
duedate_options:
|
||||
collapsed: true
|
||||
@@ -318,6 +327,7 @@ create_todo:
|
||||
name: *name
|
||||
notes: *notes
|
||||
add_checklist_item: *add_checklist_item
|
||||
collapse_checklist: *collapse_checklist
|
||||
priority: *priority
|
||||
date: *due_date
|
||||
reminder: *reminder
|
||||
@@ -419,6 +429,7 @@ create_daily:
|
||||
name: *name
|
||||
notes: *notes
|
||||
add_checklist_item: *add_checklist_item
|
||||
collapse_checklist: *collapse_checklist
|
||||
priority: *priority
|
||||
start_date: *start_date
|
||||
frequency: *frequency_daily
|
||||
|
@@ -66,7 +66,9 @@
|
||||
"repeat_weekly_options_description": "Options related to weekly repetition, applicable when the repetition interval is set to weekly.",
|
||||
"repeat_monthly_options_name": "Monthly repeat day",
|
||||
"repeat_monthly_options_description": "Options related to monthly repetition, applicable when the repetition interval is set to monthly.",
|
||||
"quest_name": "Quest"
|
||||
"quest_name": "Quest",
|
||||
"collapse_checklist_name": "Collapse/expand checklist",
|
||||
"collapse_checklist_description": "Whether the checklist of a task is displayed as collapsed or expanded in Habitica."
|
||||
},
|
||||
"config": {
|
||||
"abort": {
|
||||
@@ -1006,6 +1008,10 @@
|
||||
"unscore_checklist_item": {
|
||||
"name": "[%key:component::habitica::common::unscore_checklist_item_name%]",
|
||||
"description": "[%key:component::habitica::common::unscore_checklist_item_description%]"
|
||||
},
|
||||
"collapse_checklist": {
|
||||
"name": "[%key:component::habitica::common::collapse_checklist_name%]",
|
||||
"description": "[%key:component::habitica::common::collapse_checklist_description%]"
|
||||
}
|
||||
},
|
||||
"sections": {
|
||||
@@ -1070,6 +1076,10 @@
|
||||
"add_checklist_item": {
|
||||
"name": "[%key:component::habitica::common::checklist_options_name%]",
|
||||
"description": "[%key:component::habitica::common::add_checklist_item_description%]"
|
||||
},
|
||||
"collapse_checklist": {
|
||||
"name": "[%key:component::habitica::common::collapse_checklist_name%]",
|
||||
"description": "[%key:component::habitica::common::collapse_checklist_description%]"
|
||||
}
|
||||
},
|
||||
"sections": {
|
||||
@@ -1151,6 +1161,10 @@
|
||||
"name": "[%key:component::habitica::common::unscore_checklist_item_name%]",
|
||||
"description": "[%key:component::habitica::common::unscore_checklist_item_description%]"
|
||||
},
|
||||
"collapse_checklist": {
|
||||
"name": "[%key:component::habitica::common::collapse_checklist_name%]",
|
||||
"description": "[%key:component::habitica::common::collapse_checklist_description%]"
|
||||
},
|
||||
"streak": {
|
||||
"name": "Adjust streak",
|
||||
"description": "Adjust or reset the streak counter of the daily."
|
||||
@@ -1247,6 +1261,10 @@
|
||||
"name": "[%key:component::habitica::common::checklist_options_name%]",
|
||||
"description": "[%key:component::habitica::common::add_checklist_item_description%]"
|
||||
},
|
||||
"collapse_checklist": {
|
||||
"name": "[%key:component::habitica::common::collapse_checklist_name%]",
|
||||
"description": "[%key:component::habitica::common::collapse_checklist_description%]"
|
||||
},
|
||||
"reminder": {
|
||||
"name": "[%key:component::habitica::common::reminder_options_name%]",
|
||||
"description": "[%key:component::habitica::common::reminder_description%]"
|
||||
@@ -1325,6 +1343,12 @@
|
||||
"day_of_month": "Day of the month",
|
||||
"day_of_week": "Day of the week"
|
||||
}
|
||||
},
|
||||
"collapse_checklist": {
|
||||
"options": {
|
||||
"collapsed": "Collapsed",
|
||||
"expanded": "Expanded"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -73,6 +73,7 @@ from . import ( # noqa: F401
|
||||
config_flow,
|
||||
diagnostics,
|
||||
sensor,
|
||||
switch,
|
||||
system_health,
|
||||
update,
|
||||
)
|
||||
@@ -149,7 +150,7 @@ _DEPRECATED_HassioServiceInfo = DeprecatedConstant(
|
||||
# If new platforms are added, be sure to import them above
|
||||
# so we do not make other components that depend on hassio
|
||||
# wait for the import of the platforms
|
||||
PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.UPDATE]
|
||||
PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.SWITCH, Platform.UPDATE]
|
||||
|
||||
CONF_FRONTEND_REPO = "development_repo"
|
||||
|
||||
|
@@ -4,6 +4,7 @@ from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections import defaultdict
|
||||
from copy import deepcopy
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
@@ -545,3 +546,15 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
await super()._async_refresh(
|
||||
log_failures, raise_on_auth_failed, scheduled, raise_on_entry_error
|
||||
)
|
||||
|
||||
async def force_addon_info_data_refresh(self, addon_slug: str) -> None:
|
||||
"""Force refresh of addon info data for a specific addon."""
|
||||
try:
|
||||
slug, info = await self._update_addon_info(addon_slug)
|
||||
if info is not None and DATA_KEY_ADDONS in self.data:
|
||||
if slug in self.data[DATA_KEY_ADDONS]:
|
||||
data = deepcopy(self.data)
|
||||
data[DATA_KEY_ADDONS][slug].update(info)
|
||||
self.async_set_updated_data(data)
|
||||
except SupervisorError as err:
|
||||
_LOGGER.warning("Could not refresh info for %s: %s", addon_slug, err)
|
||||
|
@@ -70,7 +70,7 @@ PATHS_ADMIN = re.compile(
|
||||
r"|backups/new/upload"
|
||||
r"|audio/logs(/follow|/boots/-?\d+(/follow)?)?"
|
||||
r"|cli/logs(/follow|/boots/-?\d+(/follow)?)?"
|
||||
r"|core/logs(/follow|/boots/-?\d+(/follow)?)?"
|
||||
r"|core/logs(/latest|/follow|/boots/-?\d+(/follow)?)?"
|
||||
r"|dns/logs(/follow|/boots/-?\d+(/follow)?)?"
|
||||
r"|host/logs(/follow|/boots(/-?\d+(/follow)?)?)?"
|
||||
r"|multicast/logs(/follow|/boots/-?\d+(/follow)?)?"
|
||||
|
@@ -6,6 +6,6 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/hassio",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["aiohasupervisor==0.3.2"],
|
||||
"requirements": ["aiohasupervisor==0.3.3b0"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
@@ -250,6 +250,10 @@
|
||||
"unsupported_os_version": {
|
||||
"title": "Unsupported system - Home Assistant OS version",
|
||||
"description": "System is unsupported because the Home Assistant OS version in use is not supported. For troubleshooting information, select Learn more."
|
||||
},
|
||||
"unsupported_home_assistant_core_version": {
|
||||
"title": "Unsupported system - Home Assistant Core version",
|
||||
"description": "System is unsupported because the Home Assistant Core version in use is not supported. For troubleshooting information, select Learn more."
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
|
90
homeassistant/components/hassio/switch.py
Normal file
90
homeassistant/components/hassio/switch.py
Normal file
@@ -0,0 +1,90 @@
|
||||
"""Switch platform for Hass.io addons."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from aiohasupervisor import SupervisorError
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import ATTR_ICON
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import ADDONS_COORDINATOR, ATTR_STARTED, ATTR_STATE, DATA_KEY_ADDONS
|
||||
from .entity import HassioAddonEntity
|
||||
from .handler import get_supervisor_client
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
ENTITY_DESCRIPTION = SwitchEntityDescription(
|
||||
key=ATTR_STATE,
|
||||
name=None,
|
||||
icon="mdi:puzzle",
|
||||
entity_registry_enabled_default=False,
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Switch set up for Hass.io config entry."""
|
||||
coordinator = hass.data[ADDONS_COORDINATOR]
|
||||
|
||||
async_add_entities(
|
||||
HassioAddonSwitch(
|
||||
addon=addon,
|
||||
coordinator=coordinator,
|
||||
entity_description=ENTITY_DESCRIPTION,
|
||||
)
|
||||
for addon in coordinator.data[DATA_KEY_ADDONS].values()
|
||||
)
|
||||
|
||||
|
||||
class HassioAddonSwitch(HassioAddonEntity, SwitchEntity):
|
||||
"""Switch for Hass.io add-ons."""
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return true if the add-on is on."""
|
||||
addon_data = self.coordinator.data[DATA_KEY_ADDONS].get(self._addon_slug, {})
|
||||
state = addon_data.get(self.entity_description.key)
|
||||
return state == ATTR_STARTED
|
||||
|
||||
@property
|
||||
def entity_picture(self) -> str | None:
|
||||
"""Return the icon of the add-on if any."""
|
||||
if not self.available:
|
||||
return None
|
||||
addon_data = self.coordinator.data[DATA_KEY_ADDONS].get(self._addon_slug, {})
|
||||
if addon_data.get(ATTR_ICON):
|
||||
return f"/api/hassio/addons/{self._addon_slug}/icon"
|
||||
return None
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the entity on."""
|
||||
supervisor_client = get_supervisor_client(self.hass)
|
||||
try:
|
||||
await supervisor_client.addons.start_addon(self._addon_slug)
|
||||
except SupervisorError as err:
|
||||
_LOGGER.error("Failed to start addon %s: %s", self._addon_slug, err)
|
||||
raise HomeAssistantError(err) from err
|
||||
|
||||
await self.coordinator.force_addon_info_data_refresh(self._addon_slug)
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the entity off."""
|
||||
supervisor_client = get_supervisor_client(self.hass)
|
||||
try:
|
||||
await supervisor_client.addons.stop_addon(self._addon_slug)
|
||||
except SupervisorError as err:
|
||||
_LOGGER.error("Failed to stop addon %s: %s", self._addon_slug, err)
|
||||
raise HomeAssistantError(err) from err
|
||||
|
||||
await self.coordinator.force_addon_info_data_refresh(self._addon_slug)
|
@@ -6,9 +6,14 @@ import logging
|
||||
|
||||
from homeassistant.const import CONF_API_KEY, CONF_MODE, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.issue_registry import (
|
||||
IssueSeverity,
|
||||
async_create_issue,
|
||||
async_delete_issue,
|
||||
)
|
||||
from homeassistant.helpers.start import async_at_started
|
||||
|
||||
from .const import CONF_TRAFFIC_MODE, TRAVEL_MODE_PUBLIC
|
||||
from .const import CONF_TRAFFIC_MODE, DOMAIN, TRAVEL_MODE_PUBLIC
|
||||
from .coordinator import (
|
||||
HereConfigEntry,
|
||||
HERERoutingDataUpdateCoordinator,
|
||||
@@ -24,6 +29,8 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: HereConfigEntry)
|
||||
"""Set up HERE Travel Time from a config entry."""
|
||||
api_key = config_entry.data[CONF_API_KEY]
|
||||
|
||||
alert_for_multiple_entries(hass)
|
||||
|
||||
cls: type[HERETransitDataUpdateCoordinator | HERERoutingDataUpdateCoordinator]
|
||||
if config_entry.data[CONF_MODE] in {TRAVEL_MODE_PUBLIC, "publicTransportTimeTable"}:
|
||||
cls = HERETransitDataUpdateCoordinator
|
||||
@@ -42,6 +49,29 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: HereConfigEntry)
|
||||
return True
|
||||
|
||||
|
||||
def alert_for_multiple_entries(hass: HomeAssistant) -> None:
|
||||
"""Check if there are multiple entries for the same API key."""
|
||||
if len(hass.config_entries.async_entries(DOMAIN)) > 1:
|
||||
async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"multiple_here_travel_time_entries",
|
||||
learn_more_url="https://www.home-assistant.io/integrations/here_travel_time/",
|
||||
is_fixable=False,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="multiple_here_travel_time_entries",
|
||||
translation_placeholders={
|
||||
"pricing_page": "https://www.here.com/get-started/pricing",
|
||||
},
|
||||
)
|
||||
else:
|
||||
async_delete_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"multiple_here_travel_time_entries",
|
||||
)
|
||||
|
||||
|
||||
async def async_unload_entry(
|
||||
hass: HomeAssistant, config_entry: HereConfigEntry
|
||||
) -> bool:
|
||||
|
@@ -44,7 +44,7 @@ from .coordinator import (
|
||||
HERETransitDataUpdateCoordinator,
|
||||
)
|
||||
|
||||
SCAN_INTERVAL = timedelta(minutes=5)
|
||||
SCAN_INTERVAL = timedelta(minutes=30)
|
||||
|
||||
|
||||
def sensor_descriptions(travel_mode: str) -> tuple[SensorEntityDescription, ...]:
|
||||
|
@@ -107,5 +107,11 @@
"name": "Destination"
}
}
},
"issues": {
"multiple_here_travel_time_entries": {
"title": "More than one HERE Travel Time integration detected",
"description": "HERE deprecated the previous free tier. The new Base Plan has only 5000 instead of the previous 30000 free requests per month.\n\nSince you have more than one HERE Travel Time integration configured, you will need to disable or remove the additional integrations to avoid exceeding the free request limit.\nYou can ignore this issue if you are okay with the additional cost."
}
}
}
@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/holiday",
"iot_class": "local_polling",
"requirements": ["holidays==0.80", "babel==2.15.0"]
"requirements": ["holidays==0.81", "babel==2.15.0"]
}
@@ -37,7 +37,6 @@ PLATFORMS = [
Platform.SELECT,
Platform.SENSOR,
Platform.SWITCH,
Platform.TIME,
]
@@ -66,6 +66,14 @@
"default": "mdi:stop"
}
},
"number": {
"start_in_relative": {
"default": "mdi:progress-clock"
},
"finish_in_relative": {
"default": "mdi:progress-clock"
}
},
"sensor": {
"operation_state": {
"default": "mdi:state-machine",
@@ -251,14 +259,6 @@
"i_dos_2_active": {
"default": "mdi:numeric-2-circle"
}
},
"time": {
"start_in_relative": {
"default": "mdi:progress-clock"
},
"finish_in_relative": {
"default": "mdi:progress-clock"
}
}
}
}
@@ -1852,11 +1852,6 @@
"i_dos2_active": {
"name": "[%key:component::home_connect::services::set_program_and_options::fields::laundry_care_washer_option_i_dos2_active::name%]"
}
},
"time": {
"alarm_clock": {
"name": "Alarm clock"
}
}
}
}
@@ -1,172 +0,0 @@
|
||||
"""Provides time entities for Home Connect."""
|
||||
|
||||
from datetime import time
|
||||
from typing import cast
|
||||
|
||||
from aiohomeconnect.model import SettingKey
|
||||
from aiohomeconnect.model.error import HomeConnectError
|
||||
|
||||
from homeassistant.components.automation import automations_with_entity
|
||||
from homeassistant.components.script import scripts_with_entity
|
||||
from homeassistant.components.time import TimeEntity, TimeEntityDescription
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.issue_registry import (
|
||||
IssueSeverity,
|
||||
async_create_issue,
|
||||
async_delete_issue,
|
||||
)
|
||||
|
||||
from .common import setup_home_connect_entry
|
||||
from .const import DOMAIN
|
||||
from .coordinator import HomeConnectApplianceData, HomeConnectConfigEntry
|
||||
from .entity import HomeConnectEntity
|
||||
from .utils import get_dict_from_home_connect_error
|
||||
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
TIME_ENTITIES = (
|
||||
TimeEntityDescription(
|
||||
key=SettingKey.BSH_COMMON_ALARM_CLOCK,
|
||||
translation_key="alarm_clock",
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
def _get_entities_for_appliance(
|
||||
entry: HomeConnectConfigEntry,
|
||||
appliance: HomeConnectApplianceData,
|
||||
) -> list[HomeConnectEntity]:
|
||||
"""Get a list of entities."""
|
||||
return [
|
||||
HomeConnectTimeEntity(entry.runtime_data, appliance, description)
|
||||
for description in TIME_ENTITIES
|
||||
if description.key in appliance.settings
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: HomeConnectConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Home Connect switch."""
|
||||
setup_home_connect_entry(
|
||||
entry,
|
||||
_get_entities_for_appliance,
|
||||
async_add_entities,
|
||||
)
|
||||
|
||||
|
||||
def seconds_to_time(seconds: int) -> time:
|
||||
"""Convert seconds to a time object."""
|
||||
minutes, sec = divmod(seconds, 60)
|
||||
hours, minutes = divmod(minutes, 60)
|
||||
return time(hour=hours, minute=minutes, second=sec)
|
||||
|
||||
|
||||
def time_to_seconds(t: time) -> int:
|
||||
"""Convert a time object to seconds."""
|
||||
return t.hour * 3600 + t.minute * 60 + t.second
|
||||
|
||||
|
||||
class HomeConnectTimeEntity(HomeConnectEntity, TimeEntity):
|
||||
"""Time setting class for Home Connect."""
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Call when entity is added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
if self.bsh_key is SettingKey.BSH_COMMON_ALARM_CLOCK:
|
||||
automations = automations_with_entity(self.hass, self.entity_id)
|
||||
scripts = scripts_with_entity(self.hass, self.entity_id)
|
||||
items = automations + scripts
|
||||
if not items:
|
||||
return
|
||||
|
||||
entity_reg: er.EntityRegistry = er.async_get(self.hass)
|
||||
entity_automations = [
|
||||
automation_entity
|
||||
for automation_id in automations
|
||||
if (automation_entity := entity_reg.async_get(automation_id))
|
||||
]
|
||||
entity_scripts = [
|
||||
script_entity
|
||||
for script_id in scripts
|
||||
if (script_entity := entity_reg.async_get(script_id))
|
||||
]
|
||||
|
||||
items_list = [
|
||||
f"- [{item.original_name}](/config/automation/edit/{item.unique_id})"
|
||||
for item in entity_automations
|
||||
] + [
|
||||
f"- [{item.original_name}](/config/script/edit/{item.unique_id})"
|
||||
for item in entity_scripts
|
||||
]
|
||||
|
||||
async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
f"deprecated_time_alarm_clock_in_automations_scripts_{self.entity_id}",
|
||||
breaks_in_ha_version="2025.10.0",
|
||||
is_fixable=True,
|
||||
is_persistent=True,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_time_alarm_clock",
|
||||
translation_placeholders={
|
||||
"entity_id": self.entity_id,
|
||||
"items": "\n".join(items_list),
|
||||
},
|
||||
)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Call when entity will be removed from hass."""
|
||||
if self.bsh_key is SettingKey.BSH_COMMON_ALARM_CLOCK:
|
||||
async_delete_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
f"deprecated_time_alarm_clock_in_automations_scripts_{self.entity_id}",
|
||||
)
|
||||
async_delete_issue(
|
||||
self.hass, DOMAIN, f"deprecated_time_alarm_clock_{self.entity_id}"
|
||||
)
|
||||
|
||||
async def async_set_value(self, value: time) -> None:
|
||||
"""Set the native value of the entity."""
|
||||
async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
f"deprecated_time_alarm_clock_{self.entity_id}",
|
||||
breaks_in_ha_version="2025.10.0",
|
||||
is_fixable=True,
|
||||
is_persistent=True,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_time_alarm_clock",
|
||||
translation_placeholders={
|
||||
"entity_id": self.entity_id,
|
||||
},
|
||||
)
|
||||
try:
|
||||
await self.coordinator.client.set_setting(
|
||||
self.appliance.info.ha_id,
|
||||
setting_key=SettingKey(self.bsh_key),
|
||||
value=time_to_seconds(value),
|
||||
)
|
||||
except HomeConnectError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="set_setting_entity",
|
||||
translation_placeholders={
|
||||
**get_dict_from_home_connect_error(err),
|
||||
"entity_id": self.entity_id,
|
||||
"key": self.bsh_key,
|
||||
"value": str(value),
|
||||
},
|
||||
) from err
|
||||
|
||||
def update_native_value(self) -> None:
|
||||
"""Set the value of the entity."""
|
||||
data = self.appliance.settings[cast(SettingKey, self.bsh_key)]
|
||||
self._attr_native_value = seconds_to_time(data.value)
|
@@ -339,7 +339,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa:
reload_entries: set[str] = set()
if ATTR_ENTRY_ID in call.data:
reload_entries.add(call.data[ATTR_ENTRY_ID])
reload_entries.update(await async_extract_config_entry_ids(hass, call))
reload_entries.update(await async_extract_config_entry_ids(call))
if not reload_entries:
raise ValueError("There were no matching config entries to reload")
await asyncio.gather(
@@ -272,7 +272,7 @@ async def async_setup_platform(

async def delete_service(call: ServiceCall) -> None:
"""Delete a dynamically created scene."""
entity_ids = await async_extract_entity_ids(hass, call)
entity_ids = await async_extract_entity_ids(call)

for entity_id in entity_ids:
scene = platform.entities.get(entity_id)
@@ -90,7 +90,7 @@ class ZBT2FirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
firmware_name="OpenThread",
expected_installed_firmware_type=ApplicationType.SPINEL,
step_id="install_thread_firmware",
next_step_id="start_otbr_addon",
next_step_id="finish_thread_installation",
)
@@ -53,11 +53,15 @@
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::description%]",
|
||||
"menu_options": {
|
||||
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee%]",
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread%]"
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread%]",
|
||||
"pick_firmware_zigbee_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee_migrate%]",
|
||||
"pick_firmware_thread_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread_migrate%]"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_zigbee%]",
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread%]"
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread%]",
|
||||
"pick_firmware_zigbee_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_zigbee_migrate%]",
|
||||
"pick_firmware_thread_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread_migrate%]"
|
||||
}
|
||||
},
|
||||
"confirm_zigbee": {
|
||||
@@ -138,11 +142,15 @@
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::description%]",
|
||||
"menu_options": {
|
||||
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee%]",
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread%]"
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread%]",
|
||||
"pick_firmware_zigbee_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee_migrate%]",
|
||||
"pick_firmware_thread_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread_migrate%]"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_zigbee%]",
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread%]"
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread%]",
|
||||
"pick_firmware_zigbee_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_zigbee_migrate%]",
|
||||
"pick_firmware_thread_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread_migrate%]"
|
||||
}
|
||||
},
|
||||
"confirm_zigbee": {
|
||||
|
@@ -50,6 +50,8 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
STEP_PICK_FIRMWARE_THREAD = "pick_firmware_thread"
|
||||
STEP_PICK_FIRMWARE_ZIGBEE = "pick_firmware_zigbee"
|
||||
STEP_PICK_FIRMWARE_THREAD_MIGRATE = "pick_firmware_thread_migrate"
|
||||
STEP_PICK_FIRMWARE_ZIGBEE_MIGRATE = "pick_firmware_zigbee_migrate"
|
||||
|
||||
|
||||
class PickedFirmwareType(StrEnum):
|
||||
@@ -86,7 +88,7 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
self.addon_install_task: asyncio.Task | None = None
|
||||
self.addon_start_task: asyncio.Task | None = None
|
||||
self.addon_uninstall_task: asyncio.Task | None = None
|
||||
self.firmware_install_task: asyncio.Task | None = None
|
||||
self.firmware_install_task: asyncio.Task[None] | None = None
|
||||
self.installing_firmware_name: str | None = None
|
||||
|
||||
def _get_translation_placeholders(self) -> dict[str, str]:
|
||||
@@ -124,11 +126,23 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Pick Thread or Zigbee firmware."""
|
||||
# Determine if ZHA or Thread are already configured to present migrate options
|
||||
zha_entries = self.hass.config_entries.async_entries(ZHA_DOMAIN)
|
||||
otbr_entries = self.hass.config_entries.async_entries(OTBR_DOMAIN)
|
||||
|
||||
return self.async_show_menu(
|
||||
step_id="pick_firmware",
|
||||
menu_options=[
|
||||
STEP_PICK_FIRMWARE_ZIGBEE,
|
||||
STEP_PICK_FIRMWARE_THREAD,
|
||||
(
|
||||
STEP_PICK_FIRMWARE_ZIGBEE_MIGRATE
|
||||
if zha_entries
|
||||
else STEP_PICK_FIRMWARE_ZIGBEE
|
||||
),
|
||||
(
|
||||
STEP_PICK_FIRMWARE_THREAD_MIGRATE
|
||||
if otbr_entries
|
||||
else STEP_PICK_FIRMWARE_THREAD
|
||||
),
|
||||
],
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
)
|
||||
@@ -170,91 +184,17 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
step_id: str,
|
||||
next_step_id: str,
|
||||
) -> ConfigFlowResult:
|
||||
assert self._device is not None
|
||||
|
||||
"""Show progress dialog for installing firmware."""
|
||||
if not self.firmware_install_task:
|
||||
# Keep track of the firmware we're working with, for error messages
|
||||
self.installing_firmware_name = firmware_name
|
||||
|
||||
# Installing new firmware is only truly required if the wrong type is
|
||||
# installed: upgrading to the latest release of the current firmware type
|
||||
# isn't strictly necessary for functionality.
|
||||
firmware_install_required = self._probed_firmware_info is None or (
|
||||
self._probed_firmware_info.firmware_type
|
||||
!= expected_installed_firmware_type
|
||||
)
|
||||
|
||||
session = async_get_clientsession(self.hass)
|
||||
client = FirmwareUpdateClient(fw_update_url, session)
|
||||
|
||||
try:
|
||||
manifest = await client.async_update_data()
|
||||
fw_manifest = next(
|
||||
fw for fw in manifest.firmwares if fw.filename.startswith(fw_type)
|
||||
)
|
||||
except (StopIteration, TimeoutError, ClientError, ManifestMissing):
|
||||
_LOGGER.warning(
|
||||
"Failed to fetch firmware update manifest", exc_info=True
|
||||
)
|
||||
|
||||
# Not having internet access should not prevent setup
|
||||
if not firmware_install_required:
|
||||
_LOGGER.debug(
|
||||
"Skipping firmware upgrade due to index download failure"
|
||||
)
|
||||
return self.async_show_progress_done(next_step_id=next_step_id)
|
||||
|
||||
return self.async_show_progress_done(
|
||||
next_step_id="firmware_download_failed"
|
||||
)
|
||||
|
||||
if not firmware_install_required:
|
||||
assert self._probed_firmware_info is not None
|
||||
|
||||
# Make sure we do not downgrade the firmware
|
||||
fw_metadata = NabuCasaMetadata.from_json(fw_manifest.metadata)
|
||||
fw_version = fw_metadata.get_public_version()
|
||||
probed_fw_version = Version(self._probed_firmware_info.firmware_version)
|
||||
|
||||
if probed_fw_version >= fw_version:
|
||||
_LOGGER.debug(
|
||||
"Not downgrading firmware, installed %s is newer than available %s",
|
||||
probed_fw_version,
|
||||
fw_version,
|
||||
)
|
||||
return self.async_show_progress_done(next_step_id=next_step_id)
|
||||
|
||||
try:
|
||||
fw_data = await client.async_fetch_firmware(fw_manifest)
|
||||
except (TimeoutError, ClientError, ValueError):
|
||||
_LOGGER.warning("Failed to fetch firmware update", exc_info=True)
|
||||
|
||||
# If we cannot download new firmware, we shouldn't block setup
|
||||
if not firmware_install_required:
|
||||
_LOGGER.debug(
|
||||
"Skipping firmware upgrade due to image download failure"
|
||||
)
|
||||
return self.async_show_progress_done(next_step_id=next_step_id)
|
||||
|
||||
# Otherwise, fail
|
||||
return self.async_show_progress_done(
|
||||
next_step_id="firmware_download_failed"
|
||||
)
|
||||
|
||||
self.firmware_install_task = self.hass.async_create_task(
|
||||
async_flash_silabs_firmware(
|
||||
hass=self.hass,
|
||||
device=self._device,
|
||||
fw_data=fw_data,
|
||||
expected_installed_firmware_type=expected_installed_firmware_type,
|
||||
bootloader_reset_type=None,
|
||||
progress_callback=lambda offset, total: self.async_update_progress(
|
||||
offset / total
|
||||
),
|
||||
self._install_firmware(
|
||||
fw_update_url,
|
||||
fw_type,
|
||||
firmware_name,
|
||||
expected_installed_firmware_type,
|
||||
),
|
||||
f"Flash {firmware_name} firmware",
|
||||
f"Install {firmware_name} firmware",
|
||||
)
|
||||
|
||||
if not self.firmware_install_task.done():
|
||||
return self.async_show_progress(
|
||||
step_id=step_id,
|
||||
@@ -268,12 +208,141 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
|
||||
try:
|
||||
await self.firmware_install_task
|
||||
except AbortFlow as err:
|
||||
return self.async_show_progress_done(
|
||||
next_step_id=err.reason,
|
||||
)
|
||||
except HomeAssistantError:
|
||||
_LOGGER.exception("Failed to flash firmware")
|
||||
return self.async_show_progress_done(next_step_id="firmware_install_failed")
|
||||
finally:
|
||||
self.firmware_install_task = None
|
||||
|
||||
return self.async_show_progress_done(next_step_id=next_step_id)
|
||||
|
||||
async def _install_firmware(
|
||||
self,
|
||||
fw_update_url: str,
|
||||
fw_type: str,
|
||||
firmware_name: str,
|
||||
expected_installed_firmware_type: ApplicationType,
|
||||
) -> None:
|
||||
"""Install firmware."""
|
||||
if not await self._probe_firmware_info():
|
||||
raise AbortFlow(
|
||||
reason="unsupported_firmware",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
)
|
||||
|
||||
assert self._device is not None
|
||||
|
||||
# Keep track of the firmware we're working with, for error messages
|
||||
self.installing_firmware_name = firmware_name
|
||||
|
||||
# Installing new firmware is only truly required if the wrong type is
|
||||
# installed: upgrading to the latest release of the current firmware type
|
||||
# isn't strictly necessary for functionality.
|
||||
firmware_install_required = self._probed_firmware_info is None or (
|
||||
self._probed_firmware_info.firmware_type != expected_installed_firmware_type
|
||||
)
|
||||
|
||||
session = async_get_clientsession(self.hass)
|
||||
client = FirmwareUpdateClient(fw_update_url, session)
|
||||
|
||||
try:
|
||||
manifest = await client.async_update_data()
|
||||
fw_manifest = next(
|
||||
fw for fw in manifest.firmwares if fw.filename.startswith(fw_type)
|
||||
)
|
||||
except (StopIteration, TimeoutError, ClientError, ManifestMissing) as err:
|
||||
_LOGGER.warning("Failed to fetch firmware update manifest", exc_info=True)
|
||||
|
||||
# Not having internet access should not prevent setup
|
||||
if not firmware_install_required:
|
||||
_LOGGER.debug("Skipping firmware upgrade due to index download failure")
|
||||
return
|
||||
|
||||
raise AbortFlow(reason="firmware_download_failed") from err
|
||||
|
||||
if not firmware_install_required:
|
||||
assert self._probed_firmware_info is not None
|
||||
|
||||
# Make sure we do not downgrade the firmware
|
||||
fw_metadata = NabuCasaMetadata.from_json(fw_manifest.metadata)
|
||||
fw_version = fw_metadata.get_public_version()
|
||||
probed_fw_version = Version(self._probed_firmware_info.firmware_version)
|
||||
|
||||
if probed_fw_version >= fw_version:
|
||||
_LOGGER.debug(
|
||||
"Not downgrading firmware, installed %s is newer than available %s",
|
||||
probed_fw_version,
|
||||
fw_version,
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
fw_data = await client.async_fetch_firmware(fw_manifest)
|
||||
except (TimeoutError, ClientError, ValueError) as err:
|
||||
_LOGGER.warning("Failed to fetch firmware update", exc_info=True)
|
||||
|
||||
# If we cannot download new firmware, we shouldn't block setup
|
||||
if not firmware_install_required:
|
||||
_LOGGER.debug("Skipping firmware upgrade due to image download failure")
|
||||
return
|
||||
|
||||
# Otherwise, fail
|
||||
raise AbortFlow(reason="firmware_download_failed") from err
|
||||
|
||||
await async_flash_silabs_firmware(
|
||||
hass=self.hass,
|
||||
device=self._device,
|
||||
fw_data=fw_data,
|
||||
expected_installed_firmware_type=expected_installed_firmware_type,
|
||||
bootloader_reset_type=None,
|
||||
progress_callback=lambda offset, total: self.async_update_progress(
|
||||
offset / total
|
||||
),
|
||||
)
|
||||
|
||||
async def _configure_and_start_otbr_addon(self) -> None:
|
||||
"""Configure and start the OTBR addon."""
|
||||
|
||||
# Before we start the addon, confirm that the correct firmware is running
|
||||
# and populate `self._probed_firmware_info` with the correct information
|
||||
if not await self._probe_firmware_info(probe_methods=(ApplicationType.SPINEL,)):
|
||||
raise AbortFlow(
|
||||
"unsupported_firmware",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
)
|
||||
|
||||
otbr_manager = get_otbr_addon_manager(self.hass)
|
||||
addon_info = await self._async_get_addon_info(otbr_manager)
|
||||
|
||||
assert self._device is not None
|
||||
new_addon_config = {
|
||||
**addon_info.options,
|
||||
"device": self._device,
|
||||
"baudrate": 460800,
|
||||
"flow_control": True,
|
||||
"autoflash_firmware": False,
|
||||
}
|
||||
|
||||
_LOGGER.debug("Reconfiguring OTBR addon with %s", new_addon_config)
|
||||
|
||||
try:
|
||||
await otbr_manager.async_set_addon_options(new_addon_config)
|
||||
except AddonError as err:
|
||||
_LOGGER.error(err)
|
||||
raise AbortFlow(
|
||||
"addon_set_config_failed",
|
||||
description_placeholders={
|
||||
**self._get_translation_placeholders(),
|
||||
"addon_name": otbr_manager.addon_name,
|
||||
},
|
||||
) from err
|
||||
|
||||
await otbr_manager.async_start_addon_waiting()
|
||||
|
||||
async def async_step_firmware_download_failed(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -300,6 +369,15 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
},
|
||||
)
|
||||
|
||||
async def async_step_unsupported_firmware(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Abort when unsupported firmware is detected."""
|
||||
return self.async_abort(
|
||||
reason="unsupported_firmware",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
)
|
||||
|
||||
async def async_step_zigbee_installation_type(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -353,20 +431,42 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
|
||||
async def _async_continue_picked_firmware(self) -> ConfigFlowResult:
|
||||
"""Continue to the picked firmware step."""
|
||||
if not await self._probe_firmware_info():
|
||||
return self.async_abort(
|
||||
reason="unsupported_firmware",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
)
|
||||
|
||||
if self._picked_firmware_type == PickedFirmwareType.ZIGBEE:
|
||||
return await self.async_step_install_zigbee_firmware()
|
||||
|
||||
if result := await self._ensure_thread_addon_setup():
|
||||
return result
|
||||
return await self.async_step_prepare_thread_installation()
|
||||
|
||||
async def async_step_prepare_thread_installation(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Prepare for Thread installation by stopping the OTBR addon if needed."""
|
||||
if not is_hassio(self.hass):
|
||||
return self.async_abort(
|
||||
reason="not_hassio_thread",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
)
|
||||
|
||||
otbr_manager = get_otbr_addon_manager(self.hass)
|
||||
addon_info = await self._async_get_addon_info(otbr_manager)
|
||||
|
||||
if addon_info.state == AddonState.RUNNING:
|
||||
# Stop the addon before continuing to flash firmware
|
||||
await otbr_manager.async_stop_addon()
|
||||
|
||||
return await self.async_step_install_thread_firmware()
|
||||
|
||||
async def async_step_finish_thread_installation(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Finish Thread installation by starting the OTBR addon."""
|
||||
otbr_manager = get_otbr_addon_manager(self.hass)
|
||||
addon_info = await self._async_get_addon_info(otbr_manager)
|
||||
|
||||
if addon_info.state == AddonState.NOT_INSTALLED:
|
||||
return await self.async_step_install_otbr_addon()
|
||||
|
||||
return await self.async_step_start_otbr_addon()
|
||||
|
||||
async def async_step_pick_firmware_zigbee(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -374,6 +474,12 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
self._picked_firmware_type = PickedFirmwareType.ZIGBEE
|
||||
return await self.async_step_zigbee_installation_type()
|
||||
|
||||
async def async_step_pick_firmware_zigbee_migrate(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Pick Zigbee firmware. Migration is automatic."""
|
||||
return await self.async_step_pick_firmware_zigbee()
|
||||
|
||||
async def async_step_install_zigbee_firmware(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -436,39 +542,6 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
"""Continue the ZHA flow."""
|
||||
raise NotImplementedError
|
||||
|
||||
async def _ensure_thread_addon_setup(self) -> ConfigFlowResult | None:
|
||||
"""Ensure the OTBR addon is set up and not running."""
|
||||
|
||||
# We install the OTBR addon no matter what, since it is required to use Thread
|
||||
if not is_hassio(self.hass):
|
||||
return self.async_abort(
|
||||
reason="not_hassio_thread",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
)
|
||||
|
||||
otbr_manager = get_otbr_addon_manager(self.hass)
|
||||
addon_info = await self._async_get_addon_info(otbr_manager)
|
||||
|
||||
if addon_info.state == AddonState.NOT_INSTALLED:
|
||||
return await self.async_step_install_otbr_addon()
|
||||
|
||||
if addon_info.state == AddonState.RUNNING:
|
||||
# We only fail setup if we have an instance of OTBR running *and* it's
|
||||
# pointing to different hardware
|
||||
if addon_info.options["device"] != self._device:
|
||||
return self.async_abort(
|
||||
reason="otbr_addon_already_running",
|
||||
description_placeholders={
|
||||
**self._get_translation_placeholders(),
|
||||
"addon_name": otbr_manager.addon_name,
|
||||
},
|
||||
)
|
||||
|
||||
# Otherwise, stop the addon before continuing to flash firmware
|
||||
await otbr_manager.async_stop_addon()
|
||||
|
||||
return None
|
||||
|
||||
async def async_step_pick_firmware_thread(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -476,6 +549,12 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
self._picked_firmware_type = PickedFirmwareType.THREAD
|
||||
return await self._async_continue_picked_firmware()
|
||||
|
||||
async def async_step_pick_firmware_thread_migrate(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Pick Thread firmware. Migration is automatic."""
|
||||
return await self.async_step_pick_firmware_thread()
|
||||
|
||||
async def async_step_install_thread_firmware(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -518,7 +597,7 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
finally:
|
||||
self.addon_install_task = None
|
||||
|
||||
return self.async_show_progress_done(next_step_id="install_thread_firmware")
|
||||
return self.async_show_progress_done(next_step_id="finish_thread_installation")
|
||||
|
||||
async def async_step_start_otbr_addon(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -527,43 +606,8 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
otbr_manager = get_otbr_addon_manager(self.hass)
|
||||
|
||||
if not self.addon_start_task:
|
||||
# Before we start the addon, confirm that the correct firmware is running
|
||||
# and populate `self._probed_firmware_info` with the correct information
|
||||
if not await self._probe_firmware_info(
|
||||
probe_methods=(ApplicationType.SPINEL,)
|
||||
):
|
||||
return self.async_abort(
|
||||
reason="unsupported_firmware",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
)
|
||||
|
||||
addon_info = await self._async_get_addon_info(otbr_manager)
|
||||
|
||||
assert self._device is not None
|
||||
new_addon_config = {
|
||||
**addon_info.options,
|
||||
"device": self._device,
|
||||
"baudrate": 460800,
|
||||
"flow_control": True,
|
||||
"autoflash_firmware": False,
|
||||
}
|
||||
|
||||
_LOGGER.debug("Reconfiguring OTBR addon with %s", new_addon_config)
|
||||
|
||||
try:
|
||||
await otbr_manager.async_set_addon_options(new_addon_config)
|
||||
except AddonError as err:
|
||||
_LOGGER.error(err)
|
||||
raise AbortFlow(
|
||||
"addon_set_config_failed",
|
||||
description_placeholders={
|
||||
**self._get_translation_placeholders(),
|
||||
"addon_name": otbr_manager.addon_name,
|
||||
},
|
||||
) from err
|
||||
|
||||
self.addon_start_task = self.hass.async_create_task(
|
||||
otbr_manager.async_start_addon_waiting()
|
||||
self._configure_and_start_otbr_addon()
|
||||
)
|
||||
|
||||
if not self.addon_start_task.done():
|
||||
@@ -582,7 +626,9 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
except (AddonError, AbortFlow) as err:
|
||||
_LOGGER.error(err)
|
||||
self._failed_addon_name = otbr_manager.addon_name
|
||||
self._failed_addon_reason = "addon_start_failed"
|
||||
self._failed_addon_reason = (
|
||||
err.reason if isinstance(err, AbortFlow) else "addon_start_failed"
|
||||
)
|
||||
return self.async_show_progress_done(next_step_id="addon_operation_failed")
|
||||
finally:
|
||||
self.addon_start_task = None
|
||||
|
@@ -7,11 +7,15 @@
|
||||
"description": "You can use your {model} for a Zigbee or Thread network. Please check what type of devices you want to add to Home Assistant. You can always change this later.",
|
||||
"menu_options": {
|
||||
"pick_firmware_zigbee": "Use as Zigbee adapter",
|
||||
"pick_firmware_thread": "Use as Thread adapter"
|
||||
"pick_firmware_thread": "Use as Thread adapter",
|
||||
"pick_firmware_zigbee_migrate": "Migrate Zigbee to a new adapter",
|
||||
"pick_firmware_thread_migrate": "Migrate Thread to a new adapter"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"pick_firmware_zigbee": "Most common protocol.",
|
||||
"pick_firmware_thread": "Often used for Matter over Thread devices."
|
||||
"pick_firmware_thread": "Often used for Matter over Thread devices.",
|
||||
"pick_firmware_zigbee_migrate": "This will move your Zigbee network to the new adapter.",
|
||||
"pick_firmware_thread_migrate": "This will migrate your Thread Border Router to the new adapter."
|
||||
}
|
||||
},
|
||||
"confirm_zigbee": {
|
||||
|
@@ -106,7 +106,7 @@ class SkyConnectFirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
|
||||
firmware_name="OpenThread",
|
||||
expected_installed_firmware_type=ApplicationType.SPINEL,
|
||||
step_id="install_thread_firmware",
|
||||
next_step_id="start_otbr_addon",
|
||||
next_step_id="finish_thread_installation",
|
||||
)
|
||||
|
||||
|
||||
|
@@ -53,11 +53,15 @@
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::description%]",
|
||||
"menu_options": {
|
||||
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee%]",
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread%]"
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread%]",
|
||||
"pick_firmware_zigbee_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee_migrate%]",
|
||||
"pick_firmware_thread_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread_migrate%]"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_zigbee%]",
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread%]"
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread%]",
|
||||
"pick_firmware_zigbee_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_zigbee_migrate%]",
|
||||
"pick_firmware_thread_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread_migrate%]"
|
||||
}
|
||||
},
|
||||
"confirm_zigbee": {
|
||||
@@ -138,11 +142,15 @@
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::description%]",
|
||||
"menu_options": {
|
||||
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee%]",
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread%]"
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread%]",
|
||||
"pick_firmware_zigbee_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee_migrate%]",
|
||||
"pick_firmware_thread_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread_migrate%]"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_zigbee%]",
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread%]"
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread%]",
|
||||
"pick_firmware_zigbee_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_zigbee_migrate%]",
|
||||
"pick_firmware_thread_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread_migrate%]"
|
||||
}
|
||||
},
|
||||
"confirm_zigbee": {
|
||||
|
@@ -105,7 +105,7 @@ class YellowFirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
|
||||
firmware_name="OpenThread",
|
||||
expected_installed_firmware_type=ApplicationType.SPINEL,
|
||||
step_id="install_thread_firmware",
|
||||
next_step_id="start_otbr_addon",
|
||||
next_step_id="finish_thread_installation",
|
||||
)
|
||||
|
||||
|
||||
|
@@ -76,11 +76,15 @@
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::description%]",
|
||||
"menu_options": {
|
||||
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee%]",
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread%]"
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread%]",
|
||||
"pick_firmware_zigbee_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee_migrate%]",
|
||||
"pick_firmware_thread_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread_migrate%]"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_zigbee%]",
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread%]"
|
||||
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread%]",
|
||||
"pick_firmware_zigbee_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_zigbee_migrate%]",
|
||||
"pick_firmware_thread_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread_migrate%]"
|
||||
}
|
||||
},
|
||||
"confirm_zigbee": {
|
||||
|
@@ -337,7 +337,14 @@ class HKDevice:
# We need to explicitly poll characteristics to get fresh sensor readings
# before processing the entity map and creating devices.
# Use poll_all=True since entities haven't registered their characteristics yet.
await self.async_update(poll_all=True)
try:
await self.async_update(poll_all=True)
except ValueError as exc:
_LOGGER.debug(
"Accessory %s responded with unparsable response, first update was skipped: %s",
self.unique_id,
exc,
)

await self.async_process_entity_map()
@@ -14,6 +14,6 @@
"documentation": "https://www.home-assistant.io/integrations/homekit_controller",
"iot_class": "local_push",
"loggers": ["aiohomekit", "commentjson"],
"requirements": ["aiohomekit==3.2.17"],
"requirements": ["aiohomekit==3.2.18"],
"zeroconf": ["_hap._tcp.local.", "_hap._udp.local."]
}
@@ -124,7 +124,7 @@ SCHEMA_SET_HOME_COOLING_MODE = vol.Schema(
def async_setup_services(hass: HomeAssistant) -> None:
"""Set up the HomematicIP Cloud services."""

@verify_domain_control(hass, DOMAIN)
@verify_domain_control(DOMAIN)
async def async_call_hmipc_service(service: ServiceCall) -> None:
"""Call correct HomematicIP Cloud service."""
service_name = service.service
@@ -88,7 +88,7 @@
"message": "Honeywell set temperature failed: invalid temperature {temperature}"
},
"temp_failed_range": {
"message": "Honeywell set temperature failed: temperature out of range. Mode: {mode}, Heat Temperuature: {heat}, Cool Temperature: {cool}"
"message": "Honeywell set temperature failed: temperature out of range. Mode: {mode}, Heat temperature: {heat}, Cool temperature: {cool}"
},
"set_hold_failed": {
"message": "Honeywell could not set permanent hold"
@@ -10,6 +10,6 @@
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["aiohue"],
"requirements": ["aiohue==4.7.5"],
"requirements": ["aiohue==4.8.0"],
"zeroconf": ["_hue._tcp.local."]
}
@@ -64,7 +64,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
hass.services.async_register(
DOMAIN,
SERVICE_HUE_ACTIVATE_SCENE,
verify_domain_control(hass, DOMAIN)(hue_activate_scene),
verify_domain_control(DOMAIN)(hue_activate_scene),
schema=vol.Schema(
{
vol.Required(ATTR_GROUP_NAME): cv.string,
@@ -26,8 +26,8 @@
}
},
"verification_code": {
"title": "iCloud verification code",
"description": "Please enter the verification code you just received from iCloud",
"title": "Apple Account code",
"description": "Please enter the verification code you just received from Apple",
"data": {
"verification_code": "Verification code"
}
@@ -47,11 +47,11 @@
"services": {
"update": {
"name": "Update",
"description": "Asks for a state update of all devices linked to an iCloud account.",
"description": "Asks for a state update of all devices linked to an Apple Account.",
"fields": {
"account": {
"name": "Account",
"description": "Your iCloud account username (email) or account name."
"description": "Your Apple Account username (email)."
}
}
},
@@ -9,9 +9,8 @@ from aioimmich.assets.models import ImmichAsset
from aioimmich.exceptions import ImmichError

from homeassistant.components.http import HomeAssistantView
from homeassistant.components.media_player import MediaClass
from homeassistant.components.media_player import BrowseError, MediaClass
from homeassistant.components.media_source import (
BrowseError,
BrowseMediaSource,
MediaSource,
MediaSourceItem,
homeassistant/components/input_select/analytics.py (new file, 28 lines)
@@ -0,0 +1,28 @@
"""Analytics platform."""

from homeassistant.components.analytics import (
AnalyticsInput,
AnalyticsModifications,
EntityAnalyticsModifications,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er


async def async_modify_analytics(
hass: HomeAssistant, analytics_input: AnalyticsInput
) -> AnalyticsModifications:
"""Modify the analytics."""
ent_reg = er.async_get(hass)

entities: dict[str, EntityAnalyticsModifications] = {}
for entity_id in analytics_input.entity_ids:
entity_entry = ent_reg.entities[entity_id]
if entity_entry.capabilities is not None:
capabilities = dict(entity_entry.capabilities)
capabilities["options"] = len(capabilities["options"])
entities[entity_id] = EntityAnalyticsModifications(
capabilities=capabilities
)

return AnalyticsModifications(entities=entities)
@@ -36,6 +36,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry,
options={**entry.options, CONF_SOURCE_SENSOR: source_entity_id},
)
hass.config_entries.async_schedule_reload(entry.entry_id)

entry.async_on_unload(
async_handle_source_entity_changes(
@@ -51,7 +52,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
)

await hass.config_entries.async_forward_entry_setups(entry, (Platform.SENSOR,))
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
return True


@@ -89,13 +89,6 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
return True


async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Update listener, called when the config entry options are changed."""
# Remove device link for entry, the source device may have changed.
# The link will be recreated after load.
await hass.config_entries.async_reload(entry.entry_id)


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, (Platform.SENSOR,))
@@ -151,6 +151,7 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):

config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True

def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
"""Return config entry title."""
homeassistant/components/irm_kmi/__init__.py (new file, 40 lines)
@@ -0,0 +1,40 @@
|
||||
"""Integration for IRM KMI weather."""
|
||||
|
||||
import logging
|
||||
|
||||
from irm_kmi_api import IrmKmiApiClientHa
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import IRM_KMI_TO_HA_CONDITION_MAP, PLATFORMS, USER_AGENT
|
||||
from .coordinator import IrmKmiConfigEntry, IrmKmiCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: IrmKmiConfigEntry) -> bool:
|
||||
"""Set up this integration using UI."""
|
||||
api_client = IrmKmiApiClientHa(
|
||||
session=async_get_clientsession(hass),
|
||||
user_agent=USER_AGENT,
|
||||
cdt_map=IRM_KMI_TO_HA_CONDITION_MAP,
|
||||
)
|
||||
|
||||
entry.runtime_data = IrmKmiCoordinator(hass, entry, api_client)
|
||||
|
||||
await entry.runtime_data.async_config_entry_first_refresh()
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: IrmKmiConfigEntry) -> bool:
|
||||
"""Handle removal of an entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
async def async_reload_entry(hass: HomeAssistant, entry: IrmKmiConfigEntry) -> None:
|
||||
"""Reload config entry."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
homeassistant/components/irm_kmi/config_flow.py (new file, 132 lines)
@@ -0,0 +1,132 @@
|
||||
"""Config flow to set up IRM KMI integration via the UI."""
|
||||
|
||||
import logging
|
||||
|
||||
from irm_kmi_api import IrmKmiApiClient, IrmKmiApiError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import (
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlow,
|
||||
OptionsFlowWithReload,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
ATTR_LATITUDE,
|
||||
ATTR_LONGITUDE,
|
||||
CONF_LOCATION,
|
||||
CONF_UNIQUE_ID,
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.selector import (
|
||||
LocationSelector,
|
||||
SelectSelector,
|
||||
SelectSelectorConfig,
|
||||
SelectSelectorMode,
|
||||
)
|
||||
|
||||
from .const import (
|
||||
CONF_LANGUAGE_OVERRIDE,
|
||||
CONF_LANGUAGE_OVERRIDE_OPTIONS,
|
||||
DOMAIN,
|
||||
OUT_OF_BENELUX,
|
||||
USER_AGENT,
|
||||
)
|
||||
from .coordinator import IrmKmiConfigEntry
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class IrmKmiConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Configuration flow for the IRM KMI integration."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(_config_entry: IrmKmiConfigEntry) -> OptionsFlow:
|
||||
"""Create the options flow."""
|
||||
return IrmKmiOptionFlow()
|
||||
|
||||
async def async_step_user(self, user_input: dict | None = None) -> ConfigFlowResult:
|
||||
"""Define the user step of the configuration flow."""
|
||||
errors: dict = {}
|
||||
|
||||
default_location = {
|
||||
ATTR_LATITUDE: self.hass.config.latitude,
|
||||
ATTR_LONGITUDE: self.hass.config.longitude,
|
||||
}
|
||||
|
||||
if user_input:
|
||||
_LOGGER.debug("Provided config user is: %s", user_input)
|
||||
|
||||
lat: float = user_input[CONF_LOCATION][ATTR_LATITUDE]
|
||||
lon: float = user_input[CONF_LOCATION][ATTR_LONGITUDE]
|
||||
|
||||
try:
|
||||
api_data = await IrmKmiApiClient(
|
||||
session=async_get_clientsession(self.hass),
|
||||
user_agent=USER_AGENT,
|
||||
).get_forecasts_coord({"lat": lat, "long": lon})
|
||||
except IrmKmiApiError:
|
||||
_LOGGER.exception(
|
||||
"Encountered an unexpected error while configuring the integration"
|
||||
)
|
||||
return self.async_abort(reason="api_error")
|
||||
|
||||
if api_data["cityName"] in OUT_OF_BENELUX:
|
||||
errors[CONF_LOCATION] = "out_of_benelux"
|
||||
|
||||
if not errors:
|
||||
name: str = api_data["cityName"]
|
||||
country: str = api_data["country"]
|
||||
unique_id: str = f"{name.lower()} {country.lower()}"
|
||||
await self.async_set_unique_id(unique_id)
|
||||
self._abort_if_unique_id_configured()
|
||||
user_input[CONF_UNIQUE_ID] = unique_id
|
||||
|
||||
return self.async_create_entry(title=name, data=user_input)
|
||||
|
||||
default_location = user_input[CONF_LOCATION]
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(
|
||||
CONF_LOCATION, default=default_location
|
||||
): LocationSelector()
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
|
||||
class IrmKmiOptionFlow(OptionsFlowWithReload):
|
||||
"""Option flow for the IRM KMI integration, help change the options once the integration was configured."""
|
||||
|
||||
async def async_step_init(self, user_input: dict | None = None) -> ConfigFlowResult:
|
||||
"""Manage the options."""
|
||||
if user_input is not None:
|
||||
_LOGGER.debug("Provided config user is: %s", user_input)
|
||||
return self.async_create_entry(data=user_input)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="init",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_LANGUAGE_OVERRIDE,
|
||||
default=self.config_entry.options.get(
|
||||
CONF_LANGUAGE_OVERRIDE, "none"
|
||||
),
|
||||
): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=CONF_LANGUAGE_OVERRIDE_OPTIONS,
|
||||
mode=SelectSelectorMode.DROPDOWN,
|
||||
translation_key=CONF_LANGUAGE_OVERRIDE,
|
||||
)
|
||||
)
|
||||
}
|
||||
),
|
||||
)
|
homeassistant/components/irm_kmi/const.py (new file, 102 lines)
@@ -0,0 +1,102 @@
|
||||
"""Constants for the IRM KMI integration."""
|
||||
|
||||
from typing import Final
|
||||
|
||||
from homeassistant.components.weather import (
|
||||
ATTR_CONDITION_CLEAR_NIGHT,
|
||||
ATTR_CONDITION_CLOUDY,
|
||||
ATTR_CONDITION_FOG,
|
||||
ATTR_CONDITION_LIGHTNING_RAINY,
|
||||
ATTR_CONDITION_PARTLYCLOUDY,
|
||||
ATTR_CONDITION_POURING,
|
||||
ATTR_CONDITION_RAINY,
|
||||
ATTR_CONDITION_SNOWY,
|
||||
ATTR_CONDITION_SNOWY_RAINY,
|
||||
ATTR_CONDITION_SUNNY,
|
||||
)
|
||||
from homeassistant.const import Platform, __version__
|
||||
|
||||
DOMAIN: Final = "irm_kmi"
|
||||
PLATFORMS: Final = [Platform.WEATHER]
|
||||
|
||||
OUT_OF_BENELUX: Final = [
|
||||
"außerhalb der Benelux (Brussels)",
|
||||
"Hors de Belgique (Bxl)",
|
||||
"Outside the Benelux (Brussels)",
|
||||
"Buiten de Benelux (Brussel)",
|
||||
]
|
||||
LANGS: Final = ["en", "fr", "nl", "de"]
|
||||
|
||||
CONF_LANGUAGE_OVERRIDE: Final = "language_override"
|
||||
CONF_LANGUAGE_OVERRIDE_OPTIONS: Final = ["none", "fr", "nl", "de", "en"]
|
||||
|
||||
# Dict to map ('ww', 'dayNight') tuple from IRM KMI to HA conditions.
|
||||
IRM_KMI_TO_HA_CONDITION_MAP: Final = {
|
||||
(0, "d"): ATTR_CONDITION_SUNNY,
|
||||
(0, "n"): ATTR_CONDITION_CLEAR_NIGHT,
|
||||
(1, "d"): ATTR_CONDITION_SUNNY,
|
||||
(1, "n"): ATTR_CONDITION_CLEAR_NIGHT,
|
||||
(2, "d"): ATTR_CONDITION_LIGHTNING_RAINY,
|
||||
(2, "n"): ATTR_CONDITION_LIGHTNING_RAINY,
|
||||
(3, "d"): ATTR_CONDITION_PARTLYCLOUDY,
|
||||
(3, "n"): ATTR_CONDITION_PARTLYCLOUDY,
|
||||
(4, "d"): ATTR_CONDITION_POURING,
|
||||
(4, "n"): ATTR_CONDITION_POURING,
|
||||
(5, "d"): ATTR_CONDITION_LIGHTNING_RAINY,
|
||||
(5, "n"): ATTR_CONDITION_LIGHTNING_RAINY,
|
||||
(6, "d"): ATTR_CONDITION_POURING,
|
||||
(6, "n"): ATTR_CONDITION_POURING,
|
||||
(7, "d"): ATTR_CONDITION_LIGHTNING_RAINY,
|
||||
(7, "n"): ATTR_CONDITION_LIGHTNING_RAINY,
|
||||
(8, "d"): ATTR_CONDITION_SNOWY_RAINY,
|
||||
(8, "n"): ATTR_CONDITION_SNOWY_RAINY,
|
||||
(9, "d"): ATTR_CONDITION_SNOWY_RAINY,
|
||||
(9, "n"): ATTR_CONDITION_SNOWY_RAINY,
|
||||
(10, "d"): ATTR_CONDITION_LIGHTNING_RAINY,
|
||||
(10, "n"): ATTR_CONDITION_LIGHTNING_RAINY,
|
||||
(11, "d"): ATTR_CONDITION_SNOWY,
|
||||
(11, "n"): ATTR_CONDITION_SNOWY,
|
||||
(12, "d"): ATTR_CONDITION_SNOWY,
|
||||
(12, "n"): ATTR_CONDITION_SNOWY,
|
||||
(13, "d"): ATTR_CONDITION_LIGHTNING_RAINY,
|
||||
(13, "n"): ATTR_CONDITION_LIGHTNING_RAINY,
|
||||
(14, "d"): ATTR_CONDITION_CLOUDY,
|
||||
(14, "n"): ATTR_CONDITION_CLOUDY,
|
||||
(15, "d"): ATTR_CONDITION_CLOUDY,
|
||||
(15, "n"): ATTR_CONDITION_CLOUDY,
|
||||
(16, "d"): ATTR_CONDITION_POURING,
|
||||
(16, "n"): ATTR_CONDITION_POURING,
|
||||
(17, "d"): ATTR_CONDITION_LIGHTNING_RAINY,
|
||||
(17, "n"): ATTR_CONDITION_LIGHTNING_RAINY,
|
||||
(18, "d"): ATTR_CONDITION_RAINY,
|
||||
(18, "n"): ATTR_CONDITION_RAINY,
|
||||
(19, "d"): ATTR_CONDITION_POURING,
|
||||
(19, "n"): ATTR_CONDITION_POURING,
|
||||
(20, "d"): ATTR_CONDITION_SNOWY_RAINY,
|
||||
(20, "n"): ATTR_CONDITION_SNOWY_RAINY,
|
||||
(21, "d"): ATTR_CONDITION_RAINY,
|
||||
(21, "n"): ATTR_CONDITION_RAINY,
|
||||
(22, "d"): ATTR_CONDITION_SNOWY,
|
||||
(22, "n"): ATTR_CONDITION_SNOWY,
|
||||
(23, "d"): ATTR_CONDITION_SNOWY,
|
||||
(23, "n"): ATTR_CONDITION_SNOWY,
|
||||
(24, "d"): ATTR_CONDITION_FOG,
|
||||
(24, "n"): ATTR_CONDITION_FOG,
|
||||
(25, "d"): ATTR_CONDITION_FOG,
|
||||
(25, "n"): ATTR_CONDITION_FOG,
|
||||
(26, "d"): ATTR_CONDITION_FOG,
|
||||
(26, "n"): ATTR_CONDITION_FOG,
|
||||
(27, "d"): ATTR_CONDITION_FOG,
|
||||
(27, "n"): ATTR_CONDITION_FOG,
|
||||
}
|
||||
|
||||
IRM_KMI_NAME: Final = {
|
||||
"fr": "Institut Royal Météorologique de Belgique",
|
||||
"nl": "Koninklijk Meteorologisch Instituut van België",
|
||||
"de": "Königliche Meteorologische Institut von Belgien",
|
||||
"en": "Royal Meteorological Institute of Belgium",
|
||||
}
|
||||
|
||||
USER_AGENT: Final = (
|
||||
f"https://www.home-assistant.io/integrations/irm_kmi (version {__version__})"
|
||||
)
|
homeassistant/components/irm_kmi/coordinator.py (new file, 95 lines)
@@ -0,0 +1,95 @@
|
||||
"""DataUpdateCoordinator for the IRM KMI integration."""
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from irm_kmi_api import IrmKmiApiClientHa, IrmKmiApiError
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE, CONF_LOCATION
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import (
|
||||
TimestampDataUpdateCoordinator,
|
||||
UpdateFailed,
|
||||
)
|
||||
from homeassistant.util import dt as dt_util
|
||||
from homeassistant.util.dt import utcnow
|
||||
|
||||
from .data import ProcessedCoordinatorData
|
||||
from .utils import preferred_language
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type IrmKmiConfigEntry = ConfigEntry[IrmKmiCoordinator]
|
||||
|
||||
|
||||
class IrmKmiCoordinator(TimestampDataUpdateCoordinator[ProcessedCoordinatorData]):
|
||||
"""Coordinator to update data from IRM KMI."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
entry: IrmKmiConfigEntry,
|
||||
api_client: IrmKmiApiClientHa,
|
||||
) -> None:
|
||||
"""Initialize the coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=entry,
|
||||
name="IRM KMI weather",
|
||||
update_interval=timedelta(minutes=7),
|
||||
)
|
||||
self._api = api_client
|
||||
self._location = entry.data[CONF_LOCATION]
|
||||
|
||||
async def _async_update_data(self) -> ProcessedCoordinatorData:
|
||||
"""Fetch data from API endpoint.
|
||||
|
||||
This is the place to pre-process the data to lookup tables so entities can quickly look up their data.
|
||||
:return: ProcessedCoordinatorData
|
||||
"""
|
||||
|
||||
self._api.expire_cache()
|
||||
|
||||
try:
|
||||
await self._api.refresh_forecasts_coord(
|
||||
{
|
||||
"lat": self._location[ATTR_LATITUDE],
|
||||
"long": self._location[ATTR_LONGITUDE],
|
||||
}
|
||||
)
|
||||
        except IrmKmiApiError as err:
            # Tolerate transient API errors: keep serving the last processed
            # data until it is older than 2.5 update intervals.
            if (
                self.last_update_success_time is not None
                and self.update_interval is not None
                and utcnow() - self.last_update_success_time
                < timedelta(seconds=2.5 * self.update_interval.seconds)
            ):
                return self.data

            _LOGGER.warning(
                "Could not connect to the API since %s", self.last_update_success_time
            )
            raise UpdateFailed(
                f"Error communicating with API for general forecast: {err}. "
                f"Last success time is: {self.last_update_success_time}"
            ) from err

        if not self.last_update_success:
            _LOGGER.warning("Successfully reconnected to the API")

        return await self.process_api_data()

    async def process_api_data(self) -> ProcessedCoordinatorData:
        """From the API data, create the object that will be used in the entities."""
        tz = await dt_util.async_get_time_zone("Europe/Brussels")
        lang = preferred_language(self.hass, self.config_entry)

        return ProcessedCoordinatorData(
            current_weather=self._api.get_current_weather(tz),
            daily_forecast=self._api.get_daily_forecast(tz, lang),
            hourly_forecast=self._api.get_hourly_forecast(tz),
            country=self._api.get_country(),
        )
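
The coordinator polls every 7 minutes and, on an IrmKmiApiError, keeps returning the previously processed data until it is older than 2.5 intervals (17.5 minutes), after which it raises UpdateFailed. The entity module below reads the coordinator from `entry.runtime_data`, so the entry setup has to create and store it; that setup file is not shown in this part of the diff, and the sketch below is an assumption about it, including the IrmKmiApiClientHa constructor arguments.

# Illustrative sketch only: async_setup_entry is not part of this section of the
# diff, so the wiring and the IrmKmiApiClientHa arguments are assumptions.
from irm_kmi_api import IrmKmiApiClientHa

from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import IRM_KMI_TO_HA_CONDITION_MAP, USER_AGENT
from .coordinator import IrmKmiConfigEntry, IrmKmiCoordinator


async def async_setup_entry(hass: HomeAssistant, entry: IrmKmiConfigEntry) -> bool:
    """Create the coordinator, refresh it once, and store it as runtime data."""
    api_client = IrmKmiApiClientHa(
        session=async_get_clientsession(hass),  # assumed constructor signature
        user_agent=USER_AGENT,
        cdt_map=IRM_KMI_TO_HA_CONDITION_MAP,
    )
    coordinator = IrmKmiCoordinator(hass, entry, api_client)
    await coordinator.async_config_entry_first_refresh()
    entry.runtime_data = coordinator
    return True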
17  homeassistant/components/irm_kmi/data.py  Normal file
@@ -0,0 +1,17 @@
"""Define data classes for the IRM KMI integration."""
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
from irm_kmi_api import CurrentWeatherData, ExtendedForecast
|
||||
|
||||
from homeassistant.components.weather import Forecast
|
||||
|
||||
|
||||
@dataclass
|
||||
class ProcessedCoordinatorData:
|
||||
"""Dataclass that will be exposed to the entities consuming data from an IrmKmiCoordinator."""
|
||||
|
||||
current_weather: CurrentWeatherData
|
||||
country: str
|
||||
hourly_forecast: list[Forecast] = field(default_factory=list)
|
||||
daily_forecast: list[ExtendedForecast] = field(default_factory=list)
|
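
Both forecast lists default to empty, so only `current_weather` and `country` are required to build the container. A minimal construction sketch (e.g. for a test fixture) is below; it is not part of the diff, and treating CurrentWeatherData as a mapping that can be created empty is an assumption about the irm_kmi_api types.

# Illustrative only: construct the container relying on the dataclass defaults.
# Building CurrentWeatherData with no arguments is an assumption about irm_kmi_api.
from irm_kmi_api import CurrentWeatherData

from .data import ProcessedCoordinatorData

data = ProcessedCoordinatorData(current_weather=CurrentWeatherData(), country="BE")
assert data.hourly_forecast == []
assert data.daily_forecast == []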
28  homeassistant/components/irm_kmi/entity.py  Normal file
@@ -0,0 +1,28 @@
"""Base class shared among IRM KMI entities."""
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN, IRM_KMI_NAME
|
||||
from .coordinator import IrmKmiConfigEntry, IrmKmiCoordinator
|
||||
from .utils import preferred_language
|
||||
|
||||
|
||||
class IrmKmiBaseEntity(CoordinatorEntity[IrmKmiCoordinator]):
|
||||
"""Base methods for IRM KMI entities."""
|
||||
|
||||
_attr_attribution = (
|
||||
"Weather data from the Royal Meteorological Institute of Belgium meteo.be"
|
||||
)
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(self, entry: IrmKmiConfigEntry) -> None:
|
||||
"""Init base properties for IRM KMI entities."""
|
||||
coordinator = entry.runtime_data
|
||||
super().__init__(coordinator)
|
||||
|
||||
self._attr_device_info = DeviceInfo(
|
||||
entry_type=DeviceEntryType.SERVICE,
|
||||
identifiers={(DOMAIN, entry.entry_id)},
|
||||
manufacturer=IRM_KMI_NAME.get(preferred_language(self.hass, entry)),
|
||||
)
|
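
Concrete platforms inherit the attribution and device info from this base class. A hedged sketch of a subclass follows; the weather platform itself is not included in this section of the diff, so the class name, the unique-ID scheme, and the assumption that `current_weather` exposes a "condition" key are illustrative only.

# Illustrative only: a platform entity built on IrmKmiBaseEntity.
# The real weather platform is not shown here; the names below are hypothetical.
from homeassistant.components.weather import WeatherEntity

from .coordinator import IrmKmiConfigEntry
from .entity import IrmKmiBaseEntity


class IrmKmiWeather(IrmKmiBaseEntity, WeatherEntity):
    """Weather entity fed by the IRM KMI coordinator."""

    def __init__(self, entry: IrmKmiConfigEntry) -> None:
        """Initialize with a unique ID derived from the config entry (assumed scheme)."""
        super().__init__(entry)
        self._attr_unique_id = entry.entry_id

    @property
    def condition(self) -> str | None:
        """Return the current condition from the processed coordinator data."""
        # Assumes CurrentWeatherData is mapping-like with a "condition" key.
        return self.coordinator.data.current_weather.get("condition")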
13  homeassistant/components/irm_kmi/manifest.json  Normal file
@@ -0,0 +1,13 @@
{
  "domain": "irm_kmi",
  "name": "IRM KMI Weather Belgium",
  "codeowners": ["@jdejaegh"],
  "config_flow": true,
  "dependencies": ["zone"],
  "documentation": "https://www.home-assistant.io/integrations/irm_kmi",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["irm_kmi_api"],
  "quality_scale": "bronze",
  "requirements": ["irm-kmi-api==1.1.0"]
}
86  homeassistant/components/irm_kmi/quality_scale.yaml  Normal file
@@ -0,0 +1,86 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: >
      No service action implemented in this integration at the moment.
  appropriate-polling:
    status: done
    comment: >
      Polling interval is set to 7 minutes.
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: >
      No service action implemented in this integration at the moment.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup: done
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions:
    status: exempt
    comment: >
      No service action implemented in this integration at the moment.
  config-entry-unloading: done
  docs-configuration-parameters: done
  docs-installation-parameters: done
  entity-unavailable: done
  integration-owner: done
  log-when-unavailable: done
  parallel-updates: done
  reauthentication-flow:
    status: exempt
    comment: >
      There is no authentication for this integration.
  test-coverage: todo

  # Gold
  devices: done
  diagnostics: todo
  discovery-update-info:
    status: exempt
    comment: >
      The integration does not look for devices on the network. It uses an online API.
  discovery:
    status: exempt
    comment: >
      The integration does not look for devices on the network. It uses an online API.
  docs-data-update: done
  docs-examples: todo
  docs-known-limitations: done
  docs-supported-devices:
    status: exempt
    comment: >
      This integration does not integrate physical devices.
  docs-supported-functions: done
  docs-troubleshooting: todo
  docs-use-cases: done
  dynamic-devices: done
  entity-category: todo
  entity-device-class: todo
  entity-disabled-by-default: todo
  entity-translations: todo
  exception-translations: todo
  icon-translations: todo
  reconfiguration-flow:
    status: exempt
    comment: >
      There is no configuration per se, just a zone to pick.
  repair-issues: done
  stale-devices: done

  # Platinum
  async-dependency: done
  inject-websession: done
  strict-typing: todo
50  homeassistant/components/irm_kmi/strings.json  Normal file
@@ -0,0 +1,50 @@
{
  "title": "Royal Meteorological Institute of Belgium",
  "common": {
    "language_override_description": "Override the Home Assistant language for the textual weather forecast."
  },
  "config": {
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_location%]",
      "api_error": "[%key:common::config_flow::error::cannot_connect%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "step": {
      "user": {
        "data": {
          "location": "[%key:common::config_flow::data::location%]"
        },
        "data_description": {
          "location": "[%key:common::config_flow::data::location%]"
        }
      }
    },
    "error": {
      "out_of_benelux": "The location is outside of Benelux. Pick a location in Benelux."
    }
  },
  "selector": {
    "language_override": {
      "options": {
        "none": "Follow Home Assistant server language",
        "fr": "French",
        "nl": "Dutch",
        "de": "German",
        "en": "English"
      }
    }
  },
  "options": {
    "step": {
      "init": {
        "title": "Options",
        "data": {
          "language_override": "[%key:common::config_flow::data::language%]"
        },
        "data_description": {
          "language_override": "[%key:component::irm_kmi::common::language_override_description%]"
        }
      }
    }
  }
}