Mirror of https://github.com/home-assistant/core.git, synced 2025-11-05 00:49:37 +00:00.

Compare commits: 42 commits, fix-data-e...matter-err
Commits compared (SHA1):

08eee8d479, 6bbaae7235, 86a5dff3f5, 34e137005d, 67baa2c737, f9c504fcde,
4b25d04326, 4e3eb44e69, 6c84d25024, c4dc4135e1, 78bbdf108b, 47397fd736,
2b62d2d636, 350f99baab, 1245385371, c86852eb21, ad635d2eff, cf0e2b85dd,
b9e7f1c628, 079d65acea, 162737a473, d074c5b7c8, d6ae0c142e, 58182a344d,
1a1f3d6b4e, 71589d212f, 9364a40fd2, 7ead8f9154, 09ac47b35f, 404393d6fe,
de5a26830d, c0b0ce0c16, 88e27d9017, a37ba6dba4, f38c0d510e, be9fa9a606,
2bc6e728a3, 4e4a0d1e28, c860aa1531, 39c73cbbbd, ca7332f597, eafedeb12a
.github/workflows/ci.yaml (vendored, 8 changes)

@@ -364,13 +364,13 @@ jobs:
       - name: Run check-json
         run: |
           . venv/bin/activate
-          pre-commit run --hook-stage manual check-json --all-files
+          pre-commit run --hook-stage manual check-json --all-files --show-diff-on-failure

       - name: Run prettier (fully)
         if: needs.info.outputs.test_full_suite == 'true'
         run: |
           . venv/bin/activate
-          pre-commit run --hook-stage manual prettier --all-files
+          pre-commit run --hook-stage manual prettier --all-files --show-diff-on-failure

       - name: Run prettier (partially)
         if: needs.info.outputs.test_full_suite == 'false'
@@ -378,7 +378,7 @@ jobs:
         run: |
           . venv/bin/activate
           shopt -s globstar
-          pre-commit run --hook-stage manual prettier --files {homeassistant,tests}/components/${{ needs.info.outputs.integrations_glob }}/{*,**/*}
+          pre-commit run --hook-stage manual prettier --show-diff-on-failure --files {homeassistant,tests}/components/${{ needs.info.outputs.integrations_glob }}/{*,**/*}

       - name: Register check executables problem matcher
         run: |
@@ -386,7 +386,7 @@ jobs:
       - name: Run executables check
         run: |
           . venv/bin/activate
-          pre-commit run --hook-stage manual check-executables-have-shebangs --all-files
+          pre-commit run --hook-stage manual check-executables-have-shebangs --all-files --show-diff-on-failure

       - name: Register codespell problem matcher
         run: |
homeassistant/components/blink/__init__.py

@@ -4,7 +4,6 @@ from copy import deepcopy
 import logging
 from typing import Any

-from aiohttp import ClientError
 from blinkpy.auth import Auth
 from blinkpy.blinkpy import Blink
 import voluptuous as vol
@@ -18,7 +17,6 @@ from homeassistant.const import (
     CONF_SCAN_INTERVAL,
 )
 from homeassistant.core import HomeAssistant, callback
-from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
 from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.typing import ConfigType
@@ -83,22 +81,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: BlinkConfigEntry) -> boo
     session = async_get_clientsession(hass)
     blink = Blink(session=session)
     auth_data = deepcopy(dict(entry.data))
-    blink.auth = Auth(auth_data, no_prompt=True, session=session)
+    blink.auth = Auth(
+        auth_data,
+        no_prompt=True,
+        session=session,
+        callback=lambda: _async_update_entry_data(hass, entry, blink),
+    )
     blink.refresh_rate = entry.options.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)
     coordinator = BlinkUpdateCoordinator(hass, entry, blink)

-    try:
-        await blink.start()
-    except (ClientError, TimeoutError) as ex:
-        raise ConfigEntryNotReady("Can not connect to host") from ex
-
-    if blink.auth.check_key_required():
-        _LOGGER.debug("Attempting a reauth flow")
-        raise ConfigEntryAuthFailed("Need 2FA for Blink")
-
-    if not blink.available:
-        raise ConfigEntryNotReady
-
     await coordinator.async_config_entry_first_refresh()

     entry.runtime_data = coordinator
@@ -108,6 +99,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: BlinkConfigEntry) -> boo
     return True


 @callback
+def _async_update_entry_data(
+    hass: HomeAssistant, entry: BlinkConfigEntry, blink: Blink
+) -> None:
+    """Update the config entry data after token refresh."""
+    hass.config_entries.async_update_entry(entry, data=blink.auth.login_attributes)
+
+
+@callback
 def _async_import_options_from_data_if_missing(
     hass: HomeAssistant, entry: BlinkConfigEntry
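The setup change above hands a `callback=` into blinkpy's `Auth` so that, whenever the library refreshes its tokens, the new credentials are written back to the config entry. A minimal, hedged sketch of that persist-on-refresh wiring (the `auth_state` shape is whatever serializable auth data the client exposes; for Blink it is `auth.login_attributes`):

```python
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback


@callback
def _async_persist_tokens(
    hass: HomeAssistant, entry: ConfigEntry, auth_state: dict
) -> None:
    """Write refreshed credentials back into the config entry.

    async_update_entry skips the write (and listener notifications) when
    nothing actually changed, so invoking this on every refresh is cheap.
    """
    hass.config_entries.async_update_entry(entry, data=auth_state)
```

In the diff above this role is played by `_async_update_entry_data`, registered as `Auth(..., callback=lambda: _async_update_entry_data(hass, entry, blink))` so blinkpy invokes it after each token refresh.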
homeassistant/components/blink/alarm_control_panel.py

@@ -4,6 +4,7 @@ from __future__ import annotations

 import logging

+from blinkpy.auth import UnauthorizedError
 from blinkpy.blinkpy import Blink, BlinkSyncModule

 from homeassistant.components.alarm_control_panel import (
@@ -13,7 +14,7 @@ from homeassistant.components.alarm_control_panel import (
 )
 from homeassistant.const import ATTR_ATTRIBUTION
 from homeassistant.core import HomeAssistant, callback
-from homeassistant.exceptions import HomeAssistantError
+from homeassistant.exceptions import ConfigEntryAuthFailed, HomeAssistantError
 from homeassistant.helpers.device_registry import DeviceInfo
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.update_coordinator import CoordinatorEntity
@@ -91,6 +92,9 @@ class BlinkSyncModuleHA(

         except TimeoutError as er:
             raise HomeAssistantError("Blink failed to disarm camera") from er
+        except UnauthorizedError as er:
+            self.coordinator.config_entry.async_start_reauth(self.hass)
+            raise ConfigEntryAuthFailed("Blink authorization failed") from er

         await self.coordinator.async_refresh()

@@ -101,5 +105,8 @@ class BlinkSyncModuleHA(

         except TimeoutError as er:
             raise HomeAssistantError("Blink failed to arm camera away") from er
+        except UnauthorizedError as er:
+            self.coordinator.config_entry.async_start_reauth(self.hass)
+            raise ConfigEntryAuthFailed("Blink authorization failed") from er

         await self.coordinator.async_refresh()
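The same recovery pattern is repeated in camera.py and switch.py below: catch the library's `UnauthorizedError`, kick off a reauth flow on the config entry, and surface the failure as `ConfigEntryAuthFailed`. A generic sketch of that pattern (the entity, client and `PermissionError` stand-in are illustrative, not the Blink code itself):

```python
from homeassistant.exceptions import ConfigEntryAuthFailed, HomeAssistantError
from homeassistant.helpers.update_coordinator import CoordinatorEntity


class ExampleEntity(CoordinatorEntity):
    """Illustrative entity method wrapping a cloud call that may lose auth."""

    async def async_do_something(self) -> None:
        try:
            await self.coordinator.client.do_something()  # hypothetical client call
        except TimeoutError as err:
            # Transient problem: report it but keep the config entry loaded.
            raise HomeAssistantError("Device did not respond") from err
        except PermissionError as err:  # stand-in for the library's auth error
            # Ask HA to start the reauth flow for this config entry ...
            self.coordinator.config_entry.async_start_reauth(self.hass)
            # ... and mark this operation as an authentication failure.
            raise ConfigEntryAuthFailed("Authorization failed") from err

        await self.coordinator.async_refresh()
```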
homeassistant/components/blink/camera.py

@@ -6,13 +6,19 @@ from collections.abc import Mapping
 import logging
 from typing import Any

+from blinkpy.auth import UnauthorizedError
+from blinkpy.camera import BlinkCamera as BlinkCameraAPI
 from requests.exceptions import ChunkedEncodingError
 import voluptuous as vol

 from homeassistant.components.camera import Camera
 from homeassistant.const import CONF_FILE_PATH, CONF_FILENAME
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
+from homeassistant.exceptions import (
+    ConfigEntryAuthFailed,
+    HomeAssistantError,
+    ServiceValidationError,
+)
 from homeassistant.helpers import config_validation as cv, entity_platform
 from homeassistant.helpers.device_registry import DeviceInfo
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -71,7 +77,9 @@ class BlinkCamera(CoordinatorEntity[BlinkUpdateCoordinator], Camera):
     _attr_has_entity_name = True
     _attr_name = None

-    def __init__(self, coordinator: BlinkUpdateCoordinator, name, camera) -> None:
+    def __init__(
+        self, coordinator: BlinkUpdateCoordinator, name, camera: BlinkCameraAPI
+    ) -> None:
         """Initialize a camera."""
         super().__init__(coordinator)
         Camera.__init__(self)
@@ -101,6 +109,9 @@ class BlinkCamera(CoordinatorEntity[BlinkUpdateCoordinator], Camera):
                 translation_domain=DOMAIN,
                 translation_key="failed_arm",
             ) from er
+        except UnauthorizedError as er:
+            self.coordinator.config_entry.async_start_reauth(self.hass)
+            raise ConfigEntryAuthFailed("Blink authorization failed") from er

         self._camera.motion_enabled = True
         await self.coordinator.async_refresh()
@@ -114,6 +125,9 @@ class BlinkCamera(CoordinatorEntity[BlinkUpdateCoordinator], Camera):
                 translation_domain=DOMAIN,
                 translation_key="failed_disarm",
             ) from er
+        except UnauthorizedError as er:
+            self.coordinator.config_entry.async_start_reauth(self.hass)
+            raise ConfigEntryAuthFailed("Blink authorization failed") from er

         self._camera.motion_enabled = False
         await self.coordinator.async_refresh()
@@ -137,6 +151,9 @@ class BlinkCamera(CoordinatorEntity[BlinkUpdateCoordinator], Camera):
                 translation_domain=DOMAIN,
                 translation_key="failed_clip",
             ) from er
+        except UnauthorizedError as er:
+            self.coordinator.config_entry.async_start_reauth(self.hass)
+            raise ConfigEntryAuthFailed("Blink authorization failed") from er

         self.async_write_ha_state()

@@ -149,6 +166,9 @@ class BlinkCamera(CoordinatorEntity[BlinkUpdateCoordinator], Camera):
                 translation_domain=DOMAIN,
                 translation_key="failed_snap",
             ) from er
+        except UnauthorizedError as er:
+            self.coordinator.config_entry.async_start_reauth(self.hass)
+            raise ConfigEntryAuthFailed("Blink authorization failed") from er

         self.async_write_ha_state()

@@ -182,6 +202,9 @@ class BlinkCamera(CoordinatorEntity[BlinkUpdateCoordinator], Camera):
                 translation_domain=DOMAIN,
                 translation_key="cant_write",
             ) from err
+        except UnauthorizedError as er:
+            self.coordinator.config_entry.async_start_reauth(self.hass)
+            raise ConfigEntryAuthFailed("Blink authorization failed") from er

     async def save_video(self, filename) -> None:
         """Handle save video service calls."""
@@ -200,3 +223,6 @@ class BlinkCamera(CoordinatorEntity[BlinkUpdateCoordinator], Camera):
                 translation_domain=DOMAIN,
                 translation_key="cant_write",
             ) from err
+        except UnauthorizedError as er:
+            self.coordinator.config_entry.async_start_reauth(self.hass)
+            raise ConfigEntryAuthFailed("Blink authorization failed") from er
homeassistant/components/blink/config_flow.py

@@ -6,13 +6,18 @@ from collections.abc import Mapping
 import logging
 from typing import Any

-from blinkpy.auth import Auth, LoginError, TokenRefreshFailed
+from blinkpy.auth import Auth, BlinkTwoFARequiredError, LoginError, TokenRefreshFailed
 from blinkpy.blinkpy import Blink, BlinkSetupError
 import voluptuous as vol

-from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
+from homeassistant.config_entries import (
+    SOURCE_REAUTH,
+    SOURCE_RECONFIGURE,
+    ConfigFlow,
+    ConfigFlowResult,
+)
 from homeassistant.const import CONF_PASSWORD, CONF_PIN, CONF_USERNAME
-from homeassistant.core import HomeAssistant, callback
+from homeassistant.core import callback
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.aiohttp_client import async_get_clientsession

@@ -21,23 +26,18 @@ from .const import DEVICE_ID, DOMAIN
 _LOGGER = logging.getLogger(__name__)


-async def validate_input(auth: Auth) -> None:
+async def validate_input(blink: Blink) -> None:
     """Validate the user input allows us to connect."""
     try:
-        await auth.startup()
+        await blink.start()
     except (LoginError, TokenRefreshFailed) as err:
         raise InvalidAuth from err
-    if auth.check_key_required():
-        raise Require2FA


-async def _send_blink_2fa_pin(hass: HomeAssistant, auth: Auth, pin: str | None) -> bool:
+async def _send_blink_2fa_pin(blink: Blink, pin: str | None) -> bool:
     """Send 2FA pin to blink servers."""
-    blink = Blink(session=async_get_clientsession(hass))
-    blink.auth = auth
-    blink.setup_login_ids()
-    blink.setup_urls()
-    return await auth.send_auth_key(blink, pin)
+    await blink.send_2fa_code(pin)
+    return True


 class BlinkConfigFlow(ConfigFlow, domain=DOMAIN):
@@ -48,6 +48,23 @@ class BlinkConfigFlow(ConfigFlow, domain=DOMAIN):
     def __init__(self) -> None:
         """Initialize the blink flow."""
         self.auth: Auth | None = None
+        self.blink: Blink | None = None

+    async def _handle_user_input(self, user_input: dict[str, Any]):
+        """Handle user input."""
+        self.auth = Auth(
+            {**user_input, "device_id": DEVICE_ID},
+            no_prompt=True,
+            session=async_get_clientsession(self.hass),
+        )
+        self.blink = Blink(session=async_get_clientsession(self.hass))
+        self.blink.auth = self.auth
+        await self.async_set_unique_id(user_input[CONF_USERNAME])
+        if self.source not in (SOURCE_REAUTH, SOURCE_RECONFIGURE):
+            self._abort_if_unique_id_configured()
+
+        await validate_input(self.blink)
+        return self._async_finish_flow()
+
     async def async_step_user(
         self, user_input: dict[str, Any] | None = None
@@ -55,19 +72,9 @@ class BlinkConfigFlow(ConfigFlow, domain=DOMAIN):
         """Handle a flow initiated by the user."""
         errors = {}
         if user_input is not None:
-            self.auth = Auth(
-                {**user_input, "device_id": DEVICE_ID},
-                no_prompt=True,
-                session=async_get_clientsession(self.hass),
-            )
-            await self.async_set_unique_id(user_input[CONF_USERNAME])
-            if self.source != SOURCE_REAUTH:
-                self._abort_if_unique_id_configured()
-
             try:
-                await validate_input(self.auth)
-                return self._async_finish_flow()
-            except Require2FA:
+                return await self._handle_user_input(user_input)
+            except BlinkTwoFARequiredError:
                 return await self.async_step_2fa()
             except InvalidAuth:
                 errors["base"] = "invalid_auth"
@@ -93,19 +100,16 @@ class BlinkConfigFlow(ConfigFlow, domain=DOMAIN):
         errors = {}
         if user_input is not None:
             try:
-                valid_token = await _send_blink_2fa_pin(
-                    self.hass, self.auth, user_input.get(CONF_PIN)
-                )
+                await _send_blink_2fa_pin(self.blink, user_input.get(CONF_PIN))
             except BlinkSetupError:
                 errors["base"] = "cannot_connect"
             except TokenRefreshFailed:
                 errors["base"] = "invalid_access_token"
             except Exception:
                 _LOGGER.exception("Unexpected exception")
                 errors["base"] = "unknown"

             else:
-                if valid_token:
-                    return self._async_finish_flow()
-                errors["base"] = "invalid_access_token"
+                return self._async_finish_flow()

         return self.async_show_form(
             step_id="2fa",
@@ -118,19 +122,89 @@ class BlinkConfigFlow(ConfigFlow, domain=DOMAIN):
     async def async_step_reauth(
         self, entry_data: Mapping[str, Any]
     ) -> ConfigFlowResult:
-        """Perform reauth upon migration of old entries."""
-        return await self.async_step_user(dict(entry_data))
+        """Perform reauth after an authentication error."""
+        return await self.async_step_reauth_confirm(None)
+
+    async def async_step_reauth_confirm(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Handle reauth confirmation."""
+        errors = {}
+        if user_input is not None:
+            try:
+                return await self._handle_user_input(user_input)
+            except BlinkTwoFARequiredError:
+                return await self.async_step_2fa()
+            except InvalidAuth:
+                errors["base"] = "invalid_auth"
+            except Exception:
+                _LOGGER.exception("Unexpected exception")
+                errors["base"] = "unknown"
+
+        config_entry = self._get_reauth_entry()
+        return self.async_show_form(
+            step_id="reauth_confirm",
+            data_schema=vol.Schema(
+                {
+                    vol.Required(
+                        CONF_USERNAME, default=config_entry.data[CONF_USERNAME]
+                    ): str,
+                    vol.Required(
+                        CONF_PASSWORD, default=config_entry.data[CONF_PASSWORD]
+                    ): str,
+                }
+            ),
+            errors=errors,
+            description_placeholders={"username": config_entry.data[CONF_USERNAME]},
+        )
+
+    async def async_step_reconfigure(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Handle reconfiguration initiated by the user."""
+        errors = {}
+        if user_input is not None:
+            try:
+                return await self._handle_user_input(user_input)
+            except BlinkTwoFARequiredError:
+                return await self.async_step_2fa()
+            except InvalidAuth:
+                errors["base"] = "invalid_auth"
+            except Exception:
+                _LOGGER.exception("Unexpected exception")
+                errors["base"] = "unknown"
+
+        config_entry = self._get_reconfigure_entry()
+        return self.async_show_form(
+            step_id="reconfigure",
+            data_schema=vol.Schema(
+                {
+                    vol.Required(
+                        CONF_USERNAME, default=config_entry.data[CONF_USERNAME]
+                    ): str,
+                    vol.Required(
+                        CONF_PASSWORD, default=config_entry.data[CONF_PASSWORD]
+                    ): str,
+                }
+            ),
+            errors=errors,
+        )

     @callback
     def _async_finish_flow(self) -> ConfigFlowResult:
         """Finish with setup."""
         assert self.auth

+        if self.source in (SOURCE_REAUTH, SOURCE_RECONFIGURE):
+            return self.async_update_reload_and_abort(
+                self._get_reauth_entry()
+                if self.source == SOURCE_REAUTH
+                else self._get_reconfigure_entry(),
+                data_updates=self.auth.login_attributes,
+            )
+
         return self.async_create_entry(title=DOMAIN, data=self.auth.login_attributes)


 class Require2FA(HomeAssistantError):
     """Error to indicate we require 2FA."""


 class InvalidAuth(HomeAssistantError):
     """Error to indicate there is invalid auth."""
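With blinkpy 0.24 the 2FA handshake is driven by the library: `blink.start()` raises `BlinkTwoFARequiredError` when a verification code is pending, and the flow answers it with `blink.send_2fa_code(pin)`. A compressed, hedged sketch of that round-trip as the new flow uses it (error handling trimmed; `ask_pin` is a hypothetical awaitable standing in for the flow's "2fa" form step):

```python
from blinkpy.auth import Auth, BlinkTwoFARequiredError
from blinkpy.blinkpy import Blink


async def login_with_optional_2fa(session, credentials: dict, ask_pin) -> Blink:
    """Log in, prompting for a 2FA code only when the service demands one."""
    blink = Blink(session=session)
    blink.auth = Auth(credentials, no_prompt=True, session=session)
    try:
        await blink.start()
    except BlinkTwoFARequiredError:
        # The account has 2FA enabled: forward the user's emailed/SMS PIN.
        await blink.send_2fa_code(await ask_pin())
    return blink
```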
homeassistant/components/blink/coordinator.py

@@ -6,10 +6,17 @@ from datetime import timedelta
 import logging
 from typing import Any

+from aiohttp import ClientError
+from blinkpy.auth import BlinkTwoFARequiredError, UnauthorizedError
 from blinkpy.blinkpy import Blink

 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import (
+    ConfigEntryAuthFailed,
+    ConfigEntryError,
+    ConfigEntryNotReady,
+)
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

 from .const import DOMAIN
@@ -38,6 +45,23 @@ class BlinkUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
             update_interval=timedelta(seconds=SCAN_INTERVAL),
         )

+    async def _async_setup(self):
+        """Set up the coordinator."""
+        try:
+            await self.api.start()
+        except (ClientError, TimeoutError) as ex:
+            raise ConfigEntryNotReady("Can not connect to host") from ex
+        except (BlinkTwoFARequiredError, UnauthorizedError) as ex:
+            raise ConfigEntryAuthFailed("Required Blink re-authentication") from ex
+        except Exception as ex:
+            raise ConfigEntryError("Unknown error connecting to Blink") from ex
+
+        if not self.api.available:
+            raise ConfigEntryNotReady
+
     async def _async_update_data(self) -> dict[str, Any]:
         """Async update wrapper."""
-        return await self.api.refresh(force=True)
+        try:
+            return await self.api.refresh(force=True)
+        except UnauthorizedError as ex:
+            raise ConfigEntryAuthFailed("Blink API authorization failed") from ex
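`_async_setup` is the `DataUpdateCoordinator` hook that runs exactly once, right before the first refresh, which is why the connection and 2FA bootstrapping could move here out of `async_setup_entry`. A stripped-down, generic illustration of the hook order (the `client` object is hypothetical, not the Blink API):

```python
from datetime import timedelta
import logging

from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator


class ExampleCoordinator(DataUpdateCoordinator[dict]):
    """Coordinator whose one-time setup is separate from periodic refreshes."""

    def __init__(self, hass: HomeAssistant, client) -> None:
        super().__init__(
            hass,
            logging.getLogger(__name__),
            config_entry=None,  # or the owning ConfigEntry
            name="example",
            update_interval=timedelta(minutes=5),
        )
        self.client = client

    async def _async_setup(self) -> None:
        # Runs once, before the first _async_update_data call
        # (for example via async_config_entry_first_refresh).
        await self.client.connect()

    async def _async_update_data(self) -> dict:
        # Runs on every scheduled refresh after setup succeeded.
        return await self.client.fetch()
```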
homeassistant/components/blink/manifest.json

@@ -20,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/blink",
   "iot_class": "cloud_polling",
   "loggers": ["blinkpy"],
-  "requirements": ["blinkpy==0.23.0"]
+  "requirements": ["blinkpy==0.24.1"]
 }
homeassistant/components/blink/services.py

@@ -4,14 +4,12 @@ from __future__ import annotations

 import voluptuous as vol

-from homeassistant.config_entries import ConfigEntryState
 from homeassistant.const import ATTR_CONFIG_ENTRY_ID, CONF_PIN
 from homeassistant.core import HomeAssistant, ServiceCall, callback
-from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
-from homeassistant.helpers import config_validation as cv
+from homeassistant.exceptions import HomeAssistantError
+from homeassistant.helpers import config_validation as cv, issue_registry as ir

 from .const import DOMAIN, SERVICE_SEND_PIN
-from .coordinator import BlinkConfigEntry

 SERVICE_SEND_PIN_SCHEMA = vol.Schema(
     {
@@ -23,25 +21,25 @@ SERVICE_SEND_PIN_SCHEMA = vol.Schema(

 async def _send_pin(call: ServiceCall) -> None:
     """Call blink to send new pin."""
-    config_entry: BlinkConfigEntry | None
-    for entry_id in call.data[ATTR_CONFIG_ENTRY_ID]:
-        if not (config_entry := call.hass.config_entries.async_get_entry(entry_id)):
-            raise ServiceValidationError(
-                translation_domain=DOMAIN,
-                translation_key="integration_not_found",
-                translation_placeholders={"target": DOMAIN},
-            )
-        if config_entry.state != ConfigEntryState.LOADED:
-            raise HomeAssistantError(
-                translation_domain=DOMAIN,
-                translation_key="not_loaded",
-                translation_placeholders={"target": config_entry.title},
-            )
-        coordinator = config_entry.runtime_data
-        await coordinator.api.auth.send_auth_key(
-            coordinator.api,
-            call.data[CONF_PIN],
-        )
+    # Create repair issue to inform user about service removal
+    ir.async_create_issue(
+        call.hass,
+        DOMAIN,
+        "service_send_pin_deprecation",
+        is_fixable=False,
+        issue_domain=DOMAIN,
+        severity=ir.IssueSeverity.ERROR,
+        breaks_in_ha_version="2026.5.0",
+        translation_key="service_send_pin_deprecation",
+        translation_placeholders={"service_name": f"{DOMAIN}.{SERVICE_SEND_PIN}"},
+    )
+
+    # Service has been removed - raise exception
+    raise HomeAssistantError(
+        translation_domain=DOMAIN,
+        translation_key="service_removed",
+        translation_placeholders={"service_name": f"{DOMAIN}.{SERVICE_SEND_PIN}"},
+    )


 @callback
homeassistant/components/blink/strings.json

@@ -1,7 +1,8 @@
 {
   "config": {
     "abort": {
-      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
+      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
+      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
     },
     "error": {
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
@@ -17,6 +18,14 @@
         "description": "Enter the PIN sent via email or SMS",
         "title": "Two-factor authentication"
       },
+      "reauth_confirm": {
+        "data": {
+          "password": "[%key:common::config_flow::data::password%]",
+          "username": "[%key:common::config_flow::data::username%]"
+        },
+        "description": "The credentials for {username} need to be updated",
+        "title": "Re-authenticate Blink"
+      },
       "user": {
         "data": {
           "password": "[%key:common::config_flow::data::password%]",
@@ -73,6 +82,9 @@
     },
     "not_loaded": {
      "message": "{target} is not loaded."
     },
+    "service_removed": {
+      "message": "The service {service_name} has been removed and is no longer needed. Home Assistant will automatically prompt for reauthentication when required."
+    }
   },
   "issues": {
@@ -86,6 +98,10 @@
        }
      },
      "title": "Blink update service is being removed"
    },
+    "service_send_pin_deprecation": {
+      "description": "The service {service_name} has been removed and is no longer needed. When a new two-factor authentication code is required, Home Assistant will automatically prompt you to reauthenticate through the integration configuration. Please remove any automations or scripts that call this service.",
+      "title": "Blink send PIN service has been removed"
+    }
   },
   "options": {
homeassistant/components/blink/switch.py

@@ -4,13 +4,15 @@ from __future__ import annotations

 from typing import Any

+from blinkpy.auth import UnauthorizedError
+
 from homeassistant.components.switch import (
     SwitchDeviceClass,
     SwitchEntity,
     SwitchEntityDescription,
 )
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import HomeAssistantError
+from homeassistant.exceptions import ConfigEntryAuthFailed, HomeAssistantError
 from homeassistant.helpers.device_registry import DeviceInfo
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.update_coordinator import CoordinatorEntity
@@ -77,6 +79,9 @@ class BlinkSwitch(CoordinatorEntity[BlinkUpdateCoordinator], SwitchEntity):
                 translation_domain=DOMAIN,
                 translation_key="failed_arm_motion",
             ) from er
+        except UnauthorizedError as er:
+            self.coordinator.config_entry.async_start_reauth(self.hass)
+            raise ConfigEntryAuthFailed("Blink authorization failed") from er

         await self.coordinator.async_refresh()

@@ -90,6 +95,9 @@ class BlinkSwitch(CoordinatorEntity[BlinkUpdateCoordinator], SwitchEntity):
                 translation_domain=DOMAIN,
                 translation_key="failed_disarm_motion",
             ) from er
+        except UnauthorizedError as er:
+            self.coordinator.config_entry.async_start_reauth(self.hass)
+            raise ConfigEntryAuthFailed("Blink authorization failed") from er

         await self.coordinator.async_refresh()
homeassistant/components/bluetooth/manifest.json

@@ -19,7 +19,7 @@
     "bleak-retry-connector==4.4.3",
     "bluetooth-adapters==2.1.0",
     "bluetooth-auto-recovery==1.5.3",
-    "bluetooth-data-tools==1.28.3",
+    "bluetooth-data-tools==1.28.4",
     "dbus-fast==2.44.5",
     "habluetooth==5.7.0"
   ]
homeassistant/components/bsblan/__init__.py

@@ -30,7 +30,7 @@ from homeassistant.exceptions import (
 from homeassistant.helpers.aiohttp_client import async_get_clientsession

 from .const import CONF_PASSKEY, DOMAIN
-from .coordinator import BSBLanUpdateCoordinator
+from .coordinator import BSBLanFastCoordinator, BSBLanSlowCoordinator

 PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.WATER_HEATER]

@@ -41,7 +41,8 @@ type BSBLanConfigEntry = ConfigEntry[BSBLanData]
 class BSBLanData:
     """BSBLan data stored in the Home Assistant data object."""

-    coordinator: BSBLanUpdateCoordinator
+    fast_coordinator: BSBLanFastCoordinator
+    slow_coordinator: BSBLanSlowCoordinator
     client: BSBLAN
     device: Device
     info: Info
@@ -64,12 +65,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: BSBLanConfigEntry) -> bo
     session = async_get_clientsession(hass)
     bsblan = BSBLAN(config, session)

-    # Create and perform first refresh of the coordinator
-    coordinator = BSBLanUpdateCoordinator(hass, entry, bsblan)
-    await coordinator.async_config_entry_first_refresh()
-
     try:
-        # Fetch all required data sequentially
+        # Initialize the client first - this sets up internal caches and validates the connection
+        await bsblan.initialize()
+        # Fetch all required device metadata
         device = await bsblan.device()
         info = await bsblan.info()
         static = await bsblan.static_values()
@@ -84,15 +83,33 @@ async def async_setup_entry(hass: HomeAssistant, entry: BSBLanConfigEntry) -> bo
             translation_domain=DOMAIN,
             translation_key="setup_auth_error",
         ) from err
+    except TimeoutError as err:
+        raise ConfigEntryNotReady(
+            translation_domain=DOMAIN,
+            translation_key="setup_connection_error",
+            translation_placeholders={"host": entry.data[CONF_HOST]},
+        ) from err
     except BSBLANError as err:
         raise ConfigEntryError(
             translation_domain=DOMAIN,
             translation_key="setup_general_error",
         ) from err

+    # Create coordinators with the already-initialized client
+    fast_coordinator = BSBLanFastCoordinator(hass, entry, bsblan)
+    slow_coordinator = BSBLanSlowCoordinator(hass, entry, bsblan)
+
+    # Perform first refresh of both coordinators
+    await fast_coordinator.async_config_entry_first_refresh()
+
+    # Try to refresh slow coordinator, but don't fail if DHW is not available
+    # This allows the integration to work even if the device doesn't support DHW
+    await slow_coordinator.async_refresh()
+
     entry.runtime_data = BSBLanData(
         client=bsblan,
-        coordinator=coordinator,
+        fast_coordinator=fast_coordinator,
+        slow_coordinator=slow_coordinator,
         device=device,
         info=info,
         static=static,
homeassistant/components/bsblan/climate.py

@@ -71,12 +71,12 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity):
         data: BSBLanData,
     ) -> None:
         """Initialize BSBLAN climate device."""
-        super().__init__(data.coordinator, data)
+        super().__init__(data.fast_coordinator, data)
         self._attr_unique_id = f"{format_mac(data.device.MAC)}-climate"

         self._attr_min_temp = data.static.min_temp.value
         self._attr_max_temp = data.static.max_temp.value
-        self._attr_temperature_unit = data.coordinator.client.get_temperature_unit
+        self._attr_temperature_unit = data.fast_coordinator.client.get_temperature_unit

     @property
     def current_temperature(self) -> float | None:
homeassistant/components/bsblan/config_flow.py

@@ -180,10 +180,7 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
         self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
         """Handle reauth confirmation flow."""
-        existing_entry = self.hass.config_entries.async_get_entry(
-            self.context["entry_id"]
-        )
-        assert existing_entry
+        existing_entry = self._get_reauth_entry()

         if user_input is None:
             # Preserve existing values as defaults
homeassistant/components/bsblan/const.py

@@ -10,7 +10,9 @@ from typing import Final
 DOMAIN: Final = "bsblan"

 LOGGER = logging.getLogger(__package__)
-SCAN_INTERVAL = timedelta(seconds=12)
+SCAN_INTERVAL = timedelta(seconds=12)  # Legacy interval, kept for compatibility
+SCAN_INTERVAL_FAST = timedelta(seconds=12)  # For state/sensor data
+SCAN_INTERVAL_SLOW = timedelta(minutes=5)  # For config data

 # Services
 DATA_BSBLAN_CLIENT: Final = "bsblan_client"
homeassistant/components/bsblan/coordinator.py

@@ -8,6 +8,8 @@ from bsblan import (
     BSBLAN,
     BSBLANAuthError,
     BSBLANConnectionError,
+    HotWaterConfig,
+    HotWaterSchedule,
     HotWaterState,
     Sensor,
     State,
@@ -19,20 +21,28 @@ from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

-from .const import DOMAIN, LOGGER, SCAN_INTERVAL
+from .const import DOMAIN, LOGGER, SCAN_INTERVAL_FAST, SCAN_INTERVAL_SLOW


 @dataclass
-class BSBLanCoordinatorData:
-    """BSBLan data stored in the Home Assistant data object."""
+class BSBLanFastData:
+    """BSBLan fast-polling data."""

     state: State
     sensor: Sensor
     dhw: HotWaterState


-class BSBLanUpdateCoordinator(DataUpdateCoordinator[BSBLanCoordinatorData]):
-    """The BSB-Lan update coordinator."""
+@dataclass
+class BSBLanSlowData:
+    """BSBLan slow-polling data."""
+
+    dhw_config: HotWaterConfig | None = None
+    dhw_schedule: HotWaterSchedule | None = None
+
+
+class BSBLanCoordinator[T](DataUpdateCoordinator[T]):
+    """Base BSB-Lan coordinator."""

     config_entry: ConfigEntry

@@ -41,44 +51,122 @@ class BSBLanUpdateCoordinator(DataUpdateCoordinator[BSBLanCoordinatorData]):
         hass: HomeAssistant,
         config_entry: ConfigEntry,
         client: BSBLAN,
+        name: str,
+        update_interval: timedelta,
     ) -> None:
         """Initialize the BSB-Lan coordinator."""
         super().__init__(
             hass,
             logger=LOGGER,
             config_entry=config_entry,
-            name=f"{DOMAIN}_{config_entry.data[CONF_HOST]}",
-            update_interval=self._get_update_interval(),
+            name=name,
+            update_interval=update_interval,
         )
         self.client = client

+
+class BSBLanFastCoordinator(BSBLanCoordinator[BSBLanFastData]):
+    """The BSB-Lan fast update coordinator for frequently changing data."""
+
+    def __init__(
+        self,
+        hass: HomeAssistant,
+        config_entry: ConfigEntry,
+        client: BSBLAN,
+    ) -> None:
+        """Initialize the BSB-Lan fast coordinator."""
+        super().__init__(
+            hass,
+            config_entry,
+            client,
+            name=f"{DOMAIN}_fast_{config_entry.data[CONF_HOST]}",
+            update_interval=self._get_update_interval(),
+        )
+
     def _get_update_interval(self) -> timedelta:
         """Get the update interval with a random offset.

-        Use the default scan interval and add a random number of seconds to avoid timeouts when
+        Add a random number of seconds to avoid timeouts when
         the BSB-Lan device is already/still busy retrieving data,
         e.g. for MQTT or internal logging.
         """
-        return SCAN_INTERVAL + timedelta(seconds=randint(1, 8))
+        return SCAN_INTERVAL_FAST + timedelta(seconds=randint(1, 8))

-    async def _async_update_data(self) -> BSBLanCoordinatorData:
-        """Get state and sensor data from BSB-Lan device."""
+    async def _async_update_data(self) -> BSBLanFastData:
+        """Fetch fast-changing data from the BSB-Lan device."""
         try:
-            # initialize the client, this is cached and will only be called once
-            await self.client.initialize()
-
+            # Client is already initialized in async_setup_entry
+            # Fetch fast-changing data (state, sensor, DHW state)
             state = await self.client.state()
             sensor = await self.client.sensor()
             dhw = await self.client.hot_water_state()

         except BSBLANAuthError as err:
             raise ConfigEntryAuthFailed(
                 "Authentication failed for BSB-Lan device"
             ) from err
         except BSBLANConnectionError as err:
-            host = self.config_entry.data[CONF_HOST] if self.config_entry else "unknown"
+            host = self.config_entry.data[CONF_HOST]
             raise UpdateFailed(
                 f"Error while establishing connection with BSB-Lan device at {host}"
             ) from err

         # Update the interval with random jitter for next update
         self.update_interval = self._get_update_interval()
-        return BSBLanCoordinatorData(state=state, sensor=sensor, dhw=dhw)
+
+        return BSBLanFastData(
+            state=state,
+            sensor=sensor,
+            dhw=dhw,
+        )
+
+
+class BSBLanSlowCoordinator(BSBLanCoordinator[BSBLanSlowData]):
+    """The BSB-Lan slow update coordinator for infrequently changing data."""
+
+    def __init__(
+        self,
+        hass: HomeAssistant,
+        config_entry: ConfigEntry,
+        client: BSBLAN,
+    ) -> None:
+        """Initialize the BSB-Lan slow coordinator."""
+        super().__init__(
+            hass,
+            config_entry,
+            client,
+            name=f"{DOMAIN}_slow_{config_entry.data[CONF_HOST]}",
+            update_interval=SCAN_INTERVAL_SLOW,
+        )
+
+    async def _async_update_data(self) -> BSBLanSlowData:
+        """Fetch slow-changing data from the BSB-Lan device."""
+        try:
+            # Client is already initialized in async_setup_entry
+            # Fetch slow-changing configuration data
+            dhw_config = await self.client.hot_water_config()
+            dhw_schedule = await self.client.hot_water_schedule()
+
+        except AttributeError:
+            # Device does not support DHW functionality
+            LOGGER.debug(
+                "DHW (Domestic Hot Water) not available on device at %s",
+                self.config_entry.data[CONF_HOST],
+            )
+            return BSBLanSlowData()
+        except (BSBLANConnectionError, BSBLANAuthError) as err:
+            # If config update fails, keep existing data
+            LOGGER.debug(
+                "Failed to fetch DHW config from %s: %s",
+                self.config_entry.data[CONF_HOST],
+                err,
+            )
+            if self.data:
+                return self.data
+            # First fetch failed, return empty data
+            return BSBLanSlowData()
+
+        return BSBLanSlowData(
+            dhw_config=dhw_config,
+            dhw_schedule=dhw_schedule,
+        )
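The `class BSBLanCoordinator[T](DataUpdateCoordinator[T])` line uses PEP 695 type-parameter syntax (Python 3.12+), so each subclass pins the payload type its `data` attribute carries. A small, self-contained illustration of the same idea with stand-in classes (not the Home Assistant ones):

```python
from dataclasses import dataclass


class Coordinator[T]:
    """Minimal stand-in for DataUpdateCoordinator[T]: stores typed data."""

    def __init__(self) -> None:
        self.data: T | None = None

    async def refresh(self) -> None:
        self.data = await self._async_update_data()

    async def _async_update_data(self) -> T:
        raise NotImplementedError


@dataclass
class FastData:
    temperature: float


class FastCoordinator(Coordinator[FastData]):
    async def _async_update_data(self) -> FastData:
        # A type checker now knows self.data is FastData | None.
        return FastData(temperature=21.5)
```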
homeassistant/components/bsblan/diagnostics.py

@@ -15,12 +15,28 @@ async def async_get_config_entry_diagnostics(
     """Return diagnostics for a config entry."""
     data = entry.runtime_data

-    return {
+    # Build diagnostic data from both coordinators
+    diagnostics = {
         "info": data.info.to_dict(),
         "device": data.device.to_dict(),
-        "coordinator_data": {
-            "state": data.coordinator.data.state.to_dict(),
-            "sensor": data.coordinator.data.sensor.to_dict(),
+        "fast_coordinator_data": {
+            "state": data.fast_coordinator.data.state.to_dict(),
+            "sensor": data.fast_coordinator.data.sensor.to_dict(),
+            "dhw": data.fast_coordinator.data.dhw.to_dict(),
         },
         "static": data.static.to_dict(),
     }
+
+    # Add DHW config and schedule from slow coordinator if available
+    if data.slow_coordinator.data:
+        slow_data = {}
+        if data.slow_coordinator.data.dhw_config:
+            slow_data["dhw_config"] = data.slow_coordinator.data.dhw_config.to_dict()
+        if data.slow_coordinator.data.dhw_schedule:
+            slow_data["dhw_schedule"] = (
+                data.slow_coordinator.data.dhw_schedule.to_dict()
+            )
+        if slow_data:
+            diagnostics["slow_coordinator_data"] = slow_data
+
+    return diagnostics
homeassistant/components/bsblan/entity.py

@@ -11,17 +11,17 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity

 from . import BSBLanData
 from .const import DOMAIN
-from .coordinator import BSBLanUpdateCoordinator
+from .coordinator import BSBLanCoordinator, BSBLanFastCoordinator, BSBLanSlowCoordinator


-class BSBLanEntity(CoordinatorEntity[BSBLanUpdateCoordinator]):
-    """Defines a base BSBLan entity."""
+class BSBLanEntityBase[_T: BSBLanCoordinator](CoordinatorEntity[_T]):
+    """Base BSBLan entity with common device info setup."""

     _attr_has_entity_name = True

-    def __init__(self, coordinator: BSBLanUpdateCoordinator, data: BSBLanData) -> None:
-        """Initialize BSBLan entity."""
-        super().__init__(coordinator, data)
+    def __init__(self, coordinator: _T, data: BSBLanData) -> None:
+        """Initialize BSBLan entity with device info."""
+        super().__init__(coordinator)
         host = coordinator.config_entry.data["host"]
         mac = data.device.MAC
         self._attr_device_info = DeviceInfo(
@@ -33,3 +33,33 @@ class BSBLanEntity(CoordinatorEntity[BSBLanUpdateCoordinator]):
             sw_version=data.device.version,
             configuration_url=f"http://{host}",
         )
+
+
+class BSBLanEntity(BSBLanEntityBase[BSBLanFastCoordinator]):
+    """Defines a base BSBLan entity using the fast coordinator."""
+
+    def __init__(self, coordinator: BSBLanFastCoordinator, data: BSBLanData) -> None:
+        """Initialize BSBLan entity."""
+        super().__init__(coordinator, data)
+
+
+class BSBLanDualCoordinatorEntity(BSBLanEntity):
+    """Entity that listens to both fast and slow coordinators."""
+
+    def __init__(
+        self,
+        fast_coordinator: BSBLanFastCoordinator,
+        slow_coordinator: BSBLanSlowCoordinator,
+        data: BSBLanData,
+    ) -> None:
+        """Initialize BSBLan entity with both coordinators."""
+        super().__init__(fast_coordinator, data)
+        self.slow_coordinator = slow_coordinator
+
+    async def async_added_to_hass(self) -> None:
+        """When entity is added to hass."""
+        await super().async_added_to_hass()
+        # Also listen to slow coordinator updates
+        self.async_on_remove(
+            self.slow_coordinator.async_add_listener(self._handle_coordinator_update)
+        )
homeassistant/components/bsblan/manifest.json

@@ -7,7 +7,7 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["bsblan"],
-  "requirements": ["python-bsblan==2.1.0"],
+  "requirements": ["python-bsblan==3.1.0"],
   "zeroconf": [
     {
       "name": "bsb-lan*",
homeassistant/components/bsblan/sensor.py

@@ -17,7 +17,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.typing import StateType

 from . import BSBLanConfigEntry, BSBLanData
-from .coordinator import BSBLanCoordinatorData
+from .coordinator import BSBLanFastData
 from .entity import BSBLanEntity

 PARALLEL_UPDATES = 1
@@ -27,8 +27,8 @@ PARALLEL_UPDATES = 1
 class BSBLanSensorEntityDescription(SensorEntityDescription):
     """Describes BSB-Lan sensor entity."""

-    value_fn: Callable[[BSBLanCoordinatorData], StateType]
-    exists_fn: Callable[[BSBLanCoordinatorData], bool] = lambda data: True
+    value_fn: Callable[[BSBLanFastData], StateType]
+    exists_fn: Callable[[BSBLanFastData], bool] = lambda data: True


 SENSOR_TYPES: tuple[BSBLanSensorEntityDescription, ...] = (
@@ -73,7 +73,7 @@ async def async_setup_entry(
     entities = [
         BSBLanSensor(data, description)
         for description in SENSOR_TYPES
-        if description.exists_fn(data.coordinator.data)
+        if description.exists_fn(data.fast_coordinator.data)
     ]

     if entities:
@@ -91,10 +91,10 @@ class BSBLanSensor(BSBLanEntity, SensorEntity):
         description: BSBLanSensorEntityDescription,
     ) -> None:
         """Initialize BSB-Lan sensor."""
-        super().__init__(data.coordinator, data)
+        super().__init__(data.fast_coordinator, data)
         self.entity_description = description
         self._attr_unique_id = f"{data.device.MAC}-{description.key}"
-        self._attr_temperature_unit = data.coordinator.client.get_temperature_unit
+        self._attr_temperature_unit = data.fast_coordinator.client.get_temperature_unit

     @property
     def native_value(self) -> StateType:
homeassistant/components/bsblan/water_heater.py

@@ -20,7 +20,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

 from . import BSBLanConfigEntry, BSBLanData
 from .const import DOMAIN
-from .entity import BSBLanEntity
+from .entity import BSBLanDualCoordinatorEntity

 PARALLEL_UPDATES = 1

@@ -44,7 +44,7 @@ async def async_setup_entry(

     # Only create water heater entity if DHW (Domestic Hot Water) is available
     # Check if we have any DHW-related data indicating water heater support
-    dhw_data = data.coordinator.data.dhw
+    dhw_data = data.fast_coordinator.data.dhw
     if (
         dhw_data.operating_mode is None
         and dhw_data.nominal_setpoint is None
@@ -56,7 +56,7 @@ async def async_setup_entry(
     async_add_entities([BSBLANWaterHeater(data)])


-class BSBLANWaterHeater(BSBLanEntity, WaterHeaterEntity):
+class BSBLANWaterHeater(BSBLanDualCoordinatorEntity, WaterHeaterEntity):
     """Defines a BSBLAN water heater entity."""

     _attr_name = None
@@ -67,16 +67,43 @@ class BSBLANWaterHeater(BSBLanEntity, WaterHeaterEntity):

     def __init__(self, data: BSBLanData) -> None:
         """Initialize BSBLAN water heater."""
-        super().__init__(data.coordinator, data)
+        super().__init__(data.fast_coordinator, data.slow_coordinator, data)
         self._attr_unique_id = format_mac(data.device.MAC)
         self._attr_operation_list = list(OPERATION_MODES_REVERSE.keys())

-        # Set temperature limits based on device capabilities
-        self._attr_temperature_unit = data.coordinator.client.get_temperature_unit
-        if data.coordinator.data.dhw.reduced_setpoint is not None:
-            self._attr_min_temp = data.coordinator.data.dhw.reduced_setpoint.value
-        if data.coordinator.data.dhw.nominal_setpoint_max is not None:
-            self._attr_max_temp = data.coordinator.data.dhw.nominal_setpoint_max.value
+        # Set temperature unit
+        self._attr_temperature_unit = data.fast_coordinator.client.get_temperature_unit
+        # Initialize available attribute to resolve multiple inheritance conflict
+        self._attr_available = True
+
+        # Set temperature limits based on device capabilities from slow coordinator
+        # For min_temp: Use reduced_setpoint from config data (slow polling)
+        if (
+            data.slow_coordinator.data
+            and data.slow_coordinator.data.dhw_config is not None
+            and data.slow_coordinator.data.dhw_config.reduced_setpoint is not None
+            and hasattr(data.slow_coordinator.data.dhw_config.reduced_setpoint, "value")
+        ):
+            self._attr_min_temp = float(
+                data.slow_coordinator.data.dhw_config.reduced_setpoint.value
+            )
+        else:
+            self._attr_min_temp = 10.0  # Default minimum
+
+        # For max_temp: Use nominal_setpoint_max from config data (slow polling)
+        if (
+            data.slow_coordinator.data
+            and data.slow_coordinator.data.dhw_config is not None
+            and data.slow_coordinator.data.dhw_config.nominal_setpoint_max is not None
+            and hasattr(
+                data.slow_coordinator.data.dhw_config.nominal_setpoint_max, "value"
+            )
+        ):
+            self._attr_max_temp = float(
+                data.slow_coordinator.data.dhw_config.nominal_setpoint_max.value
+            )
+        else:
+            self._attr_max_temp = 65.0  # Default maximum

     @property
     def current_operation(self) -> str | None:
homeassistant/components/cloud/__init__.py

@@ -53,6 +53,7 @@ from .const import (
     CONF_ACME_SERVER,
     CONF_ALEXA,
     CONF_ALIASES,
+    CONF_API_SERVER,
     CONF_COGNITO_CLIENT_ID,
     CONF_ENTITY_CONFIG,
     CONF_FILTER,
@@ -129,6 +130,7 @@ CONFIG_SCHEMA = vol.Schema(
                 vol.Optional(CONF_ACCOUNT_LINK_SERVER): str,
                 vol.Optional(CONF_ACCOUNTS_SERVER): str,
                 vol.Optional(CONF_ACME_SERVER): str,
+                vol.Optional(CONF_API_SERVER): str,
                 vol.Optional(CONF_RELAYER_SERVER): str,
                 vol.Optional(CONF_REMOTESTATE_SERVER): str,
                 vol.Optional(CONF_SERVICEHANDLERS_SERVER): str,
homeassistant/components/cloud/const.py

@@ -78,6 +78,7 @@ CONF_USER_POOL_ID = "user_pool_id"
 CONF_ACCOUNT_LINK_SERVER = "account_link_server"
 CONF_ACCOUNTS_SERVER = "accounts_server"
 CONF_ACME_SERVER = "acme_server"
+CONF_API_SERVER = "api_server"
 CONF_RELAYER_SERVER = "relayer_server"
 CONF_REMOTESTATE_SERVER = "remotestate_server"
 CONF_SERVICEHANDLERS_SERVER = "servicehandlers_server"
homeassistant/components/conversation/default_agent.py

@@ -1242,10 +1242,14 @@ class DefaultAgent(ConversationEntity):
                 intent_name: {
                     combo_key: [
                         SlotCombinationInfo(
-                            name_domains=(set(name_domains) if name_domains else None)
+                            name_domains=(
+                                set(combo_info.name_domains)
+                                if combo_info.name_domains
+                                else None
+                            )
                         )
                     ]
-                    for combo_key, name_domains in intent_combos.items()
+                    for combo_key, combo_info in intent_combos.items()
                 }
                 for intent_name, intent_combos in self._fuzzy_config.slot_combinations.items()
             },
homeassistant/components/conversation/manifest.json

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/conversation",
   "integration_type": "entity",
   "quality_scale": "internal",
-  "requirements": ["hassil==3.3.0", "home-assistant-intents==2025.10.1"]
+  "requirements": ["hassil==3.3.0", "home-assistant-intents==2025.10.28"]
 }
homeassistant/components/denonavr/manifest.json

@@ -6,7 +6,7 @@
   "documentation": "https://www.home-assistant.io/integrations/denonavr",
   "iot_class": "local_push",
   "loggers": ["denonavr"],
-  "requirements": ["denonavr==1.1.2"],
+  "requirements": ["denonavr==1.2.0"],
   "ssdp": [
     {
       "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1",
homeassistant/components/denonavr/media_player.py

@@ -274,7 +274,7 @@ class DenonDevice(MediaPlayerEntity):
             and MediaPlayerEntityFeature.SELECT_SOUND_MODE
         )

-    async def _telnet_callback(self, zone: str, event: str, parameter: str) -> None:
+    def _telnet_callback(self, zone: str, event: str, parameter: str) -> None:
         """Process a telnet command callback."""
         # There are multiple checks implemented which reduce unnecessary updates of the ha state machine
         if zone not in (self._receiver.zone, ALL_ZONES):
homeassistant/components/ecowitt/sensor.py

@@ -151,14 +151,14 @@ ECOWITT_SENSORS_MAPPING: Final = {
         key="RAIN_COUNT_MM",
         native_unit_of_measurement=UnitOfPrecipitationDepth.MILLIMETERS,
         device_class=SensorDeviceClass.PRECIPITATION,
-        state_class=SensorStateClass.TOTAL_INCREASING,
+        state_class=SensorStateClass.TOTAL,
         suggested_display_precision=1,
     ),
     EcoWittSensorTypes.RAIN_COUNT_INCHES: SensorEntityDescription(
         key="RAIN_COUNT_INCHES",
         native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
         device_class=SensorDeviceClass.PRECIPITATION,
-        state_class=SensorStateClass.TOTAL_INCREASING,
+        state_class=SensorStateClass.TOTAL,
         suggested_display_precision=2,
     ),
     EcoWittSensorTypes.RAIN_RATE_MM: SensorEntityDescription(
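For context on the rain-counter change above: `TOTAL_INCREASING` tells the statistics engine that any drop in value is a meter reset, while `TOTAL` allows the value to go down, which matters for rain counters that the weather station zeroes on its own schedule. A hedged sketch of the distinction in generic entity descriptions (not the Ecowitt code itself; key names are illustrative):

```python
from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.const import UnitOfPrecipitationDepth

# A counter the device may reset at arbitrary times: TOTAL treats a decrease
# as a real new total instead of misreading it as a rollover.
RAIN_TOTAL = SensorEntityDescription(
    key="rain_total",
    native_unit_of_measurement=UnitOfPrecipitationDepth.MILLIMETERS,
    device_class=SensorDeviceClass.PRECIPITATION,
    state_class=SensorStateClass.TOTAL,
)

# A counter that only ever grows (e.g. lifetime rainfall): TOTAL_INCREASING
# lets the statistics engine infer resets from any decrease.
RAIN_LIFETIME = SensorEntityDescription(
    key="rain_lifetime",
    native_unit_of_measurement=UnitOfPrecipitationDepth.MILLIMETERS,
    device_class=SensorDeviceClass.PRECIPITATION,
    state_class=SensorStateClass.TOTAL_INCREASING,
)
```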
homeassistant/components/esphome/manifest.json

@@ -17,7 +17,7 @@
   "mqtt": ["esphome/discover/#"],
   "quality_scale": "platinum",
   "requirements": [
-    "aioesphomeapi==42.2.0",
+    "aioesphomeapi==42.4.0",
     "esphome-dashboard-api==1.3.0",
     "bleak-esphome==3.4.0"
   ],
homeassistant/components/fastdotcom/coordinator.py

@@ -15,7 +15,7 @@ from .const import DEFAULT_INTERVAL, DOMAIN, LOGGER
 type FastdotcomConfigEntry = ConfigEntry[FastdotcomDataUpdateCoordinator]


-class FastdotcomDataUpdateCoordinator(DataUpdateCoordinator[float]):
+class FastdotcomDataUpdateCoordinator(DataUpdateCoordinator[dict[str, float] | None]):
     """Class to manage fetching Fast.com data API."""

     def __init__(self, hass: HomeAssistant, entry: FastdotcomConfigEntry) -> None:
@@ -28,7 +28,7 @@ class FastdotcomDataUpdateCoordinator(DataUpdateCoordinator[float]):
             update_interval=timedelta(hours=DEFAULT_INTERVAL),
         )

-    async def _async_update_data(self) -> float:
+    async def _async_update_data(self) -> dict[str, float] | None:
         """Run an executor job to retrieve Fast.com data."""
         try:
             return await self.hass.async_add_executor_job(fast_com)
homeassistant/components/fastdotcom/manifest.json

@@ -7,6 +7,6 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["fastdotcom"],
-  "requirements": ["fastdotcom==0.0.3"],
+  "requirements": ["fastdotcom==0.0.6"],
   "single_config_entry": true
 }
homeassistant/components/fastdotcom/sensor.py

@@ -51,6 +51,8 @@ class SpeedtestSensor(CoordinatorEntity[FastdotcomDataUpdateCoordinator], Sensor
     @property
     def native_value(
         self,
-    ) -> float:
+    ) -> float | None:
         """Return the state of the sensor."""
-        return self.coordinator.data
+        if self.coordinator.data is None:
+            return None
+        return self.coordinator.data.get("download_speed")
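The fastdotcom coordinator now reports a dict (or None while no measurement has completed) instead of a bare float, and the sensor picks out the download figure. A hedged sketch of that coordinator-to-sensor contract in plain Python (payload keys other than `download_speed` are illustrative):

```python
def extract_download_speed(data: dict[str, float] | None) -> float | None:
    """Mirror the sensor's native_value logic for the dict-shaped payload."""
    if data is None:
        # No successful speed test yet: report an unknown state.
        return None
    return data.get("download_speed")


# Example payloads the coordinator could hand over (values are made up).
assert extract_download_speed(None) is None
assert extract_download_speed({"download_speed": 123.4, "upload_speed": 45.6}) == 123.4
```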
homeassistant/components/hassio/__init__.py

@@ -13,6 +13,7 @@ import struct
 from typing import Any, NamedTuple

 from aiohasupervisor import SupervisorError
+from aiohasupervisor.models import GreenOptions, YellowOptions  # noqa: F401
 import voluptuous as vol

 from homeassistant.auth.const import GROUP_ID_ADMIN
@@ -123,11 +124,6 @@ from .discovery import async_setup_discovery_view
 from .handler import (  # noqa: F401
     HassIO,
     HassioAPIError,
-    async_create_backup,
-    async_get_green_settings,
-    async_get_yellow_settings,
-    async_set_green_settings,
-    async_set_yellow_settings,
     async_update_diagnostics,
     get_supervisor_client,
 )
homeassistant/components/hassio/addon_manager.py

@@ -15,13 +15,14 @@ from aiohasupervisor.models import (
     AddonsOptions,
     AddonState as SupervisorAddonState,
     InstalledAddonComplete,
+    PartialBackupOptions,
     StoreAddonUpdate,
 )

 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError

-from .handler import HassioAPIError, async_create_backup, get_supervisor_client
+from .handler import HassioAPIError, get_supervisor_client

 type _FuncType[_T, **_P, _R] = Callable[Concatenate[_T, _P], Awaitable[_R]]
 type _ReturnFuncType[_T, **_P, _R] = Callable[
@@ -261,17 +262,18 @@ class AddonManager:
         """Stop the managed add-on."""
         await self._supervisor_client.addons.stop_addon(self.addon_slug)

-    @api_error("Failed to create a backup of the {addon_name} add-on")
+    @api_error(
+        "Failed to create a backup of the {addon_name} add-on",
+        expected_error_type=SupervisorError,
+    )
     async def async_create_backup(self) -> None:
         """Create a partial backup of the managed add-on."""
         addon_info = await self.async_get_addon_info()
         name = f"addon_{self.addon_slug}_{addon_info.version}"

         self._logger.debug("Creating backup: %s", name)
-        await async_create_backup(
-            self._hass,
-            {"name": name, "addons": [self.addon_slug]},
-            partial=True,
+        await self._supervisor_client.backups.partial_backup(
+            PartialBackupOptions(name=name, addons={self.addon_slug})
         )

     async def async_configure_addon(
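The backup helper now goes straight through the typed aiohasupervisor client rather than the legacy `send_command` wrapper. A hedged usage sketch of the same call from integration code (it assumes the hassio component is set up so `get_supervisor_client` returns a live client):

```python
from aiohasupervisor import SupervisorError
from aiohasupervisor.models import PartialBackupOptions

from homeassistant.components.hassio import get_supervisor_client
from homeassistant.core import HomeAssistant


async def backup_addon(hass: HomeAssistant, addon_slug: str, version: str) -> None:
    """Create a partial backup of a single add-on via the Supervisor client."""
    client = get_supervisor_client(hass)
    try:
        await client.backups.partial_backup(
            PartialBackupOptions(
                name=f"addon_{addon_slug}_{version}", addons={addon_slug}
            )
        )
    except SupervisorError as err:
        # Callers are expected to translate this into a HomeAssistantError,
        # which is what the api_error decorator above does for AddonManager.
        raise RuntimeError(f"Backup of {addon_slug} failed") from err
```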
@@ -10,6 +10,7 @@ import os
|
||||
from typing import Any
|
||||
|
||||
from aiohasupervisor import SupervisorClient
|
||||
from aiohasupervisor.models import SupervisorOptions
|
||||
import aiohttp
|
||||
from yarl import URL
|
||||
|
||||
@@ -22,7 +23,6 @@ from homeassistant.components.http import (
|
||||
from homeassistant.const import SERVER_PORT
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.singleton import singleton
|
||||
from homeassistant.loader import bind_hass
|
||||
|
||||
from .const import ATTR_MESSAGE, ATTR_RESULT, DATA_COMPONENT, X_HASS_SOURCE
|
||||
|
||||
@@ -66,73 +66,6 @@ def api_data[**_P](
|
||||
return _wrapper
|
||||
|
||||
|
||||
@bind_hass
|
||||
async def async_update_diagnostics(hass: HomeAssistant, diagnostics: bool) -> bool:
|
||||
"""Update Supervisor diagnostics toggle.
|
||||
|
||||
The caller of the function should handle HassioAPIError.
|
||||
"""
|
||||
hassio = hass.data[DATA_COMPONENT]
|
||||
return await hassio.update_diagnostics(diagnostics)
|
||||
|
||||
|
||||
@bind_hass
|
||||
@api_data
|
||||
async def async_create_backup(
|
||||
hass: HomeAssistant, payload: dict, partial: bool = False
|
||||
) -> dict:
|
||||
"""Create a full or partial backup.
|
||||
|
||||
The caller of the function should handle HassioAPIError.
|
||||
"""
|
||||
hassio = hass.data[DATA_COMPONENT]
|
||||
backup_type = "partial" if partial else "full"
|
||||
command = f"/backups/new/{backup_type}"
|
||||
return await hassio.send_command(command, payload=payload, timeout=None)
|
||||
|
||||
|
||||
@api_data
|
||||
async def async_get_green_settings(hass: HomeAssistant) -> dict[str, bool]:
|
||||
"""Return settings specific to Home Assistant Green."""
|
||||
hassio = hass.data[DATA_COMPONENT]
|
||||
return await hassio.send_command("/os/boards/green", method="get")
|
||||
|
||||
|
||||
@api_data
|
||||
async def async_set_green_settings(
|
||||
hass: HomeAssistant, settings: dict[str, bool]
|
||||
) -> dict:
|
||||
"""Set settings specific to Home Assistant Green.
|
||||
|
||||
Returns an empty dict.
|
||||
"""
|
||||
hassio = hass.data[DATA_COMPONENT]
|
||||
return await hassio.send_command(
|
||||
"/os/boards/green", method="post", payload=settings
|
||||
)
|
||||
|
||||
|
||||
@api_data
|
||||
async def async_get_yellow_settings(hass: HomeAssistant) -> dict[str, bool]:
|
||||
"""Return settings specific to Home Assistant Yellow."""
|
||||
hassio = hass.data[DATA_COMPONENT]
|
||||
return await hassio.send_command("/os/boards/yellow", method="get")
|
||||
|
||||
|
||||
@api_data
|
||||
async def async_set_yellow_settings(
|
||||
hass: HomeAssistant, settings: dict[str, bool]
|
||||
) -> dict:
|
||||
"""Set settings specific to Home Assistant Yellow.
|
||||
|
||||
Returns an empty dict.
|
||||
"""
|
||||
hassio = hass.data[DATA_COMPONENT]
|
||||
return await hassio.send_command(
|
||||
"/os/boards/yellow", method="post", payload=settings
|
||||
)
|
||||
|
||||
|
||||
class HassIO:
|
||||
"""Small API wrapper for Hass.io."""
|
||||
|
||||
@@ -257,16 +190,6 @@ class HassIO:
|
||||
"/supervisor/options", payload={"timezone": timezone, "country": country}
|
||||
)
|
||||
|
||||
@_api_bool
|
||||
def update_diagnostics(self, diagnostics: bool) -> Coroutine:
|
||||
"""Update Supervisor diagnostics setting.
|
||||
|
||||
This method returns a coroutine.
|
||||
"""
|
||||
return self.send_command(
|
||||
"/supervisor/options", payload={"diagnostics": diagnostics}
|
||||
)
|
||||
|
||||
async def send_command(
|
||||
self,
|
||||
command: str,
|
||||
@@ -341,3 +264,13 @@ def get_supervisor_client(hass: HomeAssistant) -> SupervisorClient:
|
||||
os.environ.get("SUPERVISOR_TOKEN", ""),
|
||||
session=hassio.websession,
|
||||
)
|
||||
|
||||
|
||||
async def async_update_diagnostics(hass: HomeAssistant, diagnostics: bool) -> None:
|
||||
"""Update Supervisor diagnostics toggle.
|
||||
|
||||
The caller of the function should handle SupervisorError.
|
||||
"""
|
||||
await get_supervisor_client(hass).supervisor.set_options(
|
||||
SupervisorOptions(diagnostics=diagnostics)
|
||||
)
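async_update_diagnostics (shown just above) now returns None and lets aiohasupervisor's SupervisorError propagate, as its docstring states. A minimal caller sketch, assuming an async context; toggle_diagnostics and its logger are illustrative, not part of this change:

import logging

from aiohasupervisor import SupervisorError

from homeassistant.core import HomeAssistant

_LOGGER = logging.getLogger(__name__)


async def toggle_diagnostics(hass: HomeAssistant, enabled: bool) -> bool:
    """Flip the Supervisor diagnostics option and report whether it worked."""
    try:
        await async_update_diagnostics(hass, enabled)
    except SupervisorError as err:
        _LOGGER.warning("Could not update Supervisor diagnostics: %s", err)
        return False
    return True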
@@ -81,6 +81,7 @@ set_program_and_options:
|
||||
- dishcare_dishwasher_program_steam_fresh
|
||||
- dishcare_dishwasher_program_maximum_cleaning
|
||||
- dishcare_dishwasher_program_mixed_load
|
||||
- dishcare_dishwasher_program_learning_dishwasher
|
||||
- laundry_care_dryer_program_cotton
|
||||
- laundry_care_dryer_program_synthetic
|
||||
- laundry_care_dryer_program_mix
|
||||
|
||||
@@ -268,6 +268,7 @@
|
||||
"dishcare_dishwasher_program_intensiv_70": "[%key:component::home_connect::selector::programs::options::dishcare_dishwasher_program_intensiv_70%]",
|
||||
"dishcare_dishwasher_program_intensiv_power": "[%key:component::home_connect::selector::programs::options::dishcare_dishwasher_program_intensiv_power%]",
|
||||
"dishcare_dishwasher_program_kurz_60": "[%key:component::home_connect::selector::programs::options::dishcare_dishwasher_program_kurz_60%]",
|
||||
"dishcare_dishwasher_program_learning_dishwasher": "[%key:component::home_connect::selector::programs::options::dishcare_dishwasher_program_learning_dishwasher%]",
|
||||
"dishcare_dishwasher_program_machine_care": "[%key:component::home_connect::selector::programs::options::dishcare_dishwasher_program_machine_care%]",
|
||||
"dishcare_dishwasher_program_magic_daily": "[%key:component::home_connect::selector::programs::options::dishcare_dishwasher_program_magic_daily%]",
|
||||
"dishcare_dishwasher_program_maximum_cleaning": "[%key:component::home_connect::selector::programs::options::dishcare_dishwasher_program_maximum_cleaning%]",
|
||||
@@ -590,6 +591,7 @@
|
||||
"dishcare_dishwasher_program_intensiv_70": "[%key:component::home_connect::selector::programs::options::dishcare_dishwasher_program_intensiv_70%]",
|
||||
"dishcare_dishwasher_program_intensiv_power": "[%key:component::home_connect::selector::programs::options::dishcare_dishwasher_program_intensiv_power%]",
|
||||
"dishcare_dishwasher_program_kurz_60": "[%key:component::home_connect::selector::programs::options::dishcare_dishwasher_program_kurz_60%]",
|
||||
"dishcare_dishwasher_program_learning_dishwasher": "[%key:component::home_connect::selector::programs::options::dishcare_dishwasher_program_learning_dishwasher%]",
|
||||
"dishcare_dishwasher_program_machine_care": "[%key:component::home_connect::selector::programs::options::dishcare_dishwasher_program_machine_care%]",
|
||||
"dishcare_dishwasher_program_magic_daily": "[%key:component::home_connect::selector::programs::options::dishcare_dishwasher_program_magic_daily%]",
|
||||
"dishcare_dishwasher_program_maximum_cleaning": "[%key:component::home_connect::selector::programs::options::dishcare_dishwasher_program_maximum_cleaning%]",
|
||||
@@ -1537,6 +1539,7 @@
|
||||
"dishcare_dishwasher_program_intensiv_70": "Intensive 70ºC",
|
||||
"dishcare_dishwasher_program_intensiv_power": "Intensive power",
|
||||
"dishcare_dishwasher_program_kurz_60": "Speed 60ºC",
|
||||
"dishcare_dishwasher_program_learning_dishwasher": "Intelligent",
|
||||
"dishcare_dishwasher_program_machine_care": "Machine care",
|
||||
"dishcare_dishwasher_program_magic_daily": "Magic daily",
|
||||
"dishcare_dishwasher_program_maximum_cleaning": "Maximum cleaning",
|
||||
|
||||
@@ -6,13 +6,12 @@ import asyncio
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import aiohttp
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.hassio import (
|
||||
HassioAPIError,
|
||||
async_get_green_settings,
|
||||
async_set_green_settings,
|
||||
GreenOptions,
|
||||
SupervisorError,
|
||||
get_supervisor_client,
|
||||
)
|
||||
from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
@@ -20,7 +19,7 @@ from homeassistant.config_entries import (
|
||||
ConfigFlowResult,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.core import HomeAssistant, async_get_hass, callback
|
||||
from homeassistant.helpers import selector
|
||||
from homeassistant.helpers.hassio import is_hassio
|
||||
|
||||
@@ -49,7 +48,7 @@ class HomeAssistantGreenConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
config_entry: ConfigEntry,
|
||||
) -> HomeAssistantGreenOptionsFlow:
|
||||
"""Return the options flow."""
|
||||
return HomeAssistantGreenOptionsFlow()
|
||||
return HomeAssistantGreenOptionsFlow(async_get_hass())
|
||||
|
||||
async def async_step_system(
|
||||
self, data: dict[str, Any] | None = None
|
||||
@@ -63,6 +62,11 @@ class HomeAssistantGreenOptionsFlow(OptionsFlow):
|
||||
|
||||
_hw_settings: dict[str, bool] | None = None
|
||||
|
||||
def __init__(self, hass: HomeAssistant, *args: Any, **kwargs: Any) -> None:
|
||||
"""Instantiate options flow."""
|
||||
super().__init__(*args, **kwargs)
|
||||
self._supervisor_client = get_supervisor_client(hass)
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -76,27 +80,27 @@ class HomeAssistantGreenOptionsFlow(OptionsFlow):
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle hardware settings."""
|
||||
|
||||
if user_input is not None:
|
||||
if self._hw_settings == user_input:
|
||||
return self.async_create_entry(data={})
|
||||
try:
|
||||
async with asyncio.timeout(10):
|
||||
await async_set_green_settings(self.hass, user_input)
|
||||
except (aiohttp.ClientError, TimeoutError, HassioAPIError) as err:
|
||||
await self._supervisor_client.os.set_green_options(
|
||||
GreenOptions.from_dict(user_input)
|
||||
)
|
||||
except (TimeoutError, SupervisorError) as err:
|
||||
_LOGGER.warning("Failed to write hardware settings", exc_info=err)
|
||||
return self.async_abort(reason="write_hw_settings_error")
|
||||
return self.async_create_entry(data={})
|
||||
|
||||
try:
|
||||
async with asyncio.timeout(10):
|
||||
self._hw_settings: dict[str, bool] = await async_get_green_settings(
|
||||
self.hass
|
||||
)
|
||||
except (aiohttp.ClientError, TimeoutError, HassioAPIError) as err:
|
||||
green_info = await self._supervisor_client.os.green_info()
|
||||
except (TimeoutError, SupervisorError) as err:
|
||||
_LOGGER.warning("Failed to read hardware settings", exc_info=err)
|
||||
return self.async_abort(reason="read_hw_settings_error")
|
||||
|
||||
self._hw_settings: dict[str, bool] = green_info.to_dict()
|
||||
schema = self.add_suggested_values_to_schema(
|
||||
STEP_HW_SETTINGS_SCHEMA, self._hw_settings
|
||||
)
|
||||
|
||||
@@ -7,13 +7,11 @@ import asyncio
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any, Protocol, final
|
||||
|
||||
import aiohttp
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.hassio import (
|
||||
HassioAPIError,
|
||||
async_get_yellow_settings,
|
||||
async_set_yellow_settings,
|
||||
SupervisorError,
|
||||
YellowOptions,
|
||||
get_supervisor_client,
|
||||
)
|
||||
from homeassistant.components.homeassistant_hardware.firmware_config_flow import (
|
||||
@@ -222,21 +220,22 @@ class BaseHomeAssistantYellowOptionsFlow(OptionsFlow, ABC):
|
||||
return self.async_create_entry(data={})
|
||||
try:
|
||||
async with asyncio.timeout(10):
|
||||
await async_set_yellow_settings(self.hass, user_input)
|
||||
except (aiohttp.ClientError, TimeoutError, HassioAPIError) as err:
|
||||
await self._supervisor_client.os.set_yellow_options(
|
||||
YellowOptions.from_dict(user_input)
|
||||
)
|
||||
except (TimeoutError, SupervisorError) as err:
|
||||
_LOGGER.warning("Failed to write hardware settings", exc_info=err)
|
||||
return self.async_abort(reason="write_hw_settings_error")
|
||||
return await self.async_step_reboot_menu()
|
||||
|
||||
try:
|
||||
async with asyncio.timeout(10):
|
||||
self._hw_settings: dict[str, bool] = await async_get_yellow_settings(
|
||||
self.hass
|
||||
)
|
||||
except (aiohttp.ClientError, TimeoutError, HassioAPIError) as err:
|
||||
yellow_info = await self._supervisor_client.os.yellow_info()
|
||||
except (TimeoutError, SupervisorError) as err:
|
||||
_LOGGER.warning("Failed to read hardware settings", exc_info=err)
|
||||
return self.async_abort(reason="read_hw_settings_error")
|
||||
|
||||
self._hw_settings: dict[str, bool] = yellow_info.to_dict()
|
||||
schema = self.add_suggested_values_to_schema(
|
||||
STEP_HW_SETTINGS_SCHEMA, self._hw_settings
|
||||
)
|
||||
|
||||
@@ -9,5 +9,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["aioautomower"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["aioautomower==2.6.0"]
|
||||
"requirements": ["aioautomower==2.7.0"]
|
||||
}
|
||||
|
||||
@@ -20,5 +20,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/ld2410_ble",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["bluetooth-data-tools==1.28.3", "ld2410-ble==0.1.1"]
|
||||
"requirements": ["bluetooth-data-tools==1.28.4", "ld2410-ble==0.1.1"]
|
||||
}
|
||||
|
||||
@@ -35,5 +35,5 @@
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/led_ble",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["bluetooth-data-tools==1.28.3", "led-ble==1.1.7"]
|
||||
"requirements": ["bluetooth-data-tools==1.28.4", "led-ble==1.1.7"]
|
||||
}
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["letpot"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["letpot==0.6.2"]
|
||||
"requirements": ["letpot==0.6.3"]
|
||||
}
|
||||
|
||||
@@ -9,6 +9,7 @@ from typing import Any
|
||||
from chip.clusters import Objects as clusters
|
||||
from chip.clusters.Objects import ClusterCommand, NullValue
|
||||
from matter_server.client.models import device_types
|
||||
from matter_server.common.errors import MatterError
|
||||
|
||||
from homeassistant.components.switch import (
|
||||
SwitchDeviceClass,
|
||||
@@ -18,6 +19,7 @@ from homeassistant.components.switch import (
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import EntityCategory, Platform
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .entity import MatterEntity, MatterEntityDescription
|
||||
@@ -54,15 +56,21 @@ class MatterSwitch(MatterEntity, SwitchEntity):
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn switch on."""
|
||||
await self.send_device_command(
|
||||
clusters.OnOff.Commands.On(),
|
||||
)
|
||||
try:
|
||||
await self.send_device_command(
|
||||
clusters.OnOff.Commands.On(),
|
||||
)
|
||||
except MatterError as err:
|
||||
raise HomeAssistantError(f"Failed to set value: {err}") from err
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn switch off."""
|
||||
await self.send_device_command(
|
||||
clusters.OnOff.Commands.Off(),
|
||||
)
|
||||
try:
|
||||
await self.send_device_command(
|
||||
clusters.OnOff.Commands.Off(),
|
||||
)
|
||||
except MatterError as err:
|
||||
raise HomeAssistantError(f"Failed to set value: {err}") from err
|
||||
|
||||
@callback
|
||||
def _update_from_device(self) -> None:
|
||||
@@ -83,18 +91,24 @@ class MatterGenericCommandSwitch(MatterSwitch):
|
||||
"""Turn switch on."""
|
||||
if self.entity_description.on_command:
|
||||
# custom command defined to set the new value
|
||||
await self.send_device_command(
|
||||
self.entity_description.on_command(),
|
||||
self.entity_description.command_timeout,
|
||||
)
|
||||
try:
|
||||
await self.send_device_command(
|
||||
self.entity_description.on_command(),
|
||||
self.entity_description.command_timeout,
|
||||
)
|
||||
except MatterError as err:
|
||||
raise HomeAssistantError(f"Failed to set value: {err}") from err
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn switch off."""
|
||||
if self.entity_description.off_command:
|
||||
await self.send_device_command(
|
||||
self.entity_description.off_command(),
|
||||
self.entity_description.command_timeout,
|
||||
)
|
||||
try:
|
||||
await self.send_device_command(
|
||||
self.entity_description.off_command(),
|
||||
self.entity_description.command_timeout,
|
||||
)
|
||||
except MatterError as err:
|
||||
raise HomeAssistantError(f"Failed to set value: {err}") from err
|
||||
|
||||
@callback
|
||||
def _update_from_device(self) -> None:
|
||||
@@ -111,13 +125,16 @@ class MatterGenericCommandSwitch(MatterSwitch):
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
"""Send device command with timeout."""
|
||||
await self.matter_client.send_device_command(
|
||||
node_id=self._endpoint.node.node_id,
|
||||
endpoint_id=self._endpoint.endpoint_id,
|
||||
command=command,
|
||||
timed_request_timeout_ms=command_timeout,
|
||||
**kwargs,
|
||||
)
|
||||
try:
|
||||
await self.matter_client.send_device_command(
|
||||
node_id=self._endpoint.node.node_id,
|
||||
endpoint_id=self._endpoint.endpoint_id,
|
||||
command=command,
|
||||
timed_request_timeout_ms=command_timeout,
|
||||
**kwargs,
|
||||
)
|
||||
except MatterError as err:
|
||||
raise HomeAssistantError(f"Failed to set value: {err}") from err
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
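Each of the switch methods above repeats the same try/except around send_device_command. As a sketch of one possible refactor (not part of this change), the MatterError-to-HomeAssistantError conversion could live in a small helper shared by both switch classes:

from typing import Any

from chip.clusters.Objects import ClusterCommand
from matter_server.common.errors import MatterError

from homeassistant.exceptions import HomeAssistantError


async def send_command_or_raise(
    entity: "MatterSwitch", command: ClusterCommand, *args: Any, **kwargs: Any
) -> None:
    """Send a Matter command and surface failures as HomeAssistantError (sketch)."""
    try:
        await entity.send_device_command(command, *args, **kwargs)
    except MatterError as err:
        raise HomeAssistantError(f"Failed to set value: {err}") from err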
|
||||
|
||||
@@ -64,6 +64,10 @@ ATTR_BIT_DEPTH = "bit_depth"
|
||||
ATTR_STREAM_TITLE = "stream_title"
|
||||
ATTR_PROVIDER = "provider"
|
||||
ATTR_ITEM_ID = "item_id"
|
||||
ATTR_EXPLICIT = "explicit"
|
||||
ATTR_BITRATE = "bit_rate"
|
||||
ATTR_DISCART_IMAGE = "discart_image"
|
||||
ATTR_FANART_IMAGE = "fanart_image"
|
||||
|
||||
ATTR_CONF_EXPOSE_PLAYER_TO_HA = "expose_player_to_ha"
|
||||
|
||||
|
||||
@@ -4,7 +4,8 @@ from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from music_assistant_models.enums import MediaType
|
||||
from music_assistant_models.enums import ImageType, MediaType
|
||||
from music_assistant_models.media_items import ItemMapping
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import ATTR_NAME
|
||||
@@ -17,11 +18,16 @@ from .const import (
|
||||
ATTR_ARTISTS,
|
||||
ATTR_AUDIOBOOKS,
|
||||
ATTR_BIT_DEPTH,
|
||||
ATTR_BITRATE,
|
||||
ATTR_CONTENT_TYPE,
|
||||
ATTR_CURRENT_INDEX,
|
||||
ATTR_CURRENT_ITEM,
|
||||
ATTR_DISCART_IMAGE,
|
||||
ATTR_DURATION,
|
||||
ATTR_ELAPSED_TIME,
|
||||
ATTR_EXPLICIT,
|
||||
ATTR_FANART_IMAGE,
|
||||
ATTR_FAVORITE,
|
||||
ATTR_IMAGE,
|
||||
ATTR_ITEM_ID,
|
||||
ATTR_ITEMS,
|
||||
@@ -49,7 +55,7 @@ from .const import (
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from music_assistant_client import MusicAssistantClient
|
||||
from music_assistant_models.media_items import ItemMapping, MediaItemType
|
||||
from music_assistant_models.media_items import MediaItemType
|
||||
from music_assistant_models.queue_item import QueueItem
|
||||
|
||||
MEDIA_ITEM_SCHEMA = vol.Schema(
|
||||
@@ -58,7 +64,11 @@ MEDIA_ITEM_SCHEMA = vol.Schema(
|
||||
vol.Required(ATTR_URI): cv.string,
|
||||
vol.Required(ATTR_NAME): cv.string,
|
||||
vol.Required(ATTR_VERSION): cv.string,
|
||||
vol.Optional(ATTR_IMAGE, default=None): vol.Any(None, cv.string),
|
||||
vol.Required(ATTR_IMAGE, default=None): vol.Any(None, cv.string),
|
||||
vol.Optional(ATTR_FAVORITE): bool,
|
||||
vol.Optional(ATTR_EXPLICIT): vol.Any(None, bool),
|
||||
vol.Optional(ATTR_DISCART_IMAGE): vol.Any(None, cv.string),
|
||||
vol.Optional(ATTR_FANART_IMAGE): vol.Any(None, cv.string),
|
||||
vol.Optional(ATTR_ARTISTS): [vol.Self],
|
||||
vol.Optional(ATTR_ALBUM): vol.Self,
|
||||
}
|
||||
@@ -70,20 +80,39 @@ def media_item_dict_from_mass_item(
|
||||
item: MediaItemType | ItemMapping,
|
||||
) -> dict[str, Any]:
|
||||
"""Parse a Music Assistant MediaItem."""
|
||||
base: dict[str, Any] = {
|
||||
result: dict[str, Any] = {
|
||||
ATTR_MEDIA_TYPE: item.media_type,
|
||||
ATTR_URI: item.uri,
|
||||
ATTR_NAME: item.name,
|
||||
ATTR_VERSION: item.version,
|
||||
ATTR_IMAGE: mass.get_media_item_image_url(item),
|
||||
}
|
||||
|
||||
if isinstance(item, ItemMapping):
|
||||
return result
|
||||
|
||||
result[ATTR_FAVORITE] = item.favorite
|
||||
result[ATTR_EXPLICIT] = item.metadata.explicit
|
||||
|
||||
if item.media_type is MediaType.ALBUM:
|
||||
result[ATTR_DISCART_IMAGE] = mass.get_media_item_image_url(
|
||||
item, type=ImageType.DISCART
|
||||
)
|
||||
if item.media_type is MediaType.ARTIST:
|
||||
result[ATTR_FANART_IMAGE] = mass.get_media_item_image_url(
|
||||
item, type=ImageType.FANART
|
||||
)
|
||||
|
||||
artists: list[ItemMapping] | None
|
||||
if artists := getattr(item, "artists", None):
|
||||
base[ATTR_ARTISTS] = [media_item_dict_from_mass_item(mass, x) for x in artists]
|
||||
result[ATTR_ARTISTS] = [
|
||||
media_item_dict_from_mass_item(mass, x) for x in artists
|
||||
]
|
||||
album: ItemMapping | None
|
||||
if album := getattr(item, "album", None):
|
||||
base[ATTR_ALBUM] = media_item_dict_from_mass_item(mass, album)
|
||||
return base
|
||||
result[ATTR_ALBUM] = media_item_dict_from_mass_item(mass, album)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
SEARCH_RESULT_SCHEMA = vol.Schema(
|
||||
@@ -126,11 +155,12 @@ LIBRARY_RESULTS_SCHEMA = vol.Schema(
|
||||
|
||||
AUDIO_FORMAT_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_PROVIDER): str,
|
||||
vol.Required(ATTR_ITEM_ID): str,
|
||||
vol.Required(ATTR_CONTENT_TYPE): str,
|
||||
vol.Required(ATTR_SAMPLE_RATE): int,
|
||||
vol.Required(ATTR_BIT_DEPTH): int,
|
||||
vol.Required(ATTR_PROVIDER): str,
|
||||
vol.Required(ATTR_ITEM_ID): str,
|
||||
vol.Optional(ATTR_BITRATE): int,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -142,8 +172,8 @@ QUEUE_ITEM_SCHEMA = vol.Schema(
|
||||
vol.Optional(ATTR_MEDIA_ITEM, default=None): vol.Any(
|
||||
None, vol.Schema(MEDIA_ITEM_SCHEMA)
|
||||
),
|
||||
vol.Optional(ATTR_STREAM_DETAILS): vol.Schema(AUDIO_FORMAT_SCHEMA),
|
||||
vol.Optional(ATTR_STREAM_TITLE, default=None): vol.Any(None, cv.string),
|
||||
vol.Optional(ATTR_STREAM_DETAILS): vol.Schema(AUDIO_FORMAT_SCHEMA),
|
||||
}
|
||||
)
|
||||
|
||||
@@ -155,7 +185,7 @@ def queue_item_dict_from_mass_item(
|
||||
"""Parse a Music Assistant QueueItem."""
|
||||
if not item:
|
||||
return None
|
||||
base = {
|
||||
result = {
|
||||
ATTR_QUEUE_ITEM_ID: item.queue_item_id,
|
||||
ATTR_NAME: item.name,
|
||||
ATTR_DURATION: item.duration,
|
||||
@@ -166,16 +196,19 @@ def queue_item_dict_from_mass_item(
|
||||
),
|
||||
}
|
||||
if streamdetails := item.streamdetails:
|
||||
base[ATTR_STREAM_TITLE] = streamdetails.stream_title
|
||||
base[ATTR_STREAM_DETAILS] = {
|
||||
result[ATTR_STREAM_TITLE] = streamdetails.stream_title
|
||||
stream_details_dict: dict[str, Any] = {
|
||||
ATTR_PROVIDER: streamdetails.provider,
|
||||
ATTR_ITEM_ID: streamdetails.item_id,
|
||||
ATTR_CONTENT_TYPE: streamdetails.audio_format.content_type.value,
|
||||
ATTR_SAMPLE_RATE: streamdetails.audio_format.sample_rate,
|
||||
ATTR_BIT_DEPTH: streamdetails.audio_format.bit_depth,
|
||||
ATTR_PROVIDER: streamdetails.provider,
|
||||
ATTR_ITEM_ID: streamdetails.item_id,
|
||||
}
|
||||
if streamdetails.audio_format.bit_rate is not None:
|
||||
stream_details_dict[ATTR_BITRATE] = streamdetails.audio_format.bit_rate
|
||||
result[ATTR_STREAM_DETAILS] = stream_details_dict
|
||||
|
||||
return base
|
||||
return result
|
||||
|
||||
|
||||
QUEUE_DETAILS_SCHEMA = vol.Schema(
|
||||
|
||||
@@ -11,10 +11,13 @@ from pynintendoparental.exceptions import (
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import CONF_SESSION_TOKEN, DOMAIN
|
||||
from .coordinator import NintendoParentalControlsConfigEntry, NintendoUpdateCoordinator
|
||||
from .services import async_setup_services
|
||||
|
||||
_PLATFORMS: list[Platform] = [
|
||||
Platform.SENSOR,
|
||||
@@ -23,6 +26,14 @@ _PLATFORMS: list[Platform] = [
|
||||
Platform.NUMBER,
|
||||
]
|
||||
|
||||
PLATFORM_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Nintendo Switch Parental Controls integration."""
|
||||
async_setup_services(hass)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, entry: NintendoParentalControlsConfigEntry
|
||||
|
||||
@@ -7,3 +7,5 @@ CONF_SESSION_TOKEN = "session_token"
|
||||
BEDTIME_ALARM_MIN = "16:00"
|
||||
BEDTIME_ALARM_MAX = "23:00"
|
||||
BEDTIME_ALARM_DISABLE = "00:00"
|
||||
|
||||
ATTR_BONUS_TIME = "bonus_time"
|
||||
|
||||
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"services": {
|
||||
"add_bonus_time": {
|
||||
"service": "mdi:timer-plus-outline"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,19 +1,13 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
No custom actions are defined.
|
||||
action-setup: done
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: |
|
||||
No custom actions are defined.
|
||||
docs-actions: done
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
|
||||
@@ -0,0 +1,71 @@
|
||||
"""Services for Nintendo Parental integration."""
|
||||
|
||||
from enum import StrEnum
|
||||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import ATTR_DEVICE_ID
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
|
||||
from .const import ATTR_BONUS_TIME, DOMAIN
|
||||
from .coordinator import NintendoParentalControlsConfigEntry
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class NintendoParentalServices(StrEnum):
|
||||
"""Store keys for Nintendo Parental services."""
|
||||
|
||||
ADD_BONUS_TIME = "add_bonus_time"
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(
|
||||
hass: HomeAssistant,
|
||||
):
|
||||
"""Set up the Nintendo Parental services."""
|
||||
hass.services.async_register(
|
||||
domain=DOMAIN,
|
||||
service=NintendoParentalServices.ADD_BONUS_TIME,
|
||||
service_func=async_add_bonus_time,
|
||||
schema=vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_DEVICE_ID): cv.string,
|
||||
vol.Required(ATTR_BONUS_TIME): vol.All(int, vol.Range(min=5, max=30)),
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
def _get_nintendo_device_id(dev: dr.DeviceEntry) -> str | None:
|
||||
"""Get the Nintendo device ID from a device entry."""
|
||||
for identifier in dev.identifiers:
|
||||
if identifier[0] == DOMAIN:
|
||||
return identifier[1].split("_")[-1]
|
||||
return None
|
||||
|
||||
|
||||
async def async_add_bonus_time(call: ServiceCall) -> None:
|
||||
"""Add bonus time to a device."""
|
||||
config_entry: NintendoParentalControlsConfigEntry | None
|
||||
data = call.data
|
||||
device_id: str = data[ATTR_DEVICE_ID]
|
||||
bonus_time: int = data[ATTR_BONUS_TIME]
|
||||
device = dr.async_get(call.hass).async_get(device_id)
|
||||
if device is None:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="device_not_found",
|
||||
)
|
||||
for entry_id in device.config_entries:
|
||||
config_entry = call.hass.config_entries.async_get_entry(entry_id)
|
||||
if config_entry is not None and config_entry.domain == DOMAIN:
|
||||
break
|
||||
nintendo_device_id = _get_nintendo_device_id(device)
|
||||
if config_entry and nintendo_device_id:
|
||||
await config_entry.runtime_data.api.devices[nintendo_device_id].add_extra_time(
|
||||
bonus_time
|
||||
)
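The add_bonus_time action registered above takes a Home Assistant device registry ID plus a bonus_time of 5 to 30 minutes. A quick illustration of invoking it from an async context where hass is available; the device ID is the placeholder value from the services.yaml example below:

# Illustrative call of the new action.
await hass.services.async_call(
    "nintendo_parental_controls",
    "add_bonus_time",
    {
        "device_id": "1234567890abcdef1234567890abcdef",  # device registry ID
        "bonus_time": 15,  # minutes; the schema allows 5-30
    },
    blocking=True,
)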
|
||||
@@ -0,0 +1,17 @@
|
||||
add_bonus_time:
|
||||
fields:
|
||||
bonus_time:
|
||||
required: true
|
||||
example: 30
|
||||
selector:
|
||||
number:
|
||||
min: -1
|
||||
max: 1440
|
||||
unit_of_measurement: minutes
|
||||
mode: box
|
||||
device_id:
|
||||
required: true
|
||||
example: 1234567890abcdef1234567890abcdef
|
||||
selector:
|
||||
device:
|
||||
integration: nintendo_parental_controls
|
||||
@@ -61,6 +61,29 @@
|
||||
},
|
||||
"bedtime_alarm_out_of_range": {
|
||||
"message": "{value} not accepted. Bedtime Alarm must be between {bedtime_alarm_min} and {bedtime_alarm_max}. To disable, set to {bedtime_alarm_disable}."
|
||||
},
|
||||
"config_entry_not_found": {
|
||||
"message": "Config entry not found."
|
||||
},
|
||||
"device_not_found": {
|
||||
"message": "Device not found."
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"add_bonus_time": {
|
||||
"description": "Add bonus screen time to the selected Nintendo Switch.",
|
||||
"fields": {
|
||||
"bonus_time": {
|
||||
"description": "The amount of bonus time to add in minutes. Maximum is 30 minutes, minimum is 5.",
|
||||
"name": "Bonus Time"
|
||||
},
|
||||
"device_id": {
|
||||
"description": "The ID of the device to add bonus time to.",
|
||||
"example": "1234567890abcdef",
|
||||
"name": "Device"
|
||||
}
|
||||
},
|
||||
"name": "Add Bonus Time"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,8 +7,8 @@ import logging
|
||||
from icmplib import SocketPermissionError, async_ping
|
||||
|
||||
from homeassistant.const import CONF_HOST, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
@@ -32,6 +32,19 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: PingConfigEntry) -> bool:
|
||||
"""Set up Ping (ICMP) from a config entry."""
|
||||
|
||||
# Migrate device registry identifiers from homeassistant domain to ping domain
|
||||
registry = dr.async_get(hass)
|
||||
if (
|
||||
device := registry.async_get_device(
|
||||
identifiers={(HOMEASSISTANT_DOMAIN, entry.entry_id)}
|
||||
)
|
||||
) is not None and entry.entry_id in device.config_entries:
|
||||
registry.async_update_device(
|
||||
device_id=device.id,
|
||||
new_identifiers={(DOMAIN, entry.entry_id)},
|
||||
)
|
||||
|
||||
privileged = hass.data[DATA_PRIVILEGED_KEY]
|
||||
|
||||
host: str = entry.options[CONF_HOST]
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
"""Base entity for the Ping component."""
|
||||
|
||||
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import PingConfigEntry, PingUpdateCoordinator
|
||||
|
||||
|
||||
@@ -23,6 +23,6 @@ class PingEntity(CoordinatorEntity[PingUpdateCoordinator]):
|
||||
|
||||
self._attr_unique_id = unique_id
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(HOMEASSISTANT_DOMAIN, config_entry.entry_id)},
|
||||
identifiers={(DOMAIN, config_entry.entry_id)},
|
||||
manufacturer="Ping",
|
||||
)
|
||||
|
||||
@@ -61,30 +61,53 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up Portainer binary sensors."""
|
||||
coordinator = entry.runtime_data
|
||||
entities: list[BinarySensorEntity] = []
|
||||
|
||||
for endpoint in coordinator.data.values():
|
||||
entities.extend(
|
||||
def _async_add_new_endpoints(endpoints: list[PortainerCoordinatorData]) -> None:
|
||||
"""Add new endpoint binary sensors."""
|
||||
async_add_entities(
|
||||
PortainerEndpointSensor(
|
||||
coordinator,
|
||||
entity_description,
|
||||
endpoint,
|
||||
)
|
||||
for entity_description in ENDPOINT_SENSORS
|
||||
for endpoint in endpoints
|
||||
if entity_description.state_fn(endpoint)
|
||||
)
|
||||
|
||||
entities.extend(
|
||||
def _async_add_new_containers(
|
||||
containers: list[tuple[PortainerCoordinatorData, DockerContainer]],
|
||||
) -> None:
|
||||
"""Add new container binary sensors."""
|
||||
async_add_entities(
|
||||
PortainerContainerSensor(
|
||||
coordinator,
|
||||
entity_description,
|
||||
container,
|
||||
endpoint,
|
||||
)
|
||||
for container in endpoint.containers.values()
|
||||
for (endpoint, container) in containers
|
||||
for entity_description in CONTAINER_SENSORS
|
||||
if entity_description.state_fn(container)
|
||||
)
|
||||
|
||||
async_add_entities(entities)
|
||||
coordinator.new_endpoints_callbacks.append(_async_add_new_endpoints)
|
||||
coordinator.new_containers_callbacks.append(_async_add_new_containers)
|
||||
|
||||
_async_add_new_endpoints(
|
||||
[
|
||||
endpoint
|
||||
for endpoint in coordinator.data.values()
|
||||
if endpoint.id in coordinator.known_endpoints
|
||||
]
|
||||
)
|
||||
_async_add_new_containers(
|
||||
[
|
||||
(endpoint, container)
|
||||
for endpoint in coordinator.data.values()
|
||||
for container in endpoint.containers.values()
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class PortainerEndpointSensor(PortainerEndpointEntity, BinarySensorEntity):
|
||||
|
||||
@@ -4,7 +4,6 @@ from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable, Coroutine
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pyportainer import Portainer
|
||||
@@ -30,8 +29,6 @@ from .const import DOMAIN
|
||||
from .coordinator import PortainerCoordinator, PortainerCoordinatorData
|
||||
from .entity import PortainerContainerEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class PortainerButtonDescription(ButtonEntityDescription):
|
||||
@@ -64,18 +61,30 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Portainer buttons."""
|
||||
coordinator: PortainerCoordinator = entry.runtime_data
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
PortainerButton(
|
||||
coordinator=coordinator,
|
||||
entity_description=entity_description,
|
||||
device_info=container,
|
||||
via_device=endpoint,
|
||||
def _async_add_new_containers(
|
||||
containers: list[tuple[PortainerCoordinatorData, DockerContainer]],
|
||||
) -> None:
|
||||
"""Add new container button sensors."""
|
||||
async_add_entities(
|
||||
PortainerButton(
|
||||
coordinator,
|
||||
entity_description,
|
||||
container,
|
||||
endpoint,
|
||||
)
|
||||
for (endpoint, container) in containers
|
||||
for entity_description in BUTTONS
|
||||
)
|
||||
for endpoint in coordinator.data.values()
|
||||
for container in endpoint.containers.values()
|
||||
for entity_description in BUTTONS
|
||||
|
||||
coordinator.new_containers_callbacks.append(_async_add_new_containers)
|
||||
_async_add_new_containers(
|
||||
[
|
||||
(endpoint, container)
|
||||
for endpoint in coordinator.data.values()
|
||||
for container in endpoint.containers.values()
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -130,6 +130,55 @@ class PortainerConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reconfiguration of the integration."""
|
||||
errors: dict[str, str] = {}
|
||||
reconf_entry = self._get_reconfigure_entry()
|
||||
suggested_values = {
|
||||
CONF_URL: reconf_entry.data[CONF_URL],
|
||||
CONF_API_TOKEN: reconf_entry.data[CONF_API_TOKEN],
|
||||
CONF_VERIFY_SSL: reconf_entry.data[CONF_VERIFY_SSL],
|
||||
}
|
||||
|
||||
if user_input:
|
||||
try:
|
||||
await _validate_input(
|
||||
self.hass,
|
||||
data={
|
||||
**reconf_entry.data,
|
||||
**user_input,
|
||||
},
|
||||
)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except PortainerTimeout:
|
||||
errors["base"] = "timeout_connect"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
return self.async_update_reload_and_abort(
|
||||
reconf_entry,
|
||||
data_updates={
|
||||
CONF_URL: user_input[CONF_URL],
|
||||
CONF_API_TOKEN: user_input[CONF_API_TOKEN],
|
||||
CONF_VERIFY_SSL: user_input[CONF_VERIFY_SSL],
|
||||
},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
data_schema=STEP_USER_DATA_SCHEMA,
|
||||
suggested_values=user_input or suggested_values,
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
|
||||
class CannotConnect(HomeAssistantError):
|
||||
"""Error to indicate we cannot connect."""
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
@@ -64,6 +65,16 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
|
||||
)
|
||||
self.portainer = portainer
|
||||
|
||||
self.known_endpoints: set[int] = set()
|
||||
self.known_containers: set[tuple[int, str]] = set()
|
||||
|
||||
self.new_endpoints_callbacks: list[
|
||||
Callable[[list[PortainerCoordinatorData]], None]
|
||||
] = []
|
||||
self.new_containers_callbacks: list[
|
||||
Callable[[list[tuple[PortainerCoordinatorData, DockerContainer]]], None]
|
||||
] = []
|
||||
|
||||
async def _async_setup(self) -> None:
|
||||
"""Set up the Portainer Data Update Coordinator."""
|
||||
try:
|
||||
@@ -152,4 +163,27 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
|
||||
docker_info=docker_info,
|
||||
)
|
||||
|
||||
self._async_add_remove_endpoints(mapped_endpoints)
|
||||
|
||||
return mapped_endpoints
|
||||
|
||||
def _async_add_remove_endpoints(
|
||||
self, mapped_endpoints: dict[int, PortainerCoordinatorData]
|
||||
) -> None:
|
||||
"""Add new endpoints, remove non-existing endpoints."""
|
||||
current_endpoints = {endpoint.id for endpoint in mapped_endpoints.values()}
|
||||
new_endpoints = current_endpoints - self.known_endpoints
|
||||
if new_endpoints:
|
||||
_LOGGER.debug("New endpoints found: %s", new_endpoints)
|
||||
self.known_endpoints.update(new_endpoints)
|
||||
|
||||
# Surprise, we also handle containers here :)
|
||||
current_containers = {
|
||||
(endpoint.id, container.id)
|
||||
for endpoint in mapped_endpoints.values()
|
||||
for container in endpoint.containers.values()
|
||||
}
|
||||
new_containers = current_containers - self.known_containers
|
||||
if new_containers:
|
||||
_LOGGER.debug("New containers found: %s", new_containers)
|
||||
self.known_containers.update(new_containers)
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pyportainer==1.0.9"]
|
||||
"requirements": ["pyportainer==1.0.11"]
|
||||
}
|
||||
|
||||
@@ -67,7 +67,7 @@ rules:
|
||||
entity-translations: todo
|
||||
exception-translations: todo
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: todo
|
||||
reconfiguration-flow: done
|
||||
repair-issues: todo
|
||||
stale-devices: todo
|
||||
|
||||
|
||||
@@ -159,30 +159,53 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up Portainer sensors based on a config entry."""
|
||||
coordinator = entry.runtime_data
|
||||
entities: list[SensorEntity] = []
|
||||
|
||||
for endpoint in coordinator.data.values():
|
||||
entities.extend(
|
||||
def _async_add_new_endpoints(endpoints: list[PortainerCoordinatorData]) -> None:
|
||||
"""Add new endpoint sensor."""
|
||||
async_add_entities(
|
||||
PortainerEndpointSensor(
|
||||
coordinator,
|
||||
entity_description,
|
||||
endpoint,
|
||||
)
|
||||
for entity_description in ENDPOINT_SENSORS
|
||||
for endpoint in endpoints
|
||||
if entity_description.value_fn(endpoint)
|
||||
)
|
||||
|
||||
entities.extend(
|
||||
def _async_add_new_containers(
|
||||
containers: list[tuple[PortainerCoordinatorData, DockerContainer]],
|
||||
) -> None:
|
||||
"""Add new container sensors."""
|
||||
async_add_entities(
|
||||
PortainerContainerSensor(
|
||||
coordinator,
|
||||
entity_description,
|
||||
container,
|
||||
endpoint,
|
||||
)
|
||||
for container in endpoint.containers.values()
|
||||
for (endpoint, container) in containers
|
||||
for entity_description in CONTAINER_SENSORS
|
||||
if entity_description.value_fn(container)
|
||||
)
|
||||
|
||||
async_add_entities(entities)
|
||||
coordinator.new_endpoints_callbacks.append(_async_add_new_endpoints)
|
||||
coordinator.new_containers_callbacks.append(_async_add_new_containers)
|
||||
|
||||
_async_add_new_endpoints(
|
||||
[
|
||||
endpoint
|
||||
for endpoint in coordinator.data.values()
|
||||
if endpoint.id in coordinator.known_endpoints
|
||||
]
|
||||
)
|
||||
_async_add_new_containers(
|
||||
[
|
||||
(endpoint, container)
|
||||
for endpoint in coordinator.data.values()
|
||||
for container in endpoint.containers.values()
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class PortainerContainerSensor(PortainerContainerEntity, SensorEntity):
|
||||
|
||||
@@ -2,7 +2,8 @@
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
@@ -20,6 +21,20 @@
|
||||
},
|
||||
"description": "The access token for your Portainer instance needs to be re-authenticated. You can create a new access token in the Portainer UI. Go to **My account > Access tokens** and select **Add access token**"
|
||||
},
|
||||
"reconfigure": {
|
||||
"data": {
|
||||
"api_token": "[%key:common::config_flow::data::api_token%]",
|
||||
"url": "[%key:common::config_flow::data::url%]",
|
||||
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
|
||||
},
|
||||
"data_description": {
|
||||
"api_token": "[%key:component::portainer::config::step::user::data_description::api_token%]",
|
||||
"url": "[%key:component::portainer::config::step::user::data_description::url%]",
|
||||
"verify_ssl": "[%key:component::portainer::config::step::user::data_description::verify_ssl%]"
|
||||
},
|
||||
"description": "Use the following form to reconfigure your Portainer instance.",
|
||||
"title": "Reconfigure Portainer Integration"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"api_token": "[%key:common::config_flow::data::api_token%]",
|
||||
|
||||
@@ -85,19 +85,30 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Portainer switch sensors."""
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
PortainerContainerSwitch(
|
||||
coordinator=coordinator,
|
||||
entity_description=entity_description,
|
||||
device_info=container,
|
||||
via_device=endpoint,
|
||||
def _async_add_new_containers(
|
||||
containers: list[tuple[PortainerCoordinatorData, DockerContainer]],
|
||||
) -> None:
|
||||
"""Add new container switch sensors."""
|
||||
async_add_entities(
|
||||
PortainerContainerSwitch(
|
||||
coordinator,
|
||||
entity_description,
|
||||
container,
|
||||
endpoint,
|
||||
)
|
||||
for (endpoint, container) in containers
|
||||
for entity_description in SWITCHES
|
||||
)
|
||||
for endpoint in coordinator.data.values()
|
||||
for container in endpoint.containers.values()
|
||||
for entity_description in SWITCHES
|
||||
|
||||
coordinator.new_containers_callbacks.append(_async_add_new_containers)
|
||||
_async_add_new_containers(
|
||||
[
|
||||
(endpoint, container)
|
||||
for endpoint in coordinator.data.values()
|
||||
for container in endpoint.containers.values()
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/private_ble_device",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["bluetooth-data-tools==1.28.3"]
|
||||
"requirements": ["bluetooth-data-tools==1.28.4"]
|
||||
}
|
||||
|
||||
@@ -128,7 +128,7 @@
|
||||
"name": "Push flow meter data"
|
||||
},
|
||||
"push_weather_data": {
|
||||
"description": "Sends weather data from Home Assistant to the RainMachine device.\nLocal Weather Push service should be enabled from Settings > Weather > Developer tab for RainMachine to consider the values being sent. Units must be sent in metric; no conversions are performed by the integraion.\nSee details of RainMachine API here: https://rainmachine.docs.apiary.io/#reference/weather-services/parserdata/post.",
|
||||
"description": "Sends weather data from Home Assistant to the RainMachine device.\nLocal Weather Push service should be enabled from Settings > Weather > Developer tab for RainMachine to consider the values being sent. Units must be sent in metric; no conversions are performed by the integration.\nSee details of RainMachine API here: https://rainmachine.docs.apiary.io/#reference/weather-services/parserdata/post.",
|
||||
"fields": {
|
||||
"condition": {
|
||||
"description": "Current weather condition code (WNUM).",
|
||||
|
||||
@@ -13,7 +13,7 @@ from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.httpx_client import get_async_client
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import DOMAIN, PLATFORMS, PLATFORMS_WITH_AUTH
|
||||
from .coordinator import SFRDataUpdateCoordinator
|
||||
@@ -22,7 +22,7 @@ from .models import DomainData
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up SFR box as config entry."""
|
||||
box = SFRBox(ip=entry.data[CONF_HOST], client=get_async_client(hass))
|
||||
box = SFRBox(ip=entry.data[CONF_HOST], client=async_get_clientsession(hass))
|
||||
platforms = PLATFORMS
|
||||
if (username := entry.data.get(CONF_USERNAME)) and (
|
||||
password := entry.data.get(CONF_PASSWORD)
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from sfrbox_api.bridge import SFRBox
|
||||
@@ -12,10 +13,12 @@ import voluptuous as vol
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.helpers import selector
|
||||
from homeassistant.helpers.httpx_client import get_async_client
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import DEFAULT_HOST, DEFAULT_USERNAME, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST, default=DEFAULT_HOST): selector.TextSelector(),
|
||||
@@ -44,10 +47,13 @@ class SFRBoxFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a flow initialized by the user."""
|
||||
errors = {}
|
||||
if user_input is not None:
|
||||
box = SFRBox(ip=user_input[CONF_HOST], client=get_async_client(self.hass))
|
||||
box = SFRBox(
|
||||
ip=user_input[CONF_HOST], client=async_get_clientsession(self.hass)
|
||||
)
|
||||
try:
|
||||
system_info = await box.system_get_info()
|
||||
except SFRBoxError:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "cannot_connect"
|
||||
else:
|
||||
if TYPE_CHECKING:
|
||||
@@ -119,5 +125,7 @@ class SFRBoxFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle failed credentials."""
|
||||
self._box = SFRBox(ip=entry_data[CONF_HOST], client=get_async_client(self.hass))
|
||||
self._box = SFRBox(
|
||||
ip=entry_data[CONF_HOST], client=async_get_clientsession(self.hass)
|
||||
)
|
||||
return await self.async_step_auth()
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/sfr_box",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["sfrbox-api==0.0.12"]
|
||||
"requirements": ["sfrbox-api==0.1.0"]
|
||||
}
|
||||
|
||||
@@ -73,7 +73,8 @@
|
||||
"button1": "First button",
|
||||
"button2": "Second button",
|
||||
"button3": "Third button",
|
||||
"button4": "Fourth button"
|
||||
"button4": "Fourth button",
|
||||
"button5": "Fifth button"
|
||||
},
|
||||
"trigger_type": {
|
||||
"btn_down": "{subtype} button down",
|
||||
|
||||
@@ -6,6 +6,6 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/systemmonitor",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["psutil"],
|
||||
"requirements": ["psutil-home-assistant==0.0.1", "psutil==7.1.0"],
|
||||
"requirements": ["psutil-home-assistant==0.0.1", "psutil==7.1.2"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -359,7 +359,7 @@ MODULES: dict[str, ModuleType] = {
|
||||
PLATFORM_WEBHOOKS: webhooks,
|
||||
}
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.NOTIFY]
|
||||
PLATFORMS: list[Platform] = [Platform.EVENT, Platform.NOTIFY]
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
|
||||
@@ -44,6 +44,7 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import Context, HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.util.ssl import get_default_context, get_default_no_verify_context
|
||||
|
||||
from .const import (
|
||||
@@ -107,6 +108,7 @@ from .const import (
|
||||
SERVICE_SEND_STICKER,
|
||||
SERVICE_SEND_VIDEO,
|
||||
SERVICE_SEND_VOICE,
|
||||
SIGNAL_UPDATE_EVENT,
|
||||
)
|
||||
|
||||
_FILE_TYPES = ("animation", "document", "photo", "sticker", "video", "voice")
|
||||
@@ -167,6 +169,7 @@ class BaseTelegramBot:
|
||||
|
||||
_LOGGER.debug("Firing event %s: %s", event_type, event_data)
|
||||
self.hass.bus.async_fire(event_type, event_data, context=event_context)
|
||||
async_dispatcher_send(self.hass, SIGNAL_UPDATE_EVENT, event_type, event_data)
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
@@ -547,6 +550,9 @@ class TelegramNotificationService:
|
||||
self.hass.bus.async_fire(
|
||||
EVENT_TELEGRAM_SENT, event_data, context=context
|
||||
)
|
||||
async_dispatcher_send(
|
||||
self.hass, SIGNAL_UPDATE_EVENT, EVENT_TELEGRAM_SENT, event_data
|
||||
)
|
||||
except TelegramError as exc:
|
||||
if not suppress_error:
|
||||
raise HomeAssistantError(
|
||||
|
||||
@@ -51,6 +51,7 @@ SERVICE_ANSWER_CALLBACK_QUERY = "answer_callback_query"
|
||||
SERVICE_DELETE_MESSAGE = "delete_message"
|
||||
SERVICE_LEAVE_CHAT = "leave_chat"
|
||||
|
||||
SIGNAL_UPDATE_EVENT = "telegram_bot_update_event"
|
||||
EVENT_TELEGRAM_CALLBACK = "telegram_callback"
|
||||
EVENT_TELEGRAM_COMMAND = "telegram_command"
|
||||
EVENT_TELEGRAM_TEXT = "telegram_text"
homeassistant/components/telegram_bot/event.py (new file, 67 additions)
@@ -0,0 +1,67 @@
|
||||
"""Event platform for Telegram bot integration."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.event import EventEntity, EventEntityDescription
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .bot import TelegramBotConfigEntry
|
||||
from .const import (
|
||||
EVENT_TELEGRAM_ATTACHMENT,
|
||||
EVENT_TELEGRAM_CALLBACK,
|
||||
EVENT_TELEGRAM_COMMAND,
|
||||
EVENT_TELEGRAM_SENT,
|
||||
EVENT_TELEGRAM_TEXT,
|
||||
SIGNAL_UPDATE_EVENT,
|
||||
)
|
||||
from .entity import TelegramBotEntity
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: TelegramBotConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the event platform."""
|
||||
async_add_entities([TelegramBotEventEntity(config_entry)])
|
||||
|
||||
|
||||
class TelegramBotEventEntity(TelegramBotEntity, EventEntity):
|
||||
"""An event entity."""
|
||||
|
||||
_attr_event_types = [
|
||||
EVENT_TELEGRAM_ATTACHMENT,
|
||||
EVENT_TELEGRAM_CALLBACK,
|
||||
EVENT_TELEGRAM_COMMAND,
|
||||
EVENT_TELEGRAM_TEXT,
|
||||
EVENT_TELEGRAM_SENT,
|
||||
]
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
config_entry: TelegramBotConfigEntry,
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
|
||||
super().__init__(
|
||||
config_entry,
|
||||
EventEntityDescription(key="update_event", translation_key="update_event"),
|
||||
)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register callbacks."""
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
SIGNAL_UPDATE_EVENT,
|
||||
self._async_handle_event,
|
||||
)
|
||||
)
|
||||
|
||||
@callback
|
||||
def _async_handle_event(self, event_type: str, event_data: dict[str, Any]) -> None:
|
||||
"""Handle the event."""
|
||||
self._trigger_event(event_type, event_data)
|
||||
self.async_write_ha_state()
|
||||
@@ -1,4 +1,11 @@
|
||||
{
|
||||
"entity": {
|
||||
"event": {
|
||||
"update_event": {
|
||||
"default": "mdi:message-reply"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"answer_callback_query": {
|
||||
"service": "mdi:check"
|
||||
|
||||
@@ -103,6 +103,75 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"event": {
|
||||
"update_event": {
|
||||
"name": "Update event",
|
||||
"state_attributes": {
|
||||
"args": {
|
||||
"name": "Received command arguments"
|
||||
},
|
||||
"chat_id": {
|
||||
"name": "Chat ID"
|
||||
},
|
||||
"command": {
|
||||
"name": "Received command"
|
||||
},
|
||||
"data": {
|
||||
"name": "Received callback query"
|
||||
},
|
||||
"date": {
|
||||
"name": "Received datetime"
|
||||
},
|
||||
"event_type": {
|
||||
"state": {
|
||||
"telegram_attachment": "Attachment received",
|
||||
"telegram_callback": "Callback query received",
|
||||
"telegram_command": "Command received",
|
||||
"telegram_sent": "Message sent",
|
||||
"telegram_text": "Text message received"
|
||||
}
|
||||
},
|
||||
"file_id": {
|
||||
"name": "Received file ID"
|
||||
},
|
||||
"file_mime_type": {
|
||||
"name": "Received file MIME type"
|
||||
},
|
||||
"file_name": {
|
||||
"name": "Received file name"
|
||||
},
|
||||
"file_size": {
|
||||
"name": "Received file size (bytes)"
|
||||
},
|
||||
"from_first": {
|
||||
"name": "Received From first name"
|
||||
},
|
||||
"from_last": {
|
||||
"name": "Received from last name"
|
||||
},
|
||||
"id": {
|
||||
"name": "ID"
|
||||
},
|
||||
"message_id": {
|
||||
"name": "Sent message ID"
|
||||
},
|
||||
"message_tag": {
|
||||
"name": "Sent message tag"
|
||||
},
|
||||
"message_thread_id": {
|
||||
"name": "Received message thread ID"
|
||||
},
|
||||
"text": {
|
||||
"name": "Received text/caption"
|
||||
},
|
||||
"user_id": {
|
||||
"name": "Received from user ID"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"action_failed": {
|
||||
"message": "Action failed. {error}"
|
||||
|
||||
@@ -84,22 +84,26 @@ def validate_binary_sensor_auto_off_has_trigger(obj: dict) -> dict:
|
||||
if CONF_TRIGGERS not in obj and DOMAIN_BINARY_SENSOR in obj:
|
||||
binary_sensors: list[ConfigType] = obj[DOMAIN_BINARY_SENSOR]
|
||||
for binary_sensor in binary_sensors:
|
||||
if binary_sensor_platform.CONF_AUTO_OFF in binary_sensor:
|
||||
identifier = f"{CONF_NAME}: {binary_sensor_platform.DEFAULT_NAME}"
|
||||
if (
|
||||
(name := binary_sensor.get(CONF_NAME))
|
||||
and isinstance(name, Template)
|
||||
and name.template != binary_sensor_platform.DEFAULT_NAME
|
||||
):
|
||||
identifier = f"{CONF_NAME}: {name.template}"
|
||||
elif default_entity_id := binary_sensor.get(CONF_DEFAULT_ENTITY_ID):
|
||||
identifier = f"{CONF_DEFAULT_ENTITY_ID}: {default_entity_id}"
|
||||
elif unique_id := binary_sensor.get(CONF_UNIQUE_ID):
|
||||
identifier = f"{CONF_UNIQUE_ID}: {unique_id}"
|
||||
if binary_sensor_platform.CONF_AUTO_OFF not in binary_sensor:
|
||||
continue
|
||||
|
||||
raise vol.Invalid(
|
||||
f"The auto_off option for template binary sensor: {identifier} requires a trigger, remove the auto_off option or rewrite configuration to use a trigger"
|
||||
)
|
||||
identifier = f"{CONF_NAME}: {binary_sensor_platform.DEFAULT_NAME}"
|
||||
if (
|
||||
(name := binary_sensor.get(CONF_NAME))
|
||||
and isinstance(name, Template)
|
||||
and name.template != binary_sensor_platform.DEFAULT_NAME
|
||||
):
|
||||
identifier = f"{CONF_NAME}: {name.template}"
|
||||
elif default_entity_id := binary_sensor.get(CONF_DEFAULT_ENTITY_ID):
|
||||
identifier = f"{CONF_DEFAULT_ENTITY_ID}: {default_entity_id}"
|
||||
elif unique_id := binary_sensor.get(CONF_UNIQUE_ID):
|
||||
identifier = f"{CONF_UNIQUE_ID}: {unique_id}"
|
||||
|
||||
raise vol.Invalid(
|
||||
f"The auto_off option for template binary sensor: {identifier} "
|
||||
"requires a trigger, remove the auto_off option or rewrite "
|
||||
"configuration to use a trigger"
|
||||
)
|
||||
|
||||
return obj
|
||||
|
||||
|
||||
@@ -88,7 +88,7 @@ SERVICE_ADD_TORRENT_SCHEMA = vol.All(
|
||||
SERVICE_BASE_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(ATTR_TORRENT): cv.string,
|
||||
vol.Optional(ATTR_DOWNLOAD_PATH, default=None): cv.string,
|
||||
vol.Optional(ATTR_DOWNLOAD_PATH): cv.string,
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
@@ -4,6 +4,7 @@
"codeowners": ["@engrbm87", "@JPHutchins"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/transmission",
"integration_type": "service",
"iot_class": "local_polling",
"loggers": ["transmissionrpc"],
"requirements": ["transmission-rpc==7.0.3"]

@@ -13,6 +13,9 @@
"data": {
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"password": "[%key:component::transmission::config::step::user::data_description::password%]"
},
"description": "The password for {username} is invalid.",
"title": "[%key:common::config_flow::title::reauth%]"
},
@@ -26,7 +29,12 @@
"username": "[%key:common::config_flow::data::username%]"
},
"data_description": {
"path": "The RPC request target path. E.g. `/transmission/rpc`"
"host": "The hostname or IP address of the Transmission instance.",
"password": "The password to authenticate with the Transmission instance, if set.",
"path": "The RPC request target path, for example, `/transmission/rpc`.",
"port": "The port number of the Transmission instance.",
"ssl": "Whether to verify SSL certificates. Disable this for self-signed certificates.",
"username": "The username to authenticate with the Transmission instance, if set."
},
"title": "Set up Transmission client"
}
@@ -85,10 +93,24 @@
"data": {
"limit": "Limit",
"order": "Order"
},
"data_description": {
"limit": "Maximum number of torrents to show in the torrent info attributes.",
"order": "Order in which torrents are listed in the torrent info attributes."
}
}
}
},
"selector": {
"order": {
"options": {
"best_ratio_first": "Best ratio first",
"newest_first": "Newest first",
"oldest_first": "Oldest first",
"worst_ratio_first": "Worst ratio first"
}
}
},
"services": {
"add_torrent": {
"description": "Adds a new torrent to download (URL, magnet link or Base64 encoded).",

@@ -8,19 +8,38 @@ from homeassistant.core import HomeAssistant, ServiceCall

from .const import DOMAIN, LOGGER, PLATFORMS

type VeluxConfigEntry = ConfigEntry[PyVLX]


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up the velux component."""
module = VeluxModule(hass, entry.data)
try:
module.setup()
await module.async_start()
host = entry.data[CONF_HOST]
password = entry.data[CONF_PASSWORD]
pyvlx = PyVLX(host=host, password=password)

LOGGER.debug("Velux interface started")
try:
await pyvlx.load_scenes()
await pyvlx.load_nodes()
except PyVLXException as ex:
LOGGER.exception("Can't connect to velux interface: %s", ex)
return False

hass.data.setdefault(DOMAIN, {})[entry.entry_id] = module
entry.runtime_data = pyvlx

async def on_hass_stop(event):
"""Close connection when hass stops."""
LOGGER.debug("Velux interface terminated")
await pyvlx.disconnect()

async def async_reboot_gateway(service_call: ServiceCall) -> None:
await pyvlx.reboot_gateway()

entry.async_on_unload(
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_hass_stop)
)

hass.services.async_register(DOMAIN, "reboot_gateway", async_reboot_gateway)

await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

@@ -30,39 +49,3 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


class VeluxModule:
"""Abstraction for velux component."""

def __init__(self, hass, domain_config):
"""Initialize for velux component."""
self.pyvlx = None
self._hass = hass
self._domain_config = domain_config

def setup(self):
"""Velux component setup."""

async def on_hass_stop(event):
"""Close connection when hass stops."""
LOGGER.debug("Velux interface terminated")
await self.pyvlx.disconnect()

async def async_reboot_gateway(service_call: ServiceCall) -> None:
await self.pyvlx.reboot_gateway()

self._hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_hass_stop)
host = self._domain_config.get(CONF_HOST)
password = self._domain_config.get(CONF_PASSWORD)
self.pyvlx = PyVLX(host=host, password=password)

self._hass.services.async_register(
DOMAIN, "reboot_gateway", async_reboot_gateway
)

async def async_start(self):
"""Start velux component."""
LOGGER.debug("Velux interface started")
await self.pyvlx.load_scenes()
await self.pyvlx.load_nodes()

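For context on the refactor above: the VeluxModule kept in hass.data is replaced by a typed config entry whose runtime_data carries the PyVLX client. A minimal sketch of how the platform hunks that follow consume it, written as it would appear inside the integration package; the empty entity list is a placeholder, not code from this diff.

from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import VeluxConfigEntry  # alias for ConfigEntry[PyVLX]


async def async_setup_entry(
    hass: HomeAssistant,
    config: VeluxConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Sketch of a platform setup reading the client from the entry."""
    pyvlx = config.runtime_data  # typed as PyVLX; replaces hass.data[DOMAIN][config.entry_id]
    # Each platform filters pyvlx.nodes for its device type and wraps the matches in
    # its entity class (VeluxCover, VeluxLight, ...), as the following hunks show.
    async_add_entities([])
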
@@ -11,11 +11,11 @@ from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import DOMAIN, LOGGER
from . import VeluxConfigEntry
from .const import LOGGER
from .entity import VeluxEntity

PARALLEL_UPDATES = 1
@@ -24,15 +24,15 @@ SCAN_INTERVAL = timedelta(minutes=5) # Use standard polling

async def async_setup_entry(
hass: HomeAssistant,
config: ConfigEntry,
config: VeluxConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up rain sensor(s) for Velux platform."""
module = hass.data[DOMAIN][config.entry_id]
pyvlx = config.runtime_data

async_add_entities(
VeluxRainSensor(node, config.entry_id)
for node in module.pyvlx.nodes
for node in pyvlx.nodes
if isinstance(node, Window) and node.rain_sensor
)


@@ -21,11 +21,10 @@ from homeassistant.components.cover import (
CoverEntity,
CoverEntityFeature,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import DOMAIN
from . import VeluxConfigEntry
from .entity import VeluxEntity

PARALLEL_UPDATES = 1
@@ -33,14 +32,14 @@ PARALLEL_UPDATES = 1

async def async_setup_entry(
hass: HomeAssistant,
config: ConfigEntry,
config: VeluxConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up cover(s) for Velux platform."""
module = hass.data[DOMAIN][config.entry_id]
pyvlx = config.runtime_data
async_add_entities(
VeluxCover(node, config.entry_id)
for node in module.pyvlx.nodes
for node in pyvlx.nodes
if isinstance(node, OpeningDevice)
)


@@ -7,11 +7,10 @@ from typing import Any
from pyvlx import Intensity, LighteningDevice

from homeassistant.components.light import ATTR_BRIGHTNESS, ColorMode, LightEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import DOMAIN
from . import VeluxConfigEntry
from .entity import VeluxEntity

PARALLEL_UPDATES = 1
@@ -19,15 +18,14 @@ PARALLEL_UPDATES = 1

async def async_setup_entry(
hass: HomeAssistant,
config: ConfigEntry,
config: VeluxConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up light(s) for Velux platform."""
module = hass.data[DOMAIN][config.entry_id]

pyvlx = config.runtime_data
async_add_entities(
VeluxLight(node, config.entry_id)
for node in module.pyvlx.nodes
for node in pyvlx.nodes
if isinstance(node, LighteningDevice)
)


@@ -5,24 +5,23 @@ from __future__ import annotations
from typing import Any

from homeassistant.components.scene import Scene
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import DOMAIN
from . import VeluxConfigEntry

PARALLEL_UPDATES = 1


async def async_setup_entry(
hass: HomeAssistant,
config: ConfigEntry,
config: VeluxConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the scenes for Velux platform."""
module = hass.data[DOMAIN][config.entry_id]
pyvlx = config.runtime_data

entities = [VeluxScene(scene) for scene in module.pyvlx.scenes]
entities = [VeluxScene(scene) for scene in pyvlx.scenes]
async_add_entities(entities)

@@ -5,8 +5,10 @@ from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from enum import StrEnum
from typing import Any

from xbox.webapi.api.provider.people.models import Person
from xbox.webapi.api.provider.titlehub.models import Title
from yarl import URL

from homeassistant.components.binary_sensor import (
@@ -18,7 +20,7 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import XboxConfigEntry
from .entity import XboxBaseEntity, check_deprecated_entity
from .entity import XboxBaseEntity, XboxBaseEntityDescription, check_deprecated_entity


class XboxBinarySensor(StrEnum):
@@ -32,15 +34,16 @@ class XboxBinarySensor(StrEnum):


@dataclass(kw_only=True, frozen=True)
class XboxBinarySensorEntityDescription(BinarySensorEntityDescription):
class XboxBinarySensorEntityDescription(
XboxBaseEntityDescription, BinarySensorEntityDescription
):
"""Xbox binary sensor description."""

is_on_fn: Callable[[Person], bool | None]
entity_picture_fn: Callable[[Person], str | None] | None = None
deprecated: bool | None = None


def profile_pic(person: Person) -> str | None:
def profile_pic(person: Person, _: Title | None) -> str | None:
"""Return the gamer pic."""

# Xbox sometimes returns a domain that uses a wrong certificate which
@@ -57,6 +60,15 @@ def profile_pic(person: Person) -> str | None:
return str(url.with_query(query))


def profile_attributes(person: Person, _: Title | None) -> dict[str, Any]:
"""Attributes for the profile."""
attributes: dict[str, Any] = {}
attributes["display_name"] = person.display_name
attributes["real_name"] = person.real_name or None
attributes["bio"] = person.detail.bio if person.detail else None
return attributes


def in_game(person: Person) -> bool:
"""True if person is in a game."""

@@ -80,6 +92,7 @@ SENSOR_DESCRIPTIONS: tuple[XboxBinarySensorEntityDescription, ...] = (
is_on_fn=lambda x: x.presence_state == "Online",
name=None,
entity_picture_fn=profile_pic,
attributes_fn=profile_attributes,
),
XboxBinarySensorEntityDescription(
key=XboxBinarySensor.IN_PARTY,
@@ -146,13 +159,3 @@ class XboxBinarySensorEntity(XboxBaseEntity, BinarySensorEntity):
"""Return the status of the requested attribute."""

return self.entity_description.is_on_fn(self.data)

@property
def entity_picture(self) -> str | None:
"""Return the gamer pic."""

return (
fn(self.data)
if (fn := self.entity_description.entity_picture_fn) is not None
else super().entity_picture
)

@@ -49,7 +49,7 @@ async def build_item_response(
"""Create response payload for the provided media query."""
apps: InstalledPackagesList = await client.smartglass.get_installed_apps(device_id)

if media_content_type in (None, "library"):
if not media_content_type or media_content_type == "library":
children: list[BrowseMedia] = []
library_info = BrowseMedia(
media_class=MediaClass.DIRECTORY,

@@ -4,6 +4,7 @@ from __future__ import annotations

from dataclasses import dataclass, field
from datetime import timedelta
from http import HTTPStatus
import logging

from httpx import HTTPStatusError, RequestError, TimeoutException
@@ -15,6 +16,7 @@ from xbox.webapi.api.provider.smartglass.models import (
SmartglassConsoleList,
SmartglassConsoleStatus,
)
from xbox.webapi.api.provider.titlehub.models import Title
from xbox.webapi.common.signed_session import SignedSession

from homeassistant.config_entries import ConfigEntry
@@ -45,6 +47,7 @@ class XboxData:

consoles: dict[str, ConsoleData] = field(default_factory=dict)
presence: dict[str, Person] = field(default_factory=dict)
title_info: dict[str, Title] = field(default_factory=dict)


class XboxUpdateCoordinator(DataUpdateCoordinator[XboxData]):
@@ -199,6 +202,42 @@ class XboxUpdateCoordinator(DataUpdateCoordinator[XboxData]):
{friend.xuid: friend for friend in friends.people if friend.is_favorite}
)

# retrieve title details
title_data: dict[str, Title] = {}
for person in presence_data.values():
if presence_detail := next(
(
d
for d in person.presence_details or []
if d.state == "Active" and d.title_id and d.is_game and d.is_primary
),
None,
):
try:
title = await self.client.titlehub.get_title_info(
presence_detail.title_id
)
except TimeoutException as e:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="timeout_exception",
) from e
except HTTPStatusError as e:
_LOGGER.debug("Xbox exception:", exc_info=True)
if e.response.status_code == HTTPStatus.NOT_FOUND:
continue
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="request_exception",
) from e
except RequestError as e:
_LOGGER.debug("Xbox exception:", exc_info=True)
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="request_exception",
) from e
title_data[person.xuid] = title.titles[0]

if (
self.current_friends - (new_friends := set(presence_data))
or not self.current_friends
@@ -206,7 +245,7 @@ class XboxUpdateCoordinator(DataUpdateCoordinator[XboxData]):
self.remove_stale_devices(new_friends)
self.current_friends = new_friends

return XboxData(new_console_data, presence_data)
return XboxData(new_console_data, presence_data, title_data)

def remove_stale_devices(self, xuids: set[str]) -> None:
"""Remove stale devices from registry."""

@@ -2,7 +2,13 @@

from __future__ import annotations

from collections.abc import Callable, Mapping
from dataclasses import dataclass
from typing import Any

from xbox.webapi.api.provider.people.models import Person
from xbox.webapi.api.provider.smartglass.models import ConsoleType, SmartglassConsole
from xbox.webapi.api.provider.titlehub.models import Title

from homeassistant.components.automation import automations_with_entity
from homeassistant.components.script import scripts_with_entity
@@ -13,7 +19,7 @@ from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import ConsoleData, Person, XboxUpdateCoordinator
from .coordinator import ConsoleData, XboxUpdateCoordinator

MAP_MODEL = {
ConsoleType.XboxOne: "Xbox One",
@@ -25,16 +31,27 @@ MAP_MODEL = {
}


@dataclass(kw_only=True, frozen=True)
class XboxBaseEntityDescription(EntityDescription):
"""Xbox base entity description."""

entity_picture_fn: Callable[[Person, Title | None], str | None] | None = None
attributes_fn: Callable[[Person, Title | None], Mapping[str, Any] | None] | None = (
None
)


class XboxBaseEntity(CoordinatorEntity[XboxUpdateCoordinator]):
"""Base Sensor for the Xbox Integration."""

_attr_has_entity_name = True
entity_description: XboxBaseEntityDescription

def __init__(
self,
coordinator: XboxUpdateCoordinator,
xuid: str,
entity_description: EntityDescription,
entity_description: XboxBaseEntityDescription,
) -> None:
"""Initialize Xbox entity."""
super().__init__(coordinator)
@@ -53,9 +70,35 @@ class XboxBaseEntity(CoordinatorEntity[XboxUpdateCoordinator]):

@property
def data(self) -> Person:
"""Return coordinator data for this console."""
"""Return coordinator data for this person."""
return self.coordinator.data.presence[self.xuid]

@property
def title_info(self) -> Title | None:
"""Return title info."""
return self.coordinator.data.title_info.get(self.xuid)

@property
def entity_picture(self) -> str | None:
"""Return the entity picture."""

return (
entity_picture
if (fn := self.entity_description.entity_picture_fn) is not None
and (entity_picture := fn(self.data, self.title_info)) is not None
else super().entity_picture
)

@property
def extra_state_attributes(self) -> Mapping[str, float | None] | None:
"""Return entity specific state attributes."""
return (
fn(self.data, self.title_info)
if hasattr(self.entity_description, "attributes_fn")
and (fn := self.entity_description.attributes_fn)
else super().extra_state_attributes
)


class XboxConsoleBaseEntity(CoordinatorEntity[XboxUpdateCoordinator]):
"""Console base entity for the Xbox integration."""

@@ -24,6 +24,13 @@
"last_online": {
"default": "mdi:account-clock"
},
"now_playing": {
"default": "mdi:microsoft-xbox-controller",
"state": {
"unavailable": "mdi:microsoft-xbox-controller-off",
"unknown": "mdi:microsoft-xbox-controller-off"
}
},
"status": {
"default": "mdi:message-text-outline"
}

@@ -6,8 +6,10 @@ from collections.abc import Callable
from dataclasses import dataclass
from datetime import UTC, datetime
from enum import StrEnum
from typing import Any

from xbox.webapi.api.provider.people.models import Person
from xbox.webapi.api.provider.titlehub.models import Title

from homeassistant.components.sensor import (
DOMAIN as SENSOR_DOMAIN,
@@ -20,7 +22,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType

from .coordinator import XboxConfigEntry
from .entity import XboxBaseEntity, check_deprecated_entity
from .entity import XboxBaseEntity, XboxBaseEntityDescription, check_deprecated_entity


class XboxSensor(StrEnum):
@@ -33,42 +35,102 @@ class XboxSensor(StrEnum):
LAST_ONLINE = "last_online"
FOLLOWING = "following"
FOLLOWER = "follower"
NOW_PLAYING = "now_playing"


@dataclass(kw_only=True, frozen=True)
class XboxSensorEntityDescription(SensorEntityDescription):
class XboxSensorEntityDescription(XboxBaseEntityDescription, SensorEntityDescription):
"""Xbox sensor description."""

value_fn: Callable[[Person], StateType | datetime]
value_fn: Callable[[Person, Title | None], StateType | datetime]
deprecated: bool | None = None


def now_playing_attributes(_: Person, title: Title | None) -> dict[str, Any]:
"""Attributes of the currently played title."""
attributes: dict[str, Any] = {
"short_description": None,
"genres": None,
"developer": None,
"publisher": None,
"release_date": None,
"min_age": None,
"achievements": None,
"gamerscore": None,
"progress": None,
}
if not title:
return attributes
if title.detail is not None:
attributes.update(
{
"short_description": title.detail.short_description,
"genres": (
", ".join(title.detail.genres) if title.detail.genres else None
),
"developer": title.detail.developer_name,
"publisher": title.detail.publisher_name,
"release_date": (
title.detail.release_date.replace(tzinfo=UTC).date()
if title.detail.release_date
else None
),
"min_age": title.detail.min_age,
}
)
if (achievement := title.achievement) is not None:
attributes.update(
{
"achievements": (
f"{achievement.current_achievements} / {achievement.total_achievements}"
),
"gamerscore": (
f"{achievement.current_gamerscore} / {achievement.total_gamerscore}"
),
"progress": f"{int(achievement.progress_percentage)} %",
}
)

return attributes


def title_logo(_: Person, title: Title | None) -> str | None:
"""Get the game logo."""

return (
next((i.url for i in title.images if i.type == "Tile"), None)
or next((i.url for i in title.images if i.type == "Logo"), None)
if title and title.images
else None
)


SENSOR_DESCRIPTIONS: tuple[XboxSensorEntityDescription, ...] = (
XboxSensorEntityDescription(
key=XboxSensor.STATUS,
translation_key=XboxSensor.STATUS,
value_fn=lambda x: x.presence_text,
value_fn=lambda x, _: x.presence_text,
),
XboxSensorEntityDescription(
key=XboxSensor.GAMER_SCORE,
translation_key=XboxSensor.GAMER_SCORE,
value_fn=lambda x: x.gamer_score,
value_fn=lambda x, _: x.gamer_score,
),
XboxSensorEntityDescription(
key=XboxSensor.ACCOUNT_TIER,
value_fn=lambda _: None,
value_fn=lambda _, __: None,
deprecated=True,
),
XboxSensorEntityDescription(
key=XboxSensor.GOLD_TENURE,
value_fn=lambda _: None,
value_fn=lambda _, __: None,
deprecated=True,
),
XboxSensorEntityDescription(
key=XboxSensor.LAST_ONLINE,
translation_key=XboxSensor.LAST_ONLINE,
value_fn=(
lambda x: x.last_seen_date_time_utc.replace(tzinfo=UTC)
lambda x, _: x.last_seen_date_time_utc.replace(tzinfo=UTC)
if x.last_seen_date_time_utc
else None
),
@@ -77,12 +139,19 @@ SENSOR_DESCRIPTIONS: tuple[XboxSensorEntityDescription, ...] = (
XboxSensorEntityDescription(
key=XboxSensor.FOLLOWING,
translation_key=XboxSensor.FOLLOWING,
value_fn=lambda x: x.detail.following_count if x.detail else None,
value_fn=lambda x, _: x.detail.following_count if x.detail else None,
),
XboxSensorEntityDescription(
key=XboxSensor.FOLLOWER,
translation_key=XboxSensor.FOLLOWER,
value_fn=lambda x: x.detail.follower_count if x.detail else None,
value_fn=lambda x, _: x.detail.follower_count if x.detail else None,
),
XboxSensorEntityDescription(
key=XboxSensor.NOW_PLAYING,
translation_key=XboxSensor.NOW_PLAYING,
value_fn=lambda _, title: title.name if title else None,
attributes_fn=now_playing_attributes,
entity_picture_fn=title_logo,
),
)

@@ -127,4 +196,4 @@ class XboxSensorEntity(XboxBaseEntity, SensorEntity):
@property
def native_value(self) -> StateType | datetime:
"""Return the state of the requested attribute."""
return self.entity_description.value_fn(self.data)
return self.entity_description.value_fn(self.data, self.title_info)

@@ -35,6 +35,13 @@
},
"in_game": {
"name": "In game"
},
"online": {
"state_attributes": {
"bio": { "name": "Bio" },
"display_name": { "name": "Display name" },
"real_name": { "name": "Real name" }
}
}
},
"sensor": {
@@ -53,6 +60,22 @@
"last_online": {
"name": "Last online"
},
"now_playing": {
"name": "Now playing",
"state_attributes": {
"achievements": { "name": "Achievements" },
"developer": { "name": "Developer" },
"gamerscore": {
"name": "[%key:component::xbox::entity::sensor::gamer_score::name%]"
},
"genres": { "name": "Genres" },
"min_age": { "name": "Minimum age" },
"progress": { "name": "Progress" },
"publisher": { "name": "Publisher" },
"release_date": { "name": "Release date" },
"short_description": { "name": "Short description" }
}
},
"status": {
"name": "Status"
}

@@ -59,6 +59,10 @@ DEVICE_CLOUD_CONFIG = vol.Schema(
}
)

CLOUD_STEP_PLACEHOLDERS = {
"country_servers_url": "https://www.openhab.org/addons/bindings/miio/#country-servers",
}


class OptionsFlowHandler(OptionsFlowWithReload):
"""Options for the component."""
@@ -224,7 +228,10 @@ class XiaomiMiioFlowHandler(ConfigFlow, domain=DOMAIN):
if not cloud_username or not cloud_password or not cloud_country:
errors["base"] = "cloud_credentials_incomplete"
return self.async_show_form(
step_id="cloud", data_schema=DEVICE_CLOUD_CONFIG, errors=errors
step_id="cloud",
data_schema=DEVICE_CLOUD_CONFIG,
errors=errors,
description_placeholders=CLOUD_STEP_PLACEHOLDERS,
)

miio_cloud = await self.hass.async_add_executor_job(
@@ -241,7 +248,10 @@ class XiaomiMiioFlowHandler(ConfigFlow, domain=DOMAIN):

if errors:
return self.async_show_form(
step_id="cloud", data_schema=DEVICE_CLOUD_CONFIG, errors=errors
step_id="cloud",
data_schema=DEVICE_CLOUD_CONFIG,
errors=errors,
description_placeholders=CLOUD_STEP_PLACEHOLDERS,
)

try:
@@ -255,7 +265,10 @@ class XiaomiMiioFlowHandler(ConfigFlow, domain=DOMAIN):
if not devices_raw:
errors["base"] = "cloud_no_devices"
return self.async_show_form(
step_id="cloud", data_schema=DEVICE_CLOUD_CONFIG, errors=errors
step_id="cloud",
data_schema=DEVICE_CLOUD_CONFIG,
errors=errors,
description_placeholders=CLOUD_STEP_PLACEHOLDERS,
)

self.cloud_devices = {}
@@ -284,7 +297,10 @@ class XiaomiMiioFlowHandler(ConfigFlow, domain=DOMAIN):
return await self.async_step_select()

return self.async_show_form(
step_id="cloud", data_schema=DEVICE_CLOUD_CONFIG, errors=errors
step_id="cloud",
data_schema=DEVICE_CLOUD_CONFIG,
errors=errors,
description_placeholders=CLOUD_STEP_PLACEHOLDERS,
)

async def async_step_select(
@@ -322,7 +338,14 @@ class XiaomiMiioFlowHandler(ConfigFlow, domain=DOMAIN):
else:
schema = DEVICE_CONFIG

return self.async_show_form(step_id="manual", data_schema=schema, errors=errors)
return self.async_show_form(
step_id="manual",
data_schema=schema,
errors=errors,
description_placeholders={
"retrieving_token_url": "https://www.home-assistant.io/integrations/xiaomi_miio#retrieving-the-access-token",
},
)

async def async_step_connect(
self, user_input: dict[str, Any] | None = None

@@ -25,7 +25,7 @@
"cloud_username": "[%key:common::config_flow::data::username%]",
"manual": "Configure manually (not recommended)"
},
"description": "Log in to Xiaomi Home, see https://www.openhab.org/addons/bindings/miio/#country-servers for the server region to use."
"description": "Log in to Xiaomi Home, see {country_servers_url} for the server region to use."
},
"connect": {
"data": {
@@ -37,7 +37,7 @@
"host": "[%key:common::config_flow::data::ip%]",
"token": "[%key:common::config_flow::data::api_token%]"
},
"description": "You will need the 32 character API token, see https://www.home-assistant.io/integrations/xiaomi_miio#retrieving-the-access-token for instructions. Please note, that this API token is different from the key used by the Xiaomi Aqara integration."
"description": "You will need the 32 character API token, see {retrieving_token_url} for instructions. Please note that this API token is different from the key used by the Xiaomi Aqara integration."
},
"reauth_confirm": {
"description": "The Xiaomi Home integration needs to re-authenticate your account in order to update the tokens or add missing credentials.",

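Read together with the config flow hunk above, the pattern is: the step passes description_placeholders when showing the form, and the translation string refers to them as {placeholder} tokens, which keeps raw URLs out of translatable text. A condensed, hypothetical sketch of that wiring; ExampleFlow is a stand-in class, while the placeholder dict and the rendered description come from the hunks above.

from homeassistant.config_entries import ConfigFlow

CLOUD_STEP_PLACEHOLDERS = {
    "country_servers_url": "https://www.openhab.org/addons/bindings/miio/#country-servers",
}


class ExampleFlow(ConfigFlow):
    async def async_step_cloud(self, user_input=None):
        # strings.json renders this step's description as
        # "Log in to Xiaomi Home, see {country_servers_url} for the server region to use."
        return self.async_show_form(
            step_id="cloud",
            description_placeholders=CLOUD_STEP_PLACEHOLDERS,
        )
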
@@ -645,24 +645,12 @@ class FlowHandler(Generic[_FlowContextT, _FlowResultT, _HandlerT]):
__progress_task: asyncio.Task[Any] | None = None
__no_progress_task_reported = False
deprecated_show_progress = False
__progress_step_data: ProgressStepData[_FlowResultT] | None = None

@property
def _progress_step_data(self) -> ProgressStepData[_FlowResultT]:
"""Return progress step data.

A property is used instead of a simple attribute as derived classes
do not call super().__init__.
The property makes sure that the dict is initialized if needed.
"""
if not self.__progress_step_data:
self.__progress_step_data = {
"tasks": {},
"abort_reason": "",
"abort_description_placeholders": MappingProxyType({}),
"next_step_result": None,
}
return self.__progress_step_data
_progress_step_data: ProgressStepData[_FlowResultT] = {
"tasks": {},
"abort_reason": "",
"abort_description_placeholders": MappingProxyType({}),
"next_step_result": None,
}

@property
def source(self) -> str | None:
@@ -789,10 +777,9 @@ class FlowHandler(Generic[_FlowContextT, _FlowResultT, _HandlerT]):
self, user_input: dict[str, Any] | None = None
) -> _FlowResultT:
"""Abort the flow."""
progress_step_data = self._progress_step_data
return self.async_abort(
reason=progress_step_data["abort_reason"],
description_placeholders=progress_step_data[
reason=self._progress_step_data["abort_reason"],
description_placeholders=self._progress_step_data[
"abort_description_placeholders"
],
)
@@ -808,15 +795,14 @@ class FlowHandler(Generic[_FlowContextT, _FlowResultT, _HandlerT]):
without using async_show_progress_done.
If no next step is set, abort the flow.
"""
progress_step_data = self._progress_step_data
if (next_step_result := progress_step_data["next_step_result"]) is None:
if self._progress_step_data["next_step_result"] is None:
return self.async_abort(
reason=progress_step_data["abort_reason"],
description_placeholders=progress_step_data[
reason=self._progress_step_data["abort_reason"],
description_placeholders=self._progress_step_data[
"abort_description_placeholders"
],
)
return next_step_result
return self._progress_step_data["next_step_result"]

@callback
def async_external_step(
@@ -1035,9 +1021,9 @@ def progress_step[
self: FlowHandler[Any, ResultT], *args: P.args, **kwargs: P.kwargs
) -> ResultT:
step_id = func.__name__.replace("async_step_", "")
progress_step_data = self._progress_step_data

# Check if we have a progress task running
progress_task = progress_step_data["tasks"].get(step_id)
progress_task = self._progress_step_data["tasks"].get(step_id)

if progress_task is None:
# First call - create and start the progress task
@@ -1045,30 +1031,30 @@ def progress_step[
func(self, *args, **kwargs),  # type: ignore[arg-type]
f"Progress step {step_id}",
)
progress_step_data["tasks"][step_id] = progress_task
self._progress_step_data["tasks"][step_id] = progress_task

if not progress_task.done():
# Handle description placeholders
placeholders = None
if description_placeholders is not None:
if callable(description_placeholders):
placeholders = description_placeholders(self)
else:
placeholders = description_placeholders
if not progress_task.done():
# Handle description placeholders
placeholders = None
if description_placeholders is not None:
if callable(description_placeholders):
placeholders = description_placeholders(self)
else:
placeholders = description_placeholders

return self.async_show_progress(
step_id=step_id,
progress_action=step_id,
progress_task=progress_task,
description_placeholders=placeholders,
)
return self.async_show_progress(
step_id=step_id,
progress_action=step_id,
progress_task=progress_task,
description_placeholders=placeholders,
)

# Task is done or this is a subsequent call
try:
progress_task_result = await progress_task
self._progress_step_data["next_step_result"] = await progress_task
except AbortFlow as err:
progress_step_data["abort_reason"] = err.reason
progress_step_data["abort_description_placeholders"] = (
self._progress_step_data["abort_reason"] = err.reason
self._progress_step_data["abort_description_placeholders"] = (
err.description_placeholders or {}
)
return self.async_show_progress_done(
@@ -1076,14 +1062,7 @@ def progress_step[
)
finally:
# Clean up task reference
progress_step_data["tasks"].pop(step_id, None)

# If the result type is FlowResultType.SHOW_PROGRESS_DONE
# an earlier show progress step has already been run and stored its result.
# In this case we should not overwrite the result,
# but just use the stored one.
if progress_task_result["type"] != FlowResultType.SHOW_PROGRESS_DONE:
progress_step_data["next_step_result"] = progress_task_result
self._progress_step_data["tasks"].pop(step_id, None)

return self.async_show_progress_done(
next_step_id="_progress_step_progress_done"

@@ -7046,7 +7046,7 @@
},
"transmission": {
"name": "Transmission",
"integration_type": "hub",
"integration_type": "service",
"config_flow": true,
"iot_class": "local_polling"
},

@@ -6,7 +6,7 @@ aiodns==3.5.0
aiohasupervisor==0.3.3
aiohttp-asyncmdnsresolver==0.1.1
aiohttp-fast-zlib==0.3.0
aiohttp==3.13.1
aiohttp==3.13.2
aiohttp_cors==0.8.1
aiousbwatcher==1.1.1
aiozoneinfo==0.2.3
@@ -24,7 +24,7 @@ bleak-retry-connector==4.4.3
bleak==1.0.1
bluetooth-adapters==2.1.0
bluetooth-auto-recovery==1.5.3
bluetooth-data-tools==1.28.3
bluetooth-data-tools==1.28.4
cached-ipaddress==1.0.1
certifi>=2021.5.30
ciso8601==2.3.3
@@ -40,7 +40,7 @@ hass-nabucasa==1.4.0
hassil==3.3.0
home-assistant-bluetooth==1.13.1
home-assistant-frontend==20251001.4
home-assistant-intents==2025.10.1
home-assistant-intents==2025.10.28
httpx==0.28.1
ifaddr==0.2.0
Jinja2==3.1.6

@@ -29,7 +29,7 @@ dependencies = [
# change behavior based on presence of supervisor. Deprecated with #127228
# Lib can be removed with 2025.11
"aiohasupervisor==0.3.3",
"aiohttp==3.13.1",
"aiohttp==3.13.2",
"aiohttp_cors==0.8.1",
"aiohttp-fast-zlib==0.3.0",
"aiohttp-asyncmdnsresolver==0.1.1",

2
requirements.txt
generated
@@ -5,7 +5,7 @@
# Home Assistant Core
aiodns==3.5.0
aiohasupervisor==0.3.3
aiohttp==3.13.1
aiohttp==3.13.2
aiohttp_cors==0.8.1
aiohttp-fast-zlib==0.3.0
aiohttp-asyncmdnsresolver==0.1.1

Some files were not shown because too many files have changed in this diff.