mirror of
https://github.com/home-assistant/core.git
synced 2025-11-18 23:40:09 +00:00
Compare commits
62 Commits
sql_adjust
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ee05adfca1 | ||
|
|
168c915b5f | ||
|
|
6c80be52af | ||
|
|
ead92cdf82 | ||
|
|
c0f0cfef59 | ||
|
|
cefc0ba96e | ||
|
|
ad091b1062 | ||
|
|
876bc6d8c4 | ||
|
|
9f206d4363 | ||
|
|
a2d11e6d98 | ||
|
|
3b38af3984 | ||
|
|
3875f91bb9 | ||
|
|
c813776b0c | ||
|
|
3afb421cba | ||
|
|
c16633568b | ||
|
|
87f8ff2bb4 | ||
|
|
b423303f1e | ||
|
|
f6ff222679 | ||
|
|
0152fa0c03 | ||
|
|
37ebbe83bc | ||
|
|
63e036d39e | ||
|
|
f0cbf34a78 | ||
|
|
596bc89ee6 | ||
|
|
b8c877e1d2 | ||
|
|
197d9781cb | ||
|
|
f3f323637e | ||
|
|
9748abc103 | ||
|
|
596f049971 | ||
|
|
dee80cb6f5 | ||
|
|
b4ab73468b | ||
|
|
a300199a97 | ||
|
|
09dd765583 | ||
|
|
0c8b765415 | ||
|
|
0824ec502f | ||
|
|
9e0e353a5f | ||
|
|
e934b006e2 | ||
|
|
05479bb8fd | ||
|
|
d07247566d | ||
|
|
19e6097df6 | ||
|
|
2cff3cf29c | ||
|
|
5cac9b8e5e | ||
|
|
c2a516ea32 | ||
|
|
192b38d3e2 | ||
|
|
bb018e3546 | ||
|
|
4919d73cc5 | ||
|
|
f3ddffb5ff | ||
|
|
9bdfa77fa0 | ||
|
|
c65003009f | ||
|
|
0f722109b7 | ||
|
|
f7d86dec3c | ||
|
|
6b49c8a70c | ||
|
|
ab9a8f3e53 | ||
|
|
4e12628266 | ||
|
|
e6d8d4de42 | ||
|
|
6620b90eb4 | ||
|
|
6fd3af8891 | ||
|
|
46979b8418 | ||
|
|
1718a11de2 | ||
|
|
2016b1d8c7 | ||
|
|
4b72e45fc2 | ||
|
|
ead5ce905b | ||
|
|
f233f2da3f |
10
build.yaml
10
build.yaml
@@ -1,10 +1,10 @@
|
||||
image: ghcr.io/home-assistant/{arch}-homeassistant
|
||||
build_from:
|
||||
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.10.1
|
||||
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.10.1
|
||||
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.10.1
|
||||
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.10.1
|
||||
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.10.1
|
||||
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.11.0
|
||||
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.11.0
|
||||
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.11.0
|
||||
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.11.0
|
||||
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.11.0
|
||||
cosign:
|
||||
base_identity: https://github.com/home-assistant/docker/.*
|
||||
identity: https://github.com/home-assistant/core/.*
|
||||
|
||||
@@ -6,7 +6,6 @@ Sending HOTP through notify service
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections import OrderedDict
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
|
||||
@@ -304,14 +303,15 @@ class NotifySetupFlow(SetupFlow[NotifyAuthModule]):
|
||||
if not self._available_notify_services:
|
||||
return self.async_abort(reason="no_available_service")
|
||||
|
||||
schema: dict[str, Any] = OrderedDict()
|
||||
schema["notify_service"] = vol.In(self._available_notify_services)
|
||||
schema["target"] = vol.Optional(str)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="init", data_schema=vol.Schema(schema), errors=errors
|
||||
schema = vol.Schema(
|
||||
{
|
||||
vol.Required("notify_service"): vol.In(self._available_notify_services),
|
||||
vol.Optional("target"): str,
|
||||
}
|
||||
)
|
||||
|
||||
return self.async_show_form(step_id="init", data_schema=schema, errors=errors)
|
||||
|
||||
async def async_step_setup(
|
||||
self, user_input: dict[str, str] | None = None
|
||||
) -> FlowResult:
|
||||
|
||||
@@ -179,12 +179,18 @@ class Data:
|
||||
user_hash = base64.b64decode(found["password"])
|
||||
|
||||
# bcrypt.checkpw is timing-safe
|
||||
if not bcrypt.checkpw(password.encode(), user_hash):
|
||||
# With bcrypt 5.0 passing a password longer than 72 bytes raises a ValueError.
|
||||
# Previously the password was silently truncated.
|
||||
# https://github.com/pyca/bcrypt/pull/1000
|
||||
if not bcrypt.checkpw(password.encode()[:72], user_hash):
|
||||
raise InvalidAuth
|
||||
|
||||
def hash_password(self, password: str, for_storage: bool = False) -> bytes:
|
||||
"""Encode a password."""
|
||||
hashed: bytes = bcrypt.hashpw(password.encode(), bcrypt.gensalt(rounds=12))
|
||||
# With bcrypt 5.0 passing a password longer than 72 bytes raises a ValueError.
|
||||
# Previously the password was silently truncated.
|
||||
# https://github.com/pyca/bcrypt/pull/1000
|
||||
hashed: bytes = bcrypt.hashpw(password.encode()[:72], bcrypt.gensalt(rounds=12))
|
||||
|
||||
if for_storage:
|
||||
hashed = base64.b64encode(hashed)
|
||||
|
||||
@@ -23,7 +23,7 @@ from homeassistant.components.bluetooth import (
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_ADDRESS
|
||||
|
||||
from .const import DOMAIN, MFCT_ID
|
||||
from .const import DEVICE_MODEL, DOMAIN, MFCT_ID
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -128,15 +128,15 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Confirm discovery."""
|
||||
assert self._discovered_device is not None
|
||||
|
||||
if user_input is not None:
|
||||
if (
|
||||
self._discovered_device is not None
|
||||
and self._discovered_device.device.firmware.need_firmware_upgrade
|
||||
):
|
||||
if self._discovered_device.device.firmware.need_firmware_upgrade:
|
||||
return self.async_abort(reason="firmware_upgrade_required")
|
||||
|
||||
return self.async_create_entry(
|
||||
title=self.context["title_placeholders"]["name"], data={}
|
||||
title=self.context["title_placeholders"]["name"],
|
||||
data={DEVICE_MODEL: self._discovered_device.device.model.value},
|
||||
)
|
||||
|
||||
self._set_confirm_only()
|
||||
@@ -164,7 +164,10 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
self._discovered_device = discovery
|
||||
|
||||
return self.async_create_entry(title=discovery.name, data={})
|
||||
return self.async_create_entry(
|
||||
title=discovery.name,
|
||||
data={DEVICE_MODEL: discovery.device.model.value},
|
||||
)
|
||||
|
||||
current_addresses = self._async_current_ids(include_ignore=False)
|
||||
devices: list[BluetoothServiceInfoBleak] = []
|
||||
|
||||
@@ -1,11 +1,16 @@
|
||||
"""Constants for Airthings BLE."""
|
||||
|
||||
from airthings_ble import AirthingsDeviceType
|
||||
|
||||
DOMAIN = "airthings_ble"
|
||||
MFCT_ID = 820
|
||||
|
||||
VOLUME_BECQUEREL = "Bq/m³"
|
||||
VOLUME_PICOCURIE = "pCi/L"
|
||||
|
||||
DEVICE_MODEL = "device_model"
|
||||
|
||||
DEFAULT_SCAN_INTERVAL = 300
|
||||
DEVICE_SPECIFIC_SCAN_INTERVAL = {AirthingsDeviceType.CORENTIUM_HOME_2.value: 1800}
|
||||
|
||||
MAX_RETRIES_AFTER_STARTUP = 5
|
||||
|
||||
@@ -16,7 +16,12 @@ from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
from homeassistant.util.unit_system import METRIC_SYSTEM
|
||||
|
||||
from .const import DEFAULT_SCAN_INTERVAL, DOMAIN
|
||||
from .const import (
|
||||
DEFAULT_SCAN_INTERVAL,
|
||||
DEVICE_MODEL,
|
||||
DEVICE_SPECIFIC_SCAN_INTERVAL,
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -34,12 +39,18 @@ class AirthingsBLEDataUpdateCoordinator(DataUpdateCoordinator[AirthingsDevice]):
|
||||
self.airthings = AirthingsBluetoothDeviceData(
|
||||
_LOGGER, hass.config.units is METRIC_SYSTEM
|
||||
)
|
||||
|
||||
device_model = entry.data.get(DEVICE_MODEL)
|
||||
interval = DEVICE_SPECIFIC_SCAN_INTERVAL.get(
|
||||
device_model, DEFAULT_SCAN_INTERVAL
|
||||
)
|
||||
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=entry,
|
||||
name=DOMAIN,
|
||||
update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL),
|
||||
update_interval=timedelta(seconds=interval),
|
||||
)
|
||||
|
||||
async def _async_setup(self) -> None:
|
||||
@@ -58,11 +69,29 @@ class AirthingsBLEDataUpdateCoordinator(DataUpdateCoordinator[AirthingsDevice]):
|
||||
)
|
||||
self.ble_device = ble_device
|
||||
|
||||
if DEVICE_MODEL not in self.config_entry.data:
|
||||
_LOGGER.debug("Fetching device info for migration")
|
||||
try:
|
||||
data = await self.airthings.update_device(self.ble_device)
|
||||
except Exception as err:
|
||||
raise UpdateFailed(
|
||||
f"Unable to fetch data for migration: {err}"
|
||||
) from err
|
||||
|
||||
self.hass.config_entries.async_update_entry(
|
||||
self.config_entry,
|
||||
data={**self.config_entry.data, DEVICE_MODEL: data.model.value},
|
||||
)
|
||||
self.update_interval = timedelta(
|
||||
seconds=DEVICE_SPECIFIC_SCAN_INTERVAL.get(
|
||||
data.model.value, DEFAULT_SCAN_INTERVAL
|
||||
)
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> AirthingsDevice:
|
||||
"""Get data from Airthings BLE."""
|
||||
try:
|
||||
data = await self.airthings.update_device(self.ble_device)
|
||||
except Exception as err:
|
||||
raise UpdateFailed(f"Unable to fetch data: {err}") from err
|
||||
|
||||
return data
|
||||
|
||||
@@ -6,8 +6,8 @@ from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import Final
|
||||
|
||||
from aioamazondevices.api import AmazonDevice
|
||||
from aioamazondevices.const import SENSOR_STATE_OFF
|
||||
from aioamazondevices.const.metadata import SENSOR_STATE_OFF
|
||||
from aioamazondevices.structures import AmazonDevice
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
DOMAIN as BINARY_SENSOR_DOMAIN,
|
||||
|
||||
@@ -2,12 +2,13 @@
|
||||
|
||||
from datetime import timedelta
|
||||
|
||||
from aioamazondevices.api import AmazonDevice, AmazonEchoApi
|
||||
from aioamazondevices.api import AmazonEchoApi
|
||||
from aioamazondevices.exceptions import (
|
||||
CannotAuthenticate,
|
||||
CannotConnect,
|
||||
CannotRetrieveData,
|
||||
)
|
||||
from aioamazondevices.structures import AmazonDevice
|
||||
from aiohttp import ClientSession
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -15,6 +16,7 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.debounce import Debouncer
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import _LOGGER, CONF_LOGIN_DATA, DOMAIN
|
||||
@@ -42,6 +44,9 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
|
||||
name=entry.title,
|
||||
config_entry=entry,
|
||||
update_interval=timedelta(seconds=SCAN_INTERVAL),
|
||||
request_refresh_debouncer=Debouncer(
|
||||
hass, _LOGGER, cooldown=30, immediate=False
|
||||
),
|
||||
)
|
||||
self.api = AmazonEchoApi(
|
||||
session,
|
||||
|
||||
@@ -4,7 +4,7 @@ from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from aioamazondevices.api import AmazonDevice
|
||||
from aioamazondevices.structures import AmazonDevice
|
||||
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
"""Defines a base Alexa Devices entity."""
|
||||
|
||||
from aioamazondevices.api import AmazonDevice
|
||||
from aioamazondevices.const import SPEAKER_GROUP_MODEL
|
||||
from aioamazondevices.const.devices import SPEAKER_GROUP_MODEL
|
||||
from aioamazondevices.structures import AmazonDevice
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aioamazondevices"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aioamazondevices==6.5.6"]
|
||||
"requirements": ["aioamazondevices==8.0.1"]
|
||||
}
|
||||
|
||||
@@ -6,8 +6,9 @@ from collections.abc import Awaitable, Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Final
|
||||
|
||||
from aioamazondevices.api import AmazonDevice, AmazonEchoApi
|
||||
from aioamazondevices.const import SPEAKER_GROUP_FAMILY
|
||||
from aioamazondevices.api import AmazonEchoApi
|
||||
from aioamazondevices.const.devices import SPEAKER_GROUP_FAMILY
|
||||
from aioamazondevices.structures import AmazonDevice
|
||||
|
||||
from homeassistant.components.notify import NotifyEntity, NotifyEntityDescription
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
@@ -7,12 +7,12 @@ from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from typing import Final
|
||||
|
||||
from aioamazondevices.api import AmazonDevice
|
||||
from aioamazondevices.const import (
|
||||
from aioamazondevices.const.schedules import (
|
||||
NOTIFICATION_ALARM,
|
||||
NOTIFICATION_REMINDER,
|
||||
NOTIFICATION_TIMER,
|
||||
)
|
||||
from aioamazondevices.structures import AmazonDevice
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"""Support for services."""
|
||||
|
||||
from aioamazondevices.sounds import SOUNDS_LIST
|
||||
from aioamazondevices.const.sounds import SOUNDS_LIST
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
|
||||
@@ -6,7 +6,7 @@ from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING, Any, Final
|
||||
|
||||
from aioamazondevices.api import AmazonDevice
|
||||
from aioamazondevices.structures import AmazonDevice
|
||||
|
||||
from homeassistant.components.switch import (
|
||||
DOMAIN as SWITCH_DOMAIN,
|
||||
|
||||
@@ -4,7 +4,7 @@ from collections.abc import Awaitable, Callable, Coroutine
|
||||
from functools import wraps
|
||||
from typing import Any, Concatenate
|
||||
|
||||
from aioamazondevices.const import SPEAKER_GROUP_FAMILY
|
||||
from aioamazondevices.const.devices import SPEAKER_GROUP_FAMILY
|
||||
from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData
|
||||
|
||||
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
|
||||
|
||||
@@ -8,6 +8,6 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "calculated",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["cronsim==2.6", "securetar==2025.2.1"],
|
||||
"requirements": ["cronsim==2.7", "securetar==2025.2.1"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -74,8 +74,11 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity):
|
||||
super().__init__(data.fast_coordinator, data)
|
||||
self._attr_unique_id = f"{format_mac(data.device.MAC)}-climate"
|
||||
|
||||
self._attr_min_temp = data.static.min_temp.value
|
||||
self._attr_max_temp = data.static.max_temp.value
|
||||
# Set temperature range if available, otherwise use Home Assistant defaults
|
||||
if data.static.min_temp is not None and data.static.min_temp.value is not None:
|
||||
self._attr_min_temp = data.static.min_temp.value
|
||||
if data.static.max_temp is not None and data.static.max_temp.value is not None:
|
||||
self._attr_max_temp = data.static.max_temp.value
|
||||
self._attr_temperature_unit = data.fast_coordinator.client.get_temperature_unit
|
||||
|
||||
@property
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["bsblan"],
|
||||
"requirements": ["python-bsblan==3.1.0"],
|
||||
"requirements": ["python-bsblan==3.1.1"],
|
||||
"zeroconf": [
|
||||
{
|
||||
"name": "bsb-lan*",
|
||||
|
||||
@@ -57,9 +57,9 @@ async def _async_reproduce_states(
|
||||
await call_service(SERVICE_SET_HVAC_MODE, [], {ATTR_HVAC_MODE: state.state})
|
||||
|
||||
if (
|
||||
(ATTR_TEMPERATURE in state.attributes)
|
||||
or (ATTR_TARGET_TEMP_HIGH in state.attributes)
|
||||
or (ATTR_TARGET_TEMP_LOW in state.attributes)
|
||||
(state.attributes.get(ATTR_TEMPERATURE) is not None)
|
||||
or (state.attributes.get(ATTR_TARGET_TEMP_HIGH) is not None)
|
||||
or (state.attributes.get(ATTR_TARGET_TEMP_LOW) is not None)
|
||||
):
|
||||
await call_service(
|
||||
SERVICE_SET_TEMPERATURE,
|
||||
|
||||
@@ -37,13 +37,6 @@ USER_SCHEMA = vol.Schema(
|
||||
}
|
||||
)
|
||||
STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.string})
|
||||
STEP_RECONFIGURE = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST): cv.string,
|
||||
vol.Required(CONF_PORT): cv.port,
|
||||
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, str]:
|
||||
@@ -175,36 +168,55 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reconfiguration of the device."""
|
||||
reconfigure_entry = self._get_reconfigure_entry()
|
||||
if not user_input:
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure", data_schema=STEP_RECONFIGURE
|
||||
)
|
||||
|
||||
updated_host = user_input[CONF_HOST]
|
||||
|
||||
self._async_abort_entries_match({CONF_HOST: updated_host})
|
||||
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
try:
|
||||
await validate_input(self.hass, user_input)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except InvalidPin:
|
||||
errors["base"] = "invalid_pin"
|
||||
except Exception: # noqa: BLE001
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
return self.async_update_reload_and_abort(
|
||||
reconfigure_entry, data_updates={CONF_HOST: updated_host}
|
||||
)
|
||||
if user_input is not None:
|
||||
updated_host = user_input[CONF_HOST]
|
||||
|
||||
self._async_abort_entries_match({CONF_HOST: updated_host})
|
||||
|
||||
try:
|
||||
data_to_validate = {
|
||||
CONF_HOST: updated_host,
|
||||
CONF_PORT: user_input[CONF_PORT],
|
||||
CONF_PIN: user_input[CONF_PIN],
|
||||
CONF_TYPE: reconfigure_entry.data.get(CONF_TYPE, BRIDGE),
|
||||
}
|
||||
await validate_input(self.hass, data_to_validate)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except InvalidPin:
|
||||
errors["base"] = "invalid_pin"
|
||||
except Exception: # noqa: BLE001
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
data_updates = {
|
||||
CONF_HOST: updated_host,
|
||||
CONF_PORT: user_input[CONF_PORT],
|
||||
CONF_PIN: user_input[CONF_PIN],
|
||||
}
|
||||
return self.async_update_reload_and_abort(
|
||||
reconfigure_entry, data_updates=data_updates
|
||||
)
|
||||
|
||||
schema = vol.Schema(
|
||||
{
|
||||
vol.Required(
|
||||
CONF_HOST, default=reconfigure_entry.data[CONF_HOST]
|
||||
): cv.string,
|
||||
vol.Required(
|
||||
CONF_PORT, default=reconfigure_entry.data[CONF_PORT]
|
||||
): cv.port,
|
||||
vol.Optional(CONF_PIN): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure",
|
||||
data_schema=STEP_RECONFIGURE,
|
||||
data_schema=schema,
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/conversation",
|
||||
"integration_type": "entity",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["hassil==3.4.0", "home-assistant-intents==2025.10.28"]
|
||||
"requirements": ["hassil==3.4.0", "home-assistant-intents==2025.11.7"]
|
||||
}
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from urllib.parse import quote
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -152,7 +153,9 @@ class HassFoscamCamera(FoscamEntity, Camera):
|
||||
async def stream_source(self) -> str | None:
|
||||
"""Return the stream source."""
|
||||
if self._rtsp_port:
|
||||
return f"rtsp://{self._username}:{self._password}@{self._foscam_session.host}:{self._rtsp_port}/video{self._stream}"
|
||||
_username = quote(self._username)
|
||||
_password = quote(self._password)
|
||||
return f"rtsp://{_username}:{_password}@{self._foscam_session.host}:{self._rtsp_port}/video{self._stream}"
|
||||
|
||||
return None
|
||||
|
||||
|
||||
@@ -37,6 +37,7 @@ class FoscamDeviceInfo:
|
||||
supports_speak_volume_adjustment: bool
|
||||
supports_pet_adjustment: bool
|
||||
supports_car_adjustment: bool
|
||||
supports_human_adjustment: bool
|
||||
supports_wdr_adjustment: bool
|
||||
supports_hdr_adjustment: bool
|
||||
|
||||
@@ -115,20 +116,28 @@ class FoscamCoordinator(DataUpdateCoordinator[FoscamDeviceInfo]):
|
||||
is_open_wdr = None
|
||||
is_open_hdr = None
|
||||
reserve3 = product_info.get("reserve4")
|
||||
reserve3_int = int(reserve3) if reserve3 is not None else 0
|
||||
supports_wdr_adjustment_val = bool(int(reserve3_int & 256))
|
||||
supports_hdr_adjustment_val = bool(int(reserve3_int & 128))
|
||||
if supports_wdr_adjustment_val:
|
||||
ret_wdr, is_open_wdr_data = self.session.getWdrMode()
|
||||
mode = is_open_wdr_data["mode"] if ret_wdr == 0 and is_open_wdr_data else 0
|
||||
is_open_wdr = bool(int(mode))
|
||||
elif supports_hdr_adjustment_val:
|
||||
ret_hdr, is_open_hdr_data = self.session.getHdrMode()
|
||||
mode = is_open_hdr_data["mode"] if ret_hdr == 0 and is_open_hdr_data else 0
|
||||
is_open_hdr = bool(int(mode))
|
||||
|
||||
model = product_info.get("model")
|
||||
model_int = int(model) if model is not None else 7002
|
||||
if model_int > 7001:
|
||||
reserve3_int = int(reserve3) if reserve3 is not None else 0
|
||||
supports_wdr_adjustment_val = bool(int(reserve3_int & 256))
|
||||
supports_hdr_adjustment_val = bool(int(reserve3_int & 128))
|
||||
if supports_wdr_adjustment_val:
|
||||
ret_wdr, is_open_wdr_data = self.session.getWdrMode()
|
||||
mode = (
|
||||
is_open_wdr_data["mode"] if ret_wdr == 0 and is_open_wdr_data else 0
|
||||
)
|
||||
is_open_wdr = bool(int(mode))
|
||||
elif supports_hdr_adjustment_val:
|
||||
ret_hdr, is_open_hdr_data = self.session.getHdrMode()
|
||||
mode = (
|
||||
is_open_hdr_data["mode"] if ret_hdr == 0 and is_open_hdr_data else 0
|
||||
)
|
||||
is_open_hdr = bool(int(mode))
|
||||
else:
|
||||
supports_wdr_adjustment_val = False
|
||||
supports_hdr_adjustment_val = False
|
||||
ret_sw, software_capabilities = self.session.getSWCapabilities()
|
||||
|
||||
supports_speak_volume_adjustment_val = (
|
||||
bool(int(software_capabilities.get("swCapabilities1")) & 32)
|
||||
if ret_sw == 0
|
||||
@@ -144,24 +153,32 @@ class FoscamCoordinator(DataUpdateCoordinator[FoscamDeviceInfo]):
|
||||
if ret_sw == 0
|
||||
else False
|
||||
)
|
||||
ret_md, mothion_config_val = self.session.get_motion_detect_config()
|
||||
human_adjustment_val = (
|
||||
bool(int(software_capabilities.get("swCapabilities2")) & 128)
|
||||
if ret_sw == 0
|
||||
else False
|
||||
)
|
||||
ret_md, motion_config_val = self.session.get_motion_detect_config()
|
||||
if pet_adjustment_val:
|
||||
is_pet_detection_on_val = (
|
||||
mothion_config_val["petEnable"] == "1" if ret_md == 0 else False
|
||||
motion_config_val.get("petEnable") == "1" if ret_md == 0 else False
|
||||
)
|
||||
else:
|
||||
is_pet_detection_on_val = False
|
||||
|
||||
if car_adjustment_val:
|
||||
is_car_detection_on_val = (
|
||||
mothion_config_val["carEnable"] == "1" if ret_md == 0 else False
|
||||
motion_config_val.get("carEnable") == "1" if ret_md == 0 else False
|
||||
)
|
||||
else:
|
||||
is_car_detection_on_val = False
|
||||
|
||||
is_human_detection_on_val = (
|
||||
mothion_config_val["humanEnable"] == "1" if ret_md == 0 else False
|
||||
)
|
||||
if human_adjustment_val:
|
||||
is_human_detection_on_val = (
|
||||
motion_config_val.get("humanEnable") == "1" if ret_md == 0 else False
|
||||
)
|
||||
else:
|
||||
is_human_detection_on_val = False
|
||||
|
||||
return FoscamDeviceInfo(
|
||||
dev_info=dev_info,
|
||||
@@ -179,6 +196,7 @@ class FoscamCoordinator(DataUpdateCoordinator[FoscamDeviceInfo]):
|
||||
supports_speak_volume_adjustment=supports_speak_volume_adjustment_val,
|
||||
supports_pet_adjustment=pet_adjustment_val,
|
||||
supports_car_adjustment=car_adjustment_val,
|
||||
supports_human_adjustment=human_adjustment_val,
|
||||
supports_hdr_adjustment=supports_hdr_adjustment_val,
|
||||
supports_wdr_adjustment=supports_wdr_adjustment_val,
|
||||
is_open_wdr=is_open_wdr,
|
||||
|
||||
@@ -143,6 +143,7 @@ SWITCH_DESCRIPTIONS: list[FoscamSwitchEntityDescription] = [
|
||||
native_value_fn=lambda data: data.is_human_detection_on,
|
||||
turn_off_fn=lambda session: set_motion_detection(session, "humanEnable", False),
|
||||
turn_on_fn=lambda session: set_motion_detection(session, "humanEnable", True),
|
||||
exists_fn=lambda coordinator: coordinator.data.supports_human_adjustment,
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
@@ -136,6 +136,21 @@ async def async_setup_entry(
|
||||
new_data[CONF_URL] = url
|
||||
hass.config_entries.async_update_entry(config_entry, data=new_data)
|
||||
|
||||
# Migrate legacy config entries without auth_type field
|
||||
if CONF_AUTH_TYPE not in config:
|
||||
new_data = dict(config_entry.data)
|
||||
# Detect auth type based on which fields are present
|
||||
if CONF_TOKEN in config:
|
||||
new_data[CONF_AUTH_TYPE] = AUTH_API_TOKEN
|
||||
elif CONF_USERNAME in config:
|
||||
new_data[CONF_AUTH_TYPE] = AUTH_PASSWORD
|
||||
else:
|
||||
raise ConfigEntryError(
|
||||
"Unable to determine authentication type from config entry."
|
||||
)
|
||||
hass.config_entries.async_update_entry(config_entry, data=new_data)
|
||||
config = config_entry.data
|
||||
|
||||
# Determine API version
|
||||
if config.get(CONF_AUTH_TYPE) == AUTH_API_TOKEN:
|
||||
api_version = "v1"
|
||||
|
||||
@@ -6,6 +6,12 @@
|
||||
"dependencies": ["hardware", "usb", "homeassistant_hardware"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/homeassistant_connect_zbt2",
|
||||
"integration_type": "hardware",
|
||||
"loggers": [
|
||||
"bellows",
|
||||
"universal_silabs_flasher",
|
||||
"zigpy.serial",
|
||||
"serial_asyncio_fast"
|
||||
],
|
||||
"quality_scale": "bronze",
|
||||
"usb": [
|
||||
{
|
||||
|
||||
@@ -28,7 +28,7 @@ from homeassistant.config_entries import (
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.data_entry_flow import AbortFlow, progress_step
|
||||
from homeassistant.data_entry_flow import AbortFlow
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.hassio import is_hassio
|
||||
@@ -97,6 +97,12 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
self.addon_uninstall_task: asyncio.Task | None = None
|
||||
self.firmware_install_task: asyncio.Task[None] | None = None
|
||||
self.installing_firmware_name: str | None = None
|
||||
self._install_otbr_addon_task: asyncio.Task[None] | None = None
|
||||
self._start_otbr_addon_task: asyncio.Task[None] | None = None
|
||||
|
||||
# Progress flow steps cannot abort so we need to store the abort reason and then
|
||||
# re-raise it in a dedicated step
|
||||
self._progress_error: AbortFlow | None = None
|
||||
|
||||
def _get_translation_placeholders(self) -> dict[str, str]:
|
||||
"""Shared translation placeholders."""
|
||||
@@ -106,6 +112,11 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
if self._probed_firmware_info is not None
|
||||
else "unknown"
|
||||
),
|
||||
"firmware_name": (
|
||||
self.installing_firmware_name
|
||||
if self.installing_firmware_name is not None
|
||||
else "unknown"
|
||||
),
|
||||
"model": self._hardware_name,
|
||||
}
|
||||
|
||||
@@ -182,22 +193,22 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
return self.async_show_progress(
|
||||
step_id=step_id,
|
||||
progress_action="install_firmware",
|
||||
description_placeholders={
|
||||
**self._get_translation_placeholders(),
|
||||
"firmware_name": firmware_name,
|
||||
},
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
progress_task=self.firmware_install_task,
|
||||
)
|
||||
|
||||
try:
|
||||
await self.firmware_install_task
|
||||
except AbortFlow as err:
|
||||
return self.async_show_progress_done(
|
||||
next_step_id=err.reason,
|
||||
)
|
||||
self._progress_error = err
|
||||
return self.async_show_progress_done(next_step_id="progress_failed")
|
||||
except HomeAssistantError:
|
||||
_LOGGER.exception("Failed to flash firmware")
|
||||
return self.async_show_progress_done(next_step_id="firmware_install_failed")
|
||||
self._progress_error = AbortFlow(
|
||||
reason="fw_install_failed",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
)
|
||||
return self.async_show_progress_done(next_step_id="progress_failed")
|
||||
finally:
|
||||
self.firmware_install_task = None
|
||||
|
||||
@@ -241,7 +252,10 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
_LOGGER.debug("Skipping firmware upgrade due to index download failure")
|
||||
return
|
||||
|
||||
raise AbortFlow(reason="firmware_download_failed") from err
|
||||
raise AbortFlow(
|
||||
reason="fw_download_failed",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
) from err
|
||||
|
||||
if not firmware_install_required:
|
||||
assert self._probed_firmware_info is not None
|
||||
@@ -270,7 +284,10 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
return
|
||||
|
||||
# Otherwise, fail
|
||||
raise AbortFlow(reason="firmware_download_failed") from err
|
||||
raise AbortFlow(
|
||||
reason="fw_download_failed",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
) from err
|
||||
|
||||
self._probed_firmware_info = await async_flash_silabs_firmware(
|
||||
hass=self.hass,
|
||||
@@ -313,41 +330,6 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
|
||||
await otbr_manager.async_start_addon_waiting()
|
||||
|
||||
async def async_step_firmware_download_failed(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Abort when firmware download failed."""
|
||||
assert self.installing_firmware_name is not None
|
||||
return self.async_abort(
|
||||
reason="fw_download_failed",
|
||||
description_placeholders={
|
||||
**self._get_translation_placeholders(),
|
||||
"firmware_name": self.installing_firmware_name,
|
||||
},
|
||||
)
|
||||
|
||||
async def async_step_firmware_install_failed(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Abort when firmware install failed."""
|
||||
assert self.installing_firmware_name is not None
|
||||
return self.async_abort(
|
||||
reason="fw_install_failed",
|
||||
description_placeholders={
|
||||
**self._get_translation_placeholders(),
|
||||
"firmware_name": self.installing_firmware_name,
|
||||
},
|
||||
)
|
||||
|
||||
async def async_step_unsupported_firmware(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Abort when unsupported firmware is detected."""
|
||||
return self.async_abort(
|
||||
reason="unsupported_firmware",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
)
|
||||
|
||||
async def async_step_zigbee_installation_type(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -511,16 +493,15 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
"""Install Thread firmware."""
|
||||
raise NotImplementedError
|
||||
|
||||
@progress_step(
|
||||
description_placeholders=lambda self: {
|
||||
**self._get_translation_placeholders(),
|
||||
"addon_name": get_otbr_addon_manager(self.hass).addon_name,
|
||||
}
|
||||
)
|
||||
async def async_step_install_otbr_addon(
|
||||
async def async_step_progress_failed(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Show progress dialog for installing the OTBR addon."""
|
||||
"""Abort when progress step failed."""
|
||||
assert self._progress_error is not None
|
||||
raise self._progress_error
|
||||
|
||||
async def _async_install_otbr_addon(self) -> None:
|
||||
"""Do the work of installing the OTBR addon."""
|
||||
addon_manager = get_otbr_addon_manager(self.hass)
|
||||
addon_info = await self._async_get_addon_info(addon_manager)
|
||||
|
||||
@@ -538,18 +519,39 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
},
|
||||
) from err
|
||||
|
||||
return await self.async_step_finish_thread_installation()
|
||||
|
||||
@progress_step(
|
||||
description_placeholders=lambda self: {
|
||||
**self._get_translation_placeholders(),
|
||||
"addon_name": get_otbr_addon_manager(self.hass).addon_name,
|
||||
}
|
||||
)
|
||||
async def async_step_start_otbr_addon(
|
||||
async def async_step_install_otbr_addon(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Configure OTBR to point to the SkyConnect and run the addon."""
|
||||
"""Show progress dialog for installing the OTBR addon."""
|
||||
if self._install_otbr_addon_task is None:
|
||||
self._install_otbr_addon_task = self.hass.async_create_task(
|
||||
self._async_install_otbr_addon(),
|
||||
"Install OTBR addon",
|
||||
)
|
||||
|
||||
if not self._install_otbr_addon_task.done():
|
||||
return self.async_show_progress(
|
||||
step_id="install_otbr_addon",
|
||||
progress_action="install_otbr_addon",
|
||||
description_placeholders={
|
||||
**self._get_translation_placeholders(),
|
||||
"addon_name": get_otbr_addon_manager(self.hass).addon_name,
|
||||
},
|
||||
progress_task=self._install_otbr_addon_task,
|
||||
)
|
||||
|
||||
try:
|
||||
await self._install_otbr_addon_task
|
||||
except AbortFlow as err:
|
||||
self._progress_error = err
|
||||
return self.async_show_progress_done(next_step_id="progress_failed")
|
||||
finally:
|
||||
self._install_otbr_addon_task = None
|
||||
|
||||
return self.async_show_progress_done(next_step_id="finish_thread_installation")
|
||||
|
||||
async def _async_start_otbr_addon(self) -> None:
|
||||
"""Do the work of starting the OTBR addon."""
|
||||
try:
|
||||
await self._configure_and_start_otbr_addon()
|
||||
except AddonError as err:
|
||||
@@ -562,7 +564,36 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
},
|
||||
) from err
|
||||
|
||||
return await self.async_step_pre_confirm_otbr()
|
||||
async def async_step_start_otbr_addon(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Configure OTBR to point to the SkyConnect and run the addon."""
|
||||
if self._start_otbr_addon_task is None:
|
||||
self._start_otbr_addon_task = self.hass.async_create_task(
|
||||
self._async_start_otbr_addon(),
|
||||
"Start OTBR addon",
|
||||
)
|
||||
|
||||
if not self._start_otbr_addon_task.done():
|
||||
return self.async_show_progress(
|
||||
step_id="start_otbr_addon",
|
||||
progress_action="start_otbr_addon",
|
||||
description_placeholders={
|
||||
**self._get_translation_placeholders(),
|
||||
"addon_name": get_otbr_addon_manager(self.hass).addon_name,
|
||||
},
|
||||
progress_task=self._start_otbr_addon_task,
|
||||
)
|
||||
|
||||
try:
|
||||
await self._start_otbr_addon_task
|
||||
except AbortFlow as err:
|
||||
self._progress_error = err
|
||||
return self.async_show_progress_done(next_step_id="progress_failed")
|
||||
finally:
|
||||
self._start_otbr_addon_task = None
|
||||
|
||||
return self.async_show_progress_done(next_step_id="pre_confirm_otbr")
|
||||
|
||||
async def async_step_pre_confirm_otbr(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
|
||||
@@ -6,6 +6,12 @@
|
||||
"dependencies": ["hardware", "usb", "homeassistant_hardware"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/homeassistant_sky_connect",
|
||||
"integration_type": "hardware",
|
||||
"loggers": [
|
||||
"bellows",
|
||||
"universal_silabs_flasher",
|
||||
"zigpy.serial",
|
||||
"serial_asyncio_fast"
|
||||
],
|
||||
"usb": [
|
||||
{
|
||||
"description": "*skyconnect v1.0*",
|
||||
|
||||
@@ -7,5 +7,11 @@
|
||||
"dependencies": ["hardware", "homeassistant_hardware"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/homeassistant_yellow",
|
||||
"integration_type": "hardware",
|
||||
"loggers": [
|
||||
"bellows",
|
||||
"universal_silabs_flasher",
|
||||
"zigpy.serial",
|
||||
"serial_asyncio_fast"
|
||||
],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -121,12 +121,15 @@ class AutomowerBaseEntity(CoordinatorEntity[AutomowerDataUpdateCoordinator]):
|
||||
"""Initialize AutomowerEntity."""
|
||||
super().__init__(coordinator)
|
||||
self.mower_id = mower_id
|
||||
parts = self.mower_attributes.system.model.split(maxsplit=2)
|
||||
model_witout_manufacturer = self.mower_attributes.system.model.removeprefix(
|
||||
"Husqvarna "
|
||||
).removeprefix("HUSQVARNA ")
|
||||
parts = model_witout_manufacturer.split(maxsplit=1)
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, mower_id)},
|
||||
manufacturer=parts[0],
|
||||
model=parts[1],
|
||||
model_id=parts[2],
|
||||
manufacturer="Husqvarna",
|
||||
model=parts[0].capitalize().removesuffix("®"),
|
||||
model_id=parts[1],
|
||||
name=self.mower_attributes.system.name,
|
||||
serial_number=self.mower_attributes.system.serial_number,
|
||||
suggested_area="Garden",
|
||||
|
||||
@@ -13,6 +13,7 @@ from typing import Any
|
||||
from aiohttp import web
|
||||
from hyperion import client
|
||||
from hyperion.const import (
|
||||
KEY_DATA,
|
||||
KEY_IMAGE,
|
||||
KEY_IMAGE_STREAM,
|
||||
KEY_LEDCOLORS,
|
||||
@@ -155,7 +156,8 @@ class HyperionCamera(Camera):
|
||||
"""Update Hyperion components."""
|
||||
if not img:
|
||||
return
|
||||
img_data = img.get(KEY_RESULT, {}).get(KEY_IMAGE)
|
||||
# Prefer KEY_DATA (Hyperion server >= 2.1.1); fall back to KEY_RESULT for older server versions
|
||||
img_data = img.get(KEY_DATA, img.get(KEY_RESULT, {})).get(KEY_IMAGE)
|
||||
if not img_data or not img_data.startswith(IMAGE_STREAM_JPG_SENTINEL):
|
||||
return
|
||||
async with self._image_cond:
|
||||
|
||||
@@ -5,7 +5,6 @@ from __future__ import annotations
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
|
||||
from propcache.api import cached_property
|
||||
from pyituran import Vehicle
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
@@ -69,7 +68,7 @@ class IturanBinarySensor(IturanBaseEntity, BinarySensorEntity):
|
||||
super().__init__(coordinator, license_plate, description.key)
|
||||
self.entity_description = description
|
||||
|
||||
@cached_property
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if the binary sensor is on."""
|
||||
return self.entity_description.value_fn(self.vehicle)
|
||||
|
||||
@@ -2,8 +2,6 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from propcache.api import cached_property
|
||||
|
||||
from homeassistant.components.device_tracker import TrackerEntity
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
@@ -40,12 +38,12 @@ class IturanDeviceTracker(IturanBaseEntity, TrackerEntity):
|
||||
"""Initialize the device tracker."""
|
||||
super().__init__(coordinator, license_plate, "device_tracker")
|
||||
|
||||
@cached_property
|
||||
@property
|
||||
def latitude(self) -> float | None:
|
||||
"""Return latitude value of the device."""
|
||||
return self.vehicle.gps_coordinates[0]
|
||||
|
||||
@cached_property
|
||||
@property
|
||||
def longitude(self) -> float | None:
|
||||
"""Return longitude value of the device."""
|
||||
return self.vehicle.gps_coordinates[1]
|
||||
|
||||
@@ -6,7 +6,6 @@ from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
|
||||
from propcache.api import cached_property
|
||||
from pyituran import Vehicle
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
@@ -133,7 +132,7 @@ class IturanSensor(IturanBaseEntity, SensorEntity):
|
||||
super().__init__(coordinator, license_plate, description.key)
|
||||
self.entity_description = description
|
||||
|
||||
@cached_property
|
||||
@property
|
||||
def native_value(self) -> StateType | datetime:
|
||||
"""Return the state of the device."""
|
||||
return self.entity_description.value_fn(self.vehicle)
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
"loggers": ["xknx", "xknxproject"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": [
|
||||
"xknx==3.10.0",
|
||||
"xknx==3.10.1",
|
||||
"xknxproject==3.8.2",
|
||||
"knx-frontend==2025.10.31.195356"
|
||||
],
|
||||
|
||||
@@ -359,7 +359,7 @@ CLIMATE_KNX_SCHEMA = vol.Schema(
|
||||
write=False, state_required=True, valid_dpt="9.001"
|
||||
),
|
||||
vol.Optional(CONF_GA_HUMIDITY_CURRENT): GASelector(
|
||||
write=False, valid_dpt="9.002"
|
||||
write=False, valid_dpt="9.007"
|
||||
),
|
||||
vol.Required(CONF_TARGET_TEMPERATURE): GroupSelect(
|
||||
GroupSelectOption(
|
||||
|
||||
@@ -125,7 +125,7 @@ class LaMarzoccoUpdateEntity(LaMarzoccoEntity, UpdateEntity):
|
||||
await self.coordinator.device.update_firmware()
|
||||
while (
|
||||
update_progress := await self.coordinator.device.get_firmware()
|
||||
).command_status is UpdateStatus.IN_PROGRESS:
|
||||
).command_status is not UpdateStatus.UPDATED:
|
||||
if counter >= MAX_UPDATE_WAIT:
|
||||
_raise_timeout_error()
|
||||
self._attr_update_percentage = update_progress.progress_percentage
|
||||
|
||||
@@ -139,7 +139,7 @@ class MeteoLtWeatherEntity(CoordinatorEntity[MeteoLtUpdateCoordinator], WeatherE
|
||||
forecasts_by_date[date].append(timestamp)
|
||||
|
||||
daily_forecasts = []
|
||||
for date in sorted(forecasts_by_date.keys())[:5]:
|
||||
for date in sorted(forecasts_by_date.keys()):
|
||||
day_forecasts = forecasts_by_date[date]
|
||||
if not day_forecasts:
|
||||
continue
|
||||
@@ -186,5 +186,5 @@ class MeteoLtWeatherEntity(CoordinatorEntity[MeteoLtUpdateCoordinator], WeatherE
|
||||
return None
|
||||
return [
|
||||
self._convert_forecast_data(forecast_data)
|
||||
for forecast_data in self.coordinator.data.forecast_timestamps[:24]
|
||||
for forecast_data in self.coordinator.data.forecast_timestamps
|
||||
]
|
||||
|
||||
@@ -6,7 +6,7 @@ from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import Final
|
||||
|
||||
import aiohttp
|
||||
from aiohttp import ClientResponseError
|
||||
|
||||
from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -153,11 +153,12 @@ class MieleButton(MieleEntity, ButtonEntity):
|
||||
self._device_id,
|
||||
{PROCESS_ACTION: self.entity_description.press_data},
|
||||
)
|
||||
except aiohttp.ClientResponseError as ex:
|
||||
except ClientResponseError as err:
|
||||
_LOGGER.debug("Error setting button state for %s: %s", self.entity_id, err)
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="set_state_error",
|
||||
translation_placeholders={
|
||||
"entity": self.entity_id,
|
||||
},
|
||||
) from ex
|
||||
) from err
|
||||
|
||||
@@ -7,7 +7,7 @@ from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import Any, Final, cast
|
||||
|
||||
import aiohttp
|
||||
from aiohttp import ClientResponseError
|
||||
from pymiele import MieleDevice, MieleTemperature
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
@@ -250,7 +250,8 @@ class MieleClimate(MieleEntity, ClimateEntity):
|
||||
cast(float, kwargs.get(ATTR_TEMPERATURE)),
|
||||
self.entity_description.zone,
|
||||
)
|
||||
except aiohttp.ClientError as err:
|
||||
except ClientResponseError as err:
|
||||
_LOGGER.debug("Error setting climate state for %s: %s", self.entity_id, err)
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="set_state_error",
|
||||
|
||||
@@ -73,7 +73,7 @@ class MieleDataUpdateCoordinator(DataUpdateCoordinator[MieleCoordinatorData]):
|
||||
_LOGGER.debug(
|
||||
"Error fetching actions for device %s: Status: %s, Message: %s",
|
||||
device_id,
|
||||
err.status,
|
||||
str(err.status),
|
||||
err.message,
|
||||
)
|
||||
actions_json = {}
|
||||
|
||||
@@ -142,14 +142,15 @@ class MieleFan(MieleEntity, FanEntity):
|
||||
await self.api.send_action(
|
||||
self._device_id, {VENTILATION_STEP: ventilation_step}
|
||||
)
|
||||
except ClientResponseError as ex:
|
||||
except ClientResponseError as err:
|
||||
_LOGGER.debug("Error setting fan state for %s: %s", self.entity_id, err)
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="set_state_error",
|
||||
translation_placeholders={
|
||||
"entity": self.entity_id,
|
||||
},
|
||||
) from ex
|
||||
) from err
|
||||
self.device.state_ventilation_step = ventilation_step
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -171,6 +172,7 @@ class MieleFan(MieleEntity, FanEntity):
|
||||
translation_key="set_state_error",
|
||||
translation_placeholders={
|
||||
"entity": self.entity_id,
|
||||
"err_status": str(ex.status),
|
||||
},
|
||||
) from ex
|
||||
|
||||
@@ -188,6 +190,7 @@ class MieleFan(MieleEntity, FanEntity):
|
||||
translation_key="set_state_error",
|
||||
translation_placeholders={
|
||||
"entity": self.entity_id,
|
||||
"err_status": str(ex.status),
|
||||
},
|
||||
) from ex
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@ from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import Any, Final
|
||||
|
||||
import aiohttp
|
||||
from aiohttp import ClientResponseError
|
||||
|
||||
from homeassistant.components.light import (
|
||||
ColorMode,
|
||||
@@ -131,7 +131,8 @@ class MieleLight(MieleEntity, LightEntity):
|
||||
await self.api.send_action(
|
||||
self._device_id, {self.entity_description.light_type: mode}
|
||||
)
|
||||
except aiohttp.ClientError as err:
|
||||
except ClientResponseError as err:
|
||||
_LOGGER.debug("Error setting light state for %s: %s", self.entity_id, err)
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="set_state_error",
|
||||
|
||||
@@ -943,13 +943,19 @@ class MieleConsumptionSensor(MieleRestorableSensor):
|
||||
"""Update the last value of the sensor."""
|
||||
current_value = self.entity_description.value_fn(self.device)
|
||||
current_status = StateStatus(self.device.state_status)
|
||||
# Guard for corrupt restored value
|
||||
restored_value = (
|
||||
self._attr_native_value
|
||||
if isinstance(self._attr_native_value, (int, float))
|
||||
else 0
|
||||
)
|
||||
last_value = (
|
||||
float(cast(str, self._attr_native_value))
|
||||
float(cast(str, restored_value))
|
||||
if self._attr_native_value is not None
|
||||
else 0
|
||||
)
|
||||
|
||||
# force unknown when appliance is not able to report consumption
|
||||
# Force unknown when appliance is not able to report consumption
|
||||
if current_status in (
|
||||
StateStatus.ON,
|
||||
StateStatus.OFF,
|
||||
|
||||
@@ -4,7 +4,7 @@ from datetime import timedelta
|
||||
import logging
|
||||
from typing import cast
|
||||
|
||||
import aiohttp
|
||||
from aiohttp import ClientResponseError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import ATTR_DEVICE_ID, ATTR_TEMPERATURE
|
||||
@@ -107,7 +107,7 @@ async def set_program(call: ServiceCall) -> None:
|
||||
data = {"programId": call.data[ATTR_PROGRAM_ID]}
|
||||
try:
|
||||
await api.set_program(serial_number, data)
|
||||
except aiohttp.ClientResponseError as ex:
|
||||
except ClientResponseError as ex:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="set_program_error",
|
||||
@@ -137,7 +137,7 @@ async def set_program_oven(call: ServiceCall) -> None:
|
||||
data["temperature"] = call.data[ATTR_TEMPERATURE]
|
||||
try:
|
||||
await api.set_program(serial_number, data)
|
||||
except aiohttp.ClientResponseError as ex:
|
||||
except ClientResponseError as ex:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="set_program_oven_error",
|
||||
@@ -157,7 +157,7 @@ async def get_programs(call: ServiceCall) -> ServiceResponse:
|
||||
|
||||
try:
|
||||
programs = await api.get_programs(serial_number)
|
||||
except aiohttp.ClientResponseError as ex:
|
||||
except ClientResponseError as ex:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="get_programs_error",
|
||||
|
||||
@@ -7,7 +7,7 @@ from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import Any, Final, cast
|
||||
|
||||
import aiohttp
|
||||
from aiohttp import ClientResponseError
|
||||
from pymiele import MieleDevice
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
|
||||
@@ -165,7 +165,8 @@ class MieleSwitch(MieleEntity, SwitchEntity):
|
||||
"""Set switch to mode."""
|
||||
try:
|
||||
await self.api.send_action(self._device_id, mode)
|
||||
except aiohttp.ClientError as err:
|
||||
except ClientResponseError as err:
|
||||
_LOGGER.debug("Error setting switch state for %s: %s", self.entity_id, err)
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="set_state_error",
|
||||
@@ -197,7 +198,8 @@ class MielePowerSwitch(MieleSwitch):
|
||||
"""Set switch to mode."""
|
||||
try:
|
||||
await self.api.send_action(self._device_id, mode)
|
||||
except aiohttp.ClientError as err:
|
||||
except ClientResponseError as err:
|
||||
_LOGGER.debug("Error setting switch state for %s: %s", self.entity_id, err)
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="set_state_error",
|
||||
|
||||
@@ -189,14 +189,15 @@ class MieleVacuum(MieleEntity, StateVacuumEntity):
|
||||
"""Send action to the device."""
|
||||
try:
|
||||
await self.api.send_action(device_id, action)
|
||||
except ClientResponseError as ex:
|
||||
except ClientResponseError as err:
|
||||
_LOGGER.debug("Error setting vacuum state for %s: %s", self.entity_id, err)
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="set_state_error",
|
||||
translation_placeholders={
|
||||
"entity": self.entity_id,
|
||||
},
|
||||
) from ex
|
||||
) from err
|
||||
|
||||
async def async_clean_spot(self, **kwargs: Any) -> None:
|
||||
"""Clean spot."""
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/mill",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["mill", "mill_local"],
|
||||
"requirements": ["millheater==0.14.0", "mill-local==0.3.0"]
|
||||
"requirements": ["millheater==0.14.1", "mill-local==0.3.0"]
|
||||
}
|
||||
|
||||
@@ -61,10 +61,12 @@ async def async_setup_entry(
|
||||
|
||||
async_add_entities([MobileAppBinarySensor(data, config_entry)])
|
||||
|
||||
async_dispatcher_connect(
|
||||
hass,
|
||||
f"{DOMAIN}_{ENTITY_TYPE}_register",
|
||||
handle_sensor_registration,
|
||||
config_entry.async_on_unload(
|
||||
async_dispatcher_connect(
|
||||
hass,
|
||||
f"{DOMAIN}_{ENTITY_TYPE}_register",
|
||||
handle_sensor_registration,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -72,10 +72,12 @@ async def async_setup_entry(
|
||||
|
||||
async_add_entities([MobileAppSensor(data, config_entry)])
|
||||
|
||||
async_dispatcher_connect(
|
||||
hass,
|
||||
f"{DOMAIN}_{ENTITY_TYPE}_register",
|
||||
handle_sensor_registration,
|
||||
config_entry.async_on_unload(
|
||||
async_dispatcher_connect(
|
||||
hass,
|
||||
f"{DOMAIN}_{ENTITY_TYPE}_register",
|
||||
handle_sensor_registration,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -239,6 +239,7 @@ class MotionBaseDevice(MotionCoordinatorEntity, CoverEntity):
|
||||
angle = kwargs.get(ATTR_TILT_POSITION)
|
||||
if angle is not None:
|
||||
angle = angle * 180 / 100
|
||||
angle = 180 - angle
|
||||
async with self._api_lock:
|
||||
await self.hass.async_add_executor_job(
|
||||
self._blind.Set_position,
|
||||
|
||||
@@ -10,5 +10,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["onedrive_personal_sdk"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["onedrive-personal-sdk==0.0.15"]
|
||||
"requirements": ["onedrive-personal-sdk==0.0.16"]
|
||||
}
|
||||
|
||||
@@ -8,6 +8,6 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aio_ownet"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["aio-ownet==0.0.4"],
|
||||
"requirements": ["aio-ownet==0.0.5"],
|
||||
"zeroconf": ["_owserver._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -9,5 +9,5 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["openai==2.2.0", "python-open-router==0.3.2"]
|
||||
"requirements": ["openai==2.2.0", "python-open-router==0.3.3"]
|
||||
}
|
||||
|
||||
@@ -15,5 +15,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/palazzetti",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["pypalazzetti==0.1.19"]
|
||||
"requirements": ["pypalazzetti==0.1.20"]
|
||||
}
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pyportainer==1.0.12"]
|
||||
"requirements": ["pyportainer==1.0.14"]
|
||||
}
|
||||
|
||||
@@ -26,6 +26,9 @@ def validate_db_schema(instance: Recorder) -> set[str]:
|
||||
schema_errors |= validate_table_schema_supports_utf8(
|
||||
instance, StatisticsMeta, (StatisticsMeta.statistic_id,)
|
||||
)
|
||||
schema_errors |= validate_table_schema_has_correct_collation(
|
||||
instance, StatisticsMeta
|
||||
)
|
||||
for table in (Statistics, StatisticsShortTerm):
|
||||
schema_errors |= validate_db_schema_precision(instance, table)
|
||||
schema_errors |= validate_table_schema_has_correct_collation(instance, table)
|
||||
|
||||
@@ -54,7 +54,7 @@ CONTEXT_ID_AS_BINARY_SCHEMA_VERSION = 36
|
||||
EVENT_TYPE_IDS_SCHEMA_VERSION = 37
|
||||
STATES_META_SCHEMA_VERSION = 38
|
||||
CIRCULAR_MEAN_SCHEMA_VERSION = 49
|
||||
UNIT_CLASS_SCHEMA_VERSION = 51
|
||||
UNIT_CLASS_SCHEMA_VERSION = 52
|
||||
|
||||
LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION = 28
|
||||
LEGACY_STATES_EVENT_FOREIGN_KEYS_FIXED_SCHEMA_VERSION = 43
|
||||
|
||||
@@ -71,7 +71,7 @@ class LegacyBase(DeclarativeBase):
|
||||
"""Base class for tables, used for schema migration."""
|
||||
|
||||
|
||||
SCHEMA_VERSION = 51
|
||||
SCHEMA_VERSION = 52
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -13,7 +13,15 @@ from typing import TYPE_CHECKING, Any, TypedDict, cast, final
|
||||
from uuid import UUID
|
||||
|
||||
import sqlalchemy
|
||||
from sqlalchemy import ForeignKeyConstraint, MetaData, Table, func, text, update
|
||||
from sqlalchemy import (
|
||||
ForeignKeyConstraint,
|
||||
MetaData,
|
||||
Table,
|
||||
cast as cast_,
|
||||
func,
|
||||
text,
|
||||
update,
|
||||
)
|
||||
from sqlalchemy.engine import CursorResult, Engine
|
||||
from sqlalchemy.exc import (
|
||||
DatabaseError,
|
||||
@@ -26,8 +34,9 @@ from sqlalchemy.exc import (
|
||||
from sqlalchemy.orm import DeclarativeBase
|
||||
from sqlalchemy.orm.session import Session
|
||||
from sqlalchemy.schema import AddConstraint, CreateTable, DropConstraint
|
||||
from sqlalchemy.sql.expression import true
|
||||
from sqlalchemy.sql.expression import and_, true
|
||||
from sqlalchemy.sql.lambdas import StatementLambdaElement
|
||||
from sqlalchemy.types import BINARY
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.util.enum import try_parse_enum
|
||||
@@ -2044,14 +2053,74 @@ class _SchemaVersion50Migrator(_SchemaVersionMigrator, target_version=50):
|
||||
class _SchemaVersion51Migrator(_SchemaVersionMigrator, target_version=51):
|
||||
def _apply_update(self) -> None:
|
||||
"""Version specific update method."""
|
||||
# Add unit class column to StatisticsMeta
|
||||
# Replaced with version 52 which corrects issues with MySQL string comparisons.
|
||||
|
||||
|
||||
class _SchemaVersion52Migrator(_SchemaVersionMigrator, target_version=52):
|
||||
def _apply_update(self) -> None:
|
||||
"""Version specific update method."""
|
||||
if self.engine.dialect.name == SupportedDialect.MYSQL:
|
||||
self._apply_update_mysql()
|
||||
else:
|
||||
self._apply_update_postgresql_sqlite()
|
||||
|
||||
def _apply_update_mysql(self) -> None:
|
||||
"""Version specific update method for mysql."""
|
||||
_add_columns(self.session_maker, "statistics_meta", ["unit_class VARCHAR(255)"])
|
||||
with session_scope(session=self.session_maker()) as session:
|
||||
connection = session.connection()
|
||||
for conv in _PRIMARY_UNIT_CONVERTERS:
|
||||
case_sensitive_units = {
|
||||
u.encode("utf-8") if u else u for u in conv.VALID_UNITS
|
||||
}
|
||||
# Reset unit_class to None for entries that do not match
|
||||
# the valid units (case sensitive) but matched before due to
|
||||
# case insensitive comparisons.
|
||||
connection.execute(
|
||||
update(StatisticsMeta)
|
||||
.where(StatisticsMeta.unit_of_measurement.in_(conv.VALID_UNITS))
|
||||
.where(
|
||||
and_(
|
||||
StatisticsMeta.unit_of_measurement.in_(conv.VALID_UNITS),
|
||||
cast_(StatisticsMeta.unit_of_measurement, BINARY).not_in(
|
||||
case_sensitive_units
|
||||
),
|
||||
)
|
||||
)
|
||||
.values(unit_class=None)
|
||||
)
|
||||
# Do an explicitly case sensitive match (actually binary) to set the
# correct unit_class. This is needed because we use the case insensitive
# utf8mb4_unicode_ci collation.
connection.execute(
|
||||
update(StatisticsMeta)
|
||||
.where(
|
||||
and_(
|
||||
cast_(StatisticsMeta.unit_of_measurement, BINARY).in_(
|
||||
case_sensitive_units
|
||||
),
|
||||
StatisticsMeta.unit_class.is_(None),
|
||||
)
|
||||
)
|
||||
.values(unit_class=conv.UNIT_CLASS)
|
||||
)
|
||||
|
||||
def _apply_update_postgresql_sqlite(self) -> None:
|
||||
"""Version specific update method for postgresql and sqlite."""
|
||||
_add_columns(self.session_maker, "statistics_meta", ["unit_class VARCHAR(255)"])
|
||||
with session_scope(session=self.session_maker()) as session:
|
||||
connection = session.connection()
|
||||
for conv in _PRIMARY_UNIT_CONVERTERS:
|
||||
# Set the correct unit_class. Unlike MySQL, Postgres and SQLite
|
||||
# have case sensitive string comparisons by default, so we
|
||||
# can directly match on the valid units.
|
||||
connection.execute(
|
||||
update(StatisticsMeta)
|
||||
.where(
|
||||
and_(
|
||||
StatisticsMeta.unit_of_measurement.in_(conv.VALID_UNITS),
|
||||
StatisticsMeta.unit_class.is_(None),
|
||||
)
|
||||
)
|
||||
.values(unit_class=conv.UNIT_CLASS)
|
||||
)
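A note on the schema 52 migration above: the explicit BINARY cast is what turns MySQL's case-insensitive string comparison into a byte-level one. The following standalone sketch is not part of the migration; it uses an in-memory SQLite database and a made-up table name purely to illustrate the same SQLAlchemy cast-to-BINARY matching pattern.

# Illustrative sketch only, not Home Assistant code. On MySQL the plain IN()
# match would be case insensitive (utf8mb4_*_ci collations), which is why the
# migration above casts the column to BINARY before matching units.
from sqlalchemy import BINARY, String, cast, create_engine, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class Meta(Base):
    __tablename__ = "demo_statistics_meta"  # hypothetical demo table
    id: Mapped[int] = mapped_column(primary_key=True)
    unit_of_measurement: Mapped[str] = mapped_column(String(255))


engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([Meta(unit_of_measurement="Wh"), Meta(unit_of_measurement="wh")])
    session.commit()

    # Byte-level match: only the exact-case "Wh" row is returned.
    stmt = select(Meta).where(cast(Meta.unit_of_measurement, BINARY).in_([b"Wh"]))
    print([row.unit_of_measurement for row in session.scalars(stmt)])  # ['Wh']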
@@ -19,5 +19,5 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["reolink_aio"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["reolink-aio==0.16.4"]
|
||||
"requirements": ["reolink-aio==0.16.5"]
|
||||
}
|
||||
|
||||
@@ -81,4 +81,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: SFRConfigEntry) -> bool:
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: SFRConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
if entry.data.get(CONF_USERNAME) and entry.data.get(CONF_PASSWORD):
|
||||
return await hass.config_entries.async_unload_platforms(
|
||||
entry, PLATFORMS_WITH_AUTH
|
||||
)
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
@@ -30,5 +30,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pysmartthings"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pysmartthings==3.3.1"]
|
||||
"requirements": ["pysmartthings==3.3.3"]
|
||||
}
|
||||
|
||||
@@ -57,4 +57,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: SolarEdgeConfigEntry) ->
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: SolarEdgeConfigEntry) -> bool:
|
||||
"""Unload SolarEdge config entry."""
|
||||
if DATA_API_CLIENT not in entry.runtime_data:
|
||||
return True # Nothing to unload
|
||||
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
@@ -133,8 +133,11 @@ class SolarEdgeConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
if api_key_ok and web_login_ok:
|
||||
data = {CONF_SITE_ID: site_id}
|
||||
data.update(api_auth)
|
||||
data.update(web_auth)
|
||||
if api_key:
|
||||
data[CONF_API_KEY] = api_key
|
||||
if username:
|
||||
data[CONF_USERNAME] = username
|
||||
data[CONF_PASSWORD] = web_auth[CONF_PASSWORD]
|
||||
|
||||
if self.source == SOURCE_RECONFIGURE:
|
||||
if TYPE_CHECKING:
|
||||
|
||||
@@ -237,7 +237,7 @@ class TeslemetryStreamingUpdateEntity(
|
||||
if self._download_percentage > 1 and self._download_percentage < 100:
|
||||
self._attr_in_progress = True
|
||||
self._attr_update_percentage = self._download_percentage
|
||||
elif self._install_percentage > 1:
|
||||
elif self._install_percentage > 10:
|
||||
self._attr_in_progress = True
|
||||
self._attr_update_percentage = self._install_percentage
|
||||
else:
|
||||
|
||||
@@ -181,15 +181,14 @@ class TPLinkClimateEntity(CoordinatedTPLinkModuleEntity, ClimateEntity):
|
||||
HVACMode.HEAT if self._thermostat_module.state else HVACMode.OFF
|
||||
)
|
||||
|
||||
if (
|
||||
self._thermostat_module.mode not in STATE_TO_ACTION
|
||||
and self._attr_hvac_action is not HVACAction.OFF
|
||||
):
|
||||
_LOGGER.warning(
|
||||
"Unknown thermostat state, defaulting to OFF: %s",
|
||||
self._thermostat_module.mode,
|
||||
)
|
||||
self._attr_hvac_action = HVACAction.OFF
|
||||
if self._thermostat_module.mode not in STATE_TO_ACTION:
|
||||
# Report a warning on the first non-default unknown mode
|
||||
if self._attr_hvac_action is not HVACAction.OFF:
|
||||
_LOGGER.warning(
|
||||
"Unknown thermostat state, defaulting to OFF: %s",
|
||||
self._thermostat_module.mode,
|
||||
)
|
||||
self._attr_hvac_action = HVACAction.OFF
|
||||
return True
|
||||
|
||||
self._attr_hvac_action = STATE_TO_ACTION[self._thermostat_module.mode]
|
||||
|
||||
@@ -43,5 +43,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["tuya_sharing"],
|
||||
"requirements": ["tuya-device-sharing-sdk==0.2.4"]
|
||||
"requirements": ["tuya-device-sharing-sdk==0.2.5"]
|
||||
}
|
||||
|
||||
@@ -2,13 +2,23 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import aiohttp
|
||||
from uasiren.client import Client
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_NAME, CONF_REGION
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import issue_registry as ir
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import DOMAIN, PLATFORMS
|
||||
from .coordinator import UkraineAlarmDataUpdateCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up Ukraine Alarm as config entry."""
|
||||
@@ -30,3 +40,56 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
hass.data[DOMAIN].pop(entry.entry_id)
|
||||
|
||||
return unload_ok
|
||||
|
||||
|
||||
async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
|
||||
"""Migrate old entry."""
|
||||
_LOGGER.debug("Migrating from version %s", config_entry.version)
|
||||
|
||||
if config_entry.version == 1:
|
||||
# Version 1 had states as first-class selections
|
||||
# Version 2 only allows states without districts, districts and communities
region_id = config_entry.data[CONF_REGION]
|
||||
|
||||
websession = async_get_clientsession(hass)
|
||||
try:
|
||||
regions_data = await Client(websession).get_regions()
|
||||
except (aiohttp.ClientError, TimeoutError) as err:
|
||||
_LOGGER.warning(
|
||||
"Could not migrate config entry %s: failed to fetch current regions: %s",
|
||||
config_entry.entry_id,
|
||||
err,
|
||||
)
|
||||
return False
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert isinstance(regions_data, dict)
|
||||
|
||||
state_with_districts = None
|
||||
for state in regions_data["states"]:
|
||||
if state["regionId"] == region_id and state.get("regionChildIds"):
|
||||
state_with_districts = state
|
||||
break
|
||||
|
||||
if state_with_districts:
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
f"deprecated_state_region_{config_entry.entry_id}",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="deprecated_state_region",
|
||||
translation_placeholders={
|
||||
"region_name": config_entry.data.get(CONF_NAME, region_id),
|
||||
},
|
||||
)
|
||||
|
||||
return False
|
||||
|
||||
hass.config_entries.async_update_entry(config_entry, version=2)
|
||||
_LOGGER.info("Migration to version %s successful", 2)
|
||||
return True
|
||||
|
||||
_LOGGER.error("Unknown version %s", config_entry.version)
|
||||
return False
|
||||
|
||||
@@ -21,7 +21,7 @@ _LOGGER = logging.getLogger(__name__)
|
||||
class UkraineAlarmConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Config flow for Ukraine Alarm."""
|
||||
|
||||
VERSION = 1
|
||||
VERSION = 2
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize a new UkraineAlarmConfigFlow."""
|
||||
@@ -112,7 +112,7 @@ class UkraineAlarmConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
return await self._async_finish_flow()
|
||||
|
||||
regions = {}
|
||||
if self.selected_region:
|
||||
if self.selected_region and step_id != "district":
|
||||
regions[self.selected_region["regionId"]] = self.selected_region[
|
||||
"regionName"
|
||||
]
|
||||
|
||||
@@ -13,19 +13,19 @@
|
||||
"data": {
|
||||
"region": "[%key:component::ukraine_alarm::config::step::user::data::region%]"
|
||||
},
|
||||
"description": "If you want to monitor not only state and district, choose its specific community"
|
||||
"description": "Choose the district you selected above or select a specific community within that district"
|
||||
},
|
||||
"district": {
|
||||
"data": {
|
||||
"region": "[%key:component::ukraine_alarm::config::step::user::data::region%]"
|
||||
},
|
||||
"description": "If you want to monitor not only state, choose its specific district"
|
||||
"description": "Choose a district to monitor within the selected state"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"region": "Region"
|
||||
},
|
||||
"description": "Choose state to monitor"
|
||||
"description": "Choose a state"
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -50,5 +50,11 @@
|
||||
"name": "Urban fights"
|
||||
}
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"deprecated_state_region": {
|
||||
"description": "The region `{region_name}` is a state-level region, which is no longer supported. Please remove this integration entry and add it again, selecting a district or community instead of the entire state.",
|
||||
"title": "State-level region monitoring is no longer supported"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "helper",
|
||||
"iot_class": "local_push",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["cronsim==2.6"]
|
||||
"requirements": ["cronsim==2.7"]
|
||||
}
|
||||
|
||||
@@ -13,5 +13,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/vesync",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["pyvesync"],
|
||||
"requirements": ["pyvesync==3.1.4"]
|
||||
"requirements": ["pyvesync==3.2.2"]
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import timedelta
|
||||
from datetime import datetime, timedelta
|
||||
from http import HTTPStatus
|
||||
import logging
|
||||
|
||||
@@ -237,7 +237,7 @@ class XboxUpdateCoordinator(DataUpdateCoordinator[XboxData]):
|
||||
translation_key="request_exception",
|
||||
) from e
|
||||
title_data[person.xuid] = title.titles[0]
|
||||
|
||||
person.last_seen_date_time_utc = self.last_seen_timestamp(person)
|
||||
if (
|
||||
self.current_friends - (new_friends := set(presence_data))
|
||||
or not self.current_friends
|
||||
@@ -247,6 +247,22 @@ class XboxUpdateCoordinator(DataUpdateCoordinator[XboxData]):
|
||||
|
||||
return XboxData(new_console_data, presence_data, title_data)
|
||||
|
||||
def last_seen_timestamp(self, person: Person) -> datetime | None:
|
||||
"""Returns the most recent of two timestamps."""
|
||||
|
||||
# The Xbox API constantly fluctuates the "last seen" timestamp between two close values,
# causing unnecessary updates. We only accept the most recent one as valid to prevent this.
if not (prev_data := self.data.presence.get(person.xuid)):
|
||||
return person.last_seen_date_time_utc
|
||||
|
||||
prev_dt = prev_data.last_seen_date_time_utc
|
||||
cur_dt = person.last_seen_date_time_utc
|
||||
|
||||
if prev_dt and cur_dt:
|
||||
return max(prev_dt, cur_dt)
|
||||
|
||||
return cur_dt
|
||||
|
||||
def remove_stale_devices(self, xuids: set[str]) -> None:
|
||||
"""Remove stale devices from registry."""
|
||||
|
||||
|
||||
@@ -8,6 +8,7 @@ import collections
|
||||
from contextlib import suppress
|
||||
from enum import StrEnum
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from typing import Any
|
||||
|
||||
@@ -39,7 +40,7 @@ from homeassistant.config_entries import (
|
||||
)
|
||||
from homeassistant.const import CONF_NAME
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.data_entry_flow import AbortFlow, progress_step
|
||||
from homeassistant.data_entry_flow import AbortFlow
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.hassio import is_hassio
|
||||
from homeassistant.helpers.selector import FileSelector, FileSelectorConfig
|
||||
@@ -57,6 +58,8 @@ from .radio_manager import (
|
||||
ZhaRadioManager,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONF_MANUAL_PATH = "Enter Manually"
|
||||
DECONZ_DOMAIN = "deconz"
|
||||
|
||||
@@ -191,8 +194,14 @@ class BaseZhaFlow(ConfigEntryBaseFlow):
|
||||
self._hass = None # type: ignore[assignment]
|
||||
self._radio_mgr = ZhaRadioManager()
|
||||
self._restore_backup_task: asyncio.Task[None] | None = None
|
||||
self._reset_old_radio_task: asyncio.Task[None] | None = None
|
||||
self._form_network_task: asyncio.Task[None] | None = None
|
||||
self._extra_network_config: dict[str, Any] = {}
|
||||
|
||||
# Progress flow steps cannot abort so we need to store the abort reason and then
|
||||
# re-raise it in a dedicated step
|
||||
self._progress_error: AbortFlow | None = None
|
||||
|
||||
@property
|
||||
def hass(self) -> HomeAssistant:
|
||||
"""Return hass."""
|
||||
@@ -224,6 +233,13 @@ class BaseZhaFlow(ConfigEntryBaseFlow):
|
||||
async def _async_create_radio_entry(self) -> ConfigFlowResult:
|
||||
"""Create a config entry with the current flow state."""
|
||||
|
||||
async def async_step_progress_failed(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Abort when progress step failed."""
|
||||
assert self._progress_error is not None
|
||||
raise self._progress_error
|
||||
|
||||
async def async_step_choose_serial_port(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -464,7 +480,22 @@ class BaseZhaFlow(ConfigEntryBaseFlow):
|
||||
self._radio_mgr.chosen_backup = self._radio_mgr.backups[0]
|
||||
return await self.async_step_maybe_reset_old_radio()
|
||||
|
||||
@progress_step()
|
||||
async def _async_reset_old_radio(self, config_entry: ConfigEntry) -> None:
|
||||
"""Do the work of resetting the old radio."""
|
||||
|
||||
# Unload ZHA before connecting to the old adapter
|
||||
with suppress(OperationNotAllowed):
|
||||
await self.hass.config_entries.async_unload(config_entry.entry_id)
|
||||
|
||||
# Create a radio manager to connect to the old stick to reset it
|
||||
temp_radio_mgr = ZhaRadioManager()
|
||||
temp_radio_mgr.hass = self.hass
|
||||
temp_radio_mgr.device_path = config_entry.data[CONF_DEVICE][CONF_DEVICE_PATH]
|
||||
temp_radio_mgr.device_settings = config_entry.data[CONF_DEVICE]
|
||||
temp_radio_mgr.radio_type = RadioType[config_entry.data[CONF_RADIO_TYPE]]
|
||||
|
||||
await temp_radio_mgr.async_reset_adapter()
|
||||
|
||||
async def async_step_maybe_reset_old_radio(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -475,30 +506,37 @@ class BaseZhaFlow(ConfigEntryBaseFlow):
|
||||
DOMAIN, include_ignore=False
|
||||
)
|
||||
|
||||
if config_entries:
|
||||
if not config_entries:
|
||||
return await self.async_step_restore_backup()
|
||||
|
||||
if self._reset_old_radio_task is None:
|
||||
# This will only ever be called during migration, so there must be an
|
||||
# existing config entry
|
||||
assert len(config_entries) == 1
|
||||
config_entry = config_entries[0]
|
||||
|
||||
# Unload ZHA before connecting to the old adapter
|
||||
with suppress(OperationNotAllowed):
|
||||
await self.hass.config_entries.async_unload(config_entry.entry_id)
|
||||
self._reset_old_radio_task = self.hass.async_create_task(
|
||||
self._async_reset_old_radio(config_entry),
|
||||
"Reset old radio",
|
||||
)
|
||||
|
||||
# Create a radio manager to connect to the old stick to reset it
|
||||
temp_radio_mgr = ZhaRadioManager()
|
||||
temp_radio_mgr.hass = self.hass
|
||||
temp_radio_mgr.device_path = config_entry.data[CONF_DEVICE][
|
||||
CONF_DEVICE_PATH
|
||||
]
|
||||
temp_radio_mgr.device_settings = config_entry.data[CONF_DEVICE]
|
||||
temp_radio_mgr.radio_type = RadioType[config_entry.data[CONF_RADIO_TYPE]]
|
||||
if not self._reset_old_radio_task.done():
|
||||
return self.async_show_progress(
|
||||
step_id="maybe_reset_old_radio",
|
||||
progress_action="maybe_reset_old_radio",
|
||||
progress_task=self._reset_old_radio_task,
|
||||
)
|
||||
|
||||
try:
|
||||
await temp_radio_mgr.async_reset_adapter()
|
||||
except HomeAssistantError:
|
||||
# Old adapter not found or cannot connect, show prompt to plug back in
|
||||
return await self.async_step_plug_in_old_radio()
|
||||
try:
|
||||
await self._reset_old_radio_task
|
||||
except HomeAssistantError:
|
||||
_LOGGER.exception("Failed to reset old radio during migration")
|
||||
# Old adapter not found or cannot connect, show prompt to plug back in
|
||||
return self.async_show_progress_done(next_step_id="plug_in_old_radio")
|
||||
finally:
|
||||
self._reset_old_radio_task = None
|
||||
|
||||
return await self.async_step_restore_backup()
|
||||
return self.async_show_progress_done(next_step_id="restore_backup")
|
||||
|
||||
async def async_step_plug_in_old_radio(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -618,16 +656,35 @@ class BaseZhaFlow(ConfigEntryBaseFlow):
|
||||
# This step exists only for translations, it does nothing new
|
||||
return await self.async_step_form_new_network(user_input)
|
||||
|
||||
@progress_step()
|
||||
async def _async_form_new_network(self) -> None:
|
||||
"""Do the work of forming a new network."""
|
||||
await self._radio_mgr.async_form_network(config=self._extra_network_config)
|
||||
# Load the newly formed network settings to get the network info
|
||||
await self._radio_mgr.async_load_network_settings()
|
||||
|
||||
async def async_step_form_new_network(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Form a brand-new network."""
|
||||
await self._radio_mgr.async_form_network(config=self._extra_network_config)
|
||||
if self._form_network_task is None:
|
||||
self._form_network_task = self.hass.async_create_task(
|
||||
self._async_form_new_network(),
|
||||
"Form new network",
|
||||
)
|
||||
|
||||
# Load the newly formed network settings to get the network info
|
||||
await self._radio_mgr.async_load_network_settings()
|
||||
return await self._async_create_radio_entry()
|
||||
if not self._form_network_task.done():
|
||||
return self.async_show_progress(
|
||||
step_id="form_new_network",
|
||||
progress_action="form_new_network",
|
||||
progress_task=self._form_network_task,
|
||||
)
|
||||
|
||||
try:
|
||||
await self._form_network_task
|
||||
finally:
|
||||
self._form_network_task = None
|
||||
|
||||
return self.async_show_progress_done(next_step_id="create_entry")
|
||||
|
||||
def _parse_uploaded_backup(
|
||||
self, uploaded_file_id: str
|
||||
@@ -732,13 +789,15 @@ class BaseZhaFlow(ConfigEntryBaseFlow):
|
||||
next_step_id="pre_confirm_ezsp_ieee_overwrite"
|
||||
)
|
||||
except HomeAssistantError:
|
||||
_LOGGER.exception("Failed to restore network backup to new radio")
|
||||
# User unplugged the new adapter, allow retry
|
||||
return self.async_show_progress_done(next_step_id="pre_plug_in_new_radio")
|
||||
except CannotWriteNetworkSettings as exc:
|
||||
return self.async_abort(
|
||||
self._progress_error = AbortFlow(
|
||||
reason="cannot_restore_backup",
|
||||
description_placeholders={"error": str(exc)},
|
||||
)
|
||||
return self.async_show_progress_done(next_step_id="progress_failed")
|
||||
finally:
|
||||
self._restore_backup_task = None
|
||||
|
||||
|
||||
@@ -21,7 +21,7 @@
|
||||
"zha",
|
||||
"universal_silabs_flasher"
|
||||
],
|
||||
"requirements": ["zha==0.0.77"],
|
||||
"requirements": ["zha==0.0.79"],
|
||||
"usb": [
|
||||
{
|
||||
"description": "*2652*",
|
||||
|
||||
@@ -1473,16 +1473,18 @@ class ConfigEntriesFlowManager(
|
||||
if not self._pending_import_flows[handler]:
|
||||
del self._pending_import_flows[handler]
|
||||
|
||||
if (
|
||||
result["type"] != data_entry_flow.FlowResultType.ABORT
|
||||
and source in DISCOVERY_SOURCES
|
||||
):
|
||||
# Flows can abort or create an entry in their initial step; we do not want to
# fire a discovery event if no flow is actually in progress
flow_completed = result["type"] in {
|
||||
data_entry_flow.FlowResultType.ABORT,
|
||||
data_entry_flow.FlowResultType.CREATE_ENTRY,
|
||||
}
|
||||
|
||||
if not flow_completed and source in DISCOVERY_SOURCES:
|
||||
# Fire discovery event
|
||||
await self._discovery_event_debouncer.async_call()
|
||||
|
||||
if result["type"] != data_entry_flow.FlowResultType.ABORT and source in (
|
||||
DISCOVERY_SOURCES | {SOURCE_REAUTH}
|
||||
):
|
||||
if not flow_completed and source in DISCOVERY_SOURCES | {SOURCE_REAUTH}:
|
||||
# Notify listeners that a flow is created
|
||||
for subscription in self._flow_subscriptions:
|
||||
subscription("added", flow.flow_id)
|
||||
|
||||
@@ -24,7 +24,7 @@ if TYPE_CHECKING:
|
||||
APPLICATION_NAME: Final = "HomeAssistant"
|
||||
MAJOR_VERSION: Final = 2025
|
||||
MINOR_VERSION: Final = 11
|
||||
PATCH_VERSION: Final = "0"
|
||||
PATCH_VERSION: Final = "2"
|
||||
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
|
||||
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
|
||||
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 2)
|
||||
|
||||
@@ -645,12 +645,24 @@ class FlowHandler(Generic[_FlowContextT, _FlowResultT, _HandlerT]):
|
||||
__progress_task: asyncio.Task[Any] | None = None
|
||||
__no_progress_task_reported = False
|
||||
deprecated_show_progress = False
|
||||
_progress_step_data: ProgressStepData[_FlowResultT] = {
|
||||
"tasks": {},
|
||||
"abort_reason": "",
|
||||
"abort_description_placeholders": MappingProxyType({}),
|
||||
"next_step_result": None,
|
||||
}
|
||||
__progress_step_data: ProgressStepData[_FlowResultT] | None = None
|
||||
|
||||
@property
|
||||
def _progress_step_data(self) -> ProgressStepData[_FlowResultT]:
|
||||
"""Return progress step data.
|
||||
|
||||
A property is used instead of a simple attribute as derived classes
|
||||
do not call super().__init__.
|
||||
The property makes sure that the dict is initialized if needed.
|
||||
"""
|
||||
if not self.__progress_step_data:
|
||||
self.__progress_step_data = {
|
||||
"tasks": {},
|
||||
"abort_reason": "",
|
||||
"abort_description_placeholders": MappingProxyType({}),
|
||||
"next_step_result": None,
|
||||
}
|
||||
return self.__progress_step_data
|
||||
|
||||
@property
|
||||
def source(self) -> str | None:
|
||||
@@ -777,9 +789,10 @@ class FlowHandler(Generic[_FlowContextT, _FlowResultT, _HandlerT]):
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> _FlowResultT:
|
||||
"""Abort the flow."""
|
||||
progress_step_data = self._progress_step_data
|
||||
return self.async_abort(
|
||||
reason=self._progress_step_data["abort_reason"],
|
||||
description_placeholders=self._progress_step_data[
|
||||
reason=progress_step_data["abort_reason"],
|
||||
description_placeholders=progress_step_data[
|
||||
"abort_description_placeholders"
|
||||
],
|
||||
)
|
||||
@@ -795,14 +808,15 @@ class FlowHandler(Generic[_FlowContextT, _FlowResultT, _HandlerT]):
|
||||
without using async_show_progress_done.
|
||||
If no next step is set, abort the flow.
|
||||
"""
|
||||
if self._progress_step_data["next_step_result"] is None:
|
||||
progress_step_data = self._progress_step_data
|
||||
if (next_step_result := progress_step_data["next_step_result"]) is None:
|
||||
return self.async_abort(
|
||||
reason=self._progress_step_data["abort_reason"],
|
||||
description_placeholders=self._progress_step_data[
|
||||
reason=progress_step_data["abort_reason"],
|
||||
description_placeholders=progress_step_data[
|
||||
"abort_description_placeholders"
|
||||
],
|
||||
)
|
||||
return self._progress_step_data["next_step_result"]
|
||||
return next_step_result
|
||||
|
||||
@callback
|
||||
def async_external_step(
|
||||
@@ -1021,9 +1035,9 @@ def progress_step[
|
||||
self: FlowHandler[Any, ResultT], *args: P.args, **kwargs: P.kwargs
|
||||
) -> ResultT:
|
||||
step_id = func.__name__.replace("async_step_", "")
|
||||
|
||||
progress_step_data = self._progress_step_data
|
||||
# Check if we have a progress task running
|
||||
progress_task = self._progress_step_data["tasks"].get(step_id)
|
||||
progress_task = progress_step_data["tasks"].get(step_id)
|
||||
|
||||
if progress_task is None:
|
||||
# First call - create and start the progress task
|
||||
@@ -1031,30 +1045,30 @@ def progress_step[
|
||||
func(self, *args, **kwargs), # type: ignore[arg-type]
|
||||
f"Progress step {step_id}",
|
||||
)
|
||||
self._progress_step_data["tasks"][step_id] = progress_task
|
||||
progress_step_data["tasks"][step_id] = progress_task
|
||||
|
||||
if not progress_task.done():
|
||||
# Handle description placeholders
|
||||
placeholders = None
|
||||
if description_placeholders is not None:
|
||||
if callable(description_placeholders):
|
||||
placeholders = description_placeholders(self)
|
||||
else:
|
||||
placeholders = description_placeholders
|
||||
if not progress_task.done():
|
||||
# Handle description placeholders
|
||||
placeholders = None
|
||||
if description_placeholders is not None:
|
||||
if callable(description_placeholders):
|
||||
placeholders = description_placeholders(self)
|
||||
else:
|
||||
placeholders = description_placeholders
|
||||
|
||||
return self.async_show_progress(
|
||||
step_id=step_id,
|
||||
progress_action=step_id,
|
||||
progress_task=progress_task,
|
||||
description_placeholders=placeholders,
|
||||
)
|
||||
return self.async_show_progress(
|
||||
step_id=step_id,
|
||||
progress_action=step_id,
|
||||
progress_task=progress_task,
|
||||
description_placeholders=placeholders,
|
||||
)
|
||||
|
||||
# Task is done or this is a subsequent call
|
||||
try:
|
||||
self._progress_step_data["next_step_result"] = await progress_task
|
||||
progress_step_data["next_step_result"] = await progress_task
|
||||
except AbortFlow as err:
|
||||
self._progress_step_data["abort_reason"] = err.reason
|
||||
self._progress_step_data["abort_description_placeholders"] = (
|
||||
progress_step_data["abort_reason"] = err.reason
|
||||
progress_step_data["abort_description_placeholders"] = (
|
||||
err.description_placeholders or {}
|
||||
)
|
||||
return self.async_show_progress_done(
|
||||
@@ -1062,7 +1076,7 @@ def progress_step[
|
||||
)
|
||||
finally:
|
||||
# Clean up task reference
|
||||
self._progress_step_data["tasks"].pop(step_id, None)
|
||||
progress_step_data["tasks"].pop(step_id, None)
|
||||
|
||||
return self.async_show_progress_done(
|
||||
next_step_id="_progress_step_progress_done"
|
||||
|
||||
@@ -28,7 +28,7 @@ bluetooth-data-tools==1.28.4
|
||||
cached-ipaddress==1.0.1
|
||||
certifi>=2021.5.30
|
||||
ciso8601==2.3.3
|
||||
cronsim==2.6
|
||||
cronsim==2.7
|
||||
cryptography==46.0.2
|
||||
dbus-fast==2.44.5
|
||||
file-read-backwards==2.0.0
|
||||
@@ -40,7 +40,7 @@ hass-nabucasa==1.5.1
|
||||
hassil==3.4.0
|
||||
home-assistant-bluetooth==1.13.1
|
||||
home-assistant-frontend==20251105.0
|
||||
home-assistant-intents==2025.10.28
|
||||
home-assistant-intents==2025.11.7
|
||||
httpx==0.28.1
|
||||
ifaddr==0.2.0
|
||||
Jinja2==3.1.6
|
||||
|
||||
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "homeassistant"
|
||||
version = "2025.11.0"
|
||||
version = "2025.11.2"
|
||||
license = "Apache-2.0"
|
||||
license-files = ["LICENSE*", "homeassistant/backports/LICENSE*"]
|
||||
description = "Open-source home automation platform running on Python 3."
|
||||
@@ -44,7 +44,7 @@ dependencies = [
|
||||
"bcrypt==5.0.0",
|
||||
"certifi>=2021.5.30",
|
||||
"ciso8601==2.3.3",
|
||||
"cronsim==2.6",
|
||||
"cronsim==2.7",
|
||||
"fnv-hash-fast==1.6.0",
|
||||
# hass-nabucasa is imported by helpers which don't depend on the cloud
|
||||
# integration
|
||||
|
||||
requirements.txt (generated, 2 changes)
@@ -20,7 +20,7 @@ awesomeversion==25.8.0
|
||||
bcrypt==5.0.0
|
||||
certifi>=2021.5.30
|
||||
ciso8601==2.3.3
|
||||
cronsim==2.6
|
||||
cronsim==2.7
|
||||
fnv-hash-fast==1.6.0
|
||||
hass-nabucasa==1.5.1
|
||||
httpx==0.28.1
|
||||
|
||||
requirements_all.txt (generated, 32 changes)
@@ -179,7 +179,7 @@ aio-geojson-usgs-earthquakes==0.3
|
||||
aio-georss-gdacs==0.10
|
||||
|
||||
# homeassistant.components.onewire
|
||||
aio-ownet==0.0.4
|
||||
aio-ownet==0.0.5
|
||||
|
||||
# homeassistant.components.acaia
|
||||
aioacaia==0.1.17
|
||||
@@ -194,7 +194,7 @@ aioairzone-cloud==0.7.2
|
||||
aioairzone==1.0.2
|
||||
|
||||
# homeassistant.components.alexa_devices
|
||||
aioamazondevices==6.5.6
|
||||
aioamazondevices==8.0.1
|
||||
|
||||
# homeassistant.components.ambient_network
|
||||
# homeassistant.components.ambient_station
|
||||
@@ -761,7 +761,7 @@ cookidoo-api==0.14.0
|
||||
|
||||
# homeassistant.components.backup
|
||||
# homeassistant.components.utility_meter
|
||||
cronsim==2.6
|
||||
cronsim==2.7
|
||||
|
||||
# homeassistant.components.crownstone
|
||||
crownstone-cloud==1.4.11
|
||||
@@ -1204,7 +1204,7 @@ holidays==0.84
|
||||
home-assistant-frontend==20251105.0
|
||||
|
||||
# homeassistant.components.conversation
|
||||
home-assistant-intents==2025.10.28
|
||||
home-assistant-intents==2025.11.7
|
||||
|
||||
# homeassistant.components.homematicip_cloud
|
||||
homematicip==2.3.1
|
||||
@@ -1473,7 +1473,7 @@ microBeesPy==0.3.5
|
||||
mill-local==0.3.0
|
||||
|
||||
# homeassistant.components.mill
|
||||
millheater==0.14.0
|
||||
millheater==0.14.1
|
||||
|
||||
# homeassistant.components.minio
|
||||
minio==7.1.12
|
||||
@@ -1630,7 +1630,7 @@ omnilogic==0.4.5
|
||||
ondilo==0.5.0
|
||||
|
||||
# homeassistant.components.onedrive
|
||||
onedrive-personal-sdk==0.0.15
|
||||
onedrive-personal-sdk==0.0.16
|
||||
|
||||
# homeassistant.components.onvif
|
||||
onvif-zeep-async==4.0.4
|
||||
@@ -2284,7 +2284,7 @@ pyotp==2.9.0
|
||||
pyoverkiz==1.19.0
|
||||
|
||||
# homeassistant.components.palazzetti
|
||||
pypalazzetti==0.1.19
|
||||
pypalazzetti==0.1.20
|
||||
|
||||
# homeassistant.components.paperless_ngx
|
||||
pypaperless==4.1.1
|
||||
@@ -2308,7 +2308,7 @@ pyplaato==0.0.19
|
||||
pypoint==3.0.0
|
||||
|
||||
# homeassistant.components.portainer
|
||||
pyportainer==1.0.12
|
||||
pyportainer==1.0.14
|
||||
|
||||
# homeassistant.components.probe_plus
|
||||
pyprobeplus==1.1.2
|
||||
@@ -2405,7 +2405,7 @@ pysmappee==0.2.29
|
||||
pysmarlaapi==0.9.2
|
||||
|
||||
# homeassistant.components.smartthings
|
||||
pysmartthings==3.3.1
|
||||
pysmartthings==3.3.3
|
||||
|
||||
# homeassistant.components.smarty
|
||||
pysmarty2==0.10.3
|
||||
@@ -2462,7 +2462,7 @@ python-awair==0.2.4
|
||||
python-blockchain-api==0.0.2
|
||||
|
||||
# homeassistant.components.bsblan
|
||||
python-bsblan==3.1.0
|
||||
python-bsblan==3.1.1
|
||||
|
||||
# homeassistant.components.citybikes
|
||||
python-citybikes==0.3.3
|
||||
@@ -2537,7 +2537,7 @@ python-mpd2==3.1.1
|
||||
python-mystrom==2.5.0
|
||||
|
||||
# homeassistant.components.open_router
|
||||
python-open-router==0.3.2
|
||||
python-open-router==0.3.3
|
||||
|
||||
# homeassistant.components.swiss_public_transport
|
||||
python-opendata-transport==0.5.0
|
||||
@@ -2635,7 +2635,7 @@ pyvera==0.3.16
|
||||
pyversasense==0.0.6
|
||||
|
||||
# homeassistant.components.vesync
|
||||
pyvesync==3.1.4
|
||||
pyvesync==3.2.2
|
||||
|
||||
# homeassistant.components.vizio
|
||||
pyvizio==0.1.61
|
||||
@@ -2725,7 +2725,7 @@ renault-api==0.5.0
|
||||
renson-endura-delta==1.7.2
|
||||
|
||||
# homeassistant.components.reolink
|
||||
reolink-aio==0.16.4
|
||||
reolink-aio==0.16.5
|
||||
|
||||
# homeassistant.components.idteck_prox
|
||||
rfk101py==0.0.1
|
||||
@@ -3051,7 +3051,7 @@ ttls==1.8.3
|
||||
ttn_client==1.2.3
|
||||
|
||||
# homeassistant.components.tuya
|
||||
tuya-device-sharing-sdk==0.2.4
|
||||
tuya-device-sharing-sdk==0.2.5
|
||||
|
||||
# homeassistant.components.twentemilieu
|
||||
twentemilieu==2.2.1
|
||||
@@ -3201,7 +3201,7 @@ xbox-webapi==2.1.0
|
||||
xiaomi-ble==1.2.0
|
||||
|
||||
# homeassistant.components.knx
|
||||
xknx==3.10.0
|
||||
xknx==3.10.1
|
||||
|
||||
# homeassistant.components.knx
|
||||
xknxproject==3.8.2
|
||||
@@ -3262,7 +3262,7 @@ zeroconf==0.148.0
|
||||
zeversolar==0.3.2
|
||||
|
||||
# homeassistant.components.zha
|
||||
zha==0.0.77
|
||||
zha==0.0.79
|
||||
|
||||
# homeassistant.components.zhong_hong
|
||||
zhong-hong-hvac==1.0.13
|
||||
|
||||
requirements_test_all.txt (generated, 32 changes)
@@ -167,7 +167,7 @@ aio-geojson-usgs-earthquakes==0.3
|
||||
aio-georss-gdacs==0.10
|
||||
|
||||
# homeassistant.components.onewire
|
||||
aio-ownet==0.0.4
|
||||
aio-ownet==0.0.5
|
||||
|
||||
# homeassistant.components.acaia
|
||||
aioacaia==0.1.17
|
||||
@@ -182,7 +182,7 @@ aioairzone-cloud==0.7.2
|
||||
aioairzone==1.0.2
|
||||
|
||||
# homeassistant.components.alexa_devices
|
||||
aioamazondevices==6.5.6
|
||||
aioamazondevices==8.0.1
|
||||
|
||||
# homeassistant.components.ambient_network
|
||||
# homeassistant.components.ambient_station
|
||||
@@ -664,7 +664,7 @@ cookidoo-api==0.14.0
|
||||
|
||||
# homeassistant.components.backup
|
||||
# homeassistant.components.utility_meter
|
||||
cronsim==2.6
|
||||
cronsim==2.7
|
||||
|
||||
# homeassistant.components.crownstone
|
||||
crownstone-cloud==1.4.11
|
||||
@@ -1053,7 +1053,7 @@ holidays==0.84
|
||||
home-assistant-frontend==20251105.0
|
||||
|
||||
# homeassistant.components.conversation
|
||||
home-assistant-intents==2025.10.28
|
||||
home-assistant-intents==2025.11.7
|
||||
|
||||
# homeassistant.components.homematicip_cloud
|
||||
homematicip==2.3.1
|
||||
@@ -1268,7 +1268,7 @@ microBeesPy==0.3.5
|
||||
mill-local==0.3.0
|
||||
|
||||
# homeassistant.components.mill
|
||||
millheater==0.14.0
|
||||
millheater==0.14.1
|
||||
|
||||
# homeassistant.components.minio
|
||||
minio==7.1.12
|
||||
@@ -1401,7 +1401,7 @@ omnilogic==0.4.5
|
||||
ondilo==0.5.0
|
||||
|
||||
# homeassistant.components.onedrive
|
||||
onedrive-personal-sdk==0.0.15
|
||||
onedrive-personal-sdk==0.0.16
|
||||
|
||||
# homeassistant.components.onvif
|
||||
onvif-zeep-async==4.0.4
|
||||
@@ -1914,7 +1914,7 @@ pyotp==2.9.0
|
||||
pyoverkiz==1.19.0
|
||||
|
||||
# homeassistant.components.palazzetti
|
||||
pypalazzetti==0.1.19
|
||||
pypalazzetti==0.1.20
|
||||
|
||||
# homeassistant.components.paperless_ngx
|
||||
pypaperless==4.1.1
|
||||
@@ -1935,7 +1935,7 @@ pyplaato==0.0.19
|
||||
pypoint==3.0.0
|
||||
|
||||
# homeassistant.components.portainer
|
||||
pyportainer==1.0.12
|
||||
pyportainer==1.0.14
|
||||
|
||||
# homeassistant.components.probe_plus
|
||||
pyprobeplus==1.1.2
|
||||
@@ -2011,7 +2011,7 @@ pysmappee==0.2.29
|
||||
pysmarlaapi==0.9.2
|
||||
|
||||
# homeassistant.components.smartthings
|
||||
pysmartthings==3.3.1
|
||||
pysmartthings==3.3.3
|
||||
|
||||
# homeassistant.components.smarty
|
||||
pysmarty2==0.10.3
|
||||
@@ -2062,7 +2062,7 @@ python-MotionMount==2.3.0
|
||||
python-awair==0.2.4
|
||||
|
||||
# homeassistant.components.bsblan
|
||||
python-bsblan==3.1.0
|
||||
python-bsblan==3.1.1
|
||||
|
||||
# homeassistant.components.ecobee
|
||||
python-ecobee-api==0.2.20
|
||||
@@ -2110,7 +2110,7 @@ python-mpd2==3.1.1
|
||||
python-mystrom==2.5.0
|
||||
|
||||
# homeassistant.components.open_router
|
||||
python-open-router==0.3.2
|
||||
python-open-router==0.3.3
|
||||
|
||||
# homeassistant.components.swiss_public_transport
|
||||
python-opendata-transport==0.5.0
|
||||
@@ -2193,7 +2193,7 @@ pyuptimerobot==22.2.0
|
||||
pyvera==0.3.16
|
||||
|
||||
# homeassistant.components.vesync
|
||||
pyvesync==3.1.4
|
||||
pyvesync==3.2.2
|
||||
|
||||
# homeassistant.components.vizio
|
||||
pyvizio==0.1.61
|
||||
@@ -2271,7 +2271,7 @@ renault-api==0.5.0
|
||||
renson-endura-delta==1.7.2
|
||||
|
||||
# homeassistant.components.reolink
|
||||
reolink-aio==0.16.4
|
||||
reolink-aio==0.16.5
|
||||
|
||||
# homeassistant.components.rflink
|
||||
rflink==0.0.67
|
||||
@@ -2528,7 +2528,7 @@ ttls==1.8.3
|
||||
ttn_client==1.2.3
|
||||
|
||||
# homeassistant.components.tuya
|
||||
tuya-device-sharing-sdk==0.2.4
|
||||
tuya-device-sharing-sdk==0.2.5
|
||||
|
||||
# homeassistant.components.twentemilieu
|
||||
twentemilieu==2.2.1
|
||||
@@ -2657,7 +2657,7 @@ xbox-webapi==2.1.0
|
||||
xiaomi-ble==1.2.0
|
||||
|
||||
# homeassistant.components.knx
|
||||
xknx==3.10.0
|
||||
xknx==3.10.1
|
||||
|
||||
# homeassistant.components.knx
|
||||
xknxproject==3.8.2
|
||||
@@ -2709,7 +2709,7 @@ zeroconf==0.148.0
|
||||
zeversolar==0.3.2
|
||||
|
||||
# homeassistant.components.zha
|
||||
zha==0.0.77
|
||||
zha==0.0.79
|
||||
|
||||
# homeassistant.components.zwave_js
|
||||
zwave-js-server-python==0.67.1
|
||||
|
||||
script/hassfest/docker/Dockerfile (generated, 2 changes)
@@ -32,7 +32,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.9.6,source=/uv,target=/bin/uv \
|
||||
go2rtc-client==0.2.1 \
|
||||
ha-ffmpeg==3.2.2 \
|
||||
hassil==3.4.0 \
|
||||
home-assistant-intents==2025.10.28 \
|
||||
home-assistant-intents==2025.11.7 \
|
||||
mutagen==1.47.0 \
|
||||
pymicro-vad==1.0.1 \
|
||||
pyspeex-noise==1.0.2
|
||||
|
||||
@@ -3,6 +3,8 @@
|
||||
import asyncio
|
||||
from unittest.mock import patch
|
||||
|
||||
import voluptuous_serialize
|
||||
|
||||
from homeassistant import data_entry_flow
|
||||
from homeassistant.auth import auth_manager_from_config, models as auth_models
|
||||
from homeassistant.auth.mfa_modules import auth_mfa_module_from_config
|
||||
@@ -248,6 +250,30 @@ async def test_setup_user_notify_service(hass: HomeAssistant) -> None:
|
||||
assert step["step_id"] == "init"
|
||||
schema = step["data_schema"]
|
||||
schema({"notify_service": "test2"})
|
||||
# ensure the schema can be serialized
|
||||
assert voluptuous_serialize.convert(schema) == [
|
||||
{
|
||||
"name": "notify_service",
|
||||
"options": [
|
||||
(
|
||||
"test1",
|
||||
"test1",
|
||||
),
|
||||
(
|
||||
"test2",
|
||||
"test2",
|
||||
),
|
||||
],
|
||||
"required": True,
|
||||
"type": "select",
|
||||
},
|
||||
{
|
||||
"name": "target",
|
||||
"optional": True,
|
||||
"required": False,
|
||||
"type": "string",
|
||||
},
|
||||
]
|
||||
|
||||
with patch("pyotp.HOTP.at", return_value=MOCK_CODE):
|
||||
step = await flow.async_step_init({"notify_service": "test1"})
|
||||
|
||||
@@ -133,6 +133,24 @@ async def test_changing_password(data: hass_auth.Data) -> None:
|
||||
data.validate_login("test-UsEr", "new-pass")
|
||||
|
||||
|
||||
async def test_password_truncated(data: hass_auth.Data) -> None:
|
||||
"""Test long passwords are truncated before they are send to bcrypt for hashing.
|
||||
|
||||
With bcrypt 5.0 passing a password longer than 72 bytes raises a ValueError.
|
||||
Previously the password was silently truncated.
|
||||
https://github.com/pyca/bcrypt/pull/1000
|
||||
"""
|
||||
pwd_truncated = "hWwjDpFiYtDTaaMbXdjzeuKAPI3G4Di2mC92" * 4 # 72 chars
|
||||
long_pwd = pwd_truncated * 2 # 144 chars
data.add_auth("test-user", long_pwd)
|
||||
data.validate_login("test-user", long_pwd)
|
||||
|
||||
# As passwords are truncated, login will technically work with only the first 72 bytes.
data.validate_login("test-user", pwd_truncated)
with pytest.raises(hass_auth.InvalidAuth):
data.validate_login("test-user", pwd_truncated[:71])
async def test_login_flow_validates(data: hass_auth.Data, hass: HomeAssistant) -> None:
|
||||
"""Test login flow."""
|
||||
data.add_auth("test-user", "test-pass")
|
||||
|
||||
@@ -135,6 +135,27 @@ WAVE_ENHANCE_SERVICE_INFO = BluetoothServiceInfoBleak(
|
||||
tx_power=0,
|
||||
)
|
||||
|
||||
CORENTIUM_HOME_2_SERVICE_INFO = BluetoothServiceInfoBleak(
|
||||
name="cc-cc-cc-cc-cc-cc",
|
||||
address="cc:cc:cc:cc:cc:cc",
|
||||
device=generate_ble_device(
|
||||
address="cc:cc:cc:cc:cc:cc",
|
||||
name="Airthings Corentium Home 2",
|
||||
),
|
||||
rssi=-61,
|
||||
manufacturer_data={820: b"\xe4/\xa5\xae\t\x00"},
|
||||
service_data={},
|
||||
service_uuids=[],
|
||||
source="local",
|
||||
advertisement=generate_advertisement_data(
|
||||
manufacturer_data={820: b"\xe4/\xa5\xae\t\x00"},
|
||||
service_uuids=[],
|
||||
),
|
||||
connectable=True,
|
||||
time=0,
|
||||
tx_power=0,
|
||||
)
|
||||
|
||||
VIEW_PLUS_SERVICE_INFO = BluetoothServiceInfoBleak(
|
||||
name="cc-cc-cc-cc-cc-cc",
|
||||
address="cc:cc:cc:cc:cc:cc",
|
||||
@@ -265,6 +286,24 @@ WAVE_ENHANCE_DEVICE_INFO = AirthingsDevice(
|
||||
address="cc:cc:cc:cc:cc:cc",
|
||||
)
|
||||
|
||||
CORENTIUM_HOME_2_DEVICE_INFO = AirthingsDevice(
|
||||
manufacturer="Airthings AS",
|
||||
hw_version="REV X",
|
||||
sw_version="R-SUB-1.3.4-master+0",
|
||||
model=AirthingsDeviceType.CORENTIUM_HOME_2,
|
||||
name="Airthings Corentium Home 2",
|
||||
identifier="123456",
|
||||
sensors={
|
||||
"connectivity_mode": "Bluetooth",
|
||||
"battery": 90,
|
||||
"temperature": 20.0,
|
||||
"humidity": 55.0,
|
||||
"radon_1day_avg": 45,
|
||||
"radon_1day_level": "low",
|
||||
},
|
||||
address="cc:cc:cc:cc:cc:cc",
|
||||
)
|
||||
|
||||
TEMPERATURE_V1 = MockEntity(
|
||||
unique_id="Airthings Wave Plus 123456_temperature",
|
||||
name="Airthings Wave Plus 123456 Temperature",
|
||||
|
||||
@@ -7,7 +7,7 @@ from bleak import BleakError
|
||||
from home_assistant_bluetooth import BluetoothServiceInfoBleak
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.airthings_ble.const import DOMAIN
|
||||
from homeassistant.components.airthings_ble.const import DEVICE_MODEL, DOMAIN
|
||||
from homeassistant.config_entries import SOURCE_BLUETOOTH, SOURCE_IGNORE, SOURCE_USER
|
||||
from homeassistant.const import CONF_ADDRESS
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -29,12 +29,13 @@ from tests.common import MockConfigEntry
|
||||
|
||||
async def test_bluetooth_discovery(hass: HomeAssistant) -> None:
|
||||
"""Test discovery via bluetooth with a valid device."""
|
||||
wave_plus_device = AirthingsDeviceType.WAVE_PLUS
|
||||
with (
|
||||
patch_async_ble_device_from_address(WAVE_SERVICE_INFO),
|
||||
patch_airthings_ble(
|
||||
AirthingsDevice(
|
||||
manufacturer="Airthings AS",
|
||||
model=AirthingsDeviceType.WAVE_PLUS,
|
||||
model=wave_plus_device,
|
||||
name="Airthings Wave Plus",
|
||||
identifier="123456",
|
||||
)
|
||||
@@ -60,6 +61,8 @@ async def test_bluetooth_discovery(hass: HomeAssistant) -> None:
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert result["title"] == "Airthings Wave Plus (2930123456)"
|
||||
assert result["result"].unique_id == "cc:cc:cc:cc:cc:cc"
|
||||
assert result["data"] == {DEVICE_MODEL: wave_plus_device.value}
|
||||
assert result["result"].data == {DEVICE_MODEL: wave_plus_device.value}
|
||||
|
||||
|
||||
async def test_bluetooth_discovery_no_BLEDevice(hass: HomeAssistant) -> None:
|
||||
@@ -118,6 +121,7 @@ async def test_bluetooth_discovery_already_setup(hass: HomeAssistant) -> None:
|
||||
|
||||
async def test_user_setup(hass: HomeAssistant) -> None:
|
||||
"""Test the user initiated form."""
|
||||
wave_plus_device = AirthingsDeviceType.WAVE_PLUS
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.airthings_ble.config_flow.async_discovered_service_info",
|
||||
@@ -127,7 +131,7 @@ async def test_user_setup(hass: HomeAssistant) -> None:
|
||||
patch_airthings_ble(
|
||||
AirthingsDevice(
|
||||
manufacturer="Airthings AS",
|
||||
model=AirthingsDeviceType.WAVE_PLUS,
|
||||
model=wave_plus_device,
|
||||
name="Airthings Wave Plus",
|
||||
identifier="123456",
|
||||
)
|
||||
@@ -158,6 +162,8 @@ async def test_user_setup(hass: HomeAssistant) -> None:
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert result["title"] == "Airthings Wave Plus (2930123456)"
|
||||
assert result["result"].unique_id == "cc:cc:cc:cc:cc:cc"
|
||||
assert result["data"] == {DEVICE_MODEL: wave_plus_device.value}
|
||||
assert result["result"].data == {DEVICE_MODEL: wave_plus_device.value}
|
||||
|
||||
|
||||
async def test_user_setup_replaces_ignored_device(hass: HomeAssistant) -> None:
|
||||
@@ -168,6 +174,7 @@ async def test_user_setup_replaces_ignored_device(hass: HomeAssistant) -> None:
|
||||
source=SOURCE_IGNORE,
|
||||
)
|
||||
entry.add_to_hass(hass)
|
||||
wave_plus_device = AirthingsDeviceType.WAVE_PLUS
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.airthings_ble.config_flow.async_discovered_service_info",
|
||||
@@ -177,7 +184,7 @@ async def test_user_setup_replaces_ignored_device(hass: HomeAssistant) -> None:
|
||||
patch_airthings_ble(
|
||||
AirthingsDevice(
|
||||
manufacturer="Airthings AS",
|
||||
model=AirthingsDeviceType.WAVE_PLUS,
|
||||
model=wave_plus_device,
|
||||
name="Airthings Wave Plus",
|
||||
identifier="123456",
|
||||
)
|
||||
@@ -208,6 +215,8 @@ async def test_user_setup_replaces_ignored_device(hass: HomeAssistant) -> None:
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert result["title"] == "Airthings Wave Plus (2930123456)"
|
||||
assert result["result"].unique_id == "cc:cc:cc:cc:cc:cc"
|
||||
assert result["data"] == {DEVICE_MODEL: wave_plus_device.value}
|
||||
assert result["result"].data == {DEVICE_MODEL: wave_plus_device.value}
|
||||
|
||||
|
||||
async def test_user_setup_no_device(hass: HomeAssistant) -> None:
|
||||
|
||||
tests/components/airthings_ble/test_init.py (new file, 192 lines)
@@ -0,0 +1,192 @@
|
||||
"""Test the Airthings BLE integration init."""
|
||||
|
||||
from copy import deepcopy
|
||||
|
||||
from airthings_ble import AirthingsDeviceType
|
||||
from freezegun.api import FrozenDateTimeFactory
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.airthings_ble.const import (
|
||||
DEFAULT_SCAN_INTERVAL,
|
||||
DEVICE_MODEL,
|
||||
DEVICE_SPECIFIC_SCAN_INTERVAL,
|
||||
DOMAIN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from . import (
|
||||
CORENTIUM_HOME_2_DEVICE_INFO,
|
||||
CORENTIUM_HOME_2_SERVICE_INFO,
|
||||
WAVE_DEVICE_INFO,
|
||||
WAVE_ENHANCE_DEVICE_INFO,
|
||||
WAVE_ENHANCE_SERVICE_INFO,
|
||||
WAVE_SERVICE_INFO,
|
||||
patch_airthings_ble,
|
||||
patch_async_ble_device_from_address,
|
||||
)
|
||||
|
||||
from tests.common import MockConfigEntry, async_fire_time_changed
|
||||
from tests.components.bluetooth import inject_bluetooth_service_info
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("service_info", "device_info"),
|
||||
[
|
||||
(WAVE_SERVICE_INFO, WAVE_DEVICE_INFO),
|
||||
(WAVE_ENHANCE_SERVICE_INFO, WAVE_ENHANCE_DEVICE_INFO),
|
||||
(CORENTIUM_HOME_2_SERVICE_INFO, CORENTIUM_HOME_2_DEVICE_INFO),
|
||||
],
|
||||
)
|
||||
async def test_migration_existing_entries(
|
||||
hass: HomeAssistant,
|
||||
service_info,
|
||||
device_info,
|
||||
) -> None:
|
||||
"""Test migration of existing config entry without device model."""
|
||||
entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
unique_id=service_info.address,
|
||||
data={},
|
||||
)
|
||||
entry.add_to_hass(hass)
|
||||
|
||||
inject_bluetooth_service_info(hass, service_info)
|
||||
|
||||
assert DEVICE_MODEL not in entry.data
|
||||
|
||||
with (
|
||||
patch_async_ble_device_from_address(service_info.device),
|
||||
patch_airthings_ble(device_info),
|
||||
):
|
||||
await hass.config_entries.async_setup(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Migration should have added device_model to entry data
|
||||
assert DEVICE_MODEL in entry.data
|
||||
assert entry.data[DEVICE_MODEL] == device_info.model.value
|
||||
|
||||
|
||||
async def test_no_migration_when_device_model_exists(
|
||||
hass: HomeAssistant,
|
||||
) -> None:
|
||||
"""Test that migration does not run when device_model already exists."""
|
||||
entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
unique_id=WAVE_SERVICE_INFO.address,
|
||||
data={DEVICE_MODEL: WAVE_DEVICE_INFO.model.value},
|
||||
)
|
||||
entry.add_to_hass(hass)
|
||||
|
||||
inject_bluetooth_service_info(hass, WAVE_SERVICE_INFO)
|
||||
|
||||
with (
|
||||
patch_async_ble_device_from_address(WAVE_SERVICE_INFO.device),
|
||||
patch_airthings_ble(WAVE_DEVICE_INFO) as mock_update,
|
||||
):
|
||||
await hass.config_entries.async_setup(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Should have only 1 call for initial refresh (no migration call)
|
||||
assert mock_update.call_count == 1
|
||||
assert entry.data[DEVICE_MODEL] == WAVE_DEVICE_INFO.model.value
|
||||
|
||||
|
||||
async def test_scan_interval_corentium_home_2(
|
||||
hass: HomeAssistant, freezer: FrozenDateTimeFactory
|
||||
) -> None:
|
||||
"""Test that coordinator uses radon scan interval for Corentium Home 2."""
|
||||
entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
unique_id=WAVE_SERVICE_INFO.address,
|
||||
data={DEVICE_MODEL: CORENTIUM_HOME_2_DEVICE_INFO.model.value},
|
||||
)
|
||||
entry.add_to_hass(hass)
|
||||
|
||||
inject_bluetooth_service_info(hass, WAVE_SERVICE_INFO)
|
||||
|
||||
with (
|
||||
patch_async_ble_device_from_address(WAVE_SERVICE_INFO.device),
|
||||
patch_airthings_ble(CORENTIUM_HOME_2_DEVICE_INFO),
|
||||
):
|
||||
await hass.config_entries.async_setup(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert (
|
||||
hass.states.get("sensor.airthings_corentium_home_2_123456_battery").state
|
||||
== "90"
|
||||
)
|
||||
|
||||
changed_info = deepcopy(CORENTIUM_HOME_2_DEVICE_INFO)
|
||||
changed_info.sensors["battery"] = 89
|
||||
|
||||
with patch_airthings_ble(changed_info):
|
||||
freezer.tick(DEFAULT_SCAN_INTERVAL)
|
||||
async_fire_time_changed(hass)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert (
|
||||
hass.states.get("sensor.airthings_corentium_home_2_123456_battery").state
|
||||
== "90"
|
||||
)
|
||||
|
||||
freezer.tick(
|
||||
DEVICE_SPECIFIC_SCAN_INTERVAL.get(
|
||||
AirthingsDeviceType.CORENTIUM_HOME_2.value
|
||||
)
|
||||
- DEFAULT_SCAN_INTERVAL
|
||||
)
|
||||
async_fire_time_changed(hass)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert (
|
||||
hass.states.get("sensor.airthings_corentium_home_2_123456_battery").state
|
||||
== "89"
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("service_info", "device_info", "battery_entity_id"),
|
||||
[
|
||||
(WAVE_SERVICE_INFO, WAVE_DEVICE_INFO, "sensor.airthings_wave_123456_battery"),
|
||||
(
|
||||
WAVE_ENHANCE_SERVICE_INFO,
|
||||
WAVE_ENHANCE_DEVICE_INFO,
|
||||
"sensor.airthings_wave_enhance_123456_battery",
|
||||
),
|
||||
],
|
||||
)
|
||||
async def test_coordinator_default_scan_interval(
|
||||
hass: HomeAssistant,
|
||||
service_info,
|
||||
device_info,
|
||||
freezer: FrozenDateTimeFactory,
|
||||
battery_entity_id: str,
|
||||
) -> None:
|
||||
"""Test that coordinator uses default scan interval."""
|
||||
entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
unique_id=service_info.address,
|
||||
data={DEVICE_MODEL: device_info.model.value},
|
||||
)
|
||||
entry.add_to_hass(hass)
|
||||
|
||||
inject_bluetooth_service_info(hass, service_info)
|
||||
|
||||
with (
|
||||
patch_async_ble_device_from_address(service_info.device),
|
||||
patch_airthings_ble(device_info),
|
||||
):
|
||||
await hass.config_entries.async_setup(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get(battery_entity_id).state == "85"
|
||||
|
||||
changed_info = deepcopy(device_info)
|
||||
changed_info.sensors["battery"] = 84
|
||||
|
||||
with patch_airthings_ble(changed_info):
|
||||
freezer.tick(DEFAULT_SCAN_INTERVAL)
|
||||
async_fire_time_changed(hass)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get(battery_entity_id).state == "84"
|
||||
@@ -1,10 +1,17 @@
|
||||
"""Test the Airthings Wave sensor."""
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from freezegun.api import FrozenDateTimeFactory
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.airthings_ble.const import DOMAIN
|
||||
from homeassistant.components.airthings_ble.const import (
|
||||
DEFAULT_SCAN_INTERVAL,
|
||||
DEVICE_MODEL,
|
||||
DEVICE_SPECIFIC_SCAN_INTERVAL,
|
||||
DOMAIN,
|
||||
)
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
@@ -12,6 +19,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
from . import (
|
||||
CO2_V1,
|
||||
CO2_V2,
|
||||
CORENTIUM_HOME_2_DEVICE_INFO,
|
||||
HUMIDITY_V2,
|
||||
TEMPERATURE_V1,
|
||||
VOC_V1,
|
||||
@@ -21,6 +29,8 @@ from . import (
|
||||
WAVE_ENHANCE_DEVICE_INFO,
|
||||
WAVE_ENHANCE_SERVICE_INFO,
|
||||
WAVE_SERVICE_INFO,
|
||||
AirthingsDevice,
|
||||
BluetoothServiceInfoBleak,
|
||||
create_device,
|
||||
create_entry,
|
||||
patch_airthings_ble,
|
||||
@@ -29,6 +39,7 @@ from . import (
|
||||
patch_async_discovered_service_info,
|
||||
)
|
||||
|
||||
from tests.common import MockConfigEntry, async_fire_time_changed
|
||||
from tests.components.bluetooth import inject_bluetooth_service_info
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -267,3 +278,102 @@ async def test_translation_keys(
|
||||
|
||||
expected_name = f"Airthings Wave Enhance (123456) {expected_sensor_name}"
|
||||
assert state.attributes.get("friendly_name") == expected_name
|
||||
|
||||
|
||||
async def test_scan_interval_migration_corentium_home_2(
|
||||
hass: HomeAssistant,
|
||||
freezer: FrozenDateTimeFactory,
|
||||
) -> None:
|
||||
"""Test that radon device migration uses 30-minute scan interval."""
|
||||
entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
unique_id=WAVE_SERVICE_INFO.address,
|
||||
data={},
|
||||
)
|
||||
entry.add_to_hass(hass)
|
||||
|
||||
inject_bluetooth_service_info(hass, WAVE_SERVICE_INFO)
|
||||
|
||||
with (
|
||||
patch_async_ble_device_from_address(WAVE_SERVICE_INFO.device),
|
||||
patch_airthings_ble(CORENTIUM_HOME_2_DEVICE_INFO) as mock_update,
|
||||
):
|
||||
await hass.config_entries.async_setup(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Migration should have added device_model to entry data
|
||||
assert DEVICE_MODEL in entry.data
|
||||
assert entry.data[DEVICE_MODEL] == CORENTIUM_HOME_2_DEVICE_INFO.model.value
|
||||
|
||||
# Coordinator should have been configured with radon scan interval
|
||||
coordinator = entry.runtime_data
|
||||
assert coordinator.update_interval == timedelta(
|
||||
seconds=DEVICE_SPECIFIC_SCAN_INTERVAL.get(
|
||||
CORENTIUM_HOME_2_DEVICE_INFO.model.value
|
||||
)
|
||||
)
|
||||
|
||||
# Should have 2 calls: 1 for migration + 1 for initial refresh
|
||||
assert mock_update.call_count == 2
|
||||
|
||||
# Fast forward by default interval (300s) - should NOT trigger update
|
||||
freezer.tick(DEFAULT_SCAN_INTERVAL)
|
||||
async_fire_time_changed(hass)
|
||||
await hass.async_block_till_done()
|
||||
assert mock_update.call_count == 2
|
||||
|
||||
# Fast forward to radon interval (1800s) - should trigger update
|
||||
freezer.tick(
|
||||
DEVICE_SPECIFIC_SCAN_INTERVAL.get(CORENTIUM_HOME_2_DEVICE_INFO.model.value)
|
||||
)
|
||||
async_fire_time_changed(hass)
|
||||
await hass.async_block_till_done()
|
||||
assert mock_update.call_count == 3
|
||||
|
||||
|
||||


@pytest.mark.parametrize(
    ("service_info", "device_info"),
    [
        (WAVE_SERVICE_INFO, WAVE_DEVICE_INFO),
        (WAVE_ENHANCE_SERVICE_INFO, WAVE_ENHANCE_DEVICE_INFO),
    ],
)
async def test_default_scan_interval_migration(
    hass: HomeAssistant,
    freezer: FrozenDateTimeFactory,
    service_info: BluetoothServiceInfoBleak,
    device_info: AirthingsDevice,
) -> None:
    """Test that non-radon device migration uses default 5-minute scan interval."""
    entry = MockConfigEntry(
        domain=DOMAIN,
        unique_id=service_info.address,
        data={},
    )
    entry.add_to_hass(hass)

    inject_bluetooth_service_info(hass, service_info)

    with (
        patch_async_ble_device_from_address(service_info.device),
        patch_airthings_ble(device_info) as mock_update,
    ):
        await hass.config_entries.async_setup(entry.entry_id)
        await hass.async_block_till_done()

    # Migration should have added device_model to entry data
    assert DEVICE_MODEL in entry.data
    assert entry.data[DEVICE_MODEL] == device_info.model.value

    # Coordinator should have been configured with default scan interval
    coordinator = entry.runtime_data
    assert coordinator.update_interval == timedelta(seconds=DEFAULT_SCAN_INTERVAL)

    # Should have 2 calls: 1 for migration + 1 for initial refresh
    assert mock_update.call_count == 2

    # Fast forward by default interval (300s) - SHOULD trigger update
    freezer.tick(DEFAULT_SCAN_INTERVAL)
    async_fire_time_changed(hass)
    await hass.async_block_till_done()
    assert mock_update.call_count == 3

@@ -4,7 +4,7 @@ from collections.abc import Generator
from copy import deepcopy
from unittest.mock import AsyncMock, patch

from aioamazondevices.const import DEVICE_TYPE_TO_MODEL
from aioamazondevices.const.devices import DEVICE_TYPE_TO_MODEL
import pytest

from homeassistant.components.alexa_devices.const import (

@@ -2,12 +2,12 @@

from datetime import UTC, datetime

from aioamazondevices.api import AmazonDevice, AmazonDeviceSensor, AmazonSchedule
from aioamazondevices.const import (
from aioamazondevices.const.schedules import (
    NOTIFICATION_ALARM,
    NOTIFICATION_REMINDER,
    NOTIFICATION_TIMER,
)
from aioamazondevices.structures import AmazonDevice, AmazonDeviceSensor, AmazonSchedule

TEST_CODE = "023123"
TEST_PASSWORD = "fake_password"

@@ -14,11 +14,11 @@
    'online': True,
    'sensors': dict({
      'dnd': dict({
        '__type': "<class 'aioamazondevices.api.AmazonDeviceSensor'>",
        '__type': "<class 'aioamazondevices.structures.AmazonDeviceSensor'>",
        'repr': "AmazonDeviceSensor(name='dnd', value=False, error=False, error_type=None, error_msg=None, scale=None)",
      }),
      'temperature': dict({
        '__type': "<class 'aioamazondevices.api.AmazonDeviceSensor'>",
        '__type': "<class 'aioamazondevices.structures.AmazonDeviceSensor'>",
        'repr': "AmazonDeviceSensor(name='temperature', value='22.5', error=False, error_type=None, error_msg=None, scale='CELSIUS')",
      }),
    }),
@@ -44,11 +44,11 @@
    'online': True,
    'sensors': dict({
      'dnd': dict({
        '__type': "<class 'aioamazondevices.api.AmazonDeviceSensor'>",
        '__type': "<class 'aioamazondevices.structures.AmazonDeviceSensor'>",
        'repr': "AmazonDeviceSensor(name='dnd', value=False, error=False, error_type=None, error_msg=None, scale=None)",
      }),
      'temperature': dict({
        '__type': "<class 'aioamazondevices.api.AmazonDeviceSensor'>",
        '__type': "<class 'aioamazondevices.structures.AmazonDeviceSensor'>",
        'repr': "AmazonDeviceSensor(name='temperature', value='22.5', error=False, error_type=None, error_msg=None, scale='CELSIUS')",
      }),
    }),

@@ -2,7 +2,7 @@

from unittest.mock import AsyncMock

from aioamazondevices.const import SPEAKER_GROUP_FAMILY, SPEAKER_GROUP_MODEL
from aioamazondevices.const.devices import SPEAKER_GROUP_FAMILY, SPEAKER_GROUP_MODEL
from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData
import pytest


@@ -127,6 +127,7 @@ async def test_attribute(hass: HomeAssistant, service, attribute) -> None:
        (SERVICE_SET_SWING_MODE, ATTR_SWING_MODE),
        (SERVICE_SET_SWING_HORIZONTAL_MODE, ATTR_SWING_HORIZONTAL_MODE),
        (SERVICE_SET_FAN_MODE, ATTR_FAN_MODE),
        (SERVICE_SET_TEMPERATURE, ATTR_TEMPERATURE),
    ],
)
async def test_attribute_with_none(hass: HomeAssistant, service, attribute) -> None:

@@ -3,7 +3,7 @@
from collections.abc import Generator
from http import HTTPStatus
from typing import Any
from unittest.mock import ANY, AsyncMock, patch
from unittest.mock import ANY, AsyncMock, Mock, patch

from aiohttp.test_utils import TestClient
from freezegun.api import FrozenDateTimeFactory
@@ -1002,6 +1002,36 @@ async def test_get_progress_subscribe(
    }


async def test_get_progress_subscribe_create_entry(hass: HomeAssistant) -> None:
    """Test flows creating entry immediately don't trigger subscription notification."""
    assert await async_setup_component(hass, "config", {})
    mock_platform(hass, "test.config_flow", None)

    mock_integration(
        hass, MockModule("test", async_setup_entry=AsyncMock(return_value=True))
    )

    class TestFlow(core_ce.ConfigFlow):
        VERSION = 1

        async def async_step_import(
            self, user_input: dict[str, Any]
        ) -> ConfigFlowResult:
            """Handle import - creates entry immediately."""
            return self.async_create_entry(title="Test", data={})

    subscription_mock = Mock()
    hass.config_entries.flow.async_subscribe_flow(subscription_mock)

    with mock_config_flow("test", TestFlow):
        result = await hass.config_entries.flow.async_init(
            "test", context={"source": core_ce.SOURCE_IMPORT}, data={}
        )

    assert result["type"] == FlowResultType.CREATE_ENTRY
    assert len(subscription_mock.mock_calls) == 0


async def test_get_progress_subscribe_in_progress(
    hass: HomeAssistant, hass_ws_client: WebSocketGenerator
) -> None:

@@ -79,7 +79,7 @@ def setup_mock_foscam_camera(mock_foscam_camera):
            0,
            {
                "swCapabilities1": "100",
                "swCapabilities2": "768",
                "swCapabilities2": "896",
                "swCapabilities3": "100",
                "swCapabilities4": "100",
            },

@@ -9,8 +9,14 @@ import growattServer
import pytest
from syrupy.assertion import SnapshotAssertion

from homeassistant.components.growatt_server.const import DOMAIN
from homeassistant.components.growatt_server.const import (
    AUTH_API_TOKEN,
    AUTH_PASSWORD,
    CONF_AUTH_TYPE,
    DOMAIN,
)
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import CONF_PASSWORD, CONF_TOKEN, CONF_URL, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr

@@ -174,3 +180,79 @@ async def test_multiple_devices_discovered(
    assert device1 == snapshot(name="device_min123456")
    assert device2 is not None
    assert device2 == snapshot(name="device_min789012")


async def test_migrate_legacy_api_token_config(
    hass: HomeAssistant,
    mock_growatt_v1_api,
) -> None:
    """Test migration of legacy config entry with API token but no auth_type."""
    # Create a legacy config entry without CONF_AUTH_TYPE
    legacy_config = {
        CONF_TOKEN: "test_token_123",
        CONF_URL: "https://openapi.growatt.com/",
        "plant_id": "plant_123",
    }
    mock_config_entry = MockConfigEntry(
        domain=DOMAIN,
        data=legacy_config,
        unique_id="plant_123",
    )

    await setup_integration(hass, mock_config_entry)

    # Verify migration occurred and auth_type was added
    assert mock_config_entry.data[CONF_AUTH_TYPE] == AUTH_API_TOKEN
    assert mock_config_entry.state is ConfigEntryState.LOADED


async def test_migrate_legacy_password_config(
    hass: HomeAssistant,
    mock_growatt_classic_api,
) -> None:
    """Test migration of legacy config entry with password auth but no auth_type."""
    # Create a legacy config entry without CONF_AUTH_TYPE
    legacy_config = {
        CONF_USERNAME: "test_user",
        CONF_PASSWORD: "test_password",
        CONF_URL: "https://server.growatt.com/",
        "plant_id": "plant_456",
    }
    mock_config_entry = MockConfigEntry(
        domain=DOMAIN,
        data=legacy_config,
        unique_id="plant_456",
    )

    # Classic API doesn't support MIN devices - use TLX device instead
    mock_growatt_classic_api.device_list.return_value = [
        {"deviceSn": "TLX123456", "deviceType": "tlx"}
    ]

    await setup_integration(hass, mock_config_entry)

    # Verify migration occurred and auth_type was added
    assert mock_config_entry.data[CONF_AUTH_TYPE] == AUTH_PASSWORD
    assert mock_config_entry.state is ConfigEntryState.LOADED


async def test_migrate_legacy_config_no_auth_fields(
    hass: HomeAssistant,
) -> None:
    """Test that config entry with no recognizable auth fields raises error."""
    # Create a config entry without any auth fields
    invalid_config = {
        CONF_URL: "https://openapi.growatt.com/",
        "plant_id": "plant_789",
    }
    mock_config_entry = MockConfigEntry(
        domain=DOMAIN,
        data=invalid_config,
        unique_id="plant_789",
    )

    await setup_integration(hass, mock_config_entry)

    # The ConfigEntryError is caught by the config entry system
    # and the entry state is set to SETUP_ERROR
    assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR
Some files were not shown because too many files have changed in this diff.