Mirror of https://github.com/home-assistant/core.git
Synced 2025-11-05 17:09:32 +00:00

Compare commits: 43 commits, dev_target...disable_ht
| SHA1 |
|---|
| 3c415662cb |
| 9f595a94fb |
| 5dc215a143 |
| 306b78ba5f |
| bccb646a07 |
| 4a5dc8cdd6 |
| 52a751507a |
| 533b9f969d |
| 5de7928bc0 |
| aad9b07f86 |
| 3e2c401253 |
| 762e63d042 |
| ec6d40a51c |
| 47c2c61626 |
| 73c941f6c5 |
| 685edb5f76 |
| 5987b6dcb9 |
| cb029e0bb0 |
| 553ec35947 |
| f93940bfa9 |
| 486f93eb28 |
| 462db36fef |
| 485f7f45e8 |
| a446d8a98c |
| b4a31fc578 |
| 22321c22cc |
| 4419c236e2 |
| 1731a2534c |
| ec0edf47b1 |
| 57c69738e3 |
| fb1f258b2b |
| d419dd0c05 |
| 65960aa3f7 |
| a25afe2834 |
| 4cdfa3bddb |
| 9e7bef9fa7 |
| 68a1b1f91f |
| 1659ca532d |
| 8ea16daae4 |
| 5bd89acf9a |
| 2b8db74be4 |
| d7f9a7114d |
| f7a59eb86e |
4 .github/workflows/builder.yml vendored
@@ -88,6 +88,10 @@ jobs:
       fail-fast: false
       matrix:
         arch: ${{ fromJson(needs.init.outputs.architectures) }}
+        exclude:
+          - arch: armv7
+          - arch: armhf
+          - arch: i386
     steps:
       - name: Checkout the repository
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
@@ -1,7 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.10.1
  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.10.1
  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.10.1
  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.10.1
  i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.10.1
cosign:
  base_identity: https://github.com/home-assistant/docker/.*
  identity: https://github.com/home-assistant/core/.*
@@ -58,7 +58,10 @@ from homeassistant.const import (
 from homeassistant.helpers import network
 from homeassistant.util import color as color_util, dt as dt_util
 from homeassistant.util.decorator import Registry
-from homeassistant.util.unit_conversion import TemperatureConverter
+from homeassistant.util.unit_conversion import (
+    TemperatureConverter,
+    TemperatureDeltaConverter,
+)

 from .config import AbstractConfig
 from .const import (

@@ -844,7 +847,7 @@ def temperature_from_object(
         temp -= 273.15

     if interval:
-        return TemperatureConverter.convert_interval(temp, from_unit, to_unit)
+        return TemperatureDeltaConverter.convert(temp, from_unit, to_unit)
     return TemperatureConverter.convert(temp, from_unit, to_unit)
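The change above replaces the interval-conversion call with a dedicated delta converter. A standalone sketch of why converting an absolute temperature differs from converting a temperature difference (plain Python; function names are illustrative, not the homeassistant.util.unit_conversion API):

```python
def celsius_to_fahrenheit(value: float) -> float:
    """Convert an absolute reading: scale and offset both apply."""
    return value * 9 / 5 + 32


def celsius_delta_to_fahrenheit(delta: float) -> float:
    """Convert a temperature difference: only the scale applies, no offset."""
    return delta * 9 / 5


assert celsius_to_fahrenheit(10) == 50.0  # an absolute 10 °C reads as 50 °F
assert celsius_delta_to_fahrenheit(10) == 18.0  # a 10 °C change is an 18 °F change
```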
@@ -39,11 +39,11 @@ from .const import (
     CONF_TURN_OFF_COMMAND,
     CONF_TURN_ON_COMMAND,
     DEFAULT_ADB_SERVER_PORT,
-    DEFAULT_DEVICE_CLASS,
     DEFAULT_EXCLUDE_UNNAMED_APPS,
     DEFAULT_GET_SOURCES,
     DEFAULT_PORT,
     DEFAULT_SCREENCAP_INTERVAL,
+    DEVICE_AUTO,
     DEVICE_CLASSES,
     DOMAIN,
     PROP_ETHMAC,

@@ -89,8 +89,14 @@ class AndroidTVFlowHandler(ConfigFlow, domain=DOMAIN):
         data_schema = vol.Schema(
             {
                 vol.Required(CONF_HOST, default=host): str,
-                vol.Required(CONF_DEVICE_CLASS, default=DEFAULT_DEVICE_CLASS): vol.In(
-                    DEVICE_CLASSES
-                ),
+                vol.Required(CONF_DEVICE_CLASS, default=DEVICE_AUTO): SelectSelector(
+                    SelectSelectorConfig(
+                        options=[
+                            SelectOptionDict(value=k, label=v)
+                            for k, v in DEVICE_CLASSES.items()
+                        ],
+                        translation_key="device_class",
+                    )
+                ),
                 vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
             },
@@ -15,15 +15,19 @@ CONF_TURN_OFF_COMMAND = "turn_off_command"
|
||||
CONF_TURN_ON_COMMAND = "turn_on_command"
|
||||
|
||||
DEFAULT_ADB_SERVER_PORT = 5037
|
||||
DEFAULT_DEVICE_CLASS = "auto"
|
||||
DEFAULT_EXCLUDE_UNNAMED_APPS = False
|
||||
DEFAULT_GET_SOURCES = True
|
||||
DEFAULT_PORT = 5555
|
||||
DEFAULT_SCREENCAP_INTERVAL = 5
|
||||
|
||||
DEVICE_AUTO = "auto"
|
||||
DEVICE_ANDROIDTV = "androidtv"
|
||||
DEVICE_FIRETV = "firetv"
|
||||
DEVICE_CLASSES = [DEFAULT_DEVICE_CLASS, DEVICE_ANDROIDTV, DEVICE_FIRETV]
|
||||
DEVICE_CLASSES = {
|
||||
DEVICE_AUTO: "auto",
|
||||
DEVICE_ANDROIDTV: "Android TV",
|
||||
DEVICE_FIRETV: "Fire TV",
|
||||
}
|
||||
|
||||
PROP_ETHMAC = "ethmac"
|
||||
PROP_SERIALNO = "serialno"
|
||||
|
||||
@@ -65,6 +65,13 @@
       }
     }
   },
+  "selector": {
+    "device_class": {
+      "options": {
+        "auto": "Auto-detect device type"
+      }
+    }
+  },
   "services": {
     "adb_command": {
       "description": "Sends an ADB command to an Android / Fire TV device.",
@@ -9,7 +9,7 @@ from brother import Brother, SnmpError
 from homeassistant.components.snmp import async_get_snmp_engine
 from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TYPE, Platform
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import ConfigEntryNotReady
+from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady

 from .const import (
     CONF_COMMUNITY,

@@ -50,6 +50,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> b
     coordinator = BrotherDataUpdateCoordinator(hass, entry, brother)
     await coordinator.async_config_entry_first_refresh()

+    if brother.serial.lower() != entry.unique_id:
+        raise ConfigEntryError(
+            translation_domain=DOMAIN,
+            translation_key="serial_mismatch",
+            translation_placeholders={
+                "device": entry.title,
+            },
+        )
+
     entry.runtime_data = coordinator

     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
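The guard compares the lowercased reported serial against the entry's unique ID, so a printer answering on another printer's IP address fails setup instead of writing data to the wrong device. A standalone restatement of the comparison (sketch; unique IDs are assumed to be stored lowercased, as the `.lower()` call above implies):

```python
def serial_matches(reported_serial: str, unique_id: str) -> bool:
    """Normalize the reported serial before comparing against the stored ID."""
    return reported_serial.lower() == unique_id


assert serial_matches("ABC123", "abc123")
assert not serial_matches("XYZ999", "abc123")  # swapped-IP case: setup must fail
```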
30 homeassistant/components/brother/entity.py Normal file
@@ -0,0 +1,30 @@
+"""Define the Brother entity."""
+
+from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
+from homeassistant.helpers.update_coordinator import CoordinatorEntity
+
+from .const import DOMAIN
+from .coordinator import BrotherDataUpdateCoordinator
+
+
+class BrotherPrinterEntity(CoordinatorEntity[BrotherDataUpdateCoordinator]):
+    """Define a Brother Printer entity."""
+
+    _attr_has_entity_name = True
+
+    def __init__(
+        self,
+        coordinator: BrotherDataUpdateCoordinator,
+    ) -> None:
+        """Initialize."""
+        super().__init__(coordinator)
+        self._attr_device_info = DeviceInfo(
+            configuration_url=f"http://{coordinator.brother.host}/",
+            identifiers={(DOMAIN, coordinator.brother.serial)},
+            connections={(CONNECTION_NETWORK_MAC, coordinator.brother.mac)},
+            serial_number=coordinator.brother.serial,
+            manufacturer="Brother",
+            model=coordinator.brother.model,
+            name=coordinator.brother.model,
+            sw_version=coordinator.brother.firmware,
+        )
@@ -19,13 +19,12 @@ from homeassistant.components.sensor import (
 from homeassistant.const import PERCENTAGE, EntityCategory
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers import entity_registry as er
-from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.typing import StateType
-from homeassistant.helpers.update_coordinator import CoordinatorEntity

 from .const import DOMAIN
 from .coordinator import BrotherConfigEntry, BrotherDataUpdateCoordinator
+from .entity import BrotherPrinterEntity

 # Coordinator is used to centralize the data updates
 PARALLEL_UPDATES = 0

@@ -333,12 +332,9 @@ async def async_setup_entry(
     )


-class BrotherPrinterSensor(
-    CoordinatorEntity[BrotherDataUpdateCoordinator], SensorEntity
-):
-    """Define an Brother Printer sensor."""
+class BrotherPrinterSensor(BrotherPrinterEntity, SensorEntity):
+    """Define a Brother Printer sensor."""

-    _attr_has_entity_name = True
     entity_description: BrotherSensorEntityDescription

     def __init__(

@@ -348,16 +344,7 @@ class BrotherPrinterSensor(
     ) -> None:
         """Initialize."""
         super().__init__(coordinator)
-        self._attr_device_info = DeviceInfo(
-            configuration_url=f"http://{coordinator.brother.host}/",
-            identifiers={(DOMAIN, coordinator.brother.serial)},
-            connections={(CONNECTION_NETWORK_MAC, coordinator.brother.mac)},
-            serial_number=coordinator.brother.serial,
-            manufacturer="Brother",
-            model=coordinator.brother.model,
-            name=coordinator.brother.model,
-            sw_version=coordinator.brother.firmware,
-        )
-
         self._attr_native_value = description.value(coordinator.data)
         self._attr_unique_id = f"{coordinator.brother.serial.lower()}_{description.key}"
         self.entity_description = description
@@ -207,6 +207,9 @@
     "cannot_connect": {
       "message": "An error occurred while connecting to the {device} printer: {error}"
     },
+    "serial_mismatch": {
+      "message": "The serial number for {device} doesn't match the one in the configuration. It's possible that the two Brother printers have swapped IP addresses. Restore the previous IP address configuration or reconfigure the devices with Home Assistant."
+    },
     "update_error": {
      "message": "An error occurred while retrieving data from the {device} printer: {error}"
     }
@@ -6,3 +6,5 @@ DEFAULT_PORT = 10102

 CONF_SUPPORTED_MODES = "supported_modes"
 CONF_SWING_SUPPORT = "swing_support"
+MAX_RETRIES = 3
+BACKOFF_BASE_DELAY = 2
@@ -2,6 +2,7 @@

 from __future__ import annotations

+import asyncio
 import logging

 from pycoolmasternet_async import CoolMasterNet

@@ -12,7 +13,7 @@ from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

-from .const import DOMAIN
+from .const import BACKOFF_BASE_DELAY, DOMAIN, MAX_RETRIES

 _LOGGER = logging.getLogger(__name__)

@@ -46,7 +47,34 @@ class CoolmasterDataUpdateCoordinator(

     async def _async_update_data(self) -> dict[str, CoolMasterNetUnit]:
         """Fetch data from Coolmaster."""
-        try:
-            return await self._coolmaster.status()
-        except OSError as error:
-            raise UpdateFailed from error
+        retries_left = MAX_RETRIES
+        status: dict[str, CoolMasterNetUnit] = {}
+        while retries_left > 0 and not status:
+            retries_left -= 1
+            try:
+                status = await self._coolmaster.status()
+            except OSError as error:
+                if retries_left == 0:
+                    raise UpdateFailed(
+                        f"Error communicating with Coolmaster (aborting after {MAX_RETRIES} retries): {error}"
+                    ) from error
+                _LOGGER.debug(
+                    "Error communicating with coolmaster (%d retries left): %s",
+                    retries_left,
+                    str(error),
+                )
+            else:
+                if status:
+                    return status
+
+                _LOGGER.debug(
+                    "Error communicating with coolmaster: empty status received (%d retries left)",
+                    retries_left,
+                )
+
+            backoff = BACKOFF_BASE_DELAY ** (MAX_RETRIES - retries_left)
+            await asyncio.sleep(backoff)
+
+        raise UpdateFailed(
+            f"Error communicating with Coolmaster (aborting after {MAX_RETRIES} retries): empty status received"
+        )
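With the constants above, the wait between attempts follows `BACKOFF_BASE_DELAY ** (MAX_RETRIES - retries_left)`. A quick standalone check of the resulting schedule (the third sleep is only reached on the empty-status path, since an OSError on the final attempt raises immediately):

```python
MAX_RETRIES = 3
BACKOFF_BASE_DELAY = 2

# retries_left counts down 2, 1, 0 after each attempt, so the waits grow exponentially.
delays = [BACKOFF_BASE_DELAY ** (MAX_RETRIES - r) for r in (2, 1, 0)]
assert delays == [2, 4, 8]  # seconds
```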
@@ -151,14 +151,12 @@ ECOWITT_SENSORS_MAPPING: Final = {
         key="RAIN_COUNT_MM",
         native_unit_of_measurement=UnitOfPrecipitationDepth.MILLIMETERS,
         device_class=SensorDeviceClass.PRECIPITATION,
-        state_class=SensorStateClass.TOTAL,
         suggested_display_precision=1,
     ),
     EcoWittSensorTypes.RAIN_COUNT_INCHES: SensorEntityDescription(
         key="RAIN_COUNT_INCHES",
         native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
         device_class=SensorDeviceClass.PRECIPITATION,
-        state_class=SensorStateClass.TOTAL,
         suggested_display_precision=2,
     ),
     EcoWittSensorTypes.RAIN_RATE_MM: SensorEntityDescription(
@@ -75,10 +75,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) -> b

 async def async_unload_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) -> bool:
     """Unload an esphome config entry."""
-    entry_data = await cleanup_instance(entry)
-    return await hass.config_entries.async_unload_platforms(
-        entry, entry_data.loaded_platforms
+    unload_ok = await hass.config_entries.async_unload_platforms(
+        entry, entry.runtime_data.loaded_platforms
     )
+    if unload_ok:
+        await cleanup_instance(entry)
+    return unload_ok


 async def async_remove_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) -> None:
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/foscam",
   "iot_class": "local_polling",
   "loggers": ["libpyfoscamcgi"],
-  "requirements": ["libpyfoscamcgi==0.0.8"]
+  "requirements": ["libpyfoscamcgi==0.0.9"]
 }
@@ -453,7 +453,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     hass.http.app.router.register_resource(IndexView(repo_path, hass))

     async_register_built_in_panel(hass, "light")
-    async_register_built_in_panel(hass, "safety")
+    async_register_built_in_panel(hass, "security")
     async_register_built_in_panel(hass, "climate")

     async_register_built_in_panel(hass, "profile")

@@ -744,9 +744,7 @@ class ManifestJSONView(HomeAssistantView):
 @websocket_api.websocket_command(
     {
         "type": "frontend/get_icons",
-        vol.Required("category"): vol.In(
-            {"entity", "entity_component", "services", "triggers"}
-        ),
+        vol.Required("category"): vol.In({"entity", "entity_component", "services"}),
         vol.Optional("integration"): vol.All(cv.ensure_list, [str]),
     }
 )
@@ -20,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20251103.0"]
+  "requirements": ["home-assistant-frontend==20251104.0"]
 }
@@ -1,11 +1,11 @@
 {
   "entity": {
     "binary_sensor": {
-      "state": {
-        "name": "Running"
-      },
       "mount": {
         "name": "Connected"
       },
+      "state": {
+        "name": "Running"
+      }
     },
     "sensor": {
@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/holiday",
   "iot_class": "local_polling",
-  "requirements": ["holidays==0.83", "babel==2.15.0"]
+  "requirements": ["holidays==0.84", "babel==2.15.0"]
 }
@@ -38,6 +38,7 @@ from homeassistant.const import (
 from homeassistant.core import Event, HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import config_validation as cv, issue_registry as ir, storage
+from homeassistant.helpers.hassio import is_hassio
 from homeassistant.helpers.http import (
     KEY_ALLOW_CONFIGURED_CORS,
     KEY_AUTHENTICATED,  # noqa: F401

@@ -109,7 +110,7 @@ HTTP_SCHEMA: Final = vol.All(
     cv.deprecated(CONF_BASE_URL),
     vol.Schema(
         {
-            vol.Optional(CONF_SERVER_HOST, default=_DEFAULT_BIND): vol.All(
+            vol.Optional(CONF_SERVER_HOST): vol.All(
                 cv.ensure_list, vol.Length(min=1), [cv.string]
             ),
             vol.Optional(CONF_SERVER_PORT, default=SERVER_PORT): cv.port,

@@ -207,7 +208,17 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     if conf is None:
         conf = cast(ConfData, HTTP_SCHEMA({}))

-    server_host = conf[CONF_SERVER_HOST]
+    if CONF_SERVER_HOST in conf and is_hassio(hass):
+        ir.async_create_issue(
+            hass,
+            DOMAIN,
+            "server_host_may_break_hassio",
+            is_fixable=False,
+            severity=ir.IssueSeverity.ERROR,
+            translation_key="server_host_may_break_hassio",
+        )
+
+    server_host = conf.get(CONF_SERVER_HOST, _DEFAULT_BIND)
     server_port = conf[CONF_SERVER_PORT]
     ssl_certificate = conf.get(CONF_SSL_CERTIFICATE)
     ssl_peer_certificate = conf.get(CONF_SSL_PEER_CERTIFICATE)
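Dropping the voluptuous default for CONF_SERVER_HOST is what makes the repair issue detectable: with a schema-injected default, the key would always be present and explicit user configuration could not be distinguished. A standalone illustration of late defaulting (the bind value is illustrative, not necessarily HA's `_DEFAULT_BIND`):

```python
_DEFAULT_BIND = ["0.0.0.0"]  # assumed placeholder value

conf_user_set = {"server_host": ["192.168.1.5"]}
conf_untouched: dict = {}

# Presence of the key now reliably signals explicit configuration...
assert "server_host" in conf_user_set and "server_host" not in conf_untouched
# ...while the default is still applied at the point of use.
assert conf_untouched.get("server_host", _DEFAULT_BIND) == _DEFAULT_BIND
```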
@@ -1,5 +1,9 @@
 {
   "issues": {
+    "server_host_may_break_hassio": {
+      "description": "The `server_host` configuration option in the HTTP integration is prone to break the communication between Home Assistant Core and Supervisor, and will be removed in a future release.\n\nIf you are using this option to bind Home Assistant to specific network interfaces, please remove it from your configuration. Home Assistant will automatically bind to all available interfaces by default.\n\nIf you have specific networking requirements, consider using firewall rules or other network configuration to control access to Home Assistant.",
+      "title": "The `server_host` HTTP configuration may break Home Assistant Core - Supervisor communication"
+    },
     "ssl_configured_without_configured_urls": {
       "description": "Home Assistant detected that SSL has been set up on your instance, however, no custom external internet URL has been set.\n\nThis may result in unexpected behavior. Text-to-speech may fail, and integrations may not be able to connect back to your instance correctly.\n\nTo address this issue, go to Settings > System > Network; under the \"Home Assistant URL\" section, configure your new \"Internet\" and \"Local network\" addresses that match your new SSL configuration.",
       "title": "SSL is configured without an external URL or internal URL"
@@ -54,15 +54,14 @@ from homeassistant.helpers.typing import ConfigType

 from .const import (
     API_VERSION_2,
+    BATCH_BUFFER_SIZE,
+    BATCH_TIMEOUT,
     CATCHING_UP_MESSAGE,
     CLIENT_ERROR_V1,
     CLIENT_ERROR_V2,
     CODE_INVALID_INPUTS,
-    COMPONENT_CONFIG_SCHEMA_BATCH,
     COMPONENT_CONFIG_SCHEMA_CONNECTION,
     CONF_API_VERSION,
-    CONF_BATCH_BUFFER_SIZE,
-    CONF_BATCH_TIMEOUT,
     CONF_BUCKET,
     CONF_COMPONENT_CONFIG,
     CONF_COMPONENT_CONFIG_DOMAIN,

@@ -194,12 +193,7 @@ _INFLUX_BASE_SCHEMA = INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA.extend(
 )

 INFLUX_SCHEMA = vol.All(
-    _INFLUX_BASE_SCHEMA.extend(
-        {
-            **COMPONENT_CONFIG_SCHEMA_CONNECTION,
-            **COMPONENT_CONFIG_SCHEMA_BATCH,
-        }
-    ),
+    _INFLUX_BASE_SCHEMA.extend(COMPONENT_CONFIG_SCHEMA_CONNECTION),
     validate_version_specific_config,
     create_influx_url,
 )

@@ -502,9 +496,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool:

     event_to_json = _generate_event_to_json(conf)
     max_tries = conf.get(CONF_RETRY_COUNT)
-    instance = hass.data[DOMAIN] = InfluxThread(
-        hass, influx, event_to_json, max_tries, conf
-    )
+    instance = hass.data[DOMAIN] = InfluxThread(hass, influx, event_to_json, max_tries)
     instance.start()

     def shutdown(event):

@@ -521,7 +513,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool:
 class InfluxThread(threading.Thread):
     """A threaded event handler class."""

-    def __init__(self, hass, influx, event_to_json, max_tries, config):
+    def __init__(self, hass, influx, event_to_json, max_tries):
         """Initialize the listener."""
         threading.Thread.__init__(self, name=DOMAIN)
         self.queue: queue.SimpleQueue[threading.Event | tuple[float, Event] | None] = (

@@ -532,8 +524,6 @@ class InfluxThread(threading.Thread):
         self.max_tries = max_tries
         self.write_errors = 0
         self.shutdown = False
-        self._batch_timeout = config[CONF_BATCH_TIMEOUT]
-        self.batch_buffer_size = config[CONF_BATCH_BUFFER_SIZE]
         hass.bus.listen(EVENT_STATE_CHANGED, self._event_listener)

     @callback

@@ -542,31 +532,23 @@ class InfluxThread(threading.Thread):
         item = (time.monotonic(), event)
         self.queue.put(item)

-    @property
-    def batch_timeout(self):
+    @staticmethod
+    def batch_timeout():
         """Return number of seconds to wait for more events."""
-        return self._batch_timeout
+        return BATCH_TIMEOUT

     def get_events_json(self):
         """Return a batch of events formatted for writing."""
         queue_seconds = QUEUE_BACKLOG_SECONDS + self.max_tries * RETRY_DELAY
-        start_time = time.monotonic()
-        batch_timeout = self.batch_timeout()

         count = 0
         json = []

         dropped = 0

         with suppress(queue.Empty):
-            while len(json) < self.batch_buffer_size and not self.shutdown:
-                if count > 0 and time.monotonic() - start_time >= batch_timeout:
-                    break
-
-                timeout = (
-                    None
-                    if count == 0
-                    else batch_timeout - (time.monotonic() - start_time)
-                )
+            while len(json) < BATCH_BUFFER_SIZE and not self.shutdown:
+                timeout = None if count == 0 else self.batch_timeout()
                 item = self.queue.get(timeout=timeout)
                 count += 1
@@ -47,9 +47,6 @@ CONF_FUNCTION = "function"
|
||||
CONF_QUERY = "query"
|
||||
CONF_IMPORTS = "imports"
|
||||
|
||||
CONF_BATCH_BUFFER_SIZE = "batch_buffer_size"
|
||||
CONF_BATCH_TIMEOUT = "batch_timeout"
|
||||
|
||||
DEFAULT_DATABASE = "home_assistant"
|
||||
DEFAULT_HOST_V2 = "us-west-2-1.aws.cloud2.influxdata.com"
|
||||
DEFAULT_SSL_V2 = True
|
||||
@@ -63,9 +60,6 @@ DEFAULT_RANGE_STOP = "now()"
|
||||
DEFAULT_FUNCTION_FLUX = "|> limit(n: 1)"
|
||||
DEFAULT_MEASUREMENT_ATTR = "unit_of_measurement"
|
||||
|
||||
DEFAULT_BATCH_BUFFER_SIZE = 100
|
||||
DEFAULT_BATCH_TIMEOUT = 1
|
||||
|
||||
INFLUX_CONF_MEASUREMENT = "measurement"
|
||||
INFLUX_CONF_TAGS = "tags"
|
||||
INFLUX_CONF_TIME = "time"
|
||||
@@ -82,6 +76,8 @@ TIMEOUT = 10 # seconds
|
||||
RETRY_DELAY = 20
|
||||
QUEUE_BACKLOG_SECONDS = 30
|
||||
RETRY_INTERVAL = 60 # seconds
|
||||
BATCH_TIMEOUT = 1
|
||||
BATCH_BUFFER_SIZE = 100
|
||||
LANGUAGE_INFLUXQL = "influxQL"
|
||||
LANGUAGE_FLUX = "flux"
|
||||
TEST_QUERY_V1 = "SHOW DATABASES;"
|
||||
@@ -156,10 +152,3 @@ COMPONENT_CONFIG_SCHEMA_CONNECTION = {
|
||||
vol.Inclusive(CONF_ORG, "v2_authentication"): cv.string,
|
||||
vol.Optional(CONF_BUCKET, default=DEFAULT_BUCKET): cv.string,
|
||||
}
|
||||
|
||||
COMPONENT_CONFIG_SCHEMA_BATCH = {
|
||||
vol.Optional(
|
||||
CONF_BATCH_BUFFER_SIZE, default=DEFAULT_BATCH_BUFFER_SIZE
|
||||
): cv.positive_int,
|
||||
vol.Optional(CONF_BATCH_TIMEOUT, default=DEFAULT_BATCH_TIMEOUT): cv.positive_float,
|
||||
}
|
||||
|
||||
@@ -622,6 +622,7 @@ ENERGY_USAGE_SENSORS: tuple[ThinQEnergySensorEntityDescription, ...] = (
         usage_period=USAGE_MONTHLY,
         start_date_fn=lambda today: today,
         end_date_fn=lambda today: today,
+        state_class=SensorStateClass.TOTAL_INCREASING,
     ),
     ThinQEnergySensorEntityDescription(
         key="last_month",
@@ -25,10 +25,5 @@
     "turn_on": {
       "service": "mdi:lightbulb-on"
     }
-  },
-  "triggers": {
-    "state": {
-      "trigger": "mdi:state-machine"
-    }
   }
 }
@@ -132,13 +132,6 @@
       }
     },
     "selector": {
-      "behavior": {
-        "options": {
-          "any": "Any",
-          "first": "First",
-          "last": "Last"
-        }
-      },
       "color_name": {
         "options": {
           "aliceblue": "Alice blue",

@@ -296,12 +289,6 @@
           "long": "Long",
           "short": "Short"
         }
-      },
-      "state": {
-        "options": {
-          "off": "[%key:common::state::off%]",
-          "on": "[%key:common::state::on%]"
-        }
       }
     },
     "services": {

@@ -475,22 +462,5 @@
       }
     }
   },
-  "title": "Light",
-  "triggers": {
-    "state": {
-      "description": "When the state of a light changes, such as turning on or off.",
-      "description_configured": "When the state of a light changes",
-      "fields": {
-        "behavior": {
-          "description": "The behavior of the targeted entities to trigger on.",
-          "name": "Behavior"
-        },
-        "state": {
-          "description": "The state to trigger on.",
-          "name": "State"
-        }
-      },
-      "name": "State"
-    }
-  }
+  "title": "Light"
 }
@@ -1,152 +0,0 @@
-"""Provides triggers for lights."""
-
-from typing import TYPE_CHECKING, Final, cast, override
-
-import voluptuous as vol
-
-from homeassistant.const import (
-    ATTR_ENTITY_ID,
-    CONF_STATE,
-    CONF_TARGET,
-    STATE_OFF,
-    STATE_ON,
-)
-from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback, split_entity_id
-from homeassistant.helpers import config_validation as cv
-from homeassistant.helpers.event import process_state_match
-from homeassistant.helpers.target import (
-    TargetStateChangedData,
-    async_track_target_selector_state_change_event,
-)
-from homeassistant.helpers.trigger import Trigger, TriggerActionRunner, TriggerConfig
-from homeassistant.helpers.typing import ConfigType
-
-from .const import DOMAIN
-
-# remove when #151314 is merged
-CONF_OPTIONS: Final = "options"
-
-ATTR_BEHAVIOR: Final = "behavior"
-BEHAVIOR_FIRST: Final = "first"
-BEHAVIOR_LAST: Final = "last"
-BEHAVIOR_ANY: Final = "any"
-
-STATE_PLATFORM_TYPE: Final = "state"
-STATE_TRIGGER_SCHEMA = vol.Schema(
-    {
-        vol.Required(CONF_OPTIONS): {
-            vol.Required(CONF_STATE): vol.In([STATE_ON, STATE_OFF]),
-            vol.Required(ATTR_BEHAVIOR, default=BEHAVIOR_ANY): vol.In(
-                [BEHAVIOR_FIRST, BEHAVIOR_LAST, BEHAVIOR_ANY]
-            ),
-        },
-        vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
-    }
-)
-
-
-class StateTrigger(Trigger):
-    """Trigger for state changes."""
-
-    @override
-    @classmethod
-    async def async_validate_config(
-        cls, hass: HomeAssistant, config: ConfigType
-    ) -> ConfigType:
-        """Validate config."""
-        return cast(ConfigType, STATE_TRIGGER_SCHEMA(config))
-
-    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
-        """Initialize the state trigger."""
-        super().__init__(hass, config)
-        if TYPE_CHECKING:
-            assert config.options is not None
-            assert config.target is not None
-        self._options = config.options
-        self._target = config.target
-
-    @override
-    async def async_attach_runner(
-        self, run_action: TriggerActionRunner
-    ) -> CALLBACK_TYPE:
-        """Attach the trigger to an action runner."""
-        match_config_state = process_state_match(self._options.get(CONF_STATE))
-
-        def check_all_match(entity_ids: set[str]) -> bool:
-            """Check if all entity states match."""
-            return all(
-                match_config_state(state.state)
-                for entity_id in entity_ids
-                if (state := self._hass.states.get(entity_id)) is not None
-            )
-
-        def check_one_match(entity_ids: set[str]) -> bool:
-            """Check that only one entity state matches."""
-            return (
-                sum(
-                    match_config_state(state.state)
-                    for entity_id in entity_ids
-                    if (state := self._hass.states.get(entity_id)) is not None
-                )
-                == 1
-            )
-
-        behavior = self._options.get(ATTR_BEHAVIOR)
-
-        @callback
-        def state_change_listener(
-            target_state_change_data: TargetStateChangedData,
-        ) -> None:
-            """Listen for state changes and call action."""
-            event = target_state_change_data.state_change_event
-            entity_id = event.data["entity_id"]
-            from_state = event.data["old_state"]
-            to_state = event.data["new_state"]
-
-            if to_state is None:
-                return
-
-            # This check is required for "first" behavior, to check that it went from zero
-            # entities matching the state to one. Otherwise, if previously there were two
-            # entities on CONF_STATE and one changed, this would trigger.
-            # For "last" behavior it is not required, but serves as a quicker fail check.
-            if not match_config_state(to_state.state):
-                return
-            if behavior == BEHAVIOR_LAST:
-                if not check_all_match(target_state_change_data.targeted_entity_ids):
-                    return
-            elif behavior == BEHAVIOR_FIRST:
-                if not check_one_match(target_state_change_data.targeted_entity_ids):
-                    return
-
-            run_action(
-                {
-                    ATTR_ENTITY_ID: entity_id,
-                    "from_state": from_state,
-                    "to_state": to_state,
-                },
-                f"state of {entity_id}",
-                event.context,
-            )
-
-        def entity_filter(entities: set[str]) -> set[str]:
-            """Filter entities of this domain."""
-            return {
-                entity_id
-                for entity_id in entities
-                if split_entity_id(entity_id)[0] == DOMAIN
-            }
-
-        return async_track_target_selector_state_change_event(
-            self._hass, self._target, state_change_listener, entity_filter
-        )
-
-
-TRIGGERS: dict[str, type[Trigger]] = {
-    STATE_PLATFORM_TYPE: StateTrigger,
-}
-
-
-async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
-    """Return the triggers for lights."""
-    return TRIGGERS
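For reference, the removed trigger's three behaviors reduce to a counting rule over the targeted entities. A standalone sketch of that rule (plain Python, not the helpers.target API):

```python
def should_fire(states: dict[str, str], target: str, behavior: str) -> bool:
    """states maps entity_id -> current state; target is "on" or "off"."""
    hits = sum(state == target for state in states.values())
    if behavior == "any":
        return hits >= 1  # at least one targeted entity matches
    if behavior == "first":
        return hits == 1  # exactly one matches, i.e. the first just arrived
    if behavior == "last":
        return hits == len(states)  # every targeted entity now matches
    raise ValueError(f"unknown behavior: {behavior}")


assert should_fire({"light.a": "on", "light.b": "off"}, "on", "first")
assert not should_fire({"light.a": "on", "light.b": "off"}, "on", "last")
```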
@@ -1,24 +0,0 @@
-state:
-  target:
-    entity:
-      domain: light
-  fields:
-    state:
-      required: true
-      default: "on"
-      selector:
-        select:
-          options:
-            - "off"
-            - "on"
-          translation_key: state
-    behavior:
-      required: true
-      default: any
-      selector:
-        select:
-          options:
-            - first
-            - last
-            - any
-          translation_key: behavior
@@ -13,5 +13,5 @@
   "iot_class": "cloud_push",
   "loggers": ["pylitterbot"],
   "quality_scale": "bronze",
-  "requirements": ["pylitterbot==2024.2.7"]
+  "requirements": ["pylitterbot==2025.0.0"]
 }
@@ -238,12 +238,14 @@ async def _client_listen(
     hass.async_create_task(hass.config_entries.async_reload(entry.entry_id))


-async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_unload_entry(
+    hass: HomeAssistant, entry: MusicAssistantConfigEntry
+) -> bool:
     """Unload a config entry."""
     unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

     if unload_ok:
-        mass_entry_data: MusicAssistantEntryData = entry.runtime_data
+        mass_entry_data = entry.runtime_data
         mass_entry_data.listen_task.cancel()
         await mass_entry_data.mass.disconnect()

@@ -251,7 +253,9 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:


 async def async_remove_config_entry_device(
-    hass: HomeAssistant, config_entry: ConfigEntry, device_entry: dr.DeviceEntry
+    hass: HomeAssistant,
+    config_entry: MusicAssistantConfigEntry,
+    device_entry: dr.DeviceEntry,
 ) -> bool:
     """Remove a config entry from a device."""
     player_id = next(
@@ -115,6 +115,13 @@ QUEUE_OPTION_MAP = {
     MediaPlayerEnqueue.REPLACE: QueueOption.REPLACE,
 }

+REPEAT_MODE_MAPPING_TO_HA = {
+    MassRepeatMode.OFF: RepeatMode.OFF,
+    MassRepeatMode.ONE: RepeatMode.ONE,
+    MassRepeatMode.ALL: RepeatMode.ALL,
+    # UNKNOWN is intentionally not mapped - will return None
+}
+
 SERVICE_PLAY_MEDIA_ADVANCED = "play_media"
 SERVICE_PLAY_ANNOUNCEMENT = "play_announcement"
 SERVICE_TRANSFER_QUEUE = "transfer_queue"

@@ -657,7 +664,7 @@ class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity):
         # player has an MA queue active (either its own queue or some group queue)
         self._attr_app_id = DOMAIN
         self._attr_shuffle = queue.shuffle_enabled
-        self._attr_repeat = queue.repeat_mode.value
+        self._attr_repeat = REPEAT_MODE_MAPPING_TO_HA.get(queue.repeat_mode)
         if not (cur_item := queue.current_item):
             # queue is empty
             return
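Mapping through `dict.get` is what makes the UNKNOWN case safe: an unmapped enum member yields None instead of raising. A standalone illustration with stand-in enums (not the real Music Assistant types):

```python
from enum import Enum


class RepeatModeStandIn(Enum):
    """Illustrative stand-in for the Music Assistant repeat-mode enum."""

    OFF = "off"
    ONE = "one"
    ALL = "all"
    UNKNOWN = "unknown"


MAPPING = {
    RepeatModeStandIn.OFF: "off",
    RepeatModeStandIn.ONE: "one",
    RepeatModeStandIn.ALL: "all",
}

assert MAPPING.get(RepeatModeStandIn.ALL) == "all"
assert MAPPING.get(RepeatModeStandIn.UNKNOWN) is None  # no crash on unknown modes
```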
@@ -2,6 +2,7 @@

 from __future__ import annotations

+from dataclasses import dataclass
 from typing import Any

 from homeassistant.components.climate import (

@@ -13,18 +14,44 @@ from homeassistant.components.climate import (
     HVACAction,
     HVACMode,
 )
-from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
+from homeassistant.const import ATTR_TEMPERATURE, STATE_OFF, STATE_ON, UnitOfTemperature
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+from homeassistant.helpers.restore_state import ExtraStoredData, RestoreEntity

 from .const import DOMAIN, MASTER_THERMOSTATS
 from .coordinator import PlugwiseConfigEntry, PlugwiseDataUpdateCoordinator
 from .entity import PlugwiseEntity
 from .util import plugwise_command

+ERROR_NO_SCHEDULE = "set_schedule_first"
 PARALLEL_UPDATES = 0


+@dataclass
+class PlugwiseClimateExtraStoredData(ExtraStoredData):
+    """Object to hold extra stored data."""
+
+    last_active_schedule: str | None
+    previous_action_mode: str | None
+
+    def as_dict(self) -> dict[str, Any]:
+        """Return a dict representation of the text data."""
+        return {
+            "last_active_schedule": self.last_active_schedule,
+            "previous_action_mode": self.previous_action_mode,
+        }
+
+    @classmethod
+    def from_dict(cls, restored: dict[str, Any]) -> PlugwiseClimateExtraStoredData:
+        """Initialize a stored data object from a dict."""
+        return cls(
+            last_active_schedule=restored.get("last_active_schedule"),
+            previous_action_mode=restored.get("previous_action_mode"),
+        )
+
+
 async def async_setup_entry(
     hass: HomeAssistant,
     entry: PlugwiseConfigEntry,

@@ -56,14 +83,26 @@ async def async_setup_entry(
     entry.async_on_unload(coordinator.async_add_listener(_add_entities))


-class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity):
+class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity, RestoreEntity):
     """Representation of a Plugwise thermostat."""

     _attr_name = None
     _attr_temperature_unit = UnitOfTemperature.CELSIUS
     _attr_translation_key = DOMAIN

-    _previous_mode: str = "heating"
+    _last_active_schedule: str | None = None
+    _previous_action_mode: str | None = HVACAction.HEATING.value
+
+    async def async_added_to_hass(self) -> None:
+        """Run when entity about to be added."""
+        await super().async_added_to_hass()
+
+        if extra_data := await self.async_get_last_extra_data():
+            plugwise_extra_data = PlugwiseClimateExtraStoredData.from_dict(
+                extra_data.as_dict()
+            )
+            self._last_active_schedule = plugwise_extra_data.last_active_schedule
+            self._previous_action_mode = plugwise_extra_data.previous_action_mode

     def __init__(
         self,

@@ -76,7 +115,6 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity):

         gateway_id: str = coordinator.api.gateway_id
         self._gateway_data = coordinator.data[gateway_id]
-
         self._location = device_id
         if (location := self.device.get("location")) is not None:
             self._location = location

@@ -105,25 +143,19 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity):
             self.device["thermostat"]["resolution"], 0.1
         )

-    def _previous_action_mode(self, coordinator: PlugwiseDataUpdateCoordinator) -> None:
-        """Return the previous action-mode when the regulation-mode is not heating or cooling.
-
-        Helper for set_hvac_mode().
-        """
-        # When no cooling available, _previous_mode is always heating
-        if (
-            "regulation_modes" in self._gateway_data
-            and "cooling" in self._gateway_data["regulation_modes"]
-        ):
-            mode = self._gateway_data["select_regulation_mode"]
-            if mode in ("cooling", "heating"):
-                self._previous_mode = mode
-
     @property
     def current_temperature(self) -> float:
         """Return the current temperature."""
         return self.device["sensors"]["temperature"]

+    @property
+    def extra_restore_state_data(self) -> PlugwiseClimateExtraStoredData:
+        """Return text specific state data to be restored."""
+        return PlugwiseClimateExtraStoredData(
+            last_active_schedule=self._last_active_schedule,
+            previous_action_mode=self._previous_action_mode,
+        )
+
     @property
     def target_temperature(self) -> float:
         """Return the temperature we try to reach.

@@ -170,9 +202,10 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity):

         if self.coordinator.api.cooling_present:
             if "regulation_modes" in self._gateway_data:
-                if self._gateway_data["select_regulation_mode"] == "cooling":
+                selected = self._gateway_data.get("select_regulation_mode")
+                if selected == HVACAction.COOLING.value:
                     hvac_modes.append(HVACMode.COOL)
-                if self._gateway_data["select_regulation_mode"] == "heating":
+                if selected == HVACAction.HEATING.value:
                     hvac_modes.append(HVACMode.HEAT)
             else:
                 hvac_modes.append(HVACMode.HEAT_COOL)

@@ -184,8 +217,16 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity):
     @property
     def hvac_action(self) -> HVACAction:
         """Return the current running hvac operation if supported."""
-        # Keep track of the previous action-mode
-        self._previous_action_mode(self.coordinator)
+        # Keep track of the previous hvac_action mode.
+        # When no cooling available, _previous_action_mode is always heating
+        if (
+            "regulation_modes" in self._gateway_data
+            and HVACAction.COOLING.value in self._gateway_data["regulation_modes"]
+        ):
+            mode = self._gateway_data["select_regulation_mode"]
+            if mode in (HVACAction.COOLING.value, HVACAction.HEATING.value):
+                self._previous_action_mode = mode
+
         if (action := self.device.get("control_state")) is not None:
             return HVACAction(action)

@@ -219,14 +260,33 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity):
             return

         if hvac_mode == HVACMode.OFF:
-            await self.coordinator.api.set_regulation_mode(hvac_mode)
+            await self.coordinator.api.set_regulation_mode(hvac_mode.value)
         else:
+            current = self.device.get("select_schedule")
+            desired = current
+
+            # Capture the last valid schedule
+            if desired and desired != "off":
+                self._last_active_schedule = desired
+            elif desired == "off":
+                desired = self._last_active_schedule
+
+            # Enabling HVACMode.AUTO requires a previously set schedule for saving and restoring
+            if hvac_mode == HVACMode.AUTO and not desired:
+                raise HomeAssistantError(
+                    translation_domain=DOMAIN,
+                    translation_key=ERROR_NO_SCHEDULE,
+                )
+
             await self.coordinator.api.set_schedule_state(
                 self._location,
-                "on" if hvac_mode == HVACMode.AUTO else "off",
+                STATE_ON if hvac_mode == HVACMode.AUTO else STATE_OFF,
+                desired,
             )
-            if self.hvac_mode == HVACMode.OFF:
-                await self.coordinator.api.set_regulation_mode(self._previous_mode)
+            if self.hvac_mode == HVACMode.OFF and self._previous_action_mode:
+                await self.coordinator.api.set_regulation_mode(
+                    self._previous_action_mode
+                )

     @plugwise_command
     async def async_set_preset_mode(self, preset_mode: str) -> None:
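The as_dict/from_dict pair above is the whole persistence contract for restored extra data: it must survive a JSON-style round trip. A standalone restatement with a plain dataclass (illustrative, outside Home Assistant's storage layer):

```python
from __future__ import annotations

from dataclasses import dataclass
from typing import Any


@dataclass
class ExtraData:
    """Stand-in for PlugwiseClimateExtraStoredData."""

    last_active_schedule: str | None
    previous_action_mode: str | None

    def as_dict(self) -> dict[str, Any]:
        return {
            "last_active_schedule": self.last_active_schedule,
            "previous_action_mode": self.previous_action_mode,
        }

    @classmethod
    def from_dict(cls, restored: dict[str, Any]) -> ExtraData:
        # .get() tolerates data written by older versions that lack a field.
        return cls(
            last_active_schedule=restored.get("last_active_schedule"),
            previous_action_mode=restored.get("previous_action_mode"),
        )


original = ExtraData("winter_schedule", "heating")
assert ExtraData.from_dict(original.as_dict()) == original  # lossless round trip
```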
@@ -8,6 +8,6 @@
   "iot_class": "local_polling",
   "loggers": ["plugwise"],
   "quality_scale": "platinum",
-  "requirements": ["plugwise==1.8.2"],
+  "requirements": ["plugwise==1.8.3"],
   "zeroconf": ["_plugwise._tcp.local."]
 }
@@ -314,6 +314,9 @@
     "invalid_xml_data": {
       "message": "[%key:component::plugwise::config::error::response_error%]"
     },
+    "set_schedule_first": {
+      "message": "Failed setting HVACMode, set a schedule first."
+    },
     "unsupported_firmware": {
       "message": "[%key:component::plugwise::config::error::unsupported%]"
     }
@@ -3,13 +3,15 @@
 from __future__ import annotations

 import logging
+from typing import Any

 from pooldose.client import PooldoseClient
 from pooldose.request_status import RequestStatus

 from homeassistant.const import CONF_HOST, Platform
-from homeassistant.core import HomeAssistant
+from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import ConfigEntryNotReady
+from homeassistant.helpers import entity_registry as er

 from .coordinator import PooldoseConfigEntry, PooldoseCoordinator

@@ -18,6 +20,36 @@ _LOGGER = logging.getLogger(__name__)
 PLATFORMS: list[Platform] = [Platform.SENSOR]


+async def async_migrate_entry(hass: HomeAssistant, entry: PooldoseConfigEntry) -> bool:
+    """Migrate old entry."""
+    # Version 1.1 -> 1.2: Migrate entity unique IDs
+    # - ofa_orp_value -> ofa_orp_time
+    # - ofa_ph_value -> ofa_ph_time
+    if entry.version == 1 and entry.minor_version < 2:
+
+        @callback
+        def migrate_unique_id(entity_entry: er.RegistryEntry) -> dict[str, Any] | None:
+            """Migrate entity unique IDs for pooldose sensors."""
+            new_unique_id = entity_entry.unique_id
+
+            # Check if this entry needs migration
+            if "_ofa_orp_value" in new_unique_id:
+                new_unique_id = new_unique_id.replace("_ofa_orp_value", "_ofa_orp_time")
+            elif "_ofa_ph_value" in new_unique_id:
+                new_unique_id = new_unique_id.replace("_ofa_ph_value", "_ofa_ph_time")
+            else:
+                # No migration needed
+                return None
+
+            return {"new_unique_id": new_unique_id}
+
+        await er.async_migrate_entries(hass, entry.entry_id, migrate_unique_id)
+
+        hass.config_entries.async_update_entry(entry, version=1, minor_version=2)
+
+    return True
+
+
 async def async_setup_entry(hass: HomeAssistant, entry: PooldoseConfigEntry) -> bool:
     """Set up Seko PoolDose from a config entry."""
     # Get host from config entry data (connection-critical configuration)
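The migration callback's contract, as used above: return a dict of registry fields to update, or None to leave the entry untouched. The rename logic itself can be checked standalone (plain Python, no registry):

```python
def migrate(unique_id: str) -> str | None:
    """Return the new unique ID, or None when no migration is needed."""
    if "_ofa_orp_value" in unique_id:
        return unique_id.replace("_ofa_orp_value", "_ofa_orp_time")
    if "_ofa_ph_value" in unique_id:
        return unique_id.replace("_ofa_ph_value", "_ofa_ph_time")
    return None


assert migrate("abc123_ofa_ph_value") == "abc123_ofa_ph_time"
assert migrate("abc123_orp") is None  # untouched sensors are skipped
```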
@@ -31,6 +31,7 @@ class PooldoseConfigFlow(ConfigFlow, domain=DOMAIN):
     """Config flow for the Pooldose integration including DHCP discovery."""

     VERSION = 1
+    MINOR_VERSION = 2

     def __init__(self) -> None:
         """Initialize the config flow and store the discovered IP address and MAC."""
@@ -1,10 +1,10 @@
 {
   "entity": {
     "sensor": {
-      "ofa_orp_value": {
+      "ofa_orp_time": {
         "default": "mdi:clock"
       },
-      "ofa_ph_value": {
+      "ofa_ph_time": {
         "default": "mdi:clock"
       },
       "orp": {
@@ -11,5 +11,5 @@
   "documentation": "https://www.home-assistant.io/integrations/pooldose",
   "iot_class": "local_polling",
   "quality_scale": "bronze",
-  "requirements": ["python-pooldose==0.7.0"]
+  "requirements": ["python-pooldose==0.7.8"]
 }
@@ -48,8 +48,8 @@ SENSOR_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
         options=["proportional", "on_off", "timed"],
     ),
     SensorEntityDescription(
-        key="ofa_ph_value",
-        translation_key="ofa_ph_value",
+        key="ofa_ph_time",
+        translation_key="ofa_ph_time",
         entity_category=EntityCategory.DIAGNOSTIC,
         device_class=SensorDeviceClass.DURATION,
         entity_registry_enabled_default=False,

@@ -72,8 +72,8 @@ SENSOR_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = (
         options=["off", "proportional", "on_off", "timed"],
     ),
     SensorEntityDescription(
-        key="ofa_orp_value",
-        translation_key="ofa_orp_value",
+        key="ofa_orp_time",
+        translation_key="ofa_orp_time",
         device_class=SensorDeviceClass.DURATION,
         entity_category=EntityCategory.DIAGNOSTIC,
         entity_registry_enabled_default=False,
@@ -34,10 +34,10 @@
   },
   "entity": {
     "sensor": {
-      "ofa_orp_value": {
+      "ofa_orp_time": {
         "name": "ORP overfeed alert time"
       },
-      "ofa_ph_value": {
+      "ofa_ph_time": {
         "name": "pH overfeed alert time"
       },
       "orp": {
@@ -7,5 +7,5 @@
   "integration_type": "hub",
   "iot_class": "local_polling",
   "quality_scale": "bronze",
-  "requirements": ["pyportainer==1.0.12"]
+  "requirements": ["pyportainer==1.0.13"]
 }
@@ -19,5 +19,5 @@
   "iot_class": "local_push",
   "loggers": ["reolink_aio"],
   "quality_scale": "platinum",
-  "requirements": ["reolink-aio==0.16.3"]
+  "requirements": ["reolink-aio==0.16.4"]
 }
@@ -19,6 +19,7 @@ from homeassistant.data_entry_flow import FlowResultType
 from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.helpers import config_validation as cv, issue_registry as ir
 from homeassistant.helpers.dispatcher import async_dispatcher_send
+from homeassistant.helpers.entity_registry import RegistryEntry, async_migrate_entries
 from homeassistant.helpers.typing import ConfigType

 from .const import (

@@ -257,10 +258,11 @@ async def async_migrate_entry(
         config_entry.minor_version,
     )

-    if config_entry.version > 1:
+    if config_entry.version > 2:
         # This means the user has downgraded from a future version
         return False

+    # 1.2 Migrate subentries to include configured numbers to title
     if config_entry.version == 1 and config_entry.minor_version == 1:
         for subentry in config_entry.subentries.values():
             property_map = {

@@ -278,6 +280,21 @@ async def async_migrate_entry(

         hass.config_entries.async_update_entry(config_entry, minor_version=2)

+    # 2.1 Migrate all entity unique IDs to replace "satel" prefix with config entry ID, allows multiple entries to be configured
+    if config_entry.version == 1:
+
+        @callback
+        def migrate_unique_id(entity_entry: RegistryEntry) -> dict[str, str]:
+            """Migrate the unique ID to a new format."""
+            return {
+                "new_unique_id": entity_entry.unique_id.replace(
+                    "satel", config_entry.entry_id
+                )
+            }
+
+        await async_migrate_entries(hass, config_entry.entry_id, migrate_unique_id)
+        hass.config_entries.async_update_entry(config_entry, version=2, minor_version=1)
+
     _LOGGER.debug(
         "Migration to configuration version %s.%s successful",
         config_entry.version,
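The rewrite is a plain string substitution on each stored unique ID, which is what allows several Satel Integra entries to coexist. Concretely (the entry ID value is illustrative):

```python
entry_id = "3f2a1c"  # hypothetical config entry ID
old_ids = ["satel_alarm_panel_1", "satel_zone_7", "satel_switch_2"]
new_ids = [uid.replace("satel", entry_id) for uid in old_ids]
assert new_ids == ["3f2a1c_alarm_panel_1", "3f2a1c_zone_7", "3f2a1c_switch_2"]
```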
@@ -52,7 +52,11 @@ async def async_setup_entry(
         async_add_entities(
             [
                 SatelIntegraAlarmPanel(
-                    controller, zone_name, arm_home_mode, partition_num
+                    controller,
+                    zone_name,
+                    arm_home_mode,
+                    partition_num,
+                    config_entry.entry_id,
                 )
             ],
             config_subentry_id=subentry.subentry_id,

@@ -69,10 +73,12 @@ class SatelIntegraAlarmPanel(AlarmControlPanelEntity):
         | AlarmControlPanelEntityFeature.ARM_AWAY
     )

-    def __init__(self, controller, name, arm_home_mode, partition_id) -> None:
+    def __init__(
+        self, controller, name, arm_home_mode, partition_id, config_entry_id
+    ) -> None:
         """Initialize the alarm panel."""
         self._attr_name = name
-        self._attr_unique_id = f"satel_alarm_panel_{partition_id}"
+        self._attr_unique_id = f"{config_entry_id}_alarm_panel_{partition_id}"
         self._arm_home_mode = arm_home_mode
         self._partition_id = partition_id
         self._satel = controller
@@ -53,6 +53,7 @@ async def async_setup_entry(
                     zone_type,
                     CONF_ZONES,
                     SIGNAL_ZONES_UPDATED,
+                    config_entry.entry_id,
                 )
             ],
             config_subentry_id=subentry.subentry_id,

@@ -77,6 +78,7 @@ async def async_setup_entry(
                     ouput_type,
                     CONF_OUTPUTS,
                     SIGNAL_OUTPUTS_UPDATED,
+                    config_entry.entry_id,
                 )
             ],
             config_subentry_id=subentry.subentry_id,

@@ -96,10 +98,11 @@ class SatelIntegraBinarySensor(BinarySensorEntity):
         zone_type,
         sensor_type,
         react_to_signal,
+        config_entry_id,
     ):
         """Initialize the binary_sensor."""
         self._device_number = device_number
-        self._attr_unique_id = f"satel_{sensor_type}_{device_number}"
+        self._attr_unique_id = f"{config_entry_id}_{sensor_type}_{device_number}"
         self._name = device_name
         self._zone_type = zone_type
         self._state = 0
@@ -90,8 +90,8 @@ SWITCHABLE_OUTPUT_SCHEMA = vol.Schema({vol.Required(CONF_NAME): cv.string})
 class SatelConfigFlow(ConfigFlow, domain=DOMAIN):
     """Handle a Satel Integra config flow."""

-    VERSION = 1
-    MINOR_VERSION = 2
+    VERSION = 2
+    MINOR_VERSION = 1

     @staticmethod
     @callback

@@ -121,6 +121,8 @@ class SatelConfigFlow(ConfigFlow, domain=DOMAIN):
         errors: dict[str, str] = {}

         if user_input is not None:
+            self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]})
+
             valid = await self.test_connection(
                 user_input[CONF_HOST], user_input[CONF_PORT]
             )
@@ -7,6 +7,5 @@
   "integration_type": "device",
   "iot_class": "local_push",
   "loggers": ["satel_integra"],
-  "requirements": ["satel-integra==0.3.7"],
-  "single_config_entry": true
+  "requirements": ["satel-integra==0.3.7"]
 }
@@ -4,6 +4,9 @@
     "code_input_description": "Code to toggle switchable outputs"
   },
   "config": {
+    "abort": {
+      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
+    },
     "error": {
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
     },
@@ -46,6 +46,7 @@ async def async_setup_entry(
                     switchable_output_num,
                     switchable_output_name,
                     config_entry.options.get(CONF_CODE),
+                    config_entry.entry_id,
                 ),
             ],
             config_subentry_id=subentry.subentry_id,

@@ -57,10 +58,10 @@ class SatelIntegraSwitch(SwitchEntity):

     _attr_should_poll = False

-    def __init__(self, controller, device_number, device_name, code):
+    def __init__(self, controller, device_number, device_name, code, config_entry_id):
         """Initialize the binary_sensor."""
         self._device_number = device_number
-        self._attr_unique_id = f"satel_switch_{device_number}"
+        self._attr_unique_id = f"{config_entry_id}_switch_{device_number}"
         self._name = device_name
         self._state = False
         self._code = code
@@ -9,7 +9,7 @@
   "iot_class": "local_push",
   "loggers": ["aioshelly"],
   "quality_scale": "silver",
-  "requirements": ["aioshelly==13.15.0"],
+  "requirements": ["aioshelly==13.16.0"],
   "zeroconf": [
     {
       "name": "shelly*",
@@ -9,7 +9,11 @@ from homeassistant.const import (
 )
 from homeassistant.core import HomeAssistant

-from .coordinator import SMHIConfigEntry, SMHIDataUpdateCoordinator
+from .coordinator import (
+    SMHIConfigEntry,
+    SMHIDataUpdateCoordinator,
+    SMHIFireDataUpdateCoordinator,
+)

 PLATFORMS = [Platform.SENSOR, Platform.WEATHER]

@@ -24,7 +28,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: SMHIConfigEntry) -> bool

     coordinator = SMHIDataUpdateCoordinator(hass, entry)
     await coordinator.async_config_entry_first_refresh()
-    entry.runtime_data = coordinator
+    fire_coordinator = SMHIFireDataUpdateCoordinator(hass, entry)
+    await fire_coordinator.async_config_entry_first_refresh()
+    entry.runtime_data = (coordinator, fire_coordinator)

     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
     return True
@@ -5,7 +5,14 @@ from __future__ import annotations
import asyncio
from dataclasses import dataclass

from pysmhi import SMHIForecast, SmhiForecastException, SMHIPointForecast
from pysmhi import (
    SMHIFireForecast,
    SmhiFireForecastException,
    SMHIFirePointForecast,
    SMHIForecast,
    SmhiForecastException,
    SMHIPointForecast,
)

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_LATITUDE, CONF_LOCATION, CONF_LONGITUDE
@@ -15,7 +22,9 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda

from .const import DEFAULT_SCAN_INTERVAL, DOMAIN, LOGGER, TIMEOUT

type SMHIConfigEntry = ConfigEntry[SMHIDataUpdateCoordinator]
type SMHIConfigEntry = ConfigEntry[
    tuple[SMHIDataUpdateCoordinator, SMHIFireDataUpdateCoordinator]
]


@dataclass
@@ -27,6 +36,14 @@ class SMHIForecastData:
    twice_daily: list[SMHIForecast]


@dataclass
class SMHIFireForecastData:
    """Dataclass for SMHI fire data."""

    fire_daily: list[SMHIFireForecast]
    fire_hourly: list[SMHIFireForecast]


class SMHIDataUpdateCoordinator(DataUpdateCoordinator[SMHIForecastData]):
    """A SMHI Data Update Coordinator."""

@@ -71,3 +88,49 @@ class SMHIDataUpdateCoordinator(DataUpdateCoordinator[SMHIForecastData]):
    def current(self) -> SMHIForecast:
        """Return the current metrics."""
        return self.data.daily[0]


class SMHIFireDataUpdateCoordinator(DataUpdateCoordinator[SMHIFireForecastData]):
    """A SMHI Fire Data Update Coordinator."""

    config_entry: SMHIConfigEntry

    def __init__(self, hass: HomeAssistant, config_entry: SMHIConfigEntry) -> None:
        """Initialize the SMHI coordinator."""
        super().__init__(
            hass,
            LOGGER,
            config_entry=config_entry,
            name=DOMAIN,
            update_interval=DEFAULT_SCAN_INTERVAL,
        )
        self._smhi_fire_api = SMHIFirePointForecast(
            config_entry.data[CONF_LOCATION][CONF_LONGITUDE],
            config_entry.data[CONF_LOCATION][CONF_LATITUDE],
            session=aiohttp_client.async_get_clientsession(hass),
        )

    async def _async_update_data(self) -> SMHIFireForecastData:
        """Fetch data from SMHI."""
        try:
            async with asyncio.timeout(TIMEOUT):
                _forecast_fire_daily = (
                    await self._smhi_fire_api.async_get_daily_forecast()
                )
                _forecast_fire_hourly = (
                    await self._smhi_fire_api.async_get_hourly_forecast()
                )
        except SmhiFireForecastException as ex:
            raise UpdateFailed(
                "Failed to retrieve the forecast from the SMHI API"
            ) from ex

        return SMHIFireForecastData(
            fire_daily=_forecast_fire_daily,
            fire_hourly=_forecast_fire_hourly,
        )

    @property
    def fire_current(self) -> SMHIFireForecast:
        """Return the current fire metrics."""
        return self.data.fire_daily[0]
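Since `entry.runtime_data` now carries a tuple of two coordinators, every consumer has to index or unpack it. A minimal sketch (platform body elided; the signature follows the usual Home Assistant platform convention, not code from this diff) of unpacking both coordinators under the new `SMHIConfigEntry` alias:

# Sketch only: unpacking the tuple-typed runtime_data in a platform setup.
async def async_setup_entry(
    hass: HomeAssistant,
    entry: SMHIConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    # The alias types runtime_data as a (weather, fire) coordinator pair.
    weather_coordinator, fire_coordinator = entry.runtime_data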
@@ -6,13 +6,14 @@ from abc import abstractmethod

from homeassistant.core import callback
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import SMHIDataUpdateCoordinator
from .coordinator import SMHIDataUpdateCoordinator, SMHIFireDataUpdateCoordinator


class SmhiWeatherBaseEntity(CoordinatorEntity[SMHIDataUpdateCoordinator]):
class SmhiWeatherBaseEntity(Entity):
    """Representation of a base weather entity."""

    _attr_attribution = "Swedish weather institute (SMHI)"
@@ -22,10 +23,8 @@ class SmhiWeatherBaseEntity(CoordinatorEntity[SMHIDataUpdateCoordinator]):
        self,
        latitude: str,
        longitude: str,
        coordinator: SMHIDataUpdateCoordinator,
    ) -> None:
        """Initialize the SMHI base weather entity."""
        super().__init__(coordinator)
        self._attr_unique_id = f"{latitude}, {longitude}"
        self._attr_device_info = DeviceInfo(
            entry_type=DeviceEntryType.SERVICE,
@@ -36,12 +35,50 @@ class SmhiWeatherBaseEntity(CoordinatorEntity[SMHIDataUpdateCoordinator]):
        )
        self.update_entity_data()

    @abstractmethod
    def update_entity_data(self) -> None:
        """Refresh the entity data."""


class SmhiWeatherEntity(
    CoordinatorEntity[SMHIDataUpdateCoordinator], SmhiWeatherBaseEntity
):
    """Representation of a weather entity."""

    def __init__(
        self,
        latitude: str,
        longitude: str,
        coordinator: SMHIDataUpdateCoordinator,
    ) -> None:
        """Initialize the SMHI base weather entity."""
        super().__init__(coordinator)
        SmhiWeatherBaseEntity.__init__(self, latitude, longitude)

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        self.update_entity_data()
        super()._handle_coordinator_update()

    @abstractmethod
    def update_entity_data(self) -> None:
        """Refresh the entity data."""

class SmhiFireEntity(
    CoordinatorEntity[SMHIFireDataUpdateCoordinator], SmhiWeatherBaseEntity
):
    """Representation of a fire entity."""

    def __init__(
        self,
        latitude: str,
        longitude: str,
        coordinator: SMHIFireDataUpdateCoordinator,
    ) -> None:
        """Initialize the SMHI base weather entity."""
        super().__init__(coordinator)
        SmhiWeatherBaseEntity.__init__(self, latitude, longitude)

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        self.update_entity_data()
        super()._handle_coordinator_update()

@@ -1,12 +1,42 @@
{
  "entity": {
    "sensor": {
      "build_up_index": {
        "default": "mdi:grass"
      },
      "drought_code": {
        "default": "mdi:grass"
      },
      "duff_moisture_code": {
        "default": "mdi:grass"
      },
      "fine_fuel_moisture_code": {
        "default": "mdi:grass"
      },
      "fire_weather_index": {
        "default": "mdi:pine-tree-fire"
      },
      "forestdry": {
        "default": "mdi:forest"
      },
      "frozen_precipitation": {
        "default": "mdi:weather-snowy-rainy"
      },
      "fwi": {
        "default": "mdi:pine-tree-fire"
      },
      "fwiindex": {
        "default": "mdi:pine-tree-fire"
      },
      "grassfire": {
        "default": "mdi:fire-circle"
      },
      "high_cloud": {
        "default": "mdi:cloud-arrow-up"
      },
      "initial_spread_index": {
        "default": "mdi:grass"
      },
      "low_cloud": {
        "default": "mdi:cloud-arrow-down"
      },
@@ -16,6 +46,9 @@
      "precipitation_category": {
        "default": "mdi:weather-pouring"
      },
      "rate_of_spread": {
        "default": "mdi:grass"
      },
      "thunder": {
        "default": "mdi:weather-lightning"
      },

@@ -10,19 +10,55 @@ from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.const import (
    CONF_LATITUDE,
    CONF_LOCATION,
    CONF_LONGITUDE,
    PERCENTAGE,
    UnitOfSpeed,
)
from homeassistant.const import CONF_LATITUDE, CONF_LOCATION, CONF_LONGITUDE, PERCENTAGE
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType

from .coordinator import SMHIConfigEntry, SMHIDataUpdateCoordinator
from .entity import SmhiWeatherBaseEntity
from .coordinator import (
    SMHIConfigEntry,
    SMHIDataUpdateCoordinator,
    SMHIFireDataUpdateCoordinator,
)
from .entity import SmhiFireEntity, SmhiWeatherEntity

PARALLEL_UPDATES = 0

FWI_INDEX_MAP = {
    "1": "very_low",
    "2": "low",
    "3": "moderate",
    "4": "high",
    "5": "very_high",
    "6": "extreme",
}
GRASSFIRE_MAP = {
    "1": "snow_cover",
    "2": "season_over",
    "3": "low",
    "4": "moderate",
    "5": "high",
    "6": "very_high",
}
FORESTDRY_MAP = {
    "1": "very_wet",
    "2": "wet",
    "3": "moderate_wet",
    "4": "dry",
    "5": "very_dry",
    "6": "extremely_dry",
}

def get_percentage_values(entity: SMHISensor, key: str) -> int | None:

def get_percentage_values(entity: SMHIWeatherSensor, key: str) -> int | None:
    """Return percentage values in correct range."""
    value: int | None = entity.coordinator.current.get(key)  # type: ignore[assignment]
    if value is not None and 0 <= value <= 100:
@@ -32,49 +68,64 @@ def get_percentage_values(entity: SMHISensor, key: str) -> int | None:
    return None


def get_fire_index_value(entity: SMHIFireSensor, key: str) -> str:
    """Return index value as string."""
    value: int | None = entity.coordinator.fire_current.get(key)  # type: ignore[assignment]
    if value is not None and value > 0:
        return str(int(value))
    return "0"
@dataclass(frozen=True, kw_only=True)
class SMHISensorEntityDescription(SensorEntityDescription):
    """Describes SMHI sensor entity."""
class SMHIWeatherEntityDescription(SensorEntityDescription):
    """Describes SMHI weather entity."""

    value_fn: Callable[[SMHISensor], StateType | datetime]
    value_fn: Callable[[SMHIWeatherSensor], StateType | datetime]


SENSOR_DESCRIPTIONS: tuple[SMHISensorEntityDescription, ...] = (
    SMHISensorEntityDescription(
@dataclass(frozen=True, kw_only=True)
class SMHIFireEntityDescription(SensorEntityDescription):
    """Describes SMHI fire entity."""

    value_fn: Callable[[SMHIFireSensor], StateType | datetime]


WEATHER_SENSOR_DESCRIPTIONS: tuple[SMHIWeatherEntityDescription, ...] = (
    SMHIWeatherEntityDescription(
        key="thunder",
        translation_key="thunder",
        value_fn=lambda entity: get_percentage_values(entity, "thunder"),
        native_unit_of_measurement=PERCENTAGE,
    ),
    SMHISensorEntityDescription(
    SMHIWeatherEntityDescription(
        key="total_cloud",
        translation_key="total_cloud",
        value_fn=lambda entity: get_percentage_values(entity, "total_cloud"),
        native_unit_of_measurement=PERCENTAGE,
        entity_registry_enabled_default=False,
    ),
    SMHISensorEntityDescription(
    SMHIWeatherEntityDescription(
        key="low_cloud",
        translation_key="low_cloud",
        value_fn=lambda entity: get_percentage_values(entity, "low_cloud"),
        native_unit_of_measurement=PERCENTAGE,
        entity_registry_enabled_default=False,
    ),
    SMHISensorEntityDescription(
    SMHIWeatherEntityDescription(
        key="medium_cloud",
        translation_key="medium_cloud",
        value_fn=lambda entity: get_percentage_values(entity, "medium_cloud"),
        native_unit_of_measurement=PERCENTAGE,
        entity_registry_enabled_default=False,
    ),
    SMHISensorEntityDescription(
    SMHIWeatherEntityDescription(
        key="high_cloud",
        translation_key="high_cloud",
        value_fn=lambda entity: get_percentage_values(entity, "high_cloud"),
        native_unit_of_measurement=PERCENTAGE,
        entity_registry_enabled_default=False,
    ),
    SMHISensorEntityDescription(
    SMHIWeatherEntityDescription(
        key="precipitation_category",
        translation_key="precipitation_category",
        value_fn=lambda entity: str(
@@ -83,13 +134,100 @@ SENSOR_DESCRIPTIONS: tuple[SMHISensorEntityDescription, ...] = (
        device_class=SensorDeviceClass.ENUM,
        options=["0", "1", "2", "3", "4", "5", "6"],
    ),
    SMHISensorEntityDescription(
    SMHIWeatherEntityDescription(
        key="frozen_precipitation",
        translation_key="frozen_precipitation",
        value_fn=lambda entity: get_percentage_values(entity, "frozen_precipitation"),
        native_unit_of_measurement=PERCENTAGE,
    ),
)
FIRE_SENSOR_DESCRIPTIONS: tuple[SMHIFireEntityDescription, ...] = (
    SMHIFireEntityDescription(
        key="fwiindex",
        translation_key="fwiindex",
        value_fn=(
            lambda entity: FWI_INDEX_MAP.get(get_fire_index_value(entity, "fwiindex"))
        ),
        device_class=SensorDeviceClass.ENUM,
        options=[*FWI_INDEX_MAP.values()],
        entity_registry_enabled_default=False,
    ),
    SMHIFireEntityDescription(
        key="fire_weather_index",
        translation_key="fire_weather_index",
        value_fn=lambda entity: entity.coordinator.fire_current.get("fwi"),
        state_class=SensorStateClass.MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    SMHIFireEntityDescription(
        key="initial_spread_index",
        translation_key="initial_spread_index",
        value_fn=lambda entity: entity.coordinator.fire_current.get("isi"),
        state_class=SensorStateClass.MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    SMHIFireEntityDescription(
        key="build_up_index",
        translation_key="build_up_index",
        value_fn=(
            lambda entity: entity.coordinator.fire_current.get(
                "bui"  # codespell:ignore bui
            )
        ),
        state_class=SensorStateClass.MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    SMHIFireEntityDescription(
        key="fine_fuel_moisture_code",
        translation_key="fine_fuel_moisture_code",
        value_fn=lambda entity: entity.coordinator.fire_current.get("ffmc"),
        state_class=SensorStateClass.MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    SMHIFireEntityDescription(
        key="duff_moisture_code",
        translation_key="duff_moisture_code",
        value_fn=lambda entity: entity.coordinator.fire_current.get("dmc"),
        state_class=SensorStateClass.MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    SMHIFireEntityDescription(
        key="drought_code",
        translation_key="drought_code",
        value_fn=lambda entity: entity.coordinator.fire_current.get("dc"),
        state_class=SensorStateClass.MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    SMHIFireEntityDescription(
        key="grassfire",
        translation_key="grassfire",
        value_fn=(
            lambda entity: GRASSFIRE_MAP.get(get_fire_index_value(entity, "grassfire"))
        ),
        device_class=SensorDeviceClass.ENUM,
        options=[*GRASSFIRE_MAP.values()],
        entity_registry_enabled_default=False,
    ),
    SMHIFireEntityDescription(
        key="rate_of_spread",
        translation_key="rate_of_spread",
        value_fn=lambda entity: entity.coordinator.fire_current.get("rn"),
        device_class=SensorDeviceClass.SPEED,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfSpeed.METERS_PER_MINUTE,
        entity_registry_enabled_default=False,
    ),
    SMHIFireEntityDescription(
        key="forestdry",
        translation_key="forestdry",
        value_fn=(
            lambda entity: FORESTDRY_MAP.get(get_fire_index_value(entity, "forestdry"))
        ),
        device_class=SensorDeviceClass.ENUM,
        options=[*FORESTDRY_MAP.values()],
        entity_registry_enabled_default=False,
    ),
)


async def async_setup_entry(
@@ -99,30 +237,43 @@ async def async_setup_entry(
) -> None:
    """Set up SMHI sensor platform."""

    coordinator = entry.runtime_data
    coordinator = entry.runtime_data[0]
    fire_coordinator = entry.runtime_data[1]
    location = entry.data
    async_add_entities(
        SMHISensor(
    entities: list[SMHIWeatherSensor | SMHIFireSensor] = []
    entities.extend(
        SMHIWeatherSensor(
            location[CONF_LOCATION][CONF_LATITUDE],
            location[CONF_LOCATION][CONF_LONGITUDE],
            coordinator=coordinator,
            entity_description=description,
        )
        for description in SENSOR_DESCRIPTIONS
        for description in WEATHER_SENSOR_DESCRIPTIONS
    )
    entities.extend(
        SMHIFireSensor(
            location[CONF_LOCATION][CONF_LATITUDE],
            location[CONF_LOCATION][CONF_LONGITUDE],
            coordinator=fire_coordinator,
            entity_description=description,
        )
        for description in FIRE_SENSOR_DESCRIPTIONS
    )

    async_add_entities(entities)

class SMHISensor(SmhiWeatherBaseEntity, SensorEntity):
    """Representation of a SMHI Sensor."""

    entity_description: SMHISensorEntityDescription
class SMHIWeatherSensor(SmhiWeatherEntity, SensorEntity):
    """Representation of a SMHI Weather Sensor."""

    entity_description: SMHIWeatherEntityDescription

    def __init__(
        self,
        latitude: str,
        longitude: str,
        coordinator: SMHIDataUpdateCoordinator,
        entity_description: SMHISensorEntityDescription,
        entity_description: SMHIWeatherEntityDescription,
    ) -> None:
        """Initialize the SMHI sensor."""
        self.entity_description = entity_description
@@ -137,3 +288,30 @@ class SMHISensor(SmhiWeatherBaseEntity, SensorEntity):
        """Refresh the entity data."""
        if self.coordinator.data.daily:
            self._attr_native_value = self.entity_description.value_fn(self)


class SMHIFireSensor(SmhiFireEntity, SensorEntity):
    """Representation of a SMHI Fire Sensor."""

    entity_description: SMHIFireEntityDescription

    def __init__(
        self,
        latitude: str,
        longitude: str,
        coordinator: SMHIFireDataUpdateCoordinator,
        entity_description: SMHIFireEntityDescription,
    ) -> None:
        """Initialize the SMHI sensor."""
        self.entity_description = entity_description
        super().__init__(
            latitude,
            longitude,
            coordinator,
        )
        self._attr_unique_id = f"{latitude}, {longitude}-{entity_description.key}"

    def update_entity_data(self) -> None:
        """Refresh the entity data."""
        if self.coordinator.data.fire_daily:
            self._attr_native_value = self.entity_description.value_fn(self)

@@ -26,12 +26,66 @@
  },
  "entity": {
    "sensor": {
      "build_up_index": {
        "name": "Build up index"
      },
      "drought_code": {
        "name": "Drought code"
      },
      "duff_moisture_code": {
        "name": "Duff moisture code"
      },
      "fine_fuel_moisture_code": {
        "name": "Fine fuel moisture code"
      },
      "fire_weather_index": {
        "name": "Fire weather index"
      },
      "forestdry": {
        "name": "Fuel drying",
        "state": {
          "dry": "Dry",
          "extremely_dry": "Extremely dry",
          "moderate_wet": "Moderate wet",
          "very_dry": "Very dry",
          "very_wet": "Very wet",
          "wet": "Wet"
        }
      },
      "frozen_precipitation": {
        "name": "Frozen precipitation"
      },
      "fwi": {
        "name": "Fire weather index"
      },
      "fwiindex": {
        "name": "FWI index",
        "state": {
          "extreme": "Extremely high risk",
          "high": "High risk",
          "low": "Low risk",
          "moderate": "Moderate risk",
          "very_high": "Very high risk",
          "very_low": "Very low risk"
        }
      },
      "grassfire": {
        "name": "Highest grass fire risk",
        "state": {
          "high": "High",
          "low": "Low",
          "moderate": "Moderate",
          "season_over": "Grass fire season over",
          "snow_cover": "Snow cover",
          "very_high": "Very high"
        }
      },
      "high_cloud": {
        "name": "High cloud coverage"
      },
      "initial_spread_index": {
        "name": "Initial spread index"
      },
      "low_cloud": {
        "name": "Low cloud coverage"
      },
@@ -50,6 +104,9 @@
          "6": "Freezing drizzle"
        }
      },
      "rate_of_spread": {
        "name": "Potential rate of spread"
      },
      "thunder": {
        "name": "Thunder probability"
      },

@@ -55,7 +55,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import ATTR_SMHI_THUNDER_PROBABILITY, ENTITY_ID_SENSOR_FORMAT
from .coordinator import SMHIConfigEntry
from .entity import SmhiWeatherBaseEntity
from .entity import SmhiWeatherEntity

# Used to map condition from API results
CONDITION_CLASSES: Final[dict[str, list[int]]] = {
@@ -89,7 +89,7 @@ async def async_setup_entry(
    """Add a weather entity from map location."""
    location = config_entry.data

    coordinator = config_entry.runtime_data
    coordinator = config_entry.runtime_data[0]

    entity = SmhiWeather(
        location[CONF_LOCATION][CONF_LATITUDE],
@@ -101,7 +101,7 @@ async def async_setup_entry(
    async_add_entities([entity])


class SmhiWeather(SmhiWeatherBaseEntity, SingleCoordinatorWeatherEntity):
class SmhiWeather(SmhiWeatherEntity, SingleCoordinatorWeatherEntity):
    """Representation of a weather entity."""

    _attr_native_temperature_unit = UnitOfTemperature.CELSIUS

@@ -30,44 +30,44 @@ _LOGGER = logging.getLogger(__name__)
class SensorData:
    """Sensor data."""

    disk_usage: dict[str, sdiskusage]
    swap: sswap
    memory: VirtualMemory
    io_counters: dict[str, snetio]
    addresses: dict[str, list[snicaddr]]
    load: tuple[float, float, float]
    cpu_percent: float | None
    boot_time: datetime
    processes: list[Process]
    temperatures: dict[str, list[shwtemp]]
    cpu_percent: float | None
    disk_usage: dict[str, sdiskusage]
    io_counters: dict[str, snetio]
    load: tuple[float, float, float]
    memory: VirtualMemory
    process_fds: dict[str, int]
    processes: list[Process]
    swap: sswap
    temperatures: dict[str, list[shwtemp]]

    def as_dict(self) -> dict[str, Any]:
        """Return as dict."""
        addresses = None
        if self.addresses:
            addresses = {k: str(v) for k, v in self.addresses.items()}
        disk_usage = None
        if self.disk_usage:
            disk_usage = {k: str(v) for k, v in self.disk_usage.items()}
        io_counters = None
        if self.io_counters:
            io_counters = {k: str(v) for k, v in self.io_counters.items()}
        addresses = None
        if self.addresses:
            addresses = {k: str(v) for k, v in self.addresses.items()}
        temperatures = None
        if self.temperatures:
            temperatures = {k: str(v) for k, v in self.temperatures.items()}
        return {
            "disk_usage": disk_usage,
            "swap": str(self.swap),
            "memory": str(self.memory),
            "io_counters": io_counters,
            "addresses": addresses,
            "load": str(self.load),
            "cpu_percent": str(self.cpu_percent),
            "boot_time": str(self.boot_time),
            "processes": str(self.processes),
            "temperatures": temperatures,
            "cpu_percent": str(self.cpu_percent),
            "disk_usage": disk_usage,
            "io_counters": io_counters,
            "load": str(self.load),
            "memory": str(self.memory),
            "process_fds": self.process_fds,
            "processes": str(self.processes),
            "swap": str(self.swap),
            "temperatures": temperatures,
        }


@@ -124,14 +124,14 @@ class SystemMonitorCoordinator(TimestampDataUpdateCoordinator[SensorData]):
            _disk_defaults[("disks", argument)] = set()
        return {
            **_disk_defaults,
            ("swap", ""): set(),
            ("memory", ""): set(),
            ("io_counters", ""): set(),
            ("addresses", ""): set(),
            ("load", ""): set(),
            ("cpu_percent", ""): set(),
            ("boot", ""): set(),
            ("cpu_percent", ""): set(),
            ("io_counters", ""): set(),
            ("load", ""): set(),
            ("memory", ""): set(),
            ("processes", ""): set(),
            ("swap", ""): set(),
            ("temperatures", ""): set(),
        }

@@ -153,17 +153,17 @@ class SystemMonitorCoordinator(TimestampDataUpdateCoordinator[SensorData]):

        self._initial_update = False
        return SensorData(
            disk_usage=_data["disks"],
            swap=_data["swap"],
            memory=_data["memory"],
            io_counters=_data["io_counters"],
            addresses=_data["addresses"],
            load=load,
            cpu_percent=cpu_percent,
            boot_time=_data["boot_time"],
            processes=_data["processes"],
            temperatures=_data["temperatures"],
            cpu_percent=cpu_percent,
            disk_usage=_data["disks"],
            io_counters=_data["io_counters"],
            load=load,
            memory=_data["memory"],
            process_fds=_data["process_fds"],
            processes=_data["processes"],
            swap=_data["swap"],
            temperatures=_data["temperatures"],
        )

    def update_data(self) -> dict[str, Any]:
@@ -256,13 +256,13 @@ class SystemMonitorCoordinator(TimestampDataUpdateCoordinator[SensorData]):
            _LOGGER.debug("OS does not provide temperature sensors")

        return {
            "disks": disks,
            "swap": swap,
            "memory": memory,
            "io_counters": io_counters,
            "addresses": addresses,
            "boot_time": self.boot_time,
            "processes": selected_processes,
            "temperatures": temps,
            "disks": disks,
            "io_counters": io_counters,
            "memory": memory,
            "process_fds": process_fds,
            "processes": selected_processes,
            "swap": swap,
            "temperatures": temps,
        }

@@ -55,12 +55,18 @@ SENSOR_TYPE_MANDATORY_ARG = 4

SIGNAL_SYSTEMMONITOR_UPDATE = "systemmonitor_update"

SENSORS_NO_ARG = ("load_", "memory_", "processor_use", "swap_", "last_boot")
SENSORS_NO_ARG = (
    "last_boot",
    "load_",
    "memory_",
    "processor_use",
    "swap_",
)
SENSORS_WITH_ARG = {
    "disk_": "disk_arguments",
    "ipv": "network_arguments",
    **dict.fromkeys(NET_IO_TYPES, "network_arguments"),
    "process_num_fds": "processes",
    **dict.fromkeys(NET_IO_TYPES, "network_arguments"),
}


@@ -152,11 +158,13 @@ SENSOR_TYPES: dict[str, SysMonitorSensorEntityDescription] = {
        native_unit_of_measurement=UnitOfInformation.GIBIBYTES,
        device_class=SensorDeviceClass.DATA_SIZE,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda entity: round(
            entity.coordinator.data.disk_usage[entity.argument].free / 1024**3, 1
        )
        if entity.argument in entity.coordinator.data.disk_usage
        else None,
        value_fn=(
            lambda entity: round(
                entity.coordinator.data.disk_usage[entity.argument].free / 1024**3, 1
            )
            if entity.argument in entity.coordinator.data.disk_usage
            else None
        ),
        none_is_unavailable=True,
        add_to_update=lambda entity: ("disks", entity.argument),
    ),
@@ -167,11 +175,13 @@ SENSOR_TYPES: dict[str, SysMonitorSensorEntityDescription] = {
        native_unit_of_measurement=UnitOfInformation.GIBIBYTES,
        device_class=SensorDeviceClass.DATA_SIZE,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda entity: round(
            entity.coordinator.data.disk_usage[entity.argument].used / 1024**3, 1
        )
        if entity.argument in entity.coordinator.data.disk_usage
        else None,
        value_fn=(
            lambda entity: round(
                entity.coordinator.data.disk_usage[entity.argument].used / 1024**3, 1
            )
            if entity.argument in entity.coordinator.data.disk_usage
            else None
        ),
        none_is_unavailable=True,
        add_to_update=lambda entity: ("disks", entity.argument),
    ),
@@ -181,11 +191,11 @@ SENSOR_TYPES: dict[str, SysMonitorSensorEntityDescription] = {
        placeholder="mount_point",
        native_unit_of_measurement=PERCENTAGE,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda entity: entity.coordinator.data.disk_usage[
            entity.argument
        ].percent
        if entity.argument in entity.coordinator.data.disk_usage
        else None,
        value_fn=(
            lambda entity: entity.coordinator.data.disk_usage[entity.argument].percent
            if entity.argument in entity.coordinator.data.disk_usage
            else None
        ),
        none_is_unavailable=True,
        add_to_update=lambda entity: ("disks", entity.argument),
    ),
@@ -212,14 +222,6 @@ SENSOR_TYPES: dict[str, SysMonitorSensorEntityDescription] = {
        value_fn=lambda entity: entity.coordinator.data.boot_time,
        add_to_update=lambda entity: ("boot", ""),
    ),
    "load_15m": SysMonitorSensorEntityDescription(
        key="load_15m",
        translation_key="load_15m",
        icon=get_cpu_icon(),
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda entity: round(entity.coordinator.data.load[2], 2),
        add_to_update=lambda entity: ("load", ""),
    ),
    "load_1m": SysMonitorSensorEntityDescription(
        key="load_1m",
        translation_key="load_1m",
@@ -236,6 +238,14 @@ SENSOR_TYPES: dict[str, SysMonitorSensorEntityDescription] = {
        value_fn=lambda entity: round(entity.coordinator.data.load[1], 2),
        add_to_update=lambda entity: ("load", ""),
    ),
    "load_15m": SysMonitorSensorEntityDescription(
        key="load_15m",
        translation_key="load_15m",
        icon=get_cpu_icon(),
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda entity: round(entity.coordinator.data.load[2], 2),
        add_to_update=lambda entity: ("load", ""),
    ),
    "memory_free": SysMonitorSensorEntityDescription(
        key="memory_free",
        translation_key="memory_free",
@@ -253,13 +263,15 @@ SENSOR_TYPES: dict[str, SysMonitorSensorEntityDescription] = {
        native_unit_of_measurement=UnitOfInformation.MEBIBYTES,
        device_class=SensorDeviceClass.DATA_SIZE,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda entity: round(
            (
                entity.coordinator.data.memory.total
                - entity.coordinator.data.memory.available
        value_fn=(
            lambda entity: round(
                (
                    entity.coordinator.data.memory.total
                    - entity.coordinator.data.memory.available
                )
                / 1024**2,
                1,
            )
            / 1024**2,
            1,
        ),
        add_to_update=lambda entity: ("memory", ""),
    ),
@@ -311,27 +323,15 @@ SENSOR_TYPES: dict[str, SysMonitorSensorEntityDescription] = {
        value_fn=get_packets,
        add_to_update=lambda entity: ("io_counters", ""),
    ),
    "throughput_network_in": SysMonitorSensorEntityDescription(
        key="throughput_network_in",
        translation_key="throughput_network_in",
        placeholder="interface",
        native_unit_of_measurement=UnitOfDataRate.MEGABYTES_PER_SECOND,
        device_class=SensorDeviceClass.DATA_RATE,
    "process_num_fds": SysMonitorSensorEntityDescription(
        key="process_num_fds",
        translation_key="process_num_fds",
        placeholder="process",
        state_class=SensorStateClass.MEASUREMENT,
        entity_registry_enabled_default=False,
        mandatory_arg=True,
        value_fn=get_throughput,
        add_to_update=lambda entity: ("io_counters", ""),
    ),
    "throughput_network_out": SysMonitorSensorEntityDescription(
        key="throughput_network_out",
        translation_key="throughput_network_out",
        placeholder="interface",
        native_unit_of_measurement=UnitOfDataRate.MEGABYTES_PER_SECOND,
        device_class=SensorDeviceClass.DATA_RATE,
        state_class=SensorStateClass.MEASUREMENT,
        mandatory_arg=True,
        value_fn=get_throughput,
        add_to_update=lambda entity: ("io_counters", ""),
        value_fn=get_process_num_fds,
        add_to_update=lambda entity: ("processes", ""),
    ),
    "processor_use": SysMonitorSensorEntityDescription(
        key="processor_use",
@@ -339,10 +339,12 @@ SENSOR_TYPES: dict[str, SysMonitorSensorEntityDescription] = {
        native_unit_of_measurement=PERCENTAGE,
        icon=get_cpu_icon(),
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda entity: (
            round(entity.coordinator.data.cpu_percent)
            if entity.coordinator.data.cpu_percent
            else None
        value_fn=(
            lambda entity: (
                round(entity.coordinator.data.cpu_percent)
                if entity.coordinator.data.cpu_percent
                else None
            )
        ),
        add_to_update=lambda entity: ("cpu_percent", ""),
    ),
@@ -352,8 +354,8 @@ SENSOR_TYPES: dict[str, SysMonitorSensorEntityDescription] = {
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        device_class=SensorDeviceClass.TEMPERATURE,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda entity: read_cpu_temperature(
            entity.coordinator.data.temperatures
        value_fn=(
            lambda entity: read_cpu_temperature(entity.coordinator.data.temperatures)
        ),
        none_is_unavailable=True,
        add_to_update=lambda entity: ("temperatures", ""),
@@ -384,15 +386,27 @@ SENSOR_TYPES: dict[str, SysMonitorSensorEntityDescription] = {
        value_fn=lambda entity: entity.coordinator.data.swap.percent,
        add_to_update=lambda entity: ("swap", ""),
    ),
    "process_num_fds": SysMonitorSensorEntityDescription(
        key="process_num_fds",
        translation_key="process_num_fds",
        placeholder="process",
    "throughput_network_in": SysMonitorSensorEntityDescription(
        key="throughput_network_in",
        translation_key="throughput_network_in",
        placeholder="interface",
        native_unit_of_measurement=UnitOfDataRate.MEGABYTES_PER_SECOND,
        device_class=SensorDeviceClass.DATA_RATE,
        state_class=SensorStateClass.MEASUREMENT,
        entity_registry_enabled_default=False,
        mandatory_arg=True,
        value_fn=get_process_num_fds,
        add_to_update=lambda entity: ("processes", ""),
        value_fn=get_throughput,
        add_to_update=lambda entity: ("io_counters", ""),
    ),
    "throughput_network_out": SysMonitorSensorEntityDescription(
        key="throughput_network_out",
        translation_key="throughput_network_out",
        placeholder="interface",
        native_unit_of_measurement=UnitOfDataRate.MEGABYTES_PER_SECOND,
        device_class=SensorDeviceClass.DATA_RATE,
        state_class=SensorStateClass.MEASUREMENT,
        mandatory_arg=True,
        value_fn=get_throughput,
        add_to_update=lambda entity: ("io_counters", ""),
    ),
}

@@ -409,14 +423,17 @@ def check_legacy_resource(resource: str, resources: set[str]) -> bool:


IO_COUNTER = {
    "network_out": 0,
    "network_in": 1,
    "packets_out": 2,
    "network_out": 0,
    "packets_in": 3,
    "throughput_network_out": 0,
    "packets_out": 2,
    "throughput_network_in": 1,
    "throughput_network_out": 0,
}
IF_ADDRS_FAMILY = {
    "ipv4_address": socket.AF_INET,
    "ipv6_address": socket.AF_INET6,
}
IF_ADDRS_FAMILY = {"ipv4_address": socket.AF_INET, "ipv6_address": socket.AF_INET6}


async def async_setup_entry(

@@ -7,7 +7,6 @@ from dataclasses import dataclass
from typing import Any

from tuya_sharing import CustomerDevice, Manager
from tuya_sharing.device import DeviceStatusRange

from homeassistant.components.sensor import (
    DEVICE_CLASS_UNITS as SENSOR_DEVICE_CLASS_UNITS,
@@ -40,7 +39,6 @@ from .const import (
    DeviceCategory,
    DPCode,
    DPType,
    UnitOfMeasurement,
)
from .entity import TuyaEntity
from .models import ComplexValue, ElectricityValue, EnumTypeData, IntegerTypeData
@@ -1670,10 +1668,8 @@ class TuyaSensorEntity(TuyaEntity, SensorEntity):

    entity_description: TuyaSensorEntityDescription

    _status_range: DeviceStatusRange | None = None
    _type: DPType | None = None
    _type_data: IntegerTypeData | EnumTypeData | None = None
    _uom: UnitOfMeasurement | None = None

    def __init__(
        self,
@@ -1701,12 +1697,17 @@ class TuyaSensorEntity(TuyaEntity, SensorEntity):
        else:
            self._type = get_dptype(self.device, DPCode(description.key))

        self._validate_device_class_unit()

    def _validate_device_class_unit(self) -> None:
        """Validate device class unit compatibility."""

        # Logic to ensure the set device class and API received Unit Of Measurement
        # match Home Assistant's requirements.
        if (
            self.device_class is not None
            and not self.device_class.startswith(DOMAIN)
            and description.native_unit_of_measurement is None
            and self.entity_description.native_unit_of_measurement is None
            # we do not need to check mappings if the API UOM is allowed
            and self.native_unit_of_measurement
            not in SENSOR_DEVICE_CLASS_UNITS[self.device_class]

@@ -3,6 +3,7 @@
from __future__ import annotations

from collections.abc import Sequence
import dataclasses
import fnmatch
import os

@@ -29,15 +30,6 @@ def usb_device_from_port(port: ListPortInfo) -> USBDevice:

def scan_serial_ports() -> Sequence[USBDevice]:
    """Scan serial ports for USB devices."""
    return [
        usb_device_from_port(port)
        for port in comports()
        if port.vid is not None or port.pid is not None
    ]


def usb_device_from_path(device_path: str) -> USBDevice | None:
    """Get USB device info from a device path."""

    # Scan all symlinks first
    by_id = "/dev/serial/by-id"
@@ -46,23 +38,30 @@ def usb_device_from_path(device_path: str) -> USBDevice | None:
    for path in (entry.path for entry in os.scandir(by_id) if entry.is_symlink()):
        realpath_to_by_id[os.path.realpath(path)] = path

    # Then compare the actual path to each serial port's
    serial_ports = []

    for port in comports():
        if port.vid is not None or port.pid is not None:
            usb_device = usb_device_from_port(port)
            device_path = realpath_to_by_id.get(port.device, port.device)

            if device_path != port.device:
                # Prefer the unique /dev/serial/by-id/ path if it exists
                usb_device = dataclasses.replace(usb_device, device=device_path)

            serial_ports.append(usb_device)

    return serial_ports


def usb_device_from_path(device_path: str) -> USBDevice | None:
    """Get USB device info from a device path."""

    device_path_real = os.path.realpath(device_path)

    for device in scan_serial_ports():
        normalized_path = realpath_to_by_id.get(device.device, device.device)
        if (
            normalized_path == device_path
            or os.path.realpath(device.device) == device_path_real
        ):
            return USBDevice(
                device=normalized_path,
                vid=device.vid,
                pid=device.pid,
                serial_number=device.serial_number,
                manufacturer=device.manufacturer,
                description=device.description,
            )
        if os.path.realpath(device.device) == device_path_real:
            return device

    return None
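The net effect of the rework above is that callers get back a USBDevice whose device attribute prefers the stable /dev/serial/by-id/ alias when one exists. A minimal usage sketch (the device path is an assumption):

# Sketch: resolving a raw tty path to its stable by-id alias, if any.
device = usb_device_from_path("/dev/ttyUSB0")  # path is an assumption
if device is not None:
    print(device.device)  # may be a /dev/serial/by-id/... path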
@@ -18,6 +18,10 @@
      "data": {
        "host": "[%key:common::config_flow::data::host%]",
        "password": "[%key:common::config_flow::data::password%]"
      },
      "data_description": {
        "host": "The hostname or IP address of your KLF200 gateway.",
        "password": "The password for your KLF200 gateway."
      }
    }
  }

@@ -25,6 +25,7 @@ PLATFORMS = [
    Platform.SELECT,
    Platform.SENSOR,
    Platform.SWITCH,
    Platform.UPDATE,
]

_LOGGER = logging.getLogger(__name__)

homeassistant/components/vesync/update.py (68 lines, new file)
@@ -0,0 +1,68 @@
"""Update entity for VeSync."""

from pyvesync.base_devices.vesyncbasedevice import VeSyncBaseDevice

from homeassistant.components.update import UpdateDeviceClass, UpdateEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import DOMAIN, VS_COORDINATOR, VS_DEVICES, VS_DISCOVERY, VS_MANAGER
from .coordinator import VeSyncDataCoordinator
from .entity import VeSyncBaseEntity


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up update entity."""
    coordinator = hass.data[DOMAIN][VS_COORDINATOR]

    @callback
    def discover(devices):
        """Add new devices to platform."""
        _setup_entities(devices, async_add_entities, coordinator)

    config_entry.async_on_unload(
        async_dispatcher_connect(hass, VS_DISCOVERY.format(VS_DEVICES), discover)
    )

    _setup_entities(
        hass.data[DOMAIN][VS_MANAGER].devices, async_add_entities, coordinator
    )


@callback
def _setup_entities(
    devices: list[VeSyncBaseDevice],
    async_add_entities: AddConfigEntryEntitiesCallback,
    coordinator: VeSyncDataCoordinator,
) -> None:
    """Add update entities for the given devices."""

    async_add_entities(
        VeSyncDeviceUpdate(
            device=device,
            coordinator=coordinator,
        )
        for device in devices
    )


class VeSyncDeviceUpdate(VeSyncBaseEntity, UpdateEntity):
    """Representation of a VeSync device update entity."""

    _attr_device_class = UpdateDeviceClass.FIRMWARE

    @property
    def installed_version(self) -> str | None:
        """Return the installed firmware version."""
        return self.device.current_firm_version

    @property
    def latest_version(self) -> str | None:
        """Return the latest available firmware version."""
        return self.device.latest_firm_version
@@ -7,5 +7,5 @@
  "iot_class": "local_polling",
  "loggers": ["holidays"],
  "quality_scale": "internal",
  "requirements": ["holidays==0.83"]
  "requirements": ["holidays==0.84"]
}

@@ -17,6 +17,7 @@ from homeassistant.components.homeassistant_hardware.helpers import (
    async_notify_firmware_info,
    async_register_firmware_info_provider,
)
from homeassistant.components.usb import usb_device_from_path
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
    CONF_TYPE,
@@ -134,6 +135,21 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b

    Will automatically load components to support devices found on the network.
    """

    # Try to perform an in-place migration if we detect that the device path can be made
    # unique
    device_path = config_entry.data[CONF_DEVICE][CONF_DEVICE_PATH]
    usb_device = await hass.async_add_executor_job(usb_device_from_path, device_path)

    if usb_device is not None and device_path != usb_device.device:
        _LOGGER.info(
            "Migrating ZHA device path from %s to %s", device_path, usb_device.device
        )
        new_data = {**config_entry.data}
        new_data[CONF_DEVICE][CONF_DEVICE_PATH] = usb_device.device
        hass.config_entries.async_update_entry(config_entry, data=new_data)
        device_path = usb_device.device

    ha_zha_data: HAZHAData = get_zha_data(hass)
    ha_zha_data.config_entry = config_entry
    zha_lib_data: ZHAData = create_zha_config(hass, ha_zha_data)
@@ -163,7 +179,6 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
    _LOGGER.debug("Trigger cache: %s", zha_lib_data.device_trigger_cache)

    # Check if firmware update is in progress for this device
    device_path = config_entry.data[CONF_DEVICE][CONF_DEVICE_PATH]
    _raise_if_port_in_use(hass, device_path)

    try:

@@ -3,14 +3,14 @@
from __future__ import annotations

from abc import abstractmethod
import asyncio
import collections
from contextlib import suppress
from enum import StrEnum
import json
import os
from typing import Any

import serial.tools.list_ports
from serial.tools.list_ports_common import ListPortInfo
import voluptuous as vol
from zha.application.const import RadioType
import zigpy.backups
@@ -25,6 +25,7 @@ from homeassistant.components.homeassistant_hardware.firmware_config_flow import
    ZigbeeFlowStrategy,
)
from homeassistant.components.homeassistant_yellow import hardware as yellow_hardware
from homeassistant.components.usb import USBDevice, scan_serial_ports
from homeassistant.config_entries import (
    SOURCE_IGNORE,
    SOURCE_ZEROCONF,
@@ -38,7 +39,7 @@ from homeassistant.config_entries import (
)
from homeassistant.const import CONF_NAME
from homeassistant.core import HomeAssistant, callback
from homeassistant.data_entry_flow import AbortFlow
from homeassistant.data_entry_flow import AbortFlow, progress_step
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.hassio import is_hassio
from homeassistant.helpers.selector import FileSelector, FileSelectorConfig
@@ -124,10 +125,10 @@ def _format_backup_choice(
    return f"{dt_util.as_local(backup.backup_time).strftime('%c')} ({identifier})"


async def list_serial_ports(hass: HomeAssistant) -> list[ListPortInfo]:
async def list_serial_ports(hass: HomeAssistant) -> list[USBDevice]:
    """List all serial ports, including the Yellow radio and the multi-PAN addon."""
    ports: list[ListPortInfo] = []
    ports.extend(await hass.async_add_executor_job(serial.tools.list_ports.comports))
    ports: list[USBDevice] = []
    ports.extend(await hass.async_add_executor_job(scan_serial_ports))

    # Add useful info to the Yellow's serial port selection screen
    try:
@@ -137,9 +138,14 @@ async def list_serial_ports(hass: HomeAssistant) -> list[ListPortInfo]:
    else:
        # PySerial does not properly handle the Yellow's serial port with the CM5
        # so we manually include it
        port = ListPortInfo(device="/dev/ttyAMA1", skip_link_detection=True)
        port.description = "Yellow Zigbee module"
        port.manufacturer = "Nabu Casa"
        port = USBDevice(
            device="/dev/ttyAMA1",
            vid="ffff",  # This is technically not a USB device
            pid="ffff",
            serial_number=None,
            manufacturer="Nabu Casa",
            description="Yellow Zigbee module",
        )

        ports = [p for p in ports if not p.device.startswith("/dev/ttyAMA")]
        ports.insert(0, port)
@@ -156,13 +162,15 @@ async def list_serial_ports(hass: HomeAssistant) -> list[ListPortInfo]:
        addon_info = None

    if addon_info is not None and addon_info.state != AddonState.NOT_INSTALLED:
        addon_port = ListPortInfo(
        addon_port = USBDevice(
            device=silabs_multiprotocol_addon.get_zigbee_socket(),
            skip_link_detection=True,
            vid="ffff",  # This is technically not a USB device
            pid="ffff",
            serial_number=None,
            manufacturer="Nabu Casa",
            description="Silicon Labs Multiprotocol add-on",
        )

        addon_port.description = "Multiprotocol add-on"
        addon_port.manufacturer = "Nabu Casa"
        ports.append(addon_port)

    return ports
@@ -172,6 +180,7 @@ class BaseZhaFlow(ConfigEntryBaseFlow):
    """Mixin for common ZHA flow steps and forms."""

    _flow_strategy: ZigbeeFlowStrategy | None = None
    _overwrite_ieee_during_restore: bool = False
    _hass: HomeAssistant
    _title: str

@@ -181,6 +190,7 @@ class BaseZhaFlow(ConfigEntryBaseFlow):

        self._hass = None  # type: ignore[assignment]
        self._radio_mgr = ZhaRadioManager()
        self._restore_backup_task: asyncio.Task[None] | None = None

    @property
    def hass(self) -> HomeAssistant:
@@ -218,8 +228,15 @@ class BaseZhaFlow(ConfigEntryBaseFlow):
    ) -> ConfigFlowResult:
        """Choose a serial port."""
        ports = await list_serial_ports(self.hass)

        # The full `/dev/serial/by-id/` path is too verbose to show
        resolved_paths = {
            p.device: await self.hass.async_add_executor_job(os.path.realpath, p.device)
            for p in ports
        }

        list_of_ports = [
            f"{p}{', s/n: ' + p.serial_number if p.serial_number else ''}"
            f"{resolved_paths[p.device]} - {p.description}{', s/n: ' + p.serial_number if p.serial_number else ''}"
            + (f" - {p.manufacturer}" if p.manufacturer else "")
            for p in ports
        ]

@@ -446,6 +463,7 @@ class BaseZhaFlow(ConfigEntryBaseFlow):
            self._radio_mgr.chosen_backup = self._radio_mgr.backups[0]
            return await self.async_step_maybe_reset_old_radio()

    @progress_step()
    async def async_step_maybe_reset_old_radio(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
@@ -473,9 +491,69 @@ class BaseZhaFlow(ConfigEntryBaseFlow):
        temp_radio_mgr.device_settings = config_entry.data[CONF_DEVICE]
        temp_radio_mgr.radio_type = RadioType[config_entry.data[CONF_RADIO_TYPE]]

        await temp_radio_mgr.async_reset_adapter()
        try:
            await temp_radio_mgr.async_reset_adapter()
        except HomeAssistantError:
            # Old adapter not found or cannot connect, show prompt to plug back in
            return await self.async_step_plug_in_old_radio()

        return await self.async_step_maybe_confirm_ezsp_restore()
        return await self.async_step_restore_backup()

    async def async_step_plug_in_old_radio(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Prompt user to plug in the old radio if connection fails."""
        config_entries = self.hass.config_entries.async_entries(
            DOMAIN, include_ignore=False
        )

        # Unless the user removes the config entry whilst we try to reset the old radio
        # for a few seconds and then also unplugs it, we will basically never hit this
        if not config_entries:
            return await self.async_step_restore_backup()

        config_entry = config_entries[0]
        old_device_path = config_entry.data[CONF_DEVICE][CONF_DEVICE_PATH]

        return self.async_show_menu(
            step_id="plug_in_old_radio",
            menu_options=["retry_old_radio", "skip_reset_old_radio"],
            description_placeholders={"device_path": old_device_path},
        )

    async def async_step_retry_old_radio(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Retry connecting to the old radio."""
        return await self.async_step_maybe_reset_old_radio()

    async def async_step_skip_reset_old_radio(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Skip resetting the old radio and continue with migration."""
        return await self.async_step_restore_backup()

    async def async_step_pre_plug_in_new_radio(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Strip user_input before showing "plug in new radio" form."""
        # This step is necessary to prevent `user_input` from being passed through
        return await self.async_step_plug_in_new_radio()

    async def async_step_plug_in_new_radio(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Prompt user to plug in the new radio if connection fails."""
        if user_input is not None:
            # User confirmed, retry now
            return await self.async_step_restore_backup()

        assert self._radio_mgr.device_path is not None

        return self.async_show_form(
            step_id="plug_in_new_radio",
            description_placeholders={"device_path": self._radio_mgr.device_path},
        )

    async def async_step_migration_strategy_advanced(
        self, user_input: dict[str, Any] | None = None
@@ -539,6 +617,7 @@ class BaseZhaFlow(ConfigEntryBaseFlow):
        # This step exists only for translations, it does nothing new
        return await self.async_step_form_new_network(user_input)

    @progress_step()
    async def async_step_form_new_network(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
@@ -624,47 +703,78 @@ class BaseZhaFlow(ConfigEntryBaseFlow):
        ),
    )

    async def async_step_maybe_confirm_ezsp_restore(
    async def async_step_restore_backup(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm restore for EZSP radios that require permanent IEEE writes."""
        if user_input is not None:
            if user_input[OVERWRITE_COORDINATOR_IEEE]:
                # On confirmation, overwrite destructively
                try:
                    await self._radio_mgr.restore_backup(overwrite_ieee=True)
                except CannotWriteNetworkSettings as exc:
                    return self.async_abort(
                        reason="cannot_restore_backup",
                        description_placeholders={"error": str(exc)},
                    )
        """Restore network backup to new radio."""
        if self._restore_backup_task is None:
            self._restore_backup_task = self.hass.async_create_task(
                self._radio_mgr.restore_backup(
                    overwrite_ieee=self._overwrite_ieee_during_restore
                ),
                "Restore backup",
            )

            return await self._async_create_radio_entry()
        if not self._restore_backup_task.done():
            return self.async_show_progress(
                step_id="restore_backup",
                progress_action="restore_backup",
                progress_task=self._restore_backup_task,
            )

        # On rejection, explain why we can't restore
        return self.async_abort(reason="cannot_restore_backup_no_ieee_confirm")

        # On first attempt, just try to restore nondestructively
        try:
            await self._radio_mgr.restore_backup()
            await self._restore_backup_task
        except DestructiveWriteNetworkSettings:
            # Restore cannot happen automatically, we need to ask for permission
            pass
            # If we cannot restore without overwriting the IEEE, ask for confirmation
            return self.async_show_progress_done(
                next_step_id="pre_confirm_ezsp_ieee_overwrite"
            )
        except HomeAssistantError:
            # User unplugged the new adapter, allow retry
            return self.async_show_progress_done(next_step_id="pre_plug_in_new_radio")
        except CannotWriteNetworkSettings as exc:
            return self.async_abort(
                reason="cannot_restore_backup",
                description_placeholders={"error": str(exc)},
            )
        else:
            return await self._async_create_radio_entry()
        finally:
            self._restore_backup_task = None

        # If it fails, show the form
        return self.async_show_form(
            step_id="maybe_confirm_ezsp_restore",
            data_schema=vol.Schema(
                {vol.Required(OVERWRITE_COORDINATOR_IEEE, default=True): bool}
            ),
        )
        # Otherwise, proceed to entry creation
        return self.async_show_progress_done(next_step_id="create_entry")

    async def async_step_pre_confirm_ezsp_ieee_overwrite(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Strip user_input before showing confirmation form."""
        # This step is necessary to prevent `user_input` from being passed through
        return await self.async_step_confirm_ezsp_ieee_overwrite()

    async def async_step_confirm_ezsp_ieee_overwrite(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Show confirmation form for EZSP IEEE address overwrite."""
        if user_input is None:
            return self.async_show_form(
                step_id="confirm_ezsp_ieee_overwrite",
                data_schema=vol.Schema(
                    {vol.Required(OVERWRITE_COORDINATOR_IEEE, default=True): bool}
                ),
            )

        if not user_input[OVERWRITE_COORDINATOR_IEEE]:
            return self.async_abort(reason="cannot_restore_backup_no_ieee_confirm")

        self._overwrite_ieee_during_restore = True
        return await self.async_step_restore_backup()

    async def async_step_create_entry(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Create the config entry after successful setup/migration."""

        # This step only exists so that we can create entries from other steps
        return await self._async_create_radio_entry()
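The restore step above follows Home Assistant's show-progress pattern: create the long-running task once, keep returning async_show_progress while it runs, then route onward with async_show_progress_done. A condensed sketch of the pattern (step and attribute names here are assumptions, not names from this diff):

# Sketch of the show_progress flow-step pattern used by async_step_restore_backup.
if self._task is None:  # attribute name is an assumption
    self._task = self.hass.async_create_task(self._do_work())
if not self._task.done():
    return self.async_show_progress(
        step_id="my_step",
        progress_action="my_step",
        progress_task=self._task,
    )
return self.async_show_progress_done(next_step_id="next_step")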
class ZhaConfigFlowHandler(BaseZhaFlow, ConfigFlow, domain=DOMAIN):
@@ -1018,7 +1128,7 @@ class ZhaOptionsFlowHandler(BaseZhaFlow, OptionsFlow):

        # If we are reconfiguring, the old radio will not be available
        if self._migration_intent is OptionsMigrationIntent.RECONFIGURE:
            return await self.async_step_maybe_confirm_ezsp_restore()
            return await self.async_step_restore_backup()

        return await super().async_step_maybe_reset_old_radio(user_input)

@@ -16,6 +11,
      "invalid_backup_json": "Invalid backup JSON"
    },
    "flow_title": "{name}",
    "progress": {
      "form_new_network": "Forming a new Zigbee network.\n\nWe scan for a clear network channel as part of this process, this can take a minute.",
      "maybe_reset_old_radio": "Resetting old adapter.\n\nYour old adapter has been backed up and is being factory reset.",
      "restore_backup": "Restoring network settings to new adapter.\n\nThis will take a minute."
    },
    "step": {
      "choose_automatic_backup": {
        "data": {
@@ -76,8 +81,15 @@
      "confirm": {
        "description": "Do you want to set up {name}?"
      },
      "confirm_hardware": {
        "description": "Do you want to set up {name}?"
      },
      "confirm_ezsp_ieee_overwrite": {
        "data": {
          "overwrite_coordinator_ieee": "Permanently replace the adapter IEEE address"
        },
        "description": "Your backup has a different IEEE address than your adapter. For your network to function properly, the IEEE address of your adapter should also be changed.\n\nThis is a permanent operation.",
        "title": "Overwrite adapter IEEE address"
      },
      "form_new_network": {
        "title": "Forming new network"
      },
      "manual_pick_radio_type": {
        "data": {
@@ -100,17 +112,28 @@
        "description": "ZHA was not able to automatically detect serial port settings for your adapter. This usually is an issue with the firmware or permissions.\n\nIf you are using firmware with nonstandard settings, enter the serial port settings",
        "title": "Serial port settings"
      },
      "maybe_confirm_ezsp_restore": {
        "data": {
          "overwrite_coordinator_ieee": "Permanently replace the adapter IEEE address"
        },
        "description": "Your backup has a different IEEE address than your adapter. For your network to function properly, the IEEE address of your adapter should also be changed.\n\nThis is a permanent operation.",
        "title": "Overwrite adapter IEEE address"
      },
      "maybe_reset_old_radio": {
        "description": "A backup was created earlier and your old adapter is being reset as part of the migration.",
        "title": "Resetting old adapter"
      },
      "plug_in_new_radio": {
        "description": "Your new adapter at `{device_path}` was not found.\nPlease plug it in and click Submit to continue.",
        "title": "New adapter not found"
      },
      "plug_in_old_radio": {
        "description": "Your old adapter at `{device_path}` was not found. You can retry after plugging it back in, or skip resetting the old adapter.\n\n**Warning:** If you skip resetting the old adapter, ensure it remains permanently disconnected. Plugging it back in later will cause network issues.",
        "menu_option_descriptions": {
          "retry_old_radio": "Retry connecting to the old adapter to reset it as part of the migration.",
          "skip_reset_old_radio": "Skip resetting the old adapter and continue with the migration."
        },
        "menu_options": {
          "retry_old_radio": "Retry",
          "skip_reset_old_radio": "Skip reset"
        },
        "title": "Old adapter not found"
      },
      "restore_backup": {
        "title": "Restoring network to new adapter"
      },
      "upload_manual_backup": {
        "data": {
          "uploaded_backup_file": "Upload a file"
@@ -1869,6 +1892,11 @@
      "invalid_backup_json": "[%key:component::zha::config::error::invalid_backup_json%]"
    },
    "flow_title": "[%key:component::zha::config::flow_title%]",
    "progress": {
      "form_new_network": "[%key:component::zha::config::progress::form_new_network%]",
      "maybe_reset_old_radio": "[%key:component::zha::config::progress::maybe_reset_old_radio%]",
      "restore_backup": "[%key:component::zha::config::progress::restore_backup%]"
    },
    "step": {
      "choose_automatic_backup": {
        "data": {
@@ -1914,6 +1942,16 @@
        "description": "[%key:component::zha::config::step::choose_serial_port::description%]",
        "title": "[%key:component::zha::config::step::choose_serial_port::title%]"
      },
      "confirm_ezsp_ieee_overwrite": {
        "data": {
          "overwrite_coordinator_ieee": "[%key:component::zha::config::step::confirm_ezsp_ieee_overwrite::data::overwrite_coordinator_ieee%]"
        },
        "description": "[%key:component::zha::config::step::confirm_ezsp_ieee_overwrite::description%]",
        "title": "[%key:component::zha::config::step::confirm_ezsp_ieee_overwrite::title%]"
      },
      "form_new_network": {
        "title": "[%key:component::zha::config::step::form_new_network::title%]"
      },
      "init": {
        "description": "A backup will be performed and ZHA will be stopped. Do you wish to continue?",
        "title": "Reconfigure ZHA"
@@ -1938,12 +1976,24 @@
        "description": "[%key:component::zha::config::step::manual_port_config::description%]",
        "title": "[%key:component::zha::config::step::manual_port_config::title%]"
      },
      "maybe_confirm_ezsp_restore": {
        "data": {
          "overwrite_coordinator_ieee": "[%key:component::zha::config::step::maybe_confirm_ezsp_restore::data::overwrite_coordinator_ieee%]"
        },
        "description": "[%key:component::zha::config::step::maybe_confirm_ezsp_restore::description%]",
        "title": "[%key:component::zha::config::step::maybe_confirm_ezsp_restore::title%]"
      },
      "maybe_reset_old_radio": {
        "title": "[%key:component::zha::config::step::maybe_reset_old_radio::title%]"
      },
      "plug_in_new_radio": {
        "description": "[%key:component::zha::config::step::plug_in_new_radio::description%]",
        "title": "[%key:component::zha::config::step::plug_in_new_radio::title%]"
      },
      "plug_in_old_radio": {
        "description": "[%key:component::zha::config::step::plug_in_old_radio::description%]",
        "menu_option_descriptions": {
          "retry_old_radio": "[%key:component::zha::config::step::plug_in_old_radio::menu_option_descriptions::retry_old_radio%]",
          "skip_reset_old_radio": "[%key:component::zha::config::step::plug_in_old_radio::menu_option_descriptions::skip_reset_old_radio%]"
        },
        "menu_options": {
          "retry_old_radio": "[%key:component::zha::config::step::plug_in_old_radio::menu_options::retry_old_radio%]",
          "skip_reset_old_radio": "[%key:component::zha::config::step::plug_in_old_radio::menu_options::skip_reset_old_radio%]"
        },
        "title": "[%key:component::zha::config::step::plug_in_old_radio::title%]"
      },
      "prompt_migrate_or_reconfigure": {
        "description": "Are you migrating to a new adapter or re-configuring the current adapter?",
@@ -1957,6 +2007,9 @@
        },
        "title": "Migrate or re-configure"
      },
      "restore_backup": {
        "title": "[%key:component::zha::config::step::restore_backup::title%]"
      },
      "upload_manual_backup": {
        "data": {
          "uploaded_backup_file": "[%key:component::zha::config::step::upload_manual_backup::data::uploaded_backup_file%]"
@@ -5749,8 +5749,7 @@
    "name": "Satel Integra",
    "integration_type": "device",
    "config_flow": true,
    "iot_class": "local_push",
    "single_config_entry": true
    "iot_class": "local_push"
  },
  "schlage": {
    "name": "Schlage",
@@ -806,9 +806,6 @@ async def async_get_all_descriptions(

        description = {"fields": yaml_description.get("fields", {})}

        if (target := yaml_description.get("target")) is not None:
            description["target"] = target

        new_descriptions_cache[missing_trigger] = description

    hass.data[TRIGGER_DESCRIPTION_CACHE] = new_descriptions_cache
@@ -39,7 +39,7 @@ habluetooth==5.7.0
hass-nabucasa==1.5.1
hassil==3.4.0
home-assistant-bluetooth==1.13.1
home-assistant-frontend==20251103.0
home-assistant-frontend==20251104.0
home-assistant-intents==2025.10.28
httpx==0.28.1
ifaddr==0.2.0
@@ -38,6 +38,7 @@ from homeassistant.const import (
    UnitOfVolumetricFlux,
)
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.deprecation import deprecated_function

# Distance conversion constants
_MM_TO_M = 0.001  # 1 mm = 0.001 m
@@ -707,6 +708,9 @@ class TemperatureConverter(BaseUnitConverter):
        )

    @classmethod
    @deprecated_function(
        "TemperatureDeltaConverter.convert", breaks_in_ha_version="2026.12.0"
    )
    def convert_interval(cls, interval: float, from_unit: str, to_unit: str) -> float:
        """Convert a temperature interval from one unit to another.
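The deprecation above separates two conversions that are easy to mix up: an absolute temperature carries the scale offset, while a temperature interval (delta) only carries the scale factor. A minimal usage sketch with the public converters; the values in the comments are the expected results:

from homeassistant.const import UnitOfTemperature
from homeassistant.util.unit_conversion import (
    TemperatureConverter,
    TemperatureDeltaConverter,
)

# Absolute reading: the 32 degree Fahrenheit offset applies
TemperatureConverter.convert(
    10, UnitOfTemperature.CELSIUS, UnitOfTemperature.FAHRENHEIT
)  # -> 50.0

# Interval/delta: only the 9/5 scale factor applies
TemperatureDeltaConverter.convert(
    10, UnitOfTemperature.CELSIUS, UnitOfTemperature.FAHRENHEIT
)  # -> 18.0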
18
requirements_all.txt
generated
@@ -389,7 +389,7 @@ aioruuvigateway==0.1.0
aiosenz==1.0.0

# homeassistant.components.shelly
aioshelly==13.15.0
aioshelly==13.16.0

# homeassistant.components.skybell
aioskybell==22.7.0
@@ -1181,10 +1181,10 @@ hole==0.9.0

# homeassistant.components.holiday
# homeassistant.components.workday
holidays==0.83
holidays==0.84

# homeassistant.components.frontend
home-assistant-frontend==20251103.0
home-assistant-frontend==20251104.0

# homeassistant.components.conversation
home-assistant-intents==2025.10.28
@@ -1353,7 +1353,7 @@ lektricowifi==0.1
letpot==0.6.3

# homeassistant.components.foscam
libpyfoscamcgi==0.0.8
libpyfoscamcgi==0.0.9

# homeassistant.components.vivotek
libpyvivotek==0.6.1
@@ -1719,7 +1719,7 @@ plexauth==0.0.6
plexwebsocket==0.0.14

# homeassistant.components.plugwise
plugwise==1.8.2
plugwise==1.8.3

# homeassistant.components.serial_pm
pmsensor==0.4
@@ -2146,7 +2146,7 @@ pylibrespot-java==0.1.1
pylitejet==0.6.3

# homeassistant.components.litterrobot
pylitterbot==2024.2.7
pylitterbot==2025.0.0

# homeassistant.components.lutron_caseta
pylutron-caseta==0.25.0
@@ -2283,7 +2283,7 @@ pyplaato==0.0.19
pypoint==3.0.0

# homeassistant.components.portainer
pyportainer==1.0.12
pyportainer==1.0.13

# homeassistant.components.probe_plus
pyprobeplus==1.1.2
@@ -2525,7 +2525,7 @@ python-overseerr==0.7.1
python-picnic-api2==1.3.1

# homeassistant.components.pooldose
python-pooldose==0.7.0
python-pooldose==0.7.8

# homeassistant.components.rabbitair
python-rabbitair==0.0.8
@@ -2694,7 +2694,7 @@ renault-api==0.5.0
renson-endura-delta==1.7.2

# homeassistant.components.reolink
reolink-aio==0.16.3
reolink-aio==0.16.4

# homeassistant.components.idteck_prox
rfk101py==0.0.1
18
requirements_test_all.txt
generated
@@ -371,7 +371,7 @@ aioruuvigateway==0.1.0
aiosenz==1.0.0

# homeassistant.components.shelly
aioshelly==13.15.0
aioshelly==13.16.0

# homeassistant.components.skybell
aioskybell==22.7.0
@@ -1030,10 +1030,10 @@ hole==0.9.0

# homeassistant.components.holiday
# homeassistant.components.workday
holidays==0.83
holidays==0.84

# homeassistant.components.frontend
home-assistant-frontend==20251103.0
home-assistant-frontend==20251104.0

# homeassistant.components.conversation
home-assistant-intents==2025.10.28
@@ -1175,7 +1175,7 @@ lektricowifi==0.1
letpot==0.6.3

# homeassistant.components.foscam
libpyfoscamcgi==0.0.8
libpyfoscamcgi==0.0.9

# homeassistant.components.libre_hardware_monitor
librehardwaremonitor-api==1.5.0
@@ -1456,7 +1456,7 @@ plexauth==0.0.6
plexwebsocket==0.0.14

# homeassistant.components.plugwise
plugwise==1.8.2
plugwise==1.8.3

# homeassistant.components.poolsense
poolsense==0.0.8
@@ -1790,7 +1790,7 @@ pylibrespot-java==0.1.1
pylitejet==0.6.3

# homeassistant.components.litterrobot
pylitterbot==2024.2.7
pylitterbot==2025.0.0

# homeassistant.components.lutron_caseta
pylutron-caseta==0.25.0
@@ -1906,7 +1906,7 @@ pyplaato==0.0.19
pypoint==3.0.0

# homeassistant.components.portainer
pyportainer==1.0.12
pyportainer==1.0.13

# homeassistant.components.probe_plus
pyprobeplus==1.1.2
@@ -2094,7 +2094,7 @@ python-overseerr==0.7.1
python-picnic-api2==1.3.1

# homeassistant.components.pooldose
python-pooldose==0.7.0
python-pooldose==0.7.8

# homeassistant.components.rabbitair
python-rabbitair==0.0.8
@@ -2236,7 +2236,7 @@ renault-api==0.5.0
renson-endura-delta==1.7.2

# homeassistant.components.reolink
reolink-aio==0.16.3
reolink-aio==0.16.4

# homeassistant.components.rflink
rflink==0.0.67
@@ -94,6 +94,8 @@ FORBIDDEN_PACKAGES = {
    "async-timeout": "be replaced by asyncio.timeout (Python 3.11+)",
    # Only needed for tests
    "codecov": "not be a runtime dependency",
    # Only needed for docs
    "mkdocs": "not be a runtime dependency",
    # Does blocking I/O and should be replaced by pyserial-asyncio-fast
    # See https://github.com/home-assistant/core/pull/116635
    "pyserial-asyncio": "be replaced by pyserial-asyncio-fast",
@@ -101,6 +103,8 @@
    "pytest": "not be a runtime dependency",
    # Only needed for build
    "setuptools": "not be a runtime dependency",
    # Only needed for docs
    "sphinx": "not be a runtime dependency",
    # Only needed for build
    "wheel": "not be a runtime dependency",
}
@@ -98,3 +98,17 @@ async def test_migrate_entry(
    assert config_entry.minor_version == 2
    assert config_entry.data[SECTION_ADVANCED_SETTINGS][CONF_PORT] == 161
    assert config_entry.data[SECTION_ADVANCED_SETTINGS][CONF_COMMUNITY] == "public"


async def test_serial_mismatch(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_brother: AsyncMock,
    mock_brother_client: AsyncMock,
) -> None:
    """Test that a serial number mismatch on init puts the entry in setup error."""
    mock_brother_client.serial = "DIFFERENT_SERIAL"

    await init_integration(hass, mock_config_entry)

    assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR
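The test above only pins the outcome at the config-entry level: a device reporting a different serial than the one the entry was created for must land in SETUP_ERROR. A sketch of the kind of guard that produces that state; the helper name and message are assumptions, not the integration's actual code, but raising ConfigEntryError during setup is what maps to SETUP_ERROR:

from homeassistant.exceptions import ConfigEntryError


def check_serial(expected_serial: str, reported_serial: str) -> None:
    """Hypothetical guard: abort setup permanently on a serial mismatch."""
    if expected_serial != reported_serial:
        raise ConfigEntryError(
            f"Serial mismatch: expected {expected_serial}, got {reported_serial}"
        )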
@@ -547,6 +547,7 @@ def supervisor_client() -> Generator[AsyncMock]:
    supervisor_client.homeassistant = AsyncMock()
    supervisor_client.host = AsyncMock()
    supervisor_client.jobs = AsyncMock()
    supervisor_client.jobs.info.return_value = MagicMock()
    supervisor_client.mounts.info.return_value = mounts_info_mock
    supervisor_client.os = AsyncMock()
    supervisor_client.resolution = AsyncMock()
@@ -111,6 +111,53 @@ class CoolMasterNetMock:
        }


class CoolMasterNetErrorMock:
    """Mock for CoolMasterNet that simulates communication errors."""

    def __init__(self, *_args: Any, **kwargs: Any) -> None:
        """Initialize the CoolMasterNetErrorMock."""
        self._units = copy.deepcopy(TEST_UNITS)
        self._fail_count = 0

    async def info(self) -> dict[str, Any]:
        """Return info about the bridge device."""
        return DEFAULT_INFO

    async def status(self) -> dict[str, CoolMasterNetUnitMock]:
        """Return the units."""
        if self._fail_count > 0:
            self._fail_count -= 1
            raise OSError("Simulated communication error")
        return {
            unit_id: CoolMasterNetUnitMock(unit_id, attributes)
            for unit_id, attributes in self._units.items()
        }


class CoolMasterNetEmptyStatusMock:
    """Mock for CoolMasterNet that returns an empty status after the first call."""

    def __init__(self, *_args: Any, **kwargs: Any) -> None:
        """Initialize the CoolMasterNetEmptyStatusMock."""
        self._units = copy.deepcopy(TEST_UNITS)
        self._call_count = 0

    async def info(self) -> dict[str, Any]:
        """Return info about the bridge device."""
        return DEFAULT_INFO

    async def status(self) -> dict[str, CoolMasterNetUnitMock]:
        """Return the units."""
        self._call_count += 1
        if self._call_count == 1:
            return {
                unit_id: CoolMasterNetUnitMock(unit_id, attributes)
                for unit_id, attributes in self._units.items()
            }

        return {}


@pytest.fixture
async def load_int(hass: HomeAssistant) -> MockConfigEntry:
    """Set up the Coolmaster integration in Home Assistant."""
@@ -133,3 +180,51 @@ async def load_int(hass: HomeAssistant) -> MockConfigEntry:
    await hass.async_block_till_done()

    return config_entry


@pytest.fixture
async def config_entry_with_errors(hass: HomeAssistant) -> MockConfigEntry:
    """Set up the Coolmaster integration with a client that raises errors."""
    config_entry = MockConfigEntry(
        domain=DOMAIN,
        data={
            "host": "1.2.3.4",
            "port": 1234,
            "supported_modes": [HVACMode.OFF, HVACMode.COOL, HVACMode.HEAT],
        },
    )

    config_entry.add_to_hass(hass)

    with patch(
        "homeassistant.components.coolmaster.CoolMasterNet",
        new=CoolMasterNetErrorMock,
    ):
        await hass.config_entries.async_setup(config_entry.entry_id)
        await hass.async_block_till_done()

    return config_entry


@pytest.fixture
async def config_entry_with_empty_status(hass: HomeAssistant) -> MockConfigEntry:
    """Set up the Coolmaster integration with a client returning an empty status."""
    config_entry = MockConfigEntry(
        domain=DOMAIN,
        data={
            "host": "1.2.3.4",
            "port": 1234,
            "supported_modes": [HVACMode.OFF, HVACMode.COOL, HVACMode.HEAT],
        },
    )

    config_entry.add_to_hass(hass)

    with patch(
        "homeassistant.components.coolmaster.CoolMasterNet",
        new=CoolMasterNetEmptyStatusMock,
    ):
        await hass.config_entries.async_setup(config_entry.entry_id)
        await hass.async_block_till_done()

    return config_entry
@@ -2,8 +2,15 @@

from __future__ import annotations

import logging
from unittest.mock import patch

import pytest

from homeassistant.components.coolmaster.const import MAX_RETRIES
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_component import async_update_entity


async def test_sensor(
@@ -13,3 +20,119 @@ async def test_sensor(
    """Test the Coolmaster sensor."""
    assert hass.states.get("sensor.l1_100_error_code").state == "OK"
    assert hass.states.get("sensor.l1_101_error_code").state == "Err1"


async def test_retry_with_no_error(
    hass: HomeAssistant,
    config_entry_with_errors: ConfigEntry,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test without errors."""

    caplog.set_level(logging.DEBUG, logger="homeassistant.components.coolmaster")

    with patch(
        "tests.components.coolmaster.conftest.CoolMasterNetErrorMock.status",
        wraps=config_entry_with_errors.runtime_data._coolmaster.status,
    ) as mock_status:
        config_entry_with_errors.runtime_data._coolmaster._fail_count = 0
        await async_update_entity(hass, "sensor.l1_101_error_code")
        await hass.async_block_till_done()

    assert mock_status.call_count == 1
    debugs, errors = count_logs(caplog.records)
    assert debugs == 0
    assert errors == 0


@patch("homeassistant.components.coolmaster.coordinator.BACKOFF_BASE_DELAY", new=0)
async def test_retry_with_less_than_max_errors(
    hass: HomeAssistant,
    config_entry_with_errors: ConfigEntry,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test MAX_RETRIES-1 errors."""

    caplog.set_level(logging.DEBUG, logger="homeassistant.components.coolmaster")

    with patch(
        "tests.components.coolmaster.conftest.CoolMasterNetErrorMock.status",
        wraps=config_entry_with_errors.runtime_data._coolmaster.status,
    ) as mock_status:
        config_entry_with_errors.runtime_data._coolmaster._fail_count = MAX_RETRIES - 1
        await async_update_entity(hass, "sensor.l1_101_error_code")
        await hass.async_block_till_done()

    assert mock_status.call_count == MAX_RETRIES  # The last try succeeds
    debugs, errors = count_logs(caplog.records)
    assert errors == 0
    assert debugs == MAX_RETRIES - 1


@patch("homeassistant.components.coolmaster.coordinator.BACKOFF_BASE_DELAY", new=0)
async def test_retry_with_more_than_max_errors(
    hass: HomeAssistant,
    config_entry_with_errors: ConfigEntry,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test MAX_RETRIES+1 errors."""

    caplog.set_level(logging.DEBUG, logger="homeassistant.components.coolmaster")

    with patch(
        "tests.components.coolmaster.conftest.CoolMasterNetErrorMock.status",
        wraps=config_entry_with_errors.runtime_data._coolmaster.status,
    ) as mock_status:
        config_entry_with_errors.runtime_data._coolmaster._fail_count = MAX_RETRIES + 1
        await async_update_entity(hass, "sensor.l1_101_error_code")
        await hass.async_block_till_done()

    assert (
        mock_status.call_count == MAX_RETRIES
    )  # The retries are capped at MAX_RETRIES
    debugs, errors = count_logs(caplog.records)
    assert errors == 1
    assert debugs == MAX_RETRIES - 1


@patch("homeassistant.components.coolmaster.coordinator.BACKOFF_BASE_DELAY", new=0)
async def test_retry_with_empty_status(
    hass: HomeAssistant,
    config_entry_with_empty_status: ConfigEntry,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test empty status response."""

    caplog.set_level(logging.DEBUG, logger="homeassistant.components.coolmaster")

    with patch(
        "tests.components.coolmaster.conftest.CoolMasterNetEmptyStatusMock.status",
        wraps=config_entry_with_empty_status.runtime_data._coolmaster.status,
    ) as mock_status:
        await async_update_entity(hass, "sensor.l1_101_error_code")
        await hass.async_block_till_done()

    assert (
        mock_status.call_count == MAX_RETRIES
    )  # The retries are capped at MAX_RETRIES
    debugs, errors = count_logs(caplog.records)
    assert errors == 1
    assert debugs == MAX_RETRIES


def count_logs(log_records: list[logging.LogRecord]) -> tuple[int, int]:
    """Count the matching debug and error log records."""
    debug_logs = [
        rec
        for rec in log_records
        if rec.levelno == logging.DEBUG
        and "Error communicating with coolmaster" in rec.getMessage()
    ]

    error_logs = [
        rec
        for rec in log_records
        if rec.levelno == logging.ERROR
        and "Error fetching coolmaster data" in rec.getMessage()
    ]
    return len(debug_logs), len(error_logs)
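Taken together, these tests fix the coordinator contract: up to MAX_RETRIES attempts per refresh, exponential backoff seeded by BACKOFF_BASE_DELAY (patched to 0 above), a debug log per failed attempt, and a single error log once attempts are exhausted, with an empty status treated as a failure. A hedged sketch of a loop consistent with those counts; the real implementation lives in homeassistant/components/coolmaster/coordinator.py, and the constant values here are placeholders:

import asyncio
import logging

_LOGGER = logging.getLogger(__name__)

MAX_RETRIES = 4  # placeholder; the real value comes from coolmaster's const.py
BACKOFF_BASE_DELAY = 2  # placeholder; patched to 0 in the tests above


async def fetch_with_retries(coolmaster):
    """Poll status, retrying with exponential backoff on failure."""
    for attempt in range(MAX_RETRIES):
        try:
            if status := await coolmaster.status():
                return status
            raise OSError("Empty status")
        except OSError as err:
            if attempt + 1 == MAX_RETRIES:
                _LOGGER.error("Error fetching coolmaster data: %s", err)
                raise
            _LOGGER.debug("Error communicating with coolmaster: %s", err)
            await asyncio.sleep(BACKOFF_BASE_DELAY * 2**attempt)

Note that the empty-status test expects one more debug line than the plain-error case, so the actual code most likely also logs a debug message for the empty result on the final attempt; this sketch does not reproduce that detail.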
@@ -233,7 +233,7 @@ async def test_setup_api_ping(
    await hass.async_block_till_done()

    assert result
    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20
    assert get_core_info(hass)["version_latest"] == "1.0.0"
    assert is_hassio(hass)

@@ -280,7 +280,7 @@ async def test_setup_api_push_api_data(
    await hass.async_block_till_done()

    assert result
    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20
    assert not aioclient_mock.mock_calls[0][2]["ssl"]
    assert aioclient_mock.mock_calls[0][2]["port"] == 9999
    assert "watchdog" not in aioclient_mock.mock_calls[0][2]
@@ -301,7 +301,7 @@ async def test_setup_api_push_api_data_server_host(
    await hass.async_block_till_done()

    assert result
    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20
    assert not aioclient_mock.mock_calls[0][2]["ssl"]
    assert aioclient_mock.mock_calls[0][2]["port"] == 9999
    assert not aioclient_mock.mock_calls[0][2]["watchdog"]
@@ -322,7 +322,7 @@ async def test_setup_api_push_api_data_default(
    await hass.async_block_till_done()

    assert result
    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20
    assert not aioclient_mock.mock_calls[0][2]["ssl"]
    assert aioclient_mock.mock_calls[0][2]["port"] == 8123
    refresh_token = aioclient_mock.mock_calls[0][2]["refresh_token"]
@@ -403,7 +403,7 @@ async def test_setup_api_existing_hassio_user(
    await hass.async_block_till_done()

    assert result
    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20
    assert not aioclient_mock.mock_calls[0][2]["ssl"]
    assert aioclient_mock.mock_calls[0][2]["port"] == 8123
    assert aioclient_mock.mock_calls[0][2]["refresh_token"] == token.token
@@ -422,7 +422,7 @@ async def test_setup_core_push_config(
    await hass.async_block_till_done()

    assert result
    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20
    assert aioclient_mock.mock_calls[1][2]["timezone"] == "testzone"

    with patch("homeassistant.util.dt.set_default_time_zone"):
@@ -446,7 +446,7 @@ async def test_setup_hassio_no_additional_data(
    await hass.async_block_till_done()

    assert result
    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20
    assert aioclient_mock.mock_calls[-1][3]["Authorization"] == "Bearer 123456"


@@ -528,14 +528,14 @@ async def test_service_calls(
    )
    await hass.async_block_till_done()

    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 23
    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 24
    assert aioclient_mock.mock_calls[-1][2] == "test"

    await hass.services.async_call("hassio", "host_shutdown", {})
    await hass.services.async_call("hassio", "host_reboot", {})
    await hass.async_block_till_done()

    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 25
    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 26

    await hass.services.async_call("hassio", "backup_full", {})
    await hass.services.async_call(
@@ -550,7 +550,7 @@ async def test_service_calls(
    )
    await hass.async_block_till_done()

    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 27
    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 28
    assert aioclient_mock.mock_calls[-1][2] == {
        "name": "2021-11-13 03:48:00",
        "homeassistant": True,
@@ -575,7 +575,7 @@ async def test_service_calls(
    )
    await hass.async_block_till_done()

    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 29
    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 30
    assert aioclient_mock.mock_calls[-1][2] == {
        "addons": ["test"],
        "folders": ["ssl"],
@@ -594,7 +594,7 @@ async def test_service_calls(
    )
    await hass.async_block_till_done()

    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 30
    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 31
    assert aioclient_mock.mock_calls[-1][2] == {
        "name": "backup_name",
        "location": "backup_share",
@@ -610,7 +610,7 @@ async def test_service_calls(
    )
    await hass.async_block_till_done()

    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 31
    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 32
    assert aioclient_mock.mock_calls[-1][2] == {
        "name": "2021-11-13 03:48:00",
        "location": None,
@@ -629,7 +629,7 @@ async def test_service_calls(
    )
    await hass.async_block_till_done()

    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 33
    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 34
    assert aioclient_mock.mock_calls[-1][2] == {
        "name": "2021-11-13 11:48:00",
        "location": None,
@@ -1075,7 +1075,7 @@ async def test_setup_hardware_integration(
    await hass.async_block_till_done(wait_background_tasks=True)

    assert result
    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20
    assert len(mock_setup_entry.mock_calls) == 1
@@ -7,7 +7,7 @@ from http import HTTPStatus
from ipaddress import ip_network
import logging
from pathlib import Path
from unittest.mock import Mock, patch
from unittest.mock import ANY, Mock, patch

import pytest

@@ -667,3 +667,61 @@ async def test_ssl_issue_urls_configured(
        "http",
        "ssl_configured_without_configured_urls",
    ) not in issue_registry.issues


@pytest.mark.parametrize(
    (
        "hassio",
        "http_config",
        "expected_serverhost",
        "expected_issues",
    ),
    [
        (False, {}, ["0.0.0.0", "::"], set()),
        (False, {"server_host": "0.0.0.0"}, ["0.0.0.0"], set()),
        (True, {}, ["0.0.0.0", "::"], set()),
        (
            True,
            {"server_host": "0.0.0.0"},
            [
                "0.0.0.0",
            ],
            {("http", "server_host_may_break_hassio")},
        ),
    ],
)
async def test_server_host(
    hass: HomeAssistant,
    hassio: bool,
    issue_registry: ir.IssueRegistry,
    http_config: dict,
    expected_serverhost: list,
    expected_issues: set[tuple[str, str]],
) -> None:
    """Test server_host behavior."""
    mock_server = Mock()
    with (
        patch("homeassistant.components.http.is_hassio", return_value=hassio),
        patch(
            "asyncio.BaseEventLoop.create_server", return_value=mock_server
        ) as mock_create_server,
    ):
        assert await async_setup_component(
            hass,
            "http",
            {"http": http_config},
        )
        await hass.async_start()
        await hass.async_block_till_done()

    mock_create_server.assert_called_once_with(
        ANY,
        expected_serverhost,
        8123,
        ssl=None,
        backlog=128,
        reuse_address=None,
        reuse_port=None,
    )

    assert set(issue_registry.issues) == expected_issues
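The parametrization encodes the rule under test: with no server_host the server binds dual-stack (0.0.0.0 and ::), an explicit server_host narrows the bind list, and setting one while running under the Supervisor additionally raises the server_host_may_break_hassio repair issue. A condensed sketch of that decision as a plain function; the actual logic is spread through homeassistant/components/http/__init__.py and this helper is illustrative only:

def resolve_bind_hosts(http_config: dict, hassio: bool) -> tuple[list[str], set[str]]:
    """Return (hosts to bind, repair issue ids to create) per the table above."""
    issues: set[str] = set()
    host = http_config.get("server_host")
    if host is None:
        return ["0.0.0.0", "::"], issues  # dual-stack default
    if hassio:
        # An explicit bind address can hide Home Assistant from the
        # Supervisor proxy, hence the repair issue
        issues.add("server_host_may_break_hassio")
    return [host], issues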
@@ -477,7 +477,7 @@
    }),
    'area_id': None,
    'capabilities': dict({
      'state_class': <SensorStateClass.TOTAL: 'total'>,
      'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
    }),
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
@@ -516,7 +516,7 @@
    'attributes': ReadOnlyDict({
      'device_class': 'energy',
      'friendly_name': 'Test air conditioner Energy this month',
      'state_class': <SensorStateClass.TOTAL: 'total'>,
      'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
      'unit_of_measurement': <UnitOfEnergy.WATT_HOUR: 'Wh'>,
    }),
    'context': <ANY>,
@@ -1,283 +0,0 @@
"""Test light trigger."""

import pytest

from homeassistant.components import automation
from homeassistant.const import (
    ATTR_AREA_ID,
    ATTR_DEVICE_ID,
    ATTR_FLOOR_ID,
    ATTR_LABEL_ID,
    CONF_ENTITY_ID,
    CONF_PLATFORM,
    CONF_STATE,
    CONF_TARGET,
    STATE_OFF,
    STATE_ON,
)
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.helpers import (
    area_registry as ar,
    device_registry as dr,
    entity_registry as er,
    floor_registry as fr,
    label_registry as lr,
)
from homeassistant.setup import async_setup_component

from tests.common import MockConfigEntry, mock_device_registry

# remove when #151314 is merged
CONF_OPTIONS = "options"


@pytest.fixture(autouse=True, name="stub_blueprint_populate")
def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None:
    """Stub copying the blueprints to the config folder."""


@pytest.fixture
async def target_lights(hass: HomeAssistant) -> list[str]:
    """Create multiple light entities associated with different targets."""
    await async_setup_component(hass, "light", {})

    config_entry = MockConfigEntry(domain="test")
    config_entry.add_to_hass(hass)

    floor_reg = fr.async_get(hass)
    floor = floor_reg.async_create("Test Floor")

    area_reg = ar.async_get(hass)
    area = area_reg.async_create("Test Area", floor_id=floor.floor_id)

    label_reg = lr.async_get(hass)
    label = label_reg.async_create("Test Label")

    device = dr.DeviceEntry(id="test_device", area_id=area.id, labels={label.label_id})
    mock_device_registry(hass, {device.id: device})

    entity_reg = er.async_get(hass)
    # Light associated with area
    light_area = entity_reg.async_get_or_create(
        domain="light",
        platform="test",
        unique_id="light_area",
        suggested_object_id="area_light",
    )
    entity_reg.async_update_entity(light_area.entity_id, area_id=area.id)

    # Light associated with device
    entity_reg.async_get_or_create(
        domain="light",
        platform="test",
        unique_id="light_device",
        suggested_object_id="device_light",
        device_id=device.id,
    )

    # Light associated with label
    light_label = entity_reg.async_get_or_create(
        domain="light",
        platform="test",
        unique_id="light_label",
        suggested_object_id="label_light",
    )
    entity_reg.async_update_entity(light_label.entity_id, labels={label.label_id})

    # Return all available light entities
    return [
        "light.standalone_light",
        "light.label_light",
        "light.area_light",
        "light.device_light",
    ]


@pytest.mark.usefixtures("target_lights")
@pytest.mark.parametrize(
    ("trigger_target_config", "entity_id"),
    [
        ({CONF_ENTITY_ID: "light.standalone_light"}, "light.standalone_light"),
        ({ATTR_LABEL_ID: "test_label"}, "light.label_light"),
        ({ATTR_AREA_ID: "test_area"}, "light.area_light"),
        ({ATTR_FLOOR_ID: "test_floor"}, "light.area_light"),
        ({ATTR_LABEL_ID: "test_label"}, "light.device_light"),
        ({ATTR_AREA_ID: "test_area"}, "light.device_light"),
        ({ATTR_FLOOR_ID: "test_floor"}, "light.device_light"),
        ({ATTR_DEVICE_ID: "test_device"}, "light.device_light"),
    ],
)
@pytest.mark.parametrize(
    ("state", "reverse_state"), [(STATE_ON, STATE_OFF), (STATE_OFF, STATE_ON)]
)
async def test_light_state_trigger_behavior_any(
    hass: HomeAssistant,
    service_calls: list[ServiceCall],
    trigger_target_config: dict,
    entity_id: str,
    state: str,
    reverse_state: str,
) -> None:
    """Test that the light state trigger fires when any light state changes to a specific state."""
    await async_setup_component(hass, "light", {})

    hass.states.async_set(entity_id, reverse_state)

    await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "trigger": {
                    CONF_PLATFORM: "light.state",
                    CONF_TARGET: {**trigger_target_config},
                    CONF_OPTIONS: {CONF_STATE: state},
                },
                "action": {
                    "service": "test.automation",
                    "data_template": {CONF_ENTITY_ID: f"{entity_id}"},
                },
            }
        },
    )

    hass.states.async_set(entity_id, state)
    await hass.async_block_till_done()
    assert len(service_calls) == 1
    assert service_calls[0].data[CONF_ENTITY_ID] == entity_id
    service_calls.clear()

    hass.states.async_set(entity_id, reverse_state)
    await hass.async_block_till_done()
    assert len(service_calls) == 0


@pytest.mark.parametrize(
    ("trigger_target_config", "entity_id"),
    [
        ({CONF_ENTITY_ID: "light.standalone_light"}, "light.standalone_light"),
        ({ATTR_LABEL_ID: "test_label"}, "light.label_light"),
        ({ATTR_AREA_ID: "test_area"}, "light.area_light"),
        ({ATTR_FLOOR_ID: "test_floor"}, "light.area_light"),
        ({ATTR_LABEL_ID: "test_label"}, "light.device_light"),
        ({ATTR_AREA_ID: "test_area"}, "light.device_light"),
        ({ATTR_FLOOR_ID: "test_floor"}, "light.device_light"),
        ({ATTR_DEVICE_ID: "test_device"}, "light.device_light"),
    ],
)
@pytest.mark.parametrize(
    ("state", "reverse_state"), [(STATE_ON, STATE_OFF), (STATE_OFF, STATE_ON)]
)
async def test_light_state_trigger_behavior_first(
    hass: HomeAssistant,
    service_calls: list[ServiceCall],
    target_lights: list[str],
    trigger_target_config: dict,
    entity_id: str,
    state: str,
    reverse_state: str,
) -> None:
    """Test that the light state trigger fires when the first light changes to a specific state."""
    await async_setup_component(hass, "light", {})

    for other_entity_id in target_lights:
        hass.states.async_set(other_entity_id, reverse_state)
    await hass.async_block_till_done()

    await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "trigger": {
                    CONF_PLATFORM: "light.state",
                    CONF_TARGET: {**trigger_target_config},
                    CONF_OPTIONS: {CONF_STATE: state, "behavior": "first"},
                },
                "action": {
                    "service": "test.automation",
                    "data_template": {CONF_ENTITY_ID: f"{entity_id}"},
                },
            }
        },
    )
    hass.states.async_set(entity_id, state)
    await hass.async_block_till_done()
    assert len(service_calls) == 1
    assert service_calls[0].data[CONF_ENTITY_ID] == entity_id
    service_calls.clear()

    # Triggering other lights should not cause any service calls after the first one
    for other_entity_id in target_lights:
        hass.states.async_set(other_entity_id, state)
    await hass.async_block_till_done()
    for other_entity_id in target_lights:
        hass.states.async_set(other_entity_id, reverse_state)
    await hass.async_block_till_done()
    assert len(service_calls) == 0

    hass.states.async_set(entity_id, state)
    await hass.async_block_till_done()
    assert len(service_calls) == 1
    assert service_calls[0].data[CONF_ENTITY_ID] == entity_id


@pytest.mark.parametrize(
    ("trigger_target_config", "entity_id"),
    [
        ({CONF_ENTITY_ID: "light.standalone_light"}, "light.standalone_light"),
        ({ATTR_LABEL_ID: "test_label"}, "light.label_light"),
        ({ATTR_AREA_ID: "test_area"}, "light.area_light"),
        ({ATTR_FLOOR_ID: "test_floor"}, "light.area_light"),
        ({ATTR_LABEL_ID: "test_label"}, "light.device_light"),
        ({ATTR_AREA_ID: "test_area"}, "light.device_light"),
        ({ATTR_FLOOR_ID: "test_floor"}, "light.device_light"),
        ({ATTR_DEVICE_ID: "test_device"}, "light.device_light"),
    ],
)
@pytest.mark.parametrize(
    ("state", "reverse_state"), [(STATE_ON, STATE_OFF), (STATE_OFF, STATE_ON)]
)
async def test_light_state_trigger_behavior_last(
    hass: HomeAssistant,
    service_calls: list[ServiceCall],
    target_lights: list[str],
    trigger_target_config: dict,
    entity_id: str,
    state: str,
    reverse_state: str,
) -> None:
    """Test that the light state trigger fires when the last light changes to a specific state."""
    await async_setup_component(hass, "light", {})

    for other_entity_id in target_lights:
        hass.states.async_set(other_entity_id, reverse_state)
    await hass.async_block_till_done()

    await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "trigger": {
                    CONF_PLATFORM: "light.state",
                    CONF_TARGET: {**trigger_target_config},
                    CONF_OPTIONS: {CONF_STATE: state, "behavior": "last"},
                },
                "action": {
                    "service": "test.automation",
                    "data_template": {CONF_ENTITY_ID: f"{entity_id}"},
                },
            }
        },
    )

    target_lights.remove(entity_id)
    for other_entity_id in target_lights:
        hass.states.async_set(other_entity_id, state)
    await hass.async_block_till_done()
    assert len(service_calls) == 0

    hass.states.async_set(entity_id, state)
    await hass.async_block_till_done()
    assert len(service_calls) == 1
@@ -134,7 +134,7 @@
    "links": null,
    "chapters": null,
    "performers": null,
    "preview": "https://p.scdn.co/mp3-preview/98deb9c370bbaa350be058b3470fbe3bc1e28d9d?cid=2eb96f9b37494be1824999d58028a305",
    "preview": "https://p.scdn.co/mp3-preview/98deb9c370bbaa350be058b3470fbe3bc1e28d9d",
    "popularity": 77,
    "last_refresh": null
  },
@@ -126,7 +126,7 @@
    'media_position': 232,
    'media_position_updated_at': datetime.datetime(2024, 10, 30, 18, 31, 49, 565951, tzinfo=datetime.timezone.utc),
    'media_title': 'November Rain',
    'repeat': 'all',
    'repeat': <RepeatMode.ALL: 'all'>,
    'shuffle': True,
    'supported_features': <MediaPlayerEntityFeature: 8320959>,
    'volume_level': 0.06,
@@ -23,12 +23,18 @@ from homeassistant.components.climate import (
    HVACAction,
    HVACMode,
)
from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE
from homeassistant.core import HomeAssistant
from homeassistant.components.plugwise.climate import PlugwiseClimateExtraStoredData
from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE, STATE_OFF, STATE_ON
from homeassistant.core import HomeAssistant, State
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import entity_registry as er

from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform
from tests.common import (
    MockConfigEntry,
    async_fire_time_changed,
    mock_restore_cache_with_extra_data,
    snapshot_platform,
)

HA_PLUGWISE_SMILE_ASYNC_UPDATE = (
    "homeassistant.components.plugwise.coordinator.Smile.async_update"
@@ -105,7 +111,9 @@ async def test_adam_climate_entity_climate_changes(
    )
    assert mock_smile_adam.set_schedule_state.call_count == 2
    mock_smile_adam.set_schedule_state.assert_called_with(
        "c50f167537524366a5af7aa3942feb1e", HVACMode.OFF
        "c50f167537524366a5af7aa3942feb1e",
        STATE_OFF,
        "GF7 Woonkamer",
    )

    with pytest.raises(
@@ -138,6 +146,98 @@ async def test_adam_climate_adjust_negative_testing(
    )


@pytest.mark.parametrize("chosen_env", ["m_adam_heating"], indirect=True)
@pytest.mark.parametrize("cooling_present", [False], indirect=True)
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_adam_restore_state_climate(
    hass: HomeAssistant,
    mock_smile_adam_heat_cool: MagicMock,
    mock_config_entry: MockConfigEntry,
    freezer: FrozenDateTimeFactory,
) -> None:
    """Test restore_state for climate with restored schedule."""
    mock_restore_cache_with_extra_data(
        hass,
        [
            (
                State("climate.living_room", "heat"),
                PlugwiseClimateExtraStoredData(
                    last_active_schedule=None,
                    previous_action_mode="heating",
                ).as_dict(),
            ),
            (
                State("climate.bathroom", "heat"),
                PlugwiseClimateExtraStoredData(
                    last_active_schedule="Badkamer",
                    previous_action_mode=None,
                ).as_dict(),
            ),
        ],
    )

    mock_config_entry.add_to_hass(hass)
    await hass.config_entries.async_setup(mock_config_entry.entry_id)
    await hass.async_block_till_done()

    assert (state := hass.states.get("climate.living_room"))
    assert state.state == "heat"

    # Verify a HomeAssistantError is raised setting a schedule with last_active_schedule = None
    with pytest.raises(HomeAssistantError):
        await hass.services.async_call(
            CLIMATE_DOMAIN,
            SERVICE_SET_HVAC_MODE,
            {ATTR_ENTITY_ID: "climate.living_room", ATTR_HVAC_MODE: HVACMode.AUTO},
            blocking=True,
        )

    data = mock_smile_adam_heat_cool.async_update.return_value
    data["f2bf9048bef64cc5b6d5110154e33c81"]["climate_mode"] = "off"
    data["da224107914542988a88561b4452b0f6"]["select_regulation_mode"] = "off"
    with patch(HA_PLUGWISE_SMILE_ASYNC_UPDATE, return_value=data):
        freezer.tick(timedelta(minutes=1))
        async_fire_time_changed(hass)
        await hass.async_block_till_done()

    assert (state := hass.states.get("climate.living_room"))
    assert state.state == "off"

    # Verify restoration of previous_action_mode = heating
    await hass.services.async_call(
        CLIMATE_DOMAIN,
        SERVICE_SET_HVAC_MODE,
        {ATTR_ENTITY_ID: "climate.living_room", ATTR_HVAC_MODE: HVACMode.HEAT},
        blocking=True,
    )
    # Verify set_regulation_mode was called with the restored mode
    mock_smile_adam_heat_cool.set_regulation_mode.assert_called_with(
        "heating",
    )

    data = mock_smile_adam_heat_cool.async_update.return_value
    data["f871b8c4d63549319221e294e4f88074"]["climate_mode"] = "heat"
    with patch(HA_PLUGWISE_SMILE_ASYNC_UPDATE, return_value=data):
        freezer.tick(timedelta(minutes=1))
        async_fire_time_changed(hass)
        await hass.async_block_till_done()

    assert (state := hass.states.get("climate.bathroom"))
    assert state.state == "heat"

    # Verify restoration is used when setting a schedule
    await hass.services.async_call(
        CLIMATE_DOMAIN,
        SERVICE_SET_HVAC_MODE,
        {ATTR_ENTITY_ID: "climate.bathroom", ATTR_HVAC_MODE: HVACMode.AUTO},
        blocking=True,
    )
    # Verify set_schedule_state was called with the restored schedule
    mock_smile_adam_heat_cool.set_schedule_state.assert_called_with(
        "f871b8c4d63549319221e294e4f88074", STATE_ON, "Badkamer"
    )


@pytest.mark.parametrize("chosen_env", ["m_adam_heating"], indirect=True)
@pytest.mark.parametrize("cooling_present", [False], indirect=True)
@pytest.mark.parametrize("platforms", [(CLIMATE_DOMAIN,)])
@@ -173,6 +273,7 @@ async def test_adam_3_climate_entity_attributes(
    ]
    data = mock_smile_adam_heat_cool.async_update.return_value
    data["da224107914542988a88561b4452b0f6"]["select_regulation_mode"] = "heating"
    data["f2bf9048bef64cc5b6d5110154e33c81"]["climate_mode"] = "heat"
    data["f2bf9048bef64cc5b6d5110154e33c81"]["control_state"] = HVACAction.HEATING
    data["056ee145a816487eaa69243c3280f8bf"]["binary_sensors"]["cooling_state"] = False
    data["056ee145a816487eaa69243c3280f8bf"]["binary_sensors"]["heating_state"] = True
@@ -193,6 +294,7 @@ async def test_adam_3_climate_entity_attributes(

    data = mock_smile_adam_heat_cool.async_update.return_value
    data["da224107914542988a88561b4452b0f6"]["select_regulation_mode"] = "cooling"
    data["f2bf9048bef64cc5b6d5110154e33c81"]["climate_mode"] = "cool"
    data["f2bf9048bef64cc5b6d5110154e33c81"]["control_state"] = HVACAction.COOLING
    data["056ee145a816487eaa69243c3280f8bf"]["binary_sensors"]["cooling_state"] = True
    data["056ee145a816487eaa69243c3280f8bf"]["binary_sensors"]["heating_state"] = False
@@ -334,7 +436,9 @@ async def test_anna_climate_entity_climate_changes(
    )
    assert mock_smile_anna.set_schedule_state.call_count == 1
    mock_smile_anna.set_schedule_state.assert_called_with(
        "c784ee9fdab44e1395b8dee7d7a497d5", HVACMode.OFF
        "c784ee9fdab44e1395b8dee7d7a497d5",
        STATE_OFF,
        "standaard",
    )

    # Mock user deleting last schedule from app or browser
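The restore test round-trips PlugwiseClimateExtraStoredData through as_dict(), so the class is effectively a small ExtraStoredData carrying the last active schedule and the previous regulation mode across restarts. A sketch of a shape consistent with the test; the field names come from the test itself, while the dataclass layout is an assumption about the real class in homeassistant/components/plugwise/climate.py:

from dataclasses import asdict, dataclass
from typing import Any

from homeassistant.helpers.restore_state import ExtraStoredData


@dataclass
class PlugwiseClimateExtraStoredData(ExtraStoredData):
    """Schedule and regulation mode preserved across restarts."""

    last_active_schedule: str | None = None
    previous_action_mode: str | None = None

    def as_dict(self) -> dict[str, Any]:
        """Return a dict representation for the restore-state store."""
        return asdict(self)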
@@ -75,6 +75,19 @@ def mock_config_entry(device_info: dict[str, Any]) -> MockConfigEntry:
    )


@pytest.fixture
def mock_config_entry_v1_1(device_info: dict[str, Any]) -> MockConfigEntry:
    """Return a mocked config entry for migration testing with version 1.1."""
    return MockConfigEntry(
        title="Pool Device",
        domain=DOMAIN,
        data={CONF_HOST: "192.168.1.100"},
        unique_id=device_info["SERIAL_NUMBER"],
        version=1,
        minor_version=1,
    )


async def init_integration(
    hass: HomeAssistant, mock_config_entry: MockConfigEntry
) -> MockConfigEntry:
@@ -12,6 +12,14 @@
    "value": 718,
    "unit": "mV"
  },
  "cl": {
    "value": 1.2,
    "unit": "ppm"
  },
  "flow_rate": {
    "value": 150,
    "unit": "L/s"
  },
  "ph_type_dosing": {
    "value": "alcalyne",
    "unit": null
@@ -20,7 +28,7 @@
    "value": "proportional",
    "unit": null
  },
  "ofa_ph_value": {
  "ofa_ph_time": {
    "value": 0,
    "unit": "min"
  },
@@ -32,7 +40,7 @@
    "value": "proportional",
    "unit": null
  },
  "ofa_orp_value": {
  "ofa_orp_time": {
    "value": 0,
    "unit": "min"
  },
@@ -62,25 +70,25 @@
  }
},
"binary_sensor": {
  "pump_running": {
  "pump_alarm": {
    "value": true
  },
  "ph_level_ok": {
  "ph_level_alarm": {
    "value": false
  },
  "orp_level_ok": {
  "orp_level_alarm": {
    "value": false
  },
  "flow_rate_ok": {
  "flow_rate_alarm": {
    "value": false
  },
  "alarm_relay": {
    "value": true
  },
  "relay_aux1_ph": {
  "relay_aux1": {
    "value": false
  },
  "relay_aux2_orpcl": {
  "relay_aux2": {
    "value": false
  }
},
@@ -108,10 +116,10 @@
  }
},
"switch": {
  "stop_pool_dosing": {
  "pause_dosing": {
    "value": false
  },
  "pump_detection": {
  "pump_monitoring": {
    "value": true
  },
  "frequency_input": {
@@ -309,8 +309,8 @@
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'ofa_orp_value',
    'unique_id': 'TEST123456789_ofa_orp_value',
    'translation_key': 'ofa_orp_time',
    'unique_id': 'TEST123456789_ofa_orp_time',
    'unit_of_measurement': <UnitOfTime.MINUTES: 'min'>,
  })
# ---
@@ -699,8 +699,8 @@
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'ofa_ph_value',
    'unique_id': 'TEST123456789_ofa_ph_value',
    'translation_key': 'ofa_ph_time',
    'unique_id': 'TEST123456789_ofa_ph_time',
    'unit_of_measurement': <UnitOfTime.MINUTES: 'min'>,
  })
# ---
@@ -8,7 +8,7 @@ from syrupy.assertion import SnapshotAssertion
from homeassistant.components.pooldose.const import DOMAIN
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers import device_registry as dr, entity_registry as er

from .conftest import RequestStatus

@@ -117,3 +117,66 @@ async def test_setup_entry_timeout_error(
    await hass.async_block_till_done()

    assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY


async def test_migrate_entity_unique_ids(
    hass: HomeAssistant,
    entity_registry: er.EntityRegistry,
    mock_config_entry_v1_1: MockConfigEntry,
) -> None:
    """Test migration of entity unique IDs."""
    mock_config_entry_v1_1.add_to_hass(hass)

    # Create entities with old unique ID format
    entity_registry.async_get_or_create(
        "sensor",
        DOMAIN,
        "TEST123456789_ofa_orp_value",
        config_entry=mock_config_entry_v1_1,
    )
    entity_registry.async_get_or_create(
        "sensor",
        DOMAIN,
        "TEST123456789_ofa_ph_value",
        config_entry=mock_config_entry_v1_1,
    )
    # Create entity with correct unique ID that should not be changed
    unchanged_entity = entity_registry.async_get_or_create(
        "sensor",
        DOMAIN,
        "TEST123456789_orp",
        config_entry=mock_config_entry_v1_1,
    )

    assert mock_config_entry_v1_1.version == 1
    assert mock_config_entry_v1_1.minor_version == 1

    # Setup the integration - this will trigger migration
    await hass.config_entries.async_setup(mock_config_entry_v1_1.entry_id)
    await hass.async_block_till_done()

    # Verify the config entry version was updated from 1.1 to 1.2
    assert mock_config_entry_v1_1.version == 1
    assert mock_config_entry_v1_1.minor_version == 2

    # Verify the entities have been migrated
    assert entity_registry.async_get_entity_id(
        "sensor", DOMAIN, "TEST123456789_ofa_orp_time"
    )
    assert entity_registry.async_get_entity_id(
        "sensor", DOMAIN, "TEST123456789_ofa_ph_time"
    )

    # Verify old unique IDs no longer exist
    assert not entity_registry.async_get_entity_id(
        "sensor", DOMAIN, "TEST123456789_ofa_orp_value"
    )
    assert not entity_registry.async_get_entity_id(
        "sensor", DOMAIN, "TEST123456789_ofa_ph_value"
    )

    # Verify entity that didn't need migration is unchanged
    assert (
        entity_registry.async_get_entity_id("sensor", DOMAIN, "TEST123456789_orp")
        == unchanged_entity.entity_id
    )
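The migration exercised here renames two sensor unique-ID suffixes (_ofa_orp_value -> _ofa_orp_time, _ofa_ph_value -> _ofa_ph_time) and bumps the entry to minor version 2 while leaving other unique IDs untouched. A hedged sketch of an async_migrate_entry that satisfies those assertions; the suffix map comes from the test, the rest is an assumption about the integration's actual code:

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

RENAMED_SUFFIXES = {
    "_ofa_orp_value": "_ofa_orp_time",
    "_ofa_ph_value": "_ofa_ph_time",
}


async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Migrate old sensor unique IDs to the renamed keys."""
    if entry.version == 1 and entry.minor_version == 1:

        def _rename(reg_entry: er.RegistryEntry) -> dict[str, str] | None:
            for old, new in RENAMED_SUFFIXES.items():
                if reg_entry.unique_id.endswith(old):
                    new_id = reg_entry.unique_id.removesuffix(old) + new
                    return {"new_unique_id": new_id}
            return None  # leave other entities untouched

        await er.async_migrate_entries(hass, entry.entry_id, _rename)
        hass.config_entries.async_update_entry(entry, minor_version=2)
    return True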
@@ -338,6 +338,15 @@ async def test_removing_chime(
        "support_ch_uid",
    ),
    [
        (
            f"{TEST_MAC}_firmware",
            f"{TEST_UID}_firmware",
            f"{TEST_MAC}",
            f"{TEST_UID}",
            Platform.UPDATE,
            True,
            False,
        ),
        (
            f"{TEST_MAC}_0_record_audio",
            f"{TEST_UID}_0_record_audio",
@@ -20,6 +20,8 @@ from homeassistant.const import CONF_CODE, CONF_HOST, CONF_NAME, CONF_PORT
MOCK_CONFIG_DATA = {CONF_HOST: "192.168.0.2", CONF_PORT: DEFAULT_PORT}
MOCK_CONFIG_OPTIONS = {CONF_CODE: "1234"}

MOCK_ENTRY_ID = "1234567890"

MOCK_PARTITION_SUBENTRY = ConfigSubentry(
    subentry_type=SUBENTRY_TYPE_PARTITION,
    subentry_id="ID_PARTITION",
@@ -11,6 +11,7 @@ from homeassistant.components.satel_integra.const import DOMAIN
from . import (
    MOCK_CONFIG_DATA,
    MOCK_CONFIG_OPTIONS,
    MOCK_ENTRY_ID,
    MOCK_OUTPUT_SUBENTRY,
    MOCK_PARTITION_SUBENTRY,
    MOCK_SWITCHABLE_OUTPUT_SUBENTRY,
@@ -58,9 +59,9 @@ def mock_config_entry() -> MockConfigEntry:
        title="192.168.0.2",
        data=MOCK_CONFIG_DATA,
        options=MOCK_CONFIG_OPTIONS,
        entry_id="SATEL_INTEGRA_CONFIG_ENTRY_1",
        version=1,
        minor_version=2,
        entry_id=MOCK_ENTRY_ID,
        version=2,
        minor_version=1,
    )
@@ -1,69 +1,192 @@
# serializer version: 1
# name: test_config_flow_migration_version_1_2
  ConfigEntrySnapshot({
# name: test_config_flow_migration_version_1_2[original0-partition_number]
  dict({
    'data': dict({
      'host': '192.168.0.2',
      'port': 7094,
      'arm_home_mode': 1,
      'name': 'Home',
      'partition_number': 1,
    }),
    'disabled_by': None,
    'discovery_keys': dict({
    }),
    'domain': 'satel_integra',
    'entry_id': <ANY>,
    'minor_version': 2,
    'options': dict({
      'code': '1234',
    }),
    'pref_disable_new_entities': False,
    'pref_disable_polling': False,
    'source': 'user',
    'subentries': list([
      dict({
        'data': dict({
          'arm_home_mode': 1,
          'name': 'Home',
          'partition_number': 1,
        }),
        'subentry_id': 'ID_PARTITION',
        'subentry_type': 'partition',
        'title': 'Home (1) (1)',
        'unique_id': 'partition_1',
      }),
      dict({
        'data': dict({
          'name': 'Zone',
          'type': <BinarySensorDeviceClass.MOTION: 'motion'>,
          'zone_number': 1,
        }),
        'subentry_id': 'ID_ZONE',
        'subentry_type': 'zone',
        'title': 'Zone (1) (1)',
        'unique_id': 'zone_1',
      }),
      dict({
        'data': dict({
          'name': 'Output',
          'output_number': 1,
          'type': <BinarySensorDeviceClass.SAFETY: 'safety'>,
        }),
        'subentry_id': 'ID_OUTPUT',
        'subentry_type': 'output',
        'title': 'Output (1) (1)',
        'unique_id': 'output_1',
      }),
      dict({
        'data': dict({
          'name': 'Switchable Output',
          'switchable_output_number': 1,
        }),
        'subentry_id': 'ID_SWITCHABLE_OUTPUT',
        'subentry_type': 'switchable_output',
        'title': 'Switchable Output (1) (1)',
        'unique_id': 'switchable_output_1',
      }),
    ]),
    'title': '192.168.0.2',
    'unique_id': None,
    'version': 1,
    'subentry_id': 'ID_PARTITION',
    'subentry_type': 'partition',
    'title': 'Home (1) (1)',
    'unique_id': 'partition_1',
  })
# ---
# name: test_config_flow_migration_version_1_2[original1-zone_number]
  dict({
    'data': dict({
      'name': 'Zone',
      'type': <BinarySensorDeviceClass.MOTION: 'motion'>,
      'zone_number': 1,
    }),
    'subentry_id': 'ID_ZONE',
    'subentry_type': 'zone',
    'title': 'Zone (1) (1)',
    'unique_id': 'zone_1',
  })
# ---
# name: test_config_flow_migration_version_1_2[original2-output_number]
  dict({
    'data': dict({
      'name': 'Output',
      'output_number': 1,
      'type': <BinarySensorDeviceClass.SAFETY: 'safety'>,
    }),
    'subentry_id': 'ID_OUTPUT',
    'subentry_type': 'output',
    'title': 'Output (1) (1)',
    'unique_id': 'output_1',
  })
# ---
# name: test_config_flow_migration_version_1_2[original3-switchable_output_number]
  dict({
    'data': dict({
      'name': 'Switchable Output',
      'switchable_output_number': 1,
    }),
    'subentry_id': 'ID_SWITCHABLE_OUTPUT',
    'subentry_type': 'switchable_output',
    'title': 'Switchable Output (1) (1)',
    'unique_id': 'switchable_output_1',
  })
# ---
# name: test_unique_id_migration_from_single_config[alarm_control_panel-satel_alarm_panel_1-1234567890_alarm_panel_1]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': None,
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'alarm_control_panel',
    'entity_category': None,
    'entity_id': 'alarm_control_panel.satel_integra_satel_alarm_panel_1',
    'has_entity_name': False,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': None,
    'original_icon': None,
    'original_name': None,
    'platform': 'satel_integra',
    'previous_unique_id': 'satel_alarm_panel_1',
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': None,
    'unique_id': '1234567890_alarm_panel_1',
    'unit_of_measurement': None,
  })
# ---
# name: test_unique_id_migration_from_single_config[binary_sensor-satel_output_1-1234567890_output_1]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': None,
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'binary_sensor',
    'entity_category': None,
    'entity_id': 'binary_sensor.satel_integra_satel_output_1',
    'has_entity_name': False,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': None,
    'original_icon': None,
    'original_name': None,
    'platform': 'satel_integra',
    'previous_unique_id': 'satel_output_1',
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': None,
    'unique_id': '1234567890_output_1',
    'unit_of_measurement': None,
  })
# ---
# name: test_unique_id_migration_from_single_config[binary_sensor-satel_zone_1-1234567890_zone_1]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': None,
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'binary_sensor',
    'entity_category': None,
    'entity_id': 'binary_sensor.satel_integra_satel_zone_1',
    'has_entity_name': False,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': None,
    'original_icon': None,
    'original_name': None,
    'platform': 'satel_integra',
    'previous_unique_id': 'satel_zone_1',
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': None,
    'unique_id': '1234567890_zone_1',
    'unit_of_measurement': None,
  })
# ---
# name: test_unique_id_migration_from_single_config[switch-satel_switch_1-1234567890_switch_1]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': None,
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'switch',
    'entity_category': None,
    'entity_id': 'switch.satel_integra_satel_switch_1',
    'has_entity_name': False,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': None,
    'original_icon': None,
    'original_name': None,
    'platform': 'satel_integra',
    'previous_unique_id': 'satel_switch_1',
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': None,
    'unique_id': '1234567890_switch_1',
    'unit_of_measurement': None,
  })
# ---

@@ -399,7 +399,7 @@ async def test_cannot_create_same_subentry(
    assert len(mock_setup_entry.mock_calls) == 0


async def test_one_config_allowed(
async def test_same_host_config_disallowed(
    hass: HomeAssistant, mock_config_entry: MockConfigEntry
) -> None:
    """Test that only one Satel Integra configuration is allowed."""
@@ -409,5 +409,14 @@ async def test_one_config_allowed(
        DOMAIN, context={"source": SOURCE_USER}
    )

    assert result["type"] is FlowResultType.FORM
    assert result["step_id"] == "user"
    assert result["errors"] == {}

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        MOCK_CONFIG_DATA,
    )

    assert result["type"] is FlowResultType.ABORT
    assert result["reason"] == "single_instance_allowed"
    assert result["reason"] == "already_configured"

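The renamed test encodes a behavior change in the config flow: instead of refusing any second entry with "single_instance_allowed", the flow now only aborts when the submitted host matches an existing entry. A hedged sketch of what the user step could look like, using the stock _async_abort_entries_match helper; the class body and schema below are illustrative, not the integration's actual code:

from typing import Any

import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST, CONF_PORT


class SatelIntegraConfigFlow(ConfigFlow, domain="satel_integra"):
    """Sketch of a user step that rejects duplicate hosts only."""

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        if user_input is not None:
            # Aborts the flow with reason "already_configured" when an
            # existing entry has the same host; other hosts may proceed.
            self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]})
            return self.async_create_entry(
                title=user_input[CONF_HOST], data=user_input
            )
        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema(
                {vol.Required(CONF_HOST): str, vol.Required(CONF_PORT): int}
            ),
        )
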
@@ -3,14 +3,25 @@
from copy import deepcopy
from unittest.mock import AsyncMock

import pytest
from syrupy.assertion import SnapshotAssertion

from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_PANEL_DOMAIN
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN
from homeassistant.components.satel_integra.const import DOMAIN
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.config_entries import ConfigSubentry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_registry import EntityRegistry

from . import (
    CONF_OUTPUT_NUMBER,
    CONF_PARTITION_NUMBER,
    CONF_SWITCHABLE_OUTPUT_NUMBER,
    CONF_ZONE_NUMBER,
    MOCK_CONFIG_DATA,
    MOCK_CONFIG_OPTIONS,
    MOCK_ENTRY_ID,
    MOCK_OUTPUT_SUBENTRY,
    MOCK_PARTITION_SUBENTRY,
    MOCK_SWITCHABLE_OUTPUT_SUBENTRY,
@@ -20,37 +31,94 @@ from . import (
from tests.common import MockConfigEntry


@pytest.mark.parametrize(
    ("original", "number_property"),
    [
        (MOCK_PARTITION_SUBENTRY, CONF_PARTITION_NUMBER),
        (MOCK_ZONE_SUBENTRY, CONF_ZONE_NUMBER),
        (MOCK_OUTPUT_SUBENTRY, CONF_OUTPUT_NUMBER),
        (MOCK_SWITCHABLE_OUTPUT_SUBENTRY, CONF_SWITCHABLE_OUTPUT_NUMBER),
    ],
)
async def test_config_flow_migration_version_1_2(
    hass: HomeAssistant,
    snapshot: SnapshotAssertion,
    mock_satel: AsyncMock,
    original: ConfigSubentry,
    number_property: str,
) -> None:
    """Test that the unique ID is migrated to the new format."""
    """Test that the configured number is added to the subentry title."""

    config_entry = MockConfigEntry(
        domain=DOMAIN,
        title="192.168.0.2",
        data=MOCK_CONFIG_DATA,
        options=MOCK_CONFIG_OPTIONS,
        entry_id="SATEL_INTEGRA_CONFIG_ENTRY_1",
        entry_id=MOCK_ENTRY_ID,
        version=1,
        minor_version=1,
    )
    config_entry.subentries = deepcopy(
        {
            MOCK_PARTITION_SUBENTRY.subentry_id: MOCK_PARTITION_SUBENTRY,
            MOCK_ZONE_SUBENTRY.subentry_id: MOCK_ZONE_SUBENTRY,
            MOCK_OUTPUT_SUBENTRY.subentry_id: MOCK_OUTPUT_SUBENTRY,
            MOCK_SWITCHABLE_OUTPUT_SUBENTRY.subentry_id: MOCK_SWITCHABLE_OUTPUT_SUBENTRY,
        }
    )
    config_entry.subentries = deepcopy({original.subentry_id: original})

    config_entry.add_to_hass(hass)

    await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()

    assert config_entry.version == 1
    assert config_entry.minor_version == 2
    assert config_entry.version == 2
    assert config_entry.minor_version == 1

    assert config_entry == snapshot
    subentry = config_entry.subentries.get(original.subentry_id)
    assert subentry is not None
    assert subentry.title == f"{original.title} ({original.data[number_property]})"

    assert subentry == snapshot

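The major-version bump asserted above implies an async_migrate_entry that rewrites each subentry title to include its configured number. This diff does not show that code; the sketch below assumes async_update_subentry accepts a title keyword and uses a hypothetical NUMBER_KEY_BY_TYPE map to pick the right data key per subentry type:

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

# Hypothetical mapping from subentry type to the data key holding its number.
NUMBER_KEY_BY_TYPE = {
    "partition": "partition_number",
    "zone": "zone_number",
    "output": "output_number",
    "switchable_output": "switchable_output_number",
}


async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Migrate a config entry from version 1 to version 2 (sketch)."""
    if entry.version == 1:
        for subentry in list(entry.subentries.values()):
            number = subentry.data[NUMBER_KEY_BY_TYPE[subentry.subentry_type]]
            # Append the configured number to the existing title.
            hass.config_entries.async_update_subentry(
                entry, subentry, title=f"{subentry.title} ({number})"
            )
        hass.config_entries.async_update_entry(entry, version=2, minor_version=1)
    return True
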
@pytest.mark.parametrize(
    ("platform", "old_id", "new_id"),
    [
        (ALARM_PANEL_DOMAIN, "satel_alarm_panel_1", f"{MOCK_ENTRY_ID}_alarm_panel_1"),
        (BINARY_SENSOR_DOMAIN, "satel_zone_1", f"{MOCK_ENTRY_ID}_zone_1"),
        (BINARY_SENSOR_DOMAIN, "satel_output_1", f"{MOCK_ENTRY_ID}_output_1"),
        (SWITCH_DOMAIN, "satel_switch_1", f"{MOCK_ENTRY_ID}_switch_1"),
    ],
)
async def test_unique_id_migration_from_single_config(
    hass: HomeAssistant,
    snapshot: SnapshotAssertion,
    mock_satel: AsyncMock,
    entity_registry: EntityRegistry,
    platform: str,
    old_id: str,
    new_id: str,
) -> None:
    """Test that the unique ID is migrated to use the config entry id."""

    config_entry = MockConfigEntry(
        domain=DOMAIN,
        title="192.168.0.2",
        data=MOCK_CONFIG_DATA,
        options=MOCK_CONFIG_OPTIONS,
        entry_id=MOCK_ENTRY_ID,
        version=1,
        minor_version=1,
    )
    config_entry.add_to_hass(hass)

    entity = entity_registry.async_get_or_create(
        platform,
        DOMAIN,
        old_id,
        config_entry=config_entry,
    )

    await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()

    entity = entity_registry.async_get(entity.entity_id)

    assert entity is not None
    assert entity.unique_id == new_id

    assert entity == snapshot

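The expected IDs above follow a simple prefix rewrite (e.g. "satel_zone_1" becomes "1234567890_zone_1"), which suggests, though the diff does not show it, a migration built on the stock er.async_migrate_entries helper. A minimal sketch under that assumption:

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er


async def async_migrate_unique_ids(hass: HomeAssistant, entry: ConfigEntry) -> None:
    """Rewrite legacy 'satel_*' unique IDs to '<entry_id>_*' (sketch)."""

    def _migrate(reg_entry: er.RegistryEntry) -> dict[str, str] | None:
        if reg_entry.unique_id.startswith("satel_"):
            suffix = reg_entry.unique_id.removeprefix("satel_")
            return {"new_unique_id": f"{entry.entry_id}_{suffix}"}
        return None  # Nothing to do for IDs already keyed by the entry id.

    await er.async_migrate_entries(hass, entry.entry_id, _migrate)
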
@@ -7,6 +7,8 @@ import json
from typing import Any
from unittest.mock import AsyncMock, MagicMock, patch

from freezegun import freeze_time
from pysmhi.smhi_fire_forecast import SMHIFireForecast, SMHIFirePointForecast
from pysmhi.smhi_forecast import SMHIForecast, SMHIPointForecast
import pytest

@@ -40,6 +42,7 @@ async def patch_platform_constant() -> list[Platform]:
async def load_int(
    hass: HomeAssistant,
    mock_client: SMHIPointForecast,
    mock_fire_client: SMHIFirePointForecast,
    load_platforms: list[Platform],
) -> MockConfigEntry:
    """Set up the SMHI integration."""
@@ -87,6 +90,23 @@ async def get_client(
        yield client


@pytest.fixture(name="mock_fire_client")
async def get_fire_client(
    hass: HomeAssistant,
    get_fire_data: tuple[list[SMHIFireForecast], list[SMHIFireForecast]],
) -> AsyncGenerator[MagicMock]:
    """Mock SMHIFirePointForecast client."""

    with patch(
        "homeassistant.components.smhi.coordinator.SMHIFirePointForecast",
        autospec=True,
    ) as mock_client:
        client = mock_client.return_value
        client.async_get_daily_forecast.return_value = get_fire_data[0]
        client.async_get_hourly_forecast.return_value = get_fire_data[1]
        yield client


@pytest.fixture(name="get_data")
async def get_data_from_library(
    hass: HomeAssistant,
@@ -112,9 +132,44 @@ async def get_data_from_library(
    await client._api._session.close()


@pytest.fixture(name="get_fire_data")
async def get_fire_data_from_library(
    hass: HomeAssistant,
    aioclient_mock: AiohttpClientMocker,
    load_fire_json: dict[str, Any],
) -> AsyncGenerator[tuple[list[SMHIFireForecast], list[SMHIFireForecast]]]:
    """Get data from api."""
    client = SMHIFirePointForecast(
        TEST_CONFIG[CONF_LOCATION][CONF_LONGITUDE],
        TEST_CONFIG[CONF_LOCATION][CONF_LATITUDE],
        aioclient_mock.create_session(hass.loop),
    )
    with (
        freeze_time("2025-10-03"),
        patch.object(
            client._api,
            "async_get_data",
            return_value=load_fire_json,
        ),
    ):
        data_daily = await client.async_get_daily_forecast()
        data_hourly = await client.async_get_hourly_forecast()

    yield (data_daily, data_hourly)
    await client._api._session.close()


@pytest.fixture(name="load_fire_json")
def load_fire_json_from_fixture(
    load_data: tuple[str, str, str, str],
) -> dict[str, Any]:
    """Load fixture with json data and return."""
    return json.loads(load_data[3])


@pytest.fixture(name="load_json")
def load_json_from_fixture(
    load_data: tuple[str, str, str],
    load_data: tuple[str, str, str, str],
    to_load: int,
) -> dict[str, Any]:
    """Load fixture with json data and return."""
@@ -122,12 +177,13 @@ def load_json_from_fixture(


@pytest.fixture(name="load_data", scope="package")
def load_data_from_fixture() -> tuple[str, str, str]:
def load_data_from_fixture() -> tuple[str, str, str, str]:
    """Load fixture with fixture data and return."""
    return (
        load_fixture("smhi.json", "smhi"),
        load_fixture("smhi_night.json", "smhi"),
        load_fixture("smhi_short.json", "smhi"),
        load_fixture("smhi_fire.json", "smhi"),
    )

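Taken together, these fixtures let a sensor test run against canned fire-forecast data: load_int wires both mocked clients into the entry setup, and assertions can then target the fire sensors directly. A minimal usage sketch, with the entity id and expected state taken from the snapshot data later in this diff:

from homeassistant.core import HomeAssistant

from tests.common import MockConfigEntry


async def test_fire_weather_index(
    hass: HomeAssistant,
    load_int: MockConfigEntry,
) -> None:
    """Sketch: a fire sensor is created from the mocked forecast data."""
    state = hass.states.get("sensor.test_fire_weather_index")
    assert state is not None
    assert state.state == "6.6"
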
1365	tests/components/smhi/fixtures/smhi_fire.json (new file)
File diff suppressed because it is too large
@@ -1,4 +1,264 @@
# serializer version: 1
# name: test_sensor_setup[load_platforms0][sensor.test_build_up_index-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': dict({
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
    }),
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'sensor',
    'entity_category': None,
    'entity_id': 'sensor.test_build_up_index',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': None,
    'original_icon': None,
    'original_name': 'Build up index',
    'platform': 'smhi',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'build_up_index',
    'unique_id': '59.32624, 17.84197-build_up_index',
    'unit_of_measurement': None,
  })
# ---
# name: test_sensor_setup[load_platforms0][sensor.test_build_up_index-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'attribution': 'Swedish weather institute (SMHI)',
      'friendly_name': 'Test Build up index',
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
    }),
    'context': <ANY>,
    'entity_id': 'sensor.test_build_up_index',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': '39.0',
  })
# ---
# name: test_sensor_setup[load_platforms0][sensor.test_drought_code-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': dict({
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
    }),
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'sensor',
    'entity_category': None,
    'entity_id': 'sensor.test_drought_code',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': None,
    'original_icon': None,
    'original_name': 'Drought code',
    'platform': 'smhi',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'drought_code',
    'unique_id': '59.32624, 17.84197-drought_code',
    'unit_of_measurement': None,
  })
# ---
# name: test_sensor_setup[load_platforms0][sensor.test_drought_code-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'attribution': 'Swedish weather institute (SMHI)',
      'friendly_name': 'Test Drought code',
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
    }),
    'context': <ANY>,
    'entity_id': 'sensor.test_drought_code',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': '508.2',
  })
# ---
# name: test_sensor_setup[load_platforms0][sensor.test_duff_moisture_code-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': dict({
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
    }),
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'sensor',
    'entity_category': None,
    'entity_id': 'sensor.test_duff_moisture_code',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': None,
    'original_icon': None,
    'original_name': 'Duff moisture code',
    'platform': 'smhi',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'duff_moisture_code',
    'unique_id': '59.32624, 17.84197-duff_moisture_code',
    'unit_of_measurement': None,
  })
# ---
# name: test_sensor_setup[load_platforms0][sensor.test_duff_moisture_code-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'attribution': 'Swedish weather institute (SMHI)',
      'friendly_name': 'Test Duff moisture code',
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
    }),
    'context': <ANY>,
    'entity_id': 'sensor.test_duff_moisture_code',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': '21.6',
  })
# ---
# name: test_sensor_setup[load_platforms0][sensor.test_fine_fuel_moisture_code-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': dict({
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
    }),
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'sensor',
    'entity_category': None,
    'entity_id': 'sensor.test_fine_fuel_moisture_code',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': None,
    'original_icon': None,
    'original_name': 'Fine fuel moisture code',
    'platform': 'smhi',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'fine_fuel_moisture_code',
    'unique_id': '59.32624, 17.84197-fine_fuel_moisture_code',
    'unit_of_measurement': None,
  })
# ---
# name: test_sensor_setup[load_platforms0][sensor.test_fine_fuel_moisture_code-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'attribution': 'Swedish weather institute (SMHI)',
      'friendly_name': 'Test Fine fuel moisture code',
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
    }),
    'context': <ANY>,
    'entity_id': 'sensor.test_fine_fuel_moisture_code',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': '83.2',
  })
# ---
# name: test_sensor_setup[load_platforms0][sensor.test_fire_weather_index-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': dict({
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
    }),
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'sensor',
    'entity_category': None,
    'entity_id': 'sensor.test_fire_weather_index',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': None,
    'original_icon': None,
    'original_name': 'Fire weather index',
    'platform': 'smhi',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'fire_weather_index',
    'unique_id': '59.32624, 17.84197-fire_weather_index',
    'unit_of_measurement': None,
  })
# ---
# name: test_sensor_setup[load_platforms0][sensor.test_fire_weather_index-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'attribution': 'Swedish weather institute (SMHI)',
      'friendly_name': 'Test Fire weather index',
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
    }),
    'context': <ANY>,
    'entity_id': 'sensor.test_fire_weather_index',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': '6.6',
  })
# ---
# name: test_sensor_setup[load_platforms0][sensor.test_frozen_precipitation-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
@@ -49,6 +309,140 @@
    'state': '0',
  })
# ---
# name: test_sensor_setup[load_platforms0][sensor.test_fuel_drying-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': dict({
      'options': list([
        'very_wet',
        'wet',
        'moderate_wet',
        'dry',
        'very_dry',
        'extremely_dry',
      ]),
    }),
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'sensor',
    'entity_category': None,
    'entity_id': 'sensor.test_fuel_drying',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': <SensorDeviceClass.ENUM: 'enum'>,
    'original_icon': None,
    'original_name': 'Fuel drying',
    'platform': 'smhi',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'forestdry',
    'unique_id': '59.32624, 17.84197-forestdry',
    'unit_of_measurement': None,
  })
# ---
# name: test_sensor_setup[load_platforms0][sensor.test_fuel_drying-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'attribution': 'Swedish weather institute (SMHI)',
      'device_class': 'enum',
      'friendly_name': 'Test Fuel drying',
      'options': list([
        'very_wet',
        'wet',
        'moderate_wet',
        'dry',
        'very_dry',
        'extremely_dry',
      ]),
    }),
    'context': <ANY>,
    'entity_id': 'sensor.test_fuel_drying',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': 'moderate_wet',
  })
# ---
# name: test_sensor_setup[load_platforms0][sensor.test_fwi_index-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': dict({
      'options': list([
        'very_low',
        'low',
        'moderate',
        'high',
        'very_high',
        'extreme',
      ]),
    }),
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'sensor',
    'entity_category': None,
    'entity_id': 'sensor.test_fwi_index',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': <SensorDeviceClass.ENUM: 'enum'>,
    'original_icon': None,
    'original_name': 'FWI index',
    'platform': 'smhi',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'fwiindex',
    'unique_id': '59.32624, 17.84197-fwiindex',
    'unit_of_measurement': None,
  })
# ---
# name: test_sensor_setup[load_platforms0][sensor.test_fwi_index-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'attribution': 'Swedish weather institute (SMHI)',
      'device_class': 'enum',
      'friendly_name': 'Test FWI index',
      'options': list([
        'very_low',
        'low',
        'moderate',
        'high',
        'very_high',
        'extreme',
      ]),
    }),
    'context': <ANY>,
    'entity_id': 'sensor.test_fwi_index',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': 'low',
  })
# ---
# name: test_sensor_setup[load_platforms0][sensor.test_high_cloud_coverage-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
@@ -99,6 +493,125 @@
    'state': '88',
  })
# ---
# name: test_sensor_setup[load_platforms0][sensor.test_highest_grass_fire_risk-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': dict({
      'options': list([
        'snow_cover',
        'season_over',
        'low',
        'moderate',
        'high',
        'very_high',
      ]),
    }),
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'sensor',
    'entity_category': None,
    'entity_id': 'sensor.test_highest_grass_fire_risk',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': <SensorDeviceClass.ENUM: 'enum'>,
    'original_icon': None,
    'original_name': 'Highest grass fire risk',
    'platform': 'smhi',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'grassfire',
    'unique_id': '59.32624, 17.84197-grassfire',
    'unit_of_measurement': None,
  })
# ---
# name: test_sensor_setup[load_platforms0][sensor.test_highest_grass_fire_risk-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'attribution': 'Swedish weather institute (SMHI)',
      'device_class': 'enum',
      'friendly_name': 'Test Highest grass fire risk',
      'options': list([
        'snow_cover',
        'season_over',
        'low',
        'moderate',
        'high',
        'very_high',
      ]),
    }),
    'context': <ANY>,
    'entity_id': 'sensor.test_highest_grass_fire_risk',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': 'unknown',
  })
# ---
# name: test_sensor_setup[load_platforms0][sensor.test_initial_spread_index-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': dict({
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
    }),
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'sensor',
    'entity_category': None,
    'entity_id': 'sensor.test_initial_spread_index',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': None,
    'original_icon': None,
    'original_name': 'Initial spread index',
    'platform': 'smhi',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'initial_spread_index',
    'unique_id': '59.32624, 17.84197-initial_spread_index',
    'unit_of_measurement': None,
  })
# ---
# name: test_sensor_setup[load_platforms0][sensor.test_initial_spread_index-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'attribution': 'Swedish weather institute (SMHI)',
      'friendly_name': 'Test Initial spread index',
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
    }),
    'context': <ANY>,
    'entity_id': 'sensor.test_initial_spread_index',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': '2.6',
  })
# ---
# name: test_sensor_setup[load_platforms0][sensor.test_low_cloud_coverage-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
@@ -199,6 +712,63 @@
    'state': '88',
  })
# ---
# name: test_sensor_setup[load_platforms0][sensor.test_potential_rate_of_spread-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': dict({
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
    }),
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'sensor',
    'entity_category': None,
    'entity_id': 'sensor.test_potential_rate_of_spread',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
      'sensor': dict({
        'suggested_display_precision': 1,
      }),
    }),
    'original_device_class': <SensorDeviceClass.SPEED: 'speed'>,
    'original_icon': None,
    'original_name': 'Potential rate of spread',
    'platform': 'smhi',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'rate_of_spread',
    'unique_id': '59.32624, 17.84197-rate_of_spread',
    'unit_of_measurement': <UnitOfSpeed.METERS_PER_MINUTE: 'm/min'>,
  })
# ---
# name: test_sensor_setup[load_platforms0][sensor.test_potential_rate_of_spread-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'attribution': 'Swedish weather institute (SMHI)',
      'device_class': 'speed',
      'friendly_name': 'Test Potential rate of spread',
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
      'unit_of_measurement': <UnitOfSpeed.METERS_PER_MINUTE: 'm/min'>,
    }),
    'context': <ANY>,
    'entity_id': 'sensor.test_potential_rate_of_spread',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': '0.0',
  })
# ---
# name: test_sensor_setup[load_platforms0][sensor.test_precipitation_category-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({

@@ -23,6 +23,7 @@ pytestmark = pytest.mark.usefixtures("mock_setup_entry")
async def test_form(
    hass: HomeAssistant,
    mock_client: MagicMock,
    mock_fire_client: MagicMock,
) -> None:
    """Test we get the form and create an entry."""

@@ -87,6 +88,7 @@ async def test_form(
async def test_form_invalid_coordinates(
    hass: HomeAssistant,
    mock_client: MagicMock,
    mock_fire_client: MagicMock,
) -> None:
    """Test we handle invalid coordinates."""
    result = await hass.config_entries.flow.async_init(
@@ -133,6 +135,7 @@ async def test_form_invalid_coordinates(
async def test_form_unique_id_exist(
    hass: HomeAssistant,
    mock_client: MagicMock,
    mock_fire_client: MagicMock,
) -> None:
    """Test we handle a unique id that already exists."""
    entry = MockConfigEntry(
@@ -168,6 +171,7 @@ async def test_form_unique_id_exist(
async def test_reconfigure_flow(
    hass: HomeAssistant,
    mock_client: MagicMock,
    mock_fire_client: MagicMock,
    entity_registry: er.EntityRegistry,
    device_registry: dr.DeviceRegistry,
) -> None:

Some files were not shown because too many files have changed in this diff